mirror of https://github.com/home-assistant/core.git
synced 2025-10-08 11:19:30 +00:00

Compare commits (2 commits): water_hier ... zjs-config

SHA1:
fe35fac8ee
4bccc57b46
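
To reproduce this comparison locally you can use plain git. A minimal sketch, assuming the mirror is cloned and both branches exist on a remote named origin (the remote name is an assumption, not stated on this page):

git fetch origin
git log --oneline origin/water_hier..origin/zjs-config    # the two commits listed above
git diff origin/water_hier...origin/zjs-config            # the per-file changes shown below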

.github/workflows/ci.yaml (vendored, 710 lines changed)
File diff suppressed because it is too large

.github/workflows/codeql.yml (vendored, 4 lines changed)

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

- name: Initialize CodeQL
uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/init@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
uses: github/codeql-action/analyze@3599b3baa15b485a2e49ef411a7a4bb2452e7f93 # v3.30.5
with:
category: "/language:python"

.github/workflows/stale.yml (vendored, 6 lines changed)

@@ -17,7 +17,7 @@ jobs:
# - No PRs marked as no-stale
# - No issues (-1)
- name: 60 days stale PRs policy
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
# - No issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: 90 days stale issues
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
with:
repo-token: ${{ steps.token.outputs.token }}
days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
# - No Issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: Needs more information stale issues policy
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
with:
repo-token: ${{ steps.token.outputs.token }}
only-labels: "needs-more-information"

@@ -555,7 +555,6 @@ homeassistant.components.vacuum.*
homeassistant.components.vallox.*
homeassistant.components.valve.*
homeassistant.components.velbus.*
homeassistant.components.vivotek.*
homeassistant.components.vlc_telnet.*
homeassistant.components.vodafone_station.*
homeassistant.components.volvo.*

CODEOWNERS (generated, 4 lines changed)

@@ -1065,8 +1065,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/nilu/ @hfurubotten
/homeassistant/components/nina/ @DeerMaximum
/tests/components/nina/ @DeerMaximum
/homeassistant/components/nintendo_parental/ @pantherale0
/tests/components/nintendo_parental/ @pantherale0
/homeassistant/components/nissan_leaf/ @filcole
/homeassistant/components/noaa_tides/ @jdelaney72
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
@@ -1198,6 +1196,8 @@ build.json @home-assistant/supervisor
/tests/components/plex/ @jjlawren
/homeassistant/components/plugwise/ @CoMPaTech @bouwew
/tests/components/plugwise/ @CoMPaTech @bouwew
/homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
/tests/components/plum_lightpad/ @ColinHarrington @prystupa
/homeassistant/components/point/ @fredrike
/tests/components/point/ @fredrike
/homeassistant/components/pooldose/ @lmaertin

@@ -635,15 +635,25 @@ async def async_enable_logging(
err_log_path = os.path.abspath(log_file)

if err_log_path:
err_handler = await hass.async_add_executor_job(
_create_log_file, err_log_path, log_rotate_days
)
err_path_exists = os.path.isfile(err_log_path)
err_dir = os.path.dirname(err_log_path)

err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
logger.addHandler(err_handler)
# Check if we can write to the error log if it exists or that
# we can create files in the containing directory if not.
if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
not err_path_exists and os.access(err_dir, os.W_OK)
):
err_handler = await hass.async_add_executor_job(
_create_log_file, err_log_path, log_rotate_days
)

# Save the log file location for access by other components.
hass.data[DATA_LOGGING] = err_log_path
err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
logger.addHandler(err_handler)

# Save the log file location for access by other components.
hass.data[DATA_LOGGING] = err_log_path
else:
_LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)

async_activate_log_queue_handler(hass)

homeassistant/brands/ibm.json (new file, 5 lines)

@@ -0,0 +1,5 @@
{
"domain": "ibm",
"name": "IBM",
"integrations": ["watson_iot", "watson_tts"]
}

@@ -12,13 +12,11 @@ from homeassistant.components.bluetooth import async_get_scanner
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ADDRESS
from homeassistant.core import HomeAssistant
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import CONF_IS_NEW_STYLE_SCALE

SCAN_INTERVAL = timedelta(seconds=15)
UPDATE_DEBOUNCE_TIME = 0.2

_LOGGER = logging.getLogger(__name__)

@@ -40,19 +38,11 @@ class AcaiaCoordinator(DataUpdateCoordinator[None]):
config_entry=entry,
)

debouncer = Debouncer(
hass=hass,
logger=_LOGGER,
cooldown=UPDATE_DEBOUNCE_TIME,
immediate=True,
function=self.async_update_listeners,
)

self._scale = AcaiaScale(
address_or_ble_device=entry.data[CONF_ADDRESS],
name=entry.title,
is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
notify_callback=debouncer.async_schedule_call,
notify_callback=self.async_update_listeners,
scanner=async_get_scanner(hass),
)

@@ -1,9 +1,6 @@
{
"entity": {
"sensor": {
"air_quality": {
"default": "mdi:air-filter"
},
"cloud_ceiling": {
"default": "mdi:weather-fog"
},
@@ -37,6 +34,9 @@
"thunderstorm_probability_night": {
"default": "mdi:weather-lightning"
},
"translation_key": {
"default": "mdi:air-filter"
},
"tree_pollen": {
"default": "mdi:tree-outline"
},

@@ -1,9 +1,7 @@
"""Airgradient Update platform."""

from datetime import timedelta
import logging

from airgradient import AirGradientConnectionError
from propcache.api import cached_property

from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
@@ -15,7 +13,6 @@ from .entity import AirGradientEntity

PARALLEL_UPDATES = 1
SCAN_INTERVAL = timedelta(hours=1)
_LOGGER = logging.getLogger(__name__)

async def async_setup_entry(
@@ -34,7 +31,6 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
"""Representation of Airgradient Update."""

_attr_device_class = UpdateDeviceClass.FIRMWARE
_server_unreachable_logged = False

def __init__(self, coordinator: AirGradientCoordinator) -> None:
"""Initialize the entity."""
@@ -51,27 +47,10 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
"""Return the installed version of the entity."""
return self.coordinator.data.measures.firmware_version

@property
def available(self) -> bool:
"""Return if entity is available."""
return super().available and self._attr_available

async def async_update(self) -> None:
"""Update the entity."""
try:
self._attr_latest_version = (
await self.coordinator.client.get_latest_firmware_version(
self.coordinator.serial_number
)
self._attr_latest_version = (
await self.coordinator.client.get_latest_firmware_version(
self.coordinator.serial_number
)
except AirGradientConnectionError:
self._attr_latest_version = None
self._attr_available = False
if not self._server_unreachable_logged:
_LOGGER.error(
"Unable to connect to AirGradient server to check for updates"
)
self._server_unreachable_logged = True
else:
self._server_unreachable_logged = False
self._attr_available = True
)

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/airos",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["airos==0.5.5"]
"requirements": ["airos==0.5.4"]
}

@@ -6,13 +6,8 @@ import dataclasses
import logging
from typing import Any

from airthings_ble import (
AirthingsBluetoothDeviceData,
AirthingsDevice,
UnsupportedDeviceError,
)
from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice
from bleak import BleakError
from habluetooth import BluetoothServiceInfoBleak
import voluptuous as vol

from homeassistant.components import bluetooth
@@ -32,7 +27,6 @@ SERVICE_UUIDS = [
"b42e4a8e-ade7-11e4-89d3-123b93f75cba",
"b42e1c08-ade7-11e4-89d3-123b93f75cba",
"b42e3882-ade7-11e4-89d3-123b93f75cba",
"b42e90a2-ade7-11e4-89d3-123b93f75cba",
]

@@ -43,7 +37,6 @@ class Discovery:
name: str
discovery_info: BluetoothServiceInfo
device: AirthingsDevice
data: AirthingsBluetoothDeviceData

def get_name(device: AirthingsDevice) -> str:
@@ -51,7 +44,7 @@ def get_name(device: AirthingsDevice) -> str:

name = device.friendly_name()
if identifier := device.identifier:
name += f" ({device.model.value}{identifier})"
name += f" ({identifier})"
return name

@@ -69,8 +62,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
self._discovered_device: Discovery | None = None
self._discovered_devices: dict[str, Discovery] = {}

async def _get_device(
self, data: AirthingsBluetoothDeviceData, discovery_info: BluetoothServiceInfo
async def _get_device_data(
self, discovery_info: BluetoothServiceInfo
) -> AirthingsDevice:
ble_device = bluetooth.async_ble_device_from_address(
self.hass, discovery_info.address
@@ -79,8 +72,10 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.debug("no ble_device in _get_device_data")
raise AirthingsDeviceUpdateError("No ble_device")

airthings = AirthingsBluetoothDeviceData(_LOGGER)

try:
device = await data.update_device(ble_device)
data = await airthings.update_device(ble_device)
except BleakError as err:
_LOGGER.error(
"Error connecting to and getting data from %s: %s",
@@ -88,15 +83,12 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
err,
)
raise AirthingsDeviceUpdateError("Failed getting device data") from err
except UnsupportedDeviceError:
_LOGGER.debug("Skipping unsupported device: %s", discovery_info.name)
raise
except Exception as err:
_LOGGER.error(
"Unknown error occurred from %s: %s", discovery_info.address, err
)
raise
return device
return data

async def async_step_bluetooth(
self, discovery_info: BluetoothServiceInfo
@@ -106,21 +98,17 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
await self.async_set_unique_id(discovery_info.address)
self._abort_if_unique_id_configured()

data = AirthingsBluetoothDeviceData(logger=_LOGGER)

try:
device = await self._get_device(data=data, discovery_info=discovery_info)
device = await self._get_device_data(discovery_info)
except AirthingsDeviceUpdateError:
return self.async_abort(reason="cannot_connect")
except UnsupportedDeviceError:
return self.async_abort(reason="unsupported_device")
except Exception:
_LOGGER.exception("Unknown error occurred")
return self.async_abort(reason="unknown")

name = get_name(device)
self.context["title_placeholders"] = {"name": name}
self._discovered_device = Discovery(name, discovery_info, device, data=data)
self._discovered_device = Discovery(name, discovery_info, device)

return await self.async_step_bluetooth_confirm()

@@ -129,12 +117,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Confirm discovery."""
if user_input is not None:
if (
self._discovered_device is not None
and self._discovered_device.device.firmware.need_firmware_upgrade
):
return self.async_abort(reason="firmware_upgrade_required")

return self.async_create_entry(
title=self.context["title_placeholders"]["name"], data={}
)
@@ -155,9 +137,6 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
self._abort_if_unique_id_configured()
discovery = self._discovered_devices[address]

if discovery.device.firmware.need_firmware_upgrade:
return self.async_abort(reason="firmware_upgrade_required")

self.context["title_placeholders"] = {
"name": discovery.name,
}
@@ -167,47 +146,26 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
return self.async_create_entry(title=discovery.name, data={})

current_addresses = self._async_current_ids(include_ignore=False)
devices: list[BluetoothServiceInfoBleak] = []
for discovery_info in async_discovered_service_info(self.hass):
address = discovery_info.address
if address in current_addresses or address in self._discovered_devices:
continue

if MFCT_ID not in discovery_info.manufacturer_data:
continue
if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids):
_LOGGER.debug(
"Skipping unsupported device: %s (%s)", discovery_info.name, address
)
continue
devices.append(discovery_info)

for discovery_info in devices:
address = discovery_info.address
data = AirthingsBluetoothDeviceData(logger=_LOGGER)
if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids):
continue

try:
device = await self._get_device(data, discovery_info)
device = await self._get_device_data(discovery_info)
except AirthingsDeviceUpdateError:
_LOGGER.error(
"Error connecting to and getting data from %s (%s)",
discovery_info.name,
discovery_info.address,
)
continue
except UnsupportedDeviceError:
_LOGGER.debug(
"Skipping unsupported device: %s (%s)",
discovery_info.name,
discovery_info.address,
)
continue
return self.async_abort(reason="cannot_connect")
except Exception:
_LOGGER.exception("Unknown error occurred")
return self.async_abort(reason="unknown")
name = get_name(device)
_LOGGER.debug("Discovered Airthings device: %s (%s)", name, address)
self._discovered_devices[address] = Discovery(
name, discovery_info, device, data
)
self._discovered_devices[address] = Discovery(name, discovery_info, device)

if not self._discovered_devices:
return self.async_abort(reason="no_devices_found")

@@ -17,10 +17,6 @@
{
"manufacturer_id": 820,
"service_uuid": "b42e3882-ade7-11e4-89d3-123b93f75cba"
},
{
"manufacturer_id": 820,
"service_uuid": "b42e90a2-ade7-11e4-89d3-123b93f75cba"
}
],
"codeowners": ["@vincegio", "@LaStrada"],
@@ -28,5 +24,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/airthings_ble",
"iot_class": "local_polling",
"requirements": ["airthings-ble==1.1.1"]
"requirements": ["airthings-ble==0.9.2"]
}

@@ -20,8 +20,6 @@
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"firmware_upgrade_required": "Your device requires a firmware upgrade. Please use the Airthings app (Android/iOS) to upgrade it.",
"unsupported_device": "Unsupported device",
"unknown": "[%key:common::config_flow::error::unknown%]"
}
},

@@ -18,9 +18,7 @@ from homeassistant.components.binary_sensor import (
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
import homeassistant.helpers.entity_registry as er

from .const import _LOGGER, DOMAIN
from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import async_update_unique_id
@@ -53,47 +51,11 @@ BINARY_SENSORS: Final = (
),
is_supported=lambda device, key: device.sensors.get(key) is not None,
is_available_fn=lambda device, key: (
device.online
and (sensor := device.sensors.get(key)) is not None
and sensor.error is False
device.online and device.sensors[key].error is False
),
),
)

DEPRECATED_BINARY_SENSORS: Final = (
AmazonBinarySensorEntityDescription(
key="bluetooth",
entity_category=EntityCategory.DIAGNOSTIC,
translation_key="bluetooth",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="babyCryDetectionState",
translation_key="baby_cry_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="beepingApplianceDetectionState",
translation_key="beeping_appliance_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="coughDetectionState",
translation_key="cough_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="dogBarkDetectionState",
translation_key="dog_bark_detection",
is_on_fn=lambda device, key: False,
),
AmazonBinarySensorEntityDescription(
key="waterSoundsDetectionState",
translation_key="water_sounds_detection",
is_on_fn=lambda device, key: False,
),
)

async def async_setup_entry(
hass: HomeAssistant,
@@ -104,8 +66,6 @@ async def async_setup_entry(

coordinator = entry.runtime_data

entity_registry = er.async_get(hass)

# Replace unique id for "detectionState" binary sensor
await async_update_unique_id(
hass,
@@ -115,16 +75,6 @@ async def async_setup_entry(
"detectionState",
)

# Clean up deprecated sensors
for sensor_desc in DEPRECATED_BINARY_SENSORS:
for serial_num in coordinator.data:
unique_id = f"{serial_num}-{sensor_desc.key}"
if entity_id := entity_registry.async_get_entity_id(
BINARY_SENSOR_DOMAIN, DOMAIN, unique_id
):
_LOGGER.debug("Removing deprecated entity %s", entity_id)
entity_registry.async_remove(entity_id)

known_devices: set[str] = set()

def _check_device() -> None:

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==6.2.9"]
"requirements": ["aioamazondevices==6.2.7"]
}

@@ -32,9 +32,7 @@ class AmazonSensorEntityDescription(SensorEntityDescription):

native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online
and (sensor := device.sensors.get(key)) is not None
and sensor.error is False
device.online and device.sensors[key].error is False
)

@@ -42,9 +40,9 @@ SENSORS: Final = (
AmazonSensorEntityDescription(
key="temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement_fn=lambda device, key: (
native_unit_of_measurement_fn=lambda device, _key: (
UnitOfTemperature.CELSIUS
if key in device.sensors and device.sensors[key].scale == "CELSIUS"
if device.sensors[_key].scale == "CELSIUS"
else UnitOfTemperature.FAHRENHEIT
),
state_class=SensorStateClass.MEASUREMENT,

@@ -18,11 +18,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import AmazonConfigEntry
from .entity import AmazonEntity
from .utils import (
alexa_api_call,
async_remove_dnd_from_virtual_group,
async_update_unique_id,
)
from .utils import alexa_api_call, async_update_unique_id

PARALLEL_UPDATES = 1

@@ -33,9 +29,7 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):

is_on_fn: Callable[[AmazonDevice], bool]
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
device.online
and (sensor := device.sensors.get(key)) is not None
and sensor.error is False
device.online and device.sensors[key].error is False
)
method: str

@@ -64,9 +58,6 @@ async def async_setup_entry(
hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
)

# Remove DND switch from virtual groups
await async_remove_dnd_from_virtual_group(hass, coordinator)

known_devices: set[str] = set()

def _check_device() -> None:

@@ -4,10 +4,8 @@ from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate

from aioamazondevices.const import SPEAKER_GROUP_FAMILY
from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.entity_registry as er
@@ -63,21 +61,3 @@ async def async_update_unique_id(

# Update the registry with the new unique_id
entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)

async def async_remove_dnd_from_virtual_group(
hass: HomeAssistant,
coordinator: AmazonDevicesCoordinator,
) -> None:
"""Remove entity DND from virtual group."""
entity_registry = er.async_get(hass)

for serial_num in coordinator.data:
unique_id = f"{serial_num}-do_not_disturb"
entity_id = entity_registry.async_get_entity_id(
DOMAIN, SWITCH_DOMAIN, unique_id
)
is_group = coordinator.data[serial_num].device_family == SPEAKER_GROUP_FAMILY
if entity_id and is_group:
entity_registry.async_remove(entity_id)
_LOGGER.debug("Removed DND switch from virtual group %s", entity_id)

@@ -65,31 +65,6 @@ SENSOR_DESCRIPTIONS = [
suggested_display_precision=2,
translation_placeholders={"sensor_name": "BME280"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.HUMIDITY,
key="BME680_humidity",
translation_key="humidity",
native_unit_of_measurement=PERCENTAGE,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "BME680"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.PRESSURE,
key="BME680_pressure",
translation_key="pressure",
native_unit_of_measurement=UnitOfPressure.PA,
suggested_unit_of_measurement=UnitOfPressure.MMHG,
suggested_display_precision=0,
translation_placeholders={"sensor_name": "BME680"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.TEMPERATURE,
key="BME680_temperature",
translation_key="temperature",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
suggested_display_precision=2,
translation_placeholders={"sensor_name": "BME680"},
),
AltruistSensorEntityDescription(
device_class=SensorDeviceClass.PRESSURE,
key="BMP_pressure",

@@ -19,8 +19,9 @@ CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024

NON_THINKING_MODELS = [
"claude-3-5", # Both sonnet and haiku
"claude-3-opus",
"claude-3-haiku",
THINKING_MODELS = [
"claude-3-7-sonnet",
"claude-sonnet-4-0",
"claude-opus-4-0",
"claude-opus-4-1",
]

@@ -51,11 +51,11 @@ from .const import (
DOMAIN,
LOGGER,
MIN_THINKING_BUDGET,
NON_THINKING_MODELS,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_THINKING_BUDGET,
THINKING_MODELS,
)

# Max number of back and forth with the LLM to generate a response
@@ -364,7 +364,7 @@ class AnthropicBaseLLMEntity(Entity):
if tools:
model_args["tools"] = tools
if (
not model.startswith(tuple(NON_THINKING_MODELS))
model.startswith(tuple(THINKING_MODELS))
and thinking_budget >= MIN_THINKING_BUDGET
):
model_args["thinking"] = ThinkingConfigEnabledParam(

@@ -8,5 +8,5 @@
"documentation": "https://www.home-assistant.io/integrations/anthropic",
"integration_type": "service",
"iot_class": "cloud_polling",
"requirements": ["anthropic==0.69.0"]
"requirements": ["anthropic==0.62.0"]
}

@@ -19,8 +19,8 @@
"bleak-retry-connector==4.4.3",
"bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.3",
"dbus-fast==2.44.5",
"habluetooth==5.7.0"
"bluetooth-data-tools==1.28.2",
"dbus-fast==2.44.3",
"habluetooth==5.6.4"
]
}

@@ -15,7 +15,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import ComelitConfigEntry, ComelitVedoSystem
from .utils import DeviceType, new_device_listener

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -30,19 +29,23 @@ async def async_setup_entry(

coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)

def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data["alarm_zones"].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
known_devices: set[int] = set()

config_entry.async_on_unload(
new_device_listener(coordinator, _add_new_entities, "alarm_zones")
)
def _check_device() -> None:
current_devices = set(coordinator.data["alarm_zones"])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitVedoBinarySensorEntity(
coordinator, device, config_entry.entry_id
)
for device in coordinator.data["alarm_zones"].values()
if device.index in new_devices
)

_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))

class ComelitVedoBinarySensorEntity(

@@ -4,7 +4,6 @@ from __future__ import annotations

from asyncio.exceptions import TimeoutError
from collections.abc import Mapping
import re
from typing import Any

from aiocomelit import (
@@ -28,20 +27,25 @@ from .utils import async_client_session
DEFAULT_HOST = "192.168.1.252"
DEFAULT_PIN = "111111"

pin_regex = r"^[0-9]{4,10}$"

USER_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
vol.Required(CONF_TYPE, default=BRIDGE): vol.In(DEVICE_TYPE_LIST),
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
STEP_REAUTH_DATA_SCHEMA = vol.Schema(
{vol.Required(CONF_PIN): cv.matches_regex(pin_regex)}
)
STEP_RECONFIGURE = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.matches_regex(pin_regex),
}
)

@@ -51,9 +55,6 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,

api: ComelitCommonApi

if not re.fullmatch(r"[0-9]{4,10}", data[CONF_PIN]):
raise InvalidPin

session = await async_client_session(hass)
if data.get(CONF_TYPE, BRIDGE) == BRIDGE:
api = ComeliteSerialBridgeApi(
@@ -104,8 +105,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
@@ -147,8 +146,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
@@ -192,8 +189,6 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
@@ -215,7 +210,3 @@ class CannotConnect(HomeAssistantError):

class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth."""

class InvalidPin(HomeAssistantError):
"""Error to indicate an invalid pin."""

@@ -161,7 +161,7 @@ class ComelitSerialBridge(
entry: ComelitConfigEntry,
host: str,
port: int,
pin: str,
pin: int,
session: ClientSession,
) -> None:
"""Initialize the scanner."""
@@ -195,7 +195,7 @@ class ComelitVedoSystem(ComelitBaseCoordinator[AlarmDataObject]):
entry: ComelitConfigEntry,
host: str,
port: int,
pin: str,
pin: int,
session: ClientSession,
) -> None:
"""Initialize the scanner."""

@@ -7,21 +7,14 @@ from typing import Any, cast
from aiocomelit import ComelitSerialBridgeObject
from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON

from homeassistant.components.cover import (
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
CoverDeviceClass,
CoverEntity,
)
from homeassistant.components.cover import CoverDeviceClass, CoverEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.restore_state import RestoreEntity

from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity
from .utils import DeviceType, bridge_api_call, new_device_listener
from .utils import bridge_api_call

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -36,19 +29,21 @@ async def async_setup_entry(

coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)

def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitCoverEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[dev_type].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
known_devices: set[int] = set()

config_entry.async_on_unload(
new_device_listener(coordinator, _add_new_entities, COVER)
)
def _check_device() -> None:
current_devices = set(coordinator.data[COVER])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitCoverEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[COVER].values()
if device.index in new_devices
)

_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))

class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
@@ -67,6 +62,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
super().__init__(coordinator, device, config_entry_entry_id)
# Device doesn't provide a status so we assume UNKNOWN at first startup
self._last_action: int | None = None
self._last_state: str | None = None

def _current_action(self, action: str) -> bool:
"""Return the current cover action."""
@@ -102,6 +98,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
@bridge_api_call
async def _cover_set_state(self, action: int, state: int) -> None:
"""Set desired cover state."""
self._last_state = self.state
await self.coordinator.api.set_device_status(COVER, self._device.index, action)
self.coordinator.data[COVER][self._device.index].status = state
self.async_write_ha_state()
@@ -127,10 +124,5 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):

await super().async_added_to_hass()

if (state := await self.async_get_last_state()) is not None:
if state.state == STATE_CLOSED:
self._last_action = STATE_COVER.index(STATE_CLOSING)
if state.state == STATE_OPEN:
self._last_action = STATE_COVER.index(STATE_OPENING)

self._attr_is_closed = state.state == STATE_CLOSED
if last_state := await self.async_get_last_state():
self._last_state = last_state.state

@@ -12,7 +12,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity
from .utils import DeviceType, bridge_api_call, new_device_listener
from .utils import bridge_api_call

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -27,19 +27,21 @@ async def async_setup_entry(

coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)

def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitLightEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[dev_type].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
known_devices: set[int] = set()

config_entry.async_on_unload(
new_device_listener(coordinator, _add_new_entities, LIGHT)
)
def _check_device() -> None:
current_devices = set(coordinator.data[LIGHT])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitLightEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[LIGHT].values()
if device.index in new_devices
)

_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))

class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity):

@@ -8,5 +8,5 @@
"iot_class": "local_polling",
"loggers": ["aiocomelit"],
"quality_scale": "platinum",
"requirements": ["aiocomelit==1.1.1"]
"requirements": ["aiocomelit==0.12.3"]
}

@@ -20,7 +20,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
from .entity import ComelitBridgeBaseEntity
from .utils import DeviceType, new_device_listener

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -66,22 +65,24 @@ async def async_setup_bridge_entry(

coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)

def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitBridgeSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc
)
for sensor_desc in SENSOR_BRIDGE_TYPES
for device in coordinator.data[dev_type].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
known_devices: set[int] = set()

config_entry.async_on_unload(
new_device_listener(coordinator, _add_new_entities, OTHER)
)
def _check_device() -> None:
current_devices = set(coordinator.data[OTHER])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitBridgeSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc
)
for sensor_desc in SENSOR_BRIDGE_TYPES
for device in coordinator.data[OTHER].values()
if device.index in new_devices
)

_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))

async def async_setup_vedo_entry(
@@ -93,22 +94,24 @@ async def async_setup_vedo_entry(

coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)

def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitVedoSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc
)
for sensor_desc in SENSOR_VEDO_TYPES
for device in coordinator.data["alarm_zones"].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
known_devices: set[int] = set()

config_entry.async_on_unload(
new_device_listener(coordinator, _add_new_entities, "alarm_zones")
)
def _check_device() -> None:
current_devices = set(coordinator.data["alarm_zones"])
new_devices = current_devices - known_devices
if new_devices:
known_devices.update(new_devices)
async_add_entities(
ComelitVedoSensorEntity(
coordinator, device, config_entry.entry_id, sensor_desc
)
for sensor_desc in SENSOR_VEDO_TYPES
for device in coordinator.data["alarm_zones"].values()
if device.index in new_devices
)

_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))

class ComelitBridgeSensorEntity(ComelitBridgeBaseEntity, SensorEntity):

@@ -43,13 +43,11 @@
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"invalid_pin": "The provided PIN is invalid. It must be a 4-10 digit number.",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"invalid_pin": "[%key:component::comelit::config::abort::invalid_pin%]",
"unknown": "[%key:common::config_flow::error::unknown%]"
}
},

@@ -13,7 +13,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import ComelitConfigEntry, ComelitSerialBridge
from .entity import ComelitBridgeBaseEntity
from .utils import DeviceType, bridge_api_call, new_device_listener
from .utils import bridge_api_call

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
@@ -28,20 +28,35 @@ async def async_setup_entry(

coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)

def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
"""Add entities for new monitors."""
entities = [
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[dev_type].values()
if device in new_devices
]
if entities:
async_add_entities(entities)
entities: list[ComelitSwitchEntity] = []
entities.extend(
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[IRRIGATION].values()
)
entities.extend(
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[OTHER].values()
)
async_add_entities(entities)

for dev_type in (IRRIGATION, OTHER):
config_entry.async_on_unload(
new_device_listener(coordinator, _add_new_entities, dev_type)
)
known_devices: dict[str, set[int]] = {
dev_type: set() for dev_type in (IRRIGATION, OTHER)
}

def _check_device() -> None:
for dev_type in (IRRIGATION, OTHER):
current_devices = set(coordinator.data[dev_type])
new_devices = current_devices - known_devices[dev_type]
if new_devices:
known_devices[dev_type].update(new_devices)
async_add_entities(
ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
for device in coordinator.data[dev_type].values()
if device.index in new_devices
)

_check_device()
config_entry.async_on_unload(coordinator.async_add_listener(_check_device))

class ComelitSwitchEntity(ComelitBridgeBaseEntity, SwitchEntity):

@@ -4,11 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
from functools import wraps
from typing import Any, Concatenate

from aiocomelit.api import (
ComelitSerialBridgeObject,
ComelitVedoAreaObject,
ComelitVedoZoneObject,
)
from aiocomelit import ComelitSerialBridgeObject
from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
from aiohttp import ClientSession, CookieJar

@@ -23,11 +19,8 @@ from homeassistant.helpers import (
)

from .const import _LOGGER, DOMAIN
from .coordinator import ComelitBaseCoordinator
from .entity import ComelitBridgeBaseEntity

DeviceType = ComelitSerialBridgeObject | ComelitVedoAreaObject | ComelitVedoZoneObject

async def async_client_session(hass: HomeAssistant) -> ClientSession:
"""Return a new aiohttp session."""
@@ -120,41 +113,3 @@ def bridge_api_call[_T: ComelitBridgeBaseEntity, **_P](
self.coordinator.config_entry.async_start_reauth(self.hass)

return cmd_wrapper

def new_device_listener(
coordinator: ComelitBaseCoordinator,
new_devices_callback: Callable[
[
list[
ComelitSerialBridgeObject
| ComelitVedoAreaObject
| ComelitVedoZoneObject
],
str,
],
None,
],
data_type: str,
) -> Callable[[], None]:
"""Subscribe to coordinator updates to check for new devices."""
known_devices: set[int] = set()

def _check_devices() -> None:
"""Check for new devices and call callback with any new monitors."""
if not coordinator.data:
return

new_devices: list[DeviceType] = []
for _id in coordinator.data[data_type]:
if _id not in known_devices:
known_devices.add(_id)
new_devices.append(coordinator.data[data_type][_id])

if new_devices:
new_devices_callback(new_devices, data_type)

# Check for devices immediately
_check_devices()

return coordinator.async_add_listener(_check_devices)

@@ -45,18 +45,13 @@ from home_assistant_intents import (
)
import yaml

from homeassistant import core
from homeassistant.components.homeassistant.exposed_entities import (
async_listen_entity_updates,
async_should_expose,
)
from homeassistant.const import EVENT_STATE_CHANGED, MATCH_ALL
from homeassistant.core import (
Event,
EventStateChangedData,
HomeAssistant,
State,
callback,
)
from homeassistant.core import Event, callback
from homeassistant.helpers import (
area_registry as ar,
device_registry as dr,
@@ -197,7 +192,7 @@ class IntentCache:

async def async_setup_default_agent(
hass: HomeAssistant,
hass: core.HomeAssistant,
entity_component: EntityComponent[ConversationEntity],
config_intents: dict[str, Any],
) -> None:
@@ -206,13 +201,15 @@ async def async_setup_default_agent(
await entity_component.async_add_entities([agent])
await get_agent_manager(hass).async_setup_default_agent(agent)

@callback
def async_entity_state_listener(event: Event[EventStateChangedData]) -> None:
@core.callback
def async_entity_state_listener(
event: core.Event[core.EventStateChangedData],
) -> None:
"""Set expose flag on new entities."""
async_should_expose(hass, DOMAIN, event.data["entity_id"])

@callback
def async_hass_started(hass: HomeAssistant) -> None:
@core.callback
def async_hass_started(hass: core.HomeAssistant) -> None:
"""Set expose flag on all entities."""
for state in hass.states.async_all():
async_should_expose(hass, DOMAIN, state.entity_id)
@@ -227,7 +224,9 @@ class DefaultAgent(ConversationEntity):
_attr_name = "Home Assistant"
_attr_supported_features = ConversationEntityFeature.CONTROL

def __init__(self, hass: HomeAssistant, config_intents: dict[str, Any]) -> None:
def __init__(
self, hass: core.HomeAssistant, config_intents: dict[str, Any]
) -> None:
"""Initialize the default agent."""
self.hass = hass
self._lang_intents: dict[str, LanguageIntents | object] = {}
@@ -260,7 +259,7 @@ class DefaultAgent(ConversationEntity):
"""Return a list of supported languages."""
return get_languages()

@callback
@core.callback
def _filter_entity_registry_changes(
self, event_data: er.EventEntityRegistryUpdatedData
) -> bool:
@@ -269,12 +268,12 @@ class DefaultAgent(ConversationEntity):
field in event_data["changes"] for field in _ENTITY_REGISTRY_UPDATE_FIELDS
)

@callback
def _filter_state_changes(self, event_data: EventStateChangedData) -> bool:
@core.callback
def _filter_state_changes(self, event_data: core.EventStateChangedData) -> bool:
"""Filter state changed events."""
return not event_data["old_state"] or not event_data["new_state"]

@callback
@core.callback
def _listen_clear_slot_list(self) -> None:
"""Listen for changes that can invalidate slot list."""
assert self._unsub_clear_slot_list is None
@@ -891,7 +890,7 @@ class DefaultAgent(ConversationEntity):
) -> str:
# Get first matched or unmatched state.
# This is available in the response template as "state".
state1: State | None = None
state1: core.State | None = None
if intent_response.matched_states:
state1 = intent_response.matched_states[0]
elif intent_response.unmatched_states:
@@ -1590,7 +1589,7 @@ def _get_unmatched_response(result: RecognizeResult) -> tuple[ErrorKey, dict[str

def _get_match_error_response(
hass: HomeAssistant,
hass: core.HomeAssistant,
match_error: intent.MatchFailedError,
) -> tuple[ErrorKey, dict[str, Any]]:
"""Return key and template arguments for error when target matching fails."""

@@ -23,7 +23,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.util.ssl import client_context_no_verify

from .const import KEY_MAC, TIMEOUT_SEC
from .const import KEY_MAC, TIMEOUT
from .coordinator import DaikinConfigEntry, DaikinCoordinator

_LOGGER = logging.getLogger(__name__)
@@ -42,7 +42,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bo
session = async_get_clientsession(hass)
host = conf[CONF_HOST]
try:
async with asyncio.timeout(TIMEOUT_SEC):
async with asyncio.timeout(TIMEOUT):
device: Appliance = await DaikinFactory(
host,
session,
@@ -53,7 +53,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: DaikinConfigEntry) -> bo
)
_LOGGER.debug("Connection to %s successful", host)
except TimeoutError as err:
_LOGGER.debug("Connection to %s timed out in %s seconds", host, TIMEOUT_SEC)
_LOGGER.debug("Connection to %s timed out in 60 seconds", host)
raise ConfigEntryNotReady from err
except ClientConnectionError as err:
_LOGGER.debug("ClientConnectionError to %s", host)

@@ -20,7 +20,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from homeassistant.util.ssl import client_context_no_verify

from .const import DOMAIN, KEY_MAC, TIMEOUT_SEC
from .const import DOMAIN, KEY_MAC, TIMEOUT

_LOGGER = logging.getLogger(__name__)

@@ -84,7 +84,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
password = None

try:
async with asyncio.timeout(TIMEOUT_SEC):
async with asyncio.timeout(TIMEOUT):
device: Appliance = await DaikinFactory(
host,
async_get_clientsession(self.hass),

@@ -24,4 +24,4 @@ ATTR_STATE_OFF = "off"
KEY_MAC = "mac"
KEY_IP = "ip"

TIMEOUT_SEC = 120
TIMEOUT = 60

@@ -9,7 +9,7 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN, TIMEOUT_SEC
from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

@@ -28,7 +28,7 @@ class DaikinCoordinator(DataUpdateCoordinator[None]):
_LOGGER,
config_entry=entry,
name=device.values.get("name", DOMAIN),
update_interval=timedelta(seconds=TIMEOUT_SEC),
update_interval=timedelta(seconds=60),
)
self.device = device

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/daikin",
"iot_class": "local_polling",
"loggers": ["pydaikin"],
"requirements": ["pydaikin==2.17.1"],
"requirements": ["pydaikin==2.16.0"],
"zeroconf": ["_dkapi._tcp.local."]
}

@@ -17,6 +17,6 @@
"requirements": [
"aiodhcpwatcher==1.2.1",
"aiodiscover==2.7.1",
"cached-ipaddress==1.0.1"
"cached-ipaddress==0.10.0"
]
}

@@ -116,9 +116,6 @@
}
},
"select": {
"active_map": {
"default": "mdi:floor-plan"
},
"water_amount": {
"default": "mdi:water"
},

@@ -2,13 +2,12 @@

from collections.abc import Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any
from typing import Any

from deebot_client.capabilities import CapabilityMap, CapabilitySet, CapabilitySetTypes
from deebot_client.capabilities import CapabilitySetTypes
from deebot_client.device import Device
from deebot_client.events import WorkModeEvent
from deebot_client.events.base import Event
from deebot_client.events.map import CachedMapInfoEvent, MajorMapEvent
from deebot_client.events.water_info import WaterAmountEvent

from homeassistant.components.select import SelectEntity, SelectEntityDescription
@@ -17,11 +16,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import EcovacsConfigEntry
from .entity import (
EcovacsCapabilityEntityDescription,
EcovacsDescriptionEntity,
EcovacsEntity,
)
from .entity import EcovacsCapabilityEntityDescription, EcovacsDescriptionEntity
from .util import get_name_key, get_supported_entities

@@ -71,12 +66,6 @@ async def async_setup_entry(
entities = get_supported_entities(
controller, EcovacsSelectEntity, ENTITY_DESCRIPTIONS
)
entities.extend(
EcovacsActiveMapSelectEntity(device, device.capabilities.map)
for device in controller.devices
if (map_cap := device.capabilities.map)
and isinstance(map_cap.major, CapabilitySet)
)
if entities:
async_add_entities(entities)

@@ -114,76 +103,3 @@ class EcovacsSelectEntity[EventT: Event](
async def async_select_option(self, option: str) -> None:
"""Change the selected option."""
await self._device.execute_command(self._capability.set(option))

class EcovacsActiveMapSelectEntity(
EcovacsEntity[CapabilityMap],
SelectEntity,
):
"""Ecovacs active map select entity."""

entity_description = SelectEntityDescription(
key="active_map",
translation_key="active_map",
entity_category=EntityCategory.CONFIG,
)

def __init__(
self,
device: Device,
capability: CapabilityMap,
**kwargs: Any,
) -> None:
"""Initialize entity."""
super().__init__(device, capability, **kwargs)
self._option_to_id: dict[str, str] = {}
self._id_to_option: dict[str, str] = {}

self._handle_on_cached_map(
device.events.get_last_event(CachedMapInfoEvent)
or CachedMapInfoEvent(set())
)

def _handle_on_cached_map(self, event: CachedMapInfoEvent) -> None:
self._id_to_option.clear()
self._option_to_id.clear()

for map_info in event.maps:
name = map_info.name if map_info.name else map_info.id
self._id_to_option[map_info.id] = name
self._option_to_id[name] = map_info.id

if map_info.using:
self._attr_current_option = name

if self._attr_current_option not in self._option_to_id:
self._attr_current_option = None

# Sort named maps first, then numeric IDs (unnamed maps during building) in ascending order.
self._attr_options = sorted(
self._option_to_id.keys(), key=lambda x: (x.isdigit(), x.lower())
)

async def async_added_to_hass(self) -> None:
"""Set up the event listeners now that hass is ready."""
await super().async_added_to_hass()

async def on_cached_map(event: CachedMapInfoEvent) -> None:
self._handle_on_cached_map(event)
self.async_write_ha_state()

self._subscribe(self._capability.cached_info.event, on_cached_map)

async def on_major_map(event: MajorMapEvent) -> None:
self._attr_current_option = self._id_to_option.get(event.map_id)
self.async_write_ha_state()

self._subscribe(self._capability.major.event, on_major_map)

async def async_select_option(self, option: str) -> None:
"""Change the selected option."""
if TYPE_CHECKING:
assert isinstance(self._capability.major, CapabilitySet)
await self._device.execute_command(
self._capability.major.set(self._option_to_id[option])
)
@@ -178,9 +178,6 @@
|
||||
}
|
||||
},
|
||||
"select": {
|
||||
"active_map": {
|
||||
"name": "Active map"
|
||||
},
|
||||
"water_amount": {
|
||||
"name": "[%key:component::ecovacs::entity::number::water_amount::name%]",
|
||||
"state": {
|
||||
|
@@ -116,10 +116,6 @@ class WaterSourceType(TypedDict):
|
||||
# an EnergyCostSensor will be automatically created
|
||||
stat_cost: str | None
|
||||
|
||||
# An optional statistic_id identifying a device
|
||||
# that includes this device's consumption in its total
|
||||
included_in_stat: str | None
|
||||
|
||||
# Used to generate costs if stat_cost is set to None
|
||||
entity_energy_price: str | None # entity_id of an entity providing price ($/m³)
|
||||
number_energy_price: float | None # Price for energy ($/m³)
|
||||
|
@@ -7,7 +7,7 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyenphase"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pyenphase==2.4.0"],
|
||||
"requirements": ["pyenphase==2.3.0"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"type": "_enphase-envoy._tcp.local."
|
||||
|
@@ -396,7 +396,6 @@ class EnvoyCTSensorEntityDescription(SensorEntityDescription):
|
||||
int | float | str | CtType | CtMeterStatus | CtStatusFlags | CtState | None,
|
||||
]
|
||||
on_phase: str | None
|
||||
cttype: str | None = None
|
||||
|
||||
|
||||
CT_NET_CONSUMPTION_SENSORS = (
|
||||
@@ -410,7 +409,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
suggested_display_precision=3,
|
||||
value_fn=attrgetter("energy_delivered"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="lifetime_net_production",
|
||||
@@ -422,7 +420,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
suggested_display_precision=3,
|
||||
value_fn=attrgetter("energy_received"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="net_consumption",
|
||||
@@ -434,7 +431,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
suggested_display_precision=3,
|
||||
value_fn=attrgetter("active_power"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="frequency",
|
||||
@@ -446,7 +442,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("frequency"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="voltage",
|
||||
@@ -459,7 +454,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("voltage"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="net_ct_current",
|
||||
@@ -472,7 +466,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("current"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="net_ct_powerfactor",
|
||||
@@ -483,7 +476,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("power_factor"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="net_consumption_ct_metering_status",
|
||||
@@ -494,7 +486,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("metering_status"),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="net_consumption_ct_status_flags",
|
||||
@@ -504,7 +495,6 @@ CT_NET_CONSUMPTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags),
|
||||
on_phase=None,
|
||||
cttype=CtType.NET_CONSUMPTION,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -535,7 +525,6 @@ CT_PRODUCTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("frequency"),
|
||||
on_phase=None,
|
||||
cttype=CtType.PRODUCTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="production_ct_voltage",
|
||||
@@ -548,7 +537,6 @@ CT_PRODUCTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("voltage"),
|
||||
on_phase=None,
|
||||
cttype=CtType.PRODUCTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="production_ct_current",
|
||||
@@ -561,7 +549,6 @@ CT_PRODUCTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("current"),
|
||||
on_phase=None,
|
||||
cttype=CtType.PRODUCTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="production_ct_powerfactor",
|
||||
@@ -572,7 +559,6 @@ CT_PRODUCTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("power_factor"),
|
||||
on_phase=None,
|
||||
cttype=CtType.PRODUCTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="production_ct_metering_status",
|
||||
@@ -583,7 +569,6 @@ CT_PRODUCTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("metering_status"),
|
||||
on_phase=None,
|
||||
cttype=CtType.PRODUCTION,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="production_ct_status_flags",
|
||||
@@ -593,7 +578,6 @@ CT_PRODUCTION_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags),
|
||||
on_phase=None,
|
||||
cttype=CtType.PRODUCTION,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -623,7 +607,6 @@ CT_STORAGE_SENSORS = (
|
||||
suggested_display_precision=3,
|
||||
value_fn=attrgetter("energy_delivered"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="lifetime_battery_charged",
|
||||
@@ -635,7 +618,6 @@ CT_STORAGE_SENSORS = (
|
||||
suggested_display_precision=3,
|
||||
value_fn=attrgetter("energy_received"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="battery_discharge",
|
||||
@@ -647,7 +629,6 @@ CT_STORAGE_SENSORS = (
|
||||
suggested_display_precision=3,
|
||||
value_fn=attrgetter("active_power"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="storage_ct_frequency",
|
||||
@@ -659,7 +640,6 @@ CT_STORAGE_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("frequency"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="storage_voltage",
|
||||
@@ -672,7 +652,6 @@ CT_STORAGE_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("voltage"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="storage_ct_current",
|
||||
@@ -685,7 +664,6 @@ CT_STORAGE_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("current"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="storage_ct_powerfactor",
|
||||
@@ -696,7 +674,6 @@ CT_STORAGE_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("power_factor"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="storage_ct_metering_status",
|
||||
@@ -707,7 +684,6 @@ CT_STORAGE_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=attrgetter("metering_status"),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
EnvoyCTSensorEntityDescription(
|
||||
key="storage_ct_status_flags",
|
||||
@@ -717,7 +693,6 @@ CT_STORAGE_SENSORS = (
|
||||
entity_registry_enabled_default=False,
|
||||
value_fn=lambda ct: 0 if ct.status_flags is None else len(ct.status_flags),
|
||||
on_phase=None,
|
||||
cttype=CtType.STORAGE,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -1040,31 +1015,50 @@ async def async_setup_entry(
|
||||
for description in NET_CONSUMPTION_PHASE_SENSORS[use_phase]
|
||||
if phase is not None
|
||||
)
|
||||
# Add Current Transformer entities
|
||||
if envoy_data.ctmeters:
|
||||
# Add net consumption CT entities
|
||||
if ctmeter := envoy_data.ctmeter_consumption:
|
||||
entities.extend(
|
||||
EnvoyCTEntity(coordinator, description)
|
||||
for sensors in (
|
||||
CT_NET_CONSUMPTION_SENSORS,
|
||||
CT_PRODUCTION_SENSORS,
|
||||
CT_STORAGE_SENSORS,
|
||||
)
|
||||
for description in sensors
|
||||
if description.cttype in envoy_data.ctmeters
|
||||
EnvoyConsumptionCTEntity(coordinator, description)
|
||||
for description in CT_NET_CONSUMPTION_SENSORS
|
||||
if ctmeter.measurement_type == CtType.NET_CONSUMPTION
|
||||
)
|
||||
# Add Current Transformer phase entities
|
||||
if ctmeters_phases := envoy_data.ctmeters_phases:
|
||||
# For each net consumption ct phase reported add net consumption entities
|
||||
if phase_data := envoy_data.ctmeter_consumption_phases:
|
||||
entities.extend(
|
||||
EnvoyCTPhaseEntity(coordinator, description)
|
||||
for sensors in (
|
||||
CT_NET_CONSUMPTION_PHASE_SENSORS,
|
||||
CT_PRODUCTION_PHASE_SENSORS,
|
||||
CT_STORAGE_PHASE_SENSORS,
|
||||
)
|
||||
for phase, descriptions in sensors.items()
|
||||
for description in descriptions
|
||||
if (cttype := description.cttype) in ctmeters_phases
|
||||
and phase in ctmeters_phases[cttype]
|
||||
EnvoyConsumptionCTPhaseEntity(coordinator, description)
|
||||
for use_phase, phase in phase_data.items()
|
||||
for description in CT_NET_CONSUMPTION_PHASE_SENSORS[use_phase]
|
||||
if phase.measurement_type == CtType.NET_CONSUMPTION
|
||||
)
|
||||
# Add production CT entities
|
||||
if ctmeter := envoy_data.ctmeter_production:
|
||||
entities.extend(
|
||||
EnvoyProductionCTEntity(coordinator, description)
|
||||
for description in CT_PRODUCTION_SENSORS
|
||||
if ctmeter.measurement_type == CtType.PRODUCTION
|
||||
)
|
||||
# For each production ct phase reported add production ct entities
|
||||
if phase_data := envoy_data.ctmeter_production_phases:
|
||||
entities.extend(
|
||||
EnvoyProductionCTPhaseEntity(coordinator, description)
|
||||
for use_phase, phase in phase_data.items()
|
||||
for description in CT_PRODUCTION_PHASE_SENSORS[use_phase]
|
||||
if phase.measurement_type == CtType.PRODUCTION
|
||||
)
|
||||
# Add storage CT entities
|
||||
if ctmeter := envoy_data.ctmeter_storage:
|
||||
entities.extend(
|
||||
EnvoyStorageCTEntity(coordinator, description)
|
||||
for description in CT_STORAGE_SENSORS
|
||||
if ctmeter.measurement_type == CtType.STORAGE
|
||||
)
|
||||
# For each storage ct phase reported add storage ct entities
|
||||
if phase_data := envoy_data.ctmeter_storage_phases:
|
||||
entities.extend(
|
||||
EnvoyStorageCTPhaseEntity(coordinator, description)
|
||||
for use_phase, phase in phase_data.items()
|
||||
for description in CT_STORAGE_PHASE_SENSORS[use_phase]
|
||||
if phase.measurement_type == CtType.STORAGE
|
||||
)
|
||||
|
||||
if envoy_data.inverters:
|
||||
@@ -1251,8 +1245,8 @@ class EnvoyNetConsumptionPhaseEntity(EnvoySystemSensorEntity):
|
||||
return self.entity_description.value_fn(system_net_consumption)
|
||||
|
||||
|
||||
class EnvoyCTEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy CT entity."""
|
||||
class EnvoyConsumptionCTEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy net consumption CT entity."""
|
||||
|
||||
entity_description: EnvoyCTSensorEntityDescription
|
||||
|
||||
@@ -1261,13 +1255,13 @@ class EnvoyCTEntity(EnvoySystemSensorEntity):
|
||||
self,
|
||||
) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None:
|
||||
"""Return the state of the CT sensor."""
|
||||
if (cttype := self.entity_description.cttype) not in self.data.ctmeters:
|
||||
if (ctmeter := self.data.ctmeter_consumption) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(self.data.ctmeters[cttype])
|
||||
return self.entity_description.value_fn(ctmeter)
|
||||
|
||||
|
||||
class EnvoyCTPhaseEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy CT phase entity."""
|
||||
class EnvoyConsumptionCTPhaseEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy net consumption CT phase entity."""
|
||||
|
||||
entity_description: EnvoyCTSensorEntityDescription
|
||||
|
||||
@@ -1278,14 +1272,78 @@ class EnvoyCTPhaseEntity(EnvoySystemSensorEntity):
|
||||
"""Return the state of the CT phase sensor."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.entity_description.on_phase
|
||||
if (cttype := self.entity_description.cttype) not in self.data.ctmeters_phases:
|
||||
return None
|
||||
if (phase := self.entity_description.on_phase) not in self.data.ctmeters_phases[
|
||||
cttype
|
||||
]:
|
||||
if (ctmeter := self.data.ctmeter_consumption_phases) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(
|
||||
self.data.ctmeters_phases[cttype][phase]
|
||||
ctmeter[self.entity_description.on_phase]
|
||||
)
|
||||
|
||||
|
||||
class EnvoyProductionCTEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy net consumption CT entity."""
|
||||
|
||||
entity_description: EnvoyCTSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(
|
||||
self,
|
||||
) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None:
|
||||
"""Return the state of the CT sensor."""
|
||||
if (ctmeter := self.data.ctmeter_production) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(ctmeter)
|
||||
|
||||
|
||||
class EnvoyProductionCTPhaseEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy net consumption CT phase entity."""
|
||||
|
||||
entity_description: EnvoyCTSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(
|
||||
self,
|
||||
) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None:
|
||||
"""Return the state of the CT phase sensor."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.entity_description.on_phase
|
||||
if (ctmeter := self.data.ctmeter_production_phases) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(
|
||||
ctmeter[self.entity_description.on_phase]
|
||||
)
|
||||
|
||||
|
||||
class EnvoyStorageCTEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy net storage CT entity."""
|
||||
|
||||
entity_description: EnvoyCTSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(
|
||||
self,
|
||||
) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None:
|
||||
"""Return the state of the CT sensor."""
|
||||
if (ctmeter := self.data.ctmeter_storage) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(ctmeter)
|
||||
|
||||
|
||||
class EnvoyStorageCTPhaseEntity(EnvoySystemSensorEntity):
|
||||
"""Envoy net storage CT phase entity."""
|
||||
|
||||
entity_description: EnvoyCTSensorEntityDescription
|
||||
|
||||
@property
|
||||
def native_value(
|
||||
self,
|
||||
) -> int | float | str | CtType | CtMeterStatus | CtStatusFlags | None:
|
||||
"""Return the state of the CT phase sensor."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.entity_description.on_phase
|
||||
if (ctmeter := self.data.ctmeter_storage_phases) is None:
|
||||
return None
|
||||
return self.entity_description.value_fn(
|
||||
ctmeter[self.entity_description.on_phase]
|
||||
)
|
||||
|
||||
|
||||
|
@@ -22,23 +22,19 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import zeroconf
|
||||
from homeassistant.config_entries import (
|
||||
SOURCE_ESPHOME,
|
||||
SOURCE_IGNORE,
|
||||
SOURCE_REAUTH,
|
||||
SOURCE_RECONFIGURE,
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
FlowType,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.data_entry_flow import AbortFlow, FlowResultType
|
||||
from homeassistant.helpers import discovery_flow
|
||||
from homeassistant.data_entry_flow import AbortFlow
|
||||
from homeassistant.helpers.device_registry import format_mac
|
||||
from homeassistant.helpers.service_info.dhcp import DhcpServiceInfo
|
||||
from homeassistant.helpers.service_info.esphome import ESPHomeServiceInfo
|
||||
from homeassistant.helpers.service_info.hassio import HassioServiceInfo
|
||||
from homeassistant.helpers.service_info.mqtt import MqttServiceInfo
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
@@ -79,7 +75,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
def __init__(self) -> None:
|
||||
"""Initialize flow."""
|
||||
self._host: str | None = None
|
||||
self._connected_address: str | None = None
|
||||
self.__name: str | None = None
|
||||
self._port: int | None = None
|
||||
self._password: str | None = None
|
||||
@@ -503,55 +498,18 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
await self.hass.config_entries.async_remove(
|
||||
self._entry_with_name_conflict.entry_id
|
||||
)
|
||||
return await self._async_create_entry()
|
||||
return self._async_create_entry()
|
||||
|
||||
async def _async_create_entry(self) -> ConfigFlowResult:
|
||||
@callback
|
||||
def _async_create_entry(self) -> ConfigFlowResult:
|
||||
"""Create the config entry."""
|
||||
assert self._name is not None
|
||||
assert self._device_info is not None
|
||||
|
||||
# Check if Z-Wave capabilities are present and start discovery flow
|
||||
next_flow_id: str | None = None
|
||||
if self._device_info.zwave_proxy_feature_flags:
|
||||
assert self._connected_address is not None
|
||||
assert self._port is not None
|
||||
|
||||
# Start Z-Wave discovery flow and get the flow ID
|
||||
zwave_result = await self.hass.config_entries.flow.async_init(
|
||||
"zwave_js",
|
||||
context={
|
||||
"source": SOURCE_ESPHOME,
|
||||
"discovery_key": discovery_flow.DiscoveryKey(
|
||||
domain=DOMAIN,
|
||||
key=self._device_info.mac_address,
|
||||
version=1,
|
||||
),
|
||||
},
|
||||
data=ESPHomeServiceInfo(
|
||||
name=self._device_info.name,
|
||||
zwave_home_id=self._device_info.zwave_home_id or None,
|
||||
ip_address=self._connected_address,
|
||||
port=self._port,
|
||||
noise_psk=self._noise_psk,
|
||||
),
|
||||
)
|
||||
if zwave_result["type"] in (
|
||||
FlowResultType.ABORT,
|
||||
FlowResultType.CREATE_ENTRY,
|
||||
):
|
||||
_LOGGER.debug(
|
||||
"Unable to continue created Z-Wave JS config flow: %s", zwave_result
|
||||
)
|
||||
else:
|
||||
next_flow_id = zwave_result["flow_id"]
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self._name,
|
||||
data=self._async_make_config_data(),
|
||||
options={
|
||||
CONF_ALLOW_SERVICE_CALLS: DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS,
|
||||
},
|
||||
next_flow=(FlowType.CONFIG_FLOW, next_flow_id) if next_flow_id else None,
|
||||
)
|
||||
|
||||
@callback
|
||||
@@ -598,7 +556,7 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
if entry.data.get(CONF_DEVICE_NAME) == self._device_name:
|
||||
self._entry_with_name_conflict = entry
|
||||
return await self.async_step_name_conflict()
|
||||
return await self._async_create_entry()
|
||||
return self._async_create_entry()
|
||||
|
||||
async def _async_reauth_validated_connection(self) -> ConfigFlowResult:
|
||||
"""Handle reauth validated connection."""
|
||||
@@ -745,7 +703,6 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
try:
|
||||
await cli.connect()
|
||||
self._device_info = await cli.device_info()
|
||||
self._connected_address = cli.connected_address
|
||||
except InvalidAuthAPIError:
|
||||
return ERROR_INVALID_PASSWORD_AUTH
|
||||
except RequiresEncryptionAPIError:
|
||||
|
@@ -17,9 +17,9 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==41.12.0",
|
||||
"aioesphomeapi==41.11.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.4.0"
|
||||
"bleak-esphome==3.3.0"
|
||||
],
|
||||
"zeroconf": ["_esphomelib._tcp.local."]
|
||||
}
|
||||
|
@@ -67,7 +67,7 @@ def suitable_nextchange_time(device: FritzhomeDevice) -> bool:
|
||||
|
||||
def suitable_temperature(device: FritzhomeDevice) -> bool:
|
||||
"""Check suitablity for temperature sensor."""
|
||||
return bool(device.has_temperature_sensor)
|
||||
return device.has_temperature_sensor and not device.has_thermostat
|
||||
|
||||
|
||||
def entity_category_temperature(device: FritzhomeDevice) -> EntityCategory | None:
|
||||
|
@@ -54,7 +54,7 @@ async def async_setup_entry(
|
||||
except aiohttp.ClientResponseError as err:
|
||||
if 400 <= err.status < 500:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN, translation_key="reauth_required"
|
||||
"OAuth session is not valid, reauth required"
|
||||
) from err
|
||||
raise ConfigEntryNotReady from err
|
||||
except aiohttp.ClientError as err:
|
||||
@@ -76,6 +76,10 @@ async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: GoogleAssistantSDKConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
if not hass.config_entries.async_loaded_entries(DOMAIN):
|
||||
for service_name in hass.services.async_services_for_domain(DOMAIN):
|
||||
hass.services.async_remove(DOMAIN, service_name)
|
||||
|
||||
conversation.async_unset_agent(hass, entry)
|
||||
|
||||
return True
|
||||
|
@@ -26,7 +26,7 @@ from homeassistant.components.media_player import (
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_ENTITY_ID, CONF_ACCESS_TOKEN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import OAuth2Session
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
|
||||
@@ -68,13 +68,7 @@ async def async_send_text_commands(
|
||||
) -> list[CommandResponse]:
|
||||
"""Send text commands to Google Assistant Service."""
|
||||
# There can only be 1 entry (config_flow has single_instance_allowed)
|
||||
entries = hass.config_entries.async_loaded_entries(DOMAIN)
|
||||
if not entries:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_loaded",
|
||||
)
|
||||
entry: GoogleAssistantSDKConfigEntry = entries[0]
|
||||
entry: GoogleAssistantSDKConfigEntry = hass.config_entries.async_entries(DOMAIN)[0]
|
||||
|
||||
session = entry.runtime_data.session
|
||||
try:
|
||||
|
@@ -1,4 +1,4 @@
|
||||
"""Services for the Google Assistant SDK integration."""
|
||||
"""Support for Google Assistant SDK."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
@@ -59,20 +59,14 @@
|
||||
},
|
||||
"media_player": {
|
||||
"name": "Media player entity",
|
||||
"description": "Name(s) of media player entities to play the Google Assistant's audio response on. This does not target the device for the command itself."
|
||||
"description": "Name(s) of media player entities to play response on."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"entry_not_loaded": {
|
||||
"message": "Entry not loaded"
|
||||
},
|
||||
"grpc_error": {
|
||||
"message": "Failed to communicate with Google Assistant"
|
||||
},
|
||||
"reauth_required": {
|
||||
"message": "Credentials are invalid, re-authentication required"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -22,7 +22,6 @@ from homeassistant.exceptions import (
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
|
||||
_UPLOAD_AND_DOWNLOAD_TIMEOUT = 12 * 3600
|
||||
_UPLOAD_MAX_RETRIES = 20
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -151,7 +150,6 @@ class DriveClient:
|
||||
backup_metadata,
|
||||
open_stream,
|
||||
backup.size,
|
||||
max_retries=_UPLOAD_MAX_RETRIES,
|
||||
timeout=ClientTimeout(total=_UPLOAD_AND_DOWNLOAD_TIMEOUT),
|
||||
)
|
||||
_LOGGER.debug(
|
||||
|
@@ -456,7 +456,6 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
"""Initialize the agent."""
|
||||
self.entry = entry
|
||||
self.subentry = subentry
|
||||
self.default_model = default_model
|
||||
self._attr_name = subentry.title
|
||||
self._genai_client = entry.runtime_data
|
||||
self._attr_unique_id = subentry.subentry_id
|
||||
@@ -490,7 +489,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
tools = tools or []
|
||||
tools.append(Tool(google_search=GoogleSearch()))
|
||||
|
||||
model_name = options.get(CONF_CHAT_MODEL, self.default_model)
|
||||
model_name = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
# Avoid INVALID_ARGUMENT Developer instruction is not enabled for <model>
|
||||
supports_system_instruction = (
|
||||
"gemma" not in model_name
|
||||
@@ -621,7 +620,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
|
||||
def create_generate_content_config(self) -> GenerateContentConfig:
|
||||
"""Create the GenerateContentConfig for the LLM."""
|
||||
options = self.subentry.data
|
||||
model = options.get(CONF_CHAT_MODEL, self.default_model)
|
||||
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
thinking_config: ThinkingConfig | None = None
|
||||
if model.startswith("models/gemini-2.5") and not model.endswith(
|
||||
("tts", "image", "image-preview")
|
||||
|
@@ -22,7 +22,6 @@ from google.protobuf import timestamp_pb2
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -92,16 +91,6 @@ def convert_time(time_str: str) -> timestamp_pb2.Timestamp | None:
|
||||
return timestamp
|
||||
|
||||
|
||||
SENSOR_DESCRIPTIONS = [
|
||||
SensorEntityDescription(
|
||||
key="duration",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.DURATION,
|
||||
native_unit_of_measurement=UnitOfTime.MINUTES,
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
@@ -116,20 +105,20 @@ async def async_setup_entry(
|
||||
client_options = ClientOptions(api_key=api_key)
|
||||
client = RoutesAsyncClient(client_options=client_options)
|
||||
|
||||
sensors = [
|
||||
GoogleTravelTimeSensor(
|
||||
config_entry, name, api_key, origin, destination, client, sensor_description
|
||||
)
|
||||
for sensor_description in SENSOR_DESCRIPTIONS
|
||||
]
|
||||
sensor = GoogleTravelTimeSensor(
|
||||
config_entry, name, api_key, origin, destination, client
|
||||
)
|
||||
|
||||
async_add_entities(sensors, False)
|
||||
async_add_entities([sensor], False)
|
||||
|
||||
|
||||
class GoogleTravelTimeSensor(SensorEntity):
|
||||
"""Representation of a Google travel time sensor."""
|
||||
|
||||
_attr_attribution = ATTRIBUTION
|
||||
_attr_native_unit_of_measurement = UnitOfTime.MINUTES
|
||||
_attr_device_class = SensorDeviceClass.DURATION
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
@@ -139,10 +128,8 @@ class GoogleTravelTimeSensor(SensorEntity):
|
||||
origin: str,
|
||||
destination: str,
|
||||
client: RoutesAsyncClient,
|
||||
sensor_description: SensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self.entity_description = sensor_description
|
||||
self._attr_name = name
|
||||
self._attr_unique_id = config_entry.entry_id
|
||||
self._attr_device_info = DeviceInfo(
|
||||
|
@@ -73,6 +73,7 @@ class HassioAddonSwitch(HassioAddonEntity, SwitchEntity):
|
||||
try:
|
||||
await supervisor_client.addons.start_addon(self._addon_slug)
|
||||
except SupervisorError as err:
|
||||
_LOGGER.error("Failed to start addon %s: %s", self._addon_slug, err)
|
||||
raise HomeAssistantError(err) from err
|
||||
|
||||
await self.coordinator.force_addon_info_data_refresh(self._addon_slug)
|
||||
|
@@ -10,7 +10,7 @@
|
||||
"loggers": ["pyhap"],
|
||||
"requirements": [
|
||||
"HAP-python==5.0.0",
|
||||
"fnv-hash-fast==1.6.0",
|
||||
"fnv-hash-fast==1.5.0",
|
||||
"PyQRCode==1.2.1",
|
||||
"base36==0.1.1"
|
||||
],
|
||||
|
@@ -14,6 +14,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aiohomekit", "commentjson"],
|
||||
"requirements": ["aiohomekit==3.2.20"],
|
||||
"requirements": ["aiohomekit==3.2.19"],
|
||||
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
|
||||
}
|
||||
|
@@ -122,24 +122,11 @@ async def async_setup_entry(
|
||||
coordinators.main.new_zones_callbacks.append(_add_new_zones)
|
||||
|
||||
platform = entity_platform.async_get_current_platform()
|
||||
platform.async_register_entity_service(SERVICE_RESUME, None, "resume")
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_RESUME,
|
||||
None,
|
||||
"resume",
|
||||
entity_device_classes=(BinarySensorDeviceClass.RUNNING,),
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_START_WATERING,
|
||||
SCHEMA_START_WATERING,
|
||||
"start_watering",
|
||||
entity_device_classes=(BinarySensorDeviceClass.RUNNING,),
|
||||
)
|
||||
platform.async_register_entity_service(
|
||||
SERVICE_SUSPEND,
|
||||
SCHEMA_SUSPEND,
|
||||
"suspend",
|
||||
entity_device_classes=(BinarySensorDeviceClass.RUNNING,),
|
||||
SERVICE_START_WATERING, SCHEMA_START_WATERING, "start_watering"
|
||||
)
|
||||
platform.async_register_entity_service(SERVICE_SUSPEND, SCHEMA_SUSPEND, "suspend")
|
||||
|
||||
|
||||
class HydrawiseBinarySensor(HydrawiseEntity, BinarySensorEntity):
|
||||
|
@@ -8,16 +8,13 @@ from idasen_ha import Desk
|
||||
|
||||
from homeassistant.components import bluetooth
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.debounce import Debouncer
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type IdasenDeskConfigEntry = ConfigEntry[IdasenDeskCoordinator]
|
||||
|
||||
UPDATE_DEBOUNCE_TIME = 0.2
|
||||
|
||||
|
||||
class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
|
||||
"""Class to manage updates for the Idasen Desk."""
|
||||
@@ -36,18 +33,9 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
|
||||
hass, _LOGGER, config_entry=config_entry, name=config_entry.title
|
||||
)
|
||||
self.address = address
|
||||
self.desk = Desk(self._async_handle_update)
|
||||
|
||||
self._expected_connected = False
|
||||
self._height: int | None = None
|
||||
|
||||
self._debouncer = Debouncer(
|
||||
hass=self.hass,
|
||||
logger=_LOGGER,
|
||||
cooldown=UPDATE_DEBOUNCE_TIME,
|
||||
immediate=True,
|
||||
function=callback(lambda: self.async_set_updated_data(self._height)),
|
||||
)
|
||||
self.desk = Desk(self.async_set_updated_data)
|
||||
|
||||
async def async_connect(self) -> bool:
|
||||
"""Connect to desk."""
|
||||
@@ -72,9 +60,3 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
|
||||
"""Ensure that the desk is connected if that is the expected state."""
|
||||
if self._expected_connected:
|
||||
await self.async_connect()
|
||||
|
||||
@callback
|
||||
def _async_handle_update(self, height: int | None) -> None:
|
||||
"""Handle an update from the desk."""
|
||||
self._height = height
|
||||
self._debouncer.async_schedule_call()
|
||||
|
@@ -147,9 +147,8 @@ class KrakenData:
|
||||
|
||||
def _get_websocket_name_asset_pairs(self) -> str:
|
||||
return ",".join(
|
||||
pair
|
||||
self.tradable_asset_pairs[tracked_pair]
|
||||
for tracked_pair in self._config_entry.options[CONF_TRACKED_ASSET_PAIRS]
|
||||
if (pair := self.tradable_asset_pairs.get(tracked_pair)) is not None
|
||||
)
|
||||
|
||||
def set_update_interval(self, update_interval: int) -> None:
|
||||
|
@@ -156,7 +156,7 @@ async def async_setup_entry(
|
||||
for description in SENSOR_TYPES
|
||||
]
|
||||
)
|
||||
async_add_entities(entities)
|
||||
async_add_entities(entities, True)
|
||||
|
||||
_async_add_kraken_sensors(config_entry.options[CONF_TRACKED_ASSET_PAIRS])
|
||||
|
||||
|
@@ -5,7 +5,7 @@ from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from typing import cast
|
||||
|
||||
from pylamarzocco.const import BackFlushStatus, MachineState, ModelName, WidgetType
|
||||
from pylamarzocco.const import BackFlushStatus, ModelName, WidgetType
|
||||
from pylamarzocco.models import (
|
||||
BackFlush,
|
||||
BaseWidgetOutput,
|
||||
@@ -97,14 +97,7 @@ ENTITIES: tuple[LaMarzoccoSensorEntityDescription, ...] = (
|
||||
).brewing_start_time
|
||||
),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
available_fn=(
|
||||
lambda coordinator: not coordinator.websocket_terminated
|
||||
and cast(
|
||||
MachineStatus,
|
||||
coordinator.device.dashboard.config[WidgetType.CM_MACHINE_STATUS],
|
||||
).status
|
||||
is MachineState.BREWING
|
||||
),
|
||||
available_fn=(lambda coordinator: not coordinator.websocket_terminated),
|
||||
),
|
||||
LaMarzoccoSensorEntityDescription(
|
||||
key="steam_boiler_ready_time",
|
||||
|
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/ld2410_ble",
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"requirements": ["bluetooth-data-tools==1.28.3", "ld2410-ble==0.1.1"]
|
||||
"requirements": ["bluetooth-data-tools==1.28.2", "ld2410-ble==0.1.1"]
|
||||
}
|
||||
|
@@ -35,5 +35,5 @@
|
||||
"dependencies": ["bluetooth_adapters"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/led_ble",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["bluetooth-data-tools==1.28.3", "led-ble==1.1.7"]
|
||||
"requirements": ["bluetooth-data-tools==1.28.2", "led-ble==1.1.7"]
|
||||
}
|
||||
|
@@ -7,6 +7,6 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["letpot"],
|
||||
"quality_scale": "silver",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["letpot==0.6.2"]
|
||||
}
|
||||
|
@@ -41,10 +41,7 @@ rules:
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable:
|
||||
status: done
|
||||
comment: |
|
||||
Logging handled by library when (un)available once (push) or coordinator (pull).
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
@@ -196,11 +196,11 @@ class LocalTodoListEntity(TodoListEntity):
|
||||
item_idx: dict[str, int] = {itm.uid: idx for idx, itm in enumerate(todos)}
|
||||
if uid not in item_idx:
|
||||
raise HomeAssistantError(
|
||||
f"Item '{uid}' not found in todo list {self.entity_id}"
|
||||
"Item '{uid}' not found in todo list {self.entity_id}"
|
||||
)
|
||||
if previous_uid and previous_uid not in item_idx:
|
||||
raise HomeAssistantError(
|
||||
f"Item '{previous_uid}' not found in todo list {self.entity_id}"
|
||||
"Item '{previous_uid}' not found in todo list {self.entity_id}"
|
||||
)
|
||||
dst_idx = item_idx[previous_uid] + 1 if previous_uid else 0
|
||||
src_idx = item_idx[uid]
|
||||
|
@@ -88,17 +88,6 @@ DISCOVERY_SCHEMAS = [
|
||||
entity_class=MatterBinarySensor,
|
||||
required_attributes=(clusters.OccupancySensing.Attributes.Occupancy,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.BINARY_SENSOR,
|
||||
entity_description=MatterBinarySensorEntityDescription(
|
||||
key="ThermostatOccupancySensor",
|
||||
device_class=BinarySensorDeviceClass.OCCUPANCY,
|
||||
# The first bit = if occupied
|
||||
device_to_ha=lambda x: (x & 1 == 1) if x is not None else None,
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
required_attributes=(clusters.Thermostat.Attributes.Occupancy,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.BINARY_SENSOR,
|
||||
entity_description=MatterBinarySensorEntityDescription(
|
||||
|
@@ -146,13 +146,6 @@
|
||||
"off": "mdi:lock-off"
|
||||
}
|
||||
},
|
||||
"speaker_mute": {
|
||||
"default": "mdi:volume-high",
|
||||
"state": {
|
||||
"on": "mdi:volume-mute",
|
||||
"off": "mdi:volume-high"
|
||||
}
|
||||
},
|
||||
"evse_charging_switch": {
|
||||
"default": "mdi:ev-station"
|
||||
},
|
||||
|
@@ -176,7 +176,6 @@ DISCOVERY_SCHEMAS = [
|
||||
),
|
||||
entity_class=MatterNumber,
|
||||
required_attributes=(clusters.LevelControl.Attributes.OnLevel,),
|
||||
not_device_type=(device_types.Speaker,),
|
||||
# allow None value to account for 'default' value
|
||||
allow_none_value=True,
|
||||
),
|
||||
|
@@ -152,7 +152,6 @@ PUMP_CONTROL_MODE_MAP = {
|
||||
clusters.PumpConfigurationAndControl.Enums.ControlModeEnum.kUnknownEnumValue: None,
|
||||
}
|
||||
|
||||
HUMIDITY_SCALING_FACTOR = 100
|
||||
TEMPERATURE_SCALING_FACTOR = 100
|
||||
|
||||
|
||||
@@ -309,7 +308,7 @@ DISCOVERY_SCHEMAS = [
|
||||
key="TemperatureSensor",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
device_to_ha=lambda x: x / TEMPERATURE_SCALING_FACTOR,
|
||||
device_to_ha=lambda x: x / 100,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
entity_class=MatterSensor,
|
||||
@@ -345,7 +344,7 @@ DISCOVERY_SCHEMAS = [
|
||||
key="HumiditySensor",
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
device_class=SensorDeviceClass.HUMIDITY,
|
||||
device_to_ha=lambda x: x / HUMIDITY_SCALING_FACTOR,
|
||||
device_to_ha=lambda x: x / 100,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
entity_class=MatterSensor,
|
||||
@@ -1137,7 +1136,7 @@ DISCOVERY_SCHEMAS = [
|
||||
key="ThermostatLocalTemperature",
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
device_to_ha=lambda x: x / TEMPERATURE_SCALING_FACTOR,
|
||||
device_to_ha=lambda x: x / 100,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
entity_class=MatterSensor,
|
||||
|
@@ -514,9 +514,6 @@
|
||||
"power": {
|
||||
"name": "Power"
|
||||
},
|
||||
"speaker_mute": {
|
||||
"name": "Mute"
|
||||
},
|
||||
"child_lock": {
|
||||
"name": "Child lock"
|
||||
},
|
||||
|
@@ -203,6 +203,7 @@ DISCOVERY_SCHEMAS = [
|
||||
device_types.Refrigerator,
|
||||
device_types.RoboticVacuumCleaner,
|
||||
device_types.RoomAirConditioner,
|
||||
device_types.Speaker,
|
||||
),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
@@ -241,24 +242,6 @@ DISCOVERY_SCHEMAS = [
|
||||
device_types.Speaker,
|
||||
),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SWITCH,
|
||||
entity_description=MatterNumericSwitchEntityDescription(
|
||||
key="MatterMuteToggle",
|
||||
translation_key="speaker_mute",
|
||||
device_to_ha={
|
||||
True: False, # True means volume is on, so HA should show mute as off
|
||||
False: True, # False means volume is off (muted), so HA should show mute as on
|
||||
}.get,
|
||||
ha_to_device={
|
||||
False: True, # HA showing mute as off means volume is on, so send True
|
||||
True: False, # HA showing mute as on means volume is off (muted), so send False
|
||||
}.get,
|
||||
),
|
||||
entity_class=MatterNumericSwitch,
|
||||
required_attributes=(clusters.OnOff.Attributes.OnOff,),
|
||||
device_type=(device_types.Speaker,),
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.SWITCH,
|
||||
entity_description=MatterNumericSwitchEntityDescription(
|
||||
|
@@ -3,7 +3,6 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
@@ -24,13 +23,7 @@ from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
|
||||
from . import async_get_config_entry_implementation
|
||||
from .application_credentials import authorization_server_context
|
||||
from .const import (
|
||||
CONF_ACCESS_TOKEN,
|
||||
CONF_AUTHORIZATION_URL,
|
||||
CONF_SCOPE,
|
||||
CONF_TOKEN_URL,
|
||||
DOMAIN,
|
||||
)
|
||||
from .const import CONF_ACCESS_TOKEN, CONF_AUTHORIZATION_URL, CONF_TOKEN_URL, DOMAIN
|
||||
from .coordinator import TokenManager, mcp_client
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -48,17 +41,9 @@ MCP_DISCOVERY_HEADERS = {
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class OAuthConfig:
|
||||
"""Class to hold OAuth configuration."""
|
||||
|
||||
authorization_server: AuthorizationServer
|
||||
scopes: list[str] | None = None
|
||||
|
||||
|
||||
async def async_discover_oauth_config(
|
||||
hass: HomeAssistant, mcp_server_url: str
|
||||
) -> OAuthConfig:
|
||||
) -> AuthorizationServer:
|
||||
"""Discover the OAuth configuration for the MCP server.
|
||||
|
||||
This implements the functionality in the MCP spec for discovery. If the MCP server URL
|
||||
@@ -80,11 +65,9 @@ async def async_discover_oauth_config(
|
||||
except httpx.HTTPStatusError as error:
|
||||
if error.response.status_code == 404:
|
||||
_LOGGER.info("Authorization Server Metadata not found, using default paths")
|
||||
return OAuthConfig(
|
||||
authorization_server=AuthorizationServer(
|
||||
authorize_url=str(parsed_url.with_path("/authorize")),
|
||||
token_url=str(parsed_url.with_path("/token")),
|
||||
)
|
||||
return AuthorizationServer(
|
||||
authorize_url=str(parsed_url.with_path("/authorize")),
|
||||
token_url=str(parsed_url.with_path("/token")),
|
||||
)
|
||||
raise CannotConnect from error
|
||||
except httpx.HTTPError as error:
|
||||
@@ -98,15 +81,9 @@ async def async_discover_oauth_config(
|
||||
authorize_url = str(parsed_url.with_path(authorize_url))
|
||||
if token_url.startswith("/"):
|
||||
token_url = str(parsed_url.with_path(token_url))
|
||||
# We have no way to know the minimum set of scopes needed, so request
|
||||
# all of them and let the user limit during the authorization step.
|
||||
scopes = data.get("scopes_supported")
|
||||
return OAuthConfig(
|
||||
authorization_server=AuthorizationServer(
|
||||
authorize_url=authorize_url,
|
||||
token_url=token_url,
|
||||
),
|
||||
scopes=scopes,
|
||||
return AuthorizationServer(
|
||||
authorize_url=authorize_url,
|
||||
token_url=token_url,
|
||||
)
|
||||
|
||||
|
||||
@@ -153,7 +130,6 @@ class ModelContextProtocolConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
|
||||
"""Initialize the config flow."""
|
||||
super().__init__()
|
||||
self.data: dict[str, Any] = {}
|
||||
self.oauth_config: OAuthConfig | None = None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -194,7 +170,7 @@ class ModelContextProtocolConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
|
||||
to find the OAuth medata then run the OAuth authentication flow.
|
||||
"""
|
||||
try:
|
||||
oauth_config = await async_discover_oauth_config(
|
||||
authorization_server = await async_discover_oauth_config(
|
||||
self.hass, self.data[CONF_URL]
|
||||
)
|
||||
except TimeoutConnectError:
|
||||
@@ -205,13 +181,11 @@ class ModelContextProtocolConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
return self.async_abort(reason="unknown")
|
||||
else:
|
||||
_LOGGER.info("OAuth configuration: %s", oauth_config)
|
||||
self.oauth_config = oauth_config
|
||||
_LOGGER.info("OAuth configuration: %s", authorization_server)
|
||||
self.data.update(
|
||||
{
|
||||
CONF_AUTHORIZATION_URL: oauth_config.authorization_server.authorize_url,
|
||||
CONF_TOKEN_URL: oauth_config.authorization_server.token_url,
|
||||
CONF_SCOPE: oauth_config.scopes,
|
||||
CONF_AUTHORIZATION_URL: authorization_server.authorize_url,
|
||||
CONF_TOKEN_URL: authorization_server.token_url,
|
||||
}
|
||||
)
|
||||
return await self.async_step_credentials_choice()
|
||||
@@ -223,15 +197,6 @@ class ModelContextProtocolConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
|
||||
self.data[CONF_TOKEN_URL],
|
||||
)
|
||||
|
||||
@property
|
||||
def extra_authorize_data(self) -> dict:
|
||||
"""Extra data that needs to be appended to the authorize url."""
|
||||
data = {}
|
||||
if self.data and (scopes := self.data[CONF_SCOPE]) is not None:
|
||||
data[CONF_SCOPE] = " ".join(scopes)
|
||||
data.update(super().extra_authorize_data)
|
||||
return data
|
||||
|
||||
async def async_step_credentials_choice(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
@@ -5,4 +5,3 @@ DOMAIN = "mcp"
|
||||
CONF_ACCESS_TOKEN = "access_token"
|
||||
CONF_AUTHORIZATION_URL = "authorization_url"
|
||||
CONF_TOKEN_URL = "token_url"
|
||||
CONF_SCOPE = "scope"
|
||||
|
@@ -1,16 +1,7 @@
|
||||
"""Model Context Protocol transport protocol for Streamable HTTP and SSE.
|
||||
"""Model Context Protocol transport protocol for Server Sent Events (SSE).
|
||||
|
||||
This registers HTTP endpoints that support the Streamable HTTP protocol as
|
||||
well as the older SSE as a transport layer.
|
||||
|
||||
The Streamable HTTP protocol uses a single HTTP endpoint:
|
||||
|
||||
- /api/mcp_server: The Streamable HTTP endpoint currently implements the
|
||||
stateless protocol for simplicity. This receives client requests and
|
||||
sends them to the MCP server, then waits for a response to send back to
|
||||
the client.
|
||||
|
||||
The older SSE protocol has two HTTP endpoints:
|
||||
This registers HTTP endpoints that supports SSE as a transport layer
|
||||
for the Model Context Protocol. There are two HTTP endpoints:
|
||||
|
||||
- /mcp_server/sse: The SSE endpoint that is used to establish a session
|
||||
with the client and glue to the MCP server. This is used to push responses
|
||||
@@ -23,9 +14,6 @@ The older SSE protocol has two HTTP endpoints:
|
||||
See https://modelcontextprotocol.io/docs/concepts/transports
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
from http import HTTPStatus
|
||||
import logging
|
||||
|
||||
from aiohttp import web
|
||||
@@ -33,14 +21,13 @@ from aiohttp.web_exceptions import HTTPBadRequest, HTTPNotFound
|
||||
from aiohttp_sse import sse_response
|
||||
import anyio
|
||||
from anyio.streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream
|
||||
from mcp import JSONRPCRequest, types
|
||||
from mcp.server import InitializationOptions, Server
|
||||
from mcp import types
|
||||
from mcp.shared.message import SessionMessage
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.components.http import KEY_HASS, HomeAssistantView
|
||||
from homeassistant.const import CONF_LLM_HASS_API
|
||||
from homeassistant.core import Context, HomeAssistant, callback
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import llm
|
||||
|
||||
from .const import DOMAIN
|
||||
@@ -50,14 +37,6 @@ from .types import MCPServerConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Streamable HTTP endpoint
|
||||
STREAMABLE_API = "/api/mcp"
|
||||
TIMEOUT = 60 # Seconds
|
||||
|
||||
# Content types
|
||||
CONTENT_TYPE_JSON = "application/json"
|
||||
|
||||
# Legacy SSE endpoint
|
||||
SSE_API = f"/{DOMAIN}/sse"
|
||||
MESSAGES_API = f"/{DOMAIN}/messages/{{session_id}}"
|
||||
|
||||
@@ -67,7 +46,6 @@ def async_register(hass: HomeAssistant) -> None:
|
||||
"""Register the websocket API."""
|
||||
hass.http.register_view(ModelContextProtocolSSEView())
|
||||
hass.http.register_view(ModelContextProtocolMessagesView())
|
||||
hass.http.register_view(ModelContextProtocolStreamableView())
|
||||
|
||||
|
||||
def async_get_config_entry(hass: HomeAssistant) -> MCPServerConfigEntry:
|
||||
@@ -88,52 +66,6 @@ def async_get_config_entry(hass: HomeAssistant) -> MCPServerConfigEntry:
|
||||
return config_entries[0]
|
||||
|
||||
|
||||
@dataclass
|
||||
class Streams:
|
||||
"""Pairs of streams for MCP server communication."""
|
||||
|
||||
# The MCP server reads from the read stream. The HTTP handler receives
|
||||
# incoming client messages and writes the to the read_stream_writer.
|
||||
read_stream: MemoryObjectReceiveStream[SessionMessage | Exception]
|
||||
read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception]
|
||||
|
||||
# The MCP server writes to the write stream. The HTTP handler reads from
|
||||
# the write stream and sends messages to the client.
|
||||
write_stream: MemoryObjectSendStream[SessionMessage]
|
||||
write_stream_reader: MemoryObjectReceiveStream[SessionMessage]
|
||||
|
||||
|
||||
def create_streams() -> Streams:
|
||||
"""Create a new pair of streams for MCP server communication."""
|
||||
read_stream_writer, read_stream = anyio.create_memory_object_stream(0)
|
||||
write_stream, write_stream_reader = anyio.create_memory_object_stream(0)
|
||||
return Streams(
|
||||
read_stream=read_stream,
|
||||
read_stream_writer=read_stream_writer,
|
||||
write_stream=write_stream,
|
||||
write_stream_reader=write_stream_reader,
|
||||
)
|
||||
|
||||
|
||||
async def create_mcp_server(
|
||||
hass: HomeAssistant, context: Context, entry: MCPServerConfigEntry
|
||||
) -> tuple[Server, InitializationOptions]:
|
||||
"""Initialize the MCP server to ensure it's ready to handle requests."""
|
||||
llm_context = llm.LLMContext(
|
||||
platform=DOMAIN,
|
||||
context=context,
|
||||
language="*",
|
||||
assistant=conversation.DOMAIN,
|
||||
device_id=None,
|
||||
)
|
||||
llm_api_id = entry.data[CONF_LLM_HASS_API]
|
||||
server = await create_server(hass, llm_api_id, llm_context)
|
||||
options = await hass.async_add_executor_job(
|
||||
server.create_initialization_options # Reads package for version info
|
||||
)
|
||||
return server, options
|
||||
|
||||
|
||||
class ModelContextProtocolSSEView(HomeAssistantView):
|
||||
"""Model Context Protocol SSE endpoint."""
|
||||
|
||||
@@ -154,12 +86,30 @@ class ModelContextProtocolSSEView(HomeAssistantView):
|
||||
entry = async_get_config_entry(hass)
|
||||
session_manager = entry.runtime_data
|
||||
|
||||
server, options = await create_mcp_server(hass, self.context(request), entry)
|
||||
streams = create_streams()
|
||||
context = llm.LLMContext(
|
||||
platform=DOMAIN,
|
||||
context=self.context(request),
|
||||
language="*",
|
||||
assistant=conversation.DOMAIN,
|
||||
device_id=None,
|
||||
)
|
||||
llm_api_id = entry.data[CONF_LLM_HASS_API]
|
||||
server = await create_server(hass, llm_api_id, context)
|
||||
options = await hass.async_add_executor_job(
|
||||
server.create_initialization_options # Reads package for version info
|
||||
)
|
||||
|
||||
read_stream: MemoryObjectReceiveStream[SessionMessage | Exception]
|
||||
read_stream_writer: MemoryObjectSendStream[SessionMessage | Exception]
|
||||
read_stream_writer, read_stream = anyio.create_memory_object_stream(0)
|
||||
|
||||
write_stream: MemoryObjectSendStream[SessionMessage]
|
||||
write_stream_reader: MemoryObjectReceiveStream[SessionMessage]
|
||||
write_stream, write_stream_reader = anyio.create_memory_object_stream(0)
|
||||
|
||||
async with (
|
||||
sse_response(request) as response,
|
||||
session_manager.create(Session(streams.read_stream_writer)) as session_id,
|
||||
session_manager.create(Session(read_stream_writer)) as session_id,
|
||||
):
|
||||
session_uri = MESSAGES_API.format(session_id=session_id)
|
||||
_LOGGER.debug("Sending SSE endpoint: %s", session_uri)
|
||||
@@ -167,7 +117,7 @@ class ModelContextProtocolSSEView(HomeAssistantView):
|
||||
|
||||
async def sse_reader() -> None:
|
||||
"""Forward MCP server responses to the client."""
|
||||
async for session_message in streams.write_stream_reader:
|
||||
async for session_message in write_stream_reader:
|
||||
_LOGGER.debug("Sending SSE message: %s", session_message)
|
||||
await response.send(
|
||||
session_message.message.model_dump_json(
|
||||
@@ -178,7 +128,7 @@ class ModelContextProtocolSSEView(HomeAssistantView):
|
||||
|
||||
async with anyio.create_task_group() as tg:
|
||||
tg.start_soon(sse_reader)
|
||||
await server.run(streams.read_stream, streams.write_stream, options)
|
||||
await server.run(read_stream, write_stream, options)
|
||||
|
||||
return response

@@ -218,64 +168,3 @@ class ModelContextProtocolMessagesView(HomeAssistantView):
_LOGGER.debug("Received client message: %s", message)
await session.read_stream_writer.send(SessionMessage(message))
return web.Response(status=200)


class ModelContextProtocolStreamableView(HomeAssistantView):
"""Model Context Protocol Streamable HTTP endpoint."""

name = f"{DOMAIN}:streamable"
url = STREAMABLE_API

async def get(self, request: web.Request) -> web.StreamResponse:
"""Handle unsupported methods."""
return web.Response(
status=HTTPStatus.METHOD_NOT_ALLOWED, text="Only POST method is supported"
)

async def post(self, request: web.Request) -> web.StreamResponse:
"""Process JSON-RPC messages for the Model Context Protocol."""
hass = request.app[KEY_HASS]
entry = async_get_config_entry(hass)

# The request must include a JSON-RPC message
if CONTENT_TYPE_JSON not in request.headers.get("accept", ""):
raise HTTPBadRequest(text=f"Client must accept {CONTENT_TYPE_JSON}")
if request.content_type != CONTENT_TYPE_JSON:
raise HTTPBadRequest(text=f"Content-Type must be {CONTENT_TYPE_JSON}")
try:
json_data = await request.json()
message = types.JSONRPCMessage.model_validate(json_data)
except ValueError as err:
_LOGGER.debug("Failed to parse message as JSON-RPC message: %s", err)
raise HTTPBadRequest(text="Request must be a JSON-RPC message") from err

_LOGGER.debug("Received client message: %s", message)

# For notifications and responses only, return 202 Accepted
if not isinstance(message.root, JSONRPCRequest):
_LOGGER.debug("Notification or response received, returning 202")
return web.Response(status=HTTPStatus.ACCEPTED)

# The MCP server runs as a background task for the duration of the
# request. We open a buffered stream pair to communicate with it. The
# request is sent to the MCP server and we wait for a single response
# then shut down the server.
server, options = await create_mcp_server(hass, self.context(request), entry)
streams = create_streams()

async def run_server() -> None:
await server.run(
streams.read_stream, streams.write_stream, options, stateless=True
)

async with asyncio.timeout(TIMEOUT), anyio.create_task_group() as tg:
tg.start_soon(run_server)

await streams.read_stream_writer.send(SessionMessage(message))
session_message = await anext(streams.write_stream_reader)
tg.cancel_scope.cancel()

_LOGGER.debug("Sending response: %s", session_message)
return web.json_response(
data=session_message.message.model_dump(by_alias=True, exclude_none=True),
)
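Per the checks and comments above, the Streamable HTTP endpoint is a single stateless exchange: the client must both accept and send application/json, a JSON-RPC request yields exactly one JSON-RPC response, and bare notifications or responses are acknowledged with 202 Accepted. A hedged client-side sketch of such an exchange with aiohttp; the URL path, port, and method name are illustrative assumptions rather than values taken from the diff, and a real call against Home Assistant would also need its usual authentication headers:

```python
# Hypothetical client sketch, not part of the integration: exercise a stateless
# Streamable HTTP endpoint like the one described above.
import asyncio

import aiohttp

MCP_URL = "http://homeassistant.local:8123/mcp_server/streamable"  # assumed path


async def list_tools() -> None:
    request = {"jsonrpc": "2.0", "id": 1, "method": "tools/list", "params": {}}
    async with aiohttp.ClientSession() as session:
        async with session.post(
            MCP_URL,
            json=request,  # serializes the body and sets Content-Type: application/json
            headers={"Accept": "application/json"},
        ) as resp:
            # A JSON-RPC request gets a single JSON-RPC response body back;
            # a bare notification would instead be answered with 202 Accepted.
            print(resp.status, await resp.json())


asyncio.run(list_tools())
```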

@@ -48,6 +48,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: MealieConfigEntry) -> bo
),
)
try:
await client.define_household_support()
about = await client.get_about()
version = create_version(about.version)
except MealieAuthenticationError as error:

@@ -19,4 +19,4 @@ ATTR_NOTE_TEXT = "note_text"
ATTR_SEARCH_TERMS = "search_terms"
ATTR_RESULT_LIMIT = "result_limit"

MIN_REQUIRED_MEALIE_VERSION = AwesomeVersion("v2.0.0")
MIN_REQUIRED_MEALIE_VERSION = AwesomeVersion("v1.0.0")

@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/mealie",
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["aiomealie==1.0.0"]
"quality_scale": "silver",
"requirements": ["aiomealie==0.11.0"]
}

@@ -49,11 +49,11 @@ rules:
The integration will discover a Mealie addon posting a discovery message.
docs-data-update: done
docs-examples: done
docs-known-limitations: done
docs-supported-devices: done
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: done
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices:
status: done
comment: |

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/melcloud",
"iot_class": "cloud_polling",
"loggers": ["pymelcloud"],
"requirements": ["python-melcloud==0.1.2"]
"requirements": ["python-melcloud==0.1.0"]
}

@@ -37,8 +37,8 @@ class MieleEntity(CoordinatorEntity[MieleDataUpdateCoordinator]):
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device_id)},
serial_number=device_id,
name=device.device_name or appliance_type or device.tech_type,
translation_key=None if device.device_name else appliance_type,
name=appliance_type or device.tech_type,
translation_key=appliance_type,
manufacturer=MANUFACTURER,
model=device.tech_type,
hw_version=device.xkm_tech_type,

@@ -54,7 +54,6 @@ _LOGGER = logging.getLogger(__name__)
DEFAULT_PLATE_COUNT = 4

PLATE_COUNT = {
"KM7575": 6,
"KM7678": 6,
"KM7697": 6,
"KM7878": 6,

@@ -7,5 +7,5 @@
"documentation": "https://www.home-assistant.io/integrations/mill",
"iot_class": "local_polling",
"loggers": ["mill", "mill_local"],
"requirements": ["millheater==0.14.0", "mill-local==0.3.0"]
"requirements": ["millheater==0.13.1", "mill-local==0.3.0"]
}

@@ -11,9 +11,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Min/Max from a config entry."""
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))

return True


async def config_entry_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update listener, called when the config entry options are changed."""
await hass.config_entries.async_reload(entry.entry_id)


async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

@@ -71,7 +71,6 @@ class ConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN):

config_flow = CONFIG_FLOW
options_flow = OPTIONS_FLOW
options_flow_reloads = True

def async_config_entry_title(self, options: Mapping[str, Any]) -> str:
"""Return config entry title."""

@@ -46,14 +46,6 @@ from homeassistant.components.light import (
VALID_COLOR_MODES,
valid_supported_color_modes,
)
from homeassistant.components.number import (
DEFAULT_MAX_VALUE,
DEFAULT_MIN_VALUE,
DEFAULT_STEP,
DEVICE_CLASS_UNITS as NUMBER_DEVICE_CLASS_UNITS,
NumberDeviceClass,
NumberMode,
)
from homeassistant.components.sensor import (
CONF_STATE_CLASS,
DEVICE_CLASS_UNITS,
@@ -74,7 +66,6 @@ from homeassistant.config_entries import (
from homeassistant.const import (
ATTR_CONFIGURATION_URL,
ATTR_HW_VERSION,
ATTR_MANUFACTURER,
ATTR_MODEL,
ATTR_MODEL_ID,
ATTR_NAME,
@@ -88,7 +79,6 @@ from homeassistant.const import (
CONF_EFFECT,
CONF_ENTITY_CATEGORY,
CONF_HOST,
CONF_MODE,
CONF_NAME,
CONF_OPTIMISTIC,
CONF_PASSWORD,
@@ -221,9 +211,7 @@ from .const import (
CONF_IMAGE_TOPIC,
CONF_KEEPALIVE,
CONF_LAST_RESET_VALUE_TEMPLATE,
CONF_MAX,
CONF_MAX_KELVIN,
CONF_MIN,
CONF_MIN_KELVIN,
CONF_MODE_COMMAND_TEMPLATE,
CONF_MODE_COMMAND_TOPIC,
@@ -305,7 +293,6 @@ from .const import (
CONF_STATE_UNLOCKED,
CONF_STATE_UNLOCKING,
CONF_STATE_VALUE_TEMPLATE,
CONF_STEP,
CONF_SUGGESTED_DISPLAY_PRECISION,
CONF_SUPPORTED_COLOR_MODES,
CONF_SUPPORTED_FEATURES,
@@ -457,7 +444,6 @@ SUBENTRY_PLATFORMS = [
Platform.LIGHT,
Platform.LOCK,
Platform.NOTIFY,
Platform.NUMBER,
Platform.SENSOR,
Platform.SWITCH,
]
@@ -693,24 +679,6 @@ LIGHT_SCHEMA_SELECTOR = SelectSelector(
translation_key="light_schema",
)
)
MIN_MAX_SELECTOR = NumberSelector(NumberSelectorConfig(step=1e-3))
NUMBER_DEVICE_CLASS_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=[device_class.value for device_class in NumberDeviceClass],
mode=SelectSelectorMode.DROPDOWN,
# The number device classes are all shared with the sensor device classes
translation_key="device_class_sensor",
sort=True,
)
)
NUMBER_MODE_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=[mode.value for mode in NumberMode],
mode=SelectSelectorMode.DROPDOWN,
translation_key="number_mode",
sort=True,
)
)
ON_COMMAND_TYPE_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=VALUES_ON_COMMAND_TYPE,
@@ -758,7 +726,6 @@ SENSOR_STATE_CLASS_SELECTOR = SelectSelector(
translation_key=CONF_STATE_CLASS,
)
)
STEP_SELECTOR = NumberSelector(NumberSelectorConfig(min=1e-3, step=1e-3))
SUPPORTED_COLOR_MODES_SELECTOR = SelectSelector(
SelectSelectorConfig(
options=[platform.value for platform in VALID_COLOR_MODES],
@@ -915,23 +882,6 @@ def unit_of_measurement_selector(user_data: dict[str, Any | None]) -> Selector:
)


@callback
def number_unit_of_measurement_selector(user_data: dict[str, Any | None]) -> Selector:
"""Return a context based unit of measurement selector for number entities."""

if (
device_class := user_data.get(CONF_DEVICE_CLASS)
) is None or device_class not in NUMBER_DEVICE_CLASS_UNITS:
return TEXT_SELECTOR
return SelectSelector(
SelectSelectorConfig(
options=[str(uom) for uom in NUMBER_DEVICE_CLASS_UNITS[device_class]],
sort=True,
custom_value=True,
)
)


@callback
def validate(validator: Callable[[Any], Any]) -> Callable[[Any], Any]:
"""Run validator, then return the unmodified input."""
@@ -1055,29 +1005,6 @@ def validate_light_platform_config(user_data: dict[str, Any]) -> dict[str, str]:
return errors


@callback
def validate_number_platform_config(config: dict[str, Any]) -> dict[str, str]:
"""Validate MQTT number configuration."""
errors: dict[str, Any] = {}
if (
CONF_MIN in config
and CONF_MAX in config
and config[CONF_MIN] > config[CONF_MAX]
):
errors[CONF_MIN] = "max_below_min"
errors[CONF_MAX] = "max_below_min"

if (
(device_class := config.get(CONF_DEVICE_CLASS)) is not None
and device_class in NUMBER_DEVICE_CLASS_UNITS
and config.get(CONF_UNIT_OF_MEASUREMENT)
not in NUMBER_DEVICE_CLASS_UNITS[device_class]
):
errors[CONF_UNIT_OF_MEASUREMENT] = "invalid_uom"

return errors
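The removed validate_number_platform_config above performs two checks: the configured minimum must not exceed the maximum, and any unit of measurement must belong to the selected device class. A standalone sketch of the same two checks, assuming plain string keys in place of the CONF_* constants and a tiny illustrative unit table rather than the real NUMBER_DEVICE_CLASS_UNITS mapping:

```python
# Standalone sketch (not the integration's code) of the two checks performed by
# validate_number_platform_config above. Keys and the unit table are assumptions
# used only for illustration.
from typing import Any

UNITS_BY_DEVICE_CLASS = {"temperature": {"°C", "°F", "K"}}  # illustrative subset


def check_number_config(config: dict[str, Any]) -> dict[str, str]:
    errors: dict[str, str] = {}
    if "min" in config and "max" in config and config["min"] > config["max"]:
        errors["min"] = "max_below_min"
        errors["max"] = "max_below_min"
    device_class = config.get("device_class")
    if (
        device_class is not None
        and device_class in UNITS_BY_DEVICE_CLASS
        and config.get("unit_of_measurement") not in UNITS_BY_DEVICE_CLASS[device_class]
    ):
        errors["unit_of_measurement"] = "invalid_uom"
    return errors


print(
    check_number_config(
        {"min": 50, "max": 10, "device_class": "temperature", "unit_of_measurement": "W"}
    )
)
# -> {'min': 'max_below_min', 'max': 'max_below_min', 'unit_of_measurement': 'invalid_uom'}
```

Running it against the sample config reports both kinds of error, which mirrors how the config flow surfaces max_below_min and invalid_uom to the user.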

@callback
def validate_sensor_platform_config(
config: dict[str, Any],
@@ -1140,7 +1067,6 @@ ENTITY_CONFIG_VALIDATOR: dict[
Platform.LIGHT.value: validate_light_platform_config,
Platform.LOCK.value: None,
Platform.NOTIFY.value: None,
Platform.NUMBER.value: validate_number_platform_config,
Platform.SENSOR.value: validate_sensor_platform_config,
Platform.SWITCH.value: None,
}
@@ -1356,17 +1282,6 @@ PLATFORM_ENTITY_FIELDS: dict[str, dict[str, PlatformField]] = {
},
Platform.LOCK.value: {},
Platform.NOTIFY.value: {},
Platform.NUMBER: {
CONF_DEVICE_CLASS: PlatformField(
selector=NUMBER_DEVICE_CLASS_SELECTOR,
required=False,
),
CONF_UNIT_OF_MEASUREMENT: PlatformField(
selector=number_unit_of_measurement_selector,
required=False,
custom_filtering=True,
),
},
Platform.SENSOR.value: {
CONF_DEVICE_CLASS: PlatformField(
selector=SENSOR_DEVICE_CLASS_SELECTOR, required=False
@@ -3051,58 +2966,6 @@ PLATFORM_MQTT_FIELDS: dict[str, dict[str, PlatformField]] = {
),
CONF_RETAIN: PlatformField(selector=BOOLEAN_SELECTOR, required=False),
},
Platform.NUMBER.value: {
CONF_COMMAND_TOPIC: PlatformField(
selector=TEXT_SELECTOR,
required=True,
validator=valid_publish_topic,
error="invalid_publish_topic",
),
CONF_COMMAND_TEMPLATE: PlatformField(
selector=TEMPLATE_SELECTOR,
required=False,
validator=validate(cv.template),
error="invalid_template",
),
CONF_STATE_TOPIC: PlatformField(
selector=TEXT_SELECTOR,
required=False,
validator=valid_subscribe_topic,
error="invalid_subscribe_topic",
),
CONF_VALUE_TEMPLATE: PlatformField(
selector=TEMPLATE_SELECTOR,
required=False,
validator=validate(cv.template),
error="invalid_template",
),
CONF_MIN: PlatformField(
selector=MIN_MAX_SELECTOR,
required=True,
default=DEFAULT_MIN_VALUE,
),
CONF_MAX: PlatformField(
selector=MIN_MAX_SELECTOR,
required=True,
default=DEFAULT_MAX_VALUE,
),
CONF_STEP: PlatformField(
selector=STEP_SELECTOR,
required=True,
default=DEFAULT_STEP,
),
CONF_MODE: PlatformField(
selector=NUMBER_MODE_SELECTOR,
required=True,
default=NumberMode.AUTO.value,
),
CONF_PAYLOAD_RESET: PlatformField(
selector=TEXT_SELECTOR,
required=False,
default=DEFAULT_PAYLOAD_RESET,
),
CONF_RETAIN: PlatformField(selector=BOOLEAN_SELECTOR, required=False),
},
Platform.SENSOR.value: {
CONF_STATE_TOPIC: PlatformField(
selector=TEXT_SELECTOR,
@@ -3187,7 +3050,6 @@ MQTT_DEVICE_PLATFORM_FIELDS = {
),
ATTR_MODEL: PlatformField(selector=TEXT_SELECTOR, required=False),
ATTR_MODEL_ID: PlatformField(selector=TEXT_SELECTOR, required=False),
ATTR_MANUFACTURER: PlatformField(selector=TEXT_SELECTOR, required=False),
ATTR_CONFIGURATION_URL: PlatformField(
selector=TEXT_SELECTOR, required=False, validator=cv.url, error="invalid_url"
),

@@ -120,10 +120,8 @@ CONF_HUMIDITY_STATE_TOPIC = "target_humidity_state_topic"
CONF_HUMIDITY_MAX = "max_humidity"
CONF_HUMIDITY_MIN = "min_humidity"
CONF_LAST_RESET_VALUE_TEMPLATE = "last_reset_value_template"
CONF_MAX = "max"
CONF_MAX_KELVIN = "max_kelvin"
CONF_MAX_MIREDS = "max_mireds"
CONF_MIN = "min"
CONF_MIN_KELVIN = "min_kelvin"
CONF_MIN_MIREDS = "min_mireds"
CONF_MODE_COMMAND_TEMPLATE = "mode_command_template"
@@ -198,7 +196,6 @@ CONF_STATE_OPENING = "state_opening"
CONF_STATE_STOPPED = "state_stopped"
CONF_STATE_UNLOCKED = "state_unlocked"
CONF_STATE_UNLOCKING = "state_unlocking"
CONF_STEP = "step"
CONF_SUGGESTED_DISPLAY_PRECISION = "suggested_display_precision"
CONF_SUPPORTED_COLOR_MODES = "supported_color_modes"
CONF_SWING_HORIZONTAL_MODE_COMMAND_TEMPLATE = "swing_horizontal_mode_command_template"

@@ -188,10 +188,7 @@ class MqttLock(MqttEntity, LockEntity):
return
if payload == self._config[CONF_PAYLOAD_RESET]:
# Reset the state to `unknown`
self._attr_is_locked = self._attr_is_locking = None
self._attr_is_unlocking = None
self._attr_is_open = self._attr_is_opening = None
self._attr_is_jammed = None
self._attr_is_locked = None
elif payload in self._valid_states:
self._attr_is_locked = payload == self._config[CONF_STATE_LOCKED]
self._attr_is_locking = payload == self._config[CONF_STATE_LOCKING]

@@ -37,12 +37,8 @@ from .config import MQTT_RW_SCHEMA
from .const import (
CONF_COMMAND_TEMPLATE,
CONF_COMMAND_TOPIC,
CONF_MAX,
CONF_MIN,
CONF_PAYLOAD_RESET,
CONF_STATE_TOPIC,
CONF_STEP,
DEFAULT_PAYLOAD_RESET,
)
from .entity import MqttEntity, async_setup_entity_entry_helper
from .models import (
@@ -57,7 +53,12 @@ _LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 0

CONF_MIN = "min"
CONF_MAX = "max"
CONF_STEP = "step"

DEFAULT_NAME = "MQTT Number"
DEFAULT_PAYLOAD_RESET = "None"

MQTT_NUMBER_ATTRIBUTES_BLOCKED = frozenset(
{

@@ -165,15 +165,13 @@
"name": "[%key:common::config_flow::data::name%]",
"configuration_url": "Configuration URL",
"model": "Model",
"model_id": "Model ID",
"manufacturer": "Manufacturer"
"model_id": "Model ID"
},
"data_description": {
"name": "The name of the manually added MQTT device.",
"configuration_url": "A link to the webpage that can manage the configuration of this device. Can be either a 'http://', 'https://' or an internal 'homeassistant://' URL.",
"model": "E.g. 'Cleanmaster Pro'.",
"model_id": "E.g. '123NK2PRO'.",
"manufacturer": "E.g. Cleanmaster Ltd."
"model_id": "E.g. '123NK2PRO'."
},
"sections": {
"advanced_settings": {
@@ -300,7 +298,7 @@
"suggested_display_precision": "The number of decimals which should be used in the {platform} entity state after rounding. [Learn more.]({url}#suggested_display_precision)",
"supported_features": "The features that the entity supports.",
"temperature_unit": "This determines the native unit of measurement the MQTT climate device works with.",
"unit_of_measurement": "Defines the unit of measurement, if any."
"unit_of_measurement": "Defines the unit of measurement of the sensor, if any."
},
"sections": {
"advanced_settings": {
@@ -336,9 +334,6 @@
"image_encoding": "Image encoding",
"image_topic": "Image topic",
"last_reset_value_template": "Last reset value template",
"max": "Maximum",
"min": "Minimum",
"mode": "Mode",
"modes": "Supported operation modes",
"mode_command_topic": "Operation mode command topic",
"mode_command_template": "Operation mode command template",
@@ -349,7 +344,6 @@
"payload_off": "Payload \"off\"",
"payload_on": "Payload \"on\"",
"payload_press": "Payload \"press\"",
"payload_reset": "Payload \"reset\"",
"qos": "QoS",
"red_template": "Red template",
"retain": "Retain",
@@ -358,7 +352,6 @@
"state_template": "State template",
"state_topic": "State topic",
"state_value_template": "State value template",
"step": "Step",
"supported_color_modes": "Supported color modes",
"url_template": "URL template",
"url_topic": "URL topic",
@@ -383,9 +376,6 @@
"image_encoding": "Select the encoding of the received image data",
"image_topic": "The MQTT topic subscribed to receive messages containing the image data. [Learn more.]({url}#image_topic)",
"last_reset_value_template": "Defines a [template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract the last reset. When Last reset template is set, the State class option must be Total. [Learn more.]({url}#last_reset_value_template)",
"max": "Maximum value. [Learn more.]({url}#max)",
"min": "Minimum value. [Learn more.]({url}#min)",
"mode": "Control how the number should be displayed in the UI. [Learn more.]({url}#mode)",
"modes": "A list of supported operation modes. [Learn more.]({url}#modes)",
"mode_command_topic": "The MQTT topic to publish commands to change the climate operation mode. [Learn more.]({url}#mode_command_topic)",
"mode_command_template": "[Template](https://www.home-assistant.io/docs/configuration/templating/#using-command-templates-with-mqtt) to define the operation mode to be sent to the operation mode command topic. [Learn more.]({url}#mode_command_template)",
@@ -396,7 +386,6 @@
"payload_off": "The payload that represents the \"off\" state.",
"payload_on": "The payload that represents the \"on\" state.",
"payload_press": "The payload to send when the button is triggered.",
"payload_reset": "The payload received at the state topic that resets the entity to an unknown state.",
"qos": "The QoS value a {platform} entity should use.",
"red_template": "[Template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract red color from the state payload value. Expected result of the template is an integer from 0-255 range.",
"retain": "Select if values published by the {platform} entity should be retained at the MQTT broker.",
@@ -404,7 +393,6 @@
"state_on": "The incoming payload that represents the \"on\" state. Use only when the value that represents \"on\" state in the state topic is different from value that should be sent to the command topic to turn the device on.",
"state_template": "[Template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract state from the state payload value.",
"state_topic": "The MQTT topic subscribed to receive {platform} state values. [Learn more.]({url}#state_topic)",
"step": "Step value. Smallest value 0.001.",
"supported_color_modes": "A list of color modes supported by the light. Possible color modes are On/Off, Brightness, Color temperature, HS, XY, RGB, RGBW, RGBWW, White. Note that if On/Off or Brightness are used, that must be the only value in the list. [Learn more.]({url}#supported_color_modes)",
"url_template": "[Template](https://www.home-assistant.io/docs/configuration/templating/#using-value-templates-with-mqtt) to extract an URL from the received URL topic payload value. [Learn more.]({url}#url_template)",
"url_topic": "The MQTT topic subscribed to receive messages containing the image URL. [Learn more.]({url}#url_topic)",
@@ -1007,7 +995,6 @@
"invalid_uom_for_state_class": "The unit of measurement \"{unit_of_measurement}\" is not supported by the selected state class, please either remove the state class, select a state class which supports \"{unit_of_measurement}\", or pick a supported unit of measurement from the list",
"invalid_url": "Invalid URL",
"last_reset_not_with_state_class_total": "The last reset value template option should be used with state class 'Total' only",
"max_below_min": "Max value should be greater or equal to min value",
"max_below_min_humidity": "Max humidity value should be greater than min humidity value",
"max_below_min_kelvin": "Max Kelvin value should be greater than min Kelvin value",
"max_below_min_temperature": "Max temperature value should be greater than min temperature value",
@@ -1307,13 +1294,6 @@
"template": "Template"
}
},
"number_mode": {
"options": {
"auto": "[%key:component::number::entity_component::_::state_attributes::mode::state::auto%]",
"box": "[%key:component::number::entity_component::_::state_attributes::mode::state::box%]",
"slider": "[%key:component::number::entity_component::_::state_attributes::mode::state::slider%]"
}
},
"on_command_type": {
"options": {
"brightness": "Brightness",
@@ -1333,7 +1313,6 @@
"light": "[%key:component::light::title%]",
"lock": "[%key:component::lock::title%]",
"notify": "[%key:component::notify::title%]",
"number": "[%key:component::number::title%]",
"sensor": "[%key:component::sensor::title%]",
"switch": "[%key:component::switch::title%]"
}

@@ -1,51 +0,0 @@
"""The Nintendo Switch Parental Controls integration."""

from __future__ import annotations

from pynintendoparental import Authenticator
from pynintendoparental.exceptions import (
InvalidOAuthConfigurationException,
InvalidSessionTokenException,
)

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import CONF_SESSION_TOKEN, DOMAIN
from .coordinator import NintendoParentalConfigEntry, NintendoUpdateCoordinator

_PLATFORMS: list[Platform] = [Platform.SENSOR]


async def async_setup_entry(
hass: HomeAssistant, entry: NintendoParentalConfigEntry
) -> bool:
"""Set up Nintendo Switch Parental Controls from a config entry."""
try:
nintendo_auth = await Authenticator.complete_login(
auth=None,
response_token=entry.data[CONF_SESSION_TOKEN],
is_session_token=True,
client_session=async_get_clientsession(hass),
)
except (InvalidSessionTokenException, InvalidOAuthConfigurationException) as err:
raise ConfigEntryError(
translation_domain=DOMAIN,
translation_key="auth_expired",
) from err
entry.runtime_data = coordinator = NintendoUpdateCoordinator(
hass, nintendo_auth, entry
)
await coordinator.async_config_entry_first_refresh()
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)

return True


async def async_unload_entry(
hass: HomeAssistant, entry: NintendoParentalConfigEntry
) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

@@ -1,61 +0,0 @@
"""Config flow for the Nintendo Switch Parental Controls integration."""

from __future__ import annotations

import logging
from typing import TYPE_CHECKING, Any

from pynintendoparental import Authenticator
from pynintendoparental.exceptions import HttpException, InvalidSessionTokenException
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_TOKEN
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import CONF_SESSION_TOKEN, DOMAIN

_LOGGER = logging.getLogger(__name__)


class NintendoConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Nintendo Switch Parental Controls."""

def __init__(self) -> None:
"""Initialize a new config flow instance."""
self.auth: Authenticator | None = None

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors = {}
if self.auth is None:
self.auth = Authenticator.generate_login(
client_session=async_get_clientsession(self.hass)
)

if user_input is not None:
try:
await self.auth.complete_login(
self.auth, user_input[CONF_API_TOKEN], False
)
except (ValueError, InvalidSessionTokenException, HttpException):
errors["base"] = "invalid_auth"
else:
if TYPE_CHECKING:
assert self.auth.account_id
await self.async_set_unique_id(self.auth.account_id)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=self.auth.account_id,
data={
CONF_SESSION_TOKEN: self.auth.get_session_token,
},
)
return self.async_show_form(
step_id="user",
description_placeholders={"link": self.auth.login_url},
data_schema=vol.Schema({vol.Required(CONF_API_TOKEN): str}),
errors=errors,
)

@@ -1,5 +0,0 @@
"""Constants for the Nintendo Switch Parental Controls integration."""

DOMAIN = "nintendo_parental"
CONF_UPDATE_INTERVAL = "update_interval"
CONF_SESSION_TOKEN = "session_token"

@@ -1,52 +0,0 @@
"""Nintendo Parental Controls data coordinator."""

from __future__ import annotations

from datetime import timedelta
import logging

from pynintendoparental import Authenticator, NintendoParental
from pynintendoparental.exceptions import InvalidOAuthConfigurationException

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

from .const import DOMAIN

type NintendoParentalConfigEntry = ConfigEntry[NintendoUpdateCoordinator]

_LOGGER = logging.getLogger(__name__)
UPDATE_INTERVAL = timedelta(seconds=60)


class NintendoUpdateCoordinator(DataUpdateCoordinator[None]):
"""Nintendo data update coordinator."""

def __init__(
self,
hass: HomeAssistant,
authenticator: Authenticator,
config_entry: NintendoParentalConfigEntry,
) -> None:
"""Initialize update coordinator."""
super().__init__(
hass=hass,
logger=_LOGGER,
name=DOMAIN,
update_interval=UPDATE_INTERVAL,
config_entry=config_entry,
)
self.api = NintendoParental(
authenticator, hass.config.time_zone, hass.config.language
)

async def _async_update_data(self) -> None:
"""Update data from Nintendo's API."""
try:
return await self.api.update()
except InvalidOAuthConfigurationException as err:
raise ConfigEntryError(
err, translation_domain=DOMAIN, translation_key="invalid_auth"
) from err

@@ -1,41 +0,0 @@
"""Base entity definition for Nintendo Parental."""

from __future__ import annotations

from pynintendoparental.device import Device

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import NintendoUpdateCoordinator


class NintendoDevice(CoordinatorEntity[NintendoUpdateCoordinator]):
"""Represent a Nintendo Switch."""

_attr_has_entity_name = True

def __init__(
self, coordinator: NintendoUpdateCoordinator, device: Device, key: str
) -> None:
"""Initialize."""
super().__init__(coordinator)
self._device = device
self._attr_unique_id = f"{device.device_id}_{key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device.device_id)},
manufacturer="Nintendo",
name=device.name,
sw_version=device.extra["firmwareVersion"]["displayedVersion"],
)

async def async_added_to_hass(self) -> None:
"""When entity is loaded."""
await super().async_added_to_hass()
self._device.add_device_callback(self.async_write_ha_state)

async def async_will_remove_from_hass(self) -> None:
"""When will be removed from HASS."""
self._device.remove_device_callback(self.async_write_ha_state)
await super().async_will_remove_from_hass()
Some files were not shown because too many files have changed in this diff.