Mirror of https://github.com/home-assistant/core.git
Synced 2025-10-13 21:59:35 +00:00

Compare commits: light_targ ... remove-cod (1 commit)
Commit 20740326cb

.github/workflows/ci.yaml (vendored, 2 changes)
@@ -741,7 +741,7 @@ jobs:
       - name: Generate partial mypy restore key
         id: generate-mypy-key
         run: |
-          mypy_version=$(cat requirements_test.txt | grep 'mypy.*=' | cut -d '=' -f 3)
+          mypy_version=$(cat requirements_test.txt | grep mypy | cut -d '=' -f 3)
           echo "version=$mypy_version" >> $GITHUB_OUTPUT
           echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-${{
             env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
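
Note: both grep filters feed "cut -d '=' -f 3", which works because "mypy==1.x" splits on "=" into three fields, the middle one empty. A minimal Python sketch of what each filter selects (the sample lines are hypothetical, not taken from requirements_test.txt):

    import re

    lines = ["pytest==8.3.0", "mypy==1.17.1", "mypy-extensions==1.1.0"]  # hypothetical
    print([ln for ln in lines if re.search(r"mypy.*=", ln)])  # both mypy* pins match
    print([ln for ln in lines if "mypy" in ln])               # same two lines here
    print("mypy==1.17.1".split("=")[2])                       # "1.17.1", i.e. field 3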

.github/workflows/codeql.yml (vendored, 4 changes)
@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
+        uses: github/codeql-action/init@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@e296a935590eb16afc0c0108289f68c87e2a89a5 # v4.30.7
+        uses: github/codeql-action/analyze@64d10c13136e1c5bce3e5fbde8d4906eeaafc885 # v3.30.6
         with:
           category: "/language:python"

CODEOWNERS (generated, 4 changes)
@@ -1413,8 +1413,8 @@ build.json @home-assistant/supervisor
 /tests/components/sfr_box/ @epenet
 /homeassistant/components/sftp_storage/ @maretodoric
 /tests/components/sftp_storage/ @maretodoric
-/homeassistant/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
-/tests/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
+/homeassistant/components/sharkiq/ @JeffResc @funkybunch
+/tests/components/sharkiq/ @JeffResc @funkybunch
 /homeassistant/components/shell_command/ @home-assistant/core
 /tests/components/shell_command/ @home-assistant/core
 /homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco

@@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = {
 }
 UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
-UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30)
+UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
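
The two sides disagree on the hourly-forecast cadence by a factor of 60; a quick check:

    from datetime import timedelta

    assert timedelta(hours=30) == 60 * timedelta(minutes=30)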

@@ -1,9 +1,6 @@
 {
   "entity": {
     "sensor": {
-      "air_quality": {
-        "default": "mdi:air-filter"
-      },
       "cloud_ceiling": {
         "default": "mdi:weather-fog"
       },
@@ -37,6 +34,9 @@
       "thunderstorm_probability_night": {
         "default": "mdi:weather-lightning"
       },
+      "translation_key": {
+        "default": "mdi:air-filter"
+      },
       "tree_pollen": {
         "default": "mdi:tree-outline"
       },

@@ -1,9 +1,7 @@
 """Airgradient Update platform."""

 from datetime import timedelta
-import logging
-
-from airgradient import AirGradientConnectionError
+
 from propcache.api import cached_property

 from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
@@ -15,7 +13,6 @@ from .entity import AirGradientEntity

 PARALLEL_UPDATES = 1
 SCAN_INTERVAL = timedelta(hours=1)
-_LOGGER = logging.getLogger(__name__)


 async def async_setup_entry(
@@ -34,7 +31,6 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
     """Representation of Airgradient Update."""

     _attr_device_class = UpdateDeviceClass.FIRMWARE
-    _server_unreachable_logged = False

     def __init__(self, coordinator: AirGradientCoordinator) -> None:
         """Initialize the entity."""
@@ -51,27 +47,10 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
         """Return the installed version of the entity."""
         return self.coordinator.data.measures.firmware_version

-    @property
-    def available(self) -> bool:
-        """Return if entity is available."""
-        return super().available and self._attr_available
-
     async def async_update(self) -> None:
         """Update the entity."""
-        try:
-            self._attr_latest_version = (
-                await self.coordinator.client.get_latest_firmware_version(
-                    self.coordinator.serial_number
-                )
-            )
-        except AirGradientConnectionError:
-            self._attr_latest_version = None
-            self._attr_available = False
-            if not self._server_unreachable_logged:
-                _LOGGER.error(
-                    "Unable to connect to AirGradient server to check for updates"
-                )
-                self._server_unreachable_logged = True
-        else:
-            self._server_unreachable_logged = False
-            self._attr_available = True
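
The left side guards its connection errors with a log-once flag, so a long outage produces a single error line rather than one per hourly poll. A minimal standalone sketch of that pattern (the names and the fetch stub are illustrative, not the integration's API):

    import logging

    _LOGGER = logging.getLogger(__name__)

    def fetch_latest_version() -> str:
        """Stand-in for the network call; raises while the server is unreachable."""
        raise ConnectionError

    class Poller:
        _unreachable_logged = False

        def poll(self) -> None:
            try:
                fetch_latest_version()
            except ConnectionError:
                if not self._unreachable_logged:
                    _LOGGER.error("Unable to reach server")  # logged once per outage
                    self._unreachable_logged = True
            else:
                self._unreachable_logged = False  # re-arm logging after recovery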

@@ -16,12 +16,10 @@ from homeassistant.components.sensor import (
 from homeassistant.const import (
     CONCENTRATION_PARTS_PER_BILLION,
     CONCENTRATION_PARTS_PER_MILLION,
-    LIGHT_LUX,
     PERCENTAGE,
     EntityCategory,
     Platform,
     UnitOfPressure,
-    UnitOfSoundPressure,
     UnitOfTemperature,
 )
 from homeassistant.core import HomeAssistant, callback
@@ -114,21 +112,6 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
         state_class=SensorStateClass.MEASUREMENT,
         suggested_display_precision=0,
     ),
-    "lux": SensorEntityDescription(
-        key="lux",
-        device_class=SensorDeviceClass.ILLUMINANCE,
-        native_unit_of_measurement=LIGHT_LUX,
-        state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
-    ),
-    "noise": SensorEntityDescription(
-        key="noise",
-        translation_key="ambient_noise",
-        device_class=SensorDeviceClass.SOUND_PRESSURE,
-        native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A,
-        state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
-    ),
 }

 PARALLEL_UPDATES = 0

@@ -41,9 +41,6 @@
     },
     "illuminance": {
       "name": "[%key:component::sensor::entity_component::illuminance::name%]"
-    },
-    "ambient_noise": {
-      "name": "Ambient noise"
     }
   }
 }

@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
   "quality_scale": "platinum",
-  "requirements": ["aioamazondevices==6.4.0"]
+  "requirements": ["aioamazondevices==6.2.9"]
 }

@@ -7,8 +7,6 @@ from typing import Any
 from pyaprilaire.const import Attribute

 from homeassistant.components.climate import (
-    ATTR_TARGET_TEMP_HIGH,
-    ATTR_TARGET_TEMP_LOW,
     FAN_AUTO,
     FAN_ON,
     PRESET_AWAY,
@@ -18,12 +16,7 @@ from homeassistant.components.climate import (
     HVACAction,
     HVACMode,
 )
-from homeassistant.const import (
-    ATTR_TEMPERATURE,
-    PRECISION_HALVES,
-    PRECISION_WHOLE,
-    UnitOfTemperature,
-)
+from homeassistant.const import PRECISION_HALVES, PRECISION_WHOLE, UnitOfTemperature
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

@@ -239,15 +232,15 @@ class AprilaireClimate(BaseAprilaireEntity, ClimateEntity):
         cool_setpoint = 0
         heat_setpoint = 0

-        if temperature := kwargs.get(ATTR_TEMPERATURE):
+        if temperature := kwargs.get("temperature"):
             if self.coordinator.data.get(Attribute.MODE) == 3:
                 cool_setpoint = temperature
             else:
                 heat_setpoint = temperature
         else:
-            if target_temp_low := kwargs.get(ATTR_TARGET_TEMP_LOW):
+            if target_temp_low := kwargs.get("target_temp_low"):
                 heat_setpoint = target_temp_low
-            if target_temp_high := kwargs.get(ATTR_TARGET_TEMP_HIGH):
+            if target_temp_high := kwargs.get("target_temp_high"):
                 cool_setpoint = target_temp_high

         if cool_setpoint == 0 and heat_setpoint == 0:

@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/autarco",
   "iot_class": "cloud_polling",
-  "requirements": ["autarco==3.2.0"]
+  "requirements": ["autarco==3.1.0"]
 }

@@ -7,14 +7,12 @@ from typing import Any
 from evolutionhttp import BryantEvolutionLocalClient

 from homeassistant.components.climate import (
-    ATTR_TARGET_TEMP_HIGH,
-    ATTR_TARGET_TEMP_LOW,
     ClimateEntity,
     ClimateEntityFeature,
     HVACAction,
     HVACMode,
 )
-from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
+from homeassistant.const import UnitOfTemperature
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers.device_registry import DeviceInfo
@@ -210,24 +208,24 @@ class BryantEvolutionClimate(ClimateEntity):

     async def async_set_temperature(self, **kwargs: Any) -> None:
         """Set new target temperature."""
-        if value := kwargs.get(ATTR_TARGET_TEMP_HIGH):
-            temp = int(value)
+        if kwargs.get("target_temp_high"):
+            temp = int(kwargs["target_temp_high"])
             if not await self._client.set_cooling_setpoint(temp):
                 raise HomeAssistantError(
                     translation_domain=DOMAIN, translation_key="failed_to_set_clsp"
                 )
             self._attr_target_temperature_high = temp

-        if value := kwargs.get(ATTR_TARGET_TEMP_LOW):
-            temp = int(value)
+        if kwargs.get("target_temp_low"):
+            temp = int(kwargs["target_temp_low"])
             if not await self._client.set_heating_setpoint(temp):
                 raise HomeAssistantError(
                     translation_domain=DOMAIN, translation_key="failed_to_set_htsp"
                 )
             self._attr_target_temperature_low = temp

-        if value := kwargs.get(ATTR_TEMPERATURE):
-            temp = int(value)
+        if kwargs.get("temperature"):
+            temp = int(kwargs["temperature"])
             fn = (
                 self._client.set_heating_setpoint
                 if self.hvac_mode == HVACMode.HEAT
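
Both styles above gate on truthiness, so a present-but-falsy value of 0 is silently skipped; the walrus form on the left at least avoids a second dict lookup. A minimal sketch of the three access styles (the values are hypothetical):

    kwargs = {"target_temp_high": 0}  # hypothetical: falsy but present

    if value := kwargs.get("target_temp_high"):      # left side: one lookup
        temp = int(value)                            # skipped for 0

    if kwargs.get("target_temp_high"):               # right side: two lookups
        temp = int(kwargs["target_temp_high"])       # also skipped for 0

    if (value := kwargs.get("target_temp_high")) is not None:
        temp = int(value)                            # reached even for 0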

@@ -169,7 +169,7 @@ class CalendarEventListener:
     def __init__(
         self,
         hass: HomeAssistant,
-        job: HassJob[..., Coroutine[Any, Any, None] | Any],
+        job: HassJob[..., Coroutine[Any, Any, None]],
         trigger_data: dict[str, Any],
         fetcher: QueuedEventFetcher,
     ) -> None:
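
On the annotation itself: a union with Any is absorbed to Any, so the old "Coroutine[Any, Any, None] | Any" gave type checkers nothing to verify; dropping "| Any" makes the job's return type checkable. A small sketch:

    from __future__ import annotations

    from collections.abc import Coroutine
    from typing import Any

    loose: Coroutine[Any, Any, None] | Any = 42   # accepted: "| Any" absorbs everything
    # strict: Coroutine[Any, Any, None] = 42      # rejected by mypy without "| Any"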

@@ -15,7 +15,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from .coordinator import ComelitConfigEntry, ComelitVedoSystem
-from .utils import DeviceType, new_device_listener

 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -30,19 +29,23 @@ async def async_setup_entry(

     coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)

-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
-        """Add entities for new monitors."""
-        entities = [
-            ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
-            for device in coordinator.data["alarm_zones"].values()
-            if device in new_devices
-        ]
-        if entities:
-            async_add_entities(entities)
-
-    config_entry.async_on_unload(
-        new_device_listener(coordinator, _add_new_entities, "alarm_zones")
-    )
+    known_devices: set[int] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data["alarm_zones"])
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                ComelitVedoBinarySensorEntity(
+                    coordinator, device, config_entry.entry_id
+                )
+                for device in coordinator.data["alarm_zones"].values()
+                if device.index in new_devices
+            )
+
+    _check_device()
+    config_entry.async_on_unload(coordinator.async_add_listener(_check_device))


 class ComelitVedoBinarySensorEntity(

@@ -7,21 +7,14 @@ from typing import Any, cast
 from aiocomelit import ComelitSerialBridgeObject
 from aiocomelit.const import COVER, STATE_COVER, STATE_OFF, STATE_ON

-from homeassistant.components.cover import (
-    STATE_CLOSED,
-    STATE_CLOSING,
-    STATE_OPEN,
-    STATE_OPENING,
-    CoverDeviceClass,
-    CoverEntity,
-)
+from homeassistant.components.cover import CoverDeviceClass, CoverEntity
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.restore_state import RestoreEntity

 from .coordinator import ComelitConfigEntry, ComelitSerialBridge
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, bridge_api_call, new_device_listener
+from .utils import bridge_api_call

 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -36,19 +29,21 @@ async def async_setup_entry(

     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)

-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
-        """Add entities for new monitors."""
-        entities = [
-            ComelitCoverEntity(coordinator, device, config_entry.entry_id)
-            for device in coordinator.data[dev_type].values()
-            if device in new_devices
-        ]
-        if entities:
-            async_add_entities(entities)
-
-    config_entry.async_on_unload(
-        new_device_listener(coordinator, _add_new_entities, COVER)
-    )
+    known_devices: set[int] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data[COVER])
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                ComelitCoverEntity(coordinator, device, config_entry.entry_id)
+                for device in coordinator.data[COVER].values()
+                if device.index in new_devices
+            )
+
+    _check_device()
+    config_entry.async_on_unload(coordinator.async_add_listener(_check_device))


 class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
@@ -67,6 +62,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
         super().__init__(coordinator, device, config_entry_entry_id)
         # Device doesn't provide a status so we assume UNKNOWN at first startup
         self._last_action: int | None = None
+        self._last_state: str | None = None

     def _current_action(self, action: str) -> bool:
         """Return the current cover action."""
@@ -102,6 +98,7 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):
     @bridge_api_call
     async def _cover_set_state(self, action: int, state: int) -> None:
         """Set desired cover state."""
+        self._last_state = self.state
         await self.coordinator.api.set_device_status(COVER, self._device.index, action)
         self.coordinator.data[COVER][self._device.index].status = state
         self.async_write_ha_state()
@@ -127,10 +124,5 @@ class ComelitCoverEntity(ComelitBridgeBaseEntity, RestoreEntity, CoverEntity):

         await super().async_added_to_hass()

-        if (state := await self.async_get_last_state()) is not None:
-            if state.state == STATE_CLOSED:
-                self._last_action = STATE_COVER.index(STATE_CLOSING)
-            if state.state == STATE_OPEN:
-                self._last_action = STATE_COVER.index(STATE_OPENING)
-
-            self._attr_is_closed = state.state == STATE_CLOSED
+        if last_state := await self.async_get_last_state():
+            self._last_state = last_state.state
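
The cover recalls its pre-restart state through RestoreEntity; a condensed sketch of the pattern used above (entity plumbing elided; async_get_last_state is the real Home Assistant helper):

    from homeassistant.helpers.restore_state import RestoreEntity

    class SketchCover(RestoreEntity):
        _last_state: str | None = None

        async def async_added_to_hass(self) -> None:
            await super().async_added_to_hass()
            # Returns the state recorded before shutdown, or None on first start.
            if last_state := await self.async_get_last_state():
                self._last_state = last_state.state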

@@ -12,7 +12,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import ComelitConfigEntry, ComelitSerialBridge
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, bridge_api_call, new_device_listener
+from .utils import bridge_api_call

 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -27,19 +27,21 @@ async def async_setup_entry(

     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)

-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
-        """Add entities for new monitors."""
-        entities = [
-            ComelitLightEntity(coordinator, device, config_entry.entry_id)
-            for device in coordinator.data[dev_type].values()
-            if device in new_devices
-        ]
-        if entities:
-            async_add_entities(entities)
-
-    config_entry.async_on_unload(
-        new_device_listener(coordinator, _add_new_entities, LIGHT)
-    )
+    known_devices: set[int] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data[LIGHT])
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                ComelitLightEntity(coordinator, device, config_entry.entry_id)
+                for device in coordinator.data[LIGHT].values()
+                if device.index in new_devices
+            )
+
+    _check_device()
+    config_entry.async_on_unload(coordinator.async_add_listener(_check_device))


 class ComelitLightEntity(ComelitBridgeBaseEntity, LightEntity):

@@ -20,7 +20,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity

 from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, new_device_listener

 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -66,22 +65,24 @@ async def async_setup_bridge_entry(

     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)

-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
-        """Add entities for new monitors."""
-        entities = [
-            ComelitBridgeSensorEntity(
-                coordinator, device, config_entry.entry_id, sensor_desc
-            )
-            for sensor_desc in SENSOR_BRIDGE_TYPES
-            for device in coordinator.data[dev_type].values()
-            if device in new_devices
-        ]
-        if entities:
-            async_add_entities(entities)
-
-    config_entry.async_on_unload(
-        new_device_listener(coordinator, _add_new_entities, OTHER)
-    )
+    known_devices: set[int] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data[OTHER])
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                ComelitBridgeSensorEntity(
+                    coordinator, device, config_entry.entry_id, sensor_desc
+                )
+                for sensor_desc in SENSOR_BRIDGE_TYPES
+                for device in coordinator.data[OTHER].values()
+                if device.index in new_devices
+            )
+
+    _check_device()
+    config_entry.async_on_unload(coordinator.async_add_listener(_check_device))


 async def async_setup_vedo_entry(
@@ -93,22 +94,24 @@ async def async_setup_vedo_entry(

     coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)

-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
-        """Add entities for new monitors."""
-        entities = [
-            ComelitVedoSensorEntity(
-                coordinator, device, config_entry.entry_id, sensor_desc
-            )
-            for sensor_desc in SENSOR_VEDO_TYPES
-            for device in coordinator.data["alarm_zones"].values()
-            if device in new_devices
-        ]
-        if entities:
-            async_add_entities(entities)
-
-    config_entry.async_on_unload(
-        new_device_listener(coordinator, _add_new_entities, "alarm_zones")
-    )
+    known_devices: set[int] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data["alarm_zones"])
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                ComelitVedoSensorEntity(
+                    coordinator, device, config_entry.entry_id, sensor_desc
+                )
+                for sensor_desc in SENSOR_VEDO_TYPES
+                for device in coordinator.data["alarm_zones"].values()
+                if device.index in new_devices
+            )
+
+    _check_device()
+    config_entry.async_on_unload(coordinator.async_add_listener(_check_device))


 class ComelitBridgeSensorEntity(ComelitBridgeBaseEntity, SensorEntity):

@@ -13,7 +13,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import ComelitConfigEntry, ComelitSerialBridge
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, bridge_api_call, new_device_listener
+from .utils import bridge_api_call

 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -28,20 +28,35 @@ async def async_setup_entry(

     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)

-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
-        """Add entities for new monitors."""
-        entities = [
-            ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
-            for device in coordinator.data[dev_type].values()
-            if device in new_devices
-        ]
-        if entities:
-            async_add_entities(entities)
-
-    for dev_type in (IRRIGATION, OTHER):
-        config_entry.async_on_unload(
-            new_device_listener(coordinator, _add_new_entities, dev_type)
-        )
+    entities: list[ComelitSwitchEntity] = []
+    entities.extend(
+        ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
+        for device in coordinator.data[IRRIGATION].values()
+    )
+    entities.extend(
+        ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
+        for device in coordinator.data[OTHER].values()
+    )
+    async_add_entities(entities)
+
+    known_devices: dict[str, set[int]] = {
+        dev_type: set() for dev_type in (IRRIGATION, OTHER)
+    }
+
+    def _check_device() -> None:
+        for dev_type in (IRRIGATION, OTHER):
+            current_devices = set(coordinator.data[dev_type])
+            new_devices = current_devices - known_devices[dev_type]
+            if new_devices:
+                known_devices[dev_type].update(new_devices)
+                async_add_entities(
+                    ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
+                    for device in coordinator.data[dev_type].values()
+                    if device.index in new_devices
+                )
+
+    _check_device()
+    config_entry.async_on_unload(coordinator.async_add_listener(_check_device))


 class ComelitSwitchEntity(ComelitBridgeBaseEntity, SwitchEntity):

@@ -4,11 +4,7 @@ from collections.abc import Awaitable, Callable, Coroutine
 from functools import wraps
 from typing import Any, Concatenate

-from aiocomelit.api import (
-    ComelitSerialBridgeObject,
-    ComelitVedoAreaObject,
-    ComelitVedoZoneObject,
-)
+from aiocomelit import ComelitSerialBridgeObject
 from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
 from aiohttp import ClientSession, CookieJar

@@ -23,11 +19,8 @@ from homeassistant.helpers import (
 )

 from .const import _LOGGER, DOMAIN
-from .coordinator import ComelitBaseCoordinator
 from .entity import ComelitBridgeBaseEntity

-DeviceType = ComelitSerialBridgeObject | ComelitVedoAreaObject | ComelitVedoZoneObject
-

 async def async_client_session(hass: HomeAssistant) -> ClientSession:
     """Return a new aiohttp session."""
@@ -120,41 +113,3 @@ def bridge_api_call[_T: ComelitBridgeBaseEntity, **_P](
         self.coordinator.config_entry.async_start_reauth(self.hass)

     return cmd_wrapper
-
-
-def new_device_listener(
-    coordinator: ComelitBaseCoordinator,
-    new_devices_callback: Callable[
-        [
-            list[
-                ComelitSerialBridgeObject
-                | ComelitVedoAreaObject
-                | ComelitVedoZoneObject
-            ],
-            str,
-        ],
-        None,
-    ],
-    data_type: str,
-) -> Callable[[], None]:
-    """Subscribe to coordinator updates to check for new devices."""
-    known_devices: set[int] = set()
-
-    def _check_devices() -> None:
-        """Check for new devices and call callback with any new monitors."""
-        if not coordinator.data:
-            return
-
-        new_devices: list[DeviceType] = []
-        for _id in coordinator.data[data_type]:
-            if _id not in known_devices:
-                known_devices.add(_id)
-                new_devices.append(coordinator.data[data_type][_id])
-
-        if new_devices:
-            new_devices_callback(new_devices, data_type)
-
-    # Check for devices immediately
-    _check_devices()
-
-    return coordinator.async_add_listener(_check_devices)
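
For reference, the removed helper is consumed by the platforms like this on the left-hand side (condensed from the binary_sensor hunk above; MyEntity stands in for the concrete entity class):

    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
        """Create entities for devices the coordinator has not seen before."""
        async_add_entities(
            MyEntity(coordinator, device, config_entry.entry_id)
            for device in new_devices
        )

    # new_device_listener() runs one check immediately, then again on every
    # coordinator refresh; the unsubscribe callback it returns is released
    # when the config entry unloads.
    config_entry.async_on_unload(
        new_device_listener(coordinator, _add_new_entities, "alarm_zones")
    )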

@@ -514,7 +514,7 @@ class ChatLog:
         """Set the LLM system prompt."""
         llm_api: llm.APIInstance | None = None

-        if not user_llm_hass_api:
+        if user_llm_hass_api is None:
             pass
         elif isinstance(user_llm_hass_api, llm.API):
             llm_api = await user_llm_hass_api.async_get_api_instance(llm_context)
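
"not x" and "x is None" diverge for falsy-but-present values, which is the point of the change; a quick check:

    for value in (None, "", 0, []):
        print(repr(value), "not:", not value, "is None:", value is None)
    # Only None is None; "", 0, and [] are merely falsy.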

@@ -38,30 +38,22 @@ from home_assistant_intents import (
     ErrorKey,
     FuzzyConfig,
     FuzzyLanguageResponses,
-    LanguageScores,
     get_fuzzy_config,
     get_fuzzy_language,
     get_intents,
-    get_language_scores,
     get_languages,
 )
 import yaml

+from homeassistant import core
 from homeassistant.components.homeassistant.exposed_entities import (
     async_listen_entity_updates,
     async_should_expose,
 )
 from homeassistant.const import EVENT_STATE_CHANGED, MATCH_ALL
-from homeassistant.core import (
-    Event,
-    EventStateChangedData,
-    HomeAssistant,
-    State,
-    callback,
-)
+from homeassistant.core import Event, callback
 from homeassistant.helpers import (
     area_registry as ar,
-    config_validation as cv,
     device_registry as dr,
     entity_registry as er,
     floor_registry as fr,
@@ -200,7 +192,7 @@ class IntentCache:


 async def async_setup_default_agent(
-    hass: HomeAssistant,
+    hass: core.HomeAssistant,
     entity_component: EntityComponent[ConversationEntity],
     config_intents: dict[str, Any],
 ) -> None:
@@ -209,13 +201,15 @@ async def async_setup_default_agent(
     await entity_component.async_add_entities([agent])
     await get_agent_manager(hass).async_setup_default_agent(agent)

-    @callback
-    def async_entity_state_listener(event: Event[EventStateChangedData]) -> None:
+    @core.callback
+    def async_entity_state_listener(
+        event: core.Event[core.EventStateChangedData],
+    ) -> None:
         """Set expose flag on new entities."""
         async_should_expose(hass, DOMAIN, event.data["entity_id"])

-    @callback
-    def async_hass_started(hass: HomeAssistant) -> None:
+    @core.callback
+    def async_hass_started(hass: core.HomeAssistant) -> None:
         """Set expose flag on all entities."""
         for state in hass.states.async_all():
             async_should_expose(hass, DOMAIN, state.entity_id)
@@ -230,7 +224,9 @@ class DefaultAgent(ConversationEntity):
     _attr_name = "Home Assistant"
     _attr_supported_features = ConversationEntityFeature.CONTROL

-    def __init__(self, hass: HomeAssistant, config_intents: dict[str, Any]) -> None:
+    def __init__(
+        self, hass: core.HomeAssistant, config_intents: dict[str, Any]
+    ) -> None:
         """Initialize the default agent."""
         self.hass = hass
         self._lang_intents: dict[str, LanguageIntents | object] = {}
@@ -263,7 +259,7 @@ class DefaultAgent(ConversationEntity):
         """Return a list of supported languages."""
         return get_languages()

-    @callback
+    @core.callback
     def _filter_entity_registry_changes(
         self, event_data: er.EventEntityRegistryUpdatedData
     ) -> bool:
@@ -272,12 +268,12 @@ class DefaultAgent(ConversationEntity):
             field in event_data["changes"] for field in _ENTITY_REGISTRY_UPDATE_FIELDS
         )

-    @callback
-    def _filter_state_changes(self, event_data: EventStateChangedData) -> bool:
+    @core.callback
+    def _filter_state_changes(self, event_data: core.EventStateChangedData) -> bool:
         """Filter state changed events."""
         return not event_data["old_state"] or not event_data["new_state"]

-    @callback
+    @core.callback
     def _listen_clear_slot_list(self) -> None:
         """Listen for changes that can invalidate slot list."""
         assert self._unsub_clear_slot_list is None
@@ -346,81 +342,6 @@ class DefaultAgent(ConversationEntity):

         return result

-    async def async_debug_recognize(
-        self, user_input: ConversationInput
-    ) -> dict[str, Any] | None:
-        """Debug recognize from user input."""
-        result_dict: dict[str, Any] | None = None
-
-        if trigger_result := await self.async_recognize_sentence_trigger(user_input):
-            result_dict = {
-                # Matched a user-defined sentence trigger.
-                # We can't provide the response here without executing the
-                # trigger.
-                "match": True,
-                "source": "trigger",
-                "sentence_template": trigger_result.sentence_template or "",
-            }
-        elif intent_result := await self.async_recognize_intent(user_input):
-            successful_match = not intent_result.unmatched_entities
-            result_dict = {
-                # Name of the matching intent (or the closest)
-                "intent": {
-                    "name": intent_result.intent.name,
-                },
-                # Slot values that would be received by the intent
-                "slots": {  # direct access to values
-                    entity_key: entity.text or entity.value
-                    for entity_key, entity in intent_result.entities.items()
-                },
-                # Extra slot details, such as the originally matched text
-                "details": {
-                    entity_key: {
-                        "name": entity.name,
-                        "value": entity.value,
-                        "text": entity.text,
-                    }
-                    for entity_key, entity in intent_result.entities.items()
-                },
-                # Entities/areas/etc. that would be targeted
-                "targets": {},
-                # True if match was successful
-                "match": successful_match,
-                # Text of the sentence template that matched (or was closest)
-                "sentence_template": "",
-                # When match is incomplete, this will contain the best slot guesses
-                "unmatched_slots": _get_unmatched_slots(intent_result),
-                # True if match was not exact
-                "fuzzy_match": False,
-            }
-
-            if successful_match:
-                result_dict["targets"] = {
-                    state.entity_id: {"matched": is_matched}
-                    for state, is_matched in _get_debug_targets(
-                        self.hass, intent_result
-                    )
-                }
-
-            if intent_result.intent_sentence is not None:
-                result_dict["sentence_template"] = intent_result.intent_sentence.text
-
-            if intent_result.intent_metadata:
-                # Inspect metadata to determine if this matched a custom sentence
-                if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
-                    result_dict["source"] = "custom"
-                    result_dict["file"] = intent_result.intent_metadata.get(
-                        METADATA_CUSTOM_FILE
-                    )
-                else:
-                    result_dict["source"] = "builtin"
-
-                result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
-                    METADATA_FUZZY_MATCH, False
-                )
-
-        return result_dict
-
     async def _async_handle_message(
         self,
         user_input: ConversationInput,
@@ -969,7 +890,7 @@ class DefaultAgent(ConversationEntity):
     ) -> str:
         # Get first matched or unmatched state.
         # This is available in the response template as "state".
-        state1: State | None = None
+        state1: core.State | None = None
         if intent_response.matched_states:
             state1 = intent_response.matched_states[0]
         elif intent_response.unmatched_states:
@@ -1607,10 +1528,6 @@ class DefaultAgent(ConversationEntity):
             return None
         return response

-    async def async_get_language_scores(self) -> dict[str, LanguageScores]:
-        """Get support scores per language."""
-        return await self.hass.async_add_executor_job(get_language_scores)


 def _make_error_result(
     language: str,
@@ -1672,7 +1589,7 @@ def _get_unmatched_response(result: RecognizeResult) -> tuple[ErrorKey, dict[str


 def _get_match_error_response(
-    hass: HomeAssistant,
+    hass: core.HomeAssistant,
     match_error: intent.MatchFailedError,
 ) -> tuple[ErrorKey, dict[str, Any]]:
     """Return key and template arguments for error when target matching fails."""
@@ -1807,75 +1724,3 @@ def _collect_list_references(expression: Expression, list_names: set[str]) -> None:
     elif isinstance(expression, ListReference):
         # {list}
         list_names.add(expression.slot_name)
-
-
-def _get_debug_targets(
-    hass: HomeAssistant,
-    result: RecognizeResult,
-) -> Iterable[tuple[State, bool]]:
-    """Yield state/is_matched pairs for a hassil recognition."""
-    entities = result.entities
-
-    name: str | None = None
-    area_name: str | None = None
-    domains: set[str] | None = None
-    device_classes: set[str] | None = None
-    state_names: set[str] | None = None
-
-    if "name" in entities:
-        name = str(entities["name"].value)
-
-    if "area" in entities:
-        area_name = str(entities["area"].value)
-
-    if "domain" in entities:
-        domains = set(cv.ensure_list(entities["domain"].value))
-
-    if "device_class" in entities:
-        device_classes = set(cv.ensure_list(entities["device_class"].value))
-
-    if "state" in entities:
-        # HassGetState only
-        state_names = set(cv.ensure_list(entities["state"].value))
-
-    if (
-        (name is None)
-        and (area_name is None)
-        and (not domains)
-        and (not device_classes)
-        and (not state_names)
-    ):
-        # Avoid "matching" all entities when there is no filter
-        return
-
-    states = intent.async_match_states(
-        hass,
-        name=name,
-        area_name=area_name,
-        domains=domains,
-        device_classes=device_classes,
-    )
-
-    for state in states:
-        # For queries, a target is "matched" based on its state
-        is_matched = (state_names is None) or (state.state in state_names)
-        yield state, is_matched
-
-
-def _get_unmatched_slots(
-    result: RecognizeResult,
-) -> dict[str, str | int | float]:
-    """Return a dict of unmatched text/range slot entities."""
-    unmatched_slots: dict[str, str | int | float] = {}
-    for entity in result.unmatched_entities_list:
-        if isinstance(entity, UnmatchedTextEntity):
-            if entity.text == MISSING_ENTITY:
-                # Don't report <missing> since these are just missing context
-                # slots.
-                continue
-
-            unmatched_slots[entity.name] = entity.text
-        elif isinstance(entity, UnmatchedRangeEntity):
-            unmatched_slots[entity.name] = entity.value
-
-    return unmatched_slots

@@ -2,16 +2,21 @@

 from __future__ import annotations

+from collections.abc import Iterable
 from dataclasses import asdict
 from typing import Any

 from aiohttp import web
+from hassil.recognize import MISSING_ENTITY, RecognizeResult
+from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
+from home_assistant_intents import get_language_scores
 import voluptuous as vol

 from homeassistant.components import http, websocket_api
 from homeassistant.components.http.data_validator import RequestDataValidator
 from homeassistant.const import MATCH_ALL
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant, State, callback
+from homeassistant.helpers import config_validation as cv, intent
 from homeassistant.util import language as language_util

 from .agent_manager import (
@@ -21,6 +26,11 @@ from .agent_manager import (
     get_agent_manager,
 )
 from .const import DATA_COMPONENT
+from .default_agent import (
+    METADATA_CUSTOM_FILE,
+    METADATA_CUSTOM_SENTENCE,
+    METADATA_FUZZY_MATCH,
+)
 from .entity import ConversationEntity
 from .models import ConversationInput
@@ -196,12 +206,150 @@ async def websocket_hass_agent_debug(
             language=msg.get("language", hass.config.language),
             agent_id=agent.entity_id,
         )
-        result_dict = await agent.async_debug_recognize(user_input)
+        result_dict: dict[str, Any] | None = None
+
+        if trigger_result := await agent.async_recognize_sentence_trigger(user_input):
+            result_dict = {
+                # Matched a user-defined sentence trigger.
+                # We can't provide the response here without executing the
+                # trigger.
+                "match": True,
+                "source": "trigger",
+                "sentence_template": trigger_result.sentence_template or "",
+            }
+        elif intent_result := await agent.async_recognize_intent(user_input):
+            successful_match = not intent_result.unmatched_entities
+            result_dict = {
+                # Name of the matching intent (or the closest)
+                "intent": {
+                    "name": intent_result.intent.name,
+                },
+                # Slot values that would be received by the intent
+                "slots": {  # direct access to values
+                    entity_key: entity.text or entity.value
+                    for entity_key, entity in intent_result.entities.items()
+                },
+                # Extra slot details, such as the originally matched text
+                "details": {
+                    entity_key: {
+                        "name": entity.name,
+                        "value": entity.value,
+                        "text": entity.text,
+                    }
+                    for entity_key, entity in intent_result.entities.items()
+                },
+                # Entities/areas/etc. that would be targeted
+                "targets": {},
+                # True if match was successful
+                "match": successful_match,
+                # Text of the sentence template that matched (or was closest)
+                "sentence_template": "",
+                # When match is incomplete, this will contain the best slot guesses
+                "unmatched_slots": _get_unmatched_slots(intent_result),
+                # True if match was not exact
+                "fuzzy_match": False,
+            }
+
+            if successful_match:
+                result_dict["targets"] = {
+                    state.entity_id: {"matched": is_matched}
+                    for state, is_matched in _get_debug_targets(hass, intent_result)
+                }
+
+            if intent_result.intent_sentence is not None:
+                result_dict["sentence_template"] = intent_result.intent_sentence.text
+
+            if intent_result.intent_metadata:
+                # Inspect metadata to determine if this matched a custom sentence
+                if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
+                    result_dict["source"] = "custom"
+                    result_dict["file"] = intent_result.intent_metadata.get(
+                        METADATA_CUSTOM_FILE
+                    )
+                else:
+                    result_dict["source"] = "builtin"
+
+                result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
+                    METADATA_FUZZY_MATCH, False
+                )
+
         result_dicts.append(result_dict)

     connection.send_result(msg["id"], {"results": result_dicts})
+
+
+def _get_debug_targets(
+    hass: HomeAssistant,
+    result: RecognizeResult,
+) -> Iterable[tuple[State, bool]]:
+    """Yield state/is_matched pairs for a hassil recognition."""
+    entities = result.entities
+
+    name: str | None = None
+    area_name: str | None = None
+    domains: set[str] | None = None
+    device_classes: set[str] | None = None
+    state_names: set[str] | None = None
+
+    if "name" in entities:
+        name = str(entities["name"].value)
+
+    if "area" in entities:
+        area_name = str(entities["area"].value)
+
+    if "domain" in entities:
+        domains = set(cv.ensure_list(entities["domain"].value))
+
+    if "device_class" in entities:
+        device_classes = set(cv.ensure_list(entities["device_class"].value))
+
+    if "state" in entities:
+        # HassGetState only
+        state_names = set(cv.ensure_list(entities["state"].value))
+
+    if (
+        (name is None)
+        and (area_name is None)
+        and (not domains)
+        and (not device_classes)
+        and (not state_names)
+    ):
+        # Avoid "matching" all entities when there is no filter
+        return
+
+    states = intent.async_match_states(
+        hass,
+        name=name,
+        area_name=area_name,
+        domains=domains,
+        device_classes=device_classes,
+    )
+
+    for state in states:
+        # For queries, a target is "matched" based on its state
+        is_matched = (state_names is None) or (state.state in state_names)
+        yield state, is_matched
+
+
+def _get_unmatched_slots(
+    result: RecognizeResult,
+) -> dict[str, str | int | float]:
+    """Return a dict of unmatched text/range slot entities."""
+    unmatched_slots: dict[str, str | int | float] = {}
+    for entity in result.unmatched_entities_list:
+        if isinstance(entity, UnmatchedTextEntity):
+            if entity.text == MISSING_ENTITY:
+                # Don't report <missing> since these are just missing context
+                # slots.
+                continue
+
+            unmatched_slots[entity.name] = entity.text
+        elif isinstance(entity, UnmatchedRangeEntity):
+            unmatched_slots[entity.name] = entity.value
+
+    return unmatched_slots


 @websocket_api.websocket_command(
     {
         vol.Required("type"): "conversation/agent/homeassistant/language_scores",
@@ -216,13 +364,10 @@ async def websocket_hass_agent_language_scores(
     msg: dict[str, Any],
 ) -> None:
     """Get support scores per language."""
-    agent = get_agent_manager(hass).default_agent
-    assert agent is not None
-
     language = msg.get("language", hass.config.language)
     country = msg.get("country", hass.config.country)

-    scores = await agent.async_get_language_scores()
+    scores = await hass.async_add_executor_job(get_language_scores)
     matching_langs = language_util.matches(language, scores.keys(), country=country)
     preferred_lang = matching_langs[0] if matching_langs else language
     result = {
@@ -24,7 +24,6 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.util import dt as dt_util
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import EnergyConverter

from .const import DOMAIN
from .const import DOMAIN

@@ -147,7 +146,6 @@ class DukeEnergyCoordinator(DataUpdateCoordinator[None]):
name=f"{name_prefix} Consumption",
name=f"{name_prefix} Consumption",
source=DOMAIN,
source=DOMAIN,
statistic_id=consumption_statistic_id,
statistic_id=consumption_statistic_id,
unit_class=EnergyConverter.UNIT_CLASS,
unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR
unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR
if meter["serviceType"] == "ELECTRIC"
if meter["serviceType"] == "ELECTRIC"
else UnitOfVolume.CENTUM_CUBIC_FEET,
else UnitOfVolume.CENTUM_CUBIC_FEET,
@@ -20,7 +20,6 @@ from homeassistant.components.recorder.statistics import (
from homeassistant.components.recorder.util import get_instance
from homeassistant.components.recorder.util import get_instance
from homeassistant.const import UnitOfEnergy
from homeassistant.const import UnitOfEnergy
from homeassistant.util import dt as dt_util
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import EnergyConverter

from .const import DOMAIN, LOGGER
from .const import DOMAIN, LOGGER

@@ -154,7 +153,6 @@ class ElviaImporter:
name=f"{self.metering_point_id} Consumption",
name=f"{self.metering_point_id} Consumption",
source=DOMAIN,
source=DOMAIN,
statistic_id=statistic_id,
statistic_id=statistic_id,
unit_class=EnergyConverter.UNIT_CLASS,
unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
),
),
statistics=statistics,
statistics=statistics,
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/environment_canada",
"documentation": "https://www.home-assistant.io/integrations/environment_canada",
"iot_class": "cloud_polling",
"iot_class": "cloud_polling",
"loggers": ["env_canada"],
"loggers": ["env_canada"],
"requirements": ["env-canada==0.11.3"]
"requirements": ["env-canada==0.11.2"]
}
}
@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"quality_scale": "platinum",
"requirements": [
"requirements": [
"aioesphomeapi==41.13.0",
"aioesphomeapi==41.12.0",
"esphome-dashboard-api==1.3.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.4.0"
"bleak-esphome==3.4.0"
],
],
@@ -29,12 +29,7 @@ from homeassistant.components.climate import (
ClimateEntityFeature,
ClimateEntityFeature,
HVACMode,
HVACMode,
)
)
from homeassistant.const import (
from homeassistant.const import ATTR_MODE, PRECISION_TENTHS, UnitOfTemperature
ATTR_MODE,
ATTR_TEMPERATURE,
PRECISION_TENTHS,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.entity_platform import AddEntitiesCallback
@@ -248,7 +243,7 @@ class EvoZone(EvoChild, EvoClimateEntity):
async def async_set_temperature(self, **kwargs: Any) -> None:
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set a new target temperature."""
"""Set a new target temperature."""

temperature = kwargs[ATTR_TEMPERATURE]
temperature = kwargs["temperature"]

if (until := kwargs.get("until")) is None:
if (until := kwargs.get("until")) is None:
if self._evo_device.mode == EvoZoneMode.TEMPORARY_OVERRIDE:
if self._evo_device.mode == EvoZoneMode.TEMPORARY_OVERRIDE:
@@ -744,9 +744,7 @@ class ManifestJSONView(HomeAssistantView):
@websocket_api.websocket_command(
@websocket_api.websocket_command(
{
{
"type": "frontend/get_icons",
"type": "frontend/get_icons",
vol.Required("category"): vol.In(
vol.Required("category"): vol.In({"entity", "entity_component", "services"}),
{"entity", "entity_component", "services", "triggers"}
),
vol.Optional("integration"): vol.All(cv.ensure_list, [str]),
vol.Optional("integration"): vol.All(cv.ensure_list, [str]),
}
}
)
)
@@ -5,5 +5,5 @@
"config_flow": true,
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/holiday",
"documentation": "https://www.home-assistant.io/integrations/holiday",
"iot_class": "local_polling",
"iot_class": "local_polling",
"requirements": ["holidays==0.82", "babel==2.15.0"]
"requirements": ["holidays==0.81", "babel==2.15.0"]
}
}
@@ -13,12 +13,6 @@
"pid": "4001",
"pid": "4001",
"description": "*zbt-2*",
"description": "*zbt-2*",
"known_devices": ["ZBT-2"]
"known_devices": ["ZBT-2"]
},
{
"vid": "303A",
"pid": "831A",
"description": "*zbt-2*",
"known_devices": ["ZBT-2"]
}
}
]
]
}
}
@@ -456,7 +456,7 @@ class HomeAccessory(Accessory): # type: ignore[misc]
return self._available
return self._available

@ha_callback
@ha_callback
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def run(self) -> None:
def run(self) -> None:
"""Handle accessory driver started event."""
"""Handle accessory driver started event."""
if state := self.hass.states.get(self.entity_id):
if state := self.hass.states.get(self.entity_id):
@@ -725,7 +725,7 @@ class HomeDriver(AccessoryDriver): # type: ignore[misc]
self._entry_title = entry_title
self._entry_title = entry_title
self.iid_storage = iid_storage
self.iid_storage = iid_storage

@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def pair(
def pair(
self, client_username_bytes: bytes, client_public: str, client_permissions: int
self, client_username_bytes: bytes, client_public: str, client_permissions: int
) -> bool:
) -> bool:
@@ -735,7 +735,7 @@ class HomeDriver(AccessoryDriver): # type: ignore[misc]
async_dismiss_setup_message(self.hass, self.entry_id)
async_dismiss_setup_message(self.hass, self.entry_id)
return cast(bool, success)
return cast(bool, success)

@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def unpair(self, client_uuid: UUID) -> None:
def unpair(self, client_uuid: UUID) -> None:
"""Override super function to show setup message if unpaired."""
"""Override super function to show setup message if unpaired."""
super().unpair(client_uuid)
super().unpair(client_uuid)
@@ -71,7 +71,7 @@ class HomeDoorbellAccessory(HomeAccessory):
self.async_update_doorbell_state(None, state)
self.async_update_doorbell_state(None, state)

@ha_callback
@ha_callback
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def run(self) -> None:
def run(self) -> None:
"""Handle doorbell event."""
"""Handle doorbell event."""
if self._char_doorbell_detected:
if self._char_doorbell_detected:
@@ -219,7 +219,7 @@ class AirPurifier(Fan):
return preset_mode.lower() != "auto"
return preset_mode.lower() != "auto"

@callback
@callback
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def run(self) -> None:
def run(self) -> None:
"""Handle accessory driver started event.
"""Handle accessory driver started event.

@@ -229,7 +229,7 @@ class Camera(HomeDoorbellAccessory, PyhapCamera): # type: ignore[misc]
)
)
self._async_update_motion_state(None, state)
self._async_update_motion_state(None, state)

@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
@callback
@callback
def run(self) -> None:
def run(self) -> None:
"""Handle accessory driver started event.
"""Handle accessory driver started event.

@@ -127,7 +127,7 @@ class GarageDoorOpener(HomeAccessory):
self.async_update_state(state)
self.async_update_state(state)

@callback
@callback
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def run(self) -> None:
def run(self) -> None:
"""Handle accessory driver started event.
"""Handle accessory driver started event.

@@ -178,7 +178,7 @@ class HumidifierDehumidifier(HomeAccessory):
self._async_update_current_humidity(humidity_state)
self._async_update_current_humidity(humidity_state)

@callback
@callback
@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
def run(self) -> None:
def run(self) -> None:
"""Handle accessory driver started event.
"""Handle accessory driver started event.

@@ -108,7 +108,7 @@ class DeviceTriggerAccessory(HomeAccessory):
_LOGGER.log,
_LOGGER.log,
)
)

@pyhap_callback # type: ignore[untyped-decorator]
@pyhap_callback # type: ignore[misc]
@callback
@callback
def run(self) -> None:
def run(self) -> None:
"""Run the accessory."""
"""Run the accessory."""
@@ -41,12 +41,16 @@ class IdasenDeskCoordinator(DataUpdateCoordinator[int | None]):
self._expected_connected = False
self._expected_connected = False
self._height: int | None = None
self._height: int | None = None

@callback
def async_update_data() -> None:
self.async_set_updated_data(self._height)

self._debouncer = Debouncer(
self._debouncer = Debouncer(
hass=self.hass,
hass=self.hass,
logger=_LOGGER,
logger=_LOGGER,
cooldown=UPDATE_DEBOUNCE_TIME,
cooldown=UPDATE_DEBOUNCE_TIME,
immediate=True,
immediate=True,
function=callback(lambda: self.async_set_updated_data(self._height)),
function=async_update_data,
)
)

async def async_connect(self) -> bool:
async def async_connect(self) -> bool:
@@ -5,6 +5,6 @@
"config_flow": true,
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/imgw_pib",
"documentation": "https://www.home-assistant.io/integrations/imgw_pib",
"iot_class": "cloud_polling",
"iot_class": "cloud_polling",
"quality_scale": "platinum",
"quality_scale": "silver",
"requirements": ["imgw_pib==1.5.6"]
"requirements": ["imgw_pib==1.5.6"]
}
}
@@ -50,17 +50,17 @@ rules:
discovery:
discovery:
status: exempt
status: exempt
comment: The integration is a cloud service and thus does not support discovery.
comment: The integration is a cloud service and thus does not support discovery.
docs-data-update: done
docs-data-update: todo
docs-examples: done
docs-examples: todo
docs-known-limitations: done
docs-known-limitations: todo
docs-supported-devices:
docs-supported-devices:
status: exempt
status: exempt
comment: This is a service, which doesn't integrate with any devices.
comment: This is a service, which doesn't integrate with any devices.
docs-supported-functions: done
docs-supported-functions: todo
docs-troubleshooting:
docs-troubleshooting:
status: exempt
status: exempt
comment: No known issues that could be resolved by the user.
comment: No known issues that could be resolved by the user.
docs-use-cases: done
docs-use-cases: todo
dynamic-devices:
dynamic-devices:
status: exempt
status: exempt
comment: This integration has a fixed single service.
comment: This integration has a fixed single service.
@@ -34,7 +34,6 @@ from homeassistant.helpers.device_registry import (
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util.unit_conversion import EnergyConverter, VolumeConverter

from .const import DOMAIN
from .const import DOMAIN
from .coordinator import IstaConfigEntry, IstaCoordinator
from .coordinator import IstaConfigEntry, IstaCoordinator
@@ -50,7 +49,6 @@ class IstaSensorEntityDescription(SensorEntityDescription):
"""Ista EcoTrend Sensor Description."""
"""Ista EcoTrend Sensor Description."""

consumption_type: IstaConsumptionType
consumption_type: IstaConsumptionType
unit_class: str | None = None
value_type: IstaValueType | None = None
value_type: IstaValueType | None = None


@@ -86,7 +84,6 @@ SENSOR_DESCRIPTIONS: tuple[IstaSensorEntityDescription, ...] = (
suggested_display_precision=1,
suggested_display_precision=1,
consumption_type=IstaConsumptionType.HEATING,
consumption_type=IstaConsumptionType.HEATING,
value_type=IstaValueType.ENERGY,
value_type=IstaValueType.ENERGY,
unit_class=EnergyConverter.UNIT_CLASS,
),
),
IstaSensorEntityDescription(
IstaSensorEntityDescription(
key=IstaSensorEntity.HEATING_COST,
key=IstaSensorEntity.HEATING_COST,
@@ -107,7 +104,6 @@ SENSOR_DESCRIPTIONS: tuple[IstaSensorEntityDescription, ...] = (
state_class=SensorStateClass.TOTAL,
state_class=SensorStateClass.TOTAL,
suggested_display_precision=1,
suggested_display_precision=1,
consumption_type=IstaConsumptionType.HOT_WATER,
consumption_type=IstaConsumptionType.HOT_WATER,
unit_class=VolumeConverter.UNIT_CLASS,
),
),
IstaSensorEntityDescription(
IstaSensorEntityDescription(
key=IstaSensorEntity.HOT_WATER_ENERGY,
key=IstaSensorEntity.HOT_WATER_ENERGY,
@@ -118,7 +114,6 @@ SENSOR_DESCRIPTIONS: tuple[IstaSensorEntityDescription, ...] = (
suggested_display_precision=1,
suggested_display_precision=1,
consumption_type=IstaConsumptionType.HOT_WATER,
consumption_type=IstaConsumptionType.HOT_WATER,
value_type=IstaValueType.ENERGY,
value_type=IstaValueType.ENERGY,
unit_class=EnergyConverter.UNIT_CLASS,
),
),
IstaSensorEntityDescription(
IstaSensorEntityDescription(
key=IstaSensorEntity.HOT_WATER_COST,
key=IstaSensorEntity.HOT_WATER_COST,
@@ -140,7 +135,6 @@ SENSOR_DESCRIPTIONS: tuple[IstaSensorEntityDescription, ...] = (
suggested_display_precision=1,
suggested_display_precision=1,
entity_registry_enabled_default=False,
entity_registry_enabled_default=False,
consumption_type=IstaConsumptionType.WATER,
consumption_type=IstaConsumptionType.WATER,
unit_class=VolumeConverter.UNIT_CLASS,
),
),
IstaSensorEntityDescription(
IstaSensorEntityDescription(
key=IstaSensorEntity.WATER_COST,
key=IstaSensorEntity.WATER_COST,
@@ -282,7 +276,6 @@ class IstaSensor(CoordinatorEntity[IstaCoordinator], SensorEntity):
"name": f"{self.device_entry.name} {self.name}",
"name": f"{self.device_entry.name} {self.name}",
"source": DOMAIN,
"source": DOMAIN,
"statistic_id": statistic_id,
"statistic_id": statistic_id,
"unit_class": self.entity_description.unit_class,
"unit_of_measurement": self.entity_description.native_unit_of_measurement,
"unit_of_measurement": self.entity_description.native_unit_of_measurement,
}
}
if statistics:
if statistics:
@@ -36,11 +36,6 @@ from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import (
EnergyConverter,
TemperatureConverter,
VolumeConverter,
)

from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN
from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN

@@ -259,7 +254,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
"source": DOMAIN,
"source": DOMAIN,
"name": "Outdoor temperature",
"name": "Outdoor temperature",
"statistic_id": f"{DOMAIN}:temperature_outdoor",
"statistic_id": f"{DOMAIN}:temperature_outdoor",
"unit_class": TemperatureConverter.UNIT_CLASS,
"unit_of_measurement": UnitOfTemperature.CELSIUS,
"unit_of_measurement": UnitOfTemperature.CELSIUS,
"mean_type": StatisticMeanType.ARITHMETIC,
"mean_type": StatisticMeanType.ARITHMETIC,
"has_sum": False,
"has_sum": False,
@@ -273,7 +267,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
"source": DOMAIN,
"source": DOMAIN,
"name": "Energy consumption 1",
"name": "Energy consumption 1",
"statistic_id": f"{DOMAIN}:energy_consumption_kwh",
"statistic_id": f"{DOMAIN}:energy_consumption_kwh",
"unit_class": EnergyConverter.UNIT_CLASS,
"unit_of_measurement": UnitOfEnergy.KILO_WATT_HOUR,
"unit_of_measurement": UnitOfEnergy.KILO_WATT_HOUR,
"mean_type": StatisticMeanType.NONE,
"mean_type": StatisticMeanType.NONE,
"has_sum": True,
"has_sum": True,
@@ -286,7 +279,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
"source": DOMAIN,
"source": DOMAIN,
"name": "Energy consumption 2",
"name": "Energy consumption 2",
"statistic_id": f"{DOMAIN}:energy_consumption_mwh",
"statistic_id": f"{DOMAIN}:energy_consumption_mwh",
"unit_class": EnergyConverter.UNIT_CLASS,
"unit_of_measurement": UnitOfEnergy.MEGA_WATT_HOUR,
"unit_of_measurement": UnitOfEnergy.MEGA_WATT_HOUR,
"mean_type": StatisticMeanType.NONE,
"mean_type": StatisticMeanType.NONE,
"has_sum": True,
"has_sum": True,
@@ -301,7 +293,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
"source": DOMAIN,
"source": DOMAIN,
"name": "Gas consumption 1",
"name": "Gas consumption 1",
"statistic_id": f"{DOMAIN}:gas_consumption_m3",
"statistic_id": f"{DOMAIN}:gas_consumption_m3",
"unit_class": VolumeConverter.UNIT_CLASS,
"unit_of_measurement": UnitOfVolume.CUBIC_METERS,
"unit_of_measurement": UnitOfVolume.CUBIC_METERS,
"mean_type": StatisticMeanType.NONE,
"mean_type": StatisticMeanType.NONE,
"has_sum": True,
"has_sum": True,
@@ -316,7 +307,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
"source": DOMAIN,
"source": DOMAIN,
"name": "Gas consumption 2",
"name": "Gas consumption 2",
"statistic_id": f"{DOMAIN}:gas_consumption_ft3",
"statistic_id": f"{DOMAIN}:gas_consumption_ft3",
"unit_class": VolumeConverter.UNIT_CLASS,
"unit_of_measurement": UnitOfVolume.CUBIC_FEET,
"unit_of_measurement": UnitOfVolume.CUBIC_FEET,
"mean_type": StatisticMeanType.NONE,
"mean_type": StatisticMeanType.NONE,
"has_sum": True,
"has_sum": True,
@@ -329,7 +319,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
"source": RECORDER_DOMAIN,
"source": RECORDER_DOMAIN,
"name": None,
"name": None,
"statistic_id": "sensor.statistics_issues_issue_1",
"statistic_id": "sensor.statistics_issues_issue_1",
"unit_class": VolumeConverter.UNIT_CLASS,
"unit_of_measurement": UnitOfVolume.CUBIC_METERS,
"unit_of_measurement": UnitOfVolume.CUBIC_METERS,
"mean_type": StatisticMeanType.ARITHMETIC,
"mean_type": StatisticMeanType.ARITHMETIC,
"has_sum": False,
"has_sum": False,
@@ -342,7 +331,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
"source": RECORDER_DOMAIN,
"source": RECORDER_DOMAIN,
"name": None,
"name": None,
"statistic_id": "sensor.statistics_issues_issue_2",
"statistic_id": "sensor.statistics_issues_issue_2",
"unit_class": None,
"unit_of_measurement": "cats",
"unit_of_measurement": "cats",
"mean_type": StatisticMeanType.ARITHMETIC,
"mean_type": StatisticMeanType.ARITHMETIC,
"has_sum": False,
"has_sum": False,
@@ -355,7 +343,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
"source": RECORDER_DOMAIN,
"source": RECORDER_DOMAIN,
"name": None,
"name": None,
"statistic_id": "sensor.statistics_issues_issue_3",
"statistic_id": "sensor.statistics_issues_issue_3",
"unit_class": VolumeConverter.UNIT_CLASS,
"unit_of_measurement": UnitOfVolume.CUBIC_METERS,
"unit_of_measurement": UnitOfVolume.CUBIC_METERS,
"mean_type": StatisticMeanType.ARITHMETIC,
"mean_type": StatisticMeanType.ARITHMETIC,
"has_sum": False,
"has_sum": False,
@@ -368,7 +355,6 @@ async def _insert_statistics(hass: HomeAssistant) -> None:
"source": RECORDER_DOMAIN,
"source": RECORDER_DOMAIN,
"name": None,
"name": None,
"statistic_id": "sensor.statistics_issues_issue_4",
"statistic_id": "sensor.statistics_issues_issue_4",
"unit_class": VolumeConverter.UNIT_CLASS,
"unit_of_measurement": UnitOfVolume.CUBIC_METERS,
"unit_of_measurement": UnitOfVolume.CUBIC_METERS,
"mean_type": StatisticMeanType.ARITHMETIC,
"mean_type": StatisticMeanType.ARITHMETIC,
"has_sum": False,
"has_sum": False,
@@ -389,7 +375,6 @@ async def _insert_wrong_wind_direction_statistics(hass: HomeAssistant) -> None:
"source": RECORDER_DOMAIN,
"source": RECORDER_DOMAIN,
"name": None,
"name": None,
"statistic_id": "sensor.statistics_issues_issue_5",
"statistic_id": "sensor.statistics_issues_issue_5",
"unit_class": None,
"unit_of_measurement": DEGREE,
"unit_of_measurement": DEGREE,
"mean_type": StatisticMeanType.ARITHMETIC,
"mean_type": StatisticMeanType.ARITHMETIC,
"has_sum": False,
"has_sum": False,
@@ -37,5 +37,5 @@
"iot_class": "cloud_push",
"iot_class": "cloud_push",
"loggers": ["pylamarzocco"],
"loggers": ["pylamarzocco"],
"quality_scale": "platinum",
"quality_scale": "platinum",
"requirements": ["pylamarzocco==2.1.2"]
"requirements": ["pylamarzocco==2.1.1"]
}
}
@@ -1,138 +0,0 @@
"""Provides conditions for lights."""

from typing import TYPE_CHECKING, Any, Final, override

import voluptuous as vol

from homeassistant.const import (
CONF_OPTIONS,
CONF_STATE,
CONF_TARGET,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import HomeAssistant, split_entity_id
from homeassistant.helpers import config_validation as cv, target
from homeassistant.helpers.condition import (
Condition,
ConditionCheckerType,
ConditionConfig,
trace_condition_function,
)
from homeassistant.helpers.typing import ConfigType, TemplateVarsType

from .const import DOMAIN

ATTR_BEHAVIOR: Final = "behavior"
BEHAVIOR_ONE: Final = "one"
BEHAVIOR_ANY: Final = "any"
BEHAVIOR_ALL: Final = "all"

STATE_CONDITION_TYPE: Final = "state"

STATE_CONDITION_OPTIONS_SCHEMA: dict[vol.Marker, Any] = {
vol.Required(CONF_STATE): vol.In([STATE_ON, STATE_OFF]),
vol.Required(ATTR_BEHAVIOR, default=BEHAVIOR_ANY): vol.In(
[BEHAVIOR_ONE, BEHAVIOR_ANY, BEHAVIOR_ALL]
),
}
STATE_CONDITION_SCHEMA = vol.Schema(
{
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
CONF_OPTIONS: STATE_CONDITION_OPTIONS_SCHEMA,
}
)


class StateCondition(Condition):
"""State condition."""

def __init__(self, hass: HomeAssistant, config: ConditionConfig) -> None:
"""Initialize condition."""
self._hass = hass
if TYPE_CHECKING:
assert config.target
assert config.options
self._target = config.target
self._state = config.options[CONF_STATE]
self._behavior = config.options[ATTR_BEHAVIOR]

@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return STATE_CONDITION_SCHEMA(config)  # type: ignore[no-any-return]

@override
async def async_get_checker(self) -> ConditionCheckerType:
"""Get the condition checker."""

def check_any_match_state(entity_ids: set[str]) -> bool:
"""Test if any entity match the state."""
return any(
entity_state.state == self._state
for entity_id in entity_ids
if (entity_state := self._hass.states.get(entity_id))
is not None  # Ignore unavailable entities
)

def check_all_match_state(entity_ids: set[str]) -> bool:
"""Test if all entities match the state."""
return all(
entity_state.state == self._state
for entity_id in entity_ids
if (entity_state := self._hass.states.get(entity_id))
is not None  # Ignore unavailable entities
)

def check_one_match_state(entity_ids: set[str]) -> bool:
"""Check that only one entity matches the state."""
matched = False
for entity_id in entity_ids:
# Ignore unavailable entities
if (entity_state := self._hass.states.get(entity_id)) is None:
continue
if entity_state.state != self._state:
continue
if matched:
return False
matched = True
return matched

if self._behavior == BEHAVIOR_ANY:
matcher = check_any_match_state
elif self._behavior == BEHAVIOR_ALL:
matcher = check_all_match_state
elif self._behavior == BEHAVIOR_ONE:
matcher = check_one_match_state

@trace_condition_function
def test_state(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool:
"""Test state condition."""
selector_data = target.TargetSelectorData(self._target)
targeted_entities = target.async_extract_referenced_entity_ids(
hass, selector_data, expand_group=False
)
referenced_entity_ids = targeted_entities.referenced.union(
targeted_entities.indirectly_referenced
)
light_entity_ids = {
entity_id
for entity_id in referenced_entity_ids
if split_entity_id(entity_id)[0] == DOMAIN
}
return matcher(light_entity_ids)

return test_state


CONDITIONS: dict[str, type[Condition]] = {
STATE_CONDITION_TYPE: StateCondition,
}


async def async_get_conditions(hass: HomeAssistant) -> dict[str, type[Condition]]:
"""Return the light conditions."""
return CONDITIONS
@@ -1,24 +0,0 @@
state:
target:
entity:
domain: light
fields:
state:
required: true
default: "on"
selector:
select:
options:
- "off"
- "on"
translation_key: state
behavior:
required: true
default: any
selector:
select:
translation_key: condition_behavior
options:
- one
- all
- any
@@ -1,9 +1,4 @@
{
{
"conditions": {
"state": {
"condition": "mdi:state-machine"
}
},
"entity_component": {
"entity_component": {
"_": {
"_": {
"default": "mdi:lightbulb",
"default": "mdi:lightbulb",
@@ -30,10 +25,5 @@
"turn_on": {
"turn_on": {
"service": "mdi:lightbulb-on"
"service": "mdi:lightbulb-on"
}
}
},
"triggers": {
"state": {
"trigger": "mdi:state-machine"
}
}
}
}
}
@@ -37,23 +37,6 @@
"field_xy_color_name": "XY-color",
"field_xy_color_name": "XY-color",
"section_advanced_fields_name": "Advanced options"
"section_advanced_fields_name": "Advanced options"
},
},
"conditions": {
"state": {
"name": "State",
"description": "If lights are in a specific state, such as on or off.",
"description_configured": "If light states match",
"fields": {
"state": {
"name": "State",
"description": "The state to match."
},
"behavior": {
"name": "Behavior",
"description": "How the state should match on the targeted lights."
}
}
}
},
"device_automation": {
"device_automation": {
"action_type": {
"action_type": {
"brightness_decrease": "Decrease {entity_name} brightness",
"brightness_decrease": "Decrease {entity_name} brightness",
@@ -150,20 +133,6 @@
}
}
},
},
"selector": {
"selector": {
"condition_behavior": {
"options": {
"all": "All",
"any": "Any",
"one": "One"
}
},
"trigger_behavior": {
"options": {
"first": "First",
"last": "Last",
"any": "Any"
}
},
"color_name": {
"color_name": {
"options": {
"options": {
"homeassistant": "Home Assistant",
"homeassistant": "Home Assistant",
@@ -321,12 +290,6 @@
"short": "Short",
"short": "Short",
"long": "Long"
"long": "Long"
}
}
},
"state": {
"options": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
}
},
},
"services": {
"services": {
@@ -499,22 +462,5 @@
}
}
}
}
}
}
},
"triggers": {
"state": {
"name": "State",
"description": "When the state of a light changes, such as turning on or off.",
"description_configured": "When the state of a light changes",
"fields": {
"state": {
"name": "State",
"description": "The state to trigger on."
},
"behavior": {
"name": "Behavior",
"description": "The behavior of the targeted entities to trigger on."
}
}
}
}
}
}
}
@@ -1,152 +0,0 @@
"""Provides triggers for lights."""

from typing import TYPE_CHECKING, Final, cast, override

import voluptuous as vol

from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_STATE,
CONF_TARGET,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import process_state_match
from homeassistant.helpers.target import (
TargetStateChangedData,
async_track_target_selector_state_change_event,
)
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN

# remove when #151314 is merged
CONF_OPTIONS: Final = "options"

ATTR_BEHAVIOR: Final = "behavior"
BEHAVIOR_FIRST: Final = "first"
BEHAVIOR_LAST: Final = "last"
BEHAVIOR_ANY: Final = "any"

STATE_PLATFORM_TYPE: Final = "state"
STATE_TRIGGER_SCHEMA = vol.Schema(
{
vol.Required(CONF_OPTIONS): {
vol.Required(CONF_STATE): vol.In([STATE_ON, STATE_OFF]),
vol.Required(ATTR_BEHAVIOR, default=BEHAVIOR_ANY): vol.In(
[BEHAVIOR_FIRST, BEHAVIOR_LAST, BEHAVIOR_ANY]
),
},
vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
}
)


class StateTrigger(Trigger):
"""Trigger for state changes."""

@override
@classmethod
async def async_validate_config(
cls, hass: HomeAssistant, config: ConfigType
) -> ConfigType:
"""Validate config."""
return cast(ConfigType, STATE_TRIGGER_SCHEMA(config))

def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the state trigger."""
super().__init__(hass, config)
if TYPE_CHECKING:
assert config.options is not None
assert config.target is not None
self._options = config.options
self._target = config.target

@override
async def async_attach_runner(
self, run_action: TriggerActionRunner
) -> CALLBACK_TYPE:
"""Attach the trigger to an action runner."""
match_config_state = process_state_match(self._options.get(CONF_STATE))

def check_all_match(entity_ids: set[str]) -> bool:
"""Check if all entity states match."""
return all(
match_config_state(state.state)
for entity_id in entity_ids
if (state := self._hass.states.get(entity_id)) is not None
)

def check_one_match(entity_ids: set[str]) -> bool:
"""Check that only one entity state matches."""
return (
sum(
match_config_state(state.state)
for entity_id in entity_ids
if (state := self._hass.states.get(entity_id)) is not None
)
== 1
)

behavior = self._options.get(ATTR_BEHAVIOR)

@callback
def state_change_listener(
target_state_change_data: TargetStateChangedData,
) -> None:
"""Listen for state changes and call action."""
event = target_state_change_data.state_change_event
entity_id = event.data["entity_id"]
from_state = event.data["old_state"]
to_state = event.data["new_state"]

if to_state is None:
return

# This check is required for "first" behavior, to check that it went from zero
# entities matching the state to one. Otherwise, if previously there were two
# entities on CONF_STATE and one changed, this would trigger.
# For "last" behavior it is not required, but serves as a quicker fail check.
if not match_config_state(to_state.state):
return
if behavior == BEHAVIOR_LAST:
if not check_all_match(target_state_change_data.targeted_entity_ids):
return
elif behavior == BEHAVIOR_FIRST:
if not check_one_match(target_state_change_data.targeted_entity_ids):
return

run_action(
{
ATTR_ENTITY_ID: entity_id,
"from_state": from_state,
"to_state": to_state,
},
f"state of {entity_id}",
event.context,
)

def entity_filter(entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
return {
entity_id
for entity_id in entities
if split_entity_id(entity_id)[0] == DOMAIN
}

return async_track_target_selector_state_change_event(
self._hass, self._target, state_change_listener, entity_filter
)


TRIGGERS: dict[str, type[Trigger]] = {
STATE_PLATFORM_TYPE: StateTrigger,
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
"""Return the triggers for lights."""
return TRIGGERS
@@ -1,24 +0,0 @@
state:
target:
entity:
domain: light
fields:
state:
required: true
default: "on"
selector:
select:
options:
- "off"
- "on"
translation_key: state
behavior:
required: true
default: any
selector:
select:
translation_key: trigger_behavior
options:
- first
- last
- any
@@ -1,36 +1 @@
"""The london_underground component."""
"""The london_underground component."""

from __future__ import annotations

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN as DOMAIN
from .coordinator import LondonTubeCoordinator, LondonUndergroundConfigEntry, TubeData

PLATFORMS: list[Platform] = [Platform.SENSOR]


async def async_setup_entry(
hass: HomeAssistant, entry: LondonUndergroundConfigEntry
) -> bool:
"""Set up London Underground from a config entry."""

session = async_get_clientsession(hass)
data = TubeData(session)
coordinator = LondonTubeCoordinator(hass, data, config_entry=entry)
await coordinator.async_config_entry_first_refresh()

entry.runtime_data = coordinator
# Forward the setup to the sensor platform
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

return True


async def async_unload_entry(
hass: HomeAssistant, entry: LondonUndergroundConfigEntry
) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -1,152 +0,0 @@
"""Config flow for London Underground integration."""

from __future__ import annotations

import asyncio
import logging
from typing import Any

from london_tube_status import TubeData
import voluptuous as vol

from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
OptionsFlowWithReload,
)
from homeassistant.core import callback
from homeassistant.helpers import selector
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType

from .const import CONF_LINE, DEFAULT_LINES, DOMAIN, TUBE_LINES

_LOGGER = logging.getLogger(__name__)


class LondonUndergroundConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for London Underground."""

VERSION = 1
MINOR_VERSION = 1

@staticmethod
@callback
def async_get_options_flow(
_: ConfigEntry,
) -> LondonUndergroundOptionsFlow:
"""Get the options flow for this handler."""
return LondonUndergroundOptionsFlow()

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}

if user_input is not None:
session = async_get_clientsession(self.hass)
data = TubeData(session)
try:
async with asyncio.timeout(10):
await data.update()
except TimeoutError:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected error")
errors["base"] = "cannot_connect"
else:
return self.async_create_entry(
title="London Underground",
data={},
options={CONF_LINE: user_input.get(CONF_LINE, DEFAULT_LINES)},
)

return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Optional(
CONF_LINE,
default=DEFAULT_LINES,
): selector.SelectSelector(
selector.SelectSelectorConfig(
options=TUBE_LINES,
multiple=True,
mode=selector.SelectSelectorMode.DROPDOWN,
)
),
}
),
errors=errors,
)

async def async_step_import(self, import_data: ConfigType) -> ConfigFlowResult:
"""Handle import from configuration.yaml."""
session = async_get_clientsession(self.hass)
data = TubeData(session)
try:
async with asyncio.timeout(10):
await data.update()
except Exception:
_LOGGER.exception(
"Unexpected error trying to connect before importing config, aborting import "
)
return self.async_abort(reason="cannot_connect")

_LOGGER.warning(
"Importing London Underground config from configuration.yaml: %s",
import_data,
)
# Extract lines from the sensor platform config
lines = import_data.get(CONF_LINE, DEFAULT_LINES)
if "London Overground" in lines:
_LOGGER.warning(
"London Overground was removed from the configuration as the line has been divided and renamed"
)
lines.remove("London Overground")
return self.async_create_entry(
title="London Underground",
data={},
options={CONF_LINE: import_data.get(CONF_LINE, DEFAULT_LINES)},
)


class LondonUndergroundOptionsFlow(OptionsFlowWithReload):
"""Handle options."""

async def async_step_init(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Manage the options."""
if user_input is not None:
_LOGGER.debug(
"Updating london underground with options flow user_input: %s",
user_input,
)
return self.async_create_entry(
title="",
data={CONF_LINE: user_input[CONF_LINE]},
)

return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Optional(
CONF_LINE,
default=self.config_entry.options.get(
CONF_LINE,
self.config_entry.data.get(CONF_LINE, DEFAULT_LINES),
),
): selector.SelectSelector(
selector.SelectSelectorConfig(
options=TUBE_LINES,
multiple=True,
mode=selector.SelectSelectorMode.DROPDOWN,
)
),
}
),
)
@@ -6,6 +6,7 @@ DOMAIN = "london_underground"
|
|||||||
|
|
||||||
CONF_LINE = "line"
|
CONF_LINE = "line"
|
||||||
|
|
||||||
|
|
||||||
SCAN_INTERVAL = timedelta(seconds=30)
|
SCAN_INTERVAL = timedelta(seconds=30)
|
||||||
|
|
||||||
TUBE_LINES = [
|
TUBE_LINES = [
|
||||||
@@ -17,7 +18,7 @@ TUBE_LINES = [
|
|||||||
"Elizabeth line",
|
"Elizabeth line",
|
||||||
"Hammersmith & City",
|
"Hammersmith & City",
|
||||||
"Jubilee",
|
"Jubilee",
|
||||||
"London Overground", # no longer supported
|
"London Overground",
|
||||||
"Metropolitan",
|
"Metropolitan",
|
||||||
"Northern",
|
"Northern",
|
||||||
"Piccadilly",
|
"Piccadilly",
|
||||||
@@ -30,20 +31,3 @@ TUBE_LINES = [
|
|||||||
"Weaver",
|
"Weaver",
|
||||||
"Windrush",
|
"Windrush",
|
||||||
]
|
]
|
||||||
|
|
||||||
# Default lines to monitor if none selected
|
|
||||||
DEFAULT_LINES = [
|
|
||||||
"Bakerloo",
|
|
||||||
"Central",
|
|
||||||
"Circle",
|
|
||||||
"District",
|
|
||||||
"DLR",
|
|
||||||
"Elizabeth line",
|
|
||||||
"Hammersmith & City",
|
|
||||||
"Jubilee",
|
|
||||||
"Metropolitan",
|
|
||||||
"Northern",
|
|
||||||
"Piccadilly",
|
|
||||||
"Victoria",
|
|
||||||
"Waterloo & City",
|
|
||||||
]
|
|
||||||
|
@@ -8,7 +8,6 @@ from typing import cast

from london_tube_status import TubeData
from london_tube_status import TubeData

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

@@ -16,23 +15,16 @@ from .const import DOMAIN, SCAN_INTERVAL

_LOGGER = logging.getLogger(__name__)
_LOGGER = logging.getLogger(__name__)

type LondonUndergroundConfigEntry = ConfigEntry[LondonTubeCoordinator]


class LondonTubeCoordinator(DataUpdateCoordinator[dict[str, dict[str, str]]]):
class LondonTubeCoordinator(DataUpdateCoordinator[dict[str, dict[str, str]]]):
"""London Underground sensor coordinator."""
"""London Underground sensor coordinator."""

def __init__(
def __init__(self, hass: HomeAssistant, data: TubeData) -> None:
self,
hass: HomeAssistant,
data: TubeData,
config_entry: LondonUndergroundConfigEntry,
) -> None:
"""Initialize coordinator."""
"""Initialize coordinator."""
super().__init__(
super().__init__(
hass,
hass,
_LOGGER,
_LOGGER,
config_entry=config_entry,
config_entry=None,
name=DOMAIN,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
update_interval=SCAN_INTERVAL,
)
)
@@ -2,12 +2,9 @@
"domain": "london_underground",
"domain": "london_underground",
"name": "London Underground",
"name": "London Underground",
"codeowners": ["@jpbede"],
"codeowners": ["@jpbede"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/london_underground",
"documentation": "https://www.home-assistant.io/integrations/london_underground",
"integration_type": "service",
"iot_class": "cloud_polling",
"iot_class": "cloud_polling",
"loggers": ["london_tube_status"],
"loggers": ["london_tube_status"],
"quality_scale": "legacy",
"quality_scale": "legacy",
"requirements": ["london-tube-status==0.5"],
"requirements": ["london-tube-status==0.5"]
"single_config_entry": true
}
}
@@ -5,26 +5,23 @@ from __future__ import annotations
import logging
import logging
from typing import Any
from typing import Any

from london_tube_status import TubeData
import voluptuous as vol
import voluptuous as vol

from homeassistant.components.sensor import (
from homeassistant.components.sensor import (
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
SensorEntity,
SensorEntity,
)
)
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.core import HomeAssistant
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.exceptions import PlatformNotReady
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
AddEntitiesCallback,
)
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import CONF_LINE, DOMAIN, TUBE_LINES
from .const import CONF_LINE, TUBE_LINES
from .coordinator import LondonTubeCoordinator, LondonUndergroundConfigEntry
from .coordinator import LondonTubeCoordinator

_LOGGER = logging.getLogger(__name__)
_LOGGER = logging.getLogger(__name__)

@@ -41,54 +38,18 @@ async def async_setup_platform(
) -> None:
) -> None:
"""Set up the Tube sensor."""
"""Set up the Tube sensor."""

# If configuration.yaml config exists, trigger the import flow.
session = async_get_clientsession(hass)
# If the config entry already exists, this will not be triggered as only one config is allowed.
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
)
if (
result.get("type") is FlowResultType.ABORT
and result.get("reason") != "already_configured"
):
ir.async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_import_issue_{result.get('reason')}",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml_import_issue",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "London Underground",
},
)
return

ir.async_create_issue(
data = TubeData(session)
hass,
coordinator = LondonTubeCoordinator(hass, data)
HOMEASSISTANT_DOMAIN,
"deprecated_yaml",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "London Underground",
},
)

await coordinator.async_refresh()

async def async_setup_entry(
if not coordinator.last_update_success:
hass: HomeAssistant,
raise PlatformNotReady
entry: LondonUndergroundConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the London Underground sensor from config entry."""

async_add_entities(
async_add_entities(
LondonTubeSensor(entry.runtime_data, line) for line in entry.options[CONF_LINE]
LondonTubeSensor(coordinator, line) for line in config[CONF_LINE]
)
)


@@ -97,21 +58,11 @@ class LondonTubeSensor(CoordinatorEntity[LondonTubeCoordinator], SensorEntity):

_attr_attribution = "Powered by TfL Open Data"
_attr_attribution = "Powered by TfL Open Data"
_attr_icon = "mdi:subway"
_attr_icon = "mdi:subway"
_attr_has_entity_name = True # Use modern entity naming

def __init__(self, coordinator: LondonTubeCoordinator, name: str) -> None:
def __init__(self, coordinator: LondonTubeCoordinator, name: str) -> None:
"""Initialize the London Underground sensor."""
"""Initialize the London Underground sensor."""
super().__init__(coordinator)
super().__init__(coordinator)
self._name = name
self._name = name
# Add unique_id for proper entity registry
self._attr_unique_id = f"tube_{name.lower().replace(' ', '_')}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, DOMAIN)},
name="London Underground",
manufacturer="Transport for London",
model="Tube Status",
entry_type=DeviceEntryType.SERVICE,
)

@property
@property
def name(self) -> str:
def name(self) -> str:
@@ -1,38 +0,0 @@
-{
-  "config": {
-    "step": {
-      "user": {
-        "title": "Set up London Underground",
-        "description": "Select which tube lines you want to monitor",
-        "data": {
-          "line": "Tube lines"
-        }
-      }
-    },
-    "error": {
-      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
-      "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]",
-      "unknown": "[%key:common::config_flow::error::unknown%]"
-    },
-    "abort": {
-      "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]"
-    }
-  },
-  "options": {
-    "step": {
-      "init": {
-        "title": "Configure London Underground",
-        "description": "[%key:component::london_underground::config::step::user::description%]",
-        "data": {
-          "line": "[%key:component::london_underground::config::step::user::data::line%]"
-        }
-      }
-    }
-  },
-  "issues": {
-    "deprecated_yaml_import_issue": {
-      "title": "London Underground YAML configuration deprecated",
-      "description": "Configuring London Underground using YAML sensor platform is deprecated.\n\nWhile importing your configuration, an error occurred when trying to connect to the Transport for London API. Please restart Home Assistant to try again, or remove the existing YAML configuration and set the integration up via the UI."
-    }
-  }
-}
@@ -3,7 +3,6 @@
 from __future__ import annotations
 
 from collections.abc import Mapping
-from dataclasses import dataclass
 import logging
 from typing import Any, cast
 
@@ -24,13 +23,7 @@ from homeassistant.helpers.config_entry_oauth2_flow import (
 
 from . import async_get_config_entry_implementation
 from .application_credentials import authorization_server_context
-from .const import (
-    CONF_ACCESS_TOKEN,
-    CONF_AUTHORIZATION_URL,
-    CONF_SCOPE,
-    CONF_TOKEN_URL,
-    DOMAIN,
-)
+from .const import CONF_ACCESS_TOKEN, CONF_AUTHORIZATION_URL, CONF_TOKEN_URL, DOMAIN
 from .coordinator import TokenManager, mcp_client
 
 _LOGGER = logging.getLogger(__name__)
@@ -48,17 +41,9 @@ MCP_DISCOVERY_HEADERS = {
 }
 
 
-@dataclass
-class OAuthConfig:
-    """Class to hold OAuth configuration."""
-
-    authorization_server: AuthorizationServer
-    scopes: list[str] | None = None
-
-
 async def async_discover_oauth_config(
     hass: HomeAssistant, mcp_server_url: str
-) -> OAuthConfig:
+) -> AuthorizationServer:
     """Discover the OAuth configuration for the MCP server.
 
     This implements the functionality in the MCP spec for discovery. If the MCP server URL
@@ -80,11 +65,9 @@ async def async_discover_oauth_config(
     except httpx.HTTPStatusError as error:
         if error.response.status_code == 404:
             _LOGGER.info("Authorization Server Metadata not found, using default paths")
-            return OAuthConfig(
-                authorization_server=AuthorizationServer(
-                    authorize_url=str(parsed_url.with_path("/authorize")),
-                    token_url=str(parsed_url.with_path("/token")),
-                )
+            return AuthorizationServer(
+                authorize_url=str(parsed_url.with_path("/authorize")),
+                token_url=str(parsed_url.with_path("/token")),
             )
         raise CannotConnect from error
     except httpx.HTTPError as error:
@@ -98,15 +81,9 @@ async def async_discover_oauth_config(
         authorize_url = str(parsed_url.with_path(authorize_url))
     if token_url.startswith("/"):
         token_url = str(parsed_url.with_path(token_url))
-    # We have no way to know the minimum set of scopes needed, so request
-    # all of them and let the user limit during the authorization step.
-    scopes = data.get("scopes_supported")
-    return OAuthConfig(
-        authorization_server=AuthorizationServer(
-            authorize_url=authorize_url,
-            token_url=token_url,
-        ),
-        scopes=scopes,
+    return AuthorizationServer(
+        authorize_url=authorize_url,
+        token_url=token_url,
     )
 
 
@@ -153,7 +130,6 @@ class ModelContextProtocolConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
         """Initialize the config flow."""
         super().__init__()
        self.data: dict[str, Any] = {}
-        self.oauth_config: OAuthConfig | None = None
 
     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
@@ -194,7 +170,7 @@ class ModelContextProtocolConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
         to find the OAuth medata then run the OAuth authentication flow.
         """
         try:
-            oauth_config = await async_discover_oauth_config(
+            authorization_server = await async_discover_oauth_config(
                 self.hass, self.data[CONF_URL]
             )
         except TimeoutConnectError:
@@ -205,13 +181,11 @@ class ModelContextProtocolConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
             _LOGGER.exception("Unexpected exception")
             return self.async_abort(reason="unknown")
         else:
-            _LOGGER.info("OAuth configuration: %s", oauth_config)
-            self.oauth_config = oauth_config
+            _LOGGER.info("OAuth configuration: %s", authorization_server)
             self.data.update(
                 {
-                    CONF_AUTHORIZATION_URL: oauth_config.authorization_server.authorize_url,
-                    CONF_TOKEN_URL: oauth_config.authorization_server.token_url,
-                    CONF_SCOPE: oauth_config.scopes,
+                    CONF_AUTHORIZATION_URL: authorization_server.authorize_url,
+                    CONF_TOKEN_URL: authorization_server.token_url,
                 }
             )
             return await self.async_step_credentials_choice()
@@ -223,15 +197,6 @@ class ModelContextProtocolConfigFlow(AbstractOAuth2FlowHandler, domain=DOMAIN):
             self.data[CONF_TOKEN_URL],
         )
 
-    @property
-    def extra_authorize_data(self) -> dict:
-        """Extra data that needs to be appended to the authorize url."""
-        data = {}
-        if self.data and (scopes := self.data[CONF_SCOPE]) is not None:
-            data[CONF_SCOPE] = " ".join(scopes)
-        data.update(super().extra_authorize_data)
-        return data
-
     async def async_step_credentials_choice(
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
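Note on the removed scope handling: the `-` side requests every scope advertised in the server's discovery metadata and joins them with spaces when building the authorize URL, which is the OAuth 2.0 convention for the "scope" parameter. A minimal sketch of that step, assuming only the RFC 8414 "scopes_supported" metadata shape (the helper name is hypothetical, not the integration's API):

    # Sketch: turn discovered "scopes_supported" metadata into an OAuth
    # "scope" query parameter. Space-separated joining follows OAuth 2.0.
    def build_scope_param(metadata: dict) -> str | None:
        scopes = metadata.get("scopes_supported")
        if not scopes:
            return None
        return " ".join(scopes)

    print(build_scope_param({"scopes_supported": ["read", "write"]}))  # "read write"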
@@ -5,4 +5,3 @@ DOMAIN = "mcp"
|
|||||||
CONF_ACCESS_TOKEN = "access_token"
|
CONF_ACCESS_TOKEN = "access_token"
|
||||||
CONF_AUTHORIZATION_URL = "authorization_url"
|
CONF_AUTHORIZATION_URL = "authorization_url"
|
||||||
CONF_TOKEN_URL = "token_url"
|
CONF_TOKEN_URL = "token_url"
|
||||||
CONF_SCOPE = "scope"
|
|
||||||
|
@@ -59,7 +59,7 @@ async def create_server(
         # Backwards compatibility with old MCP Server config
         return await llm.async_get_api(hass, llm_api_id, llm_context)
 
-    @server.list_prompts()  # type: ignore[no-untyped-call,untyped-decorator]
+    @server.list_prompts()  # type: ignore[no-untyped-call, misc]
     async def handle_list_prompts() -> list[types.Prompt]:
         llm_api = await get_api_instance()
         return [
@@ -69,7 +69,7 @@ async def create_server(
         )
     ]
 
-    @server.get_prompt()  # type: ignore[no-untyped-call,untyped-decorator]
+    @server.get_prompt()  # type: ignore[no-untyped-call, misc]
     async def handle_get_prompt(
         name: str, arguments: dict[str, str] | None
     ) -> types.GetPromptResult:
@@ -90,13 +90,13 @@ async def create_server(
         ],
     )
 
-    @server.list_tools()  # type: ignore[no-untyped-call,untyped-decorator]
+    @server.list_tools()  # type: ignore[no-untyped-call, misc]
     async def list_tools() -> list[types.Tool]:
         """List available time tools."""
         llm_api = await get_api_instance()
         return [_format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools]
 
-    @server.call_tool()  # type: ignore[untyped-decorator]
+    @server.call_tool()  # type: ignore[misc]
     async def call_tool(name: str, arguments: dict) -> Sequence[types.TextContent]:
         """Handle calling tools."""
         llm_api = await get_api_instance()
@@ -408,5 +408,5 @@ class AtwDeviceZoneClimate(MelCloudClimate):
     async def async_set_temperature(self, **kwargs: Any) -> None:
         """Set new target temperature."""
         await self._zone.set_target_temperature(
-            kwargs.get(ATTR_TEMPERATURE, self.target_temperature)
+            kwargs.get("temperature", self.target_temperature)
         )
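Both sides of this hunk read the same kwarg: ATTR_TEMPERATURE in homeassistant.const is the literal string "temperature", so the change only swaps the constant for the raw string. A tiny sketch of the lookup, with the constant's value assumed from homeassistant.const:

    # Sketch: both spellings resolve the same service-call kwarg.
    ATTR_TEMPERATURE = "temperature"  # assumed value, as in homeassistant.const

    def resolve_target(kwargs: dict, current: float) -> float:
        # Falls back to the current target when the caller omits the key.
        return kwargs.get(ATTR_TEMPERATURE, current)

    assert resolve_target({"temperature": 21.5}, 20.0) == 21.5
    assert resolve_target({}, 20.0) == 20.0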
@@ -54,7 +54,6 @@ _LOGGER = logging.getLogger(__name__)
 DEFAULT_PLATE_COUNT = 4
 
 PLATE_COUNT = {
-    "KM7575": 6,
     "KM7678": 6,
     "KM7697": 6,
     "KM7878": 6,
@@ -10,11 +10,7 @@ from mill import Heater, Mill
 from mill_local import Mill as MillLocal
 
 from homeassistant.components.recorder import get_instance
-from homeassistant.components.recorder.models import (
-    StatisticData,
-    StatisticMeanType,
-    StatisticMetaData,
-)
+from homeassistant.components.recorder.models import StatisticData, StatisticMetaData
 from homeassistant.components.recorder.statistics import (
     async_add_external_statistics,
     get_last_statistics,
@@ -25,7 +21,6 @@ from homeassistant.const import UnitOfEnergy
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
 from homeassistant.util import dt as dt_util, slugify
-from homeassistant.util.unit_conversion import EnergyConverter
 
 from .const import DOMAIN
 
@@ -152,12 +147,11 @@ class MillHistoricDataUpdateCoordinator(DataUpdateCoordinator):
             )
         )
         metadata = StatisticMetaData(
-            mean_type=StatisticMeanType.NONE,
+            has_mean=False,
            has_sum=True,
            name=f"{heater.name}",
            source=DOMAIN,
            statistic_id=statistic_id,
-            unit_class=EnergyConverter.UNIT_CLASS,
            unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        )
        async_add_external_statistics(self.hass, metadata, statistics)
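The two StatisticMetaData shapes here track a recorder API change: newer cores describe the mean with mean_type and carry a unit_class, while older cores use the boolean has_mean and no unit class. A rough sketch of the two field sets as plain dicts (field availability depends on the Home Assistant version, so this is illustrative only):

    # Sketch: recorder statistics metadata, new-style vs old-style fields.
    new_style = {
        "mean_type": "none",      # StatisticMeanType.NONE on newer cores
        "has_sum": True,
        "unit_class": "energy",   # EnergyConverter.UNIT_CLASS
        "unit_of_measurement": "kWh",
    }
    old_style = {
        "has_mean": False,        # boolean predecessor of mean_type
        "has_sum": True,
        "unit_of_measurement": "kWh",
    }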
@@ -253,7 +253,6 @@ class ModbusHub:
         self._client: (
             AsyncModbusSerialClient | AsyncModbusTcpClient | AsyncModbusUdpClient | None
         ) = None
-        self._lock = asyncio.Lock()
         self.event_connected = asyncio.Event()
         self.hass = hass
         self.name = client_config[CONF_NAME]
@@ -416,9 +415,7 @@ class ModbusHub:
         """Convert async to sync pymodbus call."""
         if not self._client:
             return None
-        async with self._lock:
-            result = await self.low_level_pb_call(unit, address, value, use_call)
-            if self._msg_wait:
-                # small delay until next request/response
-                await asyncio.sleep(self._msg_wait)
-            return result
+        result = await self.low_level_pb_call(unit, address, value, use_call)
+        if self._msg_wait:
+            await asyncio.sleep(self._msg_wait)
+        return result
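The `-` side serializes each Modbus transaction behind an asyncio.Lock, so a second coroutine cannot interleave its request into an in-flight request/response pair; the `+` side issues calls unguarded. A minimal sketch of the pattern with a stand-in client (Hub and _io are hypothetical names, not the integration's API):

    import asyncio

    class Hub:
        def __init__(self, msg_wait: float = 0.05) -> None:
            self._lock = asyncio.Lock()
            self._msg_wait = msg_wait

        async def _io(self, address: int) -> int:
            await asyncio.sleep(0)  # stand-in for the real request/response
            return address

        async def call(self, address: int) -> int:
            async with self._lock:  # one transaction on the wire at a time
                result = await self._io(address)
                if self._msg_wait:
                    await asyncio.sleep(self._msg_wait)  # settle time before next request
                return result

    async def main() -> None:
        hub = Hub()
        print(await asyncio.gather(hub.call(1), hub.call(2)))

    asyncio.run(main())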
@@ -174,7 +174,7 @@ class MotionBaseDevice(MotionCoordinatorEntity, CoverEntity):
 
     _restore_tilt = False
 
-    def __init__(self, coordinator, blind, device_class) -> None:
+    def __init__(self, coordinator, blind, device_class):
         """Initialize the blind."""
         super().__init__(coordinator, blind)
 
@@ -275,7 +275,7 @@ class MotionTiltDevice(MotionPositionDevice):
         """
         if self._blind.angle is None:
             return None
-        return 100 - (self._blind.angle * 100 / 180)
+        return self._blind.angle * 100 / 180
 
     @property
     def is_closed(self) -> bool | None:
@@ -287,14 +287,14 @@ class MotionTiltDevice(MotionPositionDevice):
     async def async_open_cover_tilt(self, **kwargs: Any) -> None:
         """Open the cover tilt."""
         async with self._api_lock:
-            await self.hass.async_add_executor_job(self._blind.Set_angle, 0)
+            await self.hass.async_add_executor_job(self._blind.Set_angle, 180)
 
         await self.async_request_position_till_stop()
 
     async def async_close_cover_tilt(self, **kwargs: Any) -> None:
         """Close the cover tilt."""
         async with self._api_lock:
-            await self.hass.async_add_executor_job(self._blind.Set_angle, 180)
+            await self.hass.async_add_executor_job(self._blind.Set_angle, 0)
 
         await self.async_request_position_till_stop()
 
@@ -302,7 +302,7 @@ class MotionTiltDevice(MotionPositionDevice):
         """Move the cover tilt to a specific position."""
         angle = kwargs[ATTR_TILT_POSITION] * 180 / 100
         async with self._api_lock:
-            await self.hass.async_add_executor_job(self._blind.Set_angle, 180 - angle)
+            await self.hass.async_add_executor_job(self._blind.Set_angle, angle)
 
         await self.async_request_position_till_stop()
 
@@ -347,9 +347,9 @@ class MotionTiltOnlyDevice(MotionTiltDevice):
         if self._blind.position is None:
             if self._blind.angle is None:
                 return None
-            return 100 - (self._blind.angle * 100 / 180)
+            return self._blind.angle * 100 / 180
 
-        return 100 - self._blind.position
+        return self._blind.position
 
     @property
     def is_closed(self) -> bool | None:
@@ -357,9 +357,9 @@ class MotionTiltOnlyDevice(MotionTiltDevice):
         if self._blind.position is None:
             if self._blind.angle is None:
                 return None
-            return self._blind.angle == 180
+            return self._blind.angle == 0
 
-        return self._blind.position == 100
+        return self._blind.position == 0
 
     async def async_open_cover_tilt(self, **kwargs: Any) -> None:
         """Open the cover tilt."""
@@ -381,14 +381,10 @@ class MotionTiltOnlyDevice(MotionTiltDevice):
         if self._blind.position is None:
             angle = angle * 180 / 100
             async with self._api_lock:
-                await self.hass.async_add_executor_job(
-                    self._blind.Set_angle, 180 - angle
-                )
+                await self.hass.async_add_executor_job(self._blind.Set_angle, angle)
         else:
             async with self._api_lock:
-                await self.hass.async_add_executor_job(
-                    self._blind.Set_position, 100 - angle
-                )
+                await self.hass.async_add_executor_job(self._blind.Set_position, angle)
 
         await self.async_request_position_till_stop()
 
@@ -401,14 +397,10 @@ class MotionTiltOnlyDevice(MotionTiltDevice):
         if self._blind.position is None:
             angle = angle * 180 / 100
             async with self._api_lock:
-                await self.hass.async_add_executor_job(
-                    self._blind.Set_angle, 180 - angle
-                )
+                await self.hass.async_add_executor_job(self._blind.Set_angle, angle)
         else:
             async with self._api_lock:
-                await self.hass.async_add_executor_job(
-                    self._blind.Set_position, 100 - angle
-                )
+                await self.hass.async_add_executor_job(self._blind.Set_position, angle)
 
         await self.async_request_position_till_stop()
 
@@ -416,7 +408,7 @@ class MotionTiltOnlyDevice(MotionTiltDevice):
 class MotionTDBUDevice(MotionBaseDevice):
     """Representation of a Motion Top Down Bottom Up blind Device."""
 
-    def __init__(self, coordinator, blind, device_class, motor) -> None:
+    def __init__(self, coordinator, blind, device_class, motor):
         """Initialize the blind."""
         super().__init__(coordinator, blind, device_class)
         self._motor = motor
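The recurring "100 - (...)" edits all invert one scale: the device reports a tilt angle in 0..180, while Home Assistant covers use 0 (closed) .. 100 (open). On the `-` side angle 0 is treated as fully open; on the `+` side angle 180 is. A sketch of the two mappings under that assumption:

    # Sketch: device angle (0..180) -> HA tilt position (0..100).
    def ha_position_inverted(angle: int) -> float:
        return 100 - (angle * 100 / 180)   # angle 0 = open

    def ha_position_direct(angle: int) -> float:
        return angle * 100 / 180           # angle 180 = open

    assert ha_position_inverted(0) == 100
    assert ha_position_inverted(180) == 0
    assert ha_position_direct(180) == 100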
@@ -13,7 +13,6 @@ import voluptuous as vol
 
 from homeassistant.components.sensor import (
     PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
-    SensorDeviceClass,
     SensorEntity,
 )
 from homeassistant.config_entries import SOURCE_IMPORT
@@ -143,7 +142,6 @@ async def async_setup_entry(
 class NSDepartureSensor(SensorEntity):
     """Implementation of a NS Departure Sensor."""
 
-    _attr_device_class = SensorDeviceClass.TIMESTAMP
     _attr_attribution = "Data provided by NS"
     _attr_icon = "mdi:train"
 
@@ -163,6 +161,7 @@ class NSDepartureSensor(SensorEntity):
         self._via = via
         self._heading = heading
         self._time = time
+        self._state: str | None = None
         self._trips: list[Trip] | None = None
         self._first_trip: Trip | None = None
         self._next_trip: Trip | None = None
@@ -172,6 +171,11 @@ class NSDepartureSensor(SensorEntity):
         """Return the name of the sensor."""
         return self._name
 
+    @property
+    def native_value(self) -> str | None:
+        """Return the next departure time."""
+        return self._state
+
     @property
     def extra_state_attributes(self) -> dict[str, Any] | None:
         """Return the state attributes."""
@@ -265,7 +269,7 @@ class NSDepartureSensor(SensorEntity):
             (datetime.now() + timedelta(minutes=30)).time() < self._time
             or (datetime.now() - timedelta(minutes=30)).time() > self._time
         ):
-            self._attr_native_value = None
+            self._state = None
             self._trips = None
             self._first_trip = None
             return
@@ -305,7 +309,7 @@ class NSDepartureSensor(SensorEntity):
         if len(filtered_times) > 0:
             sorted_times = sorted(filtered_times, key=lambda x: x[1])
             self._first_trip = self._trips[sorted_times[0][0]]
-            self._attr_native_value = sorted_times[0][1]
+            self._state = sorted_times[0][1].strftime("%H:%M")
 
             # Filter again to remove trains that leave at the exact same time.
             filtered_times = [
@@ -322,7 +326,7 @@ class NSDepartureSensor(SensorEntity):
 
         else:
             self._first_trip = None
-            self._attr_native_value = None
+            self._state = None
 
         except (
             requests.exceptions.ConnectionError,
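The state change here is more than cosmetic: the `-` side declares SensorDeviceClass.TIMESTAMP and stores the raw datetime in _attr_native_value, while the `+` side keeps a private "HH:MM" string behind a native_value property. A sketch of the two state styles (the departure value is made up for illustration):

    from datetime import datetime, timezone

    # A timestamp device class expects a timezone-aware datetime;
    # the string style formats it as HH:MM instead.
    departure = datetime(2024, 5, 1, 8, 42, tzinfo=timezone.utc)

    native_value_timestamp = departure                   # timestamp-style state
    native_value_string = departure.strftime("%H:%M")    # "08:42", string-style state
    print(native_value_timestamp.isoformat(), native_value_string)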
@@ -53,7 +53,7 @@ class NikoHomeControlLight(NikoHomeControlEntity, LightEntity):
 
     async def async_turn_on(self, **kwargs: Any) -> None:
         """Instruct the light to turn on."""
-        await self._action.turn_on(kwargs.get(ATTR_BRIGHTNESS))
+        await self._action.turn_on(kwargs.get(ATTR_BRIGHTNESS, 255))
 
     async def async_turn_off(self, **kwargs: Any) -> None:
         """Instruct the light to turn off."""
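The one-character-looking change matters at runtime: without a default, kwargs.get returns None when the service call omits brightness, leaving the decision to the library; with the default, a bare turn_on always sends full brightness. A sketch, with ATTR_BRIGHTNESS assumed to be the "brightness" constant from the light component:

    ATTR_BRIGHTNESS = "brightness"  # assumed value

    kwargs: dict = {}  # turn_on called without a brightness
    print(kwargs.get(ATTR_BRIGHTNESS))       # None -> library decides
    print(kwargs.get(ATTR_BRIGHTNESS, 255))  # 255  -> always full brightness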
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/niko_home_control",
   "iot_class": "local_push",
   "loggers": ["nikohomecontrol"],
-  "requirements": ["nhc==0.6.1"]
+  "requirements": ["nhc==0.4.12"]
 }
@@ -7,5 +7,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["pynintendoparental"],
   "quality_scale": "bronze",
-  "requirements": ["pynintendoparental==1.1.1"]
+  "requirements": ["pynintendoparental==1.0.1"]
 }
@@ -157,7 +157,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
|||||||
) from error
|
) from error
|
||||||
except NordPoolEmptyResponseError:
|
except NordPoolEmptyResponseError:
|
||||||
return {area: [] for area in areas}
|
return {area: [] for area in areas}
|
||||||
except (NordPoolError, TimeoutError) as error:
|
except NordPoolError as error:
|
||||||
raise ServiceValidationError(
|
raise ServiceValidationError(
|
||||||
translation_domain=DOMAIN,
|
translation_domain=DOMAIN,
|
||||||
translation_key="connection_error",
|
translation_key="connection_error",
|
||||||
|
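The widened except clause on the `-` side converts a plain TimeoutError into the same translated ServiceValidationError; on the `+` side a timeout escapes unhandled. A self-contained sketch of the pattern, with stand-in exception classes:

    class NordPoolError(Exception): ...
    class ServiceValidationError(Exception): ...

    def fetch(raise_timeout: bool) -> None:
        try:
            if raise_timeout:
                raise TimeoutError
            raise NordPoolError
        except (NordPoolError, TimeoutError) as err:
            raise ServiceValidationError("connection_error") from err

    for flag in (True, False):
        try:
            fetch(flag)
        except ServiceValidationError as err:
            print(type(err.__cause__).__name__)  # TimeoutError, then NordPoolError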
@@ -31,39 +31,38 @@ async def async_setup_entry(
     events = device.events.get_platform("binary_sensor")
     entity_names = build_event_entity_names(events)
 
-    uids = set()
-    entities = []
-    for event in events:
-        uids.add(event.uid)
-        entities.append(
-            ONVIFBinarySensor(event.uid, device, name=entity_names[event.uid])
-        )
+    entities = {
+        event.uid: ONVIFBinarySensor(event.uid, device, name=entity_names[event.uid])
+        for event in events
+    }
 
     ent_reg = er.async_get(hass)
     for entry in er.async_entries_for_config_entry(ent_reg, config_entry.entry_id):
-        if entry.domain == "binary_sensor" and entry.unique_id not in uids:
-            uids.add(entry.unique_id)
-            entities.append(ONVIFBinarySensor(entry.unique_id, device, entry=entry))
+        if entry.domain == "binary_sensor" and entry.unique_id not in entities:
+            entities[entry.unique_id] = ONVIFBinarySensor(
+                entry.unique_id, device, entry=entry
+            )
 
-    async_add_entities(entities)
+    async_add_entities(entities.values())
     uids_by_platform = device.events.get_uids_by_platform("binary_sensor")
 
     @callback
     def async_check_entities() -> None:
         """Check if we have added an entity for the event."""
         nonlocal uids_by_platform
-        if not (missing := uids_by_platform.difference(uids)):
+        if not (missing := uids_by_platform.difference(entities)):
             return
 
         events = device.events.get_platform("binary_sensor")
         entity_names = build_event_entity_names(events)
 
-        new_entities = [
-            ONVIFBinarySensor(uid, device, name=entity_names[uid]) for uid in missing
-        ]
+        new_entities: dict[str, ONVIFBinarySensor] = {
+            uid: ONVIFBinarySensor(uid, device, name=entity_names[uid])
+            for uid in missing
+        }
         if new_entities:
-            uids.update(missing)
-            async_add_entities(new_entities)
+            entities.update(new_entities)
+            async_add_entities(new_entities.values())
 
     device.events.async_add_listener(async_check_entities)
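The `+` side collapses the separate uids set and entity list into one dict keyed by uid: the dict doubles as the membership test, because set.difference() accepts any iterable of keys. A sketch of that property:

    # Sketch: a dict keyed by uid serves as both the entity store and the
    # membership test.
    entities = {"uid-1": object(), "uid-2": object()}
    uids_by_platform = {"uid-1", "uid-2", "uid-3"}

    missing = uids_by_platform.difference(entities)  # iterates dict keys
    print(missing)  # {'uid-3'}

    entities.update({uid: object() for uid in missing})
    print(sorted(entities))  # ['uid-1', 'uid-2', 'uid-3']

The same refactor is applied to the sensor platform in the next hunk.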
@@ -30,37 +30,37 @@
     events = device.events.get_platform("sensor")
     entity_names = build_event_entity_names(events)
 
-    uids = set()
-    entities = []
-    for event in events:
-        uids.add(event.uid)
-        entities.append(ONVIFSensor(event.uid, device, name=entity_names[event.uid]))
+    entities = {
+        event.uid: ONVIFSensor(event.uid, device, name=entity_names[event.uid])
+        for event in events
+    }
 
     ent_reg = er.async_get(hass)
     for entry in er.async_entries_for_config_entry(ent_reg, config_entry.entry_id):
-        if entry.domain == "sensor" and entry.unique_id not in uids:
-            uids.add(entry.unique_id)
-            entities.append(ONVIFSensor(entry.unique_id, device, entry=entry))
+        if entry.domain == "sensor" and entry.unique_id not in entities:
+            entities[entry.unique_id] = ONVIFSensor(
+                entry.unique_id, device, entry=entry
+            )
 
-    async_add_entities(entities)
+    async_add_entities(entities.values())
     uids_by_platform = device.events.get_uids_by_platform("sensor")
 
     @callback
     def async_check_entities() -> None:
         """Check if we have added an entity for the event."""
         nonlocal uids_by_platform
-        if not (missing := uids_by_platform.difference(uids)):
+        if not (missing := uids_by_platform.difference(entities)):
             return
 
         events = device.events.get_platform("sensor")
         entity_names = build_event_entity_names(events)
 
-        new_entities = [
-            ONVIFSensor(uid, device, name=entity_names[uid]) for uid in missing
-        ]
+        new_entities: dict[str, ONVIFSensor] = {
+            uid: ONVIFSensor(uid, device, name=entity_names[uid]) for uid in missing
+        }
         if new_entities:
-            uids.update(missing)
-            async_add_entities(new_entities)
+            entities.update(new_entities)
+            async_add_entities(new_entities.values())
 
     device.events.async_add_listener(async_check_entities)
@@ -9,5 +9,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "quality_scale": "bronze",
-  "requirements": ["openai==2.2.0", "python-open-router==0.3.1"]
+  "requirements": ["openai==1.99.5", "python-open-router==0.3.1"]
 }
@@ -316,23 +316,16 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
         options = self.options
         errors: dict[str, str] = {}
 
-        step_schema: VolDictType = {}
+        step_schema: VolDictType = {
+            vol.Optional(
+                CONF_CODE_INTERPRETER,
+                default=RECOMMENDED_CODE_INTERPRETER,
+            ): bool,
+        }
 
         model = options[CONF_CHAT_MODEL]
 
-        if not model.startswith(("gpt-5-pro", "gpt-5-codex")):
-            step_schema.update(
-                {
-                    vol.Optional(
-                        CONF_CODE_INTERPRETER,
-                        default=RECOMMENDED_CODE_INTERPRETER,
-                    ): bool,
-                }
-            )
-        elif CONF_CODE_INTERPRETER in options:
-            options.pop(CONF_CODE_INTERPRETER)
-
-        if model.startswith(("o", "gpt-5")) and not model.startswith("gpt-5-pro"):
+        if model.startswith(("o", "gpt-5")):
             step_schema.update(
                 {
                     vol.Optional(
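The option gating on the `-` side is all prefix tests: str.startswith with a tuple checks several prefixes at once. A sketch of the two predicates as they appear in this hunk (function names are illustrative):

    def offers_code_interpreter(model: str) -> bool:
        return not model.startswith(("gpt-5-pro", "gpt-5-codex"))

    def offers_reasoning(model: str) -> bool:
        return model.startswith(("o", "gpt-5")) and not model.startswith("gpt-5-pro")

    assert offers_code_interpreter("gpt-4o")
    assert not offers_code_interpreter("gpt-5-codex")
    assert offers_reasoning("o3-mini")
    assert not offers_reasoning("gpt-5-pro")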
@@ -468,9 +468,7 @@ class OpenAIBaseLLMEntity(Entity):
             model_args["reasoning"] = {
                 "effort": options.get(
                     CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT
-                )
-                if not model_args["model"].startswith("gpt-5-pro")
-                else "high",  # GPT-5 pro only supports reasoning.effort: high
+                ),
                 "summary": "auto",
             }
             model_args["include"] = ["reasoning.encrypted_content"]
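The `-` side pins reasoning effort to "high" for gpt-5-pro, overriding whatever the user configured. A sketch of that selection, with the option key and default as stand-ins:

    RECOMMENDED_REASONING_EFFORT = "medium"  # stand-in default

    def reasoning_effort(model: str, options: dict) -> str:
        if model.startswith("gpt-5-pro"):
            return "high"  # gpt-5-pro only supports reasoning.effort: high
        return options.get("reasoning_effort", RECOMMENDED_REASONING_EFFORT)

    assert reasoning_effort("gpt-5-pro", {"reasoning_effort": "low"}) == "high"
    assert reasoning_effort("o3", {"reasoning_effort": "low"}) == "low"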
@@ -489,7 +487,7 @@ class OpenAIBaseLLMEntity(Entity):
 
         if options.get(CONF_WEB_SEARCH):
             web_search = WebSearchToolParam(
-                type="web_search",
+                type="web_search_preview",
                 search_context_size=options.get(
                     CONF_WEB_SEARCH_CONTEXT_SIZE, RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE
                 ),
@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/openai_conversation",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["openai==2.2.0"]
+  "requirements": ["openai==1.99.5"]
 }
@@ -8,7 +8,7 @@ import logging
 from pyopenweathermap import create_owm_client
 
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_API_KEY, CONF_LANGUAGE, CONF_MODE
+from homeassistant.const import CONF_API_KEY, CONF_LANGUAGE, CONF_MODE, CONF_NAME
 from homeassistant.core import HomeAssistant
 
 from .const import CONFIG_FLOW_VERSION, DEFAULT_OWM_MODE, OWM_MODES, PLATFORMS
@@ -25,6 +25,7 @@ type OpenweathermapConfigEntry = ConfigEntry[OpenweathermapData]
 class OpenweathermapData:
     """Runtime data definition."""
 
+    name: str
     mode: str
     coordinator: OWMUpdateCoordinator
 
@@ -33,6 +34,7 @@ async def async_setup_entry(
     hass: HomeAssistant, entry: OpenweathermapConfigEntry
 ) -> bool:
     """Set up OpenWeatherMap as config entry."""
+    name = entry.data[CONF_NAME]
     api_key = entry.data[CONF_API_KEY]
     language = entry.options[CONF_LANGUAGE]
     mode = entry.options[CONF_MODE]
@@ -49,7 +51,7 @@ async def async_setup_entry(
 
     entry.async_on_unload(entry.add_update_listener(async_update_options))
 
-    entry.runtime_data = OpenweathermapData(mode, owm_coordinator)
+    entry.runtime_data = OpenweathermapData(name, mode, owm_coordinator)
 
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
 
@@ -14,17 +14,12 @@ from homeassistant.const import (
     CONF_API_KEY,
     CONF_LANGUAGE,
     CONF_LATITUDE,
-    CONF_LOCATION,
     CONF_LONGITUDE,
     CONF_MODE,
+    CONF_NAME,
 )
 from homeassistant.core import callback
-from homeassistant.helpers.selector import (
-    LanguageSelector,
-    LanguageSelectorConfig,
-    LocationSelector,
-    LocationSelectorConfig,
-)
+from homeassistant.helpers import config_validation as cv
 
 from .const import (
     CONFIG_FLOW_VERSION,
@@ -39,12 +34,10 @@ from .utils import build_data_and_options, validate_api_key
 
 USER_SCHEMA = vol.Schema(
     {
-        vol.Required(CONF_LOCATION): LocationSelector(
-            LocationSelectorConfig(radius=False)
-        ),
-        vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): LanguageSelector(
-            LanguageSelectorConfig(languages=LANGUAGES, native_name=True)
-        ),
+        vol.Optional(CONF_NAME, default=DEFAULT_NAME): str,
+        vol.Optional(CONF_LATITUDE): cv.latitude,
+        vol.Optional(CONF_LONGITUDE): cv.longitude,
+        vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): vol.In(LANGUAGES),
         vol.Required(CONF_API_KEY): str,
         vol.Optional(CONF_MODE, default=DEFAULT_OWM_MODE): vol.In(OWM_MODES),
     }
@@ -52,9 +45,7 @@ USER_SCHEMA = vol.Schema(
 
 OPTIONS_SCHEMA = vol.Schema(
     {
-        vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): LanguageSelector(
-            LanguageSelectorConfig(languages=LANGUAGES, native_name=True)
-        ),
+        vol.Optional(CONF_LANGUAGE, default=DEFAULT_LANGUAGE): vol.In(LANGUAGES),
         vol.Optional(CONF_MODE, default=DEFAULT_OWM_MODE): vol.In(OWM_MODES),
     }
 )
@@ -79,8 +70,8 @@ class OpenWeatherMapConfigFlow(ConfigFlow, domain=DOMAIN):
         description_placeholders = {}
 
         if user_input is not None:
-            latitude = user_input[CONF_LOCATION][CONF_LATITUDE]
-            longitude = user_input[CONF_LOCATION][CONF_LONGITUDE]
+            latitude = user_input[CONF_LATITUDE]
+            longitude = user_input[CONF_LONGITUDE]
             mode = user_input[CONF_MODE]
 
             await self.async_set_unique_id(f"{latitude}-{longitude}")
@@ -91,21 +82,15 @@ class OpenWeatherMapConfigFlow(ConfigFlow, domain=DOMAIN):
             )
 
             if not errors:
-                # Flatten location
-                location = user_input.pop(CONF_LOCATION)
-                user_input[CONF_LATITUDE] = location[CONF_LATITUDE]
-                user_input[CONF_LONGITUDE] = location[CONF_LONGITUDE]
                 data, options = build_data_and_options(user_input)
                 return self.async_create_entry(
-                    title=DEFAULT_NAME, data=data, options=options
+                    title=user_input[CONF_NAME], data=data, options=options
                 )
             schema_data = user_input
         else:
            schema_data = {
-                CONF_LOCATION: {
-                    CONF_LATITUDE: self.hass.config.latitude,
-                    CONF_LONGITUDE: self.hass.config.longitude,
-                },
+                CONF_LATITUDE: self.hass.config.latitude,
+                CONF_LONGITUDE: self.hass.config.longitude,
                 CONF_LANGUAGE: self.hass.config.language,
            }
 
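A LocationSelector submits a single nested mapping, so the `-` side has to flatten it back into the top-level latitude/longitude keys that the rest of the integration expects. A sketch of that flattening step with illustrative values:

    # Sketch: flattening a LocationSelector result into top-level keys.
    user_input = {"location": {"latitude": 52.37, "longitude": 4.90}, "api_key": "example"}

    location = user_input.pop("location")
    user_input["latitude"] = location["latitude"]
    user_input["longitude"] = location["longitude"]
    print(user_input)  # {'latitude': 52.37, 'longitude': 4.9, 'api_key': 'example'}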
@@ -1,29 +1,4 @@
 {
-  "entity": {
-    "sensor": {
-      "clouds": {
-        "default": "mdi:weather-cloudy"
-      },
-      "precipitation_kind": {
-        "default": "mdi:weather-snowy-rainy"
-      },
-      "rain": {
-        "default": "mdi:weather-rainy"
-      },
-      "snow": {
-        "default": "mdi:weather-snowy"
-      },
-      "uv_index": {
-        "default": "mdi:weather-sunny"
-      },
-      "visibility_distance": {
-        "default": "mdi:eye"
-      },
-      "weather_code": {
-        "default": "mdi:barcode"
-      }
-    }
-  },
   "services": {
     "get_minute_forecast": {
       "service": "mdi:weather-snowy-rainy"
@@ -64,105 +64,108 @@ from .coordinator import OWMUpdateCoordinator
 WEATHER_SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
     SensorEntityDescription(
         key=ATTR_API_WEATHER,
-        translation_key=ATTR_API_WEATHER,
+        name="Weather",
     ),
     SensorEntityDescription(
         key=ATTR_API_DEW_POINT,
-        translation_key=ATTR_API_DEW_POINT,
+        name="Dew Point",
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         device_class=SensorDeviceClass.TEMPERATURE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key=ATTR_API_TEMPERATURE,
+        name="Temperature",
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         device_class=SensorDeviceClass.TEMPERATURE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key=ATTR_API_FEELS_LIKE_TEMPERATURE,
-        translation_key=ATTR_API_FEELS_LIKE_TEMPERATURE,
+        name="Feels like temperature",
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         device_class=SensorDeviceClass.TEMPERATURE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key=ATTR_API_WIND_SPEED,
+        name="Wind speed",
         native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND,
         device_class=SensorDeviceClass.WIND_SPEED,
         state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key=ATTR_API_WIND_GUST,
-        translation_key=ATTR_API_WIND_GUST,
+        name="Wind gust",
         native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND,
         device_class=SensorDeviceClass.WIND_SPEED,
         state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key=ATTR_API_WIND_BEARING,
+        name="Wind bearing",
         native_unit_of_measurement=DEGREE,
         state_class=SensorStateClass.MEASUREMENT_ANGLE,
         device_class=SensorDeviceClass.WIND_DIRECTION,
     ),
     SensorEntityDescription(
         key=ATTR_API_HUMIDITY,
+        name="Humidity",
         native_unit_of_measurement=PERCENTAGE,
         device_class=SensorDeviceClass.HUMIDITY,
         state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key=ATTR_API_PRESSURE,
+        name="Pressure",
         native_unit_of_measurement=UnitOfPressure.HPA,
         device_class=SensorDeviceClass.PRESSURE,
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
     ),
     SensorEntityDescription(
         key=ATTR_API_CLOUDS,
-        translation_key=ATTR_API_CLOUDS,
+        name="Cloud coverage",
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key=ATTR_API_RAIN,
-        translation_key=ATTR_API_RAIN,
+        name="Rain",
         native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
         device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
         state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key=ATTR_API_SNOW,
-        translation_key=ATTR_API_SNOW,
+        name="Snow",
         native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
         device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
         state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key=ATTR_API_PRECIPITATION_KIND,
-        translation_key=ATTR_API_PRECIPITATION_KIND,
+        name="Precipitation kind",
     ),
     SensorEntityDescription(
         key=ATTR_API_UV_INDEX,
-        translation_key=ATTR_API_UV_INDEX,
+        name="UV Index",
         native_unit_of_measurement=UV_INDEX,
         state_class=SensorStateClass.MEASUREMENT,
     ),
     SensorEntityDescription(
         key=ATTR_API_VISIBILITY_DISTANCE,
-        translation_key=ATTR_API_VISIBILITY_DISTANCE,
+        name="Visibility",
         native_unit_of_measurement=UnitOfLength.METERS,
         device_class=SensorDeviceClass.DISTANCE,
         state_class=SensorStateClass.MEASUREMENT,
-        suggested_display_precision=0,
     ),
     SensorEntityDescription(
         key=ATTR_API_CONDITION,
-        translation_key=ATTR_API_CONDITION,
+        name="Condition",
     ),
     SensorEntityDescription(
         key=ATTR_API_WEATHER_CODE,
-        translation_key=ATTR_API_WEATHER_CODE,
+        name="Weather Code",
     ),
 )
 
@@ -224,6 +227,7 @@ async def async_setup_entry(
 ) -> None:
     """Set up OpenWeatherMap sensor entities based on a config entry."""
     domain_data = config_entry.runtime_data
+    name = domain_data.name
     unique_id = config_entry.unique_id
     assert unique_id is not None
     coordinator = domain_data.coordinator
@@ -238,6 +242,7 @@ async def async_setup_entry(
     elif domain_data.mode == OWM_MODE_AIRPOLLUTION:
         async_add_entities(
             OpenWeatherMapSensor(
+                name,
                 unique_id,
                 description,
                 coordinator,
@@ -247,6 +252,7 @@ async def async_setup_entry(
     else:
         async_add_entities(
             OpenWeatherMapSensor(
+                name,
                 unique_id,
                 description,
                 coordinator,
@@ -264,6 +270,7 @@ class AbstractOpenWeatherMapSensor(SensorEntity):
 
     def __init__(
         self,
+        name: str,
         unique_id: str,
         description: SensorEntityDescription,
         coordinator: OWMUpdateCoordinator,
@@ -277,6 +284,7 @@ class AbstractOpenWeatherMapSensor(SensorEntity):
             entry_type=DeviceEntryType.SERVICE,
             identifiers={(DOMAIN, unique_id)},
             manufacturer=MANUFACTURER,
+            name=name,
         )
 
     @property
@@ -12,14 +12,16 @@
         "data": {
           "api_key": "[%key:common::config_flow::data::api_key%]",
          "language": "[%key:common::config_flow::data::language%]",
-          "location": "[%key:common::config_flow::data::location%]",
+          "latitude": "[%key:common::config_flow::data::latitude%]",
+          "longitude": "[%key:common::config_flow::data::longitude%]",
          "mode": "[%key:common::config_flow::data::mode%]",
          "name": "[%key:common::config_flow::data::name%]"
        },
        "data_description": {
          "api_key": "API key for the OpenWeatherMap integration",
          "language": "Language for the OpenWeatherMap content",
-          "location": "Location to get the weather data for",
+          "latitude": "Latitude of the location",
+          "longitude": "Longitude of the location",
          "mode": "Mode for the OpenWeatherMap API",
          "name": "Name for this OpenWeatherMap location"
        },
@@ -41,46 +43,6 @@
       }
     }
   },
-  "entity": {
-    "sensor": {
-      "dew_point": {
-        "name": "[%key:component::weather::entity_component::_::state_attributes::dew_point::name%]"
-      },
-      "feels_like_temperature": {
-        "name": "[%key:component::weather::entity_component::_::state_attributes::apparent_temperature::name%]"
-      },
-      "wind_gust": {
-        "name": "[%key:component::weather::entity_component::_::state_attributes::wind_gust_speed::name%]"
-      },
-      "clouds": {
-        "name": "[%key:component::weather::entity_component::_::state_attributes::cloud_coverage::name%]"
-      },
-      "rain": {
-        "name": "Rain intensity"
-      },
-      "snow": {
-        "name": "Snow intensity"
-      },
-      "precipitation_kind": {
-        "name": "Precipitation kind"
-      },
-      "uv_index": {
-        "name": "[%key:component::weather::entity_component::_::state_attributes::uv_index::name%]"
-      },
-      "visibility_distance": {
-        "name": "[%key:component::weather::entity_component::_::state_attributes::visibility::name%]"
-      },
-      "condition": {
-        "name": "Condition"
-      },
-      "weather": {
-        "name": "[%key:component::weather::title%]"
-      },
-      "weather_code": {
-        "name": "Weather code"
-      }
-    }
-  },
   "issues": {
     "deprecated_v25": {
       "title": "OpenWeatherMap API V2.5 deprecated",
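The sensor and strings changes above are two halves of one mechanism: a translation_key on the entity description points at an entry under "entity" -> "sensor" in strings.json, and those entries may themselves be [%key:...%] references into other components' strings. A conceptual sketch of how such a reference resolves (the real resolver lives in Home Assistant's translation tooling; the table here is a stand-in):

    # Sketch: resolving a "[%key:...%]" style reference.
    STRINGS = {
        "component::weather::entity_component::_::state_attributes::dew_point::name": "Dew point",
    }

    def resolve(value: str) -> str:
        if value.startswith("[%key:") and value.endswith("%]"):
            return STRINGS[value[6:-2]]
        return value

    print(resolve("[%key:component::weather::entity_component::_::state_attributes::dew_point::name%]"))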
@@ -57,13 +57,14 @@ async def async_setup_entry(
 ) -> None:
     """Set up OpenWeatherMap weather entity based on a config entry."""
     domain_data = config_entry.runtime_data
+    name = domain_data.name
     mode = domain_data.mode

     if mode != OWM_MODE_AIRPOLLUTION:
         weather_coordinator = domain_data.coordinator

         unique_id = f"{config_entry.unique_id}"
-        owm_weather = OpenWeatherMapWeather(unique_id, mode, weather_coordinator)
+        owm_weather = OpenWeatherMapWeather(name, unique_id, mode, weather_coordinator)

         async_add_entities([owm_weather], False)

@@ -92,6 +93,7 @@ class OpenWeatherMapWeather(SingleCoordinatorWeatherEntity[OWMUpdateCoordinator]

     def __init__(
         self,
+        name: str,
         unique_id: str,
         mode: str,
         weather_coordinator: OWMUpdateCoordinator,
@@ -103,6 +105,7 @@ class OpenWeatherMapWeather(SingleCoordinatorWeatherEntity[OWMUpdateCoordinator]
             entry_type=DeviceEntryType.SERVICE,
             identifiers={(DOMAIN, unique_id)},
             manufacturer=MANUFACTURER,
+            name=name,
         )
         self.mode = mode

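A note on the two hunks above: threading a user-visible name from the config entry down into the entity's DeviceInfo is the usual way to label a service-type device in Home Assistant. A minimal sketch of the pattern, with an illustrative entity class and domain string that are not taken from this diff:

from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo

class ExampleWeatherEntity:
    """Sketch: carry the entry's name onto the entity's device."""

    def __init__(self, name: str, unique_id: str) -> None:
        self._attr_unique_id = unique_id
        self._attr_device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            identifiers={("openweathermap", unique_id)},
            name=name,  # the value restored by this change
        )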
@@ -35,7 +35,6 @@ from homeassistant.helpers import issue_registry as ir
 from homeassistant.helpers.aiohttp_client import async_create_clientsession
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 from homeassistant.util import dt as dt_util
-from homeassistant.util.unit_conversion import EnergyConverter, VolumeConverter

 from .const import CONF_LOGIN_DATA, CONF_TOTP_SECRET, CONF_UTILITY, DOMAIN

@@ -150,7 +149,6 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
             name=f"{name_prefix} cost",
             source=DOMAIN,
             statistic_id=cost_statistic_id,
-            unit_class=None,
             unit_of_measurement=None,
         )
         compensation_metadata = StatisticMetaData(
@@ -159,14 +157,8 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
             name=f"{name_prefix} compensation",
             source=DOMAIN,
             statistic_id=compensation_statistic_id,
-            unit_class=None,
             unit_of_measurement=None,
         )
-        consumption_unit_class = (
-            EnergyConverter.UNIT_CLASS
-            if account.meter_type == MeterType.ELEC
-            else VolumeConverter.UNIT_CLASS
-        )
         consumption_unit = (
             UnitOfEnergy.KILO_WATT_HOUR
             if account.meter_type == MeterType.ELEC
@@ -178,7 +170,6 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
             name=f"{name_prefix} consumption",
             source=DOMAIN,
             statistic_id=consumption_statistic_id,
-            unit_class=consumption_unit_class,
             unit_of_measurement=consumption_unit,
         )
         return_metadata = StatisticMetaData(
@@ -187,7 +178,6 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]):
             name=f"{name_prefix} return",
             source=DOMAIN,
             statistic_id=return_statistic_id,
-            unit_class=consumption_unit_class,
             unit_of_measurement=consumption_unit,
         )

@@ -18,8 +18,7 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession

 from .coordinator import PortainerCoordinator

-_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]
+_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SWITCH]


 type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]

@@ -1,10 +1,5 @@
 {
   "entity": {
-    "sensor": {
-      "image": {
-        "default": "mdi:docker"
-      }
-    },
     "switch": {
       "container": {
         "default": "mdi:arrow-down-box",
@@ -1,83 +0,0 @@
-"""Sensor platform for Portainer integration."""
-
-from __future__ import annotations
-
-from collections.abc import Callable
-from dataclasses import dataclass
-
-from pyportainer.models.docker import DockerContainer
-
-from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
-from homeassistant.core import HomeAssistant
-from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-
-from .coordinator import PortainerConfigEntry, PortainerCoordinator
-from .entity import PortainerContainerEntity, PortainerCoordinatorData
-
-
-@dataclass(frozen=True, kw_only=True)
-class PortainerSensorEntityDescription(SensorEntityDescription):
-    """Class to hold Portainer sensor description."""
-
-    value_fn: Callable[[DockerContainer], str | None]
-
-
-CONTAINER_SENSORS: tuple[PortainerSensorEntityDescription, ...] = (
-    PortainerSensorEntityDescription(
-        key="image",
-        translation_key="image",
-        value_fn=lambda data: data.image,
-    ),
-)
-
-
-async def async_setup_entry(
-    hass: HomeAssistant,
-    entry: PortainerConfigEntry,
-    async_add_entities: AddConfigEntryEntitiesCallback,
-) -> None:
-    """Set up Portainer sensors based on a config entry."""
-    coordinator = entry.runtime_data
-
-    async_add_entities(
-        PortainerContainerSensor(
-            coordinator,
-            entity_description,
-            container,
-            endpoint,
-        )
-        for endpoint in coordinator.data.values()
-        for container in endpoint.containers.values()
-        for entity_description in CONTAINER_SENSORS
-    )
-
-
-class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
-    """Representation of a Portainer container sensor."""
-
-    entity_description: PortainerSensorEntityDescription
-
-    def __init__(
-        self,
-        coordinator: PortainerCoordinator,
-        entity_description: PortainerSensorEntityDescription,
-        device_info: DockerContainer,
-        via_device: PortainerCoordinatorData,
-    ) -> None:
-        """Initialize the Portainer container sensor."""
-        self.entity_description = entity_description
-        super().__init__(device_info, coordinator, via_device)
-
-        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
-
-    @property
-    def available(self) -> bool:
-        """Return if the device is available."""
-        return super().available and self.endpoint_id in self.coordinator.data
-
-    @property
-    def native_value(self) -> str | None:
-        """Return the state of the sensor."""
-        return self.entity_description.value_fn(
-            self.coordinator.data[self.endpoint_id].containers[self.device_id]
-        )
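The platform deleted above follows Home Assistant's common EntityDescription idiom: a frozen dataclass carries a value_fn so one generic entity class can serve any number of sensor keys. A minimal self-contained sketch of that idea, with invented names and a plain dict standing in for the coordinator data:

from collections.abc import Callable
from dataclasses import dataclass

@dataclass(frozen=True, kw_only=True)
class ExampleDescription:
    key: str
    value_fn: Callable[[dict], str | None]

DESCRIPTIONS = (
    ExampleDescription(key="image", value_fn=lambda data: data.get("image")),
)

def native_value(description: ExampleDescription, data: dict) -> str | None:
    # The entity delegates state extraction entirely to its description.
    return description.value_fn(data)

print(native_value(DESCRIPTIONS[0], {"image": "nginx:latest"}))  # -> nginx:latest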
@@ -46,11 +46,6 @@
         "name": "Status"
       }
     },
-    "sensor": {
-      "image": {
-        "name": "Image"
-      }
-    },
     "switch": {
       "container": {
         "name": "Container"
@@ -54,7 +54,6 @@ CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36
 EVENT_TYPE_IDS_SCHEMA_VERSION = 37
 STATES_META_SCHEMA_VERSION = 38
 CIRCULAR_MEAN_SCHEMA_VERSION = 49
-UNIT_CLASS_SCHEMA_VERSION = 51

 LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28
 LEGACY_STATES_EVENT_FOREIGN_KEYS_FIXED_SCHEMA_VERSION = 43
@@ -574,18 +574,13 @@ class Recorder(threading.Thread):
         statistic_id: str,
         *,
         new_statistic_id: str | UndefinedType = UNDEFINED,
-        new_unit_class: str | None | UndefinedType = UNDEFINED,
         new_unit_of_measurement: str | None | UndefinedType = UNDEFINED,
         on_done: Callable[[], None] | None = None,
     ) -> None:
         """Update statistics metadata for a statistic_id."""
         self.queue_task(
             UpdateStatisticsMetadataTask(
-                on_done,
-                statistic_id,
-                new_statistic_id,
-                new_unit_class,
-                new_unit_of_measurement,
+                on_done, statistic_id, new_statistic_id, new_unit_of_measurement
             )
         )

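For context on the hunk above: the recorder runs on its own thread and consumes work from a queue, so metadata updates are expressed as small task objects rather than direct database calls. A rough sketch of that pattern under assumed, simplified names (not the recorder's real classes):

from dataclasses import dataclass
from queue import SimpleQueue

@dataclass
class ExampleUpdateMetadataTask:
    statistic_id: str
    new_unit_of_measurement: str | None

    def run(self) -> None:
        # In the real code this mutates the statistics_meta table.
        print(f"update {self.statistic_id} -> {self.new_unit_of_measurement}")

tasks: SimpleQueue[ExampleUpdateMetadataTask] = SimpleQueue()
tasks.put(ExampleUpdateMetadataTask("sensor.energy", "kWh"))
tasks.get().run()  # executed on the recorder thread in the real code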
@@ -71,7 +71,7 @@ class LegacyBase(DeclarativeBase):
     """Base class for tables, used for schema migration."""


-SCHEMA_VERSION = 51
+SCHEMA_VERSION = 50

 _LOGGER = logging.getLogger(__name__)

@@ -756,7 +756,6 @@ class _StatisticsMeta:
     )
     source: Mapped[str | None] = mapped_column(String(32))
     unit_of_measurement: Mapped[str | None] = mapped_column(String(255))
-    unit_class: Mapped[str | None] = mapped_column(String(255))
     has_mean: Mapped[bool | None] = mapped_column(Boolean)
     has_sum: Mapped[bool | None] = mapped_column(Boolean)
     name: Mapped[str | None] = mapped_column(String(255))
@@ -9,7 +9,6 @@ from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.event import async_has_entity_registry_updated_listeners

 from .core import Recorder
-from .statistics import async_update_statistics_metadata
 from .util import filter_unique_constraint_integrity_error, get_instance, session_scope

 _LOGGER = logging.getLogger(__name__)
@@ -28,8 +27,8 @@ def async_setup(hass: HomeAssistant) -> None:
         assert event.data["action"] == "update" and "old_entity_id" in event.data
         old_entity_id = event.data["old_entity_id"]
         new_entity_id = event.data["entity_id"]
-        async_update_statistics_metadata(
-            hass, old_entity_id, new_statistic_id=new_entity_id
+        instance.async_update_statistics_metadata(
+            old_entity_id, new_statistic_id=new_entity_id
         )
         instance.async_update_states_metadata(
             old_entity_id, new_entity_id=new_entity_id
@@ -103,11 +103,7 @@ from .queries import (
     migrate_single_short_term_statistics_row_to_timestamp,
     migrate_single_statistics_row_to_timestamp,
 )
-from .statistics import (
-    _PRIMARY_UNIT_CONVERTERS,
-    cleanup_statistics_timestamp_migration,
-    get_start_time,
-)
+from .statistics import cleanup_statistics_timestamp_migration, get_start_time
 from .tasks import RecorderTask
 from .util import (
     database_job_retry_wrapper,
@@ -2041,21 +2037,6 @@ class _SchemaVersion50Migrator(_SchemaVersionMigrator, target_version=50):
             connection.execute(text("UPDATE statistics_meta SET has_mean=NULL"))


-class _SchemaVersion51Migrator(_SchemaVersionMigrator, target_version=51):
-    def _apply_update(self) -> None:
-        """Version specific update method."""
-        # Add unit class column to StatisticsMeta
-        _add_columns(self.session_maker, "statistics_meta", ["unit_class VARCHAR(255)"])
-        with session_scope(session=self.session_maker()) as session:
-            connection = session.connection()
-            for conv in _PRIMARY_UNIT_CONVERTERS:
-                connection.execute(
-                    update(StatisticsMeta)
-                    .where(StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS))
-                    .values(unit_class=conv.UNIT_CLASS)
-                )
-
-
 def _migrate_statistics_columns_to_timestamp_removing_duplicates(
     hass: HomeAssistant,
     instance: Recorder,
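The migrator removed above follows the standard two-step pattern for a backfilled column: add the column, then derive its value from existing data. A hedged sketch of the same backfill step in plain SQLAlchemy, with an invented helper name and engine handling that is not part of the real migrator:

from sqlalchemy import bindparam, text

def backfill_unit_class(engine, units_by_class: dict[str, list[str]]) -> None:
    """Sketch: set unit_class from unit_of_measurement on existing rows."""
    stmt = text(
        "UPDATE statistics_meta SET unit_class = :unit_class "
        "WHERE unit_of_measurement IN :units"
    ).bindparams(bindparam("units", expanding=True))
    with engine.begin() as connection:
        for unit_class, units in units_by_class.items():
            connection.execute(stmt, {"unit_class": unit_class, "units": units})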
@@ -70,8 +70,6 @@ class StatisticMetaData(TypedDict):
     name: str | None
     source: str
     statistic_id: str
-    unit_class: str | None
-    """Specifies the unit conversion class to use, if applicable."""
     unit_of_measurement: str | None


@@ -35,7 +35,6 @@ import voluptuous as vol
 from homeassistant.const import ATTR_UNIT_OF_MEASUREMENT
 from homeassistant.core import HomeAssistant, callback, valid_entity_id
 from homeassistant.exceptions import HomeAssistantError
-from homeassistant.helpers.frame import report_usage
 from homeassistant.helpers.recorder import DATA_RECORDER
 from homeassistant.helpers.singleton import singleton
 from homeassistant.helpers.typing import UNDEFINED, UndefinedType
@@ -194,48 +193,43 @@ QUERY_STATISTICS_SUMMARY_SUM = (
     .label("rownum"),
 )

-_PRIMARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = [
-    ApparentPowerConverter,
-    AreaConverter,
-    BloodGlucoseConcentrationConverter,
-    ConductivityConverter,
-    DataRateConverter,
-    DistanceConverter,
-    DurationConverter,
-    ElectricCurrentConverter,
-    ElectricPotentialConverter,
-    EnergyConverter,
-    EnergyDistanceConverter,
-    InformationConverter,
-    MassConverter,
-    MassVolumeConcentrationConverter,
-    PowerConverter,
-    PressureConverter,
-    ReactiveEnergyConverter,
-    ReactivePowerConverter,
-    SpeedConverter,
-    TemperatureConverter,
-    UnitlessRatioConverter,
-    VolumeConverter,
-    VolumeFlowRateConverter,
-]
-
-_SECONDARY_UNIT_CONVERTERS: list[type[BaseUnitConverter]] = []
-
 STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = {
-    unit: conv for conv in _PRIMARY_UNIT_CONVERTERS for unit in conv.VALID_UNITS
+    **dict.fromkeys(ApparentPowerConverter.VALID_UNITS, ApparentPowerConverter),
+    **dict.fromkeys(AreaConverter.VALID_UNITS, AreaConverter),
+    **dict.fromkeys(
+        BloodGlucoseConcentrationConverter.VALID_UNITS,
+        BloodGlucoseConcentrationConverter,
+    ),
+    **dict.fromkeys(
+        MassVolumeConcentrationConverter.VALID_UNITS, MassVolumeConcentrationConverter
+    ),
+    **dict.fromkeys(ConductivityConverter.VALID_UNITS, ConductivityConverter),
+    **dict.fromkeys(DataRateConverter.VALID_UNITS, DataRateConverter),
+    **dict.fromkeys(DistanceConverter.VALID_UNITS, DistanceConverter),
+    **dict.fromkeys(DurationConverter.VALID_UNITS, DurationConverter),
+    **dict.fromkeys(ElectricCurrentConverter.VALID_UNITS, ElectricCurrentConverter),
+    **dict.fromkeys(ElectricPotentialConverter.VALID_UNITS, ElectricPotentialConverter),
+    **dict.fromkeys(EnergyConverter.VALID_UNITS, EnergyConverter),
+    **dict.fromkeys(EnergyDistanceConverter.VALID_UNITS, EnergyDistanceConverter),
+    **dict.fromkeys(InformationConverter.VALID_UNITS, InformationConverter),
+    **dict.fromkeys(MassConverter.VALID_UNITS, MassConverter),
+    **dict.fromkeys(PowerConverter.VALID_UNITS, PowerConverter),
+    **dict.fromkeys(PressureConverter.VALID_UNITS, PressureConverter),
+    **dict.fromkeys(ReactiveEnergyConverter.VALID_UNITS, ReactiveEnergyConverter),
+    **dict.fromkeys(ReactivePowerConverter.VALID_UNITS, ReactivePowerConverter),
+    **dict.fromkeys(SpeedConverter.VALID_UNITS, SpeedConverter),
+    **dict.fromkeys(TemperatureConverter.VALID_UNITS, TemperatureConverter),
+    **dict.fromkeys(UnitlessRatioConverter.VALID_UNITS, UnitlessRatioConverter),
+    **dict.fromkeys(VolumeConverter.VALID_UNITS, VolumeConverter),
+    **dict.fromkeys(VolumeFlowRateConverter.VALID_UNITS, VolumeFlowRateConverter),
 }
-"""Map of units to unit converter.
-
-This map includes units which can be converted without knowing the unit class.
-"""

-UNIT_CLASS_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = {
-    conv.UNIT_CLASS: conv
-    for conv in chain(_PRIMARY_UNIT_CONVERTERS, _SECONDARY_UNIT_CONVERTERS)
+UNIT_CLASSES = {
+    unit: converter.UNIT_CLASS
+    for unit, converter in STATISTIC_UNIT_TO_UNIT_CONVERTER.items()
 }
-"""Map of unit class to converter."""


 DATA_SHORT_TERM_STATISTICS_RUN_CACHE = "recorder_short_term_statistics_run_cache"

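Both sides of the hunk above build the same unit-to-converter mapping; the comprehension and the dict.fromkeys spread are interchangeable. A tiny standalone illustration with a fake converter class (invented for this example only):

class FakeConverter:
    VALID_UNITS = {"W", "kW"}
    UNIT_CLASS = "power"

# Comprehension form (one side of the hunk):
by_unit_a = {unit: conv for conv in [FakeConverter] for unit in conv.VALID_UNITS}
# dict.fromkeys form (the other side):
by_unit_b = {**dict.fromkeys(FakeConverter.VALID_UNITS, FakeConverter)}
assert by_unit_a == by_unit_b  # identical mappings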
@@ -321,32 +315,14 @@ class StatisticsRow(BaseStatisticsRow, total=False):
     change: float | None


-def _get_unit_converter(
-    unit_class: str | None, from_unit: str | None
-) -> type[BaseUnitConverter] | None:
-    """Return the unit converter for the given unit class and unit.
-
-    The unit converter is determined from the unit class and unit if the unit class
-    and unit match, otherwise from the unit.
-    """
-    if (
-        conv := UNIT_CLASS_TO_UNIT_CONVERTER.get(unit_class)
-    ) is not None and from_unit in conv.VALID_UNITS:
-        return conv
-    if (conv := STATISTIC_UNIT_TO_UNIT_CONVERTER.get(from_unit)) is not None:
-        return conv
-    return None
-
-
 def get_display_unit(
     hass: HomeAssistant,
     statistic_id: str,
-    unit_class: str | None,
     statistic_unit: str | None,
 ) -> str | None:
     """Return the unit which the statistic will be displayed in."""

-    if (converter := _get_unit_converter(unit_class, statistic_unit)) is None:
+    if (converter := STATISTIC_UNIT_TO_UNIT_CONVERTER.get(statistic_unit)) is None:
         return statistic_unit

     state_unit: str | None = statistic_unit
@@ -361,14 +337,13 @@ def get_display_unit(


 def _get_statistic_to_display_unit_converter(
-    unit_class: str | None,
     statistic_unit: str | None,
     state_unit: str | None,
     requested_units: dict[str, str] | None,
     allow_none: bool = True,
 ) -> Callable[[float | None], float | None] | Callable[[float], float] | None:
     """Prepare a converter from the statistics unit to display unit."""
-    if (converter := _get_unit_converter(unit_class, statistic_unit)) is None:
+    if (converter := STATISTIC_UNIT_TO_UNIT_CONVERTER.get(statistic_unit)) is None:
         return None

     display_unit: str | None
@@ -392,25 +367,24 @@ def _get_statistic_to_display_unit_converter(
     return converter.converter_factory(from_unit=statistic_unit, to_unit=display_unit)


-def _get_display_to_statistic_unit_converter_func(
-    unit_class: str | None,
+def _get_display_to_statistic_unit_converter(
     display_unit: str | None,
     statistic_unit: str | None,
 ) -> Callable[[float], float] | None:
     """Prepare a converter from the display unit to the statistics unit."""
     if (
         display_unit == statistic_unit
-        or (converter := _get_unit_converter(unit_class, statistic_unit)) is None
+        or (converter := STATISTIC_UNIT_TO_UNIT_CONVERTER.get(statistic_unit)) is None
     ):
         return None
     return converter.converter_factory(from_unit=display_unit, to_unit=statistic_unit)


-def _get_unit_converter_func(
-    unit_class: str | None, from_unit: str, to_unit: str
+def _get_unit_converter(
+    from_unit: str, to_unit: str
 ) -> Callable[[float | None], float | None] | None:
     """Prepare a converter from a unit to another unit."""
-    if (conv := _get_unit_converter(unit_class, from_unit)) is not None:
+    for conv in STATISTIC_UNIT_TO_UNIT_CONVERTER.values():
         if from_unit in conv.VALID_UNITS and to_unit in conv.VALID_UNITS:
             if from_unit == to_unit:
                 return None
@@ -420,11 +394,9 @@ def _get_unit_converter_func(
     raise HomeAssistantError


-def can_convert_units(
-    unit_class: str | None, from_unit: str | None, to_unit: str | None
-) -> bool:
+def can_convert_units(from_unit: str | None, to_unit: str | None) -> bool:
     """Return True if it's possible to convert from from_unit to to_unit."""
-    if (converter := _get_unit_converter(unit_class, from_unit)) is not None:
+    for converter in STATISTIC_UNIT_TO_UNIT_CONVERTER.values():
         if from_unit in converter.VALID_UNITS and to_unit in converter.VALID_UNITS:
             return True
     return False
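The net effect of the signature change above: the unit_class-aware side resolves a single converter from stored metadata, while the reverted side scans every converter for one that knows both units. A hedged sketch of the scan the reverted code performs, with invented names:

def can_convert(converters: list, from_unit: str | None, to_unit: str | None) -> bool:
    # Linear scan: two units are convertible if any one converter knows both.
    return any(
        from_unit in conv.VALID_UNITS and to_unit in conv.VALID_UNITS
        for conv in converters
    )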
@@ -891,71 +863,18 @@ def clear_statistics(instance: Recorder, statistic_ids: list[str]) -> None:
         instance.statistics_meta_manager.delete(session, statistic_ids)


-@callback
-def async_update_statistics_metadata(
-    hass: HomeAssistant,
-    statistic_id: str,
-    *,
-    new_statistic_id: str | UndefinedType = UNDEFINED,
-    new_unit_class: str | None | UndefinedType = UNDEFINED,
-    new_unit_of_measurement: str | None | UndefinedType = UNDEFINED,
-    on_done: Callable[[], None] | None = None,
-    _called_from_ws_api: bool = False,
-) -> None:
-    """Update statistics metadata for a statistic_id."""
-    if new_unit_of_measurement is not UNDEFINED and new_unit_class is UNDEFINED:
-        if not _called_from_ws_api:
-            report_usage(
-                (
-                    "doesn't specify unit_class when calling "
-                    "async_update_statistics_metadata"
-                ),
-                breaks_in_ha_version="2026.11",
-                exclude_integrations={DOMAIN},
-            )
-
-        unit = new_unit_of_measurement
-        if unit in STATISTIC_UNIT_TO_UNIT_CONVERTER:
-            new_unit_class = STATISTIC_UNIT_TO_UNIT_CONVERTER[unit].UNIT_CLASS
-        else:
-            new_unit_class = None
-
-    if TYPE_CHECKING:
-        # After the above check, new_unit_class is guaranteed to not be UNDEFINED
-        assert new_unit_class is not UNDEFINED
-
-    if new_unit_of_measurement is not UNDEFINED and new_unit_class is not None:
-        if (converter := UNIT_CLASS_TO_UNIT_CONVERTER.get(new_unit_class)) is None:
-            raise HomeAssistantError(f"Unsupported unit_class: '{new_unit_class}'")
-
-        if new_unit_of_measurement not in converter.VALID_UNITS:
-            raise HomeAssistantError(
-                f"Unsupported unit_of_measurement '{new_unit_of_measurement}' "
-                f"for unit_class '{new_unit_class}'"
-            )
-
-    get_instance(hass).async_update_statistics_metadata(
-        statistic_id,
-        new_statistic_id=new_statistic_id,
-        new_unit_class=new_unit_class,
-        new_unit_of_measurement=new_unit_of_measurement,
-        on_done=on_done,
-    )
-
-
 def update_statistics_metadata(
     instance: Recorder,
     statistic_id: str,
     new_statistic_id: str | None | UndefinedType,
-    new_unit_class: str | None | UndefinedType,
     new_unit_of_measurement: str | None | UndefinedType,
 ) -> None:
     """Update statistics metadata for a statistic_id."""
     statistics_meta_manager = instance.statistics_meta_manager
-    if new_unit_class is not UNDEFINED and new_unit_of_measurement is not UNDEFINED:
+    if new_unit_of_measurement is not UNDEFINED:
         with session_scope(session=instance.get_session()) as session:
             statistics_meta_manager.update_unit_of_measurement(
-                session, statistic_id, new_unit_class, new_unit_of_measurement
+                session, statistic_id, new_unit_of_measurement
             )
     if new_statistic_id is not UNDEFINED and new_statistic_id is not None:
         with session_scope(
@@ -1007,16 +926,13 @@ def _statistic_by_id_from_metadata(
     return {
         meta["statistic_id"]: {
             "display_unit_of_measurement": get_display_unit(
-                hass,
-                meta["statistic_id"],
-                meta["unit_class"],
-                meta["unit_of_measurement"],
+                hass, meta["statistic_id"], meta["unit_of_measurement"]
             ),
             "mean_type": meta["mean_type"],
             "has_sum": meta["has_sum"],
             "name": meta["name"],
             "source": meta["source"],
-            "unit_class": meta["unit_class"],
+            "unit_class": UNIT_CLASSES.get(meta["unit_of_measurement"]),
             "unit_of_measurement": meta["unit_of_measurement"],
         }
         for _, meta in metadata.values()
@@ -1092,7 +1008,7 @@ def list_statistic_ids(
             "has_sum": meta["has_sum"],
             "name": meta["name"],
             "source": meta["source"],
-            "unit_class": meta["unit_class"],
+            "unit_class": UNIT_CLASSES.get(meta["unit_of_measurement"]),
             "unit_of_measurement": meta["unit_of_measurement"],
         }

@@ -1828,13 +1744,10 @@ def statistic_during_period(
     else:
         result["change"] = None

-    unit_class = metadata[1]["unit_class"]
     state_unit = unit = metadata[1]["unit_of_measurement"]
     if state := hass.states.get(statistic_id):
         state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
-    convert = _get_statistic_to_display_unit_converter(
-        unit_class, unit, state_unit, units
-    )
+    convert = _get_statistic_to_display_unit_converter(unit, state_unit, units)

     if not convert:
         return result
@@ -1917,13 +1830,10 @@ def _augment_result_with_change(
         metadata_by_id = _metadata[row.metadata_id]
         statistic_id = metadata_by_id["statistic_id"]

-        unit_class = metadata_by_id["unit_class"]
         state_unit = unit = metadata_by_id["unit_of_measurement"]
         if state := hass.states.get(statistic_id):
             state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
-        convert = _get_statistic_to_display_unit_converter(
-            unit_class, unit, state_unit, units
-        )
+        convert = _get_statistic_to_display_unit_converter(unit, state_unit, units)

         if convert is not None:
             prev_sums[statistic_id] = convert(row.sum)
@@ -2516,12 +2426,11 @@ def _sorted_statistics_to_dict(
         metadata_by_id = metadata[meta_id]
         statistic_id = metadata_by_id["statistic_id"]
         if convert_units:
-            unit_class = metadata_by_id["unit_class"]
             state_unit = unit = metadata_by_id["unit_of_measurement"]
             if state := hass.states.get(statistic_id):
                 state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
             convert = _get_statistic_to_display_unit_converter(
-                unit_class, unit, state_unit, units, allow_none=False
+                unit, state_unit, units, allow_none=False
             )
         else:
             convert = None
@@ -2592,27 +2501,6 @@ def _async_import_statistics(
     statistics: Iterable[StatisticData],
 ) -> None:
     """Validate timestamps and insert an import_statistics job in the queue."""
-    # If unit class is not set, we try to set it based on the unit of measurement
-    # Note: This can't happen from the type checker's perspective, but we need
-    # to guard against custom integrations that have not been updated to set
-    # the unit_class.
-    if "unit_class" not in metadata:
-        unit = metadata["unit_of_measurement"]  # type: ignore[unreachable]
-        if unit in STATISTIC_UNIT_TO_UNIT_CONVERTER:
-            metadata["unit_class"] = STATISTIC_UNIT_TO_UNIT_CONVERTER[unit].UNIT_CLASS
-        else:
-            metadata["unit_class"] = None
-
-    if (unit_class := metadata["unit_class"]) is not None:
-        if (converter := UNIT_CLASS_TO_UNIT_CONVERTER.get(unit_class)) is None:
-            raise HomeAssistantError(f"Unsupported unit_class: '{unit_class}'")
-
-        if metadata["unit_of_measurement"] not in converter.VALID_UNITS:
-            raise HomeAssistantError(
-                f"Unsupported unit_of_measurement '{metadata['unit_of_measurement']}' "
-                f"for unit_class '{unit_class}'"
-            )
-
     for statistic in statistics:
         start = statistic["start"]
         if start.tzinfo is None or start.tzinfo.utcoffset(start) is None:
@@ -2644,8 +2532,6 @@ def async_import_statistics(
     hass: HomeAssistant,
     metadata: StatisticMetaData,
     statistics: Iterable[StatisticData],
-    *,
-    _called_from_ws_api: bool = False,
 ) -> None:
     """Import hourly statistics from an internal source.

@@ -2658,13 +2544,6 @@ def async_import_statistics(
     if not metadata["source"] or metadata["source"] != DOMAIN:
         raise HomeAssistantError("Invalid source")

-    if "unit_class" not in metadata and not _called_from_ws_api:  # type: ignore[unreachable]
-        report_usage(  # type: ignore[unreachable]
-            "doesn't specify unit_class when calling async_import_statistics",
-            breaks_in_ha_version="2026.11",
-            exclude_integrations={DOMAIN},
-        )
-
     _async_import_statistics(hass, metadata, statistics)

@@ -2673,8 +2552,6 @@ def async_add_external_statistics(
     hass: HomeAssistant,
     metadata: StatisticMetaData,
     statistics: Iterable[StatisticData],
-    *,
-    _called_from_ws_api: bool = False,
 ) -> None:
     """Add hourly statistics from an external source.

@@ -2689,13 +2566,6 @@ def async_add_external_statistics(
     if not metadata["source"] or metadata["source"] != domain:
         raise HomeAssistantError("Invalid source")

-    if "unit_class" not in metadata and not _called_from_ws_api:  # type: ignore[unreachable]
-        report_usage(  # type: ignore[unreachable]
-            "doesn't specify unit_class when calling async_add_external_statistics",
-            breaks_in_ha_version="2026.11",
-            exclude_integrations={DOMAIN},
-        )
-
     _async_import_statistics(hass, metadata, statistics)

@@ -2829,10 +2699,9 @@ def adjust_statistics(
     if statistic_id not in metadata:
         return True

-    unit_class = metadata[statistic_id][1]["unit_class"]
     statistic_unit = metadata[statistic_id][1]["unit_of_measurement"]
-    if convert := _get_display_to_statistic_unit_converter_func(
-        unit_class, adjustment_unit, statistic_unit
+    if convert := _get_display_to_statistic_unit_converter(
+        adjustment_unit, statistic_unit
     ):
         sum_adjustment = convert(sum_adjustment)

@@ -2900,9 +2769,8 @@ def change_statistics_unit(
         return

     metadata_id = metadata[0]
-    unit_class = metadata[1]["unit_class"]

-    if not (convert := _get_unit_converter_func(unit_class, old_unit, new_unit)):
+    if not (convert := _get_unit_converter(old_unit, new_unit)):
         _LOGGER.warning(
             "Statistics unit of measurement for %s is already %s",
             statistic_id,
@@ -2918,14 +2786,12 @@ def change_statistics_unit(
         _change_statistics_unit_for_table(session, table, metadata_id, convert)

         statistics_meta_manager.update_unit_of_measurement(
-            session,
-            statistic_id,
-            unit_class,
-            new_unit,
+            session, statistic_id, new_unit
         )


-async def async_change_statistics_unit(
+@callback
+def async_change_statistics_unit(
     hass: HomeAssistant,
     statistic_id: str,
     *,
@@ -2933,17 +2799,7 @@ async def async_change_statistics_unit(
     old_unit_of_measurement: str,
 ) -> None:
     """Change statistics unit for a statistic_id."""
-    metadatas = await get_instance(hass).async_add_executor_job(
-        partial(get_metadata, hass, statistic_ids={statistic_id})
-    )
-    if statistic_id not in metadatas:
-        raise HomeAssistantError(f"No metadata found for {statistic_id}")
-
-    metadata = metadatas[statistic_id][1]
-
-    if not can_convert_units(
-        metadata["unit_class"], old_unit_of_measurement, new_unit_of_measurement
-    ):
+    if not can_convert_units(old_unit_of_measurement, new_unit_of_measurement):
         raise HomeAssistantError(
             f"Can't convert {old_unit_of_measurement} to {new_unit_of_measurement}"
         )
@@ -13,10 +13,9 @@ from sqlalchemy.orm.session import Session
 from sqlalchemy.sql.expression import true
 from sqlalchemy.sql.lambdas import StatementLambdaElement

-from ..const import CIRCULAR_MEAN_SCHEMA_VERSION, UNIT_CLASS_SCHEMA_VERSION
+from ..const import CIRCULAR_MEAN_SCHEMA_VERSION
 from ..db_schema import StatisticsMeta
 from ..models import StatisticMeanType, StatisticMetaData
-from ..statistics import STATISTIC_UNIT_TO_UNIT_CONVERTER
 from ..util import execute_stmt_lambda_element

 if TYPE_CHECKING:
@@ -42,7 +41,6 @@ INDEX_UNIT_OF_MEASUREMENT: Final = 3
 INDEX_HAS_SUM: Final = 4
 INDEX_NAME: Final = 5
 INDEX_MEAN_TYPE: Final = 6
-INDEX_UNIT_CLASS: Final = 7


 def _generate_get_metadata_stmt(
@@ -60,8 +58,6 @@ def _generate_get_metadata_stmt(
         columns.append(StatisticsMeta.mean_type)
     else:
         columns.append(StatisticsMeta.has_mean)
-    if schema_version >= UNIT_CLASS_SCHEMA_VERSION:
-        columns.append(StatisticsMeta.unit_class)
     stmt = lambda_stmt(lambda: select(*columns))
     if statistic_ids:
         stmt += lambda q: q.where(StatisticsMeta.statistic_id.in_(statistic_ids))
@@ -144,13 +140,6 @@ class StatisticsMetaManager:
                 if row[INDEX_MEAN_TYPE]
                 else StatisticMeanType.NONE
             )
-            if self.recorder.schema_version >= UNIT_CLASS_SCHEMA_VERSION:
-                unit_class = row[INDEX_UNIT_CLASS]
-            else:
-                conv = STATISTIC_UNIT_TO_UNIT_CONVERTER.get(
-                    row[INDEX_UNIT_OF_MEASUREMENT]
-                )
-                unit_class = conv.UNIT_CLASS if conv else None
             meta = {
                 "has_mean": mean_type is StatisticMeanType.ARITHMETIC,
                 "mean_type": mean_type,
@@ -159,7 +148,6 @@ class StatisticsMetaManager:
                 "source": row[INDEX_SOURCE],
                 "statistic_id": statistic_id,
                 "unit_of_measurement": row[INDEX_UNIT_OF_MEASUREMENT],
-                "unit_class": unit_class,
             }
             id_meta = (row_id, meta)
             results[statistic_id] = id_meta
@@ -218,7 +206,6 @@ class StatisticsMetaManager:
             old_metadata["mean_type"] != new_metadata["mean_type"]
             or old_metadata["has_sum"] != new_metadata["has_sum"]
             or old_metadata["name"] != new_metadata["name"]
-            or old_metadata["unit_class"] != new_metadata["unit_class"]
             or old_metadata["unit_of_measurement"]
             != new_metadata["unit_of_measurement"]
         ):
@@ -230,7 +217,6 @@ class StatisticsMetaManager:
                     StatisticsMeta.mean_type: new_metadata["mean_type"],
                     StatisticsMeta.has_sum: new_metadata["has_sum"],
                     StatisticsMeta.name: new_metadata["name"],
-                    StatisticsMeta.unit_class: new_metadata["unit_class"],
                     StatisticsMeta.unit_of_measurement: new_metadata["unit_of_measurement"],
                 },
                 synchronize_session=False,
@@ -342,11 +328,7 @@ class StatisticsMetaManager:
         )

     def update_unit_of_measurement(
-        self,
-        session: Session,
-        statistic_id: str,
-        new_unit_class: str | None,
-        new_unit: str | None,
+        self, session: Session, statistic_id: str, new_unit: str | None
     ) -> None:
         """Update the unit of measurement for a statistic_id.

@@ -356,12 +338,7 @@ class StatisticsMetaManager:
         self._assert_in_recorder_thread()
         session.query(StatisticsMeta).filter(
             StatisticsMeta.statistic_id == statistic_id
-        ).update(
-            {
-                StatisticsMeta.unit_of_measurement: new_unit,
-                StatisticsMeta.unit_class: new_unit_class,
-            }
-        )
+        ).update({StatisticsMeta.unit_of_measurement: new_unit})
         self._clear_cache([statistic_id])

     def update_statistic_id(
@@ -77,7 +77,6 @@ class UpdateStatisticsMetadataTask(RecorderTask):
     on_done: Callable[[], None] | None
     statistic_id: str
     new_statistic_id: str | None | UndefinedType
-    new_unit_class: str | None | UndefinedType
     new_unit_of_measurement: str | None | UndefinedType

     def run(self, instance: Recorder) -> None:
@@ -86,7 +85,6 @@ class UpdateStatisticsMetadataTask(RecorderTask):
             instance,
             self.statistic_id,
             self.new_statistic_id,
-            self.new_unit_class,
             self.new_unit_of_measurement,
         )
         if self.on_done:
@@ -4,7 +4,6 @@ from __future__ import annotations

 import asyncio
 from datetime import datetime as dt
-import logging
 from typing import Any, Literal, cast

 import voluptuous as vol
@@ -15,7 +14,6 @@ from homeassistant.core import HomeAssistant, callback, valid_entity_id
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.json import json_bytes
-from homeassistant.helpers.typing import UNDEFINED
 from homeassistant.util import dt as dt_util
 from homeassistant.util.unit_conversion import (
     ApparentPowerConverter,
@@ -45,12 +43,11 @@ from homeassistant.util.unit_conversion import (

 from .models import StatisticMeanType, StatisticPeriod
 from .statistics import (
-    UNIT_CLASS_TO_UNIT_CONVERTER,
+    STATISTIC_UNIT_TO_UNIT_CONVERTER,
     async_add_external_statistics,
     async_change_statistics_unit,
     async_import_statistics,
     async_list_statistic_ids,
-    async_update_statistics_metadata,
     list_statistic_ids,
     statistic_during_period,
     statistics_during_period,
@@ -59,8 +56,6 @@ from .statistics import (
 )
 from .util import PERIOD_SCHEMA, get_instance, resolve_period

-_LOGGER = logging.getLogger(__name__)
-
 CLEAR_STATISTICS_TIME_OUT = 10
 UPDATE_STATISTICS_METADATA_TIME_OUT = 10

@@ -397,7 +392,6 @@ async def ws_get_statistics_metadata(
     {
         vol.Required("type"): "recorder/update_statistics_metadata",
         vol.Required("statistic_id"): str,
-        vol.Optional("unit_class"): vol.Any(str, None),
         vol.Required("unit_of_measurement"): vol.Any(str, None),
     }
 )
@@ -407,8 +401,6 @@ async def ws_update_statistics_metadata(
 ) -> None:
     """Update statistics metadata for a statistic_id.

-    The unit_class specifies which unit conversion class to use, if applicable.
-
     Only the normalized unit of measurement can be updated.
     """
     done_event = asyncio.Event()
@@ -416,20 +408,10 @@ async def ws_update_statistics_metadata(
     def update_statistics_metadata_done() -> None:
         hass.loop.call_soon_threadsafe(done_event.set)

-    if "unit_class" not in msg:
-        _LOGGER.warning(
-            "WS command recorder/update_statistics_metadata called without "
-            "specifying unit_class in metadata, this is deprecated and will "
-            "stop working in HA Core 2026.11"
-        )
-
-    async_update_statistics_metadata(
-        hass,
+    get_instance(hass).async_update_statistics_metadata(
         msg["statistic_id"],
-        new_unit_class=msg.get("unit_class", UNDEFINED),
         new_unit_of_measurement=msg["unit_of_measurement"],
         on_done=update_statistics_metadata_done,
-        _called_from_ws_api=True,
     )
     try:
         async with asyncio.timeout(UPDATE_STATISTICS_METADATA_TIME_OUT):
@@ -452,15 +434,15 @@ async def ws_update_statistics_metadata(
         vol.Required("old_unit_of_measurement"): vol.Any(str, None),
     }
 )
-@websocket_api.async_response
-async def ws_change_statistics_unit(
+@callback
+def ws_change_statistics_unit(
     hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
 ) -> None:
     """Change the unit_of_measurement for a statistic_id.

     All existing statistics will be converted to the new unit.
     """
-    await async_change_statistics_unit(
+    async_change_statistics_unit(
         hass,
         msg["statistic_id"],
         new_unit_of_measurement=msg["new_unit_of_measurement"],
@@ -505,23 +487,17 @@ async def ws_adjust_sum_statistics(
         return
     metadata = metadatas[0]

-    def valid_units(
-        unit_class: str | None, statistics_unit: str | None, adjustment_unit: str | None
-    ) -> bool:
+    def valid_units(statistics_unit: str | None, adjustment_unit: str | None) -> bool:
         if statistics_unit == adjustment_unit:
             return True
-        if (
-            (converter := UNIT_CLASS_TO_UNIT_CONVERTER.get(unit_class)) is not None
-            and statistics_unit in converter.VALID_UNITS
-            and adjustment_unit in converter.VALID_UNITS
-        ):
+        converter = STATISTIC_UNIT_TO_UNIT_CONVERTER.get(statistics_unit)
+        if converter is not None and adjustment_unit in converter.VALID_UNITS:
             return True
         return False

-    unit_class = metadata["unit_class"]
     stat_unit = metadata["statistics_unit_of_measurement"]
     adjustment_unit = msg["adjustment_unit_of_measurement"]
-    if not valid_units(unit_class, stat_unit, adjustment_unit):
+    if not valid_units(stat_unit, adjustment_unit):
         connection.send_error(
             msg["id"],
             "invalid_units",
@@ -545,7 +521,6 @@ async def ws_adjust_sum_statistics(
                 vol.Required("name"): vol.Any(str, None),
                 vol.Required("source"): str,
                 vol.Required("statistic_id"): str,
-                vol.Optional("unit_class"): vol.Any(str, None),
                 vol.Required("unit_of_measurement"): vol.Any(str, None),
             },
             vol.Required("stats"): [
@@ -565,25 +540,16 @@ async def ws_adjust_sum_statistics(
|
|||||||
def ws_import_statistics(
|
def ws_import_statistics(
|
||||||
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Import statistics.
|
"""Import statistics."""
|
||||||
|
|
||||||
The unit_class specifies which unit conversion class to use, if applicable.
|
|
||||||
"""
|
|
||||||
metadata = msg["metadata"]
|
metadata = msg["metadata"]
|
||||||
# The WS command will be changed in a follow up PR
|
# The WS command will be changed in a follow up PR
|
||||||
metadata["mean_type"] = (
|
metadata["mean_type"] = (
|
||||||
StatisticMeanType.ARITHMETIC if metadata["has_mean"] else StatisticMeanType.NONE
|
StatisticMeanType.ARITHMETIC if metadata["has_mean"] else StatisticMeanType.NONE
|
||||||
)
|
)
|
||||||
if "unit_class" not in metadata:
|
|
||||||
_LOGGER.warning(
|
|
||||||
"WS command recorder/import_statistics called without specifying "
|
|
||||||
"unit_class in metadata, this is deprecated and will stop working "
|
|
||||||
"in HA Core 2026.11"
|
|
||||||
)
|
|
||||||
stats = msg["stats"]
|
stats = msg["stats"]
|
||||||
|
|
||||||
if valid_entity_id(metadata["statistic_id"]):
|
if valid_entity_id(metadata["statistic_id"]):
|
||||||
async_import_statistics(hass, metadata, stats, _called_from_ws_api=True)
|
async_import_statistics(hass, metadata, stats)
|
||||||
else:
|
else:
|
||||||
async_add_external_statistics(hass, metadata, stats, _called_from_ws_api=True)
|
async_add_external_statistics(hass, metadata, stats)
|
||||||
connection.send_result(msg["id"])
|
connection.send_result(msg["id"])
|
||||||
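For reference, a plausible shape for a recorder/import_statistics websocket payload as a Python dict; the left-hand side of the hunk logs a deprecation warning when unit_class is missing from the metadata, while the right-hand side drops that check. The statistic_id and sample values here are invented for illustration.

# Hypothetical example payload; field values are invented.
msg = {
    "id": 1,
    "type": "recorder/import_statistics",
    "metadata": {
        "has_mean": False,
        "has_sum": True,
        "name": "Total imported energy",
        "source": "recorder",
        "statistic_id": "sensor.energy_import",
        "unit_class": "energy",  # required by the old side to avoid the warning
        "unit_of_measurement": "kWh",
    },
    "stats": [
        {"start": "2025-01-01T00:00:00+00:00", "sum": 42.0},
    ],
}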
@@ -48,7 +48,7 @@ from .const import (

 DEFAULT_OFF_DELAY = 2.0

-CONNECT_TIMEOUT = 60.0
+CONNECT_TIMEOUT = 30.0

 _LOGGER = logging.getLogger(__name__)

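A minimal sketch of how such a connect-timeout constant is typically enforced with asyncio.timeout (Python 3.11+), mirroring the pattern seen earlier in this diff; the connection coroutine below is a stand-in, not the integration's real code.

import asyncio

CONNECT_TIMEOUT = 30.0  # the value on the right-hand side of the hunk above

async def connect() -> None:
    """Stand-in for a real device connection coroutine."""
    await asyncio.sleep(0.1)

async def main() -> None:
    # asyncio.timeout raises TimeoutError if connect() exceeds the budget.
    async with asyncio.timeout(CONNECT_TIMEOUT):
        await connect()

asyncio.run(main())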
@@ -24,7 +24,6 @@
     },
     "config_subentries": {
       "partition": {
-        "entry_type": "Partition",
         "initiate_flow": {
           "user": "Add partition"
         },
@@ -58,7 +57,6 @@
         }
       },
       "zone": {
-        "entry_type": "Zone",
         "initiate_flow": {
           "user": "Add zone"
         },
@@ -93,7 +91,6 @@
         }
       },
       "output": {
-        "entry_type": "Output",
         "initiate_flow": {
           "user": "Add output"
         },
@@ -128,7 +125,6 @@
         }
       },
       "switchable_output": {
-        "entry_type": "Switchable output",
         "initiate_flow": {
           "user": "Add switchable output"
         },
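Sketched as a Python dict, the resulting shape of one config_subentries translation block after this change: the "entry_type" key is gone, and only the keys actually visible in the hunk are shown (the elided siblings stay elided).

# Shape after the change; omitted keys are not reconstructed here.
config_subentries = {
    "partition": {
        "initiate_flow": {
            "user": "Add partition",
        },
        # ...remaining keys not shown in the diff...
    },
}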
@@ -27,7 +27,6 @@ from homeassistant.components.recorder.models import (
     StatisticResult,
 )
 from homeassistant.const import (
-    ATTR_DEVICE_CLASS,
     ATTR_UNIT_OF_MEASUREMENT,
     REVOLUTIONS_PER_MINUTE,
     UnitOfIrradiance,
@@ -44,14 +43,12 @@ from homeassistant.util import dt as dt_util
 from homeassistant.util.async_ import run_callback_threadsafe
 from homeassistant.util.enum import try_parse_enum
 from homeassistant.util.hass_dict import HassKey
-from homeassistant.util.unit_conversion import BaseUnitConverter

 from .const import (
     AMBIGUOUS_UNITS,
     ATTR_LAST_RESET,
     ATTR_STATE_CLASS,
     DOMAIN,
-    UNIT_CONVERTERS,
     SensorStateClass,
     UnitOfVolumeFlowRate,
 )
@@ -241,41 +238,12 @@ def _is_numeric(state: State) -> bool:
     return False


-def _get_unit_class(
-    device_class: str | None,
-    unit: str | None,
-) -> str | None:
-    """Return the unit class for the given device class and unit.
-
-    The unit class is determined from the device class and unit if possible,
-    otherwise from the unit.
-    """
-    if (
-        device_class
-        and (conv := UNIT_CONVERTERS.get(device_class))
-        and unit in conv.VALID_UNITS
-    ):
-        return conv.UNIT_CLASS
-    if conv := statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER.get(unit):
-        return conv.UNIT_CLASS
-    return None
-
-
-def _get_unit_converter(
-    unit_class: str | None,
-) -> type[BaseUnitConverter] | None:
-    """Return the unit converter for the given unit class."""
-    if not unit_class:
-        return None
-    return statistics.UNIT_CLASS_TO_UNIT_CONVERTER[unit_class]
-
-
 def _normalize_states(
     hass: HomeAssistant,
     old_metadatas: dict[str, tuple[int, StatisticMetaData]],
     fstates: list[tuple[float, State]],
     entity_id: str,
-) -> tuple[str | None, str | None, list[tuple[float, State]]]:
+) -> tuple[str | None, list[tuple[float, State]]]:
     """Normalize units."""
     state_unit: str | None = None
     statistics_unit: str | None
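A self-contained sketch of the lookup logic in the removed _get_unit_class helper: prefer the device-class-keyed registry, then fall back to the unit-keyed one. The two tables below are toy stand-ins for UNIT_CONVERTERS and statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER, which hold converter classes rather than plain values.

# Toy registries: device class -> (unit_class, valid units), and unit -> unit_class.
DEVICE_CLASS_TABLE = {"temperature": ("temperature", {"°C", "°F", "K"})}
UNIT_TABLE = {"°C": "temperature", "°F": "temperature", "K": "temperature"}

def get_unit_class(device_class: str | None, unit: str | None) -> str | None:
    """Prefer the device class mapping, fall back to the bare unit."""
    if device_class and (entry := DEVICE_CLASS_TABLE.get(device_class)):
        unit_class, valid_units = entry
        if unit in valid_units:
            return unit_class
    return UNIT_TABLE.get(unit)

print(get_unit_class("temperature", "°F"))  # temperature
print(get_unit_class(None, "K"))            # temperature (unit fallback)
print(get_unit_class(None, "widgets"))      # None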
@@ -285,16 +253,11 @@ def _normalize_states(
         # We've not seen this sensor before, the first valid state determines the unit
         # used for statistics
         statistics_unit = state_unit
-        unit_class = _get_unit_class(
-            fstates[0][1].attributes.get(ATTR_DEVICE_CLASS),
-            state_unit,
-        )
     else:
         # We have seen this sensor before, use the unit from metadata
         statistics_unit = old_metadata["unit_of_measurement"]
-        unit_class = old_metadata["unit_class"]

-    if not (converter := _get_unit_converter(unit_class)):
+    if statistics_unit not in statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER:
         # The unit used by this sensor doesn't support unit conversion

         all_units = _get_units(fstates)
@@ -320,15 +283,11 @@ def _normalize_states(
                 extra,
                 LINK_DEV_STATISTICS,
             )
-            return None, None, []
+            return None, []

-        if state_unit != statistics_unit:
-            unit_class = _get_unit_class(
-                fstates[0][1].attributes.get(ATTR_DEVICE_CLASS),
-                state_unit,
-            )
-        return unit_class, state_unit, fstates
+        return state_unit, fstates

+    converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER[statistics_unit]
     valid_fstates: list[tuple[float, State]] = []
     convert: Callable[[float], float] | None = None
     last_unit: str | None | UndefinedType = UNDEFINED
@@ -371,7 +330,7 @@ def _normalize_states(

             valid_fstates.append((fstate, state))

-    return unit_class, statistics_unit, valid_fstates
+    return statistics_unit, valid_fstates


 def _suggest_report_issue(hass: HomeAssistant, entity_id: str) -> str:
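A standalone sketch of the normalization idea in _normalize_states: every sample is converted to the unit the statistics were first recorded in, and samples whose unit cannot be converted are dropped. The conversion table is illustrative; the real code resolves a converter class and builds the callable from each sample's own unit.

# Illustrative converters to a "°C" statistics unit.
TO_CELSIUS = {"°C": lambda v: v, "°F": lambda v: (v - 32.0) * 5.0 / 9.0}

def normalize(samples: list[tuple[float, str]], statistics_unit: str = "°C"):
    """Return (statistics_unit, sample values converted to that unit)."""
    valid: list[float] = []
    for value, unit in samples:
        if (convert := TO_CELSIUS.get(unit)) is None:
            continue  # drop samples whose unit we cannot convert
        valid.append(convert(value))
    return statistics_unit, valid

print(normalize([(20.0, "°C"), (68.0, "°F"), (3.0, "bar")]))
# ('°C', [20.0, 20.0])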
@@ -557,15 +516,13 @@ def compile_statistics(  # noqa: C901
     old_metadatas = statistics.get_metadata_with_session(
         get_instance(hass), session, statistic_ids=set(entities_with_float_states)
     )
-    to_process: list[
-        tuple[str, str | None, str | None, str, list[tuple[float, State]]]
-    ] = []
+    to_process: list[tuple[str, str | None, str, list[tuple[float, State]]]] = []
     to_query: set[str] = set()
     for _state in sensor_states:
         entity_id = _state.entity_id
         if not (maybe_float_states := entities_with_float_states.get(entity_id)):
             continue
-        unit_class, statistics_unit, valid_float_states = _normalize_states(
+        statistics_unit, valid_float_states = _normalize_states(
             hass,
             old_metadatas,
             maybe_float_states,
@@ -574,9 +531,7 @@ def compile_statistics(  # noqa: C901
         if not valid_float_states:
             continue
         state_class: str = _state.attributes[ATTR_STATE_CLASS]
-        to_process.append(
-            (entity_id, unit_class, statistics_unit, state_class, valid_float_states)
-        )
+        to_process.append((entity_id, statistics_unit, state_class, valid_float_states))
         if "sum" in wanted_statistics[entity_id].types:
             to_query.add(entity_id)

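The practical effect on the to_process rows, sketched with invented placeholder values: the unit_class element disappears from each tuple.

# Placeholder rows showing the tuple shape on each side of this hunk.
old_row = ("sensor.outdoor_temp", "temperature", "°C", "measurement", [])
new_row = ("sensor.outdoor_temp", "°C", "measurement", [])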
@@ -585,7 +540,6 @@ def compile_statistics(  # noqa: C901
     )
     for (  # pylint: disable=too-many-nested-blocks
         entity_id,
-        unit_class,
         statistics_unit,
         state_class,
         valid_float_states,
@@ -650,7 +604,6 @@ def compile_statistics(  # noqa: C901
             "name": None,
             "source": RECORDER_DOMAIN,
             "statistic_id": entity_id,
-            "unit_class": unit_class,
             "unit_of_measurement": statistics_unit,
         }

@@ -816,17 +769,13 @@ def list_statistic_ids(
         if "mean" in provided_statistics.types:
             mean_type = provided_statistics.mean_type

-        unit = attributes.get(ATTR_UNIT_OF_MEASUREMENT)
-        unit_class = _get_unit_class(attributes.get(ATTR_DEVICE_CLASS), unit)
-
         result[entity_id] = {
             "mean_type": mean_type,
             "has_sum": has_sum,
             "name": None,
             "source": RECORDER_DOMAIN,
             "statistic_id": entity_id,
-            "unit_class": unit_class,
-            "unit_of_measurement": unit,
+            "unit_of_measurement": attributes.get(ATTR_UNIT_OF_MEASUREMENT),
         }

     return result
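Sketched with invented values, the per-entity metadata dict built on the right-hand side of the last hunk: unit_of_measurement is read straight from the state attributes, and no unit_class is derived.

# Invented example values; only the dict shape mirrors the hunk above.
attributes = {"unit_of_measurement": "kWh"}
result_entry = {
    "mean_type": None,
    "has_sum": True,
    "name": None,
    "source": "recorder",
    "statistic_id": "sensor.energy_import",
    "unit_of_measurement": attributes.get("unit_of_measurement"),
}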
@@ -1,7 +1,7 @@
 {
   "domain": "sharkiq",
   "name": "Shark IQ",
-  "codeowners": ["@JeffResc", "@funkybunch", "@TheOneOgre"],
+  "codeowners": ["@JeffResc", "@funkybunch"],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/sharkiq",
   "iot_class": "cloud_polling",
Some files were not shown because too many files have changed in this diff.