Mirror of https://github.com/home-assistant/core.git, synced 2025-10-04 01:09:27 +00:00

Compare commits: 3 commits, recorder_t...improve-zh

Commits:
c1b799856a
2f4e3b98f3
f2c354eb3d

@@ -326,7 +326,6 @@ homeassistant.components.london_underground.*
homeassistant.components.lookin.*
homeassistant.components.lovelace.*
homeassistant.components.luftdaten.*
homeassistant.components.lunatone.*
homeassistant.components.madvr.*
homeassistant.components.manual.*
homeassistant.components.mastodon.*

CODEOWNERS (generated, 2 changes)
@@ -910,8 +910,6 @@ build.json @home-assistant/supervisor
/homeassistant/components/luci/ @mzdrale
/homeassistant/components/luftdaten/ @fabaff @frenck
/tests/components/luftdaten/ @fabaff @frenck
/homeassistant/components/lunatone/ @MoonDevLT
/tests/components/lunatone/ @MoonDevLT
/homeassistant/components/lupusec/ @majuss @suaveolent
/tests/components/lupusec/ @majuss @suaveolent
/homeassistant/components/lutron/ @cdheiser @wilburCForce

build.yaml (10 changes)
@@ -1,10 +1,10 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.0
  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.0
  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.0
  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.0
  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.0
  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.3
  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.3
  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.3
  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.3
  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.3
codenotary:
  signer: notary@home-assistant.io
  base_image: notary@home-assistant.io
@@ -616,44 +616,34 @@ async def async_enable_logging(
        ),
    )

    logger = logging.getLogger()
    logger.setLevel(logging.INFO if verbose else logging.WARNING)

    # Log errors to a file if we have write access to file or config dir
    if log_file is None:
        default_log_path = hass.config.path(ERROR_LOG_FILENAME)
        if "SUPERVISOR" in os.environ:
            _LOGGER.info("Running in Supervisor, not logging to file")
            # Rename the default log file if it exists, since previous versions created
            # it even on Supervisor
            if os.path.isfile(default_log_path):
                with contextlib.suppress(OSError):
                    os.rename(default_log_path, f"{default_log_path}.old")
            err_log_path = None
        else:
            err_log_path = default_log_path
        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
    else:
        err_log_path = os.path.abspath(log_file)

    if err_log_path:
        err_path_exists = os.path.isfile(err_log_path)
        err_dir = os.path.dirname(err_log_path)
    err_path_exists = os.path.isfile(err_log_path)
    err_dir = os.path.dirname(err_log_path)

        # Check if we can write to the error log if it exists or that
        # we can create files in the containing directory if not.
        if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
            not err_path_exists and os.access(err_dir, os.W_OK)
        ):
            err_handler = await hass.async_add_executor_job(
                _create_log_file, err_log_path, log_rotate_days
            )
    # Check if we can write to the error log if it exists or that
    # we can create files in the containing directory if not.
    if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
        not err_path_exists and os.access(err_dir, os.W_OK)
    ):
        err_handler = await hass.async_add_executor_job(
            _create_log_file, err_log_path, log_rotate_days
        )

            err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
            logger.addHandler(err_handler)
        err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))

            # Save the log file location for access by other components.
            hass.data[DATA_LOGGING] = err_log_path
        else:
            _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
        logger = logging.getLogger()
        logger.addHandler(err_handler)
        logger.setLevel(logging.INFO if verbose else logging.WARNING)

        # Save the log file location for access by other components.
        hass.data[DATA_LOGGING] = err_log_path
    else:
        _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)

    async_activate_log_queue_handler(hass)
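
Aside: the hunk above reorders a pre-flight check that decides whether a file handler may be attached at all. A minimal standalone sketch of that check, with a hypothetical path and simplified names (not the bootstrap code itself):

import logging
import os


def can_write_log(path: str) -> bool:
    """True if `path` is writable, or creatable in its directory."""
    if os.path.isfile(path):
        return os.access(path, os.W_OK)  # existing file: need write access
    return os.access(os.path.dirname(path) or ".", os.W_OK)  # new file: need a writable dir


log_path = "/tmp/home-assistant.log"  # hypothetical location
if can_write_log(log_path):
    logging.getLogger().addHandler(logging.FileHandler(log_path))
else:
    logging.getLogger(__name__).error("Unable to set up error log %s", log_path)
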
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/airos",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
  "requirements": ["airos==0.5.4"]
  "requirements": ["airos==0.5.3"]
}
@@ -23,10 +23,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
    }
)

URL_API_INTEGRATION = {
    "url": "https://dashboard.airthings.com/integrations/api-integration"
}


class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Airthings."""

@@ -41,7 +37,11 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
            return self.async_show_form(
                step_id="user",
                data_schema=STEP_USER_DATA_SCHEMA,
                description_placeholders=URL_API_INTEGRATION,
                description_placeholders={
                    "url": (
                        "https://dashboard.airthings.com/integrations/api-integration"
                    ),
                },
            )

        errors = {}

@@ -65,8 +65,5 @@
            return self.async_create_entry(title="Airthings", data=user_input)

        return self.async_show_form(
            step_id="user",
            data_schema=STEP_USER_DATA_SCHEMA,
            errors=errors,
            description_placeholders=URL_API_INTEGRATION,
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )
@@ -4,9 +4,9 @@
      "user": {
        "data": {
          "id": "ID",
          "secret": "Secret"
        },
        "description": "Login at {url} to find your credentials"
          "secret": "Secret",
          "description": "Login at {url} to find your credentials"
        }
      }
    },
    "error": {
@@ -171,7 +171,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
            return self.async_abort(reason="no_devices_found")

        titles = {
            address: get_name(discovery.device)
            address: discovery.device.name
            for (address, discovery) in self._discovered_devices.items()
        }
        return self.async_show_form(
@@ -114,8 +114,6 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
    ),
}

PARALLEL_UPDATES = 0


@callback
def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None:
@@ -6,9 +6,6 @@
        "description": "[%key:component::bluetooth::config::step::user::description%]",
        "data": {
          "address": "[%key:common::config_flow::data::device%]"
        },
        "data_description": {
          "address": "The Airthings devices discovered via Bluetooth."
        }
      },
      "bluetooth_confirm": {
@@ -2,14 +2,17 @@

from airtouch4pyapi import AirTouch

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady

from .coordinator import AirTouch4ConfigEntry, AirtouchDataUpdateCoordinator
from .coordinator import AirtouchDataUpdateCoordinator

PLATFORMS = [Platform.CLIMATE]

type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]


async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) -> bool:
    """Set up AirTouch4 from a config entry."""
@@ -19,7 +22,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) ->
    info = airtouch.GetAcs()
    if not info:
        raise ConfigEntryNotReady
    coordinator = AirtouchDataUpdateCoordinator(hass, entry, airtouch)
    coordinator = AirtouchDataUpdateCoordinator(hass, airtouch)
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator
@@ -2,34 +2,26 @@

import logging

from airtouch4pyapi import AirTouch
from airtouch4pyapi.airtouch import AirTouchStatus

from homeassistant.components.climate import SCAN_INTERVAL
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]


class AirtouchDataUpdateCoordinator(DataUpdateCoordinator):
    """Class to manage fetching Airtouch data."""

    def __init__(
        self, hass: HomeAssistant, entry: AirTouch4ConfigEntry, airtouch: AirTouch
    ) -> None:
    def __init__(self, hass, airtouch):
        """Initialize global Airtouch data updater."""
        self.airtouch = airtouch

        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,
            name=DOMAIN,
            update_interval=SCAN_INTERVAL,
        )
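
Aside: the coordinator hunk above is the move between an untyped coordinator and an entry-aware one. A hedged sketch of the newer shape, with placeholder names (MyCoordinator and my_domain are illustrative, not the integration's code):

from datetime import timedelta
import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

type MyConfigEntry = ConfigEntry["MyCoordinator"]  # runtime_data carries the coordinator


class MyCoordinator(DataUpdateCoordinator[dict]):
    """Sketch: a coordinator that knows its config entry."""

    def __init__(self, hass: HomeAssistant, entry: MyConfigEntry, client) -> None:
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,  # the parameter the hunk above adds and removes
            name="my_domain",
            update_interval=timedelta(minutes=1),
        )
        self.client = client
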
@@ -505,7 +505,7 @@ DEFAULT_DEVICE_ANALYTICS_CONFIG = DeviceAnalyticsModifications()
DEFAULT_ENTITY_ANALYTICS_CONFIG = EntityAnalyticsModifications()


async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
async def async_devices_payload(hass: HomeAssistant) -> dict:
    """Return detailed information about entities and devices."""
    dev_reg = dr.async_get(hass)
    ent_reg = er.async_get(hass)
@@ -513,8 +513,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
    integration_inputs: dict[str, tuple[list[str], list[str]]] = {}
    integration_configs: dict[str, AnalyticsModifications] = {}

    removed_devices: set[str] = set()

    # Get device list
    for device_entry in dev_reg.devices.values():
        if not device_entry.primary_config_entry:
@@ -527,10 +525,6 @@
        if config_entry is None:
            continue

        if device_entry.entry_type is dr.DeviceEntryType.SERVICE:
            removed_devices.add(device_entry.id)
            continue

        integration_domain = config_entry.domain

        integration_input = integration_inputs.setdefault(integration_domain, ([], []))
@@ -620,15 +614,15 @@
            device_config = integration_config.devices.get(device_id, device_config)

            if device_config.remove:
                removed_devices.add(device_id)
                continue

            device_entry = dev_reg.devices[device_id]

            device_id_mapping[device_id] = (integration_domain, len(devices_info))
            device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))

            devices_info.append(
                {
                    "entities": [],
                    "entry_type": device_entry.entry_type,
                    "has_configuration_url": device_entry.configuration_url is not None,
                    "hw_version": device_entry.hw_version,
@@ -637,7 +631,6 @@
                    "model_id": device_entry.model_id,
                    "sw_version": device_entry.sw_version,
                    "via_device": device_entry.via_device_id,
                    "entities": [],
                }
            )

@@ -676,7 +669,7 @@

            entity_entry = ent_reg.entities[entity_id]

            entity_state = hass.states.get(entity_id)
            entity_state = hass.states.get(entity_entry.entity_id)

            entity_info = {
                # LIMITATION: `assumed_state` can be overridden by users;
@@ -697,19 +690,15 @@
                "unit_of_measurement": entity_entry.unit_of_measurement,
            }

            if (device_id_ := entity_entry.device_id) is not None:
                if device_id_ in removed_devices:
                    # The device was removed, so we remove the entity too
                    continue

                if (
                    new_device_id := device_id_mapping.get(device_id_)
                ) is not None and (new_device_id[0] == integration_domain):
                    device_info = devices_info[new_device_id[1]]
                    device_info["entities"].append(entity_info)
                    continue

            entities_info.append(entity_info)
            if (
                ((device_id_ := entity_entry.device_id) is not None)
                and ((new_device_id := device_id_mapping.get(device_id_)) is not None)
                and (new_device_id[0] == integration_domain)
            ):
                device_info = devices_info[new_device_id[1]]
                device_info["entities"].append(entity_info)
            else:
                entities_info.append(entity_info)

    return {
        "version": "home-assistant:1",
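
Aside: the restructured condition above relies on assignment expressions staying bound across an `and` chain; a tiny generic illustration, unrelated to the integration itself:

mapping = {"a": 1}
key = "a"
if ((value := mapping.get(key)) is not None) and value > 0:
    # `value` was bound by the walrus operator inside the condition
    print(f"{key} -> {value}")
else:
    print("missing or non-positive")
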
@@ -2,7 +2,9 @@

from __future__ import annotations

from typing import Any
from typing import Any, TypeVar

T = TypeVar("T", dict[str, Any], list[Any], None)

TRANSLATION_MAP = {
    "wan_rx": "sensor_rx_bytes",
@@ -34,7 +36,7 @@ def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
    return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}


def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
def translate_to_legacy(raw: T) -> T:
    """Translate raw data to legacy format for dicts and lists."""

    if raw is None:
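
Aside: this hunk trades PEP 695 inline type parameters for a module-level constrained TypeVar; both spell the same constraint, as this illustrative sketch shows:

from typing import Any, TypeVar

# Pre-3.12 spelling: a constrained TypeVar declared once at module level.
T = TypeVar("T", dict[str, Any], list[Any], None)


def translate_old(raw: T) -> T:
    return raw


# Python 3.12+ spelling: the constraints ride on the function itself.
def translate_new[U: (dict[str, Any], list[Any], None)](raw: U) -> U:
    return raw
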
@@ -272,13 +272,6 @@ async def async_setup_entry(
    observations: list[ConfigType] = [
        dict(subentry.data) for subentry in config_entry.subentries.values()
    ]

    for observation in observations:
        if observation[CONF_PLATFORM] == CONF_TEMPLATE:
            observation[CONF_VALUE_TEMPLATE] = Template(
                observation[CONF_VALUE_TEMPLATE], hass
            )

    prior: float = config[CONF_PRIOR]
    probability_threshold: float = config[CONF_PROBABILITY_THRESHOLD]
    device_class: BinarySensorDeviceClass | None = config.get(CONF_DEVICE_CLASS)
@@ -51,6 +51,12 @@ from homeassistant.const import (
from homeassistant.core import Event, HomeAssistant, ServiceCall, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, issue_registry as ir
from homeassistant.helpers.deprecation import (
    DeprecatedConstantEnum,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_time_interval
@@ -112,6 +118,12 @@ ATTR_FILENAME: Final = "filename"
ATTR_MEDIA_PLAYER: Final = "media_player"
ATTR_FORMAT: Final = "format"

# These constants are deprecated as of Home Assistant 2024.10
# Please use the StreamType enum instead.
_DEPRECATED_STATE_RECORDING = DeprecatedConstantEnum(CameraState.RECORDING, "2025.10")
_DEPRECATED_STATE_STREAMING = DeprecatedConstantEnum(CameraState.STREAMING, "2025.10")
_DEPRECATED_STATE_IDLE = DeprecatedConstantEnum(CameraState.IDLE, "2025.10")


class CameraEntityFeature(IntFlag):
    """Supported features of the camera entity."""
@@ -1105,3 +1117,11 @@ async def async_handle_record_service(
        duration=service_call.data[CONF_DURATION],
        lookback=service_call.data[CONF_LOOKBACK],
    )


# These can be removed if no deprecated constant are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())
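
Aside: the deprecation shim restored above hangs off PEP 562 module-level `__getattr__`; a generic sketch of the mechanism (simplified, not the helper's real implementation):

import warnings

_DEPRECATED = {"STATE_IDLE": "idle"}  # old constant name -> replacement value


def __getattr__(name: str):
    # Only called when normal module attribute lookup fails (PEP 562).
    if name in _DEPRECATED:
        warnings.warn(
            f"{name} is deprecated, use CameraState instead",
            DeprecationWarning,
            stacklevel=2,
        )
        return _DEPRECATED[name]
    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
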
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "entity",
  "quality_scale": "internal",
  "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.10.1"]
  "requirements": ["hassil==3.2.0", "home-assistant-intents==2025.9.24"]
}
@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "quality_scale": "bronze",
  "requirements": ["pycync==0.4.1"]
  "requirements": ["pycync==0.4.0"]
}
@@ -126,7 +126,7 @@ class DevoloRemoteControl(DevoloDeviceEntity, BinarySensorEntity):
        self._attr_translation_key = "button"
        self._attr_translation_placeholders = {"key": str(key)}

    def sync_callback(self, message: tuple) -> None:
    def _sync(self, message: tuple) -> None:
        """Update the binary sensor state."""
        if (
            message[0] == self._remote_control_property.element_uid
@@ -48,6 +48,7 @@ class DevoloDeviceEntity(Entity):
        )

        self.subscriber: Subscriber | None = None
        self.sync_callback = self._sync

        self._value: float

@@ -68,7 +69,7 @@ class DevoloDeviceEntity(Entity):
            self._device_instance.uid, self.subscriber
        )

    def sync_callback(self, message: tuple) -> None:
    def _sync(self, message: tuple) -> None:
        """Update the state."""
        if message[0] == self._attr_unique_id:
            self._value = message[1]
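
Aside: `self.sync_callback = self._sync` keeps the old external entry point alive after the method rename; a tiny illustration of why a bound-method alias does that:

class Entity:
    def __init__(self) -> None:
        # Alias: publishers that call entity.sync_callback(msg) keep working
        # even though the method is now named _sync.
        self.sync_callback = self._sync

    def _sync(self, message: str) -> None:
        print("got", message)


Entity().sync_callback("hello")  # dispatches to _sync
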
@@ -185,7 +185,7 @@ class DevoloConsumptionEntity(DevoloMultiLevelDeviceEntity):
        """
        return f"{self._attr_unique_id}_{self._sensor_type}"

    def sync_callback(self, message: tuple) -> None:
    def _sync(self, message: tuple) -> None:
        """Update the consumption sensor state."""
        if message[0] == self._attr_unique_id:
            self._value = getattr(
@@ -13,3 +13,8 @@ class Subscriber:
        """Initiate the subscriber."""
        self.name = name
        self.callback = callback

    def update(self, message: str) -> None:
        """Trigger hass to update the device."""
        _LOGGER.debug('%s got message "%s"', self.name, message)
        self.callback(message)
@@ -64,7 +64,7 @@ class DevoloSwitch(DevoloDeviceEntity, SwitchEntity):
        """Switch off the device."""
        self._binary_switch_property.set(state=False)

    def sync_callback(self, message: tuple) -> None:
    def _sync(self, message: tuple) -> None:
        """Update the binary switch state and consumption."""
        if message[0].startswith("devolo.BinarySwitch"):
            self._attr_is_on = self._device_instance.binary_switch_property[
@@ -1,11 +0,0 @@
"""Analytics platform."""

from homeassistant.components.analytics import AnalyticsInput, AnalyticsModifications
from homeassistant.core import HomeAssistant


async def async_modify_analytics(
    hass: HomeAssistant, analytics_input: AnalyticsInput
) -> AnalyticsModifications:
    """Modify the analytics."""
    return AnalyticsModifications(remove=True)
@@ -2,7 +2,6 @@

from __future__ import annotations

from collections.abc import Mapping
import logging
from typing import Any

@@ -85,48 +84,6 @@ class FireflyConfigFlow(ConfigFlow, domain=DOMAIN):
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Perform reauth when Firefly III API authentication fails."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle reauth: ask for a new API key and validate."""
        errors: dict[str, str] = {}
        reauth_entry = self._get_reauth_entry()
        if user_input is not None:
            try:
                await _validate_input(
                    self.hass,
                    data={
                        **reauth_entry.data,
                        CONF_API_KEY: user_input[CONF_API_KEY],
                    },
                )
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except FireflyClientTimeout:
                errors["base"] = "timeout_connect"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                return self.async_update_reload_and_abort(
                    reauth_entry,
                    data_updates={CONF_API_KEY: user_input[CONF_API_KEY]},
                )

        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
            errors=errors,
        )


class CannotConnect(HomeAssistantError):
    """Error to indicate we cannot connect."""
@@ -18,7 +18,7 @@ from pyfirefly.models import Account, Bill, Budget, Category, Currency
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -72,7 +72,7 @@ class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]
        try:
            await self.firefly.get_about()
        except FireflyAuthenticationError as err:
            raise ConfigEntryAuthFailed(
            raise ConfigEntryError(
                translation_domain=DOMAIN,
                translation_key="invalid_auth",
                translation_placeholders={"error": repr(err)},
@@ -109,7 +109,7 @@ class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]
            budgets = await self.firefly.get_budgets()
            bills = await self.firefly.get_bills()
        except FireflyAuthenticationError as err:
            raise ConfigEntryAuthFailed(
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="invalid_auth",
                translation_placeholders={"error": repr(err)},
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/firefly_iii",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
  "requirements": ["pyfirefly==0.1.6"]
  "requirements": ["pyfirefly==0.1.5"]
}
@@ -100,6 +100,15 @@ class FireflyAccountEntity(FireflyBaseEntity, SensorEntity):
        """Return the state of the sensor."""
        return self._account.attributes.current_balance

    @property
    def extra_state_attributes(self) -> dict[str, str] | None:
        """Return extra state attributes for the account entity."""
        return {
            "account_role": self._account.attributes.account_role or "",
            "account_type": self._account.attributes.type or "",
            "current_balance": str(self._account.attributes.current_balance or ""),
        }


class FireflyCategoryEntity(FireflyBaseEntity, SensorEntity):
    """Entity for Firefly III category."""
@@ -13,15 +13,6 @@
        "verify_ssl": "Verify the SSL certificate of the Firefly instance"
      },
      "description": "You can create an API key in the Firefly UI. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
    },
    "reauth_confirm": {
      "data": {
        "api_key": "[%key:common::config_flow::data::api_key%]"
      },
      "data_description": {
        "api_key": "The new API access token for authenticating with Firefly III"
      },
      "description": "The access token for your Firefly III instance is invalid and needs to be updated. Go to **Options > Profile** and select the **OAuth** tab. Create a new personal access token and copy it (it will only display once)."
    }
  },
  "error": {
@@ -31,8 +22,7 @@
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
    }
  },
  "exceptions": {
@@ -452,10 +452,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

    hass.http.app.router.register_resource(IndexView(repo_path, hass))

    async_register_built_in_panel(hass, "light")
    async_register_built_in_panel(hass, "security")
    async_register_built_in_panel(hass, "climate")

    async_register_built_in_panel(hass, "profile")

    async_register_built_in_panel(
@@ -620,13 +620,6 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
    def create_generate_content_config(self) -> GenerateContentConfig:
        """Create the GenerateContentConfig for the LLM."""
        options = self.subentry.data
        model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
        thinking_config: ThinkingConfig | None = None
        if model.startswith("models/gemini-2.5") and not model.endswith(
            ("tts", "image", "image-preview")
        ):
            thinking_config = ThinkingConfig(include_thoughts=True)

        return GenerateContentConfig(
            temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE),
            top_k=options.get(CONF_TOP_K, RECOMMENDED_TOP_K),
@@ -659,7 +652,7 @@ class GoogleGenerativeAILLMBaseEntity(Entity):
                    ),
                ),
            ],
            thinking_config=thinking_config,
            thinking_config=ThinkingConfig(include_thoughts=True),
        )
@@ -1,18 +1,14 @@
"""The Growatt server PV inverter sensor integration."""

from collections.abc import Mapping
import logging

import growattServer

from homeassistant.const import CONF_PASSWORD, CONF_TOKEN, CONF_URL, CONF_USERNAME
from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
from homeassistant.exceptions import ConfigEntryError

from .const import (
    AUTH_API_TOKEN,
    AUTH_PASSWORD,
    CONF_AUTH_TYPE,
    CONF_PLANT_ID,
    DEFAULT_PLANT_ID,
    DEFAULT_URL,
@@ -23,110 +19,36 @@ from .const import (
from .coordinator import GrowattConfigEntry, GrowattCoordinator
from .models import GrowattRuntimeData

_LOGGER = logging.getLogger(__name__)


def get_device_list_classic(
def get_device_list(
    api: growattServer.GrowattApi, config: Mapping[str, str]
) -> tuple[list[dict[str, str]], str]:
    """Retrieve the device list for the selected plant."""
    plant_id = config[CONF_PLANT_ID]

    # Log in to api and fetch first plant if no plant id is defined.
    try:
        login_response = api.login(config[CONF_USERNAME], config[CONF_PASSWORD])
        # DEBUG: Log the actual response structure
    except Exception as ex:
        _LOGGER.error("DEBUG - Login response: %s", login_response)
        raise ConfigEntryError(
            f"Error communicating with Growatt API during login: {ex}"
        ) from ex

    if not login_response.get("success"):
        msg = login_response.get("msg", "Unknown error")
        _LOGGER.debug("Growatt login failed: %s", msg)
        if msg == LOGIN_INVALID_AUTH_CODE:
            raise ConfigEntryAuthFailed("Username, Password or URL may be incorrect!")
        raise ConfigEntryError(f"Growatt login failed: {msg}")

    login_response = api.login(config[CONF_USERNAME], config[CONF_PASSWORD])
    if (
        not login_response["success"]
        and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
    ):
        raise ConfigEntryError("Username, Password or URL may be incorrect!")
    user_id = login_response["user"]["id"]

    if plant_id == DEFAULT_PLANT_ID:
        try:
            plant_info = api.plant_list(user_id)
        except Exception as ex:
            raise ConfigEntryError(
                f"Error communicating with Growatt API during plant list: {ex}"
            ) from ex
        if not plant_info or "data" not in plant_info or not plant_info["data"]:
            raise ConfigEntryError("No plants found for this account.")
        plant_info = api.plant_list(user_id)
        plant_id = plant_info["data"][0]["plantId"]

    # Get a list of devices for specified plant to add sensors for.
    try:
        devices = api.device_list(plant_id)
    except Exception as ex:
        raise ConfigEntryError(
            f"Error communicating with Growatt API during device list: {ex}"
        ) from ex

    devices = api.device_list(plant_id)
    return devices, plant_id


def get_device_list_v1(
    api, config: Mapping[str, str]
) -> tuple[list[dict[str, str]], str]:
    """Device list logic for Open API V1.

    Note: Plant selection (including auto-selection if only one plant exists)
    is handled in the config flow before this function is called. This function
    only fetches devices for the already-selected plant_id.
    """
    plant_id = config[CONF_PLANT_ID]
    try:
        devices_dict = api.device_list(plant_id)
    except growattServer.GrowattV1ApiError as e:
        raise ConfigEntryError(
            f"API error during device list: {e} (Code: {getattr(e, 'error_code', None)}, Message: {getattr(e, 'error_msg', None)})"
        ) from e
    devices = devices_dict.get("devices", [])
    # Only MIN device (type = 7) support implemented in current V1 API
    supported_devices = [
        {
            "deviceSn": device.get("device_sn", ""),
            "deviceType": "min",
        }
        for device in devices
        if device.get("type") == 7
    ]

    for device in devices:
        if device.get("type") != 7:
            _LOGGER.warning(
                "Device %s with type %s not supported in Open API V1, skipping",
                device.get("device_sn", ""),
                device.get("type"),
            )
    return supported_devices, plant_id


def get_device_list(
    api, config: Mapping[str, str], api_version: str
) -> tuple[list[dict[str, str]], str]:
    """Dispatch to correct device list logic based on API version."""
    if api_version == "v1":
        return get_device_list_v1(api, config)
    if api_version == "classic":
        return get_device_list_classic(api, config)
    raise ConfigEntryError(f"Unknown API version: {api_version}")


async def async_setup_entry(
    hass: HomeAssistant, config_entry: GrowattConfigEntry
) -> bool:
    """Set up Growatt from a config entry."""

    config = config_entry.data
    username = config[CONF_USERNAME]
    url = config.get(CONF_URL, DEFAULT_URL)

    # If the URL has been deprecated then change to the default instead
@@ -136,24 +58,11 @@ async def async_setup_entry(
        new_data[CONF_URL] = url
        hass.config_entries.async_update_entry(config_entry, data=new_data)

    # Determine API version
    if config.get(CONF_AUTH_TYPE) == AUTH_API_TOKEN:
        api_version = "v1"
        token = config[CONF_TOKEN]
        api = growattServer.OpenApiV1(token=token)
    elif config.get(CONF_AUTH_TYPE) == AUTH_PASSWORD:
        api_version = "classic"
        username = config[CONF_USERNAME]
        api = growattServer.GrowattApi(
            add_random_user_id=True, agent_identifier=username
        )
        api.server_url = url
    else:
        raise ConfigEntryError("Unknown authentication type in config entry.")
    # Initialise the library with the username & a random id each time it is started
    api = growattServer.GrowattApi(add_random_user_id=True, agent_identifier=username)
    api.server_url = url

    devices, plant_id = await hass.async_add_executor_job(
        get_device_list, api, config, api_version
    )
    devices, plant_id = await hass.async_add_executor_job(get_device_list, api, config)

    # Create a coordinator for the total sensors
    total_coordinator = GrowattCoordinator(
@@ -166,7 +75,7 @@ async def async_setup_entry(
            hass, config_entry, device["deviceSn"], device["deviceType"], plant_id
        )
        for device in devices
        if device["deviceType"] in ["inverter", "tlx", "storage", "mix", "min"]
        if device["deviceType"] in ["inverter", "tlx", "storage", "mix"]
    }

    # Perform the first refresh for the total coordinator
@@ -1,38 +1,22 @@
"""Config flow for growatt server integration."""

import logging
from typing import Any

import growattServer
import requests
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import (
    CONF_NAME,
    CONF_PASSWORD,
    CONF_TOKEN,
    CONF_URL,
    CONF_USERNAME,
)
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME
from homeassistant.core import callback

from .const import (
    ABORT_NO_PLANTS,
    AUTH_API_TOKEN,
    AUTH_PASSWORD,
    CONF_AUTH_TYPE,
    CONF_PLANT_ID,
    DEFAULT_URL,
    DOMAIN,
    ERROR_CANNOT_CONNECT,
    ERROR_INVALID_AUTH,
    LOGIN_INVALID_AUTH_CODE,
    SERVER_URLS,
)

_LOGGER = logging.getLogger(__name__)


class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
    """Config flow class."""
@@ -43,98 +27,12 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):

    def __init__(self) -> None:
        """Initialise growatt server flow."""
        self.user_id: str | None = None
        self.user_id = None
        self.data: dict[str, Any] = {}
        self.auth_type: str | None = None
        self.plants: list[dict[str, Any]] = []

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the start of the config flow."""
        return self.async_show_menu(
            step_id="user",
            menu_options=["password_auth", "token_auth"],
        )

    async def async_step_password_auth(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle username/password authentication."""
        if user_input is None:
            return self._async_show_password_form()

        self.auth_type = AUTH_PASSWORD

        # Traditional username/password authentication
        self.api = growattServer.GrowattApi(
            add_random_user_id=True, agent_identifier=user_input[CONF_USERNAME]
        )
        self.api.server_url = user_input[CONF_URL]

        try:
            login_response = await self.hass.async_add_executor_job(
                self.api.login, user_input[CONF_USERNAME], user_input[CONF_PASSWORD]
            )
        except requests.exceptions.RequestException as ex:
            _LOGGER.error("Network error during Growatt API login: %s", ex)
            return self._async_show_password_form({"base": ERROR_CANNOT_CONNECT})
        except (ValueError, KeyError, TypeError, AttributeError) as ex:
            _LOGGER.error("Invalid response format during login: %s", ex)
            return self._async_show_password_form({"base": ERROR_CANNOT_CONNECT})

        if (
            not login_response["success"]
            and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
        ):
            return self._async_show_password_form({"base": ERROR_INVALID_AUTH})

        self.user_id = login_response["user"]["id"]
        self.data = user_input
        self.data[CONF_AUTH_TYPE] = self.auth_type
        return await self.async_step_plant()

    async def async_step_token_auth(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle API token authentication."""
        if user_input is None:
            return self._async_show_token_form()

        self.auth_type = AUTH_API_TOKEN

        # Using token authentication
        token = user_input[CONF_TOKEN]
        self.api = growattServer.OpenApiV1(token=token)

        # Verify token by fetching plant list
        try:
            plant_response = await self.hass.async_add_executor_job(self.api.plant_list)
            self.plants = plant_response.get("plants", [])
        except requests.exceptions.RequestException as ex:
            _LOGGER.error("Network error during Growatt V1 API plant list: %s", ex)
            return self._async_show_token_form({"base": ERROR_CANNOT_CONNECT})
        except growattServer.GrowattV1ApiError as e:
            _LOGGER.error(
                "Growatt V1 API error: %s (Code: %s)",
                e.error_msg or str(e),
                getattr(e, "error_code", None),
            )
            return self._async_show_token_form({"base": ERROR_INVALID_AUTH})
        except (ValueError, KeyError, TypeError, AttributeError) as ex:
            _LOGGER.error(
                "Invalid response format during Growatt V1 API plant list: %s", ex
            )
            return self._async_show_token_form({"base": ERROR_CANNOT_CONNECT})
        self.data = user_input
        self.data[CONF_AUTH_TYPE] = self.auth_type
        return await self.async_step_plant()

    @callback
    def _async_show_password_form(
        self, errors: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Show the username/password form to the user."""
    def _async_show_user_form(self, errors=None):
        """Show the form to the user."""
        data_schema = vol.Schema(
            {
                vol.Required(CONF_USERNAME): str,
@@ -144,87 +42,58 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN):
        )

        return self.async_show_form(
            step_id="password_auth", data_schema=data_schema, errors=errors
            step_id="user", data_schema=data_schema, errors=errors
        )

    @callback
    def _async_show_token_form(
        self, errors: dict[str, Any] | None = None
    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Show the API token form to the user."""
        data_schema = vol.Schema(
            {
                vol.Required(CONF_TOKEN): str,
            }
        """Handle the start of the config flow."""
        if not user_input:
            return self._async_show_user_form()

        # Initialise the library with the username & a random id each time it is started
        self.api = growattServer.GrowattApi(
            add_random_user_id=True, agent_identifier=user_input[CONF_USERNAME]
        )
        self.api.server_url = user_input[CONF_URL]
        login_response = await self.hass.async_add_executor_job(
            self.api.login, user_input[CONF_USERNAME], user_input[CONF_PASSWORD]
        )

        return self.async_show_form(
            step_id="token_auth",
            data_schema=data_schema,
            errors=errors,
        )
        if (
            not login_response["success"]
            and login_response["msg"] == LOGIN_INVALID_AUTH_CODE
        ):
            return self._async_show_user_form({"base": "invalid_auth"})
        self.user_id = login_response["user"]["id"]

        self.data = user_input
        return await self.async_step_plant()

    async def async_step_plant(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle adding a "plant" to Home Assistant."""
        if self.auth_type == AUTH_API_TOKEN:
            # Using V1 API with token
            if not self.plants:
                return self.async_abort(reason=ABORT_NO_PLANTS)
        plant_info = await self.hass.async_add_executor_job(
            self.api.plant_list, self.user_id
        )

            # Create dictionary of plant_id -> name
            plant_dict = {
                str(plant["plant_id"]): plant.get("name", "Unknown Plant")
                for plant in self.plants
            }
        if not plant_info["data"]:
            return self.async_abort(reason="no_plants")

            if user_input is None and len(plant_dict) > 1:
                data_schema = vol.Schema(
                    {vol.Required(CONF_PLANT_ID): vol.In(plant_dict)}
                )
                return self.async_show_form(step_id="plant", data_schema=data_schema)
        plants = {plant["plantId"]: plant["plantName"] for plant in plant_info["data"]}

            if user_input is None:
                # Single plant => mark it as selected
                user_input = {CONF_PLANT_ID: list(plant_dict.keys())[0]}
        if user_input is None and len(plant_info["data"]) > 1:
            data_schema = vol.Schema({vol.Required(CONF_PLANT_ID): vol.In(plants)})

            user_input[CONF_NAME] = plant_dict[user_input[CONF_PLANT_ID]]
            return self.async_show_form(step_id="plant", data_schema=data_schema)

        else:
            # Traditional API
            try:
                plant_info = await self.hass.async_add_executor_job(
                    self.api.plant_list, self.user_id
                )
            except requests.exceptions.RequestException as ex:
                _LOGGER.error("Network error during Growatt API plant list: %s", ex)
                return self.async_abort(reason=ERROR_CANNOT_CONNECT)

            # Access plant_info["data"] - validate response structure
            if not isinstance(plant_info, dict) or "data" not in plant_info:
                _LOGGER.error(
                    "Invalid response format during plant list: missing 'data' key"
                )
                return self.async_abort(reason=ERROR_CANNOT_CONNECT)

            plant_data = plant_info["data"]

            if not plant_data:
                return self.async_abort(reason=ABORT_NO_PLANTS)

            plants = {plant["plantId"]: plant["plantName"] for plant in plant_data}

            if user_input is None and len(plant_data) > 1:
                data_schema = vol.Schema({vol.Required(CONF_PLANT_ID): vol.In(plants)})
                return self.async_show_form(step_id="plant", data_schema=data_schema)

            if user_input is None:
                # single plant => mark it as selected
                user_input = {CONF_PLANT_ID: plant_data[0]["plantId"]}

            user_input[CONF_NAME] = plants[user_input[CONF_PLANT_ID]]
        if user_input is None:
            # single plant => mark it as selected
            user_input = {CONF_PLANT_ID: plant_info["data"][0]["plantId"]}

        user_input[CONF_NAME] = plants[user_input[CONF_PLANT_ID]]
        await self.async_set_unique_id(user_input[CONF_PLANT_ID])
        self._abort_if_unique_id_configured()
        self.data.update(user_input)
@@ -4,16 +4,6 @@ from homeassistant.const import Platform

CONF_PLANT_ID = "plant_id"


# API key support
CONF_API_KEY = "api_key"

# Auth types for config flow
AUTH_PASSWORD = "password"
AUTH_API_TOKEN = "api_token"
CONF_AUTH_TYPE = "auth_type"
DEFAULT_AUTH_TYPE = AUTH_PASSWORD

DEFAULT_PLANT_ID = "0"

DEFAULT_NAME = "Growatt"
@@ -39,10 +29,3 @@ DOMAIN = "growatt_server"
PLATFORMS = [Platform.SENSOR]

LOGIN_INVALID_AUTH_CODE = "502"

# Config flow error types (also used as abort reasons)
ERROR_CANNOT_CONNECT = "cannot_connect"  # Used for both form errors and aborts
ERROR_INVALID_AUTH = "invalid_auth"

# Config flow abort reasons
ABORT_NO_PLANTS = "no_plants"
@@ -1,7 +1,5 @@
"""Coordinator module for managing Growatt data fetching."""

from __future__ import annotations

import datetime
import json
import logging
@@ -40,30 +38,22 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
        plant_id: str,
    ) -> None:
        """Initialize the coordinator."""
        self.api_version = (
            "v1" if config_entry.data.get("auth_type") == "api_token" else "classic"
        self.username = config_entry.data[CONF_USERNAME]
        self.password = config_entry.data[CONF_PASSWORD]
        self.url = config_entry.data.get(CONF_URL, DEFAULT_URL)
        self.api = growattServer.GrowattApi(
            add_random_user_id=True, agent_identifier=self.username
        )

        # Set server URL
        self.api.server_url = self.url

        self.device_id = device_id
        self.device_type = device_type
        self.plant_id = plant_id
        self.previous_values: dict[str, Any] = {}

        if self.api_version == "v1":
            self.username = None
            self.password = None
            self.url = config_entry.data.get(CONF_URL, DEFAULT_URL)
            self.token = config_entry.data["token"]
            self.api = growattServer.OpenApiV1(token=self.token)
        elif self.api_version == "classic":
            self.username = config_entry.data.get(CONF_USERNAME)
            self.password = config_entry.data[CONF_PASSWORD]
            self.url = config_entry.data.get(CONF_URL, DEFAULT_URL)
            self.api = growattServer.GrowattApi(
                add_random_user_id=True, agent_identifier=self.username
            )
            self.api.server_url = self.url
        else:
            raise ValueError(f"Unknown API version: {self.api_version}")
        # Initialize previous_values to store historical data
        self.previous_values: dict[str, Any] = {}

        super().__init__(
            hass,
@@ -77,54 +67,21 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
        """Update data via library synchronously."""
        _LOGGER.debug("Updating data for %s (%s)", self.device_id, self.device_type)

        # login only required for classic API
        if self.api_version == "classic":
            self.api.login(self.username, self.password)
        # Login in to the Growatt server
        self.api.login(self.username, self.password)

        if self.device_type == "total":
            if self.api_version == "v1":
                # The V1 Plant APIs do not provide the same information as the classic plant_info() API
                # More specifically:
                # 1. There is no monetary information to be found, so today and lifetime money is not available
                # 2. There is no nominal power, this is provided by inverter min_energy()
                # This means, for the total coordinator we can only fetch and map the following:
                # todayEnergy -> today_energy
                # totalEnergy -> total_energy
                # invTodayPpv -> current_power
                total_info = self.api.plant_energy_overview(self.plant_id)
                total_info["todayEnergy"] = total_info["today_energy"]
                total_info["totalEnergy"] = total_info["total_energy"]
                total_info["invTodayPpv"] = total_info["current_power"]
            else:
                # Classic API: use plant_info as before
                total_info = self.api.plant_info(self.device_id)
                del total_info["deviceList"]
                plant_money_text, currency = total_info["plantMoneyText"].split("/")
                total_info["plantMoneyText"] = plant_money_text
                total_info["currency"] = currency
            _LOGGER.debug("Total info for plant %s: %r", self.plant_id, total_info)
            total_info = self.api.plant_info(self.device_id)
            del total_info["deviceList"]
            plant_money_text, currency = total_info["plantMoneyText"].split("/")
            total_info["plantMoneyText"] = plant_money_text
            total_info["currency"] = currency
            self.data = total_info
        elif self.device_type == "inverter":
            self.data = self.api.inverter_detail(self.device_id)
        elif self.device_type == "min":
            # Open API V1: min device
            try:
                min_details = self.api.min_detail(self.device_id)
                min_settings = self.api.min_settings(self.device_id)
                min_energy = self.api.min_energy(self.device_id)
            except growattServer.GrowattV1ApiError as err:
                _LOGGER.error(
                    "Error fetching min device data for %s: %s", self.device_id, err
                )
                raise UpdateFailed(f"Error fetching min device data: {err}") from err

            min_info = {**min_details, **min_settings, **min_energy}
            self.data = min_info
            _LOGGER.debug("min_info for device %s: %r", self.device_id, min_info)
        elif self.device_type == "tlx":
            tlx_info = self.api.tlx_detail(self.device_id)
            self.data = tlx_info["data"]
            _LOGGER.debug("tlx_info for device %s: %r", self.device_id, tlx_info)
        elif self.device_type == "storage":
            storage_info_detail = self.api.storage_params(self.device_id)
            storage_energy_overview = self.api.storage_energy_overview(
@@ -188,7 +145,7 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
        return self.data.get("currency")

    def get_data(
        self, entity_description: GrowattSensorEntityDescription
        self, entity_description: "GrowattSensorEntityDescription"
    ) -> str | int | float | None:
        """Get the data."""
        variable = entity_description.api_key
@@ -51,7 +51,7 @@ async def async_setup_entry(
    sensor_descriptions: list = []
    if device_coordinator.device_type == "inverter":
        sensor_descriptions = list(INVERTER_SENSOR_TYPES)
    elif device_coordinator.device_type in ("tlx", "min"):
    elif device_coordinator.device_type == "tlx":
        sensor_descriptions = list(TLX_SENSOR_TYPES)
    elif device_coordinator.device_type == "storage":
        sensor_descriptions = list(STORAGE_SENSOR_TYPES)
@@ -2,42 +2,26 @@
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "no_plants": "No plants have been found on this account"
    },
    "error": {
      "invalid_auth": "Authentication failed. Please check your credentials and try again.",
      "cannot_connect": "Cannot connect to Growatt servers. Please check your internet connection and try again."
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
    },
    "step": {
      "user": {
        "title": "Choose authentication method",
        "description": "Note: API Token authentication is currently only supported for MIN/TLX devices. For other device types, please use Username & Password authentication.",
        "menu_options": {
          "password_auth": "Username & Password",
          "token_auth": "API Token (MIN/TLX only)"
        }
      },
      "password_auth": {
        "title": "Enter your Growatt login credentials",
        "data": {
          "username": "[%key:common::config_flow::data::username%]",
          "password": "[%key:common::config_flow::data::password%]",
          "url": "[%key:common::config_flow::data::url%]"
        }
      },
      "token_auth": {
        "title": "Enter your API token",
        "description": "Token authentication is only supported for MIN/TLX devices. For other device types, please use username/password authentication.",
        "data": {
          "token": "API Token"
        }
      },
      "plant": {
        "data": {
          "plant_id": "Plant"
        },
        "title": "Select your plant"
      },
      "user": {
        "data": {
          "name": "[%key:common::config_flow::data::name%]",
          "password": "[%key:common::config_flow::data::password%]",
          "username": "[%key:common::config_flow::data::username%]",
          "url": "[%key:common::config_flow::data::url%]"
        },
        "title": "Enter your Growatt information"
      }
    }
  },
@@ -174,9 +174,6 @@
    },
    "collected_items": {
      "default": "mdi:sack"
    },
    "last_checkin": {
      "default": "mdi:login-variant"
    }
  },
  "switch": {
@@ -4,7 +4,6 @@ from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from enum import StrEnum
import logging
from typing import Any
@@ -54,7 +53,7 @@ PARALLEL_UPDATES = 1
class HabiticaSensorEntityDescription(SensorEntityDescription):
    """Habitica Sensor Description."""

    value_fn: Callable[[UserData, ContentData], StateType | datetime]
    value_fn: Callable[[UserData, ContentData], StateType]
    attributes_fn: Callable[[UserData, ContentData], dict[str, Any] | None] | None = (
        None
    )
@@ -115,7 +114,6 @@ class HabiticaSensorEntity(StrEnum):
    COLLECTED_ITEMS = "collected_items"
    BOSS_RAGE = "boss_rage"
    BOSS_RAGE_LIMIT = "boss_rage_limit"
    LAST_CHECKIN = "last_checkin"


SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
@@ -286,16 +284,6 @@ SENSOR_DESCRIPTIONS: tuple[HabiticaSensorEntityDescription, ...] = (
        translation_key=HabiticaSensorEntity.PENDING_QUEST_ITEMS,
        value_fn=pending_quest_items,
    ),
    HabiticaSensorEntityDescription(
        key=HabiticaSensorEntity.LAST_CHECKIN,
        translation_key=HabiticaSensorEntity.LAST_CHECKIN,
        value_fn=(
            lambda user, _: dt_util.as_local(last)
            if (last := user.auth.timestamps.loggedin)
            else None
        ),
        device_class=SensorDeviceClass.TIMESTAMP,
    ),
)


@@ -411,7 +399,7 @@ class HabiticaSensor(HabiticaBase, SensorEntity):
    entity_description: HabiticaSensorEntityDescription

    @property
    def native_value(self) -> StateType | datetime:
    def native_value(self) -> StateType:
        """Return the state of the device."""

        return self.entity_description.value_fn(
@@ -454,7 +442,7 @@ class HabiticaPartySensor(HabiticaPartyBase, SensorEntity):
    entity_description: HabiticaPartySensorEntityDescription

    @property
    def native_value(self) -> StateType | datetime:
    def native_value(self) -> StateType:
        """Return the state of the device."""

        return self.entity_description.value_fn(
@@ -290,9 +290,6 @@
        }
      }
    },
    "last_checkin": {
      "name": "Last check-in"
    },
    "health": {
      "name": "Health",
      "unit_of_measurement": "[%key:component::habitica::common::unit_health_points%]"
@@ -6,6 +6,6 @@
  "documentation": "https://www.home-assistant.io/integrations/hassio",
  "iot_class": "local_polling",
  "quality_scale": "internal",
  "requirements": ["aiohasupervisor==0.3.3"],
  "requirements": ["aiohasupervisor==0.3.3b0"],
  "single_config_entry": true
}
@@ -10,7 +10,6 @@ from homeassistant.components.homeassistant_hardware import firmware_config_flow
from homeassistant.components.homeassistant_hardware.util import (
    ApplicationType,
    FirmwareInfo,
    ResetTarget,
)
from homeassistant.config_entries import (
    ConfigEntry,
@@ -67,7 +66,6 @@ class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
    """Mixin for Home Assistant Connect ZBT-2 firmware methods."""

    context: ConfigFlowContext
    BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR]

    async def async_step_install_zigbee_firmware(
        self, user_input: dict[str, Any] | None = None
@@ -16,7 +16,6 @@ from homeassistant.components.homeassistant_hardware.update import (
from homeassistant.components.homeassistant_hardware.util import (
    ApplicationType,
    FirmwareInfo,
    ResetTarget,
)
from homeassistant.components.update import UpdateDeviceClass
from homeassistant.config_entries import ConfigEntry
@@ -157,7 +156,7 @@ async def async_setup_entry(
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
    """Connect ZBT-2 firmware update entity."""

    bootloader_reset_methods = [ResetTarget.RTS_DTR]
    bootloader_reset_type = None

    def __init__(
        self,
@@ -39,7 +39,6 @@ from .util import (
    FirmwareInfo,
    OwningAddon,
    OwningIntegration,
    ResetTarget,
    async_flash_silabs_firmware,
    get_otbr_addon_manager,
    guess_firmware_info,
@@ -80,8 +79,6 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
    """Base flow to install firmware."""

    ZIGBEE_BAUDRATE = 115200  # Default, subclasses may override
    BOOTLOADER_RESET_METHODS: list[ResetTarget] = []  # Default, subclasses may override

    _picked_firmware_type: PickedFirmwareType
    _zigbee_flow_strategy: ZigbeeFlowStrategy = ZigbeeFlowStrategy.RECOMMENDED

@@ -277,7 +274,7 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
            device=self._device,
            fw_data=fw_data,
            expected_installed_firmware_type=expected_installed_firmware_type,
            bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
            bootloader_reset_type=None,
            progress_callback=lambda offset, total: self.async_update_progress(
                offset / total
            ),

@@ -6,7 +6,7 @@
  "documentation": "https://www.home-assistant.io/integrations/homeassistant_hardware",
  "integration_type": "system",
  "requirements": [
    "universal-silabs-flasher==0.0.35",
    "universal-silabs-flasher==0.0.32",
    "ha-silabs-firmware-client==0.2.0"
  ]
}

@@ -22,12 +22,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import FirmwareUpdateCoordinator
from .helpers import async_register_firmware_info_callback
from .util import (
    ApplicationType,
    FirmwareInfo,
    ResetTarget,
    async_flash_silabs_firmware,
)
from .util import ApplicationType, FirmwareInfo, async_flash_silabs_firmware

_LOGGER = logging.getLogger(__name__)

@@ -86,7 +81,7 @@ class BaseFirmwareUpdateEntity(

    # Subclasses provide the mapping between firmware types and entity descriptions
    entity_description: FirmwareUpdateEntityDescription
    bootloader_reset_methods: list[ResetTarget] = []
    bootloader_reset_type: str | None = None

    _attr_supported_features = (
        UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS
@@ -273,7 +268,7 @@ class BaseFirmwareUpdateEntity(
                device=self._current_device,
                fw_data=fw_data,
                expected_installed_firmware_type=self.entity_description.expected_firmware_type,
                bootloader_reset_methods=self.bootloader_reset_methods,
                bootloader_reset_type=self.bootloader_reset_type,
                progress_callback=self._update_progress,
            )
        finally:

@@ -4,16 +4,13 @@ from __future__ import annotations

import asyncio
from collections import defaultdict
from collections.abc import AsyncIterator, Callable, Iterable, Sequence
from collections.abc import AsyncIterator, Callable, Iterable
from contextlib import AsyncExitStack, asynccontextmanager
from dataclasses import dataclass
from enum import StrEnum
import logging

from universal_silabs_flasher.const import (
    ApplicationType as FlasherApplicationType,
    ResetTarget as FlasherResetTarget,
)
from universal_silabs_flasher.const import ApplicationType as FlasherApplicationType
from universal_silabs_flasher.firmware import parse_firmware_image
from universal_silabs_flasher.flasher import Flasher

@@ -62,18 +59,6 @@ class ApplicationType(StrEnum):
        return FlasherApplicationType(self.value)


class ResetTarget(StrEnum):
    """Methods to reset a device into bootloader mode."""

    RTS_DTR = "rts_dtr"
    BAUDRATE = "baudrate"
    YELLOW = "yellow"

    def as_flasher_reset_target(self) -> FlasherResetTarget:
        """Convert the reset target enum into one compatible with USF."""
        return FlasherResetTarget(self.value)


@singleton(OTBR_ADDON_MANAGER_DATA)
@callback
def get_otbr_addon_manager(hass: HomeAssistant) -> WaitingAddonManager:
@@ -357,7 +342,7 @@ async def async_flash_silabs_firmware(
    device: str,
    fw_data: bytes,
    expected_installed_firmware_type: ApplicationType,
    bootloader_reset_methods: Sequence[ResetTarget] = (),
    bootloader_reset_type: str | None = None,
    progress_callback: Callable[[int, int], None] | None = None,
) -> FirmwareInfo:
    """Flash firmware to the SiLabs device."""
@@ -374,9 +359,7 @@
            ApplicationType.SPINEL.as_flasher_application_type(),
            ApplicationType.CPC.as_flasher_application_type(),
        ),
        bootloader_reset=tuple(
            m.as_flasher_reset_target() for m in bootloader_reset_methods
        ),
        bootloader_reset=bootloader_reset_type,
    )

    async with AsyncExitStack() as stack:

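Note: the removed `ResetTarget` enum mirrors the flasher library's reset targets so that integration code never passes `universal_silabs_flasher` constants around directly; the generator expression in `async_flash_silabs_firmware` converts at the library boundary. A reduced sketch of that mirroring pattern (the stand-in flasher enum below is illustrative):

from enum import StrEnum


class FlasherResetTarget(StrEnum):  # stand-in for the library enum, illustration only
    RTS_DTR = "rts_dtr"
    YELLOW = "yellow"


class ResetTarget(StrEnum):
    """Integration-side mirror, converted at the library boundary."""

    RTS_DTR = "rts_dtr"
    YELLOW = "yellow"

    def as_flasher_reset_target(self) -> FlasherResetTarget:
        # StrEnum values match 1:1, so conversion is a plain value lookup.
        return FlasherResetTarget(self.value)


bootloader_reset = tuple(m.as_flasher_reset_target() for m in [ResetTarget.RTS_DTR])
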
@@ -168,8 +168,7 @@ async def async_setup_entry(
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
    """SkyConnect firmware update entity."""

    # The ZBT-1 does not have a hardware bootloader trigger
    bootloader_reset_methods = []
    bootloader_reset_type = None

    def __init__(
        self,

@@ -27,7 +27,6 @@ from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon
from homeassistant.components.homeassistant_hardware.util import (
    ApplicationType,
    FirmwareInfo,
    ResetTarget,
    probe_silabs_firmware_info,
)
from homeassistant.config_entries import (
@@ -84,8 +83,6 @@ else:
class YellowFirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
    """Mixin for Home Assistant Yellow firmware methods."""

    BOOTLOADER_RESET_METHODS = [ResetTarget.YELLOW]

    async def async_step_install_zigbee_firmware(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:

@@ -16,7 +16,6 @@ from homeassistant.components.homeassistant_hardware.update import (
from homeassistant.components.homeassistant_hardware.util import (
    ApplicationType,
    FirmwareInfo,
    ResetTarget,
)
from homeassistant.components.update import UpdateDeviceClass
from homeassistant.config_entries import ConfigEntry
@@ -174,7 +173,7 @@ async def async_setup_entry(
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
    """Yellow firmware update entity."""

    bootloader_reset_methods = [ResetTarget.YELLOW]  # Triggers a GPIO reset
    bootloader_reset_type = "yellow"  # Triggers a GPIO reset

    def __init__(
        self,

@@ -14,6 +14,6 @@
  "documentation": "https://www.home-assistant.io/integrations/homekit_controller",
  "iot_class": "local_push",
  "loggers": ["aiohomekit", "commentjson"],
  "requirements": ["aiohomekit==3.2.19"],
  "requirements": ["aiohomekit==3.2.18"],
  "zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
}

@@ -11,7 +11,7 @@ from homeassistant.core import HomeAssistant
from .const import CONF_DEVICE_DATA, CONF_DEVICE_TYPE
from .coordinator import INKBIRDActiveBluetoothProcessorCoordinator

type INKBIRDConfigEntry = ConfigEntry[INKBIRDActiveBluetoothProcessorCoordinator]
INKBIRDConfigEntry = ConfigEntry[INKBIRDActiveBluetoothProcessorCoordinator]

PLATFORMS: list[Platform] = [Platform.SENSOR]

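Note: the inkbird hunk swaps a PEP 695 `type` statement for a plain assignment alias. Both spell the same alias, but the `type` form (Python 3.12+) is evaluated lazily and produces a `TypeAliasType`, while the assignment is an ordinary eagerly evaluated module attribute. A minimal sketch:

# PEP 695 (Python 3.12+): lazily evaluated, repr's as a TypeAliasType.
type UserMap = dict[str, int]

# Plain assignment alias: just a module-level variable bound at import time.
UserMapOld = dict[str, int]

assert UserMap.__value__ == UserMapOld  # the underlying type is identical
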
@@ -13,16 +13,28 @@ from propcache.api import cached_property
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
from homeassistant.const import (  # noqa: F401
    _DEPRECATED_STATE_JAMMED,
    _DEPRECATED_STATE_LOCKED,
    _DEPRECATED_STATE_LOCKING,
    _DEPRECATED_STATE_UNLOCKED,
    _DEPRECATED_STATE_UNLOCKING,
    ATTR_CODE,
    ATTR_CODE_FORMAT,
    SERVICE_LOCK,
    SERVICE_OPEN,
    SERVICE_UNLOCK,
    STATE_OPEN,
    STATE_OPENING,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.deprecation import (
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.typing import ConfigType, StateType
@@ -305,3 +317,11 @@ class LockEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
            return

        self._lock_option_default_code = ""


# These can be removed if no deprecated constant are in this module anymore
__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = ft.partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())

@@ -1,64 +0,0 @@
"""The Lunatone integration."""

from typing import Final

from lunatone_rest_api_client import Auth, Devices, Info

from homeassistant.const import CONF_URL, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN
from .coordinator import (
    LunatoneConfigEntry,
    LunatoneData,
    LunatoneDevicesDataUpdateCoordinator,
    LunatoneInfoDataUpdateCoordinator,
)

PLATFORMS: Final[list[Platform]] = [Platform.LIGHT]


async def async_setup_entry(hass: HomeAssistant, entry: LunatoneConfigEntry) -> bool:
    """Set up Lunatone from a config entry."""
    auth_api = Auth(async_get_clientsession(hass), entry.data[CONF_URL])
    info_api = Info(auth_api)
    devices_api = Devices(auth_api)

    coordinator_info = LunatoneInfoDataUpdateCoordinator(hass, entry, info_api)
    await coordinator_info.async_config_entry_first_refresh()

    if info_api.serial_number is None:
        raise ConfigEntryError(
            translation_domain=DOMAIN, translation_key="missing_device_info"
        )

    device_registry = dr.async_get(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        identifiers={(DOMAIN, str(info_api.serial_number))},
        name=info_api.name,
        manufacturer="Lunatone",
        sw_version=info_api.version,
        hw_version=info_api.data.device.pcb,
        configuration_url=entry.data[CONF_URL],
        serial_number=str(info_api.serial_number),
        model_id=(
            f"{info_api.data.device.article_number}{info_api.data.device.article_info}"
        ),
    )

    coordinator_devices = LunatoneDevicesDataUpdateCoordinator(hass, entry, devices_api)
    await coordinator_devices.async_config_entry_first_refresh()

    entry.runtime_data = LunatoneData(coordinator_info, coordinator_devices)
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

    return True


async def async_unload_entry(hass: HomeAssistant, entry: LunatoneConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

@@ -1,83 +0,0 @@
"""Config flow for Lunatone."""

from typing import Any, Final

import aiohttp
from lunatone_rest_api_client import Auth, Info
import voluptuous as vol

from homeassistant.config_entries import (
    SOURCE_RECONFIGURE,
    ConfigFlow,
    ConfigFlowResult,
)
from homeassistant.const import CONF_URL
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN

DATA_SCHEMA: Final[vol.Schema] = vol.Schema(
    {vol.Required(CONF_URL, default="http://"): cv.string},
)


def compose_title(name: str | None, serial_number: int) -> str:
    """Compose a title string from a given name and serial number."""
    return f"{name or 'DALI Gateway'} {serial_number}"


class LunatoneConfigFlow(ConfigFlow, domain=DOMAIN):
    """Lunatone config flow."""

    VERSION = 1
    MINOR_VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initialized by the user."""
        errors: dict[str, str] = {}
        if user_input is not None:
            url = user_input[CONF_URL]
            data = {CONF_URL: url}
            self._async_abort_entries_match(data)
            auth_api = Auth(
                session=async_get_clientsession(self.hass),
                base_url=url,
            )
            info_api = Info(auth_api)
            try:
                await info_api.async_update()
            except aiohttp.InvalidUrlClientError:
                errors["base"] = "invalid_url"
            except aiohttp.ClientConnectionError:
                errors["base"] = "cannot_connect"
            else:
                if info_api.data is None or info_api.serial_number is None:
                    errors["base"] = "missing_device_info"
                else:
                    await self.async_set_unique_id(str(info_api.serial_number))
                    if self.source == SOURCE_RECONFIGURE:
                        self._abort_if_unique_id_mismatch()
                        return self.async_update_reload_and_abort(
                            self._get_reconfigure_entry(),
                            data_updates=data,
                            title=compose_title(info_api.name, info_api.serial_number),
                        )
                    self._abort_if_unique_id_configured()
                    return self.async_create_entry(
                        title=compose_title(info_api.name, info_api.serial_number),
                        data={CONF_URL: url},
                    )
        return self.async_show_form(
            step_id="user",
            data_schema=DATA_SCHEMA,
            errors=errors,
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a reconfiguration flow initialized by the user."""
        return await self.async_step_user(user_input)

@@ -1,5 +0,0 @@
"""Constants for the Lunatone integration."""

from typing import Final

DOMAIN: Final = "lunatone"

@@ -1,101 +0,0 @@
"""Coordinator for handling data fetching and updates."""

from __future__ import annotations

from dataclasses import dataclass
from datetime import timedelta
import logging

import aiohttp
from lunatone_rest_api_client import Device, Devices, Info
from lunatone_rest_api_client.models import InfoData

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

DEFAULT_DEVICES_SCAN_INTERVAL = timedelta(seconds=10)


@dataclass
class LunatoneData:
    """Data for Lunatone integration."""

    coordinator_info: LunatoneInfoDataUpdateCoordinator
    coordinator_devices: LunatoneDevicesDataUpdateCoordinator


type LunatoneConfigEntry = ConfigEntry[LunatoneData]


class LunatoneInfoDataUpdateCoordinator(DataUpdateCoordinator[InfoData]):
    """Data update coordinator for Lunatone info."""

    config_entry: LunatoneConfigEntry

    def __init__(
        self, hass: HomeAssistant, config_entry: LunatoneConfigEntry, info_api: Info
    ) -> None:
        """Initialize the coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=f"{DOMAIN}-info",
            always_update=False,
        )
        self.info_api = info_api

    async def _async_update_data(self) -> InfoData:
        """Update info data."""
        try:
            await self.info_api.async_update()
        except aiohttp.ClientConnectionError as ex:
            raise UpdateFailed(
                "Unable to retrieve info data from Lunatone REST API"
            ) from ex

        if self.info_api.data is None:
            raise UpdateFailed("Did not receive info data from Lunatone REST API")
        return self.info_api.data


class LunatoneDevicesDataUpdateCoordinator(DataUpdateCoordinator[dict[int, Device]]):
    """Data update coordinator for Lunatone devices."""

    config_entry: LunatoneConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: LunatoneConfigEntry,
        devices_api: Devices,
    ) -> None:
        """Initialize the coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=f"{DOMAIN}-devices",
            always_update=False,
            update_interval=DEFAULT_DEVICES_SCAN_INTERVAL,
        )
        self.devices_api = devices_api

    async def _async_update_data(self) -> dict[int, Device]:
        """Update devices data."""
        try:
            await self.devices_api.async_update()
        except aiohttp.ClientConnectionError as ex:
            raise UpdateFailed(
                "Unable to retrieve devices data from Lunatone REST API"
            ) from ex

        if self.devices_api.data is None:
            raise UpdateFailed("Did not receive devices data from Lunatone REST API")

        return {device.id: device for device in self.devices_api.devices}

@@ -1,103 +0,0 @@
"""Platform for Lunatone light integration."""

from __future__ import annotations

import asyncio
from typing import Any

from homeassistant.components.light import ColorMode, LightEntity
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import LunatoneConfigEntry, LunatoneDevicesDataUpdateCoordinator

PARALLEL_UPDATES = 0
STATUS_UPDATE_DELAY = 0.04


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: LunatoneConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Lunatone Light platform."""
    coordinator_info = config_entry.runtime_data.coordinator_info
    coordinator_devices = config_entry.runtime_data.coordinator_devices

    async_add_entities(
        [
            LunatoneLight(
                coordinator_devices, device_id, coordinator_info.data.device.serial
            )
            for device_id in coordinator_devices.data
        ]
    )


class LunatoneLight(
    CoordinatorEntity[LunatoneDevicesDataUpdateCoordinator], LightEntity
):
    """Representation of a Lunatone light."""

    _attr_color_mode = ColorMode.ONOFF
    _attr_supported_color_modes = {ColorMode.ONOFF}
    _attr_has_entity_name = True
    _attr_name = None
    _attr_should_poll = False

    def __init__(
        self,
        coordinator: LunatoneDevicesDataUpdateCoordinator,
        device_id: int,
        interface_serial_number: int,
    ) -> None:
        """Initialize a LunatoneLight."""
        super().__init__(coordinator=coordinator)
        self._device_id = device_id
        self._interface_serial_number = interface_serial_number
        self._device = self.coordinator.data.get(self._device_id)
        self._attr_unique_id = f"{interface_serial_number}-device{device_id}"

    @property
    def device_info(self) -> DeviceInfo:
        """Return the device info."""
        assert self.unique_id
        name = self._device.name if self._device is not None else None
        return DeviceInfo(
            identifiers={(DOMAIN, self.unique_id)},
            name=name,
            via_device=(DOMAIN, str(self._interface_serial_number)),
        )

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return super().available and self._device is not None

    @property
    def is_on(self) -> bool:
        """Return True if light is on."""
        return self._device is not None and self._device.is_on

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        self._device = self.coordinator.data.get(self._device_id)
        self.async_write_ha_state()

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Instruct the light to turn on."""
        assert self._device
        await self._device.switch_on()
        await asyncio.sleep(STATUS_UPDATE_DELAY)
        await self.coordinator.async_refresh()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Instruct the light to turn off."""
        assert self._device
        await self._device.switch_off()
        await asyncio.sleep(STATUS_UPDATE_DELAY)
        await self.coordinator.async_refresh()

@@ -1,11 +0,0 @@
{
  "domain": "lunatone",
  "name": "Lunatone",
  "codeowners": ["@MoonDevLT"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/lunatone",
  "integration_type": "hub",
  "iot_class": "local_polling",
  "quality_scale": "silver",
  "requirements": ["lunatone-rest-api-client==0.4.8"]
}

@@ -1,82 +0,0 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  appropriate-polling: done
  brands: done
  common-modules:
    status: exempt
    comment: |
      This integration has only one platform which uses a coordinator.
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      This integration does not provide additional actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      Entities of this integration does not explicitly subscribe to events.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: no actions
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: No options to configure
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: done
  reauthentication-flow:
    status: exempt
    comment: |
      This integration does not require authentication.
  test-coverage: done
  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: todo
    comment: Discovery not yet supported
  discovery:
    status: todo
    comment: Discovery not yet supported
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices: todo
  entity-category: todo
  entity-device-class: todo
  entity-disabled-by-default: todo
  entity-translations: todo
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: done
  repair-issues: todo
  stale-devices: todo

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: todo

@@ -1,36 +0,0 @@
{
  "config": {
    "step": {
      "confirm": {
        "description": "[%key:common::config_flow::description::confirm_setup%]"
      },
      "user": {
        "description": "Connect to the API of your Lunatone DALI IoT Gateway.",
        "data": {
          "url": "[%key:common::config_flow::data::url%]"
        },
        "data_description": {
          "url": "The URL of the Lunatone gateway device."
        }
      },
      "reconfigure": {
        "description": "Update the URL.",
        "data": {
          "url": "[%key:common::config_flow::data::url%]"
        },
        "data_description": {
          "url": "[%key:component::lunatone::config::step::user::data_description::url%]"
        }
      }
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_url": "Failed to connect. Check the URL and if the device is connected to power",
      "missing_device_info": "Failed to read device information. Check the network connection of the device"
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
    }
  }
}

@@ -30,7 +30,6 @@ from .entity import MatterEntity
from .helpers import get_matter
from .models import MatterDiscoverySchema

HUMIDITY_SCALING_FACTOR = 100
TEMPERATURE_SCALING_FACTOR = 100
HVAC_SYSTEM_MODE_MAP = {
    HVACMode.OFF: 0,
@@ -262,18 +261,6 @@ class MatterClimate(MatterEntity, ClimateEntity):
        self._attr_current_temperature = self._get_temperature_in_degrees(
            clusters.Thermostat.Attributes.LocalTemperature
        )

        self._attr_current_humidity = (
            int(raw_measured_humidity) / HUMIDITY_SCALING_FACTOR
            if (
                raw_measured_humidity := self.get_matter_attribute_value(
                    clusters.RelativeHumidityMeasurement.Attributes.MeasuredValue
                )
            )
            is not None
            else None
        )

        if self.get_matter_attribute_value(clusters.OnOff.Attributes.OnOff) is False:
            # special case: the appliance has a dedicated Power switch on the OnOff cluster
            # if the mains power is off - treat it as if the HVAC mode is off
@@ -441,7 +428,6 @@ DISCOVERY_SCHEMAS = [
            clusters.Thermostat.Attributes.TemperatureSetpointHold,
            clusters.Thermostat.Attributes.UnoccupiedCoolingSetpoint,
            clusters.Thermostat.Attributes.UnoccupiedHeatingSetpoint,
            clusters.RelativeHumidityMeasurement.Attributes.MeasuredValue,
            clusters.OnOff.Attributes.OnOff,
        ),
        device_type=(device_types.Thermostat, device_types.RoomAirConditioner),

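Note: the removed humidity block scales the raw Matter `MeasuredValue`, which the spec reports in hundredths of a percent relative humidity. The same computation in isolation:

HUMIDITY_SCALING_FACTOR = 100


def scale_humidity(raw_measured_humidity: int | None) -> float | None:
    """Convert Matter hundredths-of-a-percent (e.g. 4523) to percent (45.23)."""
    if raw_measured_humidity is None:
        return None
    return int(raw_measured_humidity) / HUMIDITY_SCALING_FACTOR
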
@@ -351,7 +351,6 @@ DISCOVERY_SCHEMAS = [
        required_attributes=(
            clusters.RelativeHumidityMeasurement.Attributes.MeasuredValue,
        ),
        allow_multi=True,  # also used for climate entity
    ),
    MatterDiscoverySchema(
        platform=Platform.SENSOR,

@@ -27,7 +27,7 @@ _LOGGER = logging.getLogger(__name__)
UPDATE_INTERVAL = datetime.timedelta(minutes=30)
TIMEOUT = 10

type TokenManager = Callable[[], Awaitable[str]]
TokenManager = Callable[[], Awaitable[str]]


@asynccontextmanager

@@ -55,6 +55,12 @@ from homeassistant.const import (  # noqa: F401
from homeassistant.core import HomeAssistant, SupportsResponse
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.deprecation import (
    DeprecatedConstantEnum,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.network import get_url
@@ -69,6 +75,26 @@ from .browse_media import (  # noqa: F401
    async_process_play_media_url,
)
from .const import (  # noqa: F401
    _DEPRECATED_MEDIA_CLASS_DIRECTORY,
    _DEPRECATED_SUPPORT_BROWSE_MEDIA,
    _DEPRECATED_SUPPORT_CLEAR_PLAYLIST,
    _DEPRECATED_SUPPORT_GROUPING,
    _DEPRECATED_SUPPORT_NEXT_TRACK,
    _DEPRECATED_SUPPORT_PAUSE,
    _DEPRECATED_SUPPORT_PLAY,
    _DEPRECATED_SUPPORT_PLAY_MEDIA,
    _DEPRECATED_SUPPORT_PREVIOUS_TRACK,
    _DEPRECATED_SUPPORT_REPEAT_SET,
    _DEPRECATED_SUPPORT_SEEK,
    _DEPRECATED_SUPPORT_SELECT_SOUND_MODE,
    _DEPRECATED_SUPPORT_SELECT_SOURCE,
    _DEPRECATED_SUPPORT_SHUFFLE_SET,
    _DEPRECATED_SUPPORT_STOP,
    _DEPRECATED_SUPPORT_TURN_OFF,
    _DEPRECATED_SUPPORT_TURN_ON,
    _DEPRECATED_SUPPORT_VOLUME_MUTE,
    _DEPRECATED_SUPPORT_VOLUME_SET,
    _DEPRECATED_SUPPORT_VOLUME_STEP,
    ATTR_APP_ID,
    ATTR_APP_NAME,
    ATTR_ENTITY_PICTURE_LOCAL,
@@ -162,6 +188,17 @@ class MediaPlayerDeviceClass(StrEnum):
DEVICE_CLASSES_SCHEMA = vol.All(vol.Lower, vol.Coerce(MediaPlayerDeviceClass))


# DEVICE_CLASS* below are deprecated as of 2021.12
# use the MediaPlayerDeviceClass enum instead.
_DEPRECATED_DEVICE_CLASS_TV = DeprecatedConstantEnum(
    MediaPlayerDeviceClass.TV, "2025.10"
)
_DEPRECATED_DEVICE_CLASS_SPEAKER = DeprecatedConstantEnum(
    MediaPlayerDeviceClass.SPEAKER, "2025.10"
)
_DEPRECATED_DEVICE_CLASS_RECEIVER = DeprecatedConstantEnum(
    MediaPlayerDeviceClass.RECEIVER, "2025.10"
)
DEVICE_CLASSES = [cls.value for cls in MediaPlayerDeviceClass]


@@ -1159,7 +1196,6 @@ class MediaPlayerEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
        media_content_id: str | None = None,
        media_filter_classes: list[MediaClass] | None = None,
    ) -> SearchMedia:
        """Search for media."""
        return await self.async_search_media(
            query=SearchMediaQuery(
                search_query=search_query,
@@ -1474,3 +1510,13 @@ async def async_fetch_image(
        logger.warning("Error retrieving proxied image from %s", url)

    return content, content_type


# As we import deprecated constants from the const module, we need to add these two functions
# otherwise this module will be logged for using deprecated constants and not the custom component
# These can be removed if no deprecated constant are in this module anymore
__getattr__ = ft.partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = ft.partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())

@@ -1,8 +1,15 @@
"""Provides the constants needed for component."""

from enum import IntFlag, StrEnum
from functools import partial

from homeassistant.helpers.deprecation import EnumWithDeprecatedMembers
from homeassistant.helpers.deprecation import (
    DeprecatedConstantEnum,
    EnumWithDeprecatedMembers,
    all_with_deprecated_constants,
    check_if_deprecated_constant,
    dir_with_deprecated_constants,
)

# How long our auth signature on the content should be valid for
CONTENT_AUTH_EXPIRY_TIME = 3600 * 24
@@ -87,6 +94,38 @@ class MediaClass(StrEnum):
    VIDEO = "video"


# These MEDIA_CLASS_* constants are deprecated as of Home Assistant 2022.10.
# Please use the MediaClass enum instead.
_DEPRECATED_MEDIA_CLASS_ALBUM = DeprecatedConstantEnum(MediaClass.ALBUM, "2025.10")
_DEPRECATED_MEDIA_CLASS_APP = DeprecatedConstantEnum(MediaClass.APP, "2025.10")
_DEPRECATED_MEDIA_CLASS_ARTIST = DeprecatedConstantEnum(MediaClass.ARTIST, "2025.10")
_DEPRECATED_MEDIA_CLASS_CHANNEL = DeprecatedConstantEnum(MediaClass.CHANNEL, "2025.10")
_DEPRECATED_MEDIA_CLASS_COMPOSER = DeprecatedConstantEnum(
    MediaClass.COMPOSER, "2025.10"
)
_DEPRECATED_MEDIA_CLASS_CONTRIBUTING_ARTIST = DeprecatedConstantEnum(
    MediaClass.CONTRIBUTING_ARTIST, "2025.10"
)
_DEPRECATED_MEDIA_CLASS_DIRECTORY = DeprecatedConstantEnum(
    MediaClass.DIRECTORY, "2025.10"
)
_DEPRECATED_MEDIA_CLASS_EPISODE = DeprecatedConstantEnum(MediaClass.EPISODE, "2025.10")
_DEPRECATED_MEDIA_CLASS_GAME = DeprecatedConstantEnum(MediaClass.GAME, "2025.10")
_DEPRECATED_MEDIA_CLASS_GENRE = DeprecatedConstantEnum(MediaClass.GENRE, "2025.10")
_DEPRECATED_MEDIA_CLASS_IMAGE = DeprecatedConstantEnum(MediaClass.IMAGE, "2025.10")
_DEPRECATED_MEDIA_CLASS_MOVIE = DeprecatedConstantEnum(MediaClass.MOVIE, "2025.10")
_DEPRECATED_MEDIA_CLASS_MUSIC = DeprecatedConstantEnum(MediaClass.MUSIC, "2025.10")
_DEPRECATED_MEDIA_CLASS_PLAYLIST = DeprecatedConstantEnum(
    MediaClass.PLAYLIST, "2025.10"
)
_DEPRECATED_MEDIA_CLASS_PODCAST = DeprecatedConstantEnum(MediaClass.PODCAST, "2025.10")
_DEPRECATED_MEDIA_CLASS_SEASON = DeprecatedConstantEnum(MediaClass.SEASON, "2025.10")
_DEPRECATED_MEDIA_CLASS_TRACK = DeprecatedConstantEnum(MediaClass.TRACK, "2025.10")
_DEPRECATED_MEDIA_CLASS_TV_SHOW = DeprecatedConstantEnum(MediaClass.TV_SHOW, "2025.10")
_DEPRECATED_MEDIA_CLASS_URL = DeprecatedConstantEnum(MediaClass.URL, "2025.10")
_DEPRECATED_MEDIA_CLASS_VIDEO = DeprecatedConstantEnum(MediaClass.VIDEO, "2025.10")


class MediaType(StrEnum):
    """Media type for media player entities."""

@@ -113,6 +152,33 @@ class MediaType(StrEnum):
    VIDEO = "video"


# These MEDIA_TYPE_* constants are deprecated as of Home Assistant 2022.10.
# Please use the MediaType enum instead.
_DEPRECATED_MEDIA_TYPE_ALBUM = DeprecatedConstantEnum(MediaType.ALBUM, "2025.10")
_DEPRECATED_MEDIA_TYPE_APP = DeprecatedConstantEnum(MediaType.APP, "2025.10")
_DEPRECATED_MEDIA_TYPE_APPS = DeprecatedConstantEnum(MediaType.APPS, "2025.10")
_DEPRECATED_MEDIA_TYPE_ARTIST = DeprecatedConstantEnum(MediaType.ARTIST, "2025.10")
_DEPRECATED_MEDIA_TYPE_CHANNEL = DeprecatedConstantEnum(MediaType.CHANNEL, "2025.10")
_DEPRECATED_MEDIA_TYPE_CHANNELS = DeprecatedConstantEnum(MediaType.CHANNELS, "2025.10")
_DEPRECATED_MEDIA_TYPE_COMPOSER = DeprecatedConstantEnum(MediaType.COMPOSER, "2025.10")
_DEPRECATED_MEDIA_TYPE_CONTRIBUTING_ARTIST = DeprecatedConstantEnum(
    MediaType.CONTRIBUTING_ARTIST, "2025.10"
)
_DEPRECATED_MEDIA_TYPE_EPISODE = DeprecatedConstantEnum(MediaType.EPISODE, "2025.10")
_DEPRECATED_MEDIA_TYPE_GAME = DeprecatedConstantEnum(MediaType.GAME, "2025.10")
_DEPRECATED_MEDIA_TYPE_GENRE = DeprecatedConstantEnum(MediaType.GENRE, "2025.10")
_DEPRECATED_MEDIA_TYPE_IMAGE = DeprecatedConstantEnum(MediaType.IMAGE, "2025.10")
_DEPRECATED_MEDIA_TYPE_MOVIE = DeprecatedConstantEnum(MediaType.MOVIE, "2025.10")
_DEPRECATED_MEDIA_TYPE_MUSIC = DeprecatedConstantEnum(MediaType.MUSIC, "2025.10")
_DEPRECATED_MEDIA_TYPE_PLAYLIST = DeprecatedConstantEnum(MediaType.PLAYLIST, "2025.10")
_DEPRECATED_MEDIA_TYPE_PODCAST = DeprecatedConstantEnum(MediaType.PODCAST, "2025.10")
_DEPRECATED_MEDIA_TYPE_SEASON = DeprecatedConstantEnum(MediaType.SEASON, "2025.10")
_DEPRECATED_MEDIA_TYPE_TRACK = DeprecatedConstantEnum(MediaType.TRACK, "2025.10")
_DEPRECATED_MEDIA_TYPE_TVSHOW = DeprecatedConstantEnum(MediaType.TVSHOW, "2025.10")
_DEPRECATED_MEDIA_TYPE_URL = DeprecatedConstantEnum(MediaType.URL, "2025.10")
_DEPRECATED_MEDIA_TYPE_VIDEO = DeprecatedConstantEnum(MediaType.VIDEO, "2025.10")


SERVICE_CLEAR_PLAYLIST = "clear_playlist"
SERVICE_JOIN = "join"
SERVICE_PLAY_MEDIA = "play_media"
@@ -131,6 +197,11 @@ class RepeatMode(StrEnum):
    ONE = "one"


# These REPEAT_MODE_* constants are deprecated as of Home Assistant 2022.10.
# Please use the RepeatMode enum instead.
_DEPRECATED_REPEAT_MODE_ALL = DeprecatedConstantEnum(RepeatMode.ALL, "2025.10")
_DEPRECATED_REPEAT_MODE_OFF = DeprecatedConstantEnum(RepeatMode.OFF, "2025.10")
_DEPRECATED_REPEAT_MODE_ONE = DeprecatedConstantEnum(RepeatMode.ONE, "2025.10")
REPEAT_MODES = [cls.value for cls in RepeatMode]


@@ -160,3 +231,71 @@ class MediaPlayerEntityFeature(IntFlag):
    MEDIA_ANNOUNCE = 1048576
    MEDIA_ENQUEUE = 2097152
    SEARCH_MEDIA = 4194304


# These SUPPORT_* constants are deprecated as of Home Assistant 2022.5.
# Please use the MediaPlayerEntityFeature enum instead.
_DEPRECATED_SUPPORT_PAUSE = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.PAUSE, "2025.10"
)
_DEPRECATED_SUPPORT_SEEK = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.SEEK, "2025.10"
)
_DEPRECATED_SUPPORT_VOLUME_SET = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.VOLUME_SET, "2025.10"
)
_DEPRECATED_SUPPORT_VOLUME_MUTE = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.VOLUME_MUTE, "2025.10"
)
_DEPRECATED_SUPPORT_PREVIOUS_TRACK = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.PREVIOUS_TRACK, "2025.10"
)
_DEPRECATED_SUPPORT_NEXT_TRACK = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.NEXT_TRACK, "2025.10"
)
_DEPRECATED_SUPPORT_TURN_ON = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.TURN_ON, "2025.10"
)
_DEPRECATED_SUPPORT_TURN_OFF = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.TURN_OFF, "2025.10"
)
_DEPRECATED_SUPPORT_PLAY_MEDIA = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.PLAY_MEDIA, "2025.10"
)
_DEPRECATED_SUPPORT_VOLUME_STEP = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.VOLUME_STEP, "2025.10"
)
_DEPRECATED_SUPPORT_SELECT_SOURCE = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.SELECT_SOURCE, "2025.10"
)
_DEPRECATED_SUPPORT_STOP = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.STOP, "2025.10"
)
_DEPRECATED_SUPPORT_CLEAR_PLAYLIST = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.CLEAR_PLAYLIST, "2025.10"
)
_DEPRECATED_SUPPORT_PLAY = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.PLAY, "2025.10"
)
_DEPRECATED_SUPPORT_SHUFFLE_SET = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.SHUFFLE_SET, "2025.10"
)
_DEPRECATED_SUPPORT_SELECT_SOUND_MODE = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.SELECT_SOUND_MODE, "2025.10"
)
_DEPRECATED_SUPPORT_BROWSE_MEDIA = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.BROWSE_MEDIA, "2025.10"
)
_DEPRECATED_SUPPORT_REPEAT_SET = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.REPEAT_SET, "2025.10"
)
_DEPRECATED_SUPPORT_GROUPING = DeprecatedConstantEnum(
    MediaPlayerEntityFeature.GROUPING, "2025.10"
)

# These can be removed if no deprecated constant are in this module anymore
__getattr__ = partial(check_if_deprecated_constant, module_globals=globals())
__dir__ = partial(
    dir_with_deprecated_constants, module_globals_keys=[*globals().keys()]
)
__all__ = all_with_deprecated_constants(globals())

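Note: the `_DEPRECATED_*` wrappers added throughout this file only work because of the module-level `__getattr__`/`__dir__` hooks (PEP 562) installed at the bottom: accessing the old constant name resolves the wrapper, logs a deprecation warning, and returns the enum value. A stripped-down sketch of the mechanism, independent of the Home Assistant helpers:

import warnings

_DEPRECATED_SUPPORT_PAUSE = 1  # stand-in for DeprecatedConstantEnum(..., "2025.10")


def __getattr__(name: str):
    # PEP 562: only called when normal module attribute lookup fails,
    # i.e. for the old names that now exist only as _DEPRECATED_* wrappers.
    if (wrapped := globals().get(f"_DEPRECATED_{name}")) is not None:
        warnings.warn(f"{name} is deprecated", DeprecationWarning, stacklevel=2)
        return wrapped
    raise AttributeError(f"module has no attribute {name!r}")
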
@@ -7,7 +7,6 @@ from typing import TYPE_CHECKING, Any

from homeassistant.components.media_player import BrowseMedia, MediaClass, MediaType
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.translation import async_get_cached_translations

from .const import MEDIA_SOURCE_DATA, URI_SCHEME, URI_SCHEME_REGEX

@@ -63,15 +62,12 @@ class MediaSourceItem:
    async def async_browse(self) -> BrowseMediaSource:
        """Browse this item."""
        if self.domain is None:
            title = async_get_cached_translations(
                self.hass, self.hass.config.language, "common", "media_source"
            ).get("component.media_source.common.sources_default", "Media Sources")
            base = BrowseMediaSource(
                domain=None,
                identifier=None,
                media_class=MediaClass.APP,
                media_content_type=MediaType.APPS,
                title=title,
                title="Media Sources",
                can_play=False,
                can_expand=True,
                children_media_class=MediaClass.APP,

@@ -9,8 +9,5 @@
    "unknown_media_source": {
      "message": "Unknown media source: {domain}"
    }
  },
  "common": {
    "sources_default": "Media sources"
  }
}

@@ -208,7 +208,7 @@ class ModbusStructEntity(ModbusBaseEntity, RestoreEntity):

    def __process_raw_value(self, entry: float | str | bytes) -> str | None:
        """Process value from sensor with NaN handling, scaling, offset, min/max etc."""
        if self._nan_value is not None and entry in (self._nan_value, -self._nan_value):
        if self._nan_value and entry in (self._nan_value, -self._nan_value):
            return None
        if isinstance(entry, bytes):
            return entry.decode()

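Note: the modbus change above is more than stylistic. `if self._nan_value` treats a configured sentinel of `0` as if no sentinel were set (0 is falsy), whereas `is not None` only skips NaN handling when the option is genuinely absent. A minimal sketch of the difference:

def process(entry: float, nan_value: float | None) -> float | None:
    # `nan_value is not None` keeps a sentinel of 0 working; a bare
    # truthiness check would silently disable NaN handling for 0.
    if nan_value is not None and entry in (nan_value, -nan_value):
        return None
    return entry


assert process(0.0, 0.0) is None  # correct: 0 is a valid sentinel
assert process(0.0, None) == 0.0  # no sentinel configured, value passes through
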
@@ -34,7 +34,6 @@ async def async_setup_entry(

    coordinator = NordPoolDataUpdateCoordinator(hass, config_entry)
    await coordinator.fetch_data(dt_util.utcnow(), True)
    await coordinator.update_listeners(dt_util.utcnow())
    if not coordinator.last_update_success:
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,

@@ -44,10 +44,9 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
            name=DOMAIN,
        )
        self.client = NordPoolClient(session=async_get_clientsession(hass))
        self.data_unsub: Callable[[], None] | None = None
        self.listener_unsub: Callable[[], None] | None = None
        self.unsub: Callable[[], None] | None = None

    def get_next_data_interval(self, now: datetime) -> datetime:
    def get_next_interval(self, now: datetime) -> datetime:
        """Compute next time an update should occur."""
        next_hour = dt_util.utcnow() + timedelta(hours=1)
        next_run = datetime(
@@ -57,45 +56,23 @@ class NordPoolDataUpdateCoordinator(DataUpdateCoordinator[DeliveryPeriodsData]):
            next_hour.hour,
            tzinfo=dt_util.UTC,
        )
        LOGGER.debug("Next data update at %s", next_run)
        return next_run

    def get_next_15_interval(self, now: datetime) -> datetime:
        """Compute next time we need to notify listeners."""
        next_run = dt_util.utcnow() + timedelta(minutes=15)
        next_minute = next_run.minute // 15 * 15
        next_run = next_run.replace(
            minute=next_minute, second=0, microsecond=0, tzinfo=dt_util.UTC
        )

        LOGGER.debug("Next listener update at %s", next_run)
        LOGGER.debug("Next update at %s", next_run)
        return next_run

    async def async_shutdown(self) -> None:
        """Cancel any scheduled call, and ignore new runs."""
        await super().async_shutdown()
        if self.data_unsub:
            self.data_unsub()
            self.data_unsub = None
        if self.listener_unsub:
            self.listener_unsub()
            self.listener_unsub = None

    async def update_listeners(self, now: datetime) -> None:
        """Update entity listeners."""
        self.listener_unsub = async_track_point_in_utc_time(
            self.hass,
            self.update_listeners,
            self.get_next_15_interval(dt_util.utcnow()),
        )
        self.async_update_listeners()
        if self.unsub:
            self.unsub()
            self.unsub = None

    async def fetch_data(self, now: datetime, initial: bool = False) -> None:
        """Fetch data from Nord Pool."""
        self.data_unsub = async_track_point_in_utc_time(
            self.hass, self.fetch_data, self.get_next_data_interval(dt_util.utcnow())
        self.unsub = async_track_point_in_utc_time(
            self.hass, self.fetch_data, self.get_next_interval(dt_util.utcnow())
        )
        if self.config_entry.pref_disable_polling and not initial:
            self.async_update_listeners()
            return
        try:
            data = await self.handle_data(initial)

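Note: the removed `get_next_15_interval` aligns listener updates to quarter-hour boundaries using integer floor division on the minute. The rounding in isolation:

from datetime import datetime, timedelta


def next_quarter_hour(now: datetime) -> datetime:
    """Return the next quarter-hour boundary strictly after `now`."""
    candidate = now + timedelta(minutes=15)
    # Floor the minute to a multiple of 15: 12:07 -> 12:15, 12:16 -> 12:30.
    return candidate.replace(
        minute=candidate.minute // 15 * 15, second=0, microsecond=0
    )
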
@@ -307,7 +307,7 @@
    },
    "markdown": {
      "name": "Format as Markdown",
      "description": "Enable Markdown formatting for the message body. See the Markdown guide for syntax details: https://www.markdownguide.org/basic-syntax/."
      "description": "Enable Markdown formatting for the message body (Web app only). See the Markdown guide for syntax details: https://www.markdownguide.org/basic-syntax/."
    },
    "tags": {
      "name": "Tags/Emojis",

@@ -35,7 +35,7 @@ from .const import CONF_DELETE_PERMANENTLY, DATA_BACKUP_AGENT_LISTENERS, DOMAIN
from .coordinator import OneDriveConfigEntry

_LOGGER = logging.getLogger(__name__)
UPLOAD_CHUNK_SIZE = 32 * 320 * 1024  # 10.4MB
UPLOAD_CHUNK_SIZE = 16 * 320 * 1024  # 5.2MB
TIMEOUT = ClientTimeout(connect=10, total=43200)  # 12 hours
METADATA_VERSION = 2
CACHE_TTL = 300
@@ -163,10 +163,7 @@ class OneDriveBackupAgent(BackupAgent):
        )
        try:
            backup_file = await LargeFileUploadClient.upload(
                self._token_function,
                file,
                upload_chunk_size=UPLOAD_CHUNK_SIZE,
                session=async_get_clientsession(self._hass),
                self._token_function, file, session=async_get_clientsession(self._hass)
            )
        except HashMismatchError as err:
            raise BackupAgentError(

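Note: both chunk sizes in the onedrive hunk are multiples of 320 KiB (327,680 bytes); Microsoft Graph upload sessions require every fragment except the last to be sized in such multiples, so only the multiplier changes between the two versions. Illustration:

GRAPH_FRAGMENT_UNIT = 320 * 1024  # upload-session fragments must be multiples of this

for units in (32, 16):  # the two multipliers appearing in the hunk
    chunk = units * GRAPH_FRAGMENT_UNIT
    assert chunk % GRAPH_FRAGMENT_UNIT == 0
    print(f"{units} units -> {chunk} bytes (~{chunk / 2**20:.1f} MiB)")
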
@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["ovoenergy"],
  "requirements": ["ovoenergy==3.0.2"]
  "requirements": ["ovoenergy==2.0.1"]
}

@@ -115,7 +115,9 @@ class PandoraMediaPlayer(MediaPlayerEntity):
    async def _start_pianobar(self) -> bool:
        pianobar = pexpect.spawn("pianobar", encoding="utf-8")
        pianobar.delaybeforesend = None
        pianobar.delayafterread = None
        # mypy thinks delayafterread must be a float but that is not what pexpect says
        # https://github.com/pexpect/pexpect/blob/4.9/pexpect/expect.py#L170
        pianobar.delayafterread = None  # type: ignore[assignment]
        pianobar.delayafterclose = 0
        pianobar.delayafterterminate = 0
        _LOGGER.debug("Started pianobar subprocess")

@@ -1,9 +1,7 @@
"""Base class for Portainer entities."""

from pyportainer.models.docker import DockerContainer
from yarl import URL

from homeassistant.const import CONF_URL
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

@@ -33,9 +31,6 @@ class PortainerEndpointEntity(PortainerCoordinatorEntity):
            identifiers={
                (DOMAIN, f"{coordinator.config_entry.entry_id}_{self.device_id}")
            },
            configuration_url=URL(
                f"{coordinator.config_entry.data[CONF_URL]}#!/{self.device_id}/docker/dashboard"
            ),
            manufacturer=DEFAULT_NAME,
            model="Endpoint",
            name=device_info.endpoint.name,
@@ -68,9 +63,6 @@ class PortainerContainerEntity(PortainerCoordinatorEntity):
                (DOMAIN, f"{self.coordinator.config_entry.entry_id}_{device_name}")
            },
            manufacturer=DEFAULT_NAME,
            configuration_url=URL(
                f"{coordinator.config_entry.data[CONF_URL]}#!/{self.endpoint_id}/docker/containers/{self.device_id}"
            ),
            model="Container",
            name=device_name,
            via_device=(

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/portainer",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
  "requirements": ["pyportainer==1.0.3"]
  "requirements": ["pyportainer==0.1.7"]
}

@@ -215,7 +215,6 @@ def create_coordinator_container_vm(
    return DataUpdateCoordinator(
        hass,
        _LOGGER,
        config_entry=None,
        name=f"proxmox_coordinator_{host_name}_{node_name}_{vm_id}",
        update_method=async_update_data,
        update_interval=timedelta(seconds=UPDATE_INTERVAL),

@@ -16,6 +16,7 @@ ATTR_HTML: Final = "html"
ATTR_CALLBACK_URL: Final = "callback_url"
ATTR_EXPIRE: Final = "expire"
ATTR_TTL: Final = "ttl"
ATTR_DATA: Final = "data"
ATTR_TIMESTAMP: Final = "timestamp"

CONF_USER_KEY: Final = "user_key"

@@ -67,7 +67,7 @@ class PushoverNotificationService(BaseNotificationService):

        # Extract params from data dict
        title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
        data = kwargs.get(ATTR_DATA) or {}
        data = kwargs.get(ATTR_DATA, {})
        url = data.get(ATTR_URL)
        url_title = data.get(ATTR_URL_TITLE)
        priority = data.get(ATTR_PRIORITY)

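Note: the pushover change matters when a caller passes `data: null` explicitly. `kwargs.get(ATTR_DATA, {})` returns the stored `None` because the key exists, and the subsequent `data.get(...)` calls then fail; `kwargs.get(ATTR_DATA) or {}` normalizes `None` to an empty dict first. A minimal sketch:

kwargs = {"data": None}

a = kwargs.get("data", {})    # -> None: the key exists, so the default is unused
b = kwargs.get("data") or {}  # -> {}: falsy None is replaced before chaining .get()

assert a is None and b == {}
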
@@ -5,7 +5,7 @@ from __future__ import annotations
from abc import ABC, abstractmethod
from collections.abc import Callable
import re
from typing import cast
from typing import Generic, TypeVar, cast

from qbusmqttapi.discovery import QbusMqttDevice, QbusMqttOutput
from qbusmqttapi.factory import QbusMqttMessageFactory, QbusMqttTopicFactory
@@ -20,6 +20,8 @@ from .coordinator import QbusControllerCoordinator

_REFID_REGEX = re.compile(r"^\d+\/(\d+(?:\/\d+)?)$")

StateT = TypeVar("StateT", bound=QbusMqttState)


def create_new_entities(
    coordinator: QbusControllerCoordinator,
@@ -76,7 +78,7 @@ def create_unique_id(serial_number: str, suffix: str) -> str:
    return f"ctd_{serial_number}_{suffix}"


class QbusEntity[StateT: QbusMqttState](Entity, ABC):
class QbusEntity(Entity, ABC, Generic[StateT]):
    """Representation of a Qbus entity."""

    _state_cls: type[StateT] = cast(type[StateT], QbusMqttState)

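Note: the qbus hunk trades PEP 695 class-level type parameters (Python 3.12+) for the older explicit `TypeVar` plus `Generic` spelling; both declare the same bounded generic. Side by side:

from typing import Generic, TypeVar


class StateBase:  # stand-in for QbusMqttState, illustration only
    pass


# Python 3.12+ syntax: the parameter and its bound live in the class header.
class EntityNew[StateT: StateBase]:
    pass


# Pre-3.12 spelling: a module-level TypeVar plus Generic[...] in the bases.
StateT = TypeVar("StateT", bound=StateBase)


class EntityOld(Generic[StateT]):
    pass
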
@@ -53,6 +53,7 @@ KEEPALIVE_TIME = 30
CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36
EVENT_TYPE_IDS_SCHEMA_VERSION = 37
STATES_META_SCHEMA_VERSION = 38
LAST_REPORTED_SCHEMA_VERSION = 43
CIRCULAR_MEAN_SCHEMA_VERSION = 49

LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28

@@ -56,6 +56,7 @@ from .const import (
    DEFAULT_MAX_BIND_VARS,
    DOMAIN,
    KEEPALIVE_TIME,
    LAST_REPORTED_SCHEMA_VERSION,
    MARIADB_PYMYSQL_URL_PREFIX,
    MARIADB_URL_PREFIX,
    MAX_QUEUE_BACKLOG_MIN_VALUE,
@@ -805,10 +806,6 @@ class Recorder(threading.Thread):

        # Catch up with missed statistics
        self._schedule_compile_missing_statistics()

        # Kick off live migrations
        migration.migrate_data_live(self, self.get_session, schema_status)

        _LOGGER.debug("Recorder processing the queue")
        self._adjust_lru_size()
        self.hass.add_job(self._async_set_recorder_ready_migration_done)
@@ -825,6 +822,8 @@ class Recorder(threading.Thread):
            # there are a lot of statistics graphs on the frontend.
            self.statistics_meta_manager.load(session)

        migration.migrate_data_live(self, self.get_session, schema_status)

        # We must only set the db ready after we have set the table managers
        # to active if there is no data to migrate.
        #
@@ -1128,6 +1127,9 @@ class Recorder(threading.Thread):
        else:
            states_manager.add_pending(entity_id, dbstate)

        if states_meta_manager.active:
            dbstate.entity_id = None

        if entity_id is None or not (
            shared_attrs_bytes := state_attributes_manager.serialize_from_event(event)
        ):
@@ -1138,7 +1140,7 @@ class Recorder(threading.Thread):
            dbstate.states_meta_rel = pending_states_meta
        elif metadata_id := states_meta_manager.get(entity_id, session, True):
            dbstate.metadata_id = metadata_id
        elif entity_removed:
        elif states_meta_manager.active and entity_removed:
            # If the entity was removed, we don't need to add it to the
            # StatesMeta table or record it in the pending commit
            # if it does not have a metadata_id allocated to it as
@@ -1225,7 +1227,7 @@ class Recorder(threading.Thread):
        if (
            pending_last_reported
            := self.states_manager.get_pending_last_reported_timestamp()
        ):
        ) and self.schema_version >= LAST_REPORTED_SCHEMA_VERSION:
            with session.no_autoflush:
                session.execute(
                    update(States),

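Note: the added `and self.schema_version >= LAST_REPORTED_SCHEMA_VERSION` keeps the walrus condition from issuing an UPDATE for `last_reported` timestamps against databases that have not yet migrated to schema 43. The shape of that guard in isolation (function name illustrative):

LAST_REPORTED_SCHEMA_VERSION = 43  # from recorder/const.py in this diff


def should_flush_last_reported(pending: dict, schema_version: int) -> bool:
    # Only write last_reported timestamps once the column exists in the schema;
    # older databases are handled by the live migration instead.
    return bool(pending) and schema_version >= LAST_REPORTED_SCHEMA_VERSION
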
@@ -6,7 +6,7 @@ from collections.abc import Callable
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
import time
|
||||
from typing import Any, Final, Protocol, Self
|
||||
from typing import Any, Final, Protocol, Self, cast
|
||||
|
||||
import ciso8601
|
||||
from fnv_hash_fast import fnv1a_32
|
||||
@@ -45,9 +45,14 @@ from homeassistant.const import (
|
||||
MAX_LENGTH_STATE_ENTITY_ID,
|
||||
MAX_LENGTH_STATE_STATE,
|
||||
)
|
||||
from homeassistant.core import Event, EventStateChangedData
|
||||
from homeassistant.core import Context, Event, EventOrigin, EventStateChangedData, State
|
||||
from homeassistant.helpers.json import JSON_DUMP, json_bytes, json_bytes_strip_null
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.json import (
|
||||
JSON_DECODE_EXCEPTIONS,
|
||||
json_loads,
|
||||
json_loads_object,
|
||||
)
|
||||
|
||||
from .const import ALL_DOMAIN_EXCLUDE_ATTRS, SupportedDialect
|
||||
from .models import (
|
||||
@@ -55,6 +60,8 @@ from .models import (
|
||||
StatisticDataTimestamp,
|
||||
StatisticMeanType,
|
||||
StatisticMetaData,
|
||||
bytes_to_ulid_or_none,
|
||||
bytes_to_uuid_hex_or_none,
|
||||
datetime_to_timestamp_or_none,
|
||||
process_timestamp,
|
||||
ulid_to_bytes_or_none,
|
||||
@@ -244,6 +251,9 @@ class JSONLiteral(JSON):
        return process


EVENT_ORIGIN_ORDER = [EventOrigin.local, EventOrigin.remote]


class Events(Base):
    """Event history data."""

@@ -323,6 +333,28 @@ class Events(Base):
            context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
        )

    def to_native(self, validate_entity_id: bool = True) -> Event | None:
        """Convert to a native HA Event."""
        context = Context(
            id=bytes_to_ulid_or_none(self.context_id_bin),
            user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin),
            parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin),
        )
        try:
            return Event(
                self.event_type or "",
                json_loads_object(self.event_data) if self.event_data else {},
                EventOrigin(self.origin)
                if self.origin
                else EVENT_ORIGIN_ORDER[self.origin_idx or 0],
                self.time_fired_ts or 0,
                context=context,
            )
        except JSON_DECODE_EXCEPTIONS:
            # When json_loads fails
            _LOGGER.exception("Error converting to event: %s", self)
            return None


class LegacyEvents(LegacyBase):
    """Event history data with event_id, used for schema migration."""
@@ -378,6 +410,17 @@ class EventData(Base):
        """Return the hash of json encoded shared data."""
        return fnv1a_32(shared_data_bytes)

    def to_native(self) -> dict[str, Any]:
        """Convert to an event data dictionary."""
        shared_data = self.shared_data
        if shared_data is None:
            return {}
        try:
            return cast(dict[str, Any], json_loads(shared_data))
        except JSON_DECODE_EXCEPTIONS:
            _LOGGER.exception("Error converting row to event data: %s", self)
            return {}


class EventTypes(Base):
    """Event type history."""
@@ -494,7 +537,7 @@ class States(Base):
        context = event.context
        return States(
            state=state_value,
            entity_id=None,
            entity_id=event.data["entity_id"],
            attributes=None,
            context_id=None,
            context_id_bin=ulid_to_bytes_or_none(context.id),
@@ -510,6 +553,44 @@ class States(Base):
            last_reported_ts=last_reported_ts,
        )

    def to_native(self, validate_entity_id: bool = True) -> State | None:
        """Convert to an HA state object."""
        context = Context(
            id=bytes_to_ulid_or_none(self.context_id_bin),
            user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin),
            parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin),
        )
        try:
            attrs = json_loads_object(self.attributes) if self.attributes else {}
        except JSON_DECODE_EXCEPTIONS:
            # When json_loads fails
            _LOGGER.exception("Error converting row to state: %s", self)
            return None
        last_updated = dt_util.utc_from_timestamp(self.last_updated_ts or 0)
        if self.last_changed_ts is None or self.last_changed_ts == self.last_updated_ts:
            last_changed = dt_util.utc_from_timestamp(self.last_updated_ts or 0)
        else:
            last_changed = dt_util.utc_from_timestamp(self.last_changed_ts or 0)
        if (
            self.last_reported_ts is None
            or self.last_reported_ts == self.last_updated_ts
        ):
            last_reported = dt_util.utc_from_timestamp(self.last_updated_ts or 0)
        else:
            last_reported = dt_util.utc_from_timestamp(self.last_reported_ts or 0)
        return State(
            self.entity_id or "",
            self.state,  # type: ignore[arg-type]
            # Join the state_attributes table on attributes_id to get the attributes
            # for newer states
            attrs,
            last_changed=last_changed,
            last_reported=last_reported,
            last_updated=last_updated,
            context=context,
            validate_entity_id=validate_entity_id,
        )


class LegacyStates(LegacyBase):
    """State change history with entity_id, used for schema migration."""
@@ -594,6 +675,18 @@ class StateAttributes(Base):
        """Return the hash of json encoded shared attributes."""
        return fnv1a_32(shared_attrs_bytes)

    def to_native(self) -> dict[str, Any]:
        """Convert to a state attributes dictionary."""
        shared_attrs = self.shared_attrs
        if shared_attrs is None:
            return {}
        try:
            return cast(dict[str, Any], json_loads(shared_attrs))
        except JSON_DECODE_EXCEPTIONS:
            # When json_loads fails
            _LOGGER.exception("Error converting row to state attributes: %s", self)
            return {}


class StatesMeta(Base):
    """Metadata for states."""
@@ -810,6 +903,10 @@ class RecorderRuns(Base):
            f" created='{self.created.isoformat(sep=' ', timespec='seconds')}')>"
        )

    def to_native(self, validate_entity_id: bool = True) -> Self:
        """Return self, native format is this model."""
        return self


class MigrationChanges(Base):
    """Representation of migration changes."""
@@ -61,6 +61,15 @@ def update_states_metadata(
) -> None:
    """Update the states metadata table when an entity is renamed."""
    states_meta_manager = instance.states_meta_manager
    if not states_meta_manager.active:
        _LOGGER.warning(
            "Cannot rename entity_id `%s` to `%s` "
            "because the states meta manager is not yet active",
            entity_id,
            new_entity_id,
        )
        return

    with session_scope(
        session=instance.get_session(),
        exception_filter=filter_unique_constraint_integrity_error(instance, "state"),
(File diff suppressed because it is too large)

homeassistant/components/recorder/history/legacy.py (new file, 664 lines)
@@ -0,0 +1,664 @@
"""Provide pre-made queries on top of the recorder component."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import defaultdict
|
||||
from collections.abc import Callable, Iterable, Iterator
|
||||
from datetime import datetime
|
||||
from itertools import groupby
|
||||
from operator import attrgetter
|
||||
import time
|
||||
from typing import Any, cast
|
||||
|
||||
from sqlalchemy import Column, Text, and_, func, lambda_stmt, or_, select
|
||||
from sqlalchemy.engine.row import Row
|
||||
from sqlalchemy.orm.properties import MappedColumn
|
||||
from sqlalchemy.orm.session import Session
|
||||
from sqlalchemy.sql.expression import literal
|
||||
from sqlalchemy.sql.lambdas import StatementLambdaElement
|
||||
|
||||
from homeassistant.const import COMPRESSED_STATE_LAST_UPDATED, COMPRESSED_STATE_STATE
|
||||
from homeassistant.core import HomeAssistant, State, split_entity_id
|
||||
from homeassistant.helpers.recorder import get_instance
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from ..db_schema import StateAttributes, States
|
||||
from ..filters import Filters
|
||||
from ..models import process_timestamp_to_utc_isoformat
|
||||
from ..models.legacy import LegacyLazyState, legacy_row_to_compressed_state
|
||||
from ..util import execute_stmt_lambda_element, session_scope
|
||||
from .const import (
|
||||
LAST_CHANGED_KEY,
|
||||
NEED_ATTRIBUTE_DOMAINS,
|
||||
SIGNIFICANT_DOMAINS,
|
||||
SIGNIFICANT_DOMAINS_ENTITY_ID_LIKE,
|
||||
STATE_KEY,
|
||||
)
|
||||
|
||||
_BASE_STATES = (
|
||||
States.entity_id,
|
||||
States.state,
|
||||
States.last_changed_ts,
|
||||
States.last_updated_ts,
|
||||
)
|
||||
_BASE_STATES_NO_LAST_CHANGED = (
|
||||
States.entity_id,
|
||||
States.state,
|
||||
literal(value=None).label("last_changed_ts"),
|
||||
States.last_updated_ts,
|
||||
)
|
||||
_QUERY_STATE_NO_ATTR = (
|
||||
*_BASE_STATES,
|
||||
literal(value=None, type_=Text).label("attributes"),
|
||||
literal(value=None, type_=Text).label("shared_attrs"),
|
||||
)
|
||||
_QUERY_STATE_NO_ATTR_NO_LAST_CHANGED = (
|
||||
*_BASE_STATES_NO_LAST_CHANGED,
|
||||
literal(value=None, type_=Text).label("attributes"),
|
||||
literal(value=None, type_=Text).label("shared_attrs"),
|
||||
)
|
||||
_BASE_STATES_PRE_SCHEMA_31 = (
|
||||
States.entity_id,
|
||||
States.state,
|
||||
States.last_changed,
|
||||
States.last_updated,
|
||||
)
|
||||
_BASE_STATES_NO_LAST_CHANGED_PRE_SCHEMA_31 = (
|
||||
States.entity_id,
|
||||
States.state,
|
||||
literal(value=None, type_=Text).label("last_changed"),
|
||||
States.last_updated,
|
||||
)
|
||||
_QUERY_STATE_NO_ATTR_PRE_SCHEMA_31 = (
|
||||
*_BASE_STATES_PRE_SCHEMA_31,
|
||||
literal(value=None, type_=Text).label("attributes"),
|
||||
literal(value=None, type_=Text).label("shared_attrs"),
|
||||
)
|
||||
_QUERY_STATE_NO_ATTR_NO_LAST_CHANGED_PRE_SCHEMA_31 = (
|
||||
*_BASE_STATES_NO_LAST_CHANGED_PRE_SCHEMA_31,
|
||||
literal(value=None, type_=Text).label("attributes"),
|
||||
literal(value=None, type_=Text).label("shared_attrs"),
|
||||
)
|
||||
# Remove QUERY_STATES_PRE_SCHEMA_25
|
||||
# and the migration_in_progress check
|
||||
# once schema 26 is created
|
||||
_QUERY_STATES_PRE_SCHEMA_25 = (
|
||||
*_BASE_STATES_PRE_SCHEMA_31,
|
||||
States.attributes,
|
||||
literal(value=None, type_=Text).label("shared_attrs"),
|
||||
)
|
||||
_QUERY_STATES_PRE_SCHEMA_25_NO_LAST_CHANGED = (
|
||||
*_BASE_STATES_NO_LAST_CHANGED_PRE_SCHEMA_31,
|
||||
States.attributes,
|
||||
literal(value=None, type_=Text).label("shared_attrs"),
|
||||
)
|
||||
_QUERY_STATES_PRE_SCHEMA_31 = (
|
||||
*_BASE_STATES_PRE_SCHEMA_31,
|
||||
# Remove States.attributes once all attributes are in StateAttributes.shared_attrs
|
||||
States.attributes,
|
||||
StateAttributes.shared_attrs,
|
||||
)
|
||||
_QUERY_STATES_NO_LAST_CHANGED_PRE_SCHEMA_31 = (
|
||||
*_BASE_STATES_NO_LAST_CHANGED_PRE_SCHEMA_31,
|
||||
# Remove States.attributes once all attributes are in StateAttributes.shared_attrs
|
||||
States.attributes,
|
||||
StateAttributes.shared_attrs,
|
||||
)
|
||||
_QUERY_STATES = (
|
||||
*_BASE_STATES,
|
||||
# Remove States.attributes once all attributes are in StateAttributes.shared_attrs
|
||||
States.attributes,
|
||||
StateAttributes.shared_attrs,
|
||||
)
|
||||
_QUERY_STATES_NO_LAST_CHANGED = (
|
||||
*_BASE_STATES_NO_LAST_CHANGED,
|
||||
# Remove States.attributes once all attributes are in StateAttributes.shared_attrs
|
||||
States.attributes,
|
||||
StateAttributes.shared_attrs,
|
||||
)
|
||||
_FIELD_MAP = {
|
||||
cast(MappedColumn, field).name: idx
|
||||
for idx, field in enumerate(_QUERY_STATE_NO_ATTR)
|
||||
}
|
||||
_FIELD_MAP_PRE_SCHEMA_31 = {
|
||||
cast(MappedColumn, field).name: idx
|
||||
for idx, field in enumerate(_QUERY_STATES_PRE_SCHEMA_31)
|
||||
}
|
||||
|
||||
|
||||
def _lambda_stmt_and_join_attributes(
    no_attributes: bool, include_last_changed: bool = True
) -> tuple[StatementLambdaElement, bool]:
    """Return the lambda_stmt and if StateAttributes should be joined.

    Because these are lambda_stmt the values inside the lambdas need
    to be explicitly written out to avoid caching the wrong values.
    """
    # If no_attributes was requested we do the query
    # without the attributes fields and do not join the
    # state_attributes table
    if no_attributes:
        if include_last_changed:
            return (
                lambda_stmt(lambda: select(*_QUERY_STATE_NO_ATTR)),
                False,
            )
        return (
            lambda_stmt(lambda: select(*_QUERY_STATE_NO_ATTR_NO_LAST_CHANGED)),
            False,
        )

    if include_last_changed:
        return lambda_stmt(lambda: select(*_QUERY_STATES)), True
    return lambda_stmt(lambda: select(*_QUERY_STATES_NO_LAST_CHANGED)), True


def get_significant_states(
    hass: HomeAssistant,
    start_time: datetime,
    end_time: datetime | None = None,
    entity_ids: list[str] | None = None,
    filters: Filters | None = None,
    include_start_time_state: bool = True,
    significant_changes_only: bool = True,
    minimal_response: bool = False,
    no_attributes: bool = False,
    compressed_state_format: bool = False,
) -> dict[str, list[State | dict[str, Any]]]:
    """Wrap get_significant_states_with_session with an sql session."""
    with session_scope(hass=hass, read_only=True) as session:
        return get_significant_states_with_session(
            hass,
            session,
            start_time,
            end_time,
            entity_ids,
            filters,
            include_start_time_state,
            significant_changes_only,
            minimal_response,
            no_attributes,
            compressed_state_format,
        )


def _significant_states_stmt(
    start_time: datetime,
    end_time: datetime | None,
    entity_ids: list[str],
    significant_changes_only: bool,
    no_attributes: bool,
) -> StatementLambdaElement:
    """Query the database for significant state changes."""
    stmt, join_attributes = _lambda_stmt_and_join_attributes(
        no_attributes, include_last_changed=not significant_changes_only
    )
    if (
        len(entity_ids) == 1
        and significant_changes_only
        and split_entity_id(entity_ids[0])[0] not in SIGNIFICANT_DOMAINS
    ):
        stmt += lambda q: q.filter(
            (States.last_changed_ts == States.last_updated_ts)
            | States.last_changed_ts.is_(None)
        )
    elif significant_changes_only:
        stmt += lambda q: q.filter(
            or_(
                *[
                    States.entity_id.like(entity_domain)
                    for entity_domain in SIGNIFICANT_DOMAINS_ENTITY_ID_LIKE
                ],
                (
                    (States.last_changed_ts == States.last_updated_ts)
                    | States.last_changed_ts.is_(None)
                ),
            )
        )
    stmt += lambda q: q.filter(States.entity_id.in_(entity_ids))

    start_time_ts = start_time.timestamp()
    stmt += lambda q: q.filter(States.last_updated_ts > start_time_ts)
    if end_time:
        end_time_ts = end_time.timestamp()
        stmt += lambda q: q.filter(States.last_updated_ts < end_time_ts)

    if join_attributes:
        stmt += lambda q: q.outerjoin(
            StateAttributes, States.attributes_id == StateAttributes.attributes_id
        )
    stmt += lambda q: q.order_by(States.entity_id, States.last_updated_ts)
    return stmt


def get_significant_states_with_session(
    hass: HomeAssistant,
    session: Session,
    start_time: datetime,
    end_time: datetime | None = None,
    entity_ids: list[str] | None = None,
    filters: Filters | None = None,
    include_start_time_state: bool = True,
    significant_changes_only: bool = True,
    minimal_response: bool = False,
    no_attributes: bool = False,
    compressed_state_format: bool = False,
) -> dict[str, list[State | dict[str, Any]]]:
    """Return states changes during UTC period start_time - end_time.

    entity_ids is an optional iterable of entities to include in the results.

    filters is an optional SQLAlchemy filter which will be applied to the database
    queries unless entity_ids is given, in which case it's ignored.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    if filters is not None:
        raise NotImplementedError("Filters are no longer supported")
    if not entity_ids:
        raise ValueError("entity_ids must be provided")
    stmt = _significant_states_stmt(
        start_time,
        end_time,
        entity_ids,
        significant_changes_only,
        no_attributes,
    )
    states = execute_stmt_lambda_element(session, stmt, None, end_time)
    return _sorted_states_to_dict(
        hass,
        session,
        states,
        start_time,
        entity_ids,
        include_start_time_state,
        minimal_response,
        no_attributes,
        compressed_state_format,
    )

def get_full_significant_states_with_session(
    hass: HomeAssistant,
    session: Session,
    start_time: datetime,
    end_time: datetime | None = None,
    entity_ids: list[str] | None = None,
    filters: Filters | None = None,
    include_start_time_state: bool = True,
    significant_changes_only: bool = True,
    no_attributes: bool = False,
) -> dict[str, list[State]]:
    """Variant of get_significant_states_with_session.

    Difference with get_significant_states_with_session is that it does not
    return minimal responses.
    """
    return cast(
        dict[str, list[State]],
        get_significant_states_with_session(
            hass=hass,
            session=session,
            start_time=start_time,
            end_time=end_time,
            entity_ids=entity_ids,
            filters=filters,
            include_start_time_state=include_start_time_state,
            significant_changes_only=significant_changes_only,
            minimal_response=False,
            no_attributes=no_attributes,
        ),
    )


def _state_changed_during_period_stmt(
    start_time: datetime,
    end_time: datetime | None,
    entity_id: str,
    no_attributes: bool,
    descending: bool,
    limit: int | None,
) -> StatementLambdaElement:
    stmt, join_attributes = _lambda_stmt_and_join_attributes(
        no_attributes, include_last_changed=False
    )
    start_time_ts = start_time.timestamp()
    stmt += lambda q: q.filter(
        (
            (States.last_changed_ts == States.last_updated_ts)
            | States.last_changed_ts.is_(None)
        )
        & (States.last_updated_ts > start_time_ts)
    )
    if end_time:
        end_time_ts = end_time.timestamp()
        stmt += lambda q: q.filter(States.last_updated_ts < end_time_ts)
    stmt += lambda q: q.filter(States.entity_id == entity_id)
    if join_attributes:
        stmt += lambda q: q.outerjoin(
            StateAttributes, States.attributes_id == StateAttributes.attributes_id
        )
    if descending:
        stmt += lambda q: q.order_by(States.entity_id, States.last_updated_ts.desc())
    else:
        stmt += lambda q: q.order_by(States.entity_id, States.last_updated_ts)

    if limit:
        stmt += lambda q: q.limit(limit)
    return stmt


def state_changes_during_period(
    hass: HomeAssistant,
    start_time: datetime,
    end_time: datetime | None = None,
    entity_id: str | None = None,
    no_attributes: bool = False,
    descending: bool = False,
    limit: int | None = None,
    include_start_time_state: bool = True,
) -> dict[str, list[State]]:
    """Return states changes during UTC period start_time - end_time."""
    if not entity_id:
        raise ValueError("entity_id must be provided")
    entity_ids = [entity_id.lower()]
    with session_scope(hass=hass, read_only=True) as session:
        stmt = _state_changed_during_period_stmt(
            start_time,
            end_time,
            entity_id,
            no_attributes,
            descending,
            limit,
        )
        states = execute_stmt_lambda_element(session, stmt, None, end_time)
        return cast(
            dict[str, list[State]],
            _sorted_states_to_dict(
                hass,
                session,
                states,
                start_time,
                entity_ids,
                include_start_time_state=include_start_time_state,
            ),
        )


def _get_last_state_changes_stmt(
    number_of_states: int, entity_id: str
) -> StatementLambdaElement:
    stmt, join_attributes = _lambda_stmt_and_join_attributes(
        False, include_last_changed=False
    )
    stmt += lambda q: q.where(
        States.state_id
        == (
            select(States.state_id)
            .filter(States.entity_id == entity_id)
            .order_by(States.last_updated_ts.desc())
            .limit(number_of_states)
            .subquery()
        ).c.state_id
    )
    if join_attributes:
        stmt += lambda q: q.outerjoin(
            StateAttributes, States.attributes_id == StateAttributes.attributes_id
        )

    stmt += lambda q: q.order_by(States.state_id.desc())
    return stmt


def get_last_state_changes(
    hass: HomeAssistant, number_of_states: int, entity_id: str
) -> dict[str, list[State]]:
    """Return the last number_of_states."""
    entity_id_lower = entity_id.lower()
    entity_ids = [entity_id_lower]

    with session_scope(hass=hass, read_only=True) as session:
        stmt = _get_last_state_changes_stmt(number_of_states, entity_id_lower)
        states = list(execute_stmt_lambda_element(session, stmt))
        return cast(
            dict[str, list[State]],
            _sorted_states_to_dict(
                hass,
                session,
                reversed(states),
                dt_util.utcnow(),
                entity_ids,
                include_start_time_state=False,
            ),
        )

def _get_states_for_entities_stmt(
    run_start_ts: float,
    utc_point_in_time: datetime,
    entity_ids: list[str],
    no_attributes: bool,
) -> StatementLambdaElement:
    """Baked query to get states for specific entities."""
    stmt, join_attributes = _lambda_stmt_and_join_attributes(
        no_attributes, include_last_changed=True
    )
    # We got an include-list of entities, accelerate the query by filtering already
    # in the inner query.
    utc_point_in_time_ts = utc_point_in_time.timestamp()
    stmt += lambda q: q.join(
        (
            most_recent_states_for_entities_by_date := (
                select(
                    States.entity_id.label("max_entity_id"),
                    func.max(States.last_updated_ts).label("max_last_updated"),
                )
                .filter(
                    (States.last_updated_ts >= run_start_ts)
                    & (States.last_updated_ts < utc_point_in_time_ts)
                )
                .filter(States.entity_id.in_(entity_ids))
                .group_by(States.entity_id)
                .subquery()
            )
        ),
        and_(
            States.entity_id == most_recent_states_for_entities_by_date.c.max_entity_id,
            States.last_updated_ts
            == most_recent_states_for_entities_by_date.c.max_last_updated,
        ),
    )
    if join_attributes:
        stmt += lambda q: q.outerjoin(
            StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
        )
    return stmt


def _get_rows_with_session(
    hass: HomeAssistant,
    session: Session,
    utc_point_in_time: datetime,
    entity_ids: list[str],
    *,
    no_attributes: bool = False,
) -> Iterable[Row]:
    """Return the states at a specific point in time."""
    if len(entity_ids) == 1:
        return execute_stmt_lambda_element(
            session,
            _get_single_entity_states_stmt(
                utc_point_in_time, entity_ids[0], no_attributes
            ),
        )

    oldest_ts = get_instance(hass).states_manager.oldest_ts

    if oldest_ts is None or oldest_ts > utc_point_in_time.timestamp():
        # We don't have any states for the requested time
        return []

    # We have more than one entity to look at so we need to do a query on states
    # since the last recorder run started.
    stmt = _get_states_for_entities_stmt(
        oldest_ts, utc_point_in_time, entity_ids, no_attributes
    )
    return execute_stmt_lambda_element(session, stmt)


def _get_single_entity_states_stmt(
    utc_point_in_time: datetime,
    entity_id: str,
    no_attributes: bool = False,
) -> StatementLambdaElement:
    # Use an entirely different (and extremely fast) query if we only
    # have a single entity id
    stmt, join_attributes = _lambda_stmt_and_join_attributes(
        no_attributes, include_last_changed=True
    )
    utc_point_in_time_ts = utc_point_in_time.timestamp()
    stmt += (
        lambda q: q.filter(
            States.last_updated_ts < utc_point_in_time_ts,
            States.entity_id == entity_id,
        )
        .order_by(States.last_updated_ts.desc())
        .limit(1)
    )
    if join_attributes:
        stmt += lambda q: q.outerjoin(
            StateAttributes, States.attributes_id == StateAttributes.attributes_id
        )
    return stmt

def _sorted_states_to_dict(
    hass: HomeAssistant,
    session: Session,
    states: Iterable[Row],
    start_time: datetime,
    entity_ids: list[str],
    include_start_time_state: bool = True,
    minimal_response: bool = False,
    no_attributes: bool = False,
    compressed_state_format: bool = False,
) -> dict[str, list[State | dict[str, Any]]]:
    """Convert SQL results into JSON friendly data structure.

    This takes our state list and turns it into a JSON friendly data
    structure {'entity_id': [list of states], 'entity_id2': [list of states]}

    States must be sorted by entity_id and last_updated

    We also need to go back and create a synthetic zero data point for
    each list of states, otherwise our graphs won't start on the Y
    axis correctly.
    """
    state_class: Callable[
        [Row, dict[str, dict[str, Any]], datetime | None], State | dict[str, Any]
    ]
    if compressed_state_format:
        state_class = legacy_row_to_compressed_state
        attr_time = COMPRESSED_STATE_LAST_UPDATED
        attr_state = COMPRESSED_STATE_STATE
    else:
        state_class = LegacyLazyState
        attr_time = LAST_CHANGED_KEY
        attr_state = STATE_KEY

    result: dict[str, list[State | dict[str, Any]]] = defaultdict(list)
    # Set all entity IDs to empty lists in result set to maintain the order
    for ent_id in entity_ids:
        result[ent_id] = []

    # Get the states at the start time
    time.perf_counter()
    initial_states: dict[str, Row] = {}
    if include_start_time_state:
        initial_states = {
            row.entity_id: row
            for row in _get_rows_with_session(
                hass,
                session,
                start_time,
                entity_ids,
                no_attributes=no_attributes,
            )
        }

    if len(entity_ids) == 1:
        states_iter: Iterable[tuple[str, Iterator[Row]]] = (
            (entity_ids[0], iter(states)),
        )
    else:
        key_func = attrgetter("entity_id")
        states_iter = groupby(states, key_func)

    # Append all changes to it
    for ent_id, group in states_iter:
        attr_cache: dict[str, dict[str, Any]] = {}
        prev_state: Column | str
        ent_results = result[ent_id]
        if row := initial_states.pop(ent_id, None):
            prev_state = row.state
            ent_results.append(state_class(row, attr_cache, start_time))

        if not minimal_response or split_entity_id(ent_id)[0] in NEED_ATTRIBUTE_DOMAINS:
            ent_results.extend(
                state_class(db_state, attr_cache, None) for db_state in group
            )
            continue

        # With minimal response we only provide a native
        # State for the first and last response. All the states
        # in-between only provide the "state" and the
        # "last_changed".
        if not ent_results:
            if (first_state := next(group, None)) is None:
                continue
            prev_state = first_state.state
            ent_results.append(state_class(first_state, attr_cache, None))

        state_idx = _FIELD_MAP["state"]

        #
        # minimal_response only makes sense with last_updated == last_changed
        #
        # We use last_updated for last_changed since it's the same
        #
        # With minimal response we do not care about attribute
        # changes so we can filter out duplicate states
        last_updated_ts_idx = _FIELD_MAP["last_updated_ts"]
        if compressed_state_format:
            for row in group:
                if (state := row[state_idx]) != prev_state:
                    ent_results.append(
                        {
                            attr_state: state,
                            attr_time: row[last_updated_ts_idx],
                        }
                    )
                    prev_state = state
            continue

        for row in group:
            if (state := row[state_idx]) != prev_state:
                ent_results.append(
                    {
                        attr_state: state,
                        attr_time: process_timestamp_to_utc_isoformat(
                            dt_util.utc_from_timestamp(row[last_updated_ts_idx])
                        ),
                    }
                )
                prev_state = state

    # If there are no states beyond the initial state,
    # the state was never popped from initial_states
    for ent_id, row in initial_states.items():
        result[ent_id].append(state_class(row, {}, start_time))

    # Filter out the empty lists if some states had 0 results.
    return {key: val for key, val in result.items() if val}
homeassistant/components/recorder/history/modern.py (new file, 935 lines)
@@ -0,0 +1,935 @@
"""Provide pre-made queries on top of the recorder component."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable, Iterable, Iterator
|
||||
from datetime import datetime
|
||||
from itertools import groupby
|
||||
from operator import itemgetter
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
from sqlalchemy import (
|
||||
CompoundSelect,
|
||||
Select,
|
||||
StatementLambdaElement,
|
||||
Subquery,
|
||||
and_,
|
||||
func,
|
||||
lambda_stmt,
|
||||
literal,
|
||||
select,
|
||||
union_all,
|
||||
)
|
||||
from sqlalchemy.engine.row import Row
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
from homeassistant.const import COMPRESSED_STATE_LAST_UPDATED, COMPRESSED_STATE_STATE
|
||||
from homeassistant.core import HomeAssistant, State, split_entity_id
|
||||
from homeassistant.helpers.recorder import get_instance
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.collection import chunked_or_all
|
||||
|
||||
from ..const import LAST_REPORTED_SCHEMA_VERSION, MAX_IDS_FOR_INDEXED_GROUP_BY
|
||||
from ..db_schema import (
|
||||
SHARED_ATTR_OR_LEGACY_ATTRIBUTES,
|
||||
StateAttributes,
|
||||
States,
|
||||
StatesMeta,
|
||||
)
|
||||
from ..filters import Filters
|
||||
from ..models import (
|
||||
LazyState,
|
||||
datetime_to_timestamp_or_none,
|
||||
extract_metadata_ids,
|
||||
row_to_compressed_state,
|
||||
)
|
||||
from ..util import execute_stmt_lambda_element, session_scope
|
||||
from .const import (
|
||||
LAST_CHANGED_KEY,
|
||||
NEED_ATTRIBUTE_DOMAINS,
|
||||
SIGNIFICANT_DOMAINS,
|
||||
STATE_KEY,
|
||||
)
|
||||
|
||||
_FIELD_MAP = {
|
||||
"metadata_id": 0,
|
||||
"state": 1,
|
||||
"last_updated_ts": 2,
|
||||
}
|
||||
|
||||
|
||||
def _stmt_and_join_attributes(
    no_attributes: bool,
    include_last_changed: bool,
    include_last_reported: bool,
) -> Select:
    """Return the statement and if StateAttributes should be joined."""
    _select = select(States.metadata_id, States.state, States.last_updated_ts)
    if include_last_changed:
        _select = _select.add_columns(States.last_changed_ts)
    if include_last_reported:
        _select = _select.add_columns(States.last_reported_ts)
    if not no_attributes:
        _select = _select.add_columns(SHARED_ATTR_OR_LEGACY_ATTRIBUTES)
    return _select


def _stmt_and_join_attributes_for_start_state(
    no_attributes: bool,
    include_last_changed: bool,
    include_last_reported: bool,
) -> Select:
    """Return the statement and if StateAttributes should be joined."""
    _select = select(States.metadata_id, States.state)
    _select = _select.add_columns(literal(value=0).label("last_updated_ts"))
    if include_last_changed:
        _select = _select.add_columns(literal(value=0).label("last_changed_ts"))
    if include_last_reported:
        _select = _select.add_columns(literal(value=0).label("last_reported_ts"))
    if not no_attributes:
        _select = _select.add_columns(SHARED_ATTR_OR_LEGACY_ATTRIBUTES)
    return _select


def _select_from_subquery(
    subquery: Subquery | CompoundSelect,
    no_attributes: bool,
    include_last_changed: bool,
    include_last_reported: bool,
) -> Select:
    """Return the statement to select from the union."""
    base_select = select(
        subquery.c.metadata_id,
        subquery.c.state,
        subquery.c.last_updated_ts,
    )
    if include_last_changed:
        base_select = base_select.add_columns(subquery.c.last_changed_ts)
    if include_last_reported:
        base_select = base_select.add_columns(subquery.c.last_reported_ts)
    if no_attributes:
        return base_select
    return base_select.add_columns(subquery.c.attributes)


def get_significant_states(
    hass: HomeAssistant,
    start_time: datetime,
    end_time: datetime | None = None,
    entity_ids: list[str] | None = None,
    filters: Filters | None = None,
    include_start_time_state: bool = True,
    significant_changes_only: bool = True,
    minimal_response: bool = False,
    no_attributes: bool = False,
    compressed_state_format: bool = False,
) -> dict[str, list[State | dict[str, Any]]]:
    """Wrap get_significant_states_with_session with an sql session."""
    with session_scope(hass=hass, read_only=True) as session:
        return get_significant_states_with_session(
            hass,
            session,
            start_time,
            end_time,
            entity_ids,
            filters,
            include_start_time_state,
            significant_changes_only,
            minimal_response,
            no_attributes,
            compressed_state_format,
        )


def _significant_states_stmt(
    start_time_ts: float,
    end_time_ts: float | None,
    single_metadata_id: int | None,
    metadata_ids: list[int],
    metadata_ids_in_significant_domains: list[int],
    significant_changes_only: bool,
    no_attributes: bool,
    include_start_time_state: bool,
    run_start_ts: float | None,
    slow_dependent_subquery: bool,
) -> Select | CompoundSelect:
    """Query the database for significant state changes."""
    include_last_changed = not significant_changes_only
    stmt = _stmt_and_join_attributes(no_attributes, include_last_changed, False)
    if significant_changes_only:
        # Since we are filtering on entity_id (metadata_id) we can avoid
        # the join of the states_meta table since we already know which
        # metadata_ids are in the significant domains.
        if metadata_ids_in_significant_domains:
            stmt = stmt.filter(
                States.metadata_id.in_(metadata_ids_in_significant_domains)
                | (States.last_changed_ts == States.last_updated_ts)
                | States.last_changed_ts.is_(None)
            )
        else:
            stmt = stmt.filter(
                (States.last_changed_ts == States.last_updated_ts)
                | States.last_changed_ts.is_(None)
            )
    stmt = stmt.filter(States.metadata_id.in_(metadata_ids)).filter(
        States.last_updated_ts > start_time_ts
    )
    if end_time_ts:
        stmt = stmt.filter(States.last_updated_ts < end_time_ts)
    if not no_attributes:
        stmt = stmt.outerjoin(
            StateAttributes, States.attributes_id == StateAttributes.attributes_id
        )
    if not include_start_time_state or not run_start_ts:
        return stmt.order_by(States.metadata_id, States.last_updated_ts)
    unioned_subquery = union_all(
        _select_from_subquery(
            _get_start_time_state_stmt(
                start_time_ts,
                single_metadata_id,
                metadata_ids,
                no_attributes,
                include_last_changed,
                slow_dependent_subquery,
            ).subquery(),
            no_attributes,
            include_last_changed,
            False,
        ),
        _select_from_subquery(
            stmt.subquery(), no_attributes, include_last_changed, False
        ),
    ).subquery()
    return _select_from_subquery(
        unioned_subquery,
        no_attributes,
        include_last_changed,
        False,
    ).order_by(unioned_subquery.c.metadata_id, unioned_subquery.c.last_updated_ts)

def get_significant_states_with_session(
    hass: HomeAssistant,
    session: Session,
    start_time: datetime,
    end_time: datetime | None = None,
    entity_ids: list[str] | None = None,
    filters: Filters | None = None,
    include_start_time_state: bool = True,
    significant_changes_only: bool = True,
    minimal_response: bool = False,
    no_attributes: bool = False,
    compressed_state_format: bool = False,
) -> dict[str, list[State | dict[str, Any]]]:
    """Return states changes during UTC period start_time - end_time.

    entity_ids is an optional iterable of entities to include in the results.

    filters is an optional SQLAlchemy filter which will be applied to the database
    queries unless entity_ids is given, in which case it's ignored.

    Significant states are all states where there is a state change,
    as well as all states from certain domains (for instance
    thermostat so that we get current temperature in our graphs).
    """
    if filters is not None:
        raise NotImplementedError("Filters are no longer supported")
    if not entity_ids:
        raise ValueError("entity_ids must be provided")
    entity_id_to_metadata_id: dict[str, int | None] | None = None
    metadata_ids_in_significant_domains: list[int] = []
    instance = get_instance(hass)
    if not (
        entity_id_to_metadata_id := instance.states_meta_manager.get_many(
            entity_ids, session, False
        )
    ) or not (possible_metadata_ids := extract_metadata_ids(entity_id_to_metadata_id)):
        return {}
    metadata_ids = possible_metadata_ids
    if significant_changes_only:
        metadata_ids_in_significant_domains = [
            metadata_id
            for entity_id, metadata_id in entity_id_to_metadata_id.items()
            if metadata_id is not None
            and split_entity_id(entity_id)[0] in SIGNIFICANT_DOMAINS
        ]
    oldest_ts: float | None = None
    if include_start_time_state and not (
        oldest_ts := _get_oldest_possible_ts(hass, start_time)
    ):
        include_start_time_state = False
    start_time_ts = start_time.timestamp()
    end_time_ts = datetime_to_timestamp_or_none(end_time)
    single_metadata_id = metadata_ids[0] if len(metadata_ids) == 1 else None
    rows: list[Row] = []
    if TYPE_CHECKING:
        assert instance.database_engine is not None
    slow_dependent_subquery = instance.database_engine.optimizer.slow_dependent_subquery
    if include_start_time_state and slow_dependent_subquery:
        # https://github.com/home-assistant/core/issues/137178
        # If we include the start time state we need to limit the
        # number of metadata_ids we query for at a time to avoid
        # hitting limits in the MySQL optimizer that prevent
        # the start time state query from using an index-only optimization
        # to find the start time state.
        iter_metadata_ids = chunked_or_all(metadata_ids, MAX_IDS_FOR_INDEXED_GROUP_BY)
    else:
        iter_metadata_ids = (metadata_ids,)
    for metadata_ids_chunk in iter_metadata_ids:
        stmt = _generate_significant_states_with_session_stmt(
            start_time_ts,
            end_time_ts,
            single_metadata_id,
            metadata_ids_chunk,
            metadata_ids_in_significant_domains,
            significant_changes_only,
            no_attributes,
            include_start_time_state,
            oldest_ts,
            slow_dependent_subquery,
        )
        row_chunk = cast(
            list[Row],
            execute_stmt_lambda_element(session, stmt, None, end_time, orm_rows=False),
        )
        if rows:
            rows += row_chunk
        else:
            # If we have no rows yet, we can just assign the chunk
            # as this is the common case since it's rare that
            # we exceed the MAX_IDS_FOR_INDEXED_GROUP_BY limit
            rows = row_chunk
    return _sorted_states_to_dict(
        rows,
        start_time_ts if include_start_time_state else None,
        entity_ids,
        entity_id_to_metadata_id,
        minimal_response,
        compressed_state_format,
        no_attributes=no_attributes,
    )


def _generate_significant_states_with_session_stmt(
    start_time_ts: float,
    end_time_ts: float | None,
    single_metadata_id: int | None,
    metadata_ids: list[int],
    metadata_ids_in_significant_domains: list[int],
    significant_changes_only: bool,
    no_attributes: bool,
    include_start_time_state: bool,
    oldest_ts: float | None,
    slow_dependent_subquery: bool,
) -> StatementLambdaElement:
    return lambda_stmt(
        lambda: _significant_states_stmt(
            start_time_ts,
            end_time_ts,
            single_metadata_id,
            metadata_ids,
            metadata_ids_in_significant_domains,
            significant_changes_only,
            no_attributes,
            include_start_time_state,
            oldest_ts,
            slow_dependent_subquery,
        ),
        track_on=[
            bool(single_metadata_id),
            bool(metadata_ids_in_significant_domains),
            bool(end_time_ts),
            significant_changes_only,
            no_attributes,
            include_start_time_state,
            slow_dependent_subquery,
        ],
    )

def get_full_significant_states_with_session(
    hass: HomeAssistant,
    session: Session,
    start_time: datetime,
    end_time: datetime | None = None,
    entity_ids: list[str] | None = None,
    filters: Filters | None = None,
    include_start_time_state: bool = True,
    significant_changes_only: bool = True,
    no_attributes: bool = False,
) -> dict[str, list[State]]:
    """Variant of get_significant_states_with_session.

    Difference with get_significant_states_with_session is that it does not
    return minimal responses.
    """
    return cast(
        dict[str, list[State]],
        get_significant_states_with_session(
            hass=hass,
            session=session,
            start_time=start_time,
            end_time=end_time,
            entity_ids=entity_ids,
            filters=filters,
            include_start_time_state=include_start_time_state,
            significant_changes_only=significant_changes_only,
            minimal_response=False,
            no_attributes=no_attributes,
        ),
    )


def _state_changed_during_period_stmt(
    start_time_ts: float,
    end_time_ts: float | None,
    single_metadata_id: int,
    no_attributes: bool,
    limit: int | None,
    include_start_time_state: bool,
    run_start_ts: float | None,
    include_last_reported: bool,
) -> Select | CompoundSelect:
    stmt = (
        _stmt_and_join_attributes(no_attributes, False, include_last_reported)
        .filter(
            (
                (States.last_changed_ts == States.last_updated_ts)
                | States.last_changed_ts.is_(None)
            )
            & (States.last_updated_ts > start_time_ts)
        )
        .filter(States.metadata_id == single_metadata_id)
    )
    if end_time_ts:
        stmt = stmt.filter(States.last_updated_ts < end_time_ts)
    if not no_attributes:
        stmt = stmt.outerjoin(
            StateAttributes, States.attributes_id == StateAttributes.attributes_id
        )
    if limit:
        stmt = stmt.limit(limit)
    stmt = stmt.order_by(States.metadata_id, States.last_updated_ts)
    if not include_start_time_state or not run_start_ts:
        # If we do not need the start time state or the
        # oldest possible timestamp is newer than the start time
        # we can return the statement as is as there will
        # never be a start time state.
        return stmt
    return _select_from_subquery(
        union_all(
            _select_from_subquery(
                _get_single_entity_start_time_stmt(
                    start_time_ts,
                    single_metadata_id,
                    no_attributes,
                    False,
                    include_last_reported,
                ).subquery(),
                no_attributes,
                False,
                include_last_reported,
            ),
            _select_from_subquery(
                stmt.subquery(),
                no_attributes,
                False,
                include_last_reported,
            ),
        ).subquery(),
        no_attributes,
        False,
        include_last_reported,
    )


def state_changes_during_period(
    hass: HomeAssistant,
    start_time: datetime,
    end_time: datetime | None = None,
    entity_id: str | None = None,
    no_attributes: bool = False,
    descending: bool = False,
    limit: int | None = None,
    include_start_time_state: bool = True,
) -> dict[str, list[State]]:
    """Return states changes during UTC period start_time - end_time."""
    has_last_reported = (
        get_instance(hass).schema_version >= LAST_REPORTED_SCHEMA_VERSION
    )
    if not entity_id:
        raise ValueError("entity_id must be provided")
    entity_ids = [entity_id.lower()]

    with session_scope(hass=hass, read_only=True) as session:
        instance = get_instance(hass)
        if not (
            possible_metadata_id := instance.states_meta_manager.get(
                entity_id, session, False
            )
        ):
            return {}
        single_metadata_id = possible_metadata_id
        entity_id_to_metadata_id: dict[str, int | None] = {
            entity_id: single_metadata_id
        }
        oldest_ts: float | None = None
        if include_start_time_state and not (
            oldest_ts := _get_oldest_possible_ts(hass, start_time)
        ):
            include_start_time_state = False
        start_time_ts = start_time.timestamp()
        end_time_ts = datetime_to_timestamp_or_none(end_time)
        stmt = lambda_stmt(
            lambda: _state_changed_during_period_stmt(
                start_time_ts,
                end_time_ts,
                single_metadata_id,
                no_attributes,
                limit,
                include_start_time_state,
                oldest_ts,
                has_last_reported,
            ),
            track_on=[
                bool(end_time_ts),
                no_attributes,
                bool(limit),
                include_start_time_state,
                has_last_reported,
            ],
        )
        return cast(
            dict[str, list[State]],
            _sorted_states_to_dict(
                execute_stmt_lambda_element(
                    session, stmt, None, end_time, orm_rows=False
                ),
                start_time_ts if include_start_time_state else None,
                entity_ids,
                entity_id_to_metadata_id,
                descending=descending,
                no_attributes=no_attributes,
            ),
        )


def _get_last_state_changes_single_stmt(metadata_id: int) -> Select:
    return (
        _stmt_and_join_attributes(False, False, False)
        .join(
            (
                lastest_state_for_metadata_id := (
                    select(
                        States.metadata_id.label("max_metadata_id"),
                        func.max(States.last_updated_ts).label("max_last_updated"),
                    )
                    .filter(States.metadata_id == metadata_id)
                    .group_by(States.metadata_id)
                    .subquery()
                )
            ),
            and_(
                States.metadata_id == lastest_state_for_metadata_id.c.max_metadata_id,
                States.last_updated_ts
                == lastest_state_for_metadata_id.c.max_last_updated,
            ),
        )
        .outerjoin(
            StateAttributes, States.attributes_id == StateAttributes.attributes_id
        )
        .order_by(States.state_id.desc())
    )


def _get_last_state_changes_multiple_stmt(
    number_of_states: int, metadata_id: int, include_last_reported: bool
) -> Select:
    return (
        _stmt_and_join_attributes(False, False, include_last_reported)
        .where(
            States.state_id
            == (
                select(States.state_id)
                .filter(States.metadata_id == metadata_id)
                .order_by(States.last_updated_ts.desc())
                .limit(number_of_states)
                .subquery()
            ).c.state_id
        )
        .outerjoin(
            StateAttributes, States.attributes_id == StateAttributes.attributes_id
        )
        .order_by(States.state_id.desc())
    )


def get_last_state_changes(
    hass: HomeAssistant, number_of_states: int, entity_id: str
) -> dict[str, list[State]]:
    """Return the last number_of_states."""
    has_last_reported = (
        get_instance(hass).schema_version >= LAST_REPORTED_SCHEMA_VERSION
    )
    entity_id_lower = entity_id.lower()
    entity_ids = [entity_id_lower]

    # Calling this function with number_of_states > 1 can cause instability
    # because it has to scan the table to find the last number_of_states states
    # because the metadata_id_last_updated_ts index is in ascending order.

    with session_scope(hass=hass, read_only=True) as session:
        instance = get_instance(hass)
        if not (
            possible_metadata_id := instance.states_meta_manager.get(
                entity_id, session, False
            )
        ):
            return {}
        metadata_id = possible_metadata_id
        entity_id_to_metadata_id: dict[str, int | None] = {entity_id_lower: metadata_id}
        if number_of_states == 1:
            stmt = lambda_stmt(
                lambda: _get_last_state_changes_single_stmt(metadata_id),
            )
        else:
            stmt = lambda_stmt(
                lambda: _get_last_state_changes_multiple_stmt(
                    number_of_states, metadata_id, has_last_reported
                ),
                track_on=[has_last_reported],
            )
        states = list(execute_stmt_lambda_element(session, stmt, orm_rows=False))
        return cast(
            dict[str, list[State]],
            _sorted_states_to_dict(
                reversed(states),
                None,
                entity_ids,
                entity_id_to_metadata_id,
                no_attributes=False,
            ),
        )

def _get_start_time_state_for_entities_stmt_dependent_sub_query(
    epoch_time: float,
    metadata_ids: list[int],
    no_attributes: bool,
    include_last_changed: bool,
) -> Select:
    """Baked query to get states for specific entities."""
    # Engine has a fast dependent subquery optimizer
    # This query is the result of significant research in
    # https://github.com/home-assistant/core/issues/132865
    # A reverse index scan with a limit 1 is the fastest way to get the
    # last state change before a specific point in time for all supported
    # databases. Since all databases support this query as a join
    # condition we can use it as a subquery to get the last state change
    # before a specific point in time for all entities.
    stmt = (
        _stmt_and_join_attributes_for_start_state(
            no_attributes=no_attributes,
            include_last_changed=include_last_changed,
            include_last_reported=False,
        )
        .select_from(StatesMeta)
        .join(
            States,
            and_(
                States.last_updated_ts
                == (
                    select(States.last_updated_ts)
                    .where(
                        (StatesMeta.metadata_id == States.metadata_id)
                        & (States.last_updated_ts < epoch_time)
                    )
                    .order_by(States.last_updated_ts.desc())
                    .limit(1)
                )
                .scalar_subquery()
                .correlate(StatesMeta),
                States.metadata_id == StatesMeta.metadata_id,
            ),
        )
        .where(StatesMeta.metadata_id.in_(metadata_ids))
    )
    if no_attributes:
        return stmt
    return stmt.outerjoin(
        StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
    )


def _get_start_time_state_for_entities_stmt_group_by(
    epoch_time: float,
    metadata_ids: list[int],
    no_attributes: bool,
    include_last_changed: bool,
) -> Select:
    """Baked query to get states for specific entities."""
    # Simple group-by for MySQL, must use less
    # than 1000 metadata_ids in the IN clause for MySQL
    # or it will optimize poorly. Callers are responsible
    # for ensuring that the number of metadata_ids is less
    # than 1000.
    most_recent_states_for_entities_by_date = (
        select(
            States.metadata_id.label("max_metadata_id"),
            func.max(States.last_updated_ts).label("max_last_updated"),
        )
        .filter(
            (States.last_updated_ts < epoch_time) & States.metadata_id.in_(metadata_ids)
        )
        .group_by(States.metadata_id)
        .subquery()
    )
    stmt = (
        _stmt_and_join_attributes_for_start_state(
            no_attributes=no_attributes,
            include_last_changed=include_last_changed,
            include_last_reported=False,
        )
        .join(
            most_recent_states_for_entities_by_date,
            and_(
                States.metadata_id
                == most_recent_states_for_entities_by_date.c.max_metadata_id,
                States.last_updated_ts
                == most_recent_states_for_entities_by_date.c.max_last_updated,
            ),
        )
        .filter(
            (States.last_updated_ts < epoch_time) & States.metadata_id.in_(metadata_ids)
        )
    )
    if no_attributes:
        return stmt
    return stmt.outerjoin(
        StateAttributes, (States.attributes_id == StateAttributes.attributes_id)
    )


def _get_oldest_possible_ts(
    hass: HomeAssistant, utc_point_in_time: datetime
) -> float | None:
    """Return the oldest possible timestamp.

    Returns None if there are no states as old as utc_point_in_time.
    """

    oldest_ts = get_instance(hass).states_manager.oldest_ts
    if oldest_ts is not None and oldest_ts < utc_point_in_time.timestamp():
        return oldest_ts
    return None


def _get_start_time_state_stmt(
    epoch_time: float,
    single_metadata_id: int | None,
    metadata_ids: list[int],
    no_attributes: bool,
    include_last_changed: bool,
    slow_dependent_subquery: bool,
) -> Select:
    """Return the states at a specific point in time."""
    if single_metadata_id:
        # Use an entirely different (and extremely fast) query if we only
        # have a single entity id
        return _get_single_entity_start_time_stmt(
            epoch_time,
            single_metadata_id,
            no_attributes,
            include_last_changed,
            False,
        )
    # We have more than one entity to look at so we need to do a query on states
    # since the last recorder run started.
    if slow_dependent_subquery:
        return _get_start_time_state_for_entities_stmt_group_by(
            epoch_time,
            metadata_ids,
            no_attributes,
            include_last_changed,
        )

    return _get_start_time_state_for_entities_stmt_dependent_sub_query(
        epoch_time,
        metadata_ids,
        no_attributes,
        include_last_changed,
    )


def _get_single_entity_start_time_stmt(
    epoch_time: float,
    metadata_id: int,
    no_attributes: bool,
    include_last_changed: bool,
    include_last_reported: bool,
) -> Select:
    # Use an entirely different (and extremely fast) query if we only
    # have a single entity id
    stmt = (
        _stmt_and_join_attributes_for_start_state(
            no_attributes, include_last_changed, include_last_reported
        )
        .filter(
            States.last_updated_ts < epoch_time,
            States.metadata_id == metadata_id,
        )
        .order_by(States.last_updated_ts.desc())
        .limit(1)
    )
    if no_attributes:
        return stmt
    return stmt.outerjoin(
        StateAttributes, States.attributes_id == StateAttributes.attributes_id
    )

def _sorted_states_to_dict(
    states: Iterable[Row],
    start_time_ts: float | None,
    entity_ids: list[str],
    entity_id_to_metadata_id: dict[str, int | None],
    minimal_response: bool = False,
    compressed_state_format: bool = False,
    descending: bool = False,
    no_attributes: bool = False,
) -> dict[str, list[State | dict[str, Any]]]:
    """Convert SQL results into JSON friendly data structure.

    This takes our state list and turns it into a JSON friendly data
    structure {'entity_id': [list of states], 'entity_id2': [list of states]}

    States must be sorted by entity_id and last_updated

    We also need to go back and create a synthetic zero data point for
    each list of states, otherwise our graphs won't start on the Y
    axis correctly.
    """
    field_map = _FIELD_MAP
    state_class: Callable[
        [Row, dict[str, dict[str, Any]], float | None, str, str, float | None, bool],
        State | dict[str, Any],
    ]
    if compressed_state_format:
        state_class = row_to_compressed_state
        attr_time = COMPRESSED_STATE_LAST_UPDATED
        attr_state = COMPRESSED_STATE_STATE
    else:
        state_class = LazyState
        attr_time = LAST_CHANGED_KEY
        attr_state = STATE_KEY

    # Set all entity IDs to empty lists in result set to maintain the order
    result: dict[str, list[State | dict[str, Any]]] = {
        entity_id: [] for entity_id in entity_ids
    }
    metadata_id_to_entity_id: dict[int, str] = {}
    metadata_id_to_entity_id = {
        v: k for k, v in entity_id_to_metadata_id.items() if v is not None
    }
    # Get the states at the start time
    if len(entity_ids) == 1:
        metadata_id = entity_id_to_metadata_id[entity_ids[0]]
        assert metadata_id is not None  # should not be possible if we got here
        states_iter: Iterable[tuple[int, Iterator[Row]]] = (
            (metadata_id, iter(states)),
        )
    else:
        key_func = itemgetter(field_map["metadata_id"])
        states_iter = groupby(states, key_func)

    state_idx = field_map["state"]
    last_updated_ts_idx = field_map["last_updated_ts"]

    # Append all changes to it
    for metadata_id, group in states_iter:
        entity_id = metadata_id_to_entity_id[metadata_id]
        attr_cache: dict[str, dict[str, Any]] = {}
        ent_results = result[entity_id]
        if (
            not minimal_response
            or split_entity_id(entity_id)[0] in NEED_ATTRIBUTE_DOMAINS
        ):
            ent_results.extend(
                [
                    state_class(
                        db_state,
                        attr_cache,
                        start_time_ts,
                        entity_id,
                        db_state[state_idx],
                        db_state[last_updated_ts_idx],
                        False,
                    )
                    for db_state in group
                ]
            )
            continue

        prev_state: str | None = None
        # With minimal response we only provide a native
        # State for the first and last response. All the states
        # in-between only provide the "state" and the
        # "last_changed".
        if not ent_results:
            if (first_state := next(group, None)) is None:
                continue
            prev_state = first_state[state_idx]
            ent_results.append(
                state_class(
                    first_state,
                    attr_cache,
                    start_time_ts,
                    entity_id,
                    prev_state,
                    first_state[last_updated_ts_idx],
                    no_attributes,
                )
            )

        #
        # minimal_response only makes sense with last_updated == last_changed
        #
        # We use last_updated for last_changed since it's the same
        #
        # With minimal response we do not care about attribute
        # changes so we can filter out duplicate states
        if compressed_state_format:
            # Compressed state format uses the timestamp directly
            ent_results.extend(
                [
                    {
                        attr_state: (prev_state := state),
                        attr_time: row[last_updated_ts_idx],
                    }
                    for row in group
                    if (state := row[state_idx]) != prev_state
                ]
            )
            continue

        # Non-compressed state format returns an ISO formatted string
        _utc_from_timestamp = dt_util.utc_from_timestamp
        ent_results.extend(
            [
                {
                    attr_state: (prev_state := state),
                    attr_time: _utc_from_timestamp(
                        row[last_updated_ts_idx]
                    ).isoformat(),
                }
                for row in group
                if (state := row[state_idx]) != prev_state
            ]
        )

    if descending:
        for ent_results in result.values():
            ent_results.reverse()

    # Filter out the empty lists if some states had 0 results.
    return {key: val for key, val in result.items() if val}

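For readers following the minimal_response branch above, its core is a group-then-deduplicate pass. A self-contained sketch of that idea, with toy tuples standing in for database rows:

from itertools import groupby
from operator import itemgetter

rows = [  # (metadata_id, state, last_updated_ts), sorted by id then time
    (1, "on", 100.0),
    (1, "on", 110.0),  # duplicate state, dropped by the filter
    (1, "off", 120.0),
    (2, "idle", 105.0),
]

result: dict[int, list[tuple[str, float]]] = {}
for metadata_id, group in groupby(rows, key=itemgetter(0)):
    prev_state = None
    compact = []
    for _, state, ts in group:
        if state != prev_state:
            compact.append((state, ts))
            prev_state = state
    result[metadata_id] = compact

assert result[1] == [("on", 100.0), ("off", 120.0)]
assert result[2] == [("idle", 105.0)]
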
@@ -117,10 +117,10 @@ from .util import (

if TYPE_CHECKING:
    from . import Recorder

-# Live schema migration supported starting from schema version 48 or newer
-# Schema version 47 was introduced in HA Core 2024.9
-# Schema version 48 was introduced in HA Core 2025.1
-LIVE_MIGRATION_MIN_SCHEMA_VERSION = 48
+# Live schema migration supported starting from schema version 42 or newer
+# Schema version 41 was introduced in HA Core 2023.4
+# Schema version 42 was introduced in HA Core 2023.11
+LIVE_MIGRATION_MIN_SCHEMA_VERSION = 42

MIGRATION_NOTE_OFFLINE = (
    "Note: this may take several hours on large databases and slow machines. "

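Both sides of the hunk keep the same gate and only move the threshold. A sketch of how such a minimum version is typically consumed (the helper name is illustrative, not the recorder's actual API):

LIVE_MIGRATION_MIN_SCHEMA_VERSION = 42  # value on the added side of the hunk


def live_migration_possible(current_schema_version: int) -> bool:
    """Illustrative check: older schemas must migrate offline (recorder stopped)."""
    return current_schema_version >= LIVE_MIGRATION_MIN_SCHEMA_VERSION
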
167 homeassistant/components/recorder/models/legacy.py Normal file
@@ -0,0 +1,167 @@
"""Models for Recorder."""

from __future__ import annotations

from datetime import datetime
from typing import Any

from sqlalchemy.engine.row import Row

from homeassistant.const import (
    COMPRESSED_STATE_ATTRIBUTES,
    COMPRESSED_STATE_LAST_CHANGED,
    COMPRESSED_STATE_LAST_UPDATED,
    COMPRESSED_STATE_STATE,
)
from homeassistant.core import Context, State
from homeassistant.util import dt as dt_util

from .state_attributes import decode_attributes_from_source
from .time import process_timestamp


class LegacyLazyState(State):
    """A lazy version of core State after schema 31."""

    __slots__ = [
        "_attributes",
        "_context",
        "_last_changed_ts",
        "_last_reported_ts",
        "_last_updated_ts",
        "_row",
        "attr_cache",
    ]

    def __init__(  # pylint: disable=super-init-not-called
        self,
        row: Row,
        attr_cache: dict[str, dict[str, Any]],
        start_time: datetime | None,
        entity_id: str | None = None,
    ) -> None:
        """Init the lazy state."""
        self._row = row
        self.entity_id = entity_id or self._row.entity_id
        self.state = self._row.state or ""
        self._attributes: dict[str, Any] | None = None
        self._last_updated_ts: float | None = self._row.last_updated_ts or (
            start_time.timestamp() if start_time else None
        )
        self._last_changed_ts: float | None = (
            self._row.last_changed_ts or self._last_updated_ts
        )
        self._last_reported_ts: float | None = self._last_updated_ts
        self._context: Context | None = None
        self.attr_cache = attr_cache

    @property  # type: ignore[override]
    def attributes(self) -> dict[str, Any]:
        """State attributes."""
        if self._attributes is None:
            self._attributes = decode_attributes_from_row_legacy(
                self._row, self.attr_cache
            )
        return self._attributes

    @attributes.setter
    def attributes(self, value: dict[str, Any]) -> None:
        """Set attributes."""
        self._attributes = value

    @property
    def context(self) -> Context:
        """State context."""
        if self._context is None:
            self._context = Context(id=None)
        return self._context

    @context.setter
    def context(self, value: Context) -> None:
        """Set context."""
        self._context = value

    @property
    def last_changed(self) -> datetime:
        """Last changed datetime."""
        assert self._last_changed_ts is not None
        return dt_util.utc_from_timestamp(self._last_changed_ts)

    @last_changed.setter
    def last_changed(self, value: datetime) -> None:
        """Set last changed datetime."""
        self._last_changed_ts = process_timestamp(value).timestamp()

    @property
    def last_reported(self) -> datetime:
        """Last reported datetime."""
        assert self._last_reported_ts is not None
        return dt_util.utc_from_timestamp(self._last_reported_ts)

    @last_reported.setter
    def last_reported(self, value: datetime) -> None:
        """Set last reported datetime."""
        self._last_reported_ts = process_timestamp(value).timestamp()

    @property
    def last_updated(self) -> datetime:
        """Last updated datetime."""
        assert self._last_updated_ts is not None
        return dt_util.utc_from_timestamp(self._last_updated_ts)

    @last_updated.setter
    def last_updated(self, value: datetime) -> None:
        """Set last updated datetime."""
        self._last_updated_ts = process_timestamp(value).timestamp()

    def as_dict(self) -> dict[str, Any]:  # type: ignore[override]
        """Return a dict representation of the LazyState.

        Async friendly.
        To be used for JSON serialization.
        """
        last_updated_isoformat = self.last_updated.isoformat()
        if self._last_changed_ts == self._last_updated_ts:
            last_changed_isoformat = last_updated_isoformat
        else:
            last_changed_isoformat = self.last_changed.isoformat()
        return {
            "entity_id": self.entity_id,
            "state": self.state,
            "attributes": self._attributes or self.attributes,
            "last_changed": last_changed_isoformat,
            "last_updated": last_updated_isoformat,
        }


def legacy_row_to_compressed_state(
    row: Row,
    attr_cache: dict[str, dict[str, Any]],
    start_time: datetime | None,
    entity_id: str | None = None,
) -> dict[str, Any]:
    """Convert a database row to a compressed state schema 31 and later."""
    comp_state = {
        COMPRESSED_STATE_STATE: row.state,
        COMPRESSED_STATE_ATTRIBUTES: decode_attributes_from_row_legacy(row, attr_cache),
    }
    if start_time:
        comp_state[COMPRESSED_STATE_LAST_UPDATED] = start_time.timestamp()
    else:
        row_last_updated_ts: float = row.last_updated_ts
        comp_state[COMPRESSED_STATE_LAST_UPDATED] = row_last_updated_ts
        if (
            row_last_changed_ts := row.last_changed_ts
        ) and row_last_updated_ts != row_last_changed_ts:
            comp_state[COMPRESSED_STATE_LAST_CHANGED] = row_last_changed_ts
    return comp_state


def decode_attributes_from_row_legacy(
    row: Row, attr_cache: dict[str, dict[str, Any]]
) -> dict[str, Any]:
    """Decode attributes from a database row."""
    return decode_attributes_from_source(
        getattr(row, "shared_attrs", None) or getattr(row, "attributes", None),
        attr_cache,
    )

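LegacyLazyState defers attribute parsing until first access and shares decoded dicts through attr_cache, so identical JSON blobs are parsed only once. A minimal sketch of that decode-and-cache pattern, assuming attributes arrive as a JSON string (the real decode_attributes_from_source may differ):

import json
from typing import Any


def decode_attributes_cached(
    shared_attrs: str | None, attr_cache: dict[str, dict[str, Any]]
) -> dict[str, Any]:
    """Parse a JSON attributes blob, memoizing by the raw string."""
    if not shared_attrs:
        return {}
    if (cached := attr_cache.get(shared_attrs)) is not None:
        return cached  # another row with the same JSON already paid the cost
    attr_cache[shared_attrs] = attributes = json.loads(shared_attrs)
    return attributes
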
@@ -116,7 +116,9 @@ def purge_old_data(
        # This purge cycle is finished, clean up old event types and
        # recorder runs
        _purge_old_event_types(instance, session)
-        _purge_old_entity_ids(instance, session)
+
+        if instance.states_meta_manager.active:
+            _purge_old_entity_ids(instance, session)

        _purge_old_recorder_runs(instance, session, purge_before)
    with session_scope(session=instance.get_session(), read_only=True) as session:

@@ -24,6 +24,8 @@ CACHE_SIZE = 8192
class StatesMetaManager(BaseLRUTableManager[StatesMeta]):
    """Manage the StatesMeta table."""

+    active = True
+
    def __init__(self, recorder: Recorder) -> None:
        """Initialize the states meta manager."""
        self._did_first_load = False

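The `active` class attribute added here is what the purge hunk above checks before touching StatesMeta rows. A sketch of the gate with stub classes (illustrative, not the recorder's types):

class StubStatesMetaManager:
    """Stands in for StatesMetaManager in this sketch."""

    active = True  # mirrors the class attribute added above


class StubInstance:
    states_meta_manager = StubStatesMetaManager()


def maybe_purge_old_entity_ids(instance: StubInstance) -> bool:
    """Only clean up StatesMeta rows when the manager is actually in use."""
    if not instance.states_meta_manager.active:
        return False  # legacy schema: nothing to purge
    # ... the real code would delete orphaned StatesMeta rows here ...
    return True
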
@@ -110,7 +110,9 @@ SUNDAY_WEEKDAY = 6
DAYS_IN_WEEK = 7


-def execute(qry: Query) -> list[Row]:
+def execute(
+    qry: Query, to_native: bool = False, validate_entity_ids: bool = True
+) -> list[Row]:
    """Query the database and convert the objects to HA native form.

    This method also retries a few times in the case of stale connections.
@@ -120,15 +122,33 @@ def execute(qry: Query) -> list[Row]:
    try:
        if debug:
            timer_start = time.perf_counter()
-        result = qry.all()
+
+        if to_native:
+            result = [
+                row
+                for row in (
+                    row.to_native(validate_entity_id=validate_entity_ids)
+                    for row in qry
+                )
+                if row is not None
+            ]
+        else:
+            result = qry.all()

        if debug:
            elapsed = time.perf_counter() - timer_start
-            _LOGGER.debug(
-                "querying %d rows took %fs",
-                len(result),
-                elapsed,
-            )
+            if to_native:
+                _LOGGER.debug(
+                    "converting %d rows to native objects took %fs",
+                    len(result),
+                    elapsed,
+                )
+            else:
+                _LOGGER.debug(
+                    "querying %d rows took %fs",
+                    len(result),
+                    elapsed,
+                )

    except SQLAlchemyError as err:
        _LOGGER.error("Error executing query: %s", err)

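The docstring notes that execute() retries a few times on stale connections. A minimal sketch of that retry loop; RETRIES and the backoff interval are illustrative, not the helper's real constants:

import time

from sqlalchemy.exc import SQLAlchemyError

RETRIES = 3  # illustrative


def run_with_retries(qry):
    """Run qry.all(), retrying briefly when the connection has gone stale."""
    for attempt in range(RETRIES):
        try:
            return qry.all()
        except SQLAlchemyError:
            if attempt == RETRIES - 1:
                raise
            time.sleep(0.02 * (attempt + 1))  # short backoff before retrying
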
@@ -25,7 +25,7 @@ from homeassistant.helpers import (
    device_registry as dr,
    entity_registry as er,
)
-from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
+from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, format_mac
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.typing import ConfigType
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
@@ -497,6 +497,16 @@ def migrate_entity_ids(
    entity_reg = er.async_get(hass)
    entities = er.async_entries_for_config_entry(entity_reg, config_entry_id)
    for entity in entities:
+        # Can be removed in HA 2025.1.0
+        if entity.domain == "update" and entity.unique_id in [
+            host.unique_id,
+            format_mac(host.api.mac_address),
+        ]:
+            entity_reg.async_update_entity(
+                entity.entity_id, new_unique_id=f"{host.unique_id}_firmware"
+            )
+            continue
+
        if host.api.supported(None, "UID") and not entity.unique_id.startswith(
            host.unique_id
        ):

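The added block rewrites legacy update-entity unique IDs to a suffixed form. The mapping rule, reduced to a standalone function with hypothetical values:

def migrated_unique_id(
    unique_id: str, host_unique_id: str, host_mac: str
) -> str | None:
    """Return the new unique_id for a legacy firmware update entity, else None."""
    if unique_id in (host_unique_id, host_mac):
        return f"{host_unique_id}_firmware"
    return None


assert migrated_unique_id("host123", "host123", "aa:bb:cc") == "host123_firmware"
assert migrated_unique_id("host123_firmware", "host123", "aa:bb:cc") is None
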
@@ -252,7 +252,7 @@ class ReolinkHostChimeCoordinatorEntity(ReolinkHostCoordinatorEntity):
        chime: Chime,
        coordinator: DataUpdateCoordinator[None] | None = None,
    ) -> None:
-        """Initialize ReolinkHostChimeCoordinatorEntity for a chime."""
+        """Initialize ReolinkChimeCoordinatorEntity for a chime."""
        super().__init__(reolink_data, coordinator)
        self._channel = chime.channel
        self._chime = chime

@@ -19,5 +19,5 @@
  "iot_class": "local_push",
  "loggers": ["reolink_aio"],
  "quality_scale": "platinum",
-  "requirements": ["reolink-aio==0.16.1"]
+  "requirements": ["reolink-aio==0.16.0"]
}

@@ -284,7 +284,7 @@ class ReolinkHostSensorEntity(ReolinkHostCoordinatorEntity, SensorEntity):


class ReolinkHddSensorEntity(ReolinkHostCoordinatorEntity, SensorEntity):
-    """Base sensor class for Reolink storage device sensors."""
+    """Base sensor class for Reolink host sensors."""

    entity_description: ReolinkSensorEntityDescription

@@ -294,7 +294,7 @@ class ReolinkHddSensorEntity(ReolinkHostCoordinatorEntity, SensorEntity):
        hdd_index: int,
        entity_description: ReolinkSensorEntityDescription,
    ) -> None:
-        """Initialize Reolink storage device sensor."""
+        """Initialize Reolink host sensor."""
        self.entity_description = entity_description
        super().__init__(reolink_data)
        self._hdd_index = hdd_index

@@ -132,6 +132,10 @@
      "title": "Reolink firmware update required",
      "description": "\"{name}\" with model \"{model}\" and hardware version \"{hw_version}\" is running an old firmware version \"{current_firmware}\", while at least firmware version \"{required_firmware}\" is required for proper operation of the Reolink integration. The firmware can be updated by pressing \"install\" in the more info dialog of the update entity of \"{name}\" from within Home Assistant. Alternatively, the latest firmware can be downloaded from the [Reolink download center]({download_link})."
    },
+    "hub_switch_deprecated": {
+      "title": "Reolink Home Hub switches deprecated",
+      "description": "The redundant 'Record', 'Email on event', 'FTP upload', 'Push notifications', and 'Buzzer on event' switches on the Reolink Home Hub are deprecated since the new firmware no longer supports these. Please use the equally named switches under each of the camera devices connected to the Home Hub instead. To remove this issue, please adjust automations accordingly and disable the switch entities mentioned."
+    },
    "password_too_long": {
      "title": "Reolink password too long",
      "description": "The password for \"{name}\" is more than 31 characters long; this is no longer compatible with the Reolink API. Please change the password using the Reolink app/client to a password which is shorter than 32 characters. After changing the password, fill in the new password in the Reolink Re-authentication flow to continue using this integration. The latest version of the Reolink app/client also has a password limit of 31 characters."

@@ -11,8 +11,10 @@ from reolink_aio.api import Chime, Host
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er, issue_registry as ir
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

+from .const import DOMAIN
from .entity import (
    ReolinkChannelCoordinatorEntity,
    ReolinkChannelEntityDescription,
@@ -39,11 +41,11 @@ class ReolinkSwitchEntityDescription(


@dataclass(frozen=True, kw_only=True)
-class ReolinkHostSwitchEntityDescription(
+class ReolinkNVRSwitchEntityDescription(
    SwitchEntityDescription,
    ReolinkHostEntityDescription,
):
-    """A class that describes host switch entities."""
+    """A class that describes NVR switch entities."""

    method: Callable[[Host, bool], Any]
    value: Callable[[Host], bool]
@@ -154,7 +156,7 @@ SWITCH_ENTITIES = (
        cmd_key="GetRec",
        translation_key="record",
        entity_category=EntityCategory.CONFIG,
-        supported=lambda api, ch: api.supported(ch, "rec_enable") and api.is_nvr,
+        supported=lambda api, ch: api.supported(ch, "recording") and api.is_nvr,
        value=lambda api, ch: api.recording_enabled(ch),
        method=lambda api, ch, value: api.set_recording(ch, value),
    ),
@@ -245,8 +247,8 @@ SWITCH_ENTITIES = (
    ),
)

-HOST_SWITCH_ENTITIES = (
-    ReolinkHostSwitchEntityDescription(
+NVR_SWITCH_ENTITIES = (
+    ReolinkNVRSwitchEntityDescription(
        key="email",
        cmd_key="GetEmail",
        translation_key="email",
@@ -255,7 +257,7 @@ HOST_SWITCH_ENTITIES = (
        value=lambda api: api.email_enabled(),
        method=lambda api, value: api.set_email(None, value),
    ),
-    ReolinkHostSwitchEntityDescription(
+    ReolinkNVRSwitchEntityDescription(
        key="ftp_upload",
        cmd_key="GetFtp",
        translation_key="ftp_upload",
@@ -264,7 +266,7 @@ HOST_SWITCH_ENTITIES = (
        value=lambda api: api.ftp_enabled(),
        method=lambda api, value: api.set_ftp(None, value),
    ),
-    ReolinkHostSwitchEntityDescription(
+    ReolinkNVRSwitchEntityDescription(
        key="push_notifications",
        cmd_key="GetPush",
        translation_key="push_notifications",
@@ -273,16 +275,16 @@ HOST_SWITCH_ENTITIES = (
        value=lambda api: api.push_enabled(),
        method=lambda api, value: api.set_push(None, value),
    ),
-    ReolinkHostSwitchEntityDescription(
+    ReolinkNVRSwitchEntityDescription(
        key="record",
        cmd_key="GetRec",
        translation_key="record",
        entity_category=EntityCategory.CONFIG,
-        supported=lambda api: api.supported(None, "rec_enable") and not api.is_hub,
+        supported=lambda api: api.supported(None, "recording") and not api.is_hub,
        value=lambda api: api.recording_enabled(),
        method=lambda api, value: api.set_recording(None, value),
    ),
-    ReolinkHostSwitchEntityDescription(
+    ReolinkNVRSwitchEntityDescription(
        key="buzzer",
        cmd_key="GetBuzzerAlarmV20",
        translation_key="hub_ringtone_on_event",
@@ -304,6 +306,56 @@ CHIME_SWITCH_ENTITIES = (
    ),
)

+# Can be removed in HA 2025.4.0
+DEPRECATED_NVR_SWITCHES = [
+    ReolinkNVRSwitchEntityDescription(
+        key="email",
+        cmd_key="GetEmail",
+        translation_key="email",
+        entity_category=EntityCategory.CONFIG,
+        supported=lambda api: api.is_hub,
+        value=lambda api: api.email_enabled(),
+        method=lambda api, value: api.set_email(None, value),
+    ),
+    ReolinkNVRSwitchEntityDescription(
+        key="ftp_upload",
+        cmd_key="GetFtp",
+        translation_key="ftp_upload",
+        entity_category=EntityCategory.CONFIG,
+        supported=lambda api: api.is_hub,
+        value=lambda api: api.ftp_enabled(),
+        method=lambda api, value: api.set_ftp(None, value),
+    ),
+    ReolinkNVRSwitchEntityDescription(
+        key="push_notifications",
+        cmd_key="GetPush",
+        translation_key="push_notifications",
+        entity_category=EntityCategory.CONFIG,
+        supported=lambda api: api.is_hub,
+        value=lambda api: api.push_enabled(),
+        method=lambda api, value: api.set_push(None, value),
+    ),
+    ReolinkNVRSwitchEntityDescription(
+        key="record",
+        cmd_key="GetRec",
+        translation_key="record",
+        entity_category=EntityCategory.CONFIG,
+        supported=lambda api: api.is_hub,
+        value=lambda api: api.recording_enabled(),
+        method=lambda api, value: api.set_recording(None, value),
+    ),
+    ReolinkNVRSwitchEntityDescription(
+        key="buzzer",
+        cmd_key="GetBuzzerAlarmV20",
+        translation_key="hub_ringtone_on_event",
+        icon="mdi:room-service",
+        entity_category=EntityCategory.CONFIG,
+        supported=lambda api: api.is_hub,
+        value=lambda api: api.buzzer_enabled(),
+        method=lambda api, value: api.set_buzzer(None, value),
+    ),
+]
+

async def async_setup_entry(
    hass: HomeAssistant,
@@ -320,8 +372,8 @@ async def async_setup_entry(
        if entity_description.supported(reolink_data.host.api, channel)
    ]
    entities.extend(
-        ReolinkHostSwitchEntity(reolink_data, entity_description)
-        for entity_description in HOST_SWITCH_ENTITIES
+        ReolinkNVRSwitchEntity(reolink_data, entity_description)
+        for entity_description in NVR_SWITCH_ENTITIES
        if entity_description.supported(reolink_data.host.api)
    )
    entities.extend(
@@ -337,6 +389,34 @@ async def async_setup_entry(
        if chime.channel is None
    )

+    # Can be removed in HA 2025.4.0
+    deprecated_dict = {}
+    for desc in DEPRECATED_NVR_SWITCHES:
+        if not desc.supported(reolink_data.host.api):
+            continue
+        deprecated_dict[f"{reolink_data.host.unique_id}_{desc.key}"] = desc
+
+    entity_reg = er.async_get(hass)
+    reg_entities = er.async_entries_for_config_entry(entity_reg, config_entry.entry_id)
+    for entity in reg_entities:
+        # Can be removed in HA 2025.4.0
+        if entity.domain == "switch" and entity.unique_id in deprecated_dict:
+            if entity.disabled:
+                entity_reg.async_remove(entity.entity_id)
+                continue
+
+            ir.async_create_issue(
+                hass,
+                DOMAIN,
+                "hub_switch_deprecated",
+                is_fixable=False,
+                severity=ir.IssueSeverity.WARNING,
+                translation_key="hub_switch_deprecated",
+            )
+            entities.append(
+                ReolinkNVRSwitchEntity(reolink_data, deprecated_dict[entity.unique_id])
+            )
+
    async_add_entities(entities)

@@ -373,15 +453,15 @@ class ReolinkSwitchEntity(ReolinkChannelCoordinatorEntity, SwitchEntity):
        self.async_write_ha_state()


-class ReolinkHostSwitchEntity(ReolinkHostCoordinatorEntity, SwitchEntity):
-    """Switch entity class for Reolink host features."""
+class ReolinkNVRSwitchEntity(ReolinkHostCoordinatorEntity, SwitchEntity):
+    """Switch entity class for Reolink NVR features."""

-    entity_description: ReolinkHostSwitchEntityDescription
+    entity_description: ReolinkNVRSwitchEntityDescription

    def __init__(
        self,
        reolink_data: ReolinkData,
-        entity_description: ReolinkHostSwitchEntityDescription,
+        entity_description: ReolinkNVRSwitchEntityDescription,
    ) -> None:
        """Initialize Reolink switch entity."""
        self.entity_description = entity_description

@@ -19,7 +19,7 @@
  "loggers": ["roborock"],
  "quality_scale": "silver",
  "requirements": [
-    "python-roborock==2.49.1",
+    "python-roborock==2.47.1",
    "vacuum-map-parser-roborock==0.1.4"
  ]
}

Some files were not shown because too many files have changed in this diff.