commit 0d31d94532
Merge pull request #73136 from home-assistant/rc
@@ -2,6 +2,7 @@
 from __future__ import annotations
 
 import asyncio
+from enum import Enum
 import logging
 import re
 from types import MappingProxyType
@@ -481,7 +482,10 @@ class ElkEntity(Entity):
     @property
     def extra_state_attributes(self) -> dict[str, Any]:
         """Return the default attributes of the element."""
-        return {**self._element.as_dict(), **self.initial_attrs()}
+        dict_as_str = {}
+        for key, val in self._element.as_dict().items():
+            dict_as_str[key] = val.value if isinstance(val, Enum) else val
+        return {**dict_as_str, **self.initial_attrs()}
 
     @property
     def available(self) -> bool:
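Aside: the rewritten extra_state_attributes above unwraps Enum members into their underlying values so the exposed attributes stay JSON-serializable. A minimal standalone sketch of the same logic (the Armed enum here is a hypothetical stand-in for the elkm1 library's enum types):

from enum import Enum
from typing import Any


class Armed(Enum):  # hypothetical stand-in for an elkm1 enum
    DISARMED = 0
    AWAY = 1


def as_attributes(element_dict: dict[str, Any]) -> dict[str, Any]:
    # Same unwrapping as the new extra_state_attributes body above.
    return {k: v.value if isinstance(v, Enum) else v for k, v in element_dict.items()}


print(as_attributes({"armed_status": Armed.AWAY, "name": "Front Door"}))
# -> {'armed_status': 1, 'name': 'Front Door'}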
@@ -46,6 +46,8 @@ class FibaroCover(FibaroDevice, CoverEntity):
         self._attr_supported_features = (
             CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
         )
+        if "stop" in self.fibaro_device.actions:
+            self._attr_supported_features |= CoverEntityFeature.STOP
 
     @staticmethod
     def bound(position):
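Aside: CoverEntityFeature is a bit-flag enum, so optional capabilities accumulate with bitwise OR, as the hunk above does for STOP. A small sketch (the device_actions list is made up for illustration):

from homeassistant.components.cover import CoverEntityFeature

features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE
device_actions = ["open", "close", "stop"]  # hypothetical Fibaro action list
if "stop" in device_actions:
    features |= CoverEntityFeature.STOP
assert features & CoverEntityFeature.STOP  # the STOP bit is now set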
@@ -2,7 +2,7 @@
   "domain": "ialarm_xr",
   "name": "Antifurto365 iAlarmXR",
   "documentation": "https://www.home-assistant.io/integrations/ialarm_xr",
-  "requirements": ["pyialarmxr==1.0.18"],
+  "requirements": ["pyialarmxr-homeassistant==1.0.18"],
   "codeowners": ["@bigmoby"],
   "config_flow": true,
   "iot_class": "cloud_polling",
@@ -636,11 +636,6 @@ class KodiEntity(MediaPlayerEntity):
 
         return None
 
-    @property
-    def available(self):
-        """Return True if entity is available."""
-        return not self._connect_error
-
     async def async_turn_on(self):
         """Turn the media player on."""
         _LOGGER.debug("Firing event to turn on device")
@@ -2,9 +2,18 @@
 from __future__ import annotations
 
 from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED
+from homeassistant.components.counter import DOMAIN as COUNTER_DOMAIN
+from homeassistant.components.proximity import DOMAIN as PROXIMITY_DOMAIN
 from homeassistant.components.script import EVENT_SCRIPT_STARTED
+from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
 from homeassistant.const import EVENT_CALL_SERVICE, EVENT_LOGBOOK_ENTRY
 
+# Domains that are always continuous
+ALWAYS_CONTINUOUS_DOMAINS = {COUNTER_DOMAIN, PROXIMITY_DOMAIN}
+
+# Domains that are continuous if there is a UOM set on the entity
+CONDITIONALLY_CONTINUOUS_DOMAINS = {SENSOR_DOMAIN}
+
 ATTR_MESSAGE = "message"
 
 DOMAIN = "logbook"
@@ -30,13 +39,11 @@ LOGBOOK_ENTRY_NAME = "name"
 LOGBOOK_ENTRY_STATE = "state"
 LOGBOOK_ENTRY_WHEN = "when"
 
-ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED = {EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE}
-ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY = {
-    EVENT_LOGBOOK_ENTRY,
-    EVENT_AUTOMATION_TRIGGERED,
-    EVENT_SCRIPT_STARTED,
-}
+# Automation events that can affect an entity_id or device_id
+AUTOMATION_EVENTS = {EVENT_AUTOMATION_TRIGGERED, EVENT_SCRIPT_STARTED}
 
+# Events that are built-in to the logbook or core
+BUILT_IN_EVENTS = {EVENT_LOGBOOK_ENTRY, EVENT_CALL_SERVICE}
 
 LOGBOOK_FILTERS = "logbook_filters"
 LOGBOOK_ENTITIES_FILTER = "entities_filter"
@@ -7,6 +7,7 @@ from typing import Any
 from homeassistant.components.sensor import ATTR_STATE_CLASS
 from homeassistant.const import (
     ATTR_DEVICE_ID,
+    ATTR_DOMAIN,
     ATTR_ENTITY_ID,
     ATTR_UNIT_OF_MEASUREMENT,
     EVENT_LOGBOOK_ENTRY,
@@ -19,15 +20,13 @@ from homeassistant.core import (
     State,
     callback,
     is_callback,
+    split_entity_id,
 )
 from homeassistant.helpers import device_registry as dr, entity_registry as er
+from homeassistant.helpers.entityfilter import EntityFilter
 from homeassistant.helpers.event import async_track_state_change_event
 
-from .const import (
-    ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED,
-    DOMAIN,
-    ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY,
-)
+from .const import ALWAYS_CONTINUOUS_DOMAINS, AUTOMATION_EVENTS, BUILT_IN_EVENTS, DOMAIN
 from .models import LazyEventPartialState
 
 
@@ -41,6 +40,25 @@ def async_filter_entities(hass: HomeAssistant, entity_ids: list[str]) -> list[str]:
     ]
 
 
+@callback
+def _async_config_entries_for_ids(
+    hass: HomeAssistant, entity_ids: list[str] | None, device_ids: list[str] | None
+) -> set[str]:
+    """Find the config entry ids for a set of entities or devices."""
+    config_entry_ids: set[str] = set()
+    if entity_ids:
+        eng_reg = er.async_get(hass)
+        for entity_id in entity_ids:
+            if (entry := eng_reg.async_get(entity_id)) and entry.config_entry_id:
+                config_entry_ids.add(entry.config_entry_id)
+    if device_ids:
+        dev_reg = dr.async_get(hass)
+        for device_id in device_ids:
+            if (device := dev_reg.async_get(device_id)) and device.config_entries:
+                config_entry_ids |= device.config_entries
+    return config_entry_ids
+
+
 def async_determine_event_types(
     hass: HomeAssistant, entity_ids: list[str] | None, device_ids: list[str] | None
 ) -> tuple[str, ...]:
@@ -49,42 +67,91 @@ def async_determine_event_types(
         str, tuple[str, Callable[[LazyEventPartialState], dict[str, Any]]]
     ] = hass.data.get(DOMAIN, {})
     if not entity_ids and not device_ids:
-        return (*ALL_EVENT_TYPES_EXCEPT_STATE_CHANGED, *external_events)
-    config_entry_ids: set[str] = set()
-    intrested_event_types: set[str] = set()
+        return (*BUILT_IN_EVENTS, *external_events)
 
+    interested_domains: set[str] = set()
+    for entry_id in _async_config_entries_for_ids(hass, entity_ids, device_ids):
+        if entry := hass.config_entries.async_get_entry(entry_id):
+            interested_domains.add(entry.domain)
+
+    #
+    # automations and scripts can refer to entities or devices
+    # but they do not have a config entry so we need
+    # to add them since we have historically included
+    # them when matching only on entities
+    #
+    intrested_event_types: set[str] = {
+        external_event
+        for external_event, domain_call in external_events.items()
+        if domain_call[0] in interested_domains
+    } | AUTOMATION_EVENTS
     if entity_ids:
-        #
-        # Home Assistant doesn't allow firing events from
-        # entities so we have a limited list to check
-        #
-        # automations and scripts can refer to entities
-        # but they do not have a config entry so we need
-        # to add them.
-        #
-        # We also allow entity_ids to be recorded via
-        # manual logbook entries.
-        #
-        intrested_event_types |= ENTITY_EVENTS_WITHOUT_CONFIG_ENTRY
+        # We also allow entity_ids to be recorded via manual logbook entries.
+        intrested_event_types.add(EVENT_LOGBOOK_ENTRY)
 
-    if device_ids:
-        dev_reg = dr.async_get(hass)
-        for device_id in device_ids:
-            if (device := dev_reg.async_get(device_id)) and device.config_entries:
-                config_entry_ids |= device.config_entries
-        interested_domains: set[str] = set()
-        for entry_id in config_entry_ids:
-            if entry := hass.config_entries.async_get_entry(entry_id):
-                interested_domains.add(entry.domain)
-        for external_event, domain_call in external_events.items():
-            if domain_call[0] in interested_domains:
-                intrested_event_types.add(external_event)
-
-    return tuple(
-        event_type
-        for event_type in (EVENT_LOGBOOK_ENTRY, *external_events)
-        if event_type in intrested_event_types
-    )
+    return tuple(intrested_event_types)
+
+
+@callback
+def extract_attr(source: dict[str, Any], attr: str) -> list[str]:
+    """Extract an attribute as a list or string."""
+    if (value := source.get(attr)) is None:
+        return []
+    if isinstance(value, list):
+        return value
+    return str(value).split(",")
+
+
+@callback
+def event_forwarder_filtered(
+    target: Callable[[Event], None],
+    entities_filter: EntityFilter | None,
+    entity_ids: list[str] | None,
+    device_ids: list[str] | None,
+) -> Callable[[Event], None]:
+    """Make a callable to filter events."""
+    if not entities_filter and not entity_ids and not device_ids:
+        # No filter
+        # - Script Trace (context ids)
+        # - Automation Trace (context ids)
+        return target
+
+    if entities_filter:
+        # We have an entity filter:
+        # - Logbook panel
+
+        @callback
+        def _forward_events_filtered_by_entities_filter(event: Event) -> None:
+            assert entities_filter is not None
+            event_data = event.data
+            entity_ids = extract_attr(event_data, ATTR_ENTITY_ID)
+            if entity_ids and not any(
+                entities_filter(entity_id) for entity_id in entity_ids
+            ):
+                return
+            domain = event_data.get(ATTR_DOMAIN)
+            if domain and not entities_filter(f"{domain}._"):
+                return
+            target(event)
+
+        return _forward_events_filtered_by_entities_filter
+
+    # We are filtering on entity_ids and/or device_ids:
+    # - Areas
+    # - Devices
+    # - Logbook Card
+    entity_ids_set = set(entity_ids) if entity_ids else set()
+    device_ids_set = set(device_ids) if device_ids else set()
+
+    @callback
+    def _forward_events_filtered_by_device_entity_ids(event: Event) -> None:
+        event_data = event.data
+        if entity_ids_set.intersection(
+            extract_attr(event_data, ATTR_ENTITY_ID)
+        ) or device_ids_set.intersection(extract_attr(event_data, ATTR_DEVICE_ID)):
+            target(event)
+
+    return _forward_events_filtered_by_device_entity_ids
+
+
 @callback
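Aside: the new extract_attr helper normalizes an event attribute that may arrive as a list or as a comma-separated string, which is what lets the forwarders above intersect sets safely. Expected behavior, per the implementation:

extract_attr({"entity_id": ["light.a", "light.b"]}, "entity_id")  # ['light.a', 'light.b']
extract_attr({"entity_id": "light.a,light.b"}, "entity_id")       # ['light.a', 'light.b']
extract_attr({}, "entity_id")                                     # []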
@@ -93,6 +160,7 @@ def async_subscribe_events(
     subscriptions: list[CALLBACK_TYPE],
     target: Callable[[Event], None],
     event_types: tuple[str, ...],
+    entities_filter: EntityFilter | None,
     entity_ids: list[str] | None,
     device_ids: list[str] | None,
 ) -> None:
@@ -103,41 +171,31 @@ def async_subscribe_events(
     """
     ent_reg = er.async_get(hass)
     assert is_callback(target), "target must be a callback"
-    event_forwarder = target
-
-    if entity_ids or device_ids:
-        entity_ids_set = set(entity_ids) if entity_ids else set()
-        device_ids_set = set(device_ids) if device_ids else set()
-
-        @callback
-        def _forward_events_filtered(event: Event) -> None:
-            event_data = event.data
-            if (
-                entity_ids_set and event_data.get(ATTR_ENTITY_ID) in entity_ids_set
-            ) or (device_ids_set and event_data.get(ATTR_DEVICE_ID) in device_ids_set):
-                target(event)
-
-        event_forwarder = _forward_events_filtered
-
+    event_forwarder = event_forwarder_filtered(
+        target, entities_filter, entity_ids, device_ids
+    )
     for event_type in event_types:
         subscriptions.append(
             hass.bus.async_listen(event_type, event_forwarder, run_immediately=True)
         )
 
-    @callback
-    def _forward_state_events_filtered(event: Event) -> None:
-        if event.data.get("old_state") is None or event.data.get("new_state") is None:
-            return
-        state: State = event.data["new_state"]
-        if not _is_state_filtered(ent_reg, state):
-            target(event)
-
     if device_ids and not entity_ids:
         # No entities to subscribe to but we are filtering
         # on device ids so we do not want to get any state
         # changed events
         return
 
+    @callback
+    def _forward_state_events_filtered(event: Event) -> None:
+        if event.data.get("old_state") is None or event.data.get("new_state") is None:
+            return
+        state: State = event.data["new_state"]
+        if _is_state_filtered(ent_reg, state) or (
+            entities_filter and not entities_filter(state.entity_id)
+        ):
+            return
+        target(event)
+
     if entity_ids:
         subscriptions.append(
             async_track_state_change_event(
@@ -178,7 +236,8 @@ def _is_state_filtered(ent_reg: er.EntityRegistry, state: State) -> bool:
     we only get significant changes (state.last_changed != state.last_updated)
     """
     return bool(
-        state.last_changed != state.last_updated
+        split_entity_id(state.entity_id)[0] in ALWAYS_CONTINUOUS_DOMAINS
+        or state.last_changed != state.last_updated
         or ATTR_UNIT_OF_MEASUREMENT in state.attributes
        or is_sensor_continuous(ent_reg, state.entity_id)
    )
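Aside: split_entity_id from homeassistant.core splits "domain.object_id" into a tuple, so index [0] is the domain; that is how counter and proximity states are now always classified as continuous regardless of their attributes:

from homeassistant.core import split_entity_id

split_entity_id("counter.coffees_today")  # -> ('counter', 'coffees_today')
split_entity_id("counter.coffees_today")[0] in {"counter", "proximity"}  # -> True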
@@ -193,7 +252,8 @@ def _is_entity_id_filtered(
     from the database when a list of entities is requested.
     """
     return bool(
-        (state := hass.states.get(entity_id))
+        split_entity_id(entity_id)[0] in ALWAYS_CONTINUOUS_DOMAINS
+        or (state := hass.states.get(entity_id))
         and (ATTR_UNIT_OF_MEASUREMENT in state.attributes)
         or is_sensor_continuous(ent_reg, entity_id)
     )
@@ -5,8 +5,6 @@ from collections.abc import Callable, Generator
 from contextlib import suppress
 from dataclasses import dataclass
 from datetime import datetime as dt
-import logging
-import re
 from typing import Any
 
 from sqlalchemy.engine.row import Row
@@ -30,7 +28,6 @@ from homeassistant.const import (
 )
 from homeassistant.core import HomeAssistant, split_entity_id
 from homeassistant.helpers import entity_registry as er
-from homeassistant.helpers.entityfilter import EntityFilter
 import homeassistant.util.dt as dt_util
 
 from .const import (
@@ -46,7 +43,6 @@ from .const import (
     CONTEXT_STATE,
     CONTEXT_USER_ID,
     DOMAIN,
-    LOGBOOK_ENTITIES_FILTER,
     LOGBOOK_ENTRY_DOMAIN,
     LOGBOOK_ENTRY_ENTITY_ID,
     LOGBOOK_ENTRY_ICON,
@@ -62,11 +58,6 @@ from .models import EventAsRow, LazyEventPartialState, async_event_to_row
 from .queries import statement_for_request
 from .queries.common import PSUEDO_EVENT_STATE_CHANGED
 
-_LOGGER = logging.getLogger(__name__)
-
-ENTITY_ID_JSON_EXTRACT = re.compile('"entity_id": ?"([^"]+)"')
-DOMAIN_JSON_EXTRACT = re.compile('"domain": ?"([^"]+)"')
-
 
 @dataclass
 class LogbookRun:
@@ -106,10 +97,6 @@ class EventProcessor:
         self.device_ids = device_ids
         self.context_id = context_id
         self.filters: Filters | None = hass.data[LOGBOOK_FILTERS]
-        if self.limited_select:
-            self.entities_filter: EntityFilter | Callable[[str], bool] | None = None
-        else:
-            self.entities_filter = hass.data[LOGBOOK_ENTITIES_FILTER]
         format_time = (
             _row_time_fired_timestamp if timestamp else _row_time_fired_isoformat
         )
@@ -183,7 +170,6 @@ class EventProcessor:
         return list(
             _humanify(
                 row_generator,
-                self.entities_filter,
                 self.ent_reg,
                 self.logbook_run,
                 self.context_augmenter,
@@ -193,7 +179,6 @@
 
 def _humanify(
     rows: Generator[Row | EventAsRow, None, None],
-    entities_filter: EntityFilter | Callable[[str], bool] | None,
     ent_reg: er.EntityRegistry,
     logbook_run: LogbookRun,
     context_augmenter: ContextAugmenter,
@@ -208,29 +193,13 @@ def _humanify(
     include_entity_name = logbook_run.include_entity_name
     format_time = logbook_run.format_time
 
-    def _keep_row(row: EventAsRow) -> bool:
-        """Check if the entity_filter rejects a row."""
-        assert entities_filter is not None
-        if entity_id := row.entity_id:
-            return entities_filter(entity_id)
-        if entity_id := row.data.get(ATTR_ENTITY_ID):
-            return entities_filter(entity_id)
-        if domain := row.data.get(ATTR_DOMAIN):
-            return entities_filter(f"{domain}._")
-        return True
-
     # Process rows
     for row in rows:
         context_id = context_lookup.memorize(row)
         if row.context_only:
             continue
         event_type = row.event_type
-        if event_type == EVENT_CALL_SERVICE or (
-            entities_filter
-            # We literally mean is EventAsRow not a subclass of EventAsRow
-            and type(row) is EventAsRow  # pylint: disable=unidiomatic-typecheck
-            and not _keep_row(row)
-        ):
+        if event_type == EVENT_CALL_SERVICE:
             continue
         if event_type is PSUEDO_EVENT_STATE_CHANGED:
             entity_id = row.entity_id
@@ -417,12 +386,6 @@ def _rows_match(row: Row | EventAsRow, other_row: Row | EventAsRow) -> bool:
     return False
 
 
-def _row_event_data_extract(row: Row | EventAsRow, extractor: re.Pattern) -> str | None:
-    """Extract from event_data row."""
-    result = extractor.search(row.shared_data or row.event_data or "")
-    return result.group(1) if result else None
-
-
 def _row_time_fired_isoformat(row: Row | EventAsRow) -> str:
     """Convert the row timed_fired to isoformat."""
     return process_timestamp_to_utc_isoformat(row.time_fired or dt_util.utcnow())
@@ -10,7 +10,7 @@ from sqlalchemy.sql.elements import ClauseList
 from sqlalchemy.sql.expression import literal
 from sqlalchemy.sql.selectable import Select
 
-from homeassistant.components.proximity import DOMAIN as PROXIMITY_DOMAIN
+from homeassistant.components.recorder.filters import like_domain_matchers
 from homeassistant.components.recorder.models import (
     EVENTS_CONTEXT_ID_INDEX,
     OLD_FORMAT_ATTRS_JSON,
@@ -22,15 +22,19 @@ from homeassistant.components.recorder.models import (
     StateAttributes,
     States,
 )
-from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
 
-CONTINUOUS_DOMAINS = {PROXIMITY_DOMAIN, SENSOR_DOMAIN}
-CONTINUOUS_ENTITY_ID_LIKE = [f"{domain}.%" for domain in CONTINUOUS_DOMAINS]
+from ..const import ALWAYS_CONTINUOUS_DOMAINS, CONDITIONALLY_CONTINUOUS_DOMAINS
+
+# Domains that are continuous if there is a UOM set on the entity
+CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE = like_domain_matchers(
+    CONDITIONALLY_CONTINUOUS_DOMAINS
+)
+# Domains that are always continuous
+ALWAYS_CONTINUOUS_ENTITY_ID_LIKE = like_domain_matchers(ALWAYS_CONTINUOUS_DOMAINS)
 
 UNIT_OF_MEASUREMENT_JSON = '"unit_of_measurement":'
 UNIT_OF_MEASUREMENT_JSON_LIKE = f"%{UNIT_OF_MEASUREMENT_JSON}%"
 
 
 PSUEDO_EVENT_STATE_CHANGED = None
 # Since we don't store event_types and None
 # and we don't store state_changed in events
@@ -220,29 +224,44 @@ def _missing_state_matcher() -> sqlalchemy.and_:
 def _not_continuous_entity_matcher() -> sqlalchemy.or_:
     """Match non continuous entities."""
     return sqlalchemy.or_(
-        _not_continuous_domain_matcher(),
+        # First exclude domains that may be continuous
+        _not_possible_continuous_domain_matcher(),
+        # But let in the entities in the possible continuous domains
+        # that are not actually continuous sensors because they lack a UOM
         sqlalchemy.and_(
-            _continuous_domain_matcher, _not_uom_attributes_matcher()
+            _conditionally_continuous_domain_matcher, _not_uom_attributes_matcher()
        ).self_group(),
    )
 
 
-def _not_continuous_domain_matcher() -> sqlalchemy.and_:
-    """Match not continuous domains."""
+def _not_possible_continuous_domain_matcher() -> sqlalchemy.and_:
+    """Match not continuous domains.
+
+    This matches domain that are always considered continuous
+    and domains that are conditionally (if they have a UOM)
+    continuous domains.
+    """
     return sqlalchemy.and_(
         *[
             ~States.entity_id.like(entity_domain)
-            for entity_domain in CONTINUOUS_ENTITY_ID_LIKE
+            for entity_domain in (
+                *ALWAYS_CONTINUOUS_ENTITY_ID_LIKE,
+                *CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE,
+            )
         ],
     ).self_group()
 
 
-def _continuous_domain_matcher() -> sqlalchemy.or_:
-    """Match continuous domains."""
+def _conditionally_continuous_domain_matcher() -> sqlalchemy.or_:
+    """Match conditionally continuous domains.
+
+    This matches domain that are only considered
+    continuous if a UOM is set.
+    """
     return sqlalchemy.or_(
         *[
             States.entity_id.like(entity_domain)
-            for entity_domain in CONTINUOUS_ENTITY_ID_LIKE
+            for entity_domain in CONDITIONALLY_CONTINUOUS_ENTITY_ID_LIKE
         ],
     ).self_group()
@@ -16,9 +16,11 @@ from homeassistant.components.websocket_api import messages
 from homeassistant.components.websocket_api.connection import ActiveConnection
 from homeassistant.components.websocket_api.const import JSON_DUMP
 from homeassistant.core import CALLBACK_TYPE, Event, HomeAssistant, callback
+from homeassistant.helpers.entityfilter import EntityFilter
 from homeassistant.helpers.event import async_track_point_in_utc_time
 import homeassistant.util.dt as dt_util
 
+from .const import LOGBOOK_ENTITIES_FILTER
 from .helpers import (
     async_determine_event_types,
     async_filter_entities,
@@ -67,6 +69,23 @@ async def _async_wait_for_recorder_sync(hass: HomeAssistant) -> None:
     )
 
 
+@callback
+def _async_send_empty_response(
+    connection: ActiveConnection, msg_id: int, start_time: dt, end_time: dt | None
+) -> None:
+    """Send an empty response.
+
+    The current case for this is when they ask for entity_ids
+    that will all be filtered away because they have UOMs or
+    state_class.
+    """
+    connection.send_result(msg_id)
+    stream_end_time = end_time or dt_util.utcnow()
+    empty_stream_message = _generate_stream_message([], start_time, stream_end_time)
+    empty_response = messages.event_message(msg_id, empty_stream_message)
+    connection.send_message(JSON_DUMP(empty_response))
+
+
 async def _async_send_historical_events(
     hass: HomeAssistant,
     connection: ActiveConnection,
@@ -171,6 +190,17 @@ async def _async_get_ws_stream_events(
     )
 
 
+def _generate_stream_message(
+    events: list[dict[str, Any]], start_day: dt, end_day: dt
+) -> dict[str, Any]:
+    """Generate a logbook stream message response."""
+    return {
+        "events": events,
+        "start_time": dt_util.utc_to_timestamp(start_day),
+        "end_time": dt_util.utc_to_timestamp(end_day),
+    }
+
+
 def _ws_stream_get_events(
     msg_id: int,
     start_day: dt,
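Aside: with the envelope factored into _generate_stream_message, the empty-response path and the normal path emit the same shape. For example (timestamps illustrative):

_generate_stream_message([], start_day, end_day)
# -> {"events": [], "start_time": 1654732800.0, "end_time": 1654819200.0}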
@@ -184,11 +214,7 @@ def _ws_stream_get_events(
     last_time = None
     if events:
         last_time = dt_util.utc_from_timestamp(events[-1]["when"])
-    message = {
-        "events": events,
-        "start_time": dt_util.utc_to_timestamp(start_day),
-        "end_time": dt_util.utc_to_timestamp(end_day),
-    }
+    message = _generate_stream_message(events, start_day, end_day)
     if partial:
         # This is a hint to consumers of the api that
         # we are about to send a another block of historical
@@ -275,6 +301,10 @@ async def ws_event_stream(
     entity_ids = msg.get("entity_ids")
     if entity_ids:
         entity_ids = async_filter_entities(hass, entity_ids)
+        if not entity_ids:
+            _async_send_empty_response(connection, msg_id, start_time, end_time)
+            return
+
     event_types = async_determine_event_types(hass, entity_ids, device_ids)
     event_processor = EventProcessor(
         hass,
@@ -337,8 +367,18 @@ async def ws_event_stream(
     )
     _unsub()
 
+    entities_filter: EntityFilter | None = None
+    if not event_processor.limited_select:
+        entities_filter = hass.data[LOGBOOK_ENTITIES_FILTER]
+
     async_subscribe_events(
-        hass, subscriptions, _queue_or_cancel, event_types, entity_ids, device_ids
+        hass,
+        subscriptions,
+        _queue_or_cancel,
+        event_types,
+        entities_filter,
+        entity_ids,
+        device_ids,
     )
     subscriptions_setup_complete_time = dt_util.utcnow()
     connection.subscriptions[msg_id] = _unsub
@@ -3,7 +3,7 @@
   "name": "LOOKin",
   "documentation": "https://www.home-assistant.io/integrations/lookin/",
   "codeowners": ["@ANMalko", "@bdraco"],
-  "requirements": ["aiolookin==0.1.0"],
+  "requirements": ["aiolookin==0.1.1"],
   "zeroconf": ["_lookin._tcp.local."],
   "config_flow": true,
   "iot_class": "local_push",
@@ -139,8 +139,11 @@ async def async_setup_entry(
                 entry, coordinator, controller, description
             )
             for description in BINARY_SENSOR_DESCRIPTIONS
-            if (coordinator := coordinators[description.api_category]) is not None
-            and key_exists(coordinator.data, description.data_key)
+            if (
+                (coordinator := coordinators[description.api_category]) is not None
+                and coordinator.data
+                and key_exists(coordinator.data, description.data_key)
+            )
         ]
     )
 
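Aside: the added `and coordinator.data` guard matters because key_exists walks the coordinator payload; if the first refresh produced no data, calling it on None raises. A sketch of the failure mode (key_exists shown as a simplified hypothetical re-implementation, not the integration's exact helper):

from typing import Any


def key_exists(data: dict[str, Any], key: str) -> bool:
    # simplified, hypothetical version of the integration's helper
    for k, v in data.items():
        if k == key:
            return True
        if isinstance(v, dict) and key_exists(v, key):
            return True
    return False


coordinator_data = None  # e.g. the first refresh returned nothing
# key_exists(coordinator_data, "useRainSensor") would raise AttributeError;
# the truthiness check short-circuits before the call.
if coordinator_data and key_exists(coordinator_data, "useRainSensor"):
    print("sensor supported")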
@@ -3,7 +3,7 @@
   "name": "RainMachine",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/rainmachine",
-  "requirements": ["regenmaschine==2022.05.1"],
+  "requirements": ["regenmaschine==2022.06.0"],
   "codeowners": ["@bachya"],
   "iot_class": "local_polling",
   "homekit": {
@@ -133,8 +133,11 @@ async def async_setup_entry(
                 entry, coordinator, controller, description
             )
             for description in SENSOR_DESCRIPTIONS
-            if (coordinator := coordinators[description.api_category]) is not None
-            and key_exists(coordinator.data, description.data_key)
+            if (
+                (coordinator := coordinators[description.api_category]) is not None
+                and coordinator.data
+                and key_exists(coordinator.data, description.data_key)
+            )
         ]
     )
 
     zone_coordinator = coordinators[DATA_ZONES]
@@ -248,8 +248,13 @@ def _domain_matcher(
     domains: Iterable[str], columns: Iterable[Column], encoder: Callable[[Any], Any]
 ) -> ClauseList:
     matchers = [
-        (column.is_not(None) & cast(column, Text()).like(encoder(f"{domain}.%")))
-        for domain in domains
+        (column.is_not(None) & cast(column, Text()).like(encoder(domain_matcher)))
+        for domain_matcher in like_domain_matchers(domains)
         for column in columns
     ]
     return or_(*matchers) if matchers else or_(False)
+
+
+def like_domain_matchers(domains: Iterable[str]) -> list[str]:
+    """Convert a list of domains to sql LIKE matchers."""
+    return [f"{domain}.%" for domain in domains]
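Aside: like_domain_matchers just maps domains onto SQL LIKE patterns, so the recorder filter and the logbook queries now share one definition:

like_domain_matchers(["sensor", "proximity"])  # -> ['sensor.%', 'proximity.%']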
@@ -15,6 +15,7 @@ from sqlalchemy.orm.query import Query
 from sqlalchemy.orm.session import Session
 from sqlalchemy.sql.expression import literal
 from sqlalchemy.sql.lambdas import StatementLambdaElement
+from sqlalchemy.sql.selectable import Subquery
 
 from homeassistant.components import recorder
 from homeassistant.components.websocket_api.const import (
@@ -351,7 +352,8 @@ def _state_changed_during_period_stmt(
     )
     if end_time:
         stmt += lambda q: q.filter(States.last_updated < end_time)
-    stmt += lambda q: q.filter(States.entity_id == entity_id)
+    if entity_id:
+        stmt += lambda q: q.filter(States.entity_id == entity_id)
     if join_attributes:
         stmt += lambda q: q.outerjoin(
             StateAttributes, States.attributes_id == StateAttributes.attributes_id
@@ -377,6 +379,7 @@ def state_changes_during_period(
 ) -> MutableMapping[str, list[State]]:
     """Return states changes during UTC period start_time - end_time."""
     entity_id = entity_id.lower() if entity_id is not None else None
+    entity_ids = [entity_id] if entity_id is not None else None
 
     with session_scope(hass=hass) as session:
         stmt = _state_changed_during_period_stmt(
@@ -391,8 +394,6 @@ def state_changes_during_period(
         states = execute_stmt_lambda_element(
             session, stmt, None if entity_id else start_time, end_time
         )
-        entity_ids = [entity_id] if entity_id is not None else None
-
         return cast(
             MutableMapping[str, list[State]],
             _sorted_states_to_dict(
@@ -407,14 +408,16 @@ def state_changes_during_period(
 
 
 def _get_last_state_changes_stmt(
-    schema_version: int, number_of_states: int, entity_id: str
+    schema_version: int, number_of_states: int, entity_id: str | None
 ) -> StatementLambdaElement:
     stmt, join_attributes = lambda_stmt_and_join_attributes(
         schema_version, False, include_last_changed=False
     )
     stmt += lambda q: q.filter(
         (States.last_changed == States.last_updated) | States.last_changed.is_(None)
-    ).filter(States.entity_id == entity_id)
+    )
+    if entity_id:
+        stmt += lambda q: q.filter(States.entity_id == entity_id)
     if join_attributes:
         stmt += lambda q: q.outerjoin(
             StateAttributes, States.attributes_id == StateAttributes.attributes_id
@@ -426,19 +429,18 @@ def _get_last_state_changes_stmt(
 
 
 def get_last_state_changes(
-    hass: HomeAssistant, number_of_states: int, entity_id: str
+    hass: HomeAssistant, number_of_states: int, entity_id: str | None
 ) -> MutableMapping[str, list[State]]:
     """Return the last number_of_states."""
     start_time = dt_util.utcnow()
     entity_id = entity_id.lower() if entity_id is not None else None
+    entity_ids = [entity_id] if entity_id is not None else None
 
     with session_scope(hass=hass) as session:
         stmt = _get_last_state_changes_stmt(
             _schema_version(hass), number_of_states, entity_id
         )
         states = list(execute_stmt_lambda_element(session, stmt))
-        entity_ids = [entity_id] if entity_id is not None else None
-
         return cast(
             MutableMapping[str, list[State]],
             _sorted_states_to_dict(
@@ -485,6 +487,25 @@ def _get_states_for_entites_stmt(
     return stmt
 
 
+def _generate_most_recent_states_by_date(
+    run_start: datetime,
+    utc_point_in_time: datetime,
+) -> Subquery:
+    """Generate the sub query for the most recent states by data."""
+    return (
+        select(
+            States.entity_id.label("max_entity_id"),
+            func.max(States.last_updated).label("max_last_updated"),
+        )
+        .filter(
+            (States.last_updated >= run_start)
+            & (States.last_updated < utc_point_in_time)
+        )
+        .group_by(States.entity_id)
+        .subquery()
+    )
+
+
 def _get_states_for_all_stmt(
     schema_version: int,
     run_start: datetime,
@@ -500,17 +521,8 @@ def _get_states_for_all_stmt(
     # query, then filter out unwanted domains as well as applying the custom filter.
     # This filtering can't be done in the inner query because the domain column is
     # not indexed and we can't control what's in the custom filter.
-    most_recent_states_by_date = (
-        select(
-            States.entity_id.label("max_entity_id"),
-            func.max(States.last_updated).label("max_last_updated"),
-        )
-        .filter(
-            (States.last_updated >= run_start)
-            & (States.last_updated < utc_point_in_time)
-        )
-        .group_by(States.entity_id)
-        .subquery()
+    most_recent_states_by_date = _generate_most_recent_states_by_date(
+        run_start, utc_point_in_time
     )
     stmt += lambda q: q.where(
         States.state_id
|
@@ -20,6 +20,7 @@ from sqlalchemy.exc import SQLAlchemyError, StatementError
 from sqlalchemy.orm.session import Session
 from sqlalchemy.sql.expression import literal_column, true
 from sqlalchemy.sql.lambdas import StatementLambdaElement
+from sqlalchemy.sql.selectable import Subquery
 import voluptuous as vol
 
 from homeassistant.const import (
@@ -484,14 +485,13 @@ def _compile_hourly_statistics_summary_mean_stmt(
     start_time: datetime, end_time: datetime
 ) -> StatementLambdaElement:
     """Generate the summary mean statement for hourly statistics."""
-    stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SUMMARY_MEAN))
-    stmt += (
-        lambda q: q.filter(StatisticsShortTerm.start >= start_time)
+    return lambda_stmt(
+        lambda: select(*QUERY_STATISTICS_SUMMARY_MEAN)
+        .filter(StatisticsShortTerm.start >= start_time)
         .filter(StatisticsShortTerm.start < end_time)
         .group_by(StatisticsShortTerm.metadata_id)
         .order_by(StatisticsShortTerm.metadata_id)
     )
-    return stmt
 
 
 def compile_hourly_statistics(
@@ -985,26 +985,43 @@ def _statistics_during_period_stmt(
     start_time: datetime,
     end_time: datetime | None,
     metadata_ids: list[int] | None,
-    table: type[Statistics | StatisticsShortTerm],
 ) -> StatementLambdaElement:
     """Prepare a database query for statistics during a given period.
 
     This prepares a lambda_stmt query, so we don't insert the parameters yet.
     """
-    if table == StatisticsShortTerm:
-        stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
-    else:
-        stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS))
-
-    stmt += lambda q: q.filter(table.start >= start_time)
-
+    stmt = lambda_stmt(
+        lambda: select(*QUERY_STATISTICS).filter(Statistics.start >= start_time)
+    )
     if end_time is not None:
-        stmt += lambda q: q.filter(table.start < end_time)
-
+        stmt += lambda q: q.filter(Statistics.start < end_time)
     if metadata_ids:
-        stmt += lambda q: q.filter(table.metadata_id.in_(metadata_ids))
+        stmt += lambda q: q.filter(Statistics.metadata_id.in_(metadata_ids))
+    stmt += lambda q: q.order_by(Statistics.metadata_id, Statistics.start)
+    return stmt
 
-    stmt += lambda q: q.order_by(table.metadata_id, table.start)
+
+def _statistics_during_period_stmt_short_term(
+    start_time: datetime,
+    end_time: datetime | None,
+    metadata_ids: list[int] | None,
+) -> StatementLambdaElement:
+    """Prepare a database query for short term statistics during a given period.
+
+    This prepares a lambda_stmt query, so we don't insert the parameters yet.
+    """
+    stmt = lambda_stmt(
+        lambda: select(*QUERY_STATISTICS_SHORT_TERM).filter(
+            StatisticsShortTerm.start >= start_time
+        )
+    )
+    if end_time is not None:
+        stmt += lambda q: q.filter(StatisticsShortTerm.start < end_time)
+    if metadata_ids:
+        stmt += lambda q: q.filter(StatisticsShortTerm.metadata_id.in_(metadata_ids))
+    stmt += lambda q: q.order_by(
+        StatisticsShortTerm.metadata_id, StatisticsShortTerm.start
+    )
     return stmt
 
 
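Aside: splitting the builder into per-table functions keeps each lambda_stmt lambda referring to a fixed module-level model instead of closing over a `table` variable; SQLAlchemy caches lambda statements by the lambda's source location, so a variable table captured in the closure is a hazard for the statement cache. A minimal sketch of the safe pattern, with a hypothetical stand-in table:

from datetime import datetime

from sqlalchemy import Column, DateTime, Integer, MetaData, Table, lambda_stmt, select

metadata_obj = MetaData()
# hypothetical stand-in for the Statistics model
stats = Table("statistics", metadata_obj, Column("id", Integer), Column("start", DateTime))


def stmt_for_period(start_time: datetime, end_time: datetime | None):
    # `stats` is a module-level constant, so every lambda here compiles to the
    # same cached statement; only start_time/end_time vary as bound parameters.
    stmt = lambda_stmt(lambda: select(stats).filter(stats.c.start >= start_time))
    if end_time is not None:
        stmt += lambda q: q.filter(stats.c.start < end_time)
    return stmt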
@@ -1034,10 +1051,12 @@ def statistics_during_period(
 
     if period == "5minute":
         table = StatisticsShortTerm
+        stmt = _statistics_during_period_stmt_short_term(
+            start_time, end_time, metadata_ids
+        )
     else:
         table = Statistics
-    stmt = _statistics_during_period_stmt(start_time, end_time, metadata_ids, table)
+        stmt = _statistics_during_period_stmt(start_time, end_time, metadata_ids)
     stats = execute_stmt_lambda_element(session, stmt)
 
     if not stats:
@@ -1069,19 +1088,27 @@ def statistics_during_period(
 def _get_last_statistics_stmt(
     metadata_id: int,
     number_of_stats: int,
-    table: type[Statistics | StatisticsShortTerm],
 ) -> StatementLambdaElement:
     """Generate a statement for number_of_stats statistics for a given statistic_id."""
-    if table == StatisticsShortTerm:
-        stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
-    else:
-        stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS))
-    stmt += (
-        lambda q: q.filter_by(metadata_id=metadata_id)
-        .order_by(table.metadata_id, table.start.desc())
+    return lambda_stmt(
+        lambda: select(*QUERY_STATISTICS)
+        .filter_by(metadata_id=metadata_id)
+        .order_by(Statistics.metadata_id, Statistics.start.desc())
+        .limit(number_of_stats)
+    )
+
+
+def _get_last_statistics_short_term_stmt(
+    metadata_id: int,
+    number_of_stats: int,
+) -> StatementLambdaElement:
+    """Generate a statement for number_of_stats short term statistics for a given statistic_id."""
+    return lambda_stmt(
+        lambda: select(*QUERY_STATISTICS_SHORT_TERM)
+        .filter_by(metadata_id=metadata_id)
+        .order_by(StatisticsShortTerm.metadata_id, StatisticsShortTerm.start.desc())
         .limit(number_of_stats)
     )
-    return stmt
 
 
 def _get_last_statistics(
@@ -1099,7 +1126,10 @@ def _get_last_statistics(
         if not metadata:
             return {}
         metadata_id = metadata[statistic_id][0]
-        stmt = _get_last_statistics_stmt(metadata_id, number_of_stats, table)
+        if table == Statistics:
+            stmt = _get_last_statistics_stmt(metadata_id, number_of_stats)
+        else:
+            stmt = _get_last_statistics_short_term_stmt(metadata_id, number_of_stats)
         stats = execute_stmt_lambda_element(session, stmt)
 
         if not stats:
@@ -1136,12 +1166,9 @@ def get_last_short_term_statistics(
     )
 
 
-def _latest_short_term_statistics_stmt(
-    metadata_ids: list[int],
-) -> StatementLambdaElement:
-    """Create the statement for finding the latest short term stat rows."""
-    stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
-    most_recent_statistic_row = (
+def _generate_most_recent_statistic_row(metadata_ids: list[int]) -> Subquery:
+    """Generate the subquery to find the most recent statistic row."""
+    return (
         select(
             StatisticsShortTerm.metadata_id,
             func.max(StatisticsShortTerm.start).label("start_max"),
@@ -1149,6 +1176,14 @@
         .where(StatisticsShortTerm.metadata_id.in_(metadata_ids))
         .group_by(StatisticsShortTerm.metadata_id)
     ).subquery()
+
+
+def _latest_short_term_statistics_stmt(
+    metadata_ids: list[int],
+) -> StatementLambdaElement:
+    """Create the statement for finding the latest short term stat rows."""
+    stmt = lambda_stmt(lambda: select(*QUERY_STATISTICS_SHORT_TERM))
+    most_recent_statistic_row = _generate_most_recent_statistic_row(metadata_ids)
     stmt += lambda s: s.join(
         most_recent_statistic_row,
         (
|
@@ -3,7 +3,7 @@
   "name": "SimpliSafe",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/simplisafe",
-  "requirements": ["simplisafe-python==2022.05.2"],
+  "requirements": ["simplisafe-python==2022.06.0"],
   "codeowners": ["@bachya"],
   "iot_class": "cloud_polling",
   "dhcp": [
@@ -198,13 +198,16 @@ class TomorrowioWeatherEntity(TomorrowioEntity, WeatherEntity):
         max_forecasts = MAX_FORECASTS[self.forecast_type]
         forecast_count = 0
 
+        # Convert utcnow to local to be compatible with tests
+        today = dt_util.as_local(dt_util.utcnow()).date()
+
         # Set default values (in cases where keys don't exist), None will be
         # returned. Override properties per forecast type as needed
         for forecast in raw_forecasts:
             forecast_dt = dt_util.parse_datetime(forecast[TMRW_ATTR_TIMESTAMP])
 
             # Throw out past data
-            if forecast_dt.date() < dt_util.utcnow().date():
+            if dt_util.as_local(forecast_dt).date() < today:
                 continue
 
             values = forecast["values"]
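Aside: the Tomorrow.io fix compares both dates in local time. Near midnight, the UTC date can already be "tomorrow" while the forecast's local date is still "today", which used to discard valid entries. A sketch of the normalization (times illustrative):

from datetime import datetime, timezone

import homeassistant.util.dt as dt_util

dt_util.set_default_time_zone(dt_util.get_time_zone("America/Phoenix"))  # UTC-7

utc_now = datetime(2022, 6, 9, 1, 0, tzinfo=timezone.utc)
print(utc_now.date())                    # 2022-06-09 (UTC date)
print(dt_util.as_local(utc_now).date())  # 2022-06-08 (local date)
# Comparing forecast_dt and "today" both via as_local() keeps a late-evening
# local forecast from being thrown out once the UTC calendar day rolls over.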
|
@@ -17,6 +17,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
 from . import DATA_VELUX, VeluxEntity
 
+PARALLEL_UPDATES = 1
 
 
 async def async_setup_platform(
     hass: HomeAssistant,
@@ -97,12 +99,11 @@ class VeluxCover(VeluxEntity, CoverEntity):
 
     async def async_set_cover_position(self, **kwargs):
         """Move the cover to a specific position."""
-        if ATTR_POSITION in kwargs:
-            position_percent = 100 - kwargs[ATTR_POSITION]
+        position_percent = 100 - kwargs[ATTR_POSITION]
 
-            await self.node.set_position(
-                Position(position_percent=position_percent), wait_for_completion=False
-            )
+        await self.node.set_position(
+            Position(position_percent=position_percent), wait_for_completion=False
+        )
 
     async def async_stop_cover(self, **kwargs):
         """Stop the cover."""
|
@@ -10,6 +10,8 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
 from . import DATA_VELUX, VeluxEntity
 
+PARALLEL_UPDATES = 1
+
 
 async def async_setup_platform(
     hass: HomeAssistant,
|
@@ -10,6 +10,8 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 
 from . import _LOGGER, DATA_VELUX
 
+PARALLEL_UPDATES = 1
+
 
 async def async_setup_platform(
     hass: HomeAssistant,
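PARALLEL_UPDATES is a module-level constant the entity platform reads to cap concurrent updates and service calls; 1 serializes all traffic so the single KLF 200 gateway is not flooded, which is why each Velux platform file gets the same line. A rough standalone sketch of the mechanism (the semaphore wrapper illustrates the idea, it is not Home Assistant's actual helper code):

import asyncio

PARALLEL_UPDATES = 1

async def main() -> None:
    limit = asyncio.Semaphore(PARALLEL_UPDATES)

    async def call_device(name: str) -> None:
        async with limit:              # only one in-flight gateway command at a time
            await asyncio.sleep(0.1)   # stand-in for a KLF 200 round trip
            print(f"{name} done")

    await asyncio.gather(*(call_device(f"cover_{i}") for i in range(3)))

asyncio.run(main())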
@@ -3,7 +3,7 @@
   "name": "Wallbox",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/wallbox",
-  "requirements": ["wallbox==0.4.4"],
+  "requirements": ["wallbox==0.4.9"],
   "ssdp": [],
   "zeroconf": [],
   "homekit": {},
@@ -7,7 +7,7 @@ from .backports.enum import StrEnum
 
 MAJOR_VERSION: Final = 2022
 MINOR_VERSION: Final = 6
-PATCH_VERSION: Final = "2"
+PATCH_VERSION: Final = "3"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)
@@ -187,7 +187,7 @@ aiolifx==0.7.1
 aiolifx_effects==0.2.2
 
 # homeassistant.components.lookin
-aiolookin==0.1.0
+aiolookin==0.1.1
 
 # homeassistant.components.lyric
 aiolyric==1.0.8
@@ -1550,7 +1550,7 @@ pyhomeworks==0.0.6
 pyialarm==1.9.0
 
 # homeassistant.components.ialarm_xr
-pyialarmxr==1.0.18
+pyialarmxr-homeassistant==1.0.18
 
 # homeassistant.components.icloud
 pyicloud==1.0.0
@@ -2065,7 +2065,7 @@ raincloudy==0.0.7
 raspyrfm-client==1.2.8
 
 # homeassistant.components.rainmachine
-regenmaschine==2022.05.1
+regenmaschine==2022.06.0
 
 # homeassistant.components.renault
 renault-api==0.1.11
@@ -2168,7 +2168,7 @@ simplehound==0.3
 simplepush==1.1.4
 
 # homeassistant.components.simplisafe
-simplisafe-python==2022.05.2
+simplisafe-python==2022.06.0
 
 # homeassistant.components.sisyphus
 sisyphus-control==3.1.2
@@ -2418,7 +2418,7 @@ vultr==0.1.2
 wakeonlan==2.0.1
 
 # homeassistant.components.wallbox
-wallbox==0.4.4
+wallbox==0.4.9
 
 # homeassistant.components.waqi
 waqiasync==1.0.0
@@ -159,7 +159,7 @@ aiohue==4.4.1
 aiokafka==0.6.0
 
 # homeassistant.components.lookin
-aiolookin==0.1.0
+aiolookin==0.1.1
 
 # homeassistant.components.lyric
 aiolyric==1.0.8
@@ -1038,7 +1038,7 @@ pyhomematic==0.1.77
 pyialarm==1.9.0
 
 # homeassistant.components.ialarm_xr
-pyialarmxr==1.0.18
+pyialarmxr-homeassistant==1.0.18
 
 # homeassistant.components.icloud
 pyicloud==1.0.0
@@ -1364,7 +1364,7 @@ rachiopy==1.0.3
 radios==0.1.1
 
 # homeassistant.components.rainmachine
-regenmaschine==2022.05.1
+regenmaschine==2022.06.0
 
 # homeassistant.components.renault
 renault-api==0.1.11
@@ -1425,7 +1425,7 @@ sharkiq==0.0.1
 simplehound==0.3
 
 # homeassistant.components.simplisafe
-simplisafe-python==2022.05.2
+simplisafe-python==2022.06.0
 
 # homeassistant.components.slack
 slackclient==2.5.0
@@ -1591,7 +1591,7 @@ vultr==0.1.2
 wakeonlan==2.0.1
 
 # homeassistant.components.wallbox
-wallbox==0.4.4
+wallbox==0.4.9
 
 # homeassistant.components.folder_watcher
 watchdog==2.1.8
@@ -1,5 +1,5 @@
 [metadata]
-version = 2022.6.2
+version = 2022.6.3
 url = https://www.home-assistant.io/
 
 [options]
@@ -68,7 +68,6 @@ def mock_humanify(hass_, rows):
     return list(
         processor._humanify(
             rows,
-            None,
            ent_reg,
            logbook_run,
            context_augmenter,
@@ -745,6 +745,12 @@ async def test_filter_continuous_sensor_values(
     entity_id_third = "light.bla"
     hass.states.async_set(entity_id_third, STATE_OFF, {"unit_of_measurement": "foo"})
     hass.states.async_set(entity_id_third, STATE_ON, {"unit_of_measurement": "foo"})
+    entity_id_proximity = "proximity.bla"
+    hass.states.async_set(entity_id_proximity, STATE_OFF)
+    hass.states.async_set(entity_id_proximity, STATE_ON)
+    entity_id_counter = "counter.bla"
+    hass.states.async_set(entity_id_counter, STATE_OFF)
+    hass.states.async_set(entity_id_counter, STATE_ON)
 
     await async_wait_recording_done(hass)
 
@@ -27,8 +27,8 @@ from homeassistant.const import (
     STATE_OFF,
     STATE_ON,
 )
-from homeassistant.core import Event, HomeAssistant, State
-from homeassistant.helpers import device_registry
+from homeassistant.core import Event, HomeAssistant, State, callback
+from homeassistant.helpers import device_registry, entity_registry
 from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS
 from homeassistant.setup import async_setup_component
 import homeassistant.util.dt as dt_util
@@ -51,22 +51,8 @@ def set_utc(hass):
     hass.config.set_time_zone("UTC")
 
 
-async def _async_mock_device_with_logbook_platform(hass):
-    """Mock an integration that provides a device that are described by the logbook."""
-    entry = MockConfigEntry(domain="test", data={"first": True}, options=None)
-    entry.add_to_hass(hass)
-    dev_reg = device_registry.async_get(hass)
-    device = dev_reg.async_get_or_create(
-        config_entry_id=entry.entry_id,
-        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
-        identifiers={("bridgeid", "0123")},
-        sw_version="sw-version",
-        name="device name",
-        manufacturer="manufacturer",
-        model="model",
-        suggested_area="Game Room",
-    )
-
+@callback
+async def _async_mock_logbook_platform(hass: HomeAssistant) -> None:
     class MockLogbookPlatform:
         """Mock a logbook platform."""
 
@@ -90,6 +76,40 @@ async def _async_mock_device_with_logbook_platform(hass):
         async_describe_event("test", "mock_event", async_describe_test_event)
 
     await logbook._process_logbook_platform(hass, "test", MockLogbookPlatform)
 
+
+async def _async_mock_entity_with_logbook_platform(hass):
+    """Mock an integration that provides an entity that are described by the logbook."""
+    entry = MockConfigEntry(domain="test", data={"first": True}, options=None)
+    entry.add_to_hass(hass)
+    ent_reg = entity_registry.async_get(hass)
+    entry = ent_reg.async_get_or_create(
+        platform="test",
+        domain="sensor",
+        config_entry=entry,
+        unique_id="1234",
+        suggested_object_id="test",
+    )
+    await _async_mock_logbook_platform(hass)
+    return entry
+
+
+async def _async_mock_device_with_logbook_platform(hass):
+    """Mock an integration that provides a device that are described by the logbook."""
+    entry = MockConfigEntry(domain="test", data={"first": True}, options=None)
+    entry.add_to_hass(hass)
+    dev_reg = device_registry.async_get(hass)
+    device = dev_reg.async_get_or_create(
+        config_entry_id=entry.entry_id,
+        connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
+        identifiers={("bridgeid", "0123")},
+        sw_version="sw-version",
+        name="device name",
+        manufacturer="manufacturer",
+        model="model",
+        suggested_area="Game Room",
+    )
+    await _async_mock_logbook_platform(hass)
     return device
 
 
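Both helpers now share _async_mock_logbook_platform; the entity variant registers a sensor on platform "test" with suggested_object_id "test", which is why the stream assertions below expect "entity_id": "sensor.test". A hypothetical usage skeleton (not in the commit; fixture names follow the surrounding file):

async def test_entity_is_described(hass, recorder_mock, hass_ws_client):
    entry = await _async_mock_entity_with_logbook_platform(hass)
    assert entry.entity_id == "sensor.test"
    hass.states.async_set(entry.entity_id, STATE_ON)
    # ...open a logbook/event_stream subscription for entry.entity_id and
    # assert the described "mock_event" entries arrive, as in the test below...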
@@ -1786,6 +1806,103 @@ async def test_event_stream_bad_start_time(hass, hass_ws_client, recorder_mock):
     assert response["error"]["code"] == "invalid_start_time"
 
 
+@patch("homeassistant.components.logbook.websocket_api.EVENT_COALESCE_TIME", 0)
+async def test_logbook_stream_match_multiple_entities(
+    hass, recorder_mock, hass_ws_client
+):
+    """Test logbook stream with a described integration that uses multiple entities."""
+    now = dt_util.utcnow()
+    await asyncio.gather(
+        *[
+            async_setup_component(hass, comp, {})
+            for comp in ("homeassistant", "logbook", "automation", "script")
+        ]
+    )
+    entry = await _async_mock_entity_with_logbook_platform(hass)
+    entity_id = entry.entity_id
+    hass.states.async_set(entity_id, STATE_ON)
+
+    await hass.async_block_till_done()
+    init_count = sum(hass.bus.async_listeners().values())
+
+    await async_wait_recording_done(hass)
+    websocket_client = await hass_ws_client()
+    await websocket_client.send_json(
+        {
+            "id": 7,
+            "type": "logbook/event_stream",
+            "start_time": now.isoformat(),
+            "entity_ids": [entity_id],
+        }
+    )
+
+    msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
+    assert msg["id"] == 7
+    assert msg["type"] == TYPE_RESULT
+    assert msg["success"]
+
+    # There are no answers to our initial query
+    # so we get an empty reply. This is to ensure
+    # consumers of the api know there are no results
+    # and it's not a failure case. This is useful
+    # in the frontend so we can tell the user there
+    # are no results vs waiting for them to appear
+    msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
+    assert msg["id"] == 7
+    assert msg["type"] == "event"
+    assert msg["event"]["events"] == []
+    await async_wait_recording_done(hass)
+
+    hass.states.async_set("binary_sensor.should_not_appear", STATE_ON)
+    hass.states.async_set("binary_sensor.should_not_appear", STATE_OFF)
+    context = core.Context(
+        id="ac5bd62de45711eaaeb351041eec8dd9",
+        user_id="b400facee45711eaa9308bfd3d19e474",
+    )
+    hass.bus.async_fire(
+        "mock_event", {"entity_id": ["sensor.any", entity_id]}, context=context
+    )
+    hass.bus.async_fire("mock_event", {"entity_id": [f"sensor.any,{entity_id}"]})
+    hass.bus.async_fire("mock_event", {"entity_id": ["sensor.no_match", "light.off"]})
+    hass.states.async_set(entity_id, STATE_OFF, context=context)
+    await hass.async_block_till_done()
+
+    msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
+    assert msg["id"] == 7
+    assert msg["type"] == "event"
+    assert msg["event"]["events"] == [
+        {
+            "context_user_id": "b400facee45711eaa9308bfd3d19e474",
+            "domain": "test",
+            "message": "is on fire",
+            "name": "device name",
+            "when": ANY,
+        },
+        {
+            "context_domain": "test",
+            "context_event_type": "mock_event",
+            "context_message": "is on fire",
+            "context_name": "device name",
+            "context_user_id": "b400facee45711eaa9308bfd3d19e474",
+            "entity_id": "sensor.test",
+            "state": "off",
+            "when": ANY,
+        },
+    ]
+
+    await websocket_client.send_json(
+        {"id": 8, "type": "unsubscribe_events", "subscription": 7}
+    )
+    msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
+
+    assert msg["id"] == 8
+    assert msg["type"] == TYPE_RESULT
+    assert msg["success"]
+
+    # Check our listener got unsubscribed
+    assert sum(hass.bus.async_listeners().values()) == init_count
+
+
 async def test_event_stream_bad_end_time(hass, hass_ws_client, recorder_mock):
     """Test event_stream bad end time."""
     await async_setup_component(hass, "logbook", {})
@@ -2092,7 +2209,9 @@ async def test_recorder_is_far_behind(hass, recorder_mock, hass_ws_client, caplo
 
 
 @patch("homeassistant.components.logbook.websocket_api.EVENT_COALESCE_TIME", 0)
-async def test_subscribe_all_entities_have_uom(hass, recorder_mock, hass_ws_client):
+async def test_subscribe_all_entities_are_continuous(
+    hass, recorder_mock, hass_ws_client
+):
     """Test subscribe/unsubscribe logbook stream with entities that are always filtered."""
     now = dt_util.utcnow()
     await asyncio.gather(
@@ -2102,11 +2221,19 @@ async def test_subscribe_all_entities_are_continuous(
         ]
     )
     await async_wait_recording_done(hass)
+    entity_ids = ("sensor.uom", "sensor.uom_two")
+
+    def _cycle_entities():
+        for entity_id in entity_ids:
+            for state in ("1", "2", "3"):
+                hass.states.async_set(
+                    entity_id, state, {ATTR_UNIT_OF_MEASUREMENT: "any"}
+                )
+                hass.states.async_set("counter.any", state)
+                hass.states.async_set("proximity.any", state)
+
     init_count = sum(hass.bus.async_listeners().values())
-    hass.states.async_set("sensor.uom", "1", {ATTR_UNIT_OF_MEASUREMENT: "any"})
-    hass.states.async_set("sensor.uom", "2", {ATTR_UNIT_OF_MEASUREMENT: "any"})
-    hass.states.async_set("sensor.uom", "3", {ATTR_UNIT_OF_MEASUREMENT: "any"})
+    _cycle_entities()
 
     await async_wait_recording_done(hass)
     websocket_client = await hass_ws_client()
@@ -2115,7 +2242,7 @@ async def test_subscribe_all_entities_have_uom(hass, recorder_mock, hass_ws_clie
             "id": 7,
             "type": "logbook/event_stream",
             "start_time": now.isoformat(),
-            "entity_ids": ["sensor.uom"],
+            "entity_ids": ["sensor.uom", "counter.any", "proximity.any"],
         }
     )
 
@@ -2124,9 +2251,61 @@ async def test_subscribe_all_entities_have_uom(hass, recorder_mock, hass_ws_clie
     assert msg["type"] == TYPE_RESULT
     assert msg["success"]
 
-    hass.states.async_set("sensor.uom", "1", {ATTR_UNIT_OF_MEASUREMENT: "any"})
-    hass.states.async_set("sensor.uom", "2", {ATTR_UNIT_OF_MEASUREMENT: "any"})
-    hass.states.async_set("sensor.uom", "3", {ATTR_UNIT_OF_MEASUREMENT: "any"})
+    _cycle_entities()
+    msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
+    assert msg["id"] == 7
+    assert msg["type"] == "event"
+    assert msg["event"]["events"] == []
+
+    await websocket_client.close()
+    await hass.async_block_till_done()
+
+    # Check our listener got unsubscribed
+    assert sum(hass.bus.async_listeners().values()) == init_count
+
+
+@patch("homeassistant.components.logbook.websocket_api.EVENT_COALESCE_TIME", 0)
+async def test_subscribe_all_entities_have_uom_multiple(
+    hass, recorder_mock, hass_ws_client
+):
+    """Test logbook stream with specific request for multiple entities that are always filtered."""
+    now = dt_util.utcnow()
+    await asyncio.gather(
+        *[
+            async_setup_component(hass, comp, {})
+            for comp in ("homeassistant", "logbook", "automation", "script")
+        ]
+    )
+    await async_wait_recording_done(hass)
+    entity_ids = ("sensor.uom", "sensor.uom_two")
+
+    def _cycle_entities():
+        for entity_id in entity_ids:
+            for state in ("1", "2", "3"):
+                hass.states.async_set(
+                    entity_id, state, {ATTR_UNIT_OF_MEASUREMENT: "any"}
+                )
+
+    init_count = sum(hass.bus.async_listeners().values())
+    _cycle_entities()
+
+    await async_wait_recording_done(hass)
+    websocket_client = await hass_ws_client()
+    await websocket_client.send_json(
+        {
+            "id": 7,
+            "type": "logbook/event_stream",
+            "start_time": now.isoformat(),
+            "entity_ids": [*entity_ids],
+        }
+    )
+
+    msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
+    assert msg["id"] == 7
+    assert msg["type"] == TYPE_RESULT
+    assert msg["success"]
+
+    _cycle_entities()
 
     msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
     assert msg["id"] == 7
@@ -2138,3 +2317,90 @@ async def test_subscribe_all_entities_have_uom(hass, recorder_mock, hass_ws_clie
 
     # Check our listener got unsubscribed
     assert sum(hass.bus.async_listeners().values()) == init_count
+
+
+@patch("homeassistant.components.logbook.websocket_api.EVENT_COALESCE_TIME", 0)
+async def test_subscribe_entities_some_have_uom_multiple(
+    hass, recorder_mock, hass_ws_client
+):
+    """Test logbook stream with uom filtered entities and non-filtered entities."""
+    now = dt_util.utcnow()
+    await asyncio.gather(
+        *[
+            async_setup_component(hass, comp, {})
+            for comp in ("homeassistant", "logbook", "automation", "script")
+        ]
+    )
+    await async_wait_recording_done(hass)
+    filtered_entity_ids = ("sensor.uom", "sensor.uom_two")
+    non_filtered_entity_ids = ("sensor.keep", "sensor.keep_two")
+
+    def _cycle_entities():
+        for entity_id in filtered_entity_ids:
+            for state in ("1", "2", "3"):
+                hass.states.async_set(
+                    entity_id, state, {ATTR_UNIT_OF_MEASUREMENT: "any"}
+                )
+        for entity_id in non_filtered_entity_ids:
+            for state in (STATE_ON, STATE_OFF):
+                hass.states.async_set(entity_id, state)
+
+    init_count = sum(hass.bus.async_listeners().values())
+    _cycle_entities()
+
+    await async_wait_recording_done(hass)
+    websocket_client = await hass_ws_client()
+    await websocket_client.send_json(
+        {
+            "id": 7,
+            "type": "logbook/event_stream",
+            "start_time": now.isoformat(),
+            "entity_ids": [*filtered_entity_ids, *non_filtered_entity_ids],
+        }
+    )
+
+    msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
+    assert msg["id"] == 7
+    assert msg["type"] == TYPE_RESULT
+    assert msg["success"]
+
+    _cycle_entities()
+
+    msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
+    assert msg["id"] == 7
+    assert msg["type"] == "event"
+    assert msg["event"]["partial"] is True
+    assert msg["event"]["events"] == [
+        {"entity_id": "sensor.keep", "state": "off", "when": ANY},
+        {"entity_id": "sensor.keep_two", "state": "off", "when": ANY},
+    ]
+
+    _cycle_entities()
+    await hass.async_block_till_done()
+
+    msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
+    assert msg["id"] == 7
+    assert msg["type"] == "event"
+    assert msg["event"]["events"] == []
+    assert "partial" not in msg["event"]
+
+    msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
+    assert msg["id"] == 7
+    assert msg["type"] == "event"
+    assert msg["event"]["events"] == [
+        {"entity_id": "sensor.keep", "state": "on", "when": ANY},
+        {"entity_id": "sensor.keep", "state": "off", "when": ANY},
+        {"entity_id": "sensor.keep_two", "state": "on", "when": ANY},
+        {"entity_id": "sensor.keep_two", "state": "off", "when": ANY},
+        {"entity_id": "sensor.keep", "state": "on", "when": ANY},
+        {"entity_id": "sensor.keep", "state": "off", "when": ANY},
+        {"entity_id": "sensor.keep_two", "state": "on", "when": ANY},
+        {"entity_id": "sensor.keep_two", "state": "off", "when": ANY},
+    ]
+    assert "partial" not in msg["event"]
+
+    await websocket_client.close()
+    await hass.async_block_till_done()
+
+    # Check our listener got unsubscribed
+    assert sum(hass.bus.async_listeners().values()) == init_count
@@ -878,3 +878,32 @@ async def test_get_full_significant_states_handles_empty_last_changed(
     assert db_sensor_one_states[0].last_updated is not None
     assert db_sensor_one_states[1].last_updated is not None
     assert db_sensor_one_states[0].last_updated != db_sensor_one_states[1].last_updated
+
+
+def test_state_changes_during_period_multiple_entities_single_test(hass_recorder):
+    """Test state change during period with multiple entities in the same test.
+
+    This test ensures the sqlalchemy query cache does not
+    generate incorrect results.
+    """
+    hass = hass_recorder()
+    start = dt_util.utcnow()
+    test_entities = {f"sensor.{i}": str(i) for i in range(30)}
+    for entity_id, value in test_entities.items():
+        hass.states.set(entity_id, value)
+
+    wait_recording_done(hass)
+    end = dt_util.utcnow()
+
+    hist = history.state_changes_during_period(hass, start, end, None)
+    for entity_id, value in test_entities.items():
+        assert hist[entity_id][0].state == value
+
+    for entity_id, value in test_entities.items():
+        hist = history.state_changes_during_period(hass, start, end, entity_id)
+        assert len(hist) == 1
+        assert hist[entity_id][0].state == value
+
+    hist = history.state_changes_during_period(hass, start, end, None)
+    for entity_id, value in test_entities.items():
+        assert hist[entity_id][0].state == value
@@ -100,6 +100,15 @@ def test_compile_hourly_statistics(hass_recorder):
     stats = statistics_during_period(hass, zero, period="5minute")
     assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
 
+    # Test statistics_during_period with a far future start and end date
+    future = dt_util.as_utc(dt_util.parse_datetime("2221-11-01 00:00:00"))
+    stats = statistics_during_period(hass, future, end_time=future, period="5minute")
+    assert stats == {}
+
+    # Test statistics_during_period with a far future end date
+    stats = statistics_during_period(hass, zero, end_time=future, period="5minute")
+    assert stats == {"sensor.test1": expected_stats1, "sensor.test2": expected_stats2}
+
     stats = statistics_during_period(
         hass, zero, statistic_ids=["sensor.test2"], period="5minute"
     )
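These assertions pin down the windowing contract of statistics_during_period: an all-future window is empty, while a window that merely ends in the future still returns everything recorded. A self-contained sketch of that contract (the filter function is an illustrative stand-in, not the recorder's implementation):

from datetime import datetime, timedelta, timezone

def rows_in_window(rows, start, end):
    """Stand-in for the recorder's time filter: start <= row < end."""
    return [row for row in rows if start <= row < end]

now = datetime.now(timezone.utc)
recorded = [now - timedelta(minutes=m) for m in (15, 10, 5)]
future = now + timedelta(days=365 * 200)

assert rows_in_window(recorded, future, future) == []               # empty future window
assert rows_in_window(recorded, min(recorded), future) == recorded  # window ending in future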
@@ -814,6 +823,59 @@ def test_monthly_statistics(hass_recorder, caplog, timezone):
         ]
     }
 
+    stats = statistics_during_period(
+        hass,
+        start_time=zero,
+        statistic_ids=["not", "the", "same", "test:total_energy_import"],
+        period="month",
+    )
+    sep_start = dt_util.as_utc(dt_util.parse_datetime("2021-09-01 00:00:00"))
+    sep_end = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
+    oct_start = dt_util.as_utc(dt_util.parse_datetime("2021-10-01 00:00:00"))
+    oct_end = dt_util.as_utc(dt_util.parse_datetime("2021-11-01 00:00:00"))
+    assert stats == {
+        "test:total_energy_import": [
+            {
+                "statistic_id": "test:total_energy_import",
+                "start": sep_start.isoformat(),
+                "end": sep_end.isoformat(),
+                "max": None,
+                "mean": None,
+                "min": None,
+                "last_reset": None,
+                "state": approx(1.0),
+                "sum": approx(3.0),
+            },
+            {
+                "statistic_id": "test:total_energy_import",
+                "start": oct_start.isoformat(),
+                "end": oct_end.isoformat(),
+                "max": None,
+                "mean": None,
+                "min": None,
+                "last_reset": None,
+                "state": approx(3.0),
+                "sum": approx(5.0),
+            },
+        ]
+    }
+
+    # Use 5minute to ensure table switch works
+    stats = statistics_during_period(
+        hass,
+        start_time=zero,
+        statistic_ids=["test:total_energy_import", "with_other"],
+        period="5minute",
+    )
+    assert stats == {}
+
+    # Ensure future date has no data
+    future = dt_util.as_utc(dt_util.parse_datetime("2221-11-01 00:00:00"))
+    stats = statistics_during_period(
+        hass, start_time=future, end_time=future, period="month"
+    )
+    assert stats == {}
+
     dt_util.set_default_time_zone(dt_util.get_time_zone("UTC"))
@@ -26,7 +26,7 @@ from homeassistant.components.wallbox.const import (
 from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
 
-from .const import ERROR, JWT, STATUS, TTL, USER_ID
+from .const import ERROR, STATUS, TTL, USER_ID
 
 from tests.common import MockConfigEntry
 
@@ -54,11 +54,32 @@ test_response = json.loads(
 authorisation_response = json.loads(
     json.dumps(
         {
-            JWT: "fakekeyhere",
-            USER_ID: 12345,
-            TTL: 145656758,
-            ERROR: "false",
-            STATUS: 200,
+            "data": {
+                "attributes": {
+                    "token": "fakekeyhere",
+                    USER_ID: 12345,
+                    TTL: 145656758,
+                    ERROR: "false",
+                    STATUS: 200,
+                }
+            }
+        }
+    )
+)
+
+
+authorisation_response_unauthorised = json.loads(
+    json.dumps(
+        {
+            "data": {
+                "attributes": {
+                    "token": "fakekeyhere",
+                    USER_ID: 12345,
+                    TTL: 145656758,
+                    ERROR: "false",
+                    STATUS: 404,
+                }
+            }
         }
     )
 )
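The reshaped mocks mirror the nested, JSON:API-style payload of the new signin endpoint, so the token now lives under data.attributes instead of at the top level. A small sketch of the corresponding lookup (key names taken from the mock above; the client code itself is not part of this diff):

auth_payload = {
    "data": {
        "attributes": {
            "token": "fakekeyhere",
            "user_id": 12345,
            "ttl": 145656758,
            "error": "false",
            "status": 200,
        }
    }
}

attributes = auth_payload["data"]["attributes"]
token, ttl = attributes["token"], attributes["ttl"]
print(token, ttl)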
@@ -81,7 +102,7 @@ async def setup_integration(hass: HomeAssistant) -> None:
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response,
             status_code=HTTPStatus.OK,
         )
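The remaining hunks repeat one mechanical change: the mocked auth URL moves from api.wall-box.com/auth/token/user to user-api.wall-box.com/users/signin, tracking wallbox==0.4.9. Because requests_mock raises NoMockAddress for any unmatched URL, a client still pointed at the retired endpoint fails loudly in these tests. A minimal standalone sketch:

import requests
import requests_mock

with requests_mock.Mocker() as mock_request:
    mock_request.get(
        "https://user-api.wall-box.com/users/signin",
        json={"data": {"attributes": {"token": "fakekeyhere"}}},
        status_code=200,
    )
    # Only the registered URL is answered; anything else raises NoMockAddress.
    resp = requests.get("https://user-api.wall-box.com/users/signin", timeout=10)
    assert resp.json()["data"]["attributes"]["token"] == "fakekeyhere"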
@@ -107,7 +128,7 @@ async def setup_integration_connection_error(hass: HomeAssistant) -> None:
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response,
             status_code=HTTPStatus.FORBIDDEN,
         )
@@ -133,7 +154,7 @@ async def setup_integration_read_only(hass: HomeAssistant) -> None:
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response,
             status_code=HTTPStatus.OK,
         )
@@ -18,8 +18,12 @@ from homeassistant.components.wallbox.const import (
 )
 from homeassistant.core import HomeAssistant
 
-from tests.components.wallbox import entry, setup_integration
-from tests.components.wallbox.const import ERROR, JWT, STATUS, TTL, USER_ID
+from tests.components.wallbox import (
+    authorisation_response,
+    authorisation_response_unauthorised,
+    entry,
+    setup_integration,
+)
 
 test_response = json.loads(
     json.dumps(
@@ -34,30 +38,6 @@ test_response = json.loads(
     )
 )
 
-authorisation_response = json.loads(
-    json.dumps(
-        {
-            JWT: "fakekeyhere",
-            USER_ID: 12345,
-            TTL: 145656758,
-            ERROR: "false",
-            STATUS: 200,
-        }
-    )
-)
-
-authorisation_response_unauthorised = json.loads(
-    json.dumps(
-        {
-            JWT: "fakekeyhere",
-            USER_ID: 12345,
-            TTL: 145656758,
-            ERROR: "false",
-            STATUS: 404,
-        }
-    )
-)
-
 
 async def test_show_set_form(hass: HomeAssistant) -> None:
     """Test that the setup form is served."""
@@ -77,7 +57,7 @@ async def test_form_cannot_authenticate(hass: HomeAssistant) -> None:
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response,
             status_code=HTTPStatus.FORBIDDEN,
         )
@@ -107,7 +87,7 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None:
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response_unauthorised,
             status_code=HTTPStatus.NOT_FOUND,
         )
@@ -137,7 +117,7 @@ async def test_form_validate_input(hass: HomeAssistant) -> None:
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response,
             status_code=HTTPStatus.OK,
         )
@@ -166,8 +146,8 @@ async def test_form_reauth(hass: HomeAssistant) -> None:
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
-            text='{"jwt":"fakekeyhere","user_id":12345,"ttl":145656758,"error":false,"status":200}',
+            "https://user-api.wall-box.com/users/signin",
+            json=authorisation_response,
             status_code=200,
         )
         mock_request.get(
@@ -206,7 +186,7 @@ async def test_form_reauth_invalid(hass: HomeAssistant) -> None:
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             text='{"jwt":"fakekeyhere","user_id":12345,"ttl":145656758,"error":false,"status":200}',
             status_code=200,
         )
@@ -11,24 +11,12 @@ from . import test_response
 
 from tests.components.wallbox import (
     DOMAIN,
+    authorisation_response,
     entry,
     setup_integration,
     setup_integration_connection_error,
     setup_integration_read_only,
 )
-from tests.components.wallbox.const import ERROR, JWT, STATUS, TTL, USER_ID
-
-authorisation_response = json.loads(
-    json.dumps(
-        {
-            JWT: "fakekeyhere",
-            USER_ID: 12345,
-            TTL: 145656758,
-            ERROR: "false",
-            STATUS: 200,
-        }
-    )
-)
 
 
 async def test_wallbox_setup_unload_entry(hass: HomeAssistant) -> None:
|
|||||||
|
|
||||||
with requests_mock.Mocker() as mock_request:
|
with requests_mock.Mocker() as mock_request:
|
||||||
mock_request.get(
|
mock_request.get(
|
||||||
"https://api.wall-box.com/auth/token/user",
|
"https://user-api.wall-box.com/users/signin",
|
||||||
json=authorisation_response,
|
json=authorisation_response,
|
||||||
status_code=403,
|
status_code=403,
|
||||||
)
|
)
|
||||||
@ -85,7 +73,7 @@ async def test_wallbox_refresh_failed_connection_error(hass: HomeAssistant) -> N
|
|||||||
|
|
||||||
with requests_mock.Mocker() as mock_request:
|
with requests_mock.Mocker() as mock_request:
|
||||||
mock_request.get(
|
mock_request.get(
|
||||||
"https://api.wall-box.com/auth/token/user",
|
"https://user-api.wall-box.com/users/signin",
|
||||||
json=authorisation_response,
|
json=authorisation_response,
|
||||||
status_code=200,
|
status_code=200,
|
||||||
)
|
)
|
||||||
|
@@ -10,30 +10,12 @@ from homeassistant.const import ATTR_ENTITY_ID
 from homeassistant.core import HomeAssistant
 
 from tests.components.wallbox import (
+    authorisation_response,
     entry,
     setup_integration,
     setup_integration_read_only,
 )
-from tests.components.wallbox.const import (
-    ERROR,
-    JWT,
-    MOCK_LOCK_ENTITY_ID,
-    STATUS,
-    TTL,
-    USER_ID,
-)
-
-authorisation_response = json.loads(
-    json.dumps(
-        {
-            JWT: "fakekeyhere",
-            USER_ID: 12345,
-            TTL: 145656758,
-            ERROR: "false",
-            STATUS: 200,
-        }
-    )
-)
+from tests.components.wallbox.const import MOCK_LOCK_ENTITY_ID
 
 
 async def test_wallbox_lock_class(hass: HomeAssistant) -> None:
@@ -47,7 +29,7 @@ async def test_wallbox_lock_class(hass: HomeAssistant) -> None:
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response,
             status_code=200,
         )
@@ -85,7 +67,7 @@ async def test_wallbox_lock_class_connection_error(hass: HomeAssistant) -> None:
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response,
             status_code=200,
         )
@@ -9,27 +9,8 @@ from homeassistant.components.wallbox import CHARGER_MAX_CHARGING_CURRENT_KEY
 from homeassistant.const import ATTR_ENTITY_ID
 from homeassistant.core import HomeAssistant
 
-from tests.components.wallbox import entry, setup_integration
-from tests.components.wallbox.const import (
-    ERROR,
-    JWT,
-    MOCK_NUMBER_ENTITY_ID,
-    STATUS,
-    TTL,
-    USER_ID,
-)
-
-authorisation_response = json.loads(
-    json.dumps(
-        {
-            JWT: "fakekeyhere",
-            USER_ID: 12345,
-            TTL: 145656758,
-            ERROR: "false",
-            STATUS: 200,
-        }
-    )
-)
+from tests.components.wallbox import authorisation_response, entry, setup_integration
+from tests.components.wallbox.const import MOCK_NUMBER_ENTITY_ID
 
 
 async def test_wallbox_number_class(hass: HomeAssistant) -> None:
@@ -39,7 +20,7 @@ async def test_wallbox_number_class(hass: HomeAssistant) -> None:
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response,
             status_code=200,
         )
@@ -68,7 +49,7 @@ async def test_wallbox_number_class_connection_error(hass: HomeAssistant) -> Non
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response,
             status_code=200,
         )
@@ -10,27 +10,8 @@ from homeassistant.components.wallbox.const import CHARGER_STATUS_ID_KEY
 from homeassistant.const import ATTR_ENTITY_ID
 from homeassistant.core import HomeAssistant
 
-from tests.components.wallbox import entry, setup_integration
-from tests.components.wallbox.const import (
-    ERROR,
-    JWT,
-    MOCK_SWITCH_ENTITY_ID,
-    STATUS,
-    TTL,
-    USER_ID,
-)
-
-authorisation_response = json.loads(
-    json.dumps(
-        {
-            JWT: "fakekeyhere",
-            USER_ID: 12345,
-            TTL: 145656758,
-            ERROR: "false",
-            STATUS: 200,
-        }
-    )
-)
+from tests.components.wallbox import authorisation_response, entry, setup_integration
+from tests.components.wallbox.const import MOCK_SWITCH_ENTITY_ID
 
 
 async def test_wallbox_switch_class(hass: HomeAssistant) -> None:
@@ -44,7 +25,7 @@ async def test_wallbox_switch_class(hass: HomeAssistant) -> None:
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response,
             status_code=200,
         )
@@ -82,7 +63,7 @@ async def test_wallbox_switch_class_connection_error(hass: HomeAssistant) -> Non
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response,
             status_code=200,
         )
@@ -121,7 +102,7 @@ async def test_wallbox_switch_class_authentication_error(hass: HomeAssistant) ->
 
     with requests_mock.Mocker() as mock_request:
         mock_request.get(
-            "https://api.wall-box.com/auth/token/user",
+            "https://user-api.wall-box.com/users/signin",
             json=authorisation_response,
             status_code=200,
         )