Mirror of https://github.com/home-assistant/core.git

2023.4.6 (#91833)
commit cdbdf1ba4f
@@ -244,7 +244,6 @@ CALENDAR_EVENT_SCHEMA = vol.Schema(
             },
         },
         _has_same_type("start", "end"),
         _has_timezone("start", "end"),
-        _has_consistent_timezone("start", "end"),
         _as_local_timezone("start", "end"),
         _has_min_duration("start", "end", MIN_EVENT_DURATION),
     ),
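A rough illustration (zone names are examples, not taken from the schema) of the behaviour the schema now relies on: rather than rejecting events whose start and end carry different timezones, both datetimes can simply be converted to one local timezone before comparison.

from datetime import datetime, timedelta
from zoneinfo import ZoneInfo

local = ZoneInfo("America/New_York")  # assumed local zone for the example
start = datetime(2023, 4, 20, 9, 0, tzinfo=ZoneInfo("America/Regina"))
end = (start + timedelta(hours=8)).astimezone(ZoneInfo("UTC"))

# Normalizing both ends to the local zone keeps the event internally consistent.
start_local, end_local = start.astimezone(local), end.astimezone(local)
assert start_local.tzinfo == end_local.tzinfo == local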
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_push",
   "loggers": ["sml"],
-  "requirements": ["pysml==0.0.9"]
+  "requirements": ["pysml==0.0.10"]
 }
@@ -5,7 +5,7 @@ import logging

 from homewizard_energy import HomeWizardEnergy
 from homewizard_energy.const import SUPPORTS_IDENTIFY, SUPPORTS_STATE, SUPPORTS_SYSTEM
-from homewizard_energy.errors import DisabledError, RequestError
+from homewizard_energy.errors import DisabledError, RequestError, UnsupportedError
 from homewizard_energy.models import Device

 from homeassistant.config_entries import ConfigEntry
@@ -24,6 +24,8 @@ class HWEnergyDeviceUpdateCoordinator(DataUpdateCoordinator[DeviceResponseEntry]
     api: HomeWizardEnergy
     api_disabled: bool = False

+    _unsupported_error: bool = False
+
     def __init__(
         self,
         hass: HomeAssistant,
@@ -43,12 +45,23 @@ class HWEnergyDeviceUpdateCoordinator(DataUpdateCoordinator[DeviceResponseEntry]
                 data=await self.api.data(),
             )

+            try:
                 if self.supports_state(data.device):
                     data.state = await self.api.state()

                 if self.supports_system(data.device):
                     data.system = await self.api.system()

+            except UnsupportedError as ex:
+                # Old firmware, ignore
+                if not self._unsupported_error:
+                    self._unsupported_error = True
+                    _LOGGER.warning(
+                        "%s is running an outdated firmware version (%s). Contact HomeWizard support to update your device",
+                        self.entry.title,
+                        ex,
+                    )
+
         except RequestError as ex:
             raise UpdateFailed(ex) from ex
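A minimal, self-contained sketch (invented names, not the Home Assistant coordinator) of the pattern added above: the optional calls are attempted, an "unsupported" error from old firmware is swallowed, and the warning is only logged on the first occurrence.

import logging

_LOGGER = logging.getLogger(__name__)


class UnsupportedError(Exception):
    """Stand-in for the client library's unsupported-feature error."""


class OptionalDataFetcher:
    """Fetch optional device data, tolerating firmware that lacks the API."""

    _unsupported_warned = False

    def fetch(self, call, name):
        try:
            return call()
        except UnsupportedError as ex:
            if not self._unsupported_warned:
                # Warn once per instance, then keep returning partial data.
                self._unsupported_warned = True
                _LOGGER.warning("Firmware too old for %s data: %s", name, ex)
            return None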
@@ -17,7 +17,7 @@
   "iot_class": "local_push",
   "loggers": ["pyinsteon", "pypubsub"],
   "requirements": [
-    "pyinsteon==1.4.1",
+    "pyinsteon==1.4.2",
     "insteon-frontend-home-assistant==0.3.4"
   ],
   "usb": [
@@ -12,5 +12,5 @@
   "integration_type": "hub",
   "iot_class": "cloud_push",
   "loggers": ["pylitterbot"],
-  "requirements": ["pylitterbot==2023.1.2"]
+  "requirements": ["pylitterbot==2023.4.0"]
 }
@@ -48,7 +48,10 @@ class LocalSource(MediaSource):
     @callback
     def async_full_path(self, source_dir_id: str, location: str) -> Path:
         """Return full path."""
-        return Path(self.hass.config.media_dirs[source_dir_id], location)
+        base_path = self.hass.config.media_dirs[source_dir_id]
+        full_path = Path(base_path, location)
+        full_path.relative_to(base_path)
+        return full_path

     @callback
     def async_parse_identifier(self, item: MediaSourceItem) -> tuple[str, str]:
@@ -65,6 +68,9 @@ class LocalSource(MediaSource):
         except ValueError as err:
             raise Unresolvable("Invalid path.") from err

+        if Path(location).is_absolute():
+            raise Unresolvable("Invalid path.")
+
         return source_dir_id, location

     async def async_resolve_media(self, item: MediaSourceItem) -> PlayMedia:
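A quick illustration (the paths are made up) of what the relative_to() guard above catches: joining a base directory with an absolute location makes pathlib drop the base entirely, and Path.relative_to() then raises ValueError because the result no longer lives under the media directory, mirroring the explicit is_absolute() check in async_parse_identifier.

from pathlib import Path

base = "/media"
for location in ("photos/cat.png", "/etc/passwd"):
    full_path = Path(base, location)  # an absolute location discards the base
    try:
        full_path.relative_to(base)
        print(f"allowed:  {full_path}")
    except ValueError:
        print(f"rejected: {full_path}")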
@@ -41,15 +41,6 @@ _LOGGER = logging.getLogger(__name__)

 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the MQTT state feed."""
-    # Make sure MQTT is available and the entry is loaded
-    if not hass.config_entries.async_entries(
-        mqtt.DOMAIN
-    ) or not await hass.config_entries.async_wait_component(
-        hass.config_entries.async_entries(mqtt.DOMAIN)[0]
-    ):
-        _LOGGER.error("MQTT integration is not available")
-        return False
-
     conf: ConfigType = config[DOMAIN]
     publish_filter = convert_include_exclude_filter(conf)
     base_topic: str = conf[CONF_BASE_TOPIC]
@@ -374,6 +374,8 @@ def state_changes_during_period(
     if entity_id:
         instance = recorder.get_instance(hass)
         metadata_id = instance.states_meta_manager.get(entity_id, session, False)
+        if metadata_id is None:
+            return {}
         entity_id_to_metadata_id = {entity_id: metadata_id}
         stmt = _state_changed_during_period_stmt(
             start_time,
@@ -394,7 +396,7 @@ def state_changes_during_period(
             states,
             start_time,
             entity_ids,
-            entity_id_to_metadata_id,
+            entity_id_to_metadata_id,  # type: ignore[arg-type]
             include_start_time_state=include_start_time_state,
         ),
     )
@@ -6,6 +6,7 @@ import contextlib
 from dataclasses import dataclass, replace as dataclass_replace
 from datetime import timedelta
 import logging
+from time import time
 from typing import TYPE_CHECKING, cast
 from uuid import UUID

@@ -26,7 +27,7 @@ from sqlalchemy.sql.expression import true

 from homeassistant.core import HomeAssistant
 from homeassistant.util.enum import try_parse_enum
-from homeassistant.util.ulid import ulid_to_bytes
+from homeassistant.util.ulid import ulid_at_time, ulid_to_bytes

 from .auto_repairs.events.schema import (
     correct_db_schema as events_correct_db_schema,
@@ -92,7 +93,6 @@ if TYPE_CHECKING:
     from . import Recorder

 LIVE_MIGRATION_MIN_SCHEMA_VERSION = 0
-_EMPTY_CONTEXT_ID = b"\x00" * 16
 _EMPTY_ENTITY_ID = "missing.entity_id"
 _EMPTY_EVENT_TYPE = "missing_event_type"

@@ -1364,13 +1364,17 @@ def _context_id_to_bytes(context_id: str | None) -> bytes | None:
         # ULIDs that filled the column to the max length
         # so we need to catch the ValueError and return
         # None if it happens
-        if len(context_id) == 32:
-            return UUID(context_id).bytes
         if len(context_id) == 26:
             return ulid_to_bytes(context_id)
+        return UUID(context_id).bytes
     return None


+def _generate_ulid_bytes_at_time(timestamp: float | None) -> bytes:
+    """Generate a ulid with a specific timestamp."""
+    return ulid_to_bytes(ulid_at_time(timestamp or time()))
+
+
 @retryable_database_job("migrate states context_ids to binary format")
 def migrate_states_context_ids(instance: Recorder) -> bool:
     """Migrate states context_ids to use binary format."""
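A small, standard-library-only illustration (a simplified stand-in, not the recorder's helpers) of the length-based dispatch above: a 26-character string is decoded as a Crockford base32 ULID and anything else is parsed as a UUID, with both ending up as the same 16-byte binary value.

import uuid

ULID_CHARS = "0123456789ABCDEFGHJKMNPQRSTVWXYZ"  # Crockford base32 alphabet


def context_id_to_bytes(context_id: str) -> bytes:
    if len(context_id) == 26:
        # Decode the ULID as a base32 number and keep it as 16 bytes.
        value = 0
        for char in context_id:
            value = value * 32 + ULID_CHARS.index(char)
        return value.to_bytes(16, "big")
    return uuid.UUID(context_id).bytes


assert len(context_id_to_bytes("01ARZ3NDEKTSV4RRFFQ69G5FAV")) == 16
assert len(context_id_to_bytes(uuid.uuid4().hex)) == 16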
@@ -1385,13 +1389,14 @@ def migrate_states_context_ids(instance: Recorder) -> bool:
                 {
                     "state_id": state_id,
                     "context_id": None,
-                    "context_id_bin": _to_bytes(context_id) or _EMPTY_CONTEXT_ID,
+                    "context_id_bin": _to_bytes(context_id)
+                    or _generate_ulid_bytes_at_time(last_updated_ts),
                     "context_user_id": None,
                     "context_user_id_bin": _to_bytes(context_user_id),
                     "context_parent_id": None,
                     "context_parent_id_bin": _to_bytes(context_parent_id),
                 }
-                for state_id, context_id, context_user_id, context_parent_id in states
+                for state_id, last_updated_ts, context_id, context_user_id, context_parent_id in states
             ],
         )
         # If there is more work to do return False
@@ -1419,13 +1424,14 @@ def migrate_events_context_ids(instance: Recorder) -> bool:
                 {
                     "event_id": event_id,
                     "context_id": None,
-                    "context_id_bin": _to_bytes(context_id) or _EMPTY_CONTEXT_ID,
+                    "context_id_bin": _to_bytes(context_id)
+                    or _generate_ulid_bytes_at_time(time_fired_ts),
                     "context_user_id": None,
                     "context_user_id_bin": _to_bytes(context_user_id),
                     "context_parent_id": None,
                     "context_parent_id_bin": _to_bytes(context_parent_id),
                 }
-                for event_id, context_id, context_user_id, context_parent_id in events
+                for event_id, time_fired_ts, context_id, context_user_id, context_parent_id in events
             ],
         )
         # If there is more work to do return False
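Rows whose context_id is missing or unparsable now fall back to a ULID generated at the row's own timestamp instead of an all-zero placeholder. A rough sketch (standard library only, not homeassistant.util.ulid) of why the tests later in this commit can assert on the first six bytes: a ULID starts with a 48-bit big-endian millisecond timestamp.

import os
import time


def ulid_bytes_at_time(timestamp: float | None) -> bytes:
    ts = timestamp if timestamp is not None else time.time()
    # 48-bit millisecond timestamp followed by 80 random bits.
    return int(ts * 1000).to_bytes(6, "big") + os.urandom(10)


generated = ulid_bytes_at_time(1677721632.552529)
assert generated[:6] == int(1677721632.552529 * 1000).to_bytes(6, "big")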
@@ -690,6 +690,7 @@ def find_events_context_ids_to_migrate() -> StatementLambdaElement:
     return lambda_stmt(
         lambda: select(
             Events.event_id,
+            Events.time_fired_ts,
             Events.context_id,
             Events.context_user_id,
             Events.context_parent_id,
@@ -788,6 +789,7 @@ def find_states_context_ids_to_migrate() -> StatementLambdaElement:
     return lambda_stmt(
         lambda: select(
             States.state_id,
+            States.last_updated_ts,
             States.context_id,
             States.context_user_id,
             States.context_parent_id,
|
@ -8,5 +8,5 @@
|
|||||||
"iot_class": "cloud_polling",
|
"iot_class": "cloud_polling",
|
||||||
"loggers": ["renault_api"],
|
"loggers": ["renault_api"],
|
||||||
"quality_scale": "platinum",
|
"quality_scale": "platinum",
|
||||||
"requirements": ["renault-api==0.1.12"]
|
"requirements": ["renault-api==0.1.13"]
|
||||||
}
|
}
|
||||||
|
@@ -9,7 +9,7 @@
   "iot_class": "local_push",
   "loggers": ["aioshelly"],
   "quality_scale": "platinum",
-  "requirements": ["aioshelly==5.3.1"],
+  "requirements": ["aioshelly==5.3.2"],
   "zeroconf": [
     {
       "type": "_http._tcp.local.",
@@ -7,7 +7,7 @@
   "iot_class": "local_push",
   "loggers": ["songpal"],
   "quality_scale": "gold",
-  "requirements": ["python-songpal==0.15.1"],
+  "requirements": ["python-songpal==0.15.2"],
   "ssdp": [
     {
       "st": "urn:schemas-sony-com:service:ScalarWebAPI:1",
@@ -446,7 +446,7 @@ class TodoistProjectData:
             LABELS: [],
             OVERDUE: False,
             PRIORITY: data.priority,
-            START: dt.utcnow(),
+            START: dt.now(),
             SUMMARY: data.content,
         }

@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2023
 MINOR_VERSION: Final = 4
-PATCH_VERSION: Final = "5"
+PATCH_VERSION: Final = "6"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

 [project]
 name = "homeassistant"
-version = "2023.4.5"
+version = "2023.4.6"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"
@@ -267,7 +267,7 @@ aiosenseme==0.6.1
 aiosenz==1.0.0

 # homeassistant.components.shelly
-aioshelly==5.3.1
+aioshelly==5.3.2

 # homeassistant.components.skybell
 aioskybell==22.7.0
@@ -1684,7 +1684,7 @@ pyialarm==2.2.0
 pyicloud==1.0.0

 # homeassistant.components.insteon
-pyinsteon==1.4.1
+pyinsteon==1.4.2

 # homeassistant.components.intesishome
 pyintesishome==1.8.0
@@ -1753,7 +1753,7 @@ pylibrespot-java==0.1.1
 pylitejet==0.5.0

 # homeassistant.components.litterrobot
-pylitterbot==2023.1.2
+pylitterbot==2023.4.0

 # homeassistant.components.lutron_caseta
 pylutron-caseta==0.18.1
@@ -1976,7 +1976,7 @@ pysmartthings==0.7.6
 pysmarty==0.8

 # homeassistant.components.edl21
-pysml==0.0.9
+pysml==0.0.10

 # homeassistant.components.snmp
 pysnmplib==5.0.21
@@ -2106,7 +2106,7 @@ python-ripple-api==0.0.3
 python-smarttub==0.0.33

 # homeassistant.components.songpal
-python-songpal==0.15.1
+python-songpal==0.15.2

 # homeassistant.components.tado
 python-tado==0.12.0
@@ -2228,7 +2228,7 @@ raspyrfm-client==1.2.8
 regenmaschine==2022.11.0

 # homeassistant.components.renault
-renault-api==0.1.12
+renault-api==0.1.13

 # homeassistant.components.reolink
 reolink-aio==0.5.10
@@ -248,7 +248,7 @@ aiosenseme==0.6.1
 aiosenz==1.0.0

 # homeassistant.components.shelly
-aioshelly==5.3.1
+aioshelly==5.3.2

 # homeassistant.components.skybell
 aioskybell==22.7.0
@@ -1218,7 +1218,7 @@ pyialarm==2.2.0
 pyicloud==1.0.0

 # homeassistant.components.insteon
-pyinsteon==1.4.1
+pyinsteon==1.4.2

 # homeassistant.components.ipma
 pyipma==3.0.6
@@ -1269,7 +1269,7 @@ pylibrespot-java==0.1.1
 pylitejet==0.5.0

 # homeassistant.components.litterrobot
-pylitterbot==2023.1.2
+pylitterbot==2023.4.0

 # homeassistant.components.lutron_caseta
 pylutron-caseta==0.18.1
@@ -1438,7 +1438,7 @@ pysmartapp==0.3.3
 pysmartthings==0.7.6

 # homeassistant.components.edl21
-pysml==0.0.9
+pysml==0.0.10

 # homeassistant.components.snmp
 pysnmplib==5.0.21
@@ -1508,7 +1508,7 @@ python-picnic-api==1.1.0
 python-smarttub==0.0.33

 # homeassistant.components.songpal
-python-songpal==0.15.1
+python-songpal==0.15.2

 # homeassistant.components.tado
 python-tado==0.12.0
@@ -1591,7 +1591,7 @@ radiotherm==2.1.0
 regenmaschine==2022.11.0

 # homeassistant.components.renault
-renault-api==0.1.12
+renault-api==0.1.13

 # homeassistant.components.reolink
 reolink-aio==0.5.10
@@ -1295,3 +1295,37 @@ async def test_event_without_duration(
     assert state.attributes.get("start_time") == one_hour_from_now.strftime(
         DATE_STR_FORMAT
     )
+
+
+async def test_event_differs_timezone(
+    hass: HomeAssistant, mock_events_list_items, component_setup
+) -> None:
+    """Test a case where the event has a different start/end timezone."""
+    one_hour_from_now = dt_util.now() + datetime.timedelta(minutes=30)
+    end_event = one_hour_from_now + datetime.timedelta(hours=8)
+    event = {
+        **TEST_EVENT,
+        "start": {
+            "dateTime": one_hour_from_now.isoformat(),
+            "timeZone": "America/Regina",
+        },
+        "end": {"dateTime": end_event.isoformat(), "timeZone": "UTC"},
+    }
+    mock_events_list_items([event])
+
+    assert await component_setup()
+
+    state = hass.states.get(TEST_ENTITY)
+    assert state.name == TEST_ENTITY_NAME
+    assert state.state == STATE_OFF
+    assert dict(state.attributes) == {
+        "friendly_name": TEST_ENTITY_NAME,
+        "message": event["summary"],
+        "all_day": False,
+        "offset_reached": False,
+        "start_time": one_hour_from_now.strftime(DATE_STR_FORMAT),
+        "end_time": end_event.strftime(DATE_STR_FORMAT),
+        "location": event["location"],
+        "description": event["description"],
+        "supported_features": 3,
+    }
@@ -1,7 +1,7 @@
 """Test the update coordinator for HomeWizard."""
 from unittest.mock import AsyncMock, patch

-from homewizard_energy.errors import DisabledError, RequestError
+from homewizard_energy.errors import DisabledError, RequestError, UnsupportedError
 from homewizard_energy.models import State, System
 import pytest

@@ -507,3 +507,39 @@ async def test_switch_handles_disablederror(
         {"entity_id": "switch.product_name_aabbccddeeff_cloud_connection"},
         blocking=True,
     )
+
+
+async def test_switch_handles_unsupportedrrror(
+    hass: HomeAssistant, mock_config_entry_data, mock_config_entry
+) -> None:
+    """Test entity raises HomeAssistantError when Disabled was raised."""
+
+    api = get_mock_device(product_type="HWE-SKT", firmware_version="3.02")
+    api.state = AsyncMock(side_effect=UnsupportedError())
+    api.system = AsyncMock(side_effect=UnsupportedError())
+
+    with patch(
+        "homeassistant.components.homewizard.coordinator.HomeWizardEnergy",
+        return_value=api,
+    ):
+        entry = mock_config_entry
+        entry.data = mock_config_entry_data
+        entry.add_to_hass(hass)
+
+        await hass.config_entries.async_setup(entry.entry_id)
+        await hass.async_block_till_done()
+
+    assert (
+        hass.states.get("switch.product_name_aabbccddeeff_cloud_connection").state
+        == STATE_UNAVAILABLE
+    )
+
+    assert (
+        hass.states.get("switch.product_name_aabbccddeeff_switch_lock").state
+        == STATE_UNAVAILABLE
+    )
+
+    assert (
+        hass.states.get("switch.product_name_aabbccddeeff").state
+        == STATE_UNAVAILABLE
+    )
|
|||||||
"""Tests Feeder-Robot sensors."""
|
"""Tests Feeder-Robot sensors."""
|
||||||
await setup_integration(hass, mock_account_with_feederrobot, PLATFORM_DOMAIN)
|
await setup_integration(hass, mock_account_with_feederrobot, PLATFORM_DOMAIN)
|
||||||
sensor = hass.states.get("sensor.test_food_level")
|
sensor = hass.states.get("sensor.test_food_level")
|
||||||
assert sensor.state == "20"
|
assert sensor.state == "10"
|
||||||
assert sensor.attributes["unit_of_measurement"] == PERCENTAGE
|
assert sensor.attributes["unit_of_measurement"] == PERCENTAGE
|
||||||
|
@@ -132,9 +132,13 @@ async def test_upload_view(
     hass: HomeAssistant,
     hass_client: ClientSessionGenerator,
     temp_dir,
+    tmpdir,
     hass_admin_user: MockUser,
 ) -> None:
     """Allow uploading media."""
+    # We need a temp dir that's not under tempdir fixture
+    extra_media_dir = tmpdir
+    hass.config.media_dirs["another_path"] = temp_dir

     img = (Path(__file__).parent.parent / "image_upload/logo.png").read_bytes()

|
|||||||
"media-source://media_source/test_dir/..",
|
"media-source://media_source/test_dir/..",
|
||||||
# Domain != media_source
|
# Domain != media_source
|
||||||
"media-source://nest/test_dir/.",
|
"media-source://nest/test_dir/.",
|
||||||
|
# Other directory
|
||||||
|
f"media-source://media_source/another_path///{extra_media_dir}/",
|
||||||
# Completely something else
|
# Completely something else
|
||||||
"http://bla",
|
"http://bla",
|
||||||
):
|
):
|
||||||
@ -178,7 +184,7 @@ async def test_upload_view(
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
assert res.status == 400
|
assert res.status == 400, bad_id
|
||||||
assert not (Path(temp_dir) / "bad-source-id.png").is_file()
|
assert not (Path(temp_dir) / "bad-source-id.png").is_file()
|
||||||
|
|
||||||
# Test invalid POST data
|
# Test invalid POST data
|
||||||
|
@@ -96,12 +96,19 @@ async def test_setup_and_stop_waits_for_ha(
     mqtt_mock.async_publish.assert_not_called()


+@pytest.mark.xfail()
 async def test_startup_no_mqtt(
     hass: HomeAssistant, caplog: pytest.LogCaptureFixture
 ) -> None:
     """Test startup without MQTT support."""
-    assert not await add_statestream(hass, base_topic="pub")
-    assert "MQTT integration is not available" in caplog.text
+    e_id = "fake.entity"
+
+    assert await add_statestream(hass, base_topic="pub")
+    # Set a state of an entity
+    mock_state_change_event(hass, State(e_id, "on"))
+    await hass.async_block_till_done()
+    await hass.async_block_till_done()
+    assert "MQTT is not enabled" in caplog.text


 async def test_setup_succeeds_with_attributes(
@@ -6,10 +6,9 @@ import sqlite3
 import sys
 import threading
 from unittest.mock import Mock, PropertyMock, call, patch
-import uuid

 import pytest
-from sqlalchemy import create_engine, inspect, text
+from sqlalchemy import create_engine, text
 from sqlalchemy.exc import (
     DatabaseError,
     InternalError,
|
|||||||
from homeassistant.components.recorder.tasks import (
|
from homeassistant.components.recorder.tasks import (
|
||||||
EntityIDMigrationTask,
|
EntityIDMigrationTask,
|
||||||
EntityIDPostMigrationTask,
|
EntityIDPostMigrationTask,
|
||||||
EventsContextIDMigrationTask,
|
|
||||||
EventTypeIDMigrationTask,
|
EventTypeIDMigrationTask,
|
||||||
StatesContextIDMigrationTask,
|
|
||||||
)
|
)
|
||||||
from homeassistant.components.recorder.util import session_scope
|
from homeassistant.components.recorder.util import session_scope
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.helpers import recorder as recorder_helper
|
from homeassistant.helpers import recorder as recorder_helper
|
||||||
import homeassistant.util.dt as dt_util
|
import homeassistant.util.dt as dt_util
|
||||||
from homeassistant.util.ulid import bytes_to_ulid
|
|
||||||
|
|
||||||
from .common import (
|
from .common import (
|
||||||
async_recorder_block_till_done,
|
async_recorder_block_till_done,
|
||||||
@ -603,322 +599,6 @@ def test_raise_if_exception_missing_empty_cause_str() -> None:
|
|||||||
migration.raise_if_exception_missing_str(programming_exc, ["not present"])
|
migration.raise_if_exception_missing_str(programming_exc, ["not present"])
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("enable_migrate_context_ids", [True])
|
|
||||||
async def test_migrate_events_context_ids(
|
|
||||||
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
|
|
||||||
) -> None:
|
|
||||||
"""Test we can migrate old uuid context ids and ulid context ids to binary format."""
|
|
||||||
instance = await async_setup_recorder_instance(hass)
|
|
||||||
await async_wait_recording_done(hass)
|
|
||||||
|
|
||||||
test_uuid = uuid.uuid4()
|
|
||||||
uuid_hex = test_uuid.hex
|
|
||||||
uuid_bin = test_uuid.bytes
|
|
||||||
|
|
||||||
def _insert_events():
|
|
||||||
with session_scope(hass=hass) as session:
|
|
||||||
session.add_all(
|
|
||||||
(
|
|
||||||
Events(
|
|
||||||
event_type="old_uuid_context_id_event",
|
|
||||||
event_data=None,
|
|
||||||
origin_idx=0,
|
|
||||||
time_fired=None,
|
|
||||||
time_fired_ts=1677721632.452529,
|
|
||||||
context_id=uuid_hex,
|
|
||||||
context_id_bin=None,
|
|
||||||
context_user_id=None,
|
|
||||||
context_user_id_bin=None,
|
|
||||||
context_parent_id=None,
|
|
||||||
context_parent_id_bin=None,
|
|
||||||
),
|
|
||||||
Events(
|
|
||||||
event_type="empty_context_id_event",
|
|
||||||
event_data=None,
|
|
||||||
origin_idx=0,
|
|
||||||
time_fired=None,
|
|
||||||
time_fired_ts=1677721632.552529,
|
|
||||||
context_id=None,
|
|
||||||
context_id_bin=None,
|
|
||||||
context_user_id=None,
|
|
||||||
context_user_id_bin=None,
|
|
||||||
context_parent_id=None,
|
|
||||||
context_parent_id_bin=None,
|
|
||||||
),
|
|
||||||
Events(
|
|
||||||
event_type="ulid_context_id_event",
|
|
||||||
event_data=None,
|
|
||||||
origin_idx=0,
|
|
||||||
time_fired=None,
|
|
||||||
time_fired_ts=1677721632.552529,
|
|
||||||
context_id="01ARZ3NDEKTSV4RRFFQ69G5FAV",
|
|
||||||
context_id_bin=None,
|
|
||||||
context_user_id="9400facee45711eaa9308bfd3d19e474",
|
|
||||||
context_user_id_bin=None,
|
|
||||||
context_parent_id="01ARZ3NDEKTSV4RRFFQ69G5FA2",
|
|
||||||
context_parent_id_bin=None,
|
|
||||||
),
|
|
||||||
Events(
|
|
||||||
event_type="invalid_context_id_event",
|
|
||||||
event_data=None,
|
|
||||||
origin_idx=0,
|
|
||||||
time_fired=None,
|
|
||||||
time_fired_ts=1677721632.552529,
|
|
||||||
context_id="invalid",
|
|
||||||
context_id_bin=None,
|
|
||||||
context_user_id=None,
|
|
||||||
context_user_id_bin=None,
|
|
||||||
context_parent_id=None,
|
|
||||||
context_parent_id_bin=None,
|
|
||||||
),
|
|
||||||
Events(
|
|
||||||
event_type="garbage_context_id_event",
|
|
||||||
event_data=None,
|
|
||||||
origin_idx=0,
|
|
||||||
time_fired=None,
|
|
||||||
time_fired_ts=1677721632.552529,
|
|
||||||
context_id="adapt_lgt:b'5Cf*':interval:b'0R'",
|
|
||||||
context_id_bin=None,
|
|
||||||
context_user_id=None,
|
|
||||||
context_user_id_bin=None,
|
|
||||||
context_parent_id=None,
|
|
||||||
context_parent_id_bin=None,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
await instance.async_add_executor_job(_insert_events)
|
|
||||||
|
|
||||||
await async_wait_recording_done(hass)
|
|
||||||
# This is a threadsafe way to add a task to the recorder
|
|
||||||
instance.queue_task(EventsContextIDMigrationTask())
|
|
||||||
await async_recorder_block_till_done(hass)
|
|
||||||
|
|
||||||
def _object_as_dict(obj):
|
|
||||||
return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs}
|
|
||||||
|
|
||||||
def _fetch_migrated_events():
|
|
||||||
with session_scope(hass=hass) as session:
|
|
||||||
events = (
|
|
||||||
session.query(Events)
|
|
||||||
.filter(
|
|
||||||
Events.event_type.in_(
|
|
||||||
[
|
|
||||||
"old_uuid_context_id_event",
|
|
||||||
"empty_context_id_event",
|
|
||||||
"ulid_context_id_event",
|
|
||||||
"invalid_context_id_event",
|
|
||||||
"garbage_context_id_event",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.all()
|
|
||||||
)
|
|
||||||
assert len(events) == 5
|
|
||||||
return {event.event_type: _object_as_dict(event) for event in events}
|
|
||||||
|
|
||||||
events_by_type = await instance.async_add_executor_job(_fetch_migrated_events)
|
|
||||||
|
|
||||||
old_uuid_context_id_event = events_by_type["old_uuid_context_id_event"]
|
|
||||||
assert old_uuid_context_id_event["context_id"] is None
|
|
||||||
assert old_uuid_context_id_event["context_user_id"] is None
|
|
||||||
assert old_uuid_context_id_event["context_parent_id"] is None
|
|
||||||
assert old_uuid_context_id_event["context_id_bin"] == uuid_bin
|
|
||||||
assert old_uuid_context_id_event["context_user_id_bin"] is None
|
|
||||||
assert old_uuid_context_id_event["context_parent_id_bin"] is None
|
|
||||||
|
|
||||||
empty_context_id_event = events_by_type["empty_context_id_event"]
|
|
||||||
assert empty_context_id_event["context_id"] is None
|
|
||||||
assert empty_context_id_event["context_user_id"] is None
|
|
||||||
assert empty_context_id_event["context_parent_id"] is None
|
|
||||||
assert empty_context_id_event["context_id_bin"] == b"\x00" * 16
|
|
||||||
assert empty_context_id_event["context_user_id_bin"] is None
|
|
||||||
assert empty_context_id_event["context_parent_id_bin"] is None
|
|
||||||
|
|
||||||
ulid_context_id_event = events_by_type["ulid_context_id_event"]
|
|
||||||
assert ulid_context_id_event["context_id"] is None
|
|
||||||
assert ulid_context_id_event["context_user_id"] is None
|
|
||||||
assert ulid_context_id_event["context_parent_id"] is None
|
|
||||||
assert (
|
|
||||||
bytes_to_ulid(ulid_context_id_event["context_id_bin"])
|
|
||||||
== "01ARZ3NDEKTSV4RRFFQ69G5FAV"
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
ulid_context_id_event["context_user_id_bin"]
|
|
||||||
== b"\x94\x00\xfa\xce\xe4W\x11\xea\xa90\x8b\xfd=\x19\xe4t"
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
bytes_to_ulid(ulid_context_id_event["context_parent_id_bin"])
|
|
||||||
== "01ARZ3NDEKTSV4RRFFQ69G5FA2"
|
|
||||||
)
|
|
||||||
|
|
||||||
invalid_context_id_event = events_by_type["invalid_context_id_event"]
|
|
||||||
assert invalid_context_id_event["context_id"] is None
|
|
||||||
assert invalid_context_id_event["context_user_id"] is None
|
|
||||||
assert invalid_context_id_event["context_parent_id"] is None
|
|
||||||
assert invalid_context_id_event["context_id_bin"] == b"\x00" * 16
|
|
||||||
assert invalid_context_id_event["context_user_id_bin"] is None
|
|
||||||
assert invalid_context_id_event["context_parent_id_bin"] is None
|
|
||||||
|
|
||||||
garbage_context_id_event = events_by_type["garbage_context_id_event"]
|
|
||||||
assert garbage_context_id_event["context_id"] is None
|
|
||||||
assert garbage_context_id_event["context_user_id"] is None
|
|
||||||
assert garbage_context_id_event["context_parent_id"] is None
|
|
||||||
assert garbage_context_id_event["context_id_bin"] == b"\x00" * 16
|
|
||||||
assert garbage_context_id_event["context_user_id_bin"] is None
|
|
||||||
assert garbage_context_id_event["context_parent_id_bin"] is None
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("enable_migrate_context_ids", [True])
|
|
||||||
async def test_migrate_states_context_ids(
|
|
||||||
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
|
|
||||||
) -> None:
|
|
||||||
"""Test we can migrate old uuid context ids and ulid context ids to binary format."""
|
|
||||||
instance = await async_setup_recorder_instance(hass)
|
|
||||||
await async_wait_recording_done(hass)
|
|
||||||
|
|
||||||
test_uuid = uuid.uuid4()
|
|
||||||
uuid_hex = test_uuid.hex
|
|
||||||
uuid_bin = test_uuid.bytes
|
|
||||||
|
|
||||||
def _insert_events():
|
|
||||||
with session_scope(hass=hass) as session:
|
|
||||||
session.add_all(
|
|
||||||
(
|
|
||||||
States(
|
|
||||||
entity_id="state.old_uuid_context_id",
|
|
||||||
last_updated_ts=1677721632.452529,
|
|
||||||
context_id=uuid_hex,
|
|
||||||
context_id_bin=None,
|
|
||||||
context_user_id=None,
|
|
||||||
context_user_id_bin=None,
|
|
||||||
context_parent_id=None,
|
|
||||||
context_parent_id_bin=None,
|
|
||||||
),
|
|
||||||
States(
|
|
||||||
entity_id="state.empty_context_id",
|
|
||||||
last_updated_ts=1677721632.552529,
|
|
||||||
context_id=None,
|
|
||||||
context_id_bin=None,
|
|
||||||
context_user_id=None,
|
|
||||||
context_user_id_bin=None,
|
|
||||||
context_parent_id=None,
|
|
||||||
context_parent_id_bin=None,
|
|
||||||
),
|
|
||||||
States(
|
|
||||||
entity_id="state.ulid_context_id",
|
|
||||||
last_updated_ts=1677721632.552529,
|
|
||||||
context_id="01ARZ3NDEKTSV4RRFFQ69G5FAV",
|
|
||||||
context_id_bin=None,
|
|
||||||
context_user_id="9400facee45711eaa9308bfd3d19e474",
|
|
||||||
context_user_id_bin=None,
|
|
||||||
context_parent_id="01ARZ3NDEKTSV4RRFFQ69G5FA2",
|
|
||||||
context_parent_id_bin=None,
|
|
||||||
),
|
|
||||||
States(
|
|
||||||
entity_id="state.invalid_context_id",
|
|
||||||
last_updated_ts=1677721632.552529,
|
|
||||||
context_id="invalid",
|
|
||||||
context_id_bin=None,
|
|
||||||
context_user_id=None,
|
|
||||||
context_user_id_bin=None,
|
|
||||||
context_parent_id=None,
|
|
||||||
context_parent_id_bin=None,
|
|
||||||
),
|
|
||||||
States(
|
|
||||||
entity_id="state.garbage_context_id",
|
|
||||||
last_updated_ts=1677721632.552529,
|
|
||||||
context_id="adapt_lgt:b'5Cf*':interval:b'0R'",
|
|
||||||
context_id_bin=None,
|
|
||||||
context_user_id=None,
|
|
||||||
context_user_id_bin=None,
|
|
||||||
context_parent_id=None,
|
|
||||||
context_parent_id_bin=None,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
await instance.async_add_executor_job(_insert_events)
|
|
||||||
|
|
||||||
await async_wait_recording_done(hass)
|
|
||||||
# This is a threadsafe way to add a task to the recorder
|
|
||||||
instance.queue_task(StatesContextIDMigrationTask())
|
|
||||||
await async_recorder_block_till_done(hass)
|
|
||||||
|
|
||||||
def _object_as_dict(obj):
|
|
||||||
return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs}
|
|
||||||
|
|
||||||
def _fetch_migrated_states():
|
|
||||||
with session_scope(hass=hass) as session:
|
|
||||||
events = (
|
|
||||||
session.query(States)
|
|
||||||
.filter(
|
|
||||||
States.entity_id.in_(
|
|
||||||
[
|
|
||||||
"state.old_uuid_context_id",
|
|
||||||
"state.empty_context_id",
|
|
||||||
"state.ulid_context_id",
|
|
||||||
"state.invalid_context_id",
|
|
||||||
"state.garbage_context_id",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.all()
|
|
||||||
)
|
|
||||||
assert len(events) == 5
|
|
||||||
return {state.entity_id: _object_as_dict(state) for state in events}
|
|
||||||
|
|
||||||
states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states)
|
|
||||||
|
|
||||||
old_uuid_context_id = states_by_entity_id["state.old_uuid_context_id"]
|
|
||||||
assert old_uuid_context_id["context_id"] is None
|
|
||||||
assert old_uuid_context_id["context_user_id"] is None
|
|
||||||
assert old_uuid_context_id["context_parent_id"] is None
|
|
||||||
assert old_uuid_context_id["context_id_bin"] == uuid_bin
|
|
||||||
assert old_uuid_context_id["context_user_id_bin"] is None
|
|
||||||
assert old_uuid_context_id["context_parent_id_bin"] is None
|
|
||||||
|
|
||||||
empty_context_id = states_by_entity_id["state.empty_context_id"]
|
|
||||||
assert empty_context_id["context_id"] is None
|
|
||||||
assert empty_context_id["context_user_id"] is None
|
|
||||||
assert empty_context_id["context_parent_id"] is None
|
|
||||||
assert empty_context_id["context_id_bin"] == b"\x00" * 16
|
|
||||||
assert empty_context_id["context_user_id_bin"] is None
|
|
||||||
assert empty_context_id["context_parent_id_bin"] is None
|
|
||||||
|
|
||||||
ulid_context_id = states_by_entity_id["state.ulid_context_id"]
|
|
||||||
assert ulid_context_id["context_id"] is None
|
|
||||||
assert ulid_context_id["context_user_id"] is None
|
|
||||||
assert ulid_context_id["context_parent_id"] is None
|
|
||||||
assert (
|
|
||||||
bytes_to_ulid(ulid_context_id["context_id_bin"]) == "01ARZ3NDEKTSV4RRFFQ69G5FAV"
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
ulid_context_id["context_user_id_bin"]
|
|
||||||
== b"\x94\x00\xfa\xce\xe4W\x11\xea\xa90\x8b\xfd=\x19\xe4t"
|
|
||||||
)
|
|
||||||
assert (
|
|
||||||
bytes_to_ulid(ulid_context_id["context_parent_id_bin"])
|
|
||||||
== "01ARZ3NDEKTSV4RRFFQ69G5FA2"
|
|
||||||
)
|
|
||||||
|
|
||||||
invalid_context_id = states_by_entity_id["state.invalid_context_id"]
|
|
||||||
assert invalid_context_id["context_id"] is None
|
|
||||||
assert invalid_context_id["context_user_id"] is None
|
|
||||||
assert invalid_context_id["context_parent_id"] is None
|
|
||||||
assert invalid_context_id["context_id_bin"] == b"\x00" * 16
|
|
||||||
assert invalid_context_id["context_user_id_bin"] is None
|
|
||||||
assert invalid_context_id["context_parent_id_bin"] is None
|
|
||||||
|
|
||||||
garbage_context_id = states_by_entity_id["state.garbage_context_id"]
|
|
||||||
assert garbage_context_id["context_id"] is None
|
|
||||||
assert garbage_context_id["context_user_id"] is None
|
|
||||||
assert garbage_context_id["context_parent_id"] is None
|
|
||||||
assert garbage_context_id["context_id_bin"] == b"\x00" * 16
|
|
||||||
assert garbage_context_id["context_user_id_bin"] is None
|
|
||||||
assert garbage_context_id["context_parent_id_bin"] is None
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("enable_migrate_event_type_ids", [True])
|
@pytest.mark.parametrize("enable_migrate_event_type_ids", [True])
|
||||||
async def test_migrate_event_type_ids(
|
async def test_migrate_event_type_ids(
|
||||||
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
|
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
|
||||||
|
818
tests/components/recorder/test_migration_from_schema_32.py
Normal file
818
tests/components/recorder/test_migration_from_schema_32.py
Normal file
@ -0,0 +1,818 @@
|
|||||||
|
"""The tests for the recorder filter matching the EntityFilter component."""
|
||||||
|
# pylint: disable=invalid-name
|
||||||
|
import importlib
|
||||||
|
import sys
|
||||||
|
from unittest.mock import patch
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from freezegun import freeze_time
|
||||||
|
import pytest
|
||||||
|
from sqlalchemy import create_engine, inspect
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from homeassistant.components import recorder
|
||||||
|
from homeassistant.components.recorder import core, migration, statistics
|
||||||
|
from homeassistant.components.recorder.db_schema import (
|
||||||
|
Events,
|
||||||
|
EventTypes,
|
||||||
|
States,
|
||||||
|
StatesMeta,
|
||||||
|
)
|
||||||
|
from homeassistant.components.recorder.queries import select_event_type_ids
|
||||||
|
from homeassistant.components.recorder.tasks import (
|
||||||
|
EntityIDMigrationTask,
|
||||||
|
EntityIDPostMigrationTask,
|
||||||
|
EventsContextIDMigrationTask,
|
||||||
|
EventTypeIDMigrationTask,
|
||||||
|
StatesContextIDMigrationTask,
|
||||||
|
)
|
||||||
|
from homeassistant.components.recorder.util import session_scope
|
||||||
|
from homeassistant.core import HomeAssistant
|
||||||
|
import homeassistant.util.dt as dt_util
|
||||||
|
from homeassistant.util.ulid import bytes_to_ulid, ulid_at_time, ulid_to_bytes
|
||||||
|
|
||||||
|
from .common import async_recorder_block_till_done, async_wait_recording_done
|
||||||
|
|
||||||
|
from tests.typing import RecorderInstanceGenerator
|
||||||
|
|
||||||
|
CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine"
|
||||||
|
SCHEMA_MODULE = "tests.components.recorder.db_schema_32"
|
||||||
|
ORIG_TZ = dt_util.DEFAULT_TIME_ZONE
|
||||||
|
|
||||||
|
|
||||||
|
def _create_engine_test(*args, **kwargs):
|
||||||
|
"""Test version of create_engine that initializes with old schema.
|
||||||
|
|
||||||
|
This simulates an existing db with the old schema.
|
||||||
|
"""
|
||||||
|
importlib.import_module(SCHEMA_MODULE)
|
||||||
|
old_db_schema = sys.modules[SCHEMA_MODULE]
|
||||||
|
engine = create_engine(*args, **kwargs)
|
||||||
|
old_db_schema.Base.metadata.create_all(engine)
|
||||||
|
with Session(engine) as session:
|
||||||
|
session.add(
|
||||||
|
recorder.db_schema.StatisticsRuns(start=statistics.get_start_time())
|
||||||
|
)
|
||||||
|
session.add(
|
||||||
|
recorder.db_schema.SchemaChanges(
|
||||||
|
schema_version=old_db_schema.SCHEMA_VERSION
|
||||||
|
)
|
||||||
|
)
|
||||||
|
session.commit()
|
||||||
|
return engine
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(autouse=True)
|
||||||
|
def db_schema_32():
|
||||||
|
"""Fixture to initialize the db with the old schema."""
|
||||||
|
importlib.import_module(SCHEMA_MODULE)
|
||||||
|
old_db_schema = sys.modules[SCHEMA_MODULE]
|
||||||
|
|
||||||
|
with patch.object(recorder, "db_schema", old_db_schema), patch.object(
|
||||||
|
recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
|
||||||
|
), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(
|
||||||
|
core, "EventTypes", old_db_schema.EventTypes
|
||||||
|
), patch.object(
|
||||||
|
core, "EventData", old_db_schema.EventData
|
||||||
|
), patch.object(
|
||||||
|
core, "States", old_db_schema.States
|
||||||
|
), patch.object(
|
||||||
|
core, "Events", old_db_schema.Events
|
||||||
|
), patch.object(
|
||||||
|
core, "StateAttributes", old_db_schema.StateAttributes
|
||||||
|
), patch.object(
|
||||||
|
core, "EntityIDMigrationTask", core.RecorderTask
|
||||||
|
), patch(
|
||||||
|
CREATE_ENGINE_TARGET, new=_create_engine_test
|
||||||
|
):
|
||||||
|
yield
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture(name="legacy_recorder_mock")
|
||||||
|
async def legacy_recorder_mock_fixture(recorder_mock):
|
||||||
|
"""Fixture for legacy recorder mock."""
|
||||||
|
with patch.object(recorder_mock.states_meta_manager, "active", False):
|
||||||
|
yield recorder_mock
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("enable_migrate_context_ids", [True])
|
||||||
|
async def test_migrate_events_context_ids(
|
||||||
|
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
|
||||||
|
) -> None:
|
||||||
|
"""Test we can migrate old uuid context ids and ulid context ids to binary format."""
|
||||||
|
instance = await async_setup_recorder_instance(hass)
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
|
||||||
|
test_uuid = uuid.uuid4()
|
||||||
|
uuid_hex = test_uuid.hex
|
||||||
|
uuid_bin = test_uuid.bytes
|
||||||
|
|
||||||
|
def _insert_events():
|
||||||
|
with session_scope(hass=hass) as session:
|
||||||
|
session.add_all(
|
||||||
|
(
|
||||||
|
Events(
|
||||||
|
event_type="old_uuid_context_id_event",
|
||||||
|
event_data=None,
|
||||||
|
origin_idx=0,
|
||||||
|
time_fired=None,
|
||||||
|
time_fired_ts=1877721632.452529,
|
||||||
|
context_id=uuid_hex,
|
||||||
|
context_id_bin=None,
|
||||||
|
context_user_id=None,
|
||||||
|
context_user_id_bin=None,
|
||||||
|
context_parent_id=None,
|
||||||
|
context_parent_id_bin=None,
|
||||||
|
),
|
||||||
|
Events(
|
||||||
|
event_type="empty_context_id_event",
|
||||||
|
event_data=None,
|
||||||
|
origin_idx=0,
|
||||||
|
time_fired=None,
|
||||||
|
time_fired_ts=1877721632.552529,
|
||||||
|
context_id=None,
|
||||||
|
context_id_bin=None,
|
||||||
|
context_user_id=None,
|
||||||
|
context_user_id_bin=None,
|
||||||
|
context_parent_id=None,
|
||||||
|
context_parent_id_bin=None,
|
||||||
|
),
|
||||||
|
Events(
|
||||||
|
event_type="ulid_context_id_event",
|
||||||
|
event_data=None,
|
||||||
|
origin_idx=0,
|
||||||
|
time_fired=None,
|
||||||
|
time_fired_ts=1877721632.552529,
|
||||||
|
context_id="01ARZ3NDEKTSV4RRFFQ69G5FAV",
|
||||||
|
context_id_bin=None,
|
||||||
|
context_user_id="9400facee45711eaa9308bfd3d19e474",
|
||||||
|
context_user_id_bin=None,
|
||||||
|
context_parent_id="01ARZ3NDEKTSV4RRFFQ69G5FA2",
|
||||||
|
context_parent_id_bin=None,
|
||||||
|
),
|
||||||
|
Events(
|
||||||
|
event_type="invalid_context_id_event",
|
||||||
|
event_data=None,
|
||||||
|
origin_idx=0,
|
||||||
|
time_fired=None,
|
||||||
|
time_fired_ts=1877721632.552529,
|
||||||
|
context_id="invalid",
|
||||||
|
context_id_bin=None,
|
||||||
|
context_user_id=None,
|
||||||
|
context_user_id_bin=None,
|
||||||
|
context_parent_id=None,
|
||||||
|
context_parent_id_bin=None,
|
||||||
|
),
|
||||||
|
Events(
|
||||||
|
event_type="garbage_context_id_event",
|
||||||
|
event_data=None,
|
||||||
|
origin_idx=0,
|
||||||
|
time_fired=None,
|
||||||
|
time_fired_ts=1277721632.552529,
|
||||||
|
context_id="adapt_lgt:b'5Cf*':interval:b'0R'",
|
||||||
|
context_id_bin=None,
|
||||||
|
context_user_id=None,
|
||||||
|
context_user_id_bin=None,
|
||||||
|
context_parent_id=None,
|
||||||
|
context_parent_id_bin=None,
|
||||||
|
),
|
||||||
|
Events(
|
||||||
|
event_type="event_with_garbage_context_id_no_time_fired_ts",
|
||||||
|
event_data=None,
|
||||||
|
origin_idx=0,
|
||||||
|
time_fired=None,
|
||||||
|
time_fired_ts=None,
|
||||||
|
context_id="adapt_lgt:b'5Cf*':interval:b'0R'",
|
||||||
|
context_id_bin=None,
|
||||||
|
context_user_id=None,
|
||||||
|
context_user_id_bin=None,
|
||||||
|
context_parent_id=None,
|
||||||
|
context_parent_id_bin=None,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
await instance.async_add_executor_job(_insert_events)
|
||||||
|
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
now = dt_util.utcnow()
|
||||||
|
expected_ulid_fallback_start = ulid_to_bytes(ulid_at_time(now.timestamp()))[0:6]
|
||||||
|
with freeze_time(now):
|
||||||
|
# This is a threadsafe way to add a task to the recorder
|
||||||
|
instance.queue_task(EventsContextIDMigrationTask())
|
||||||
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
|
def _object_as_dict(obj):
|
||||||
|
return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs}
|
||||||
|
|
||||||
|
def _fetch_migrated_events():
|
||||||
|
with session_scope(hass=hass) as session:
|
||||||
|
events = (
|
||||||
|
session.query(Events)
|
||||||
|
.filter(
|
||||||
|
Events.event_type.in_(
|
||||||
|
[
|
||||||
|
"old_uuid_context_id_event",
|
||||||
|
"empty_context_id_event",
|
||||||
|
"ulid_context_id_event",
|
||||||
|
"invalid_context_id_event",
|
||||||
|
"garbage_context_id_event",
|
||||||
|
"event_with_garbage_context_id_no_time_fired_ts",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
assert len(events) == 6
|
||||||
|
return {event.event_type: _object_as_dict(event) for event in events}
|
||||||
|
|
||||||
|
events_by_type = await instance.async_add_executor_job(_fetch_migrated_events)
|
||||||
|
|
||||||
|
old_uuid_context_id_event = events_by_type["old_uuid_context_id_event"]
|
||||||
|
assert old_uuid_context_id_event["context_id"] is None
|
||||||
|
assert old_uuid_context_id_event["context_user_id"] is None
|
||||||
|
assert old_uuid_context_id_event["context_parent_id"] is None
|
||||||
|
assert old_uuid_context_id_event["context_id_bin"] == uuid_bin
|
||||||
|
assert old_uuid_context_id_event["context_user_id_bin"] is None
|
||||||
|
assert old_uuid_context_id_event["context_parent_id_bin"] is None
|
||||||
|
|
||||||
|
empty_context_id_event = events_by_type["empty_context_id_event"]
|
||||||
|
assert empty_context_id_event["context_id"] is None
|
||||||
|
assert empty_context_id_event["context_user_id"] is None
|
||||||
|
assert empty_context_id_event["context_parent_id"] is None
|
||||||
|
assert empty_context_id_event["context_id_bin"].startswith(
|
||||||
|
b"\x01\xb50\xeeO("
|
||||||
|
) # 6 bytes of timestamp + random
|
||||||
|
assert empty_context_id_event["context_user_id_bin"] is None
|
||||||
|
assert empty_context_id_event["context_parent_id_bin"] is None
|
||||||
|
|
||||||
|
ulid_context_id_event = events_by_type["ulid_context_id_event"]
|
||||||
|
assert ulid_context_id_event["context_id"] is None
|
||||||
|
assert ulid_context_id_event["context_user_id"] is None
|
||||||
|
assert ulid_context_id_event["context_parent_id"] is None
|
||||||
|
assert (
|
||||||
|
bytes_to_ulid(ulid_context_id_event["context_id_bin"])
|
||||||
|
== "01ARZ3NDEKTSV4RRFFQ69G5FAV"
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
ulid_context_id_event["context_user_id_bin"]
|
||||||
|
== b"\x94\x00\xfa\xce\xe4W\x11\xea\xa90\x8b\xfd=\x19\xe4t"
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
bytes_to_ulid(ulid_context_id_event["context_parent_id_bin"])
|
||||||
|
== "01ARZ3NDEKTSV4RRFFQ69G5FA2"
|
||||||
|
)
|
||||||
|
|
||||||
|
invalid_context_id_event = events_by_type["invalid_context_id_event"]
|
||||||
|
assert invalid_context_id_event["context_id"] is None
|
||||||
|
assert invalid_context_id_event["context_user_id"] is None
|
||||||
|
assert invalid_context_id_event["context_parent_id"] is None
|
||||||
|
assert invalid_context_id_event["context_id_bin"].startswith(
|
||||||
|
b"\x01\xb50\xeeO("
|
||||||
|
) # 6 bytes of timestamp + random
|
||||||
|
assert invalid_context_id_event["context_user_id_bin"] is None
|
||||||
|
assert invalid_context_id_event["context_parent_id_bin"] is None
|
||||||
|
|
||||||
|
garbage_context_id_event = events_by_type["garbage_context_id_event"]
|
||||||
|
assert garbage_context_id_event["context_id"] is None
|
||||||
|
assert garbage_context_id_event["context_user_id"] is None
|
||||||
|
assert garbage_context_id_event["context_parent_id"] is None
|
||||||
|
assert garbage_context_id_event["context_id_bin"].startswith(
|
||||||
|
b"\x01)~$\xdf("
|
||||||
|
) # 6 bytes of timestamp + random
|
||||||
|
assert garbage_context_id_event["context_user_id_bin"] is None
|
||||||
|
assert garbage_context_id_event["context_parent_id_bin"] is None
|
||||||
|
|
||||||
|
event_with_garbage_context_id_no_time_fired_ts = events_by_type[
|
||||||
|
"event_with_garbage_context_id_no_time_fired_ts"
|
||||||
|
]
|
||||||
|
assert event_with_garbage_context_id_no_time_fired_ts["context_id"] is None
|
||||||
|
assert event_with_garbage_context_id_no_time_fired_ts["context_user_id"] is None
|
||||||
|
assert event_with_garbage_context_id_no_time_fired_ts["context_parent_id"] is None
|
||||||
|
assert event_with_garbage_context_id_no_time_fired_ts["context_id_bin"].startswith(
|
||||||
|
expected_ulid_fallback_start
|
||||||
|
) # 6 bytes of timestamp + random
|
||||||
|
assert event_with_garbage_context_id_no_time_fired_ts["context_user_id_bin"] is None
|
||||||
|
assert (
|
||||||
|
event_with_garbage_context_id_no_time_fired_ts["context_parent_id_bin"] is None
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("enable_migrate_context_ids", [True])
|
||||||
|
async def test_migrate_states_context_ids(
|
||||||
|
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
|
||||||
|
) -> None:
|
||||||
|
"""Test we can migrate old uuid context ids and ulid context ids to binary format."""
|
||||||
|
instance = await async_setup_recorder_instance(hass)
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
|
||||||
|
test_uuid = uuid.uuid4()
|
||||||
|
uuid_hex = test_uuid.hex
|
||||||
|
uuid_bin = test_uuid.bytes
|
||||||
|
|
||||||
|
def _insert_states():
|
||||||
|
with session_scope(hass=hass) as session:
|
||||||
|
session.add_all(
|
||||||
|
(
|
||||||
|
States(
|
||||||
|
entity_id="state.old_uuid_context_id",
|
||||||
|
last_updated_ts=1477721632.452529,
|
||||||
|
context_id=uuid_hex,
|
||||||
|
context_id_bin=None,
|
||||||
|
context_user_id=None,
|
||||||
|
context_user_id_bin=None,
|
||||||
|
context_parent_id=None,
|
||||||
|
context_parent_id_bin=None,
|
||||||
|
),
|
||||||
|
States(
|
||||||
|
entity_id="state.empty_context_id",
|
||||||
|
last_updated_ts=1477721632.552529,
|
||||||
|
context_id=None,
|
||||||
|
context_id_bin=None,
|
||||||
|
context_user_id=None,
|
||||||
|
context_user_id_bin=None,
|
||||||
|
context_parent_id=None,
|
||||||
|
context_parent_id_bin=None,
|
||||||
|
),
|
||||||
|
States(
|
||||||
|
entity_id="state.ulid_context_id",
|
||||||
|
last_updated_ts=1477721632.552529,
|
||||||
|
context_id="01ARZ3NDEKTSV4RRFFQ69G5FAV",
|
||||||
|
context_id_bin=None,
|
||||||
|
context_user_id="9400facee45711eaa9308bfd3d19e474",
|
||||||
|
context_user_id_bin=None,
|
||||||
|
context_parent_id="01ARZ3NDEKTSV4RRFFQ69G5FA2",
|
||||||
|
context_parent_id_bin=None,
|
||||||
|
),
|
||||||
|
States(
|
||||||
|
entity_id="state.invalid_context_id",
|
||||||
|
last_updated_ts=1477721632.552529,
|
||||||
|
context_id="invalid",
|
||||||
|
context_id_bin=None,
|
||||||
|
context_user_id=None,
|
||||||
|
context_user_id_bin=None,
|
||||||
|
context_parent_id=None,
|
||||||
|
context_parent_id_bin=None,
|
||||||
|
),
|
||||||
|
States(
|
||||||
|
entity_id="state.garbage_context_id",
|
||||||
|
last_updated_ts=1477721632.552529,
|
||||||
|
context_id="adapt_lgt:b'5Cf*':interval:b'0R'",
|
||||||
|
context_id_bin=None,
|
||||||
|
context_user_id=None,
|
||||||
|
context_user_id_bin=None,
|
||||||
|
context_parent_id=None,
|
||||||
|
context_parent_id_bin=None,
|
||||||
|
),
|
||||||
|
States(
|
||||||
|
entity_id="state.human_readable_uuid_context_id",
|
||||||
|
last_updated_ts=1477721632.552529,
|
||||||
|
context_id="0ae29799-ee4e-4f45-8116-f582d7d3ee65",
|
||||||
|
context_id_bin=None,
|
||||||
|
context_user_id="0ae29799-ee4e-4f45-8116-f582d7d3ee65",
|
||||||
|
context_user_id_bin=None,
|
||||||
|
context_parent_id="0ae29799-ee4e-4f45-8116-f582d7d3ee65",
|
||||||
|
context_parent_id_bin=None,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
await instance.async_add_executor_job(_insert_states)
|
||||||
|
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
instance.queue_task(StatesContextIDMigrationTask())
|
||||||
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
|
def _object_as_dict(obj):
|
||||||
|
return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs}
|
||||||
|
|
||||||
|
def _fetch_migrated_states():
|
||||||
|
with session_scope(hass=hass) as session:
|
||||||
|
events = (
|
||||||
|
session.query(States)
|
||||||
|
.filter(
|
||||||
|
States.entity_id.in_(
|
||||||
|
[
|
||||||
|
"state.old_uuid_context_id",
|
||||||
|
"state.empty_context_id",
|
||||||
|
"state.ulid_context_id",
|
||||||
|
"state.invalid_context_id",
|
||||||
|
"state.garbage_context_id",
|
||||||
|
"state.human_readable_uuid_context_id",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
assert len(events) == 6
|
||||||
|
return {state.entity_id: _object_as_dict(state) for state in events}
|
||||||
|
|
||||||
|
states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states)
|
||||||
|
|
||||||
|
old_uuid_context_id = states_by_entity_id["state.old_uuid_context_id"]
|
||||||
|
assert old_uuid_context_id["context_id"] is None
|
||||||
|
assert old_uuid_context_id["context_user_id"] is None
|
||||||
|
assert old_uuid_context_id["context_parent_id"] is None
|
||||||
|
assert old_uuid_context_id["context_id_bin"] == uuid_bin
|
||||||
|
assert old_uuid_context_id["context_user_id_bin"] is None
|
||||||
|
assert old_uuid_context_id["context_parent_id_bin"] is None
|
||||||
|
|
||||||
|
empty_context_id = states_by_entity_id["state.empty_context_id"]
|
||||||
|
assert empty_context_id["context_id"] is None
|
||||||
|
assert empty_context_id["context_user_id"] is None
|
||||||
|
assert empty_context_id["context_parent_id"] is None
|
||||||
|
assert empty_context_id["context_id_bin"].startswith(
|
||||||
|
b"\x01X\x0f\x12\xaf("
|
||||||
|
) # 6 bytes of timestamp + random
|
||||||
|
assert empty_context_id["context_user_id_bin"] is None
|
||||||
|
assert empty_context_id["context_parent_id_bin"] is None
|
||||||
|
|
||||||
|
ulid_context_id = states_by_entity_id["state.ulid_context_id"]
|
||||||
|
assert ulid_context_id["context_id"] is None
|
||||||
|
assert ulid_context_id["context_user_id"] is None
|
||||||
|
assert ulid_context_id["context_parent_id"] is None
|
||||||
|
assert (
|
||||||
|
bytes_to_ulid(ulid_context_id["context_id_bin"]) == "01ARZ3NDEKTSV4RRFFQ69G5FAV"
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
ulid_context_id["context_user_id_bin"]
|
||||||
|
== b"\x94\x00\xfa\xce\xe4W\x11\xea\xa90\x8b\xfd=\x19\xe4t"
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
bytes_to_ulid(ulid_context_id["context_parent_id_bin"])
|
||||||
|
== "01ARZ3NDEKTSV4RRFFQ69G5FA2"
|
||||||
|
)
|
||||||
|
|
||||||
|
invalid_context_id = states_by_entity_id["state.invalid_context_id"]
|
||||||
|
assert invalid_context_id["context_id"] is None
|
||||||
|
assert invalid_context_id["context_user_id"] is None
|
||||||
|
assert invalid_context_id["context_parent_id"] is None
|
||||||
|
assert invalid_context_id["context_id_bin"].startswith(
|
||||||
|
b"\x01X\x0f\x12\xaf("
|
||||||
|
) # 6 bytes of timestamp + random
|
||||||
|
assert invalid_context_id["context_user_id_bin"] is None
|
||||||
|
assert invalid_context_id["context_parent_id_bin"] is None
|
||||||
|
|
||||||
|
garbage_context_id = states_by_entity_id["state.garbage_context_id"]
|
||||||
|
assert garbage_context_id["context_id"] is None
|
||||||
|
assert garbage_context_id["context_user_id"] is None
|
||||||
|
assert garbage_context_id["context_parent_id"] is None
|
||||||
|
assert garbage_context_id["context_id_bin"].startswith(
|
||||||
|
b"\x01X\x0f\x12\xaf("
|
||||||
|
) # 6 bytes of timestamp + random
|
||||||
|
assert garbage_context_id["context_user_id_bin"] is None
|
||||||
|
assert garbage_context_id["context_parent_id_bin"] is None
|
||||||
|
|
||||||
|
human_readable_uuid_context_id = states_by_entity_id[
|
||||||
|
"state.human_readable_uuid_context_id"
|
||||||
|
]
|
||||||
|
assert human_readable_uuid_context_id["context_id"] is None
|
||||||
|
assert human_readable_uuid_context_id["context_user_id"] is None
|
||||||
|
assert human_readable_uuid_context_id["context_parent_id"] is None
|
||||||
|
assert (
|
||||||
|
human_readable_uuid_context_id["context_id_bin"]
|
||||||
|
== b"\n\xe2\x97\x99\xeeNOE\x81\x16\xf5\x82\xd7\xd3\xeee"
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
human_readable_uuid_context_id["context_user_id_bin"]
|
||||||
|
== b"\n\xe2\x97\x99\xeeNOE\x81\x16\xf5\x82\xd7\xd3\xeee"
|
||||||
|
)
|
||||||
|
assert (
|
||||||
|
human_readable_uuid_context_id["context_parent_id_bin"]
|
||||||
|
== b"\n\xe2\x97\x99\xeeNOE\x81\x16\xf5\x82\xd7\xd3\xeee"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
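# Illustrative sketch (not part of this commit): the b"\x01X\x0f\x12\xaf(" prefix
# asserted above matches the seeded last_updated_ts (1477721632.552529) encoded
# as the 48-bit millisecond timestamp that leads a ULID, which is why rows with a
# missing or unparseable context id get a fallback id starting with those 6 bytes.
timestamp_ms = int(1477721632.552529 * 1000)
assert timestamp_ms.to_bytes(6, "big") == b"\x01X\x0f\x12\xaf("
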
@pytest.mark.parametrize("enable_migrate_event_type_ids", [True])
|
||||||
|
async def test_migrate_event_type_ids(
|
||||||
|
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
|
||||||
|
) -> None:
|
||||||
|
"""Test we can migrate event_types to the EventTypes table."""
|
||||||
|
instance = await async_setup_recorder_instance(hass)
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
|
||||||
|
def _insert_events():
|
||||||
|
with session_scope(hass=hass) as session:
|
||||||
|
session.add_all(
|
||||||
|
(
|
||||||
|
Events(
|
||||||
|
event_type="event_type_one",
|
||||||
|
origin_idx=0,
|
||||||
|
time_fired_ts=1677721632.452529,
|
||||||
|
),
|
||||||
|
Events(
|
||||||
|
event_type="event_type_one",
|
||||||
|
origin_idx=0,
|
||||||
|
time_fired_ts=1677721632.552529,
|
||||||
|
),
|
||||||
|
Events(
|
||||||
|
event_type="event_type_two",
|
||||||
|
origin_idx=0,
|
||||||
|
time_fired_ts=1677721632.552529,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
await instance.async_add_executor_job(_insert_events)
|
||||||
|
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
# This is a threadsafe way to add a task to the recorder
|
||||||
|
instance.queue_task(EventTypeIDMigrationTask())
|
||||||
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
|
def _fetch_migrated_events():
|
||||||
|
with session_scope(hass=hass, read_only=True) as session:
|
||||||
|
events = (
|
||||||
|
session.query(Events.event_id, Events.time_fired, EventTypes.event_type)
|
||||||
|
.filter(
|
||||||
|
Events.event_type_id.in_(
|
||||||
|
select_event_type_ids(
|
||||||
|
(
|
||||||
|
"event_type_one",
|
||||||
|
"event_type_two",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
assert len(events) == 3
|
||||||
|
result = {}
|
||||||
|
for event in events:
|
||||||
|
result.setdefault(event.event_type, []).append(
|
||||||
|
{
|
||||||
|
"event_id": event.event_id,
|
||||||
|
"time_fired": event.time_fired,
|
||||||
|
"event_type": event.event_type,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
events_by_type = await instance.async_add_executor_job(_fetch_migrated_events)
|
||||||
|
assert len(events_by_type["event_type_one"]) == 2
|
||||||
|
assert len(events_by_type["event_type_two"]) == 1
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("enable_migrate_entity_ids", [True])
|
||||||
|
async def test_migrate_entity_ids(
|
||||||
|
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
|
||||||
|
) -> None:
|
||||||
|
"""Test we can migrate entity_ids to the StatesMeta table."""
|
||||||
|
instance = await async_setup_recorder_instance(hass)
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
|
||||||
|
def _insert_states():
|
||||||
|
with session_scope(hass=hass) as session:
|
||||||
|
session.add_all(
|
||||||
|
(
|
||||||
|
States(
|
||||||
|
entity_id="sensor.one",
|
||||||
|
state="one_1",
|
||||||
|
last_updated_ts=1.452529,
|
||||||
|
),
|
||||||
|
States(
|
||||||
|
entity_id="sensor.two",
|
||||||
|
state="two_2",
|
||||||
|
last_updated_ts=2.252529,
|
||||||
|
),
|
||||||
|
States(
|
||||||
|
entity_id="sensor.two",
|
||||||
|
state="two_1",
|
||||||
|
last_updated_ts=3.152529,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
await instance.async_add_executor_job(_insert_states)
|
||||||
|
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
# This is a threadsafe way to add a task to the recorder
|
||||||
|
instance.queue_task(EntityIDMigrationTask())
|
||||||
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
|
def _fetch_migrated_states():
|
||||||
|
with session_scope(hass=hass, read_only=True) as session:
|
||||||
|
states = (
|
||||||
|
session.query(
|
||||||
|
States.state,
|
||||||
|
States.metadata_id,
|
||||||
|
States.last_updated_ts,
|
||||||
|
StatesMeta.entity_id,
|
||||||
|
)
|
||||||
|
.outerjoin(StatesMeta, States.metadata_id == StatesMeta.metadata_id)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
assert len(states) == 3
|
||||||
|
result = {}
|
||||||
|
for state in states:
|
||||||
|
result.setdefault(state.entity_id, []).append(
|
||||||
|
{
|
||||||
|
"state_id": state.entity_id,
|
||||||
|
"last_updated_ts": state.last_updated_ts,
|
||||||
|
"state": state.state,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states)
|
||||||
|
assert len(states_by_entity_id["sensor.two"]) == 2
|
||||||
|
assert len(states_by_entity_id["sensor.one"]) == 1
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("enable_migrate_entity_ids", [True])
|
||||||
|
async def test_post_migrate_entity_ids(
|
||||||
|
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
|
||||||
|
) -> None:
|
||||||
|
"""Test we can migrate entity_ids to the StatesMeta table."""
|
||||||
|
instance = await async_setup_recorder_instance(hass)
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
|
||||||
|
def _insert_events():
|
||||||
|
with session_scope(hass=hass) as session:
|
||||||
|
session.add_all(
|
||||||
|
(
|
||||||
|
States(
|
||||||
|
entity_id="sensor.one",
|
||||||
|
state="one_1",
|
||||||
|
last_updated_ts=1.452529,
|
||||||
|
),
|
||||||
|
States(
|
||||||
|
entity_id="sensor.two",
|
||||||
|
state="two_2",
|
||||||
|
last_updated_ts=2.252529,
|
||||||
|
),
|
||||||
|
States(
|
||||||
|
entity_id="sensor.two",
|
||||||
|
state="two_1",
|
||||||
|
last_updated_ts=3.152529,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
await instance.async_add_executor_job(_insert_events)
|
||||||
|
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
# This is a threadsafe way to add a task to the recorder
|
||||||
|
instance.queue_task(EntityIDPostMigrationTask())
|
||||||
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
|
def _fetch_migrated_states():
|
||||||
|
with session_scope(hass=hass, read_only=True) as session:
|
||||||
|
states = session.query(
|
||||||
|
States.state,
|
||||||
|
States.entity_id,
|
||||||
|
).all()
|
||||||
|
assert len(states) == 3
|
||||||
|
return {state.state: state.entity_id for state in states}
|
||||||
|
|
||||||
|
states_by_state = await instance.async_add_executor_job(_fetch_migrated_states)
|
||||||
|
assert states_by_state["one_1"] is None
|
||||||
|
assert states_by_state["two_2"] is None
|
||||||
|
assert states_by_state["two_1"] is None
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("enable_migrate_entity_ids", [True])
|
||||||
|
async def test_migrate_null_entity_ids(
|
||||||
|
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
|
||||||
|
) -> None:
|
||||||
|
"""Test we can migrate entity_ids to the StatesMeta table."""
|
||||||
|
instance = await async_setup_recorder_instance(hass)
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
|
||||||
|
def _insert_states():
|
||||||
|
with session_scope(hass=hass) as session:
|
||||||
|
session.add(
|
||||||
|
States(
|
||||||
|
entity_id="sensor.one",
|
||||||
|
state="one_1",
|
||||||
|
last_updated_ts=1.452529,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
session.add_all(
|
||||||
|
States(
|
||||||
|
entity_id=None,
|
||||||
|
state="empty",
|
||||||
|
last_updated_ts=time + 1.452529,
|
||||||
|
)
|
||||||
|
for time in range(1000)
|
||||||
|
)
|
||||||
|
session.add(
|
||||||
|
States(
|
||||||
|
entity_id="sensor.one",
|
||||||
|
state="one_1",
|
||||||
|
last_updated_ts=2.452529,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
await instance.async_add_executor_job(_insert_states)
|
||||||
|
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
# This is a threadsafe way to add a task to the recorder
|
||||||
|
instance.queue_task(EntityIDMigrationTask())
|
||||||
|
await async_recorder_block_till_done(hass)
|
||||||
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
|
def _fetch_migrated_states():
|
||||||
|
with session_scope(hass=hass, read_only=True) as session:
|
||||||
|
states = (
|
||||||
|
session.query(
|
||||||
|
States.state,
|
||||||
|
States.metadata_id,
|
||||||
|
States.last_updated_ts,
|
||||||
|
StatesMeta.entity_id,
|
||||||
|
)
|
||||||
|
.outerjoin(StatesMeta, States.metadata_id == StatesMeta.metadata_id)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
assert len(states) == 1002
|
||||||
|
result = {}
|
||||||
|
for state in states:
|
||||||
|
result.setdefault(state.entity_id, []).append(
|
||||||
|
{
|
||||||
|
"state_id": state.entity_id,
|
||||||
|
"last_updated_ts": state.last_updated_ts,
|
||||||
|
"state": state.state,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
states_by_entity_id = await instance.async_add_executor_job(_fetch_migrated_states)
|
||||||
|
assert len(states_by_entity_id[migration._EMPTY_ENTITY_ID]) == 1000
|
||||||
|
assert len(states_by_entity_id["sensor.one"]) == 2
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("enable_migrate_event_type_ids", [True])
|
||||||
|
async def test_migrate_null_event_type_ids(
|
||||||
|
async_setup_recorder_instance: RecorderInstanceGenerator, hass: HomeAssistant
|
||||||
|
) -> None:
|
||||||
|
"""Test we can migrate event_types to the EventTypes table when the event_type is NULL."""
|
||||||
|
instance = await async_setup_recorder_instance(hass)
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
|
||||||
|
def _insert_events():
|
||||||
|
with session_scope(hass=hass) as session:
|
||||||
|
session.add(
|
||||||
|
Events(
|
||||||
|
event_type="event_type_one",
|
||||||
|
origin_idx=0,
|
||||||
|
time_fired_ts=1.452529,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
session.add_all(
|
||||||
|
Events(
|
||||||
|
event_type=None,
|
||||||
|
origin_idx=0,
|
||||||
|
time_fired_ts=time + 1.452529,
|
||||||
|
)
|
||||||
|
for time in range(1000)
|
||||||
|
)
|
||||||
|
session.add(
|
||||||
|
Events(
|
||||||
|
event_type="event_type_one",
|
||||||
|
origin_idx=0,
|
||||||
|
time_fired_ts=2.452529,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
|
||||||
|
await instance.async_add_executor_job(_insert_events)
|
||||||
|
|
||||||
|
await async_wait_recording_done(hass)
|
||||||
|
# This is a threadsafe way to add a task to the recorder
|
||||||
|
|
||||||
|
instance.queue_task(EventTypeIDMigrationTask())
|
||||||
|
await async_recorder_block_till_done(hass)
|
||||||
|
await async_recorder_block_till_done(hass)
|
||||||
|
|
||||||
|
def _fetch_migrated_events():
|
||||||
|
with session_scope(hass=hass, read_only=True) as session:
|
||||||
|
events = (
|
||||||
|
session.query(Events.event_id, Events.time_fired, EventTypes.event_type)
|
||||||
|
.filter(
|
||||||
|
Events.event_type_id.in_(
|
||||||
|
select_event_type_ids(
|
||||||
|
(
|
||||||
|
"event_type_one",
|
||||||
|
migration._EMPTY_EVENT_TYPE,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
assert len(events) == 1002
|
||||||
|
result = {}
|
||||||
|
for event in events:
|
||||||
|
result.setdefault(event.event_type, []).append(
|
||||||
|
{
|
||||||
|
"event_id": event.event_id,
|
||||||
|
"time_fired": event.time_fired,
|
||||||
|
"event_type": event.event_type,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
events_by_type = await instance.async_add_executor_job(_fetch_migrated_events)
|
||||||
|
assert len(events_by_type["event_type_one"]) == 2
|
||||||
|
assert len(events_by_type[migration._EMPTY_EVENT_TYPE]) == 1000
|
@ -25,6 +25,14 @@ from homeassistant.util import dt
from tests.typing import ClientSessionGenerator


@pytest.fixture(autouse=True)
def set_time_zone(hass: HomeAssistant):
    """Set the time zone for the tests."""
    # Set our timezone to CST/Regina so we can check calculations
    # This keeps UTC-6 all year round
    hass.config.set_time_zone("America/Regina")

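# Illustrative sketch (not part of this commit): America/Regina is a convenient
# test zone because it stays at UTC-6 with no daylight saving shift, so calendar
# boundary calculations are stable year round. Standard-library check:
from datetime import datetime
from zoneinfo import ZoneInfo

_regina = ZoneInfo("America/Regina")
assert (
    datetime(2023, 1, 1, tzinfo=_regina).utcoffset()
    == datetime(2023, 7, 1, tzinfo=_regina).utcoffset()
)  # both offsets are -6:00
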
@pytest.fixture(name="task")
|
@pytest.fixture(name="task")
|
||||||
def mock_task() -> Task:
|
def mock_task() -> Task:
|
||||||
"""Mock a todoist Task instance."""
|
"""Mock a todoist Task instance."""
|
||||||
@ -132,6 +140,52 @@ async def test_update_entity_for_custom_project_with_labels_on(
|
|||||||
assert state.state == "on"
|
assert state.state == "on"
|
||||||
|
|
||||||
|
|
||||||
|
@patch("homeassistant.components.todoist.calendar.TodoistAPIAsync")
|
||||||
|
async def test_update_entity_for_custom_project_no_due_date_on(
|
||||||
|
todoist_api, hass: HomeAssistant, api
|
||||||
|
) -> None:
|
||||||
|
"""Test that a task without an explicit due date is considered to be in an on state."""
|
||||||
|
task_wo_due_date = Task(
|
||||||
|
assignee_id=None,
|
||||||
|
assigner_id=None,
|
||||||
|
comment_count=0,
|
||||||
|
is_completed=False,
|
||||||
|
content="No due date task",
|
||||||
|
created_at="2023-04-11T00:25:25.589971Z",
|
||||||
|
creator_id="1",
|
||||||
|
description="",
|
||||||
|
due=None,
|
||||||
|
id="123",
|
||||||
|
labels=["Label1"],
|
||||||
|
order=10,
|
||||||
|
parent_id=None,
|
||||||
|
priority=1,
|
||||||
|
project_id="12345",
|
||||||
|
section_id=None,
|
||||||
|
url="https://todoist.com/showTask?id=123",
|
||||||
|
sync_id=None,
|
||||||
|
)
|
||||||
|
api.get_tasks.return_value = [task_wo_due_date]
|
||||||
|
todoist_api.return_value = api
|
||||||
|
|
||||||
|
assert await setup.async_setup_component(
|
||||||
|
hass,
|
||||||
|
"calendar",
|
||||||
|
{
|
||||||
|
"calendar": {
|
||||||
|
"platform": DOMAIN,
|
||||||
|
CONF_TOKEN: "token",
|
||||||
|
"custom_projects": [{"name": "All projects", "labels": ["Label1"]}],
|
||||||
|
}
|
||||||
|
},
|
||||||
|
)
|
||||||
|
await hass.async_block_till_done()
|
||||||
|
|
||||||
|
await async_update_entity(hass, "calendar.all_projects")
|
||||||
|
state = hass.states.get("calendar.all_projects")
|
||||||
|
assert state.state == "on"
|
||||||
|
|
||||||
|
|
||||||
@patch("homeassistant.components.todoist.calendar.TodoistAPIAsync")
|
@patch("homeassistant.components.todoist.calendar.TodoistAPIAsync")
|
||||||
async def test_failed_coordinator_update(todoist_api, hass: HomeAssistant, api) -> None:
|
async def test_failed_coordinator_update(todoist_api, hass: HomeAssistant, api) -> None:
|
||||||
"""Test a failed data coordinator update is handled correctly."""
|
"""Test a failed data coordinator update is handled correctly."""
|
||||||