Mirror of https://github.com/home-assistant/core.git (synced 2025-07-27 15:17:35 +00:00)

Commit 7928b31087: 2023.4.2 (#91111)
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "cloud_push",
   "loggers": ["aioambient"],
-  "requirements": ["aioambient==2022.10.0"]
+  "requirements": ["aioambient==2023.04.0"]
 }

@@ -348,7 +348,7 @@ class ScannerEntity(BaseTrackerEntity):
             self.mac_address,
             self.unique_id,
         )
-        if self.is_connected:
+        if self.is_connected and self.ip_address:
             _async_connected_device_registered(
                 hass,
                 self.mac_address,

@@ -405,7 +405,7 @@ class ScannerEntity(BaseTrackerEntity):
         """Return the device state attributes."""
         attr: dict[str, StateType] = {}
         attr.update(super().state_attributes)
-        if self.ip_address is not None:
+        if self.ip_address:
             attr[ATTR_IP] = self.ip_address
         if self.mac_address is not None:
             attr[ATTR_MAC] = self.mac_address
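Note on the two ScannerEntity guards above: switching from "is not None" to a plain truthiness check also filters out empty strings, so a scanner entity reporting ip_address == "" neither registers a connected device nor exposes an ATTR_IP attribute. A minimal illustrative sketch (not part of the commit):

    # "" is falsy but not None, so only the truthiness check skips it
    ip_address = ""
    assert ip_address is not None   # old check: would still have added ATTR_IP = ""
    assert not ip_address           # new check: the attribute is dropped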
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/environment_canada",
   "iot_class": "cloud_polling",
   "loggers": ["env_canada"],
-  "requirements": ["env_canada==0.5.30"]
+  "requirements": ["env_canada==0.5.31"]
 }

@@ -51,5 +51,5 @@
   "iot_class": "local_push",
   "loggers": ["flux_led"],
   "quality_scale": "platinum",
-  "requirements": ["flux_led==0.28.36"]
+  "requirements": ["flux_led==0.28.37"]
 }

@@ -285,17 +285,18 @@ async def async_setup_add_event_service(
             raise ValueError(
                 "Missing required fields to set start or end date/datetime"
             )
+        event = Event(
+            summary=call.data[EVENT_SUMMARY],
+            description=call.data[EVENT_DESCRIPTION],
+            start=start,
+            end=end,
+        )
+        if location := call.data.get(EVENT_LOCATION):
+            event.location = location
         try:
             await calendar_service.async_create_event(
                 call.data[EVENT_CALENDAR_ID],
-                Event(
-                    summary=call.data[EVENT_SUMMARY],
-                    description=call.data[EVENT_DESCRIPTION],
-                    location=call.data[EVENT_LOCATION],
-                    start=start,
-                    end=end,
-                ),
+                event,
             )
         except ApiException as err:
             raise HomeAssistantError(str(err)) from err

@@ -508,9 +508,10 @@ class GoogleCalendarEntity(
                 "start": start,
                 "end": end,
                 EVENT_DESCRIPTION: kwargs.get(EVENT_DESCRIPTION),
-                EVENT_LOCATION: kwargs.get(EVENT_LOCATION),
             }
         )
+        if location := kwargs.get(EVENT_LOCATION):
+            event.location = location
         if rrule := kwargs.get(EVENT_RRULE):
             event.recurrence = [f"{RRULE_PREFIX}{rrule}"]
 
@@ -597,18 +598,20 @@ async def async_create_event(entity: GoogleCalendarEntity, call: ServiceCall) ->
     if start is None or end is None:
         raise ValueError("Missing required fields to set start or end date/datetime")
 
+    event = Event(
+        summary=call.data[EVENT_SUMMARY],
+        description=call.data[EVENT_DESCRIPTION],
+        start=start,
+        end=end,
+    )
+    if location := call.data.get(EVENT_LOCATION):
+        event.location = location
     try:
         await cast(
             CalendarSyncUpdateCoordinator, entity.coordinator
         ).sync.api.async_create_event(
             entity.calendar_id,
-            Event(
-                summary=call.data[EVENT_SUMMARY],
-                description=call.data[EVENT_DESCRIPTION],
-                location=call.data[EVENT_LOCATION],
-                start=start,
-                end=end,
-            ),
+            event,
         )
     except ApiException as err:
         raise HomeAssistantError(str(err)) from err

@@ -7,5 +7,5 @@
   "documentation": "https://www.home-assistant.io/integrations/calendar.google/",
   "iot_class": "cloud_polling",
   "loggers": ["googleapiclient"],
-  "requirements": ["gcal-sync==4.1.3", "oauth2client==4.1.3"]
+  "requirements": ["gcal-sync==4.1.4", "oauth2client==4.1.3"]
 }

@@ -22,6 +22,8 @@ import homeassistant.util.dt as dt_util
 
 _LOGGER = logging.getLogger(__name__)
 
+API_FAILURE = -1
+
 DEFAULT_NAME = "NMBS"
 
 DEFAULT_ICON = "mdi:train"

@@ -162,16 +164,19 @@ class NMBSLiveBoard(SensorEntity):
         """Set the state equal to the next departure."""
         liveboard = self._api_client.get_liveboard(self._station)
 
-        if (
-            liveboard is None
-            or liveboard.get("departures") is None
-            or liveboard.get("departures").get("number") is None
-            or liveboard.get("departures").get("number") == "0"
-            or liveboard.get("departures").get("departure") is None
-        ):
+        if liveboard == API_FAILURE:
+            _LOGGER.warning("API failed in NMBSLiveBoard")
             return
 
-        next_departure = liveboard["departures"]["departure"][0]
+        if not (departures := liveboard.get("departures")):
+            _LOGGER.warning("API returned invalid departures: %r", liveboard)
+            return
+
+        _LOGGER.debug("API returned departures: %r", departures)
+        if departures["number"] == "0":
+            # No trains are scheduled
+            return
+        next_departure = departures["departure"][0]
 
         self._attrs = next_departure
         self._state = (

@@ -290,13 +295,19 @@ class NMBSSensor(SensorEntity):
             self._station_from, self._station_to
         )
 
-        if connections is None or not connections.get("connection"):
+        if connections == API_FAILURE:
+            _LOGGER.warning("API failed in NMBSSensor")
             return
 
-        if int(connections["connection"][0]["departure"]["left"]) > 0:
-            next_connection = connections["connection"][1]
+        if not (connection := connections.get("connection")):
+            _LOGGER.warning("API returned invalid connection: %r", connections)
+            return
+
+        _LOGGER.debug("API returned connection: %r", connection)
+        if int(connection[0]["departure"]["left"]) > 0:
+            next_connection = connection[1]
         else:
-            next_connection = connections["connection"][0]
+            next_connection = connection[0]
 
         self._attrs = next_connection
 
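Note on the NMBS hunks above: the new API_FAILURE sentinel replaces the long chain of None and shape checks. A hedged sketch of the pattern, assuming the API client returns -1 on transport errors instead of raising (the helper name below is illustrative):

    API_FAILURE = -1

    def fetch_departures(client, station):
        """Return the departures payload, or None when the API call failed."""
        liveboard = client.get_liveboard(station)
        if liveboard == API_FAILURE:
            return None  # compare against the sentinel before touching the payload
        return liveboard.get("departures")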
@@ -58,6 +58,7 @@ from .const import (
     SupportedDialect,
 )
 from .db_schema import (
+    LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX,
     LEGACY_STATES_EVENT_ID_INDEX,
     SCHEMA_VERSION,
     TABLE_STATES,

@@ -96,6 +97,7 @@ from .tasks import (
     CompileMissingStatisticsTask,
     DatabaseLockTask,
     EntityIDMigrationTask,
+    EntityIDPostMigrationTask,
     EventIdMigrationTask,
     EventsContextIDMigrationTask,
     EventTask,

@@ -757,6 +759,18 @@ class Recorder(threading.Thread):
         else:
             _LOGGER.debug("Activating states_meta manager as all data is migrated")
             self.states_meta_manager.active = True
+            with contextlib.suppress(SQLAlchemyError):
+                # If ix_states_entity_id_last_updated_ts still exists
+                # on the states table it means the entity id migration
+                # finished by the EntityIDPostMigrationTask did not
+                # because they restarted in the middle of it. We need
+                # to pick back up where we left off.
+                if get_index_by_name(
+                    session,
+                    TABLE_STATES,
+                    LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX,
+                ):
+                    self.queue_task(EntityIDPostMigrationTask())
 
         if self.schema_version > LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION:
             with contextlib.suppress(SQLAlchemyError):

@@ -119,6 +119,7 @@ METADATA_ID_LAST_UPDATED_INDEX_TS = "ix_states_metadata_id_last_updated_ts"
 EVENTS_CONTEXT_ID_BIN_INDEX = "ix_events_context_id_bin"
 STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin"
 LEGACY_STATES_EVENT_ID_INDEX = "ix_states_event_id"
+LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX = "ix_states_entity_id_last_updated_ts"
 CONTEXT_ID_BIN_MAX_LENGTH = 16
 
 MYSQL_COLLATE = "utf8mb4_unicode_ci"

@@ -284,7 +285,7 @@ class Events(Base):
         """Convert to a native HA Event."""
         context = Context(
             id=bytes_to_ulid_or_none(self.context_id_bin),
-            user_id=bytes_to_uuid_hex_or_none(self.context_user_id),
+            user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin),
             parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin),
         )
         try:

@@ -508,7 +509,7 @@ class States(Base):
         """Convert to an HA state object."""
         context = Context(
             id=bytes_to_ulid_or_none(self.context_id_bin),
-            user_id=bytes_to_uuid_hex_or_none(self.context_user_id),
+            user_id=bytes_to_uuid_hex_or_none(self.context_user_id_bin),
             parent_id=bytes_to_ulid_or_none(self.context_parent_id_bin),
         )
         try:

@@ -48,6 +48,7 @@ from .const import SupportedDialect
 from .db_schema import (
     CONTEXT_ID_BIN_MAX_LENGTH,
     DOUBLE_PRECISION_TYPE_SQL,
+    LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX,
     LEGACY_STATES_EVENT_ID_INDEX,
     MYSQL_COLLATE,
     MYSQL_DEFAULT_CHARSET,

@@ -1586,7 +1587,7 @@ def post_migrate_entity_ids(instance: Recorder) -> bool:
 
     if is_done:
         # Drop the old indexes since they are no longer needed
-        _drop_index(session_maker, "states", "ix_states_entity_id_last_updated_ts")
+        _drop_index(session_maker, "states", LEGACY_STATES_ENTITY_ID_LAST_UPDATED_INDEX)
 
     _LOGGER.debug("Cleanup legacy entity_ids done=%s", is_done)
     return is_done

@@ -24,5 +24,5 @@
   "documentation": "https://www.home-assistant.io/integrations/roomba",
   "iot_class": "local_push",
   "loggers": ["paho_mqtt", "roombapy"],
-  "requirements": ["roombapy==1.6.6"]
+  "requirements": ["roombapy==1.6.8"]
 }

@@ -30,6 +30,7 @@ from homeassistant.const import (
 )
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import TemplateError
+from homeassistant.helpers import issue_registry as ir
 from homeassistant.helpers.device_registry import DeviceEntryType
 from homeassistant.helpers.entity import DeviceInfo
 from homeassistant.helpers.entity_platform import AddEntitiesCallback

@@ -153,10 +154,44 @@ async def async_setup_sensor(
     ):
         return
 
+    upper_query = query_str.upper()
+    if use_database_executor:
+        redacted_query = redact_credentials(query_str)
+
+        issue_key = unique_id if unique_id else redacted_query
+        # If the query has a unique id and they fix it we can dismiss the issue
+        # but if it doesn't have a unique id they have to ignore it instead
+
+        if "ENTITY_ID" in upper_query and "STATES_META" not in upper_query:
+            _LOGGER.error(
+                "The query `%s` contains the keyword `entity_id` but does not "
+                "reference the `states_meta` table. This will cause a full table "
+                "scan and database instability. Please check the documentation and use "
+                "`states_meta.entity_id` instead",
+                redacted_query,
+            )
+
+            ir.async_create_issue(
+                hass,
+                DOMAIN,
+                f"entity_id_query_does_full_table_scan_{issue_key}",
+                translation_key="entity_id_query_does_full_table_scan",
+                translation_placeholders={"query": redacted_query},
+                is_fixable=False,
+                severity=ir.IssueSeverity.ERROR,
+            )
+            raise ValueError(
+                "Query contains entity_id but does not reference states_meta"
+            )
+
+        ir.async_delete_issue(
+            hass, DOMAIN, f"entity_id_query_does_full_table_scan_{issue_key}"
+        )
+
     # MSSQL uses TOP and not LIMIT
-    if not ("LIMIT" in query_str.upper() or "SELECT TOP" in query_str.upper()):
+    if not ("LIMIT" in upper_query or "SELECT TOP" in upper_query):
         if "mssql" in db_url:
-            query_str = query_str.upper().replace("SELECT", "SELECT TOP 1")
+            query_str = upper_query.replace("SELECT", "SELECT TOP 1")
         else:
             query_str = query_str.replace(";", "") + " LIMIT 1;"
 
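Note on the full-table-scan guard above: it only inspects the upper-cased query text, so the remedy is to reference states_meta.entity_id rather than the legacy states.entity_id column. An illustrative pair of queries (table and column names follow the recorder schema included later in this diff):

    bad_query = "SELECT state FROM states WHERE entity_id = 'sensor.power'"  # rejected: full table scan
    good_query = (
        "SELECT states.state FROM states "
        "JOIN states_meta ON states.metadata_id = states_meta.metadata_id "
        "WHERE states_meta.entity_id = 'sensor.power' LIMIT 1"
    )
    assert "ENTITY_ID" in bad_query.upper() and "STATES_META" not in bad_query.upper()
    assert "STATES_META" in good_query.upper()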
@@ -53,5 +53,11 @@
         "db_url_invalid": "[%key:component::sql::config::error::db_url_invalid%]",
         "query_invalid": "[%key:component::sql::config::error::query_invalid%]"
       }
+    },
+    "issues": {
+      "entity_id_query_does_full_table_scan": {
+        "title": "SQL query does full table scan",
+        "description": "The query `{query}` contains the keyword `entity_id` but does not reference the `states_meta` table. This will cause a full table scan and database instability. Please check the documentation and use `states_meta.entity_id` instead."
+      }
     }
 }

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/subaru",
   "iot_class": "cloud_polling",
   "loggers": ["stdiomask", "subarulink"],
-  "requirements": ["subarulink==0.7.5"]
+  "requirements": ["subarulink==0.7.6"]
 }

@@ -15,6 +15,12 @@
     },
     {
       "st": "urn:schemas-upnp-org:device:InternetGatewayDevice:2"
+    },
+    {
+      "nt": "urn:schemas-upnp-org:device:InternetGatewayDevice:1"
+    },
+    {
+      "nt": "urn:schemas-upnp-org:device:InternetGatewayDevice:2"
     }
   ]
 }

@@ -35,7 +35,7 @@ from homeassistant.helpers.event import (
     async_track_point_in_time,
     async_track_state_change_event,
 )
-from homeassistant.helpers.start import async_at_start
+from homeassistant.helpers.start import async_at_started
 from homeassistant.helpers.template import is_number
 from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
 from homeassistant.util import slugify

@@ -410,8 +410,11 @@ class UtilityMeterSensor(RestoreSensor):
 
         if (old_state_val := self._validate_state(old_state)) is not None:
             return new_state_val - old_state_val
 
         _LOGGER.warning(
-            "Invalid state (%s > %s)",
+            "%s received an invalid state change coming from %s (%s > %s)",
+            self.name,
+            self._sensor_source_id,
             old_state.state if old_state else None,
             new_state_val,
         )

@@ -423,8 +426,14 @@ class UtilityMeterSensor(RestoreSensor):
         old_state: State | None = event.data.get("old_state")
         new_state: State = event.data.get("new_state")  # type: ignore[assignment]  # a state change event always has a new state
 
+        # First check if the new_state is valid (see discussion in PR #88446)
         if (new_state_val := self._validate_state(new_state)) is None:
-            _LOGGER.warning("Invalid state %s", new_state.state)
+            _LOGGER.warning(
+                "%s received an invalid new state from %s : %s",
+                self.name,
+                self._sensor_source_id,
+                new_state.state,
+            )
             return
 
         if self._state is None:

@@ -597,7 +606,7 @@ class UtilityMeterSensor(RestoreSensor):
             self.hass, [self._sensor_source_id], self.async_reading
         )
 
-        self.async_on_remove(async_at_start(self.hass, async_source_tracking))
+        self.async_on_remove(async_at_started(self.hass, async_source_tracking))
 
     async def async_will_remove_from_hass(self) -> None:
         """Run when entity will be removed from hass."""
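Note on async_at_start -> async_at_started above: both helpers live in homeassistant.helpers.start and return an unsubscribe callback, but async_at_started waits until Home Assistant has fully started before invoking the callback (or runs it immediately if startup is already complete), so the utility meter only begins tracking its source sensor once other integrations have had a chance to load. A hedged usage sketch under that assumption:

    from homeassistant.core import CALLBACK_TYPE, HomeAssistant
    from homeassistant.helpers.start import async_at_started

    def subscribe_when_started(hass: HomeAssistant, start_tracking) -> CALLBACK_TYPE:
        # start_tracking(hass) fires once HA reaches the "started" state;
        # the returned callable cancels the wait if the caller is torn down first.
        return async_at_started(hass, start_tracking)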
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/vallox",
   "iot_class": "local_polling",
   "loggers": ["vallox_websocket_api"],
-  "requirements": ["vallox-websocket-api==3.0.0"]
+  "requirements": ["vallox-websocket-api==3.2.1"]
 }

@@ -8,5 +8,5 @@
   "iot_class": "local_push",
   "loggers": ["zeroconf"],
   "quality_scale": "internal",
-  "requirements": ["zeroconf==0.54.0"]
+  "requirements": ["zeroconf==0.56.0"]
 }

@@ -187,11 +187,16 @@ class SmartThingsAcceleration(ZigbeeChannel):
     @callback
     def attribute_updated(self, attrid, value):
         """Handle attribute updates on this cluster."""
+        try:
+            attr_name = self._cluster.attributes[attrid].name
+        except KeyError:
+            attr_name = UNKNOWN
+
         if attrid == self.value_attribute:
             self.async_send_signal(
                 f"{self.unique_id}_{SIGNAL_ATTR_UPDATED}",
                 attrid,
-                self._cluster.attributes.get(attrid, [UNKNOWN])[0],
+                attr_name,
                 value,
             )
             return

@@ -200,7 +205,7 @@ class SmartThingsAcceleration(ZigbeeChannel):
             SIGNAL_ATTR_UPDATED,
             {
                 ATTR_ATTRIBUTE_ID: attrid,
-                ATTR_ATTRIBUTE_NAME: self._cluster.attributes.get(attrid, [UNKNOWN])[0],
+                ATTR_ATTRIBUTE_NAME: attr_name,
                 ATTR_VALUE: value,
             },
         )
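Note on the attr_name change above: resolving the attribute name once via try/except KeyError replaces the repeated .get(attrid, [UNKNOWN])[0] fallback in both signal payloads. A minimal sketch of that lookup pattern with a stand-in mapping (names here are illustrative only):

    UNKNOWN = "Unknown"

    class _Attr:
        def __init__(self, name: str) -> None:
            self.name = name

    attributes = {0x0012: _Attr("acceleration")}  # stand-in for cluster.attributes

    def attr_name_for(attrid: int) -> str:
        try:
            return attributes[attrid].name
        except KeyError:
            return UNKNOWN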
@@ -23,7 +23,7 @@
     "bellows==0.35.0",
     "pyserial==3.5",
     "pyserial-asyncio==0.6",
-    "zha-quirks==0.0.95",
+    "zha-quirks==0.0.96",
     "zigpy-deconz==0.20.0",
     "zigpy==0.54.0",
     "zigpy-xbee==0.17.0",

@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2023
 MINOR_VERSION: Final = 4
-PATCH_VERSION: Final = "1"
+PATCH_VERSION: Final = "2"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)

@@ -305,6 +305,12 @@ SSDP = {
         {
             "st": "urn:schemas-upnp-org:device:InternetGatewayDevice:2",
         },
+        {
+            "nt": "urn:schemas-upnp-org:device:InternetGatewayDevice:1",
+        },
+        {
+            "nt": "urn:schemas-upnp-org:device:InternetGatewayDevice:2",
+        },
     ],
     "webostv": [
         {

@@ -50,7 +50,7 @@ ulid-transform==0.6.0
 voluptuous-serialize==2.6.0
 voluptuous==0.13.1
 yarl==1.8.1
-zeroconf==0.54.0
+zeroconf==0.56.0
 
 # Constrain pycryptodome to avoid vulnerability
 # see https://github.com/home-assistant/core/pull/16238

@@ -159,6 +159,6 @@ uamqp==1.6.0;python_version<'3.11'
 faust-cchardet>=2.1.18
 
 # websockets 11.0 is missing files in the source distribution
-# which break wheel builds
+# which break wheel builds so we need at least 11.0.1
 # https://github.com/aaugustin/websockets/issues/1329
-websockets<11.0
+websockets>=11.0.1

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "homeassistant"
-version = "2023.4.1"
+version = "2023.4.2"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"
@@ -119,7 +119,7 @@ aioairq==0.2.4
 aioairzone==0.5.2
 
 # homeassistant.components.ambient_station
-aioambient==2022.10.0
+aioambient==2023.04.0
 
 # homeassistant.components.aseko_pool_live
 aioaseko==0.0.2

@@ -661,7 +661,7 @@ enocean==0.50
 enturclient==0.2.4
 
 # homeassistant.components.environment_canada
-env_canada==0.5.30
+env_canada==0.5.31
 
 # homeassistant.components.enphase_envoy
 envoy_reader==0.20.1

@@ -725,7 +725,7 @@ fjaraskupan==2.2.0
 flipr-api==1.5.0
 
 # homeassistant.components.flux_led
-flux_led==0.28.36
+flux_led==0.28.37
 
 # homeassistant.components.homekit
 # homeassistant.components.recorder

@@ -757,7 +757,7 @@ gTTS==2.2.4
 gassist-text==0.0.10
 
 # homeassistant.components.google
-gcal-sync==4.1.3
+gcal-sync==4.1.4
 
 # homeassistant.components.geniushub
 geniushub-client==0.7.0

@@ -2258,7 +2258,7 @@ rocketchat-API==0.6.1
 rokuecp==0.17.1
 
 # homeassistant.components.roomba
-roombapy==1.6.6
+roombapy==1.6.8
 
 # homeassistant.components.roon
 roonapi==0.1.4

@@ -2428,7 +2428,7 @@ streamlabswater==1.0.1
 stringcase==1.2.0
 
 # homeassistant.components.subaru
-subarulink==0.7.5
+subarulink==0.7.6
 
 # homeassistant.components.solarlog
 sunwatcher==0.2.1

@@ -2565,7 +2565,7 @@ url-normalize==1.4.3
 uvcclient==0.11.0
 
 # homeassistant.components.vallox
-vallox-websocket-api==3.0.0
+vallox-websocket-api==3.2.1
 
 # homeassistant.components.rdw
 vehicle==1.0.0

@@ -2692,13 +2692,13 @@ zamg==0.2.2
 zengge==0.2
 
 # homeassistant.components.zeroconf
-zeroconf==0.54.0
+zeroconf==0.56.0
 
 # homeassistant.components.zeversolar
 zeversolar==0.3.1
 
 # homeassistant.components.zha
-zha-quirks==0.0.95
+zha-quirks==0.0.96
 
 # homeassistant.components.zhong_hong
 zhong_hong_hvac==1.0.9
@@ -109,7 +109,7 @@ aioairq==0.2.4
 aioairzone==0.5.2
 
 # homeassistant.components.ambient_station
-aioambient==2022.10.0
+aioambient==2023.04.0
 
 # homeassistant.components.aseko_pool_live
 aioaseko==0.0.2

@@ -517,7 +517,7 @@ energyzero==0.4.1
 enocean==0.50
 
 # homeassistant.components.environment_canada
-env_canada==0.5.30
+env_canada==0.5.31
 
 # homeassistant.components.enphase_envoy
 envoy_reader==0.20.1

@@ -553,7 +553,7 @@ fjaraskupan==2.2.0
 flipr-api==1.5.0
 
 # homeassistant.components.flux_led
-flux_led==0.28.36
+flux_led==0.28.37
 
 # homeassistant.components.homekit
 # homeassistant.components.recorder

@@ -579,7 +579,7 @@ gTTS==2.2.4
 gassist-text==0.0.10
 
 # homeassistant.components.google
-gcal-sync==4.1.3
+gcal-sync==4.1.4
 
 # homeassistant.components.geocaching
 geocachingapi==0.2.1

@@ -1609,7 +1609,7 @@ ring_doorbell==0.7.2
 rokuecp==0.17.1
 
 # homeassistant.components.roomba
-roombapy==1.6.6
+roombapy==1.6.8
 
 # homeassistant.components.roon
 roonapi==0.1.4

@@ -1737,7 +1737,7 @@ stookwijzer==1.3.0
 stringcase==1.2.0
 
 # homeassistant.components.subaru
-subarulink==0.7.5
+subarulink==0.7.6
 
 # homeassistant.components.solarlog
 sunwatcher==0.2.1

@@ -1832,7 +1832,7 @@ url-normalize==1.4.3
 uvcclient==0.11.0
 
 # homeassistant.components.vallox
-vallox-websocket-api==3.0.0
+vallox-websocket-api==3.2.1
 
 # homeassistant.components.rdw
 vehicle==1.0.0

@@ -1926,13 +1926,13 @@ youless-api==1.0.1
 zamg==0.2.2
 
 # homeassistant.components.zeroconf
-zeroconf==0.54.0
+zeroconf==0.56.0
 
 # homeassistant.components.zeversolar
 zeversolar==0.3.1
 
 # homeassistant.components.zha
-zha-quirks==0.0.95
+zha-quirks==0.0.96
 
 # homeassistant.components.zha
 zigpy-deconz==0.20.0

@@ -164,9 +164,9 @@ uamqp==1.6.0;python_version<'3.11'
 faust-cchardet>=2.1.18
 
 # websockets 11.0 is missing files in the source distribution
-# which break wheel builds
+# which break wheel builds so we need at least 11.0.1
 # https://github.com/aaugustin/websockets/issues/1329
-websockets<11.0
+websockets>=11.0.1
 """
 
 IGNORE_PRE_COMMIT_HOOK_ID = (
@@ -195,21 +195,6 @@ async def test_connected_device_registered(
     class MockDisconnectedScannerEntity(MockScannerEntity):
         """Mock a disconnected scanner entity."""
 
-        @property
-        def mac_address(self) -> str:
-            return "aa:bb:cc:dd:ee:ff"
-
-        @property
-        def is_connected(self) -> bool:
-            return True
-
-        @property
-        def hostname(self) -> str:
-            return "connected"
-
-    class MockConnectedScannerEntity(MockScannerEntity):
-        """Mock a disconnected scanner entity."""
-
         @property
         def mac_address(self) -> str:
             return "aa:bb:cc:dd:ee:00"

@@ -222,10 +207,44 @@
         def hostname(self) -> str:
             return "disconnected"
 
+    class MockConnectedScannerEntity(MockScannerEntity):
+        """Mock a disconnected scanner entity."""
+
+        @property
+        def mac_address(self) -> str:
+            return "aa:bb:cc:dd:ee:ff"
+
+        @property
+        def is_connected(self) -> bool:
+            return True
+
+        @property
+        def hostname(self) -> str:
+            return "connected"
+
+    class MockConnectedScannerEntityBadIPAddress(MockConnectedScannerEntity):
+        """Mock a disconnected scanner entity."""
+
+        @property
+        def mac_address(self) -> str:
+            return "aa:bb:cc:dd:ee:01"
+
+        @property
+        def ip_address(self) -> str:
+            return ""
+
+        @property
+        def hostname(self) -> str:
+            return "connected_bad_ip"
+
     async def async_setup_entry(hass, config_entry, async_add_entities):
         """Mock setup entry method."""
         async_add_entities(
-            [MockConnectedScannerEntity(), MockDisconnectedScannerEntity()]
+            [
+                MockConnectedScannerEntity(),
+                MockDisconnectedScannerEntity(),
+                MockConnectedScannerEntityBadIPAddress(),
+            ]
         )
         return True
 
@@ -240,7 +259,7 @@ async def test_connected_device_registered(
     full_name = f"{entity_platform.domain}.{config_entry.domain}"
     assert full_name in hass.config.components
     assert len(hass.states.async_entity_ids()) == 0  # should be disabled
-    assert len(entity_registry.entities) == 2
+    assert len(entity_registry.entities) == 3
     assert (
         entity_registry.entities["test_domain.test_aa_bb_cc_dd_ee_ff"].config_entry_id
         == "super-mock-id"
@@ -888,7 +888,6 @@ async def test_websocket_create(
     assert aioclient_mock.mock_calls[0][2] == {
         "summary": "Bastille Day Party",
         "description": None,
-        "location": None,
         "start": {
             "dateTime": "1997-07-14T11:00:00-06:00",
             "timeZone": "America/Regina",

@@ -932,7 +931,6 @@ async def test_websocket_create_all_day(
     assert aioclient_mock.mock_calls[0][2] == {
         "summary": "Bastille Day Party",
         "description": None,
-        "location": None,
         "start": {
             "date": "1997-07-14",
         },

@@ -94,7 +94,6 @@ def add_event_call_service(
                 **params,
                 "summary": TEST_EVENT_SUMMARY,
                 "description": TEST_EVENT_DESCRIPTION,
-                "location": TEST_EVENT_LOCATION,
             },
             target=target,
             blocking=True,

@@ -486,7 +485,6 @@ async def test_add_event_date_in_x(
     assert aioclient_mock.mock_calls[0][2] == {
         "summary": TEST_EVENT_SUMMARY,
         "description": TEST_EVENT_DESCRIPTION,
-        "location": TEST_EVENT_LOCATION,
         "start": {"date": start_date.date().isoformat()},
         "end": {"date": end_date.date().isoformat()},
     }

@@ -527,7 +525,6 @@ async def test_add_event_date(
     assert aioclient_mock.mock_calls[0][2] == {
         "summary": TEST_EVENT_SUMMARY,
         "description": TEST_EVENT_DESCRIPTION,
-        "location": TEST_EVENT_LOCATION,
         "start": {"date": today.isoformat()},
         "end": {"date": end_date.isoformat()},
     }

@@ -568,7 +565,6 @@ async def test_add_event_date_time(
     assert aioclient_mock.mock_calls[0][2] == {
         "summary": TEST_EVENT_SUMMARY,
         "description": TEST_EVENT_DESCRIPTION,
-        "location": TEST_EVENT_LOCATION,
         "start": {
             "dateTime": start_datetime.isoformat(timespec="seconds"),
             "timeZone": "America/Regina",
@@ -606,6 +602,48 @@ async def test_add_event_failure(
     )
 
 
+async def test_add_event_location(
+    hass: HomeAssistant,
+    component_setup: ComponentSetup,
+    mock_calendars_list: ApiResult,
+    test_api_calendar: dict[str, Any],
+    mock_insert_event: Callable[[str, dict[str, Any]], None],
+    mock_events_list: ApiResult,
+    aioclient_mock: AiohttpClientMocker,
+    add_event_call_service: Callable[dict[str, Any], Awaitable[None]],
+) -> None:
+    """Test service call that sets a location field."""
+
+    mock_calendars_list({"items": [test_api_calendar]})
+    mock_events_list({})
+    assert await component_setup()
+
+    now = utcnow()
+    today = now.date()
+    end_date = today + datetime.timedelta(days=2)
+
+    aioclient_mock.clear_requests()
+    mock_insert_event(
+        calendar_id=CALENDAR_ID,
+    )
+
+    await add_event_call_service(
+        {
+            "start_date": today.isoformat(),
+            "end_date": end_date.isoformat(),
+            "location": TEST_EVENT_LOCATION,
+        },
+    )
+    assert len(aioclient_mock.mock_calls) == 1
+    assert aioclient_mock.mock_calls[0][2] == {
+        "summary": TEST_EVENT_SUMMARY,
+        "description": TEST_EVENT_DESCRIPTION,
+        "location": TEST_EVENT_LOCATION,
+        "start": {"date": today.isoformat()},
+        "end": {"date": end_date.isoformat()},
+    }
+
+
 @pytest.mark.parametrize(
     "config_entry_token_expiry", [datetime.datetime.max.timestamp() + 1]
 )
tests/components/recorder/db_schema_32.py (new file, 752 lines)
@@ -0,0 +1,752 @@
+"""Models for SQLAlchemy.
+
+This file contains the model definitions for schema version 30.
+It is used to test the schema migration logic.
+"""
+from __future__ import annotations
+
+from collections.abc import Callable
+from datetime import datetime, timedelta
+import logging
+import time
+from typing import Any, TypedDict, cast, overload
+
+import ciso8601
+from fnvhash import fnv1a_32
+from sqlalchemy import (
+    JSON,
+    BigInteger,
+    Boolean,
+    Column,
+    DateTime,
+    Float,
+    ForeignKey,
+    Identity,
+    Index,
+    Integer,
+    LargeBinary,
+    SmallInteger,
+    String,
+    Text,
+    distinct,
+    type_coerce,
+)
+from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite
+from sqlalchemy.orm import aliased, declarative_base, relationship
+from sqlalchemy.orm.session import Session
+from typing_extensions import Self
+
+from homeassistant.components.recorder.const import SupportedDialect
+from homeassistant.const import (
+    ATTR_ATTRIBUTION,
+    ATTR_RESTORED,
+    ATTR_SUPPORTED_FEATURES,
+    MAX_LENGTH_EVENT_CONTEXT_ID,
+    MAX_LENGTH_EVENT_EVENT_TYPE,
+    MAX_LENGTH_EVENT_ORIGIN,
+    MAX_LENGTH_STATE_ENTITY_ID,
+    MAX_LENGTH_STATE_STATE,
+)
+from homeassistant.core import Context, Event, EventOrigin, State, split_entity_id
+from homeassistant.helpers import entity_registry as er
+from homeassistant.helpers.json import JSON_DUMP, json_bytes
+import homeassistant.util.dt as dt_util
+from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads
+
+ALL_DOMAIN_EXCLUDE_ATTRS = {ATTR_ATTRIBUTION, ATTR_RESTORED, ATTR_SUPPORTED_FEATURES}
+
+# SQLAlchemy Schema
+# pylint: disable=invalid-name
+Base = declarative_base()
+
+SCHEMA_VERSION = 30
+
+_LOGGER = logging.getLogger(__name__)
+
+TABLE_EVENTS = "events"
+TABLE_EVENT_DATA = "event_data"
+TABLE_EVENT_TYPES = "event_types"
+TABLE_STATES = "states"
+TABLE_STATE_ATTRIBUTES = "state_attributes"
+TABLE_STATES_META = "states_meta"
+TABLE_RECORDER_RUNS = "recorder_runs"
+TABLE_SCHEMA_CHANGES = "schema_changes"
+TABLE_STATISTICS = "statistics"
+TABLE_STATISTICS_META = "statistics_meta"
+TABLE_STATISTICS_RUNS = "statistics_runs"
+TABLE_STATISTICS_SHORT_TERM = "statistics_short_term"
+
+ALL_TABLES = [
+    TABLE_STATES,
+    TABLE_STATE_ATTRIBUTES,
+    TABLE_STATES_META,
+    TABLE_EVENTS,
+    TABLE_EVENT_DATA,
+    TABLE_EVENT_TYPES,
+    TABLE_RECORDER_RUNS,
+    TABLE_SCHEMA_CHANGES,
+    TABLE_STATISTICS,
+    TABLE_STATISTICS_META,
+    TABLE_STATISTICS_RUNS,
+    TABLE_STATISTICS_SHORT_TERM,
+]
+
+TABLES_TO_CHECK = [
+    TABLE_STATES,
+    TABLE_EVENTS,
+    TABLE_RECORDER_RUNS,
+    TABLE_SCHEMA_CHANGES,
+]
+
+LAST_UPDATED_INDEX = "ix_states_last_updated"
+ENTITY_ID_LAST_UPDATED_TS_INDEX = "ix_states_entity_id_last_updated_ts"
+EVENTS_CONTEXT_ID_INDEX = "ix_events_context_id"
+STATES_CONTEXT_ID_INDEX = "ix_states_context_id"
+CONTEXT_ID_BIN_MAX_LENGTH = 16
+EVENTS_CONTEXT_ID_BIN_INDEX = "ix_events_context_id_bin"
+STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin"
+
+
+class FAST_PYSQLITE_DATETIME(sqlite.DATETIME):  # type: ignore[misc]
+    """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex."""
+
+    def result_processor(self, dialect, coltype):  # type: ignore[no-untyped-def]
+        """Offload the datetime parsing to ciso8601."""
+        return lambda value: None if value is None else ciso8601.parse_datetime(value)
+
+
+JSON_VARIANT_CAST = Text().with_variant(
+    postgresql.JSON(none_as_null=True), "postgresql"
+)
+JSONB_VARIANT_CAST = Text().with_variant(
+    postgresql.JSONB(none_as_null=True), "postgresql"
+)
+DATETIME_TYPE = (
+    DateTime(timezone=True)
+    .with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql")
+    .with_variant(FAST_PYSQLITE_DATETIME(), "sqlite")
+)
+DOUBLE_TYPE = (
+    Float()
+    .with_variant(mysql.DOUBLE(asdecimal=False), "mysql")
+    .with_variant(oracle.DOUBLE_PRECISION(), "oracle")
+    .with_variant(postgresql.DOUBLE_PRECISION(), "postgresql")
+)
+
+TIMESTAMP_TYPE = DOUBLE_TYPE
+
+
+class UnsupportedDialect(Exception):
+    """The dialect or its version is not supported."""
+
+
+class StatisticResult(TypedDict):
+    """Statistic result data class.
+
+    Allows multiple datapoints for the same statistic_id.
+    """
+
+    meta: StatisticMetaData
+    stat: StatisticData
+
+
+class StatisticDataBase(TypedDict):
+    """Mandatory fields for statistic data class."""
+
+    start: datetime
+
+
+class StatisticData(StatisticDataBase, total=False):
+    """Statistic data class."""
+
+    mean: float
+    min: float
+    max: float
+    last_reset: datetime | None
+    state: float
+    sum: float
+
+
+class StatisticMetaData(TypedDict):
+    """Statistic meta data class."""
+
+    has_mean: bool
+    has_sum: bool
+    name: str | None
+    source: str
+    statistic_id: str
+    unit_of_measurement: str | None
+
+
+class JSONLiteral(JSON):  # type: ignore[misc]
+    """Teach SA how to literalize json."""
+
+    def literal_processor(self, dialect: str) -> Callable[[Any], str]:
+        """Processor to convert a value to JSON."""
+
+        def process(value: Any) -> str:
+            """Dump json."""
+            return JSON_DUMP(value)
+
+        return process
+
+
+EVENT_ORIGIN_ORDER = [EventOrigin.local, EventOrigin.remote]
+EVENT_ORIGIN_TO_IDX = {origin: idx for idx, origin in enumerate(EVENT_ORIGIN_ORDER)}
+
+
+class Events(Base):  # type: ignore[misc,valid-type]
+    """Event history data."""
+
+    __table_args__ = (
+        # Used for fetching events at a specific time
+        # see logbook
+        Index("ix_events_event_type_time_fired", "event_type", "time_fired"),
+        Index(
+            EVENTS_CONTEXT_ID_BIN_INDEX,
+            "context_id_bin",
+            mysql_length=CONTEXT_ID_BIN_MAX_LENGTH,
+            mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH,
+        ),
+        {"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
+    )
+    __tablename__ = TABLE_EVENTS
+    event_id = Column(Integer, Identity(), primary_key=True)
+    event_type = Column(String(MAX_LENGTH_EVENT_EVENT_TYPE))
+    event_data = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))
+    origin = Column(String(MAX_LENGTH_EVENT_ORIGIN))  # no longer used for new rows
+    origin_idx = Column(SmallInteger)
+    time_fired = Column(DATETIME_TYPE, index=True)
+    time_fired_ts = Column(TIMESTAMP_TYPE, index=True)
+    context_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID), index=True)
+    context_user_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID))
+    context_parent_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID))
+    data_id = Column(Integer, ForeignKey("event_data.data_id"), index=True)
+    context_id_bin = Column(
+        LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH)
+    )  # *** Not originally in v32, only added for recorder to startup ok
+    context_user_id_bin = Column(
+        LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH)
+    )  # *** Not originally in v32, only added for recorder to startup ok
+    context_parent_id_bin = Column(
+        LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH)
+    )  # *** Not originally in v32, only added for recorder to startup ok
+    event_type_id = Column(
+        Integer, ForeignKey("event_types.event_type_id"), index=True
+    )  # *** Not originally in v32, only added for recorder to startup ok
+    event_data_rel = relationship("EventData")
+    event_type_rel = relationship("EventTypes")
+
+    def __repr__(self) -> str:
+        """Return string representation of instance for debugging."""
+        return (
+            "<recorder.Events("
+            f"id={self.event_id}, type='{self.event_type}', "
+            f"origin_idx='{self.origin_idx}', time_fired='{self.time_fired}'"
+            f", data_id={self.data_id})>"
+        )
+
+    @staticmethod
+    def from_event(event: Event) -> Events:
+        """Create an event database object from a native event."""
+        return Events(
+            event_type=event.event_type,
+            event_data=None,
+            origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin),
+            time_fired=event.time_fired,
+            context_id=event.context.id,
+            context_user_id=event.context.user_id,
+            context_parent_id=event.context.parent_id,
+        )
+
+    def to_native(self, validate_entity_id: bool = True) -> Event | None:
+        """Convert to a native HA Event."""
+        context = Context(
+            id=self.context_id,
+            user_id=self.context_user_id,
+            parent_id=self.context_parent_id,
+        )
+        try:
+            return Event(
+                self.event_type,
+                json_loads(self.event_data) if self.event_data else {},
+                EventOrigin(self.origin)
+                if self.origin
+                else EVENT_ORIGIN_ORDER[self.origin_idx],
+                process_timestamp(self.time_fired),
+                context=context,
+            )
+        except JSON_DECODE_EXCEPTIONS:
+            # When json_loads fails
+            _LOGGER.exception("Error converting to event: %s", self)
+            return None
+
+
+class EventData(Base):  # type: ignore[misc,valid-type]
+    """Event data history."""
+
+    __table_args__ = (
+        {"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
+    )
+    __tablename__ = TABLE_EVENT_DATA
+    data_id = Column(Integer, Identity(), primary_key=True)
+    hash = Column(BigInteger, index=True)
+    # Note that this is not named attributes to avoid confusion with the states table
+    shared_data = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))
+
+    def __repr__(self) -> str:
+        """Return string representation of instance for debugging."""
+        return (
+            "<recorder.EventData("
+            f"id={self.data_id}, hash='{self.hash}', data='{self.shared_data}'"
+            ")>"
+        )
+
+    @staticmethod
+    def from_event(event: Event) -> EventData:
+        """Create object from an event."""
+        shared_data = json_bytes(event.data)
+        return EventData(
+            shared_data=shared_data.decode("utf-8"),
+            hash=EventData.hash_shared_data_bytes(shared_data),
+        )
+
+    @staticmethod
+    def shared_data_bytes_from_event(
+        event: Event, dialect: SupportedDialect | None
+    ) -> bytes:
+        """Create shared_data from an event."""
+        return json_bytes(event.data)
+
+    @staticmethod
+    def hash_shared_data_bytes(shared_data_bytes: bytes) -> int:
+        """Return the hash of json encoded shared data."""
+        return cast(int, fnv1a_32(shared_data_bytes))
+
+    def to_native(self) -> dict[str, Any]:
+        """Convert to an HA state object."""
+        try:
+            return cast(dict[str, Any], json_loads(self.shared_data))
+        except JSON_DECODE_EXCEPTIONS:
+            _LOGGER.exception("Error converting row to event data: %s", self)
+            return {}
+
+
+# *** Not originally in v32, only added for recorder to startup ok
+# This is not being tested by the v32 statistics migration tests
+class EventTypes(Base):  # type: ignore[misc,valid-type]
+    """Event type history."""
+
+    __table_args__ = (
+        {"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
+    )
+    __tablename__ = TABLE_EVENT_TYPES
+    event_type_id = Column(Integer, Identity(), primary_key=True)
+    event_type = Column(String(MAX_LENGTH_EVENT_EVENT_TYPE))
+
+
+class States(Base):  # type: ignore[misc,valid-type]
+    """State change history."""
+
+    __table_args__ = (
+        # Used for fetching the state of entities at a specific time
+        # (get_states in history.py)
+        Index(ENTITY_ID_LAST_UPDATED_TS_INDEX, "entity_id", "last_updated_ts"),
+        Index(
+            STATES_CONTEXT_ID_BIN_INDEX,
+            "context_id_bin",
+            mysql_length=CONTEXT_ID_BIN_MAX_LENGTH,
+            mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH,
+        ),
+        {"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
+    )
+    __tablename__ = TABLE_STATES
+    state_id = Column(Integer, Identity(), primary_key=True)
+    entity_id = Column(String(MAX_LENGTH_STATE_ENTITY_ID))
+    state = Column(String(MAX_LENGTH_STATE_STATE))
+    attributes = Column(
+        Text().with_variant(mysql.LONGTEXT, "mysql")
+    )  # no longer used for new rows
+    event_id = Column(  # no longer used for new rows
+        Integer, ForeignKey("events.event_id", ondelete="CASCADE"), index=True
+    )
+    last_changed = Column(DATETIME_TYPE)
+    last_changed_ts = Column(TIMESTAMP_TYPE)
+    last_updated = Column(DATETIME_TYPE, default=dt_util.utcnow, index=True)
+    last_updated_ts = Column(TIMESTAMP_TYPE, default=time.time, index=True)
+    old_state_id = Column(Integer, ForeignKey("states.state_id"), index=True)
+    attributes_id = Column(
+        Integer, ForeignKey("state_attributes.attributes_id"), index=True
+    )
+    context_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID), index=True)
+    context_user_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID))
+    context_parent_id = Column(String(MAX_LENGTH_EVENT_CONTEXT_ID))
+    origin_idx = Column(SmallInteger)  # 0 is local, 1 is remote
+    context_id_bin = Column(
+        LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH)
+    )  # *** Not originally in v32, only added for recorder to startup ok
+    context_user_id_bin = Column(
+        LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH)
+    )  # *** Not originally in v32, only added for recorder to startup ok
+    context_parent_id_bin = Column(
+        LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH)
+    )  # *** Not originally in v32, only added for recorder to startup ok
+    metadata_id = Column(
+        Integer, ForeignKey("states_meta.metadata_id"), index=True
+    )  # *** Not originally in v32, only added for recorder to startup ok
+    states_meta_rel = relationship("StatesMeta")
+    old_state = relationship("States", remote_side=[state_id])
+    state_attributes = relationship("StateAttributes")
+
+    def __repr__(self) -> str:
+        """Return string representation of instance for debugging."""
+        return (
+            f"<recorder.States(id={self.state_id}, entity_id='{self.entity_id}',"
+            f" state='{self.state}', event_id='{self.event_id}',"
+            f" last_updated='{self.last_updated.isoformat(sep=' ', timespec='seconds')}',"
+            f" old_state_id={self.old_state_id}, attributes_id={self.attributes_id})>"
+        )
+
+    @staticmethod
+    def from_event(event: Event) -> States:
+        """Create object from a state_changed event."""
+        entity_id = event.data["entity_id"]
+        state: State | None = event.data.get("new_state")
+        dbstate = States(
+            entity_id=entity_id,
+            attributes=None,
+            context_id=event.context.id,
+            context_user_id=event.context.user_id,
+            context_parent_id=event.context.parent_id,
+            origin_idx=EVENT_ORIGIN_TO_IDX.get(event.origin),
+        )
+
+        # None state means the state was removed from the state machine
+        if state is None:
+            dbstate.state = ""
+            dbstate.last_updated = event.time_fired
+            dbstate.last_changed = None
+            return dbstate
+
+        dbstate.state = state.state
+        dbstate.last_updated = state.last_updated
+        if state.last_updated == state.last_changed:
+            dbstate.last_changed = None
+        else:
+            dbstate.last_changed = state.last_changed
+
+        return dbstate
|
|
||||||
|
def to_native(self, validate_entity_id: bool = True) -> State | None:
|
||||||
|
"""Convert to an HA state object."""
|
||||||
|
context = Context(
|
||||||
|
id=self.context_id,
|
||||||
|
user_id=self.context_user_id,
|
||||||
|
parent_id=self.context_parent_id,
|
||||||
|
)
|
||||||
|
try:
|
||||||
|
attrs = json_loads(self.attributes) if self.attributes else {}
|
||||||
|
except JSON_DECODE_EXCEPTIONS:
|
||||||
|
# When json_loads fails
|
||||||
|
_LOGGER.exception("Error converting row to state: %s", self)
|
||||||
|
return None
|
||||||
|
if self.last_changed is None or self.last_changed == self.last_updated:
|
||||||
|
last_changed = last_updated = process_timestamp(self.last_updated)
|
||||||
|
else:
|
||||||
|
last_updated = process_timestamp(self.last_updated)
|
||||||
|
last_changed = process_timestamp(self.last_changed)
|
||||||
|
return State(
|
||||||
|
self.entity_id,
|
||||||
|
self.state,
|
||||||
|
# Join the state_attributes table on attributes_id to get the attributes
|
||||||
|
# for newer states
|
||||||
|
attrs,
|
||||||
|
last_changed,
|
||||||
|
last_updated,
|
||||||
|
context=context,
|
||||||
|
validate_entity_id=validate_entity_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class StateAttributes(Base): # type: ignore[misc,valid-type]
|
||||||
|
"""State attribute change history."""
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
{"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
|
||||||
|
)
|
||||||
|
__tablename__ = TABLE_STATE_ATTRIBUTES
|
||||||
|
attributes_id = Column(Integer, Identity(), primary_key=True)
|
||||||
|
hash = Column(BigInteger, index=True)
|
||||||
|
# Note that this is not named attributes to avoid confusion with the states table
|
||||||
|
shared_attrs = Column(Text().with_variant(mysql.LONGTEXT, "mysql"))
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
"""Return string representation of instance for debugging."""
|
||||||
|
return (
|
||||||
|
f"<recorder.StateAttributes(id={self.attributes_id}, hash='{self.hash}',"
|
||||||
|
f" attributes='{self.shared_attrs}')>"
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_event(event: Event) -> StateAttributes:
|
||||||
|
"""Create object from a state_changed event."""
|
||||||
|
state: State | None = event.data.get("new_state")
|
||||||
|
# None state means the state was removed from the state machine
|
||||||
|
attr_bytes = b"{}" if state is None else json_bytes(state.attributes)
|
||||||
|
dbstate = StateAttributes(shared_attrs=attr_bytes.decode("utf-8"))
|
||||||
|
dbstate.hash = StateAttributes.hash_shared_attrs_bytes(attr_bytes)
|
||||||
|
return dbstate
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def shared_attrs_bytes_from_event(
|
||||||
|
event: Event,
|
||||||
|
entity_registry: er.EntityRegistry,
|
||||||
|
exclude_attrs_by_domain: dict[str, set[str]],
|
||||||
|
dialect: SupportedDialect | None,
|
||||||
|
) -> bytes:
|
||||||
|
"""Create shared_attrs from a state_changed event."""
|
||||||
|
state: State | None = event.data.get("new_state")
|
||||||
|
# None state means the state was removed from the state machine
|
||||||
|
if state is None:
|
||||||
|
return b"{}"
|
||||||
|
domain = split_entity_id(state.entity_id)[0]
|
||||||
|
exclude_attrs = (
|
||||||
|
exclude_attrs_by_domain.get(domain, set()) | ALL_DOMAIN_EXCLUDE_ATTRS
|
||||||
|
)
|
||||||
|
return json_bytes(
|
||||||
|
{k: v for k, v in state.attributes.items() if k not in exclude_attrs}
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def hash_shared_attrs_bytes(shared_attrs_bytes: bytes) -> int:
|
||||||
|
"""Return the hash of json encoded shared attributes."""
|
||||||
|
return cast(int, fnv1a_32(shared_attrs_bytes))
|
||||||
|
|
||||||
|
def to_native(self) -> dict[str, Any]:
|
||||||
|
"""Convert to an HA state object."""
|
||||||
|
try:
|
||||||
|
return cast(dict[str, Any], json_loads(self.shared_attrs))
|
||||||
|
except JSON_DECODE_EXCEPTIONS:
|
||||||
|
# When json_loads fails
|
||||||
|
_LOGGER.exception("Error converting row to state attributes: %s", self)
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
# *** Not originally in v30, only added for recorder to startup ok
|
||||||
|
# This is not being tested by the v30 statistics migration tests
|
||||||
|
class StatesMeta(Base): # type: ignore[misc,valid-type]
|
||||||
|
"""Metadata for states."""
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
{"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
|
||||||
|
)
|
||||||
|
__tablename__ = TABLE_STATES_META
|
||||||
|
metadata_id = Column(Integer, Identity(), primary_key=True)
|
||||||
|
entity_id = Column(String(MAX_LENGTH_STATE_ENTITY_ID))
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
"""Return string representation of instance for debugging."""
|
||||||
|
return (
|
||||||
|
"<recorder.StatesMeta("
|
||||||
|
f"id={self.metadata_id}, entity_id='{self.entity_id}'"
|
||||||
|
")>"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class StatisticsBase:
|
||||||
|
"""Statistics base class."""
|
||||||
|
|
||||||
|
id = Column(Integer, Identity(), primary_key=True)
|
||||||
|
created = Column(DATETIME_TYPE, default=dt_util.utcnow)
|
||||||
|
metadata_id = Column(
|
||||||
|
Integer,
|
||||||
|
ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"),
|
||||||
|
index=True,
|
||||||
|
)
|
||||||
|
start = Column(DATETIME_TYPE, index=True)
|
||||||
|
mean = Column(DOUBLE_TYPE)
|
||||||
|
min = Column(DOUBLE_TYPE)
|
||||||
|
max = Column(DOUBLE_TYPE)
|
||||||
|
last_reset = Column(DATETIME_TYPE)
|
||||||
|
state = Column(DOUBLE_TYPE)
|
||||||
|
sum = Column(DOUBLE_TYPE)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self:
|
||||||
|
"""Create object from a statistics."""
|
||||||
|
return cls( # type: ignore[call-arg,misc]
|
||||||
|
metadata_id=metadata_id,
|
||||||
|
**stats,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class Statistics(Base, StatisticsBase): # type: ignore[misc,valid-type]
|
||||||
|
"""Long term statistics."""
|
||||||
|
|
||||||
|
duration = timedelta(hours=1)
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
# Used for fetching statistics for a certain entity at a specific time
|
||||||
|
Index("ix_statistics_statistic_id_start", "metadata_id", "start", unique=True),
|
||||||
|
)
|
||||||
|
__tablename__ = TABLE_STATISTICS
|
||||||
|
|
||||||
|
|
||||||
|
class StatisticsShortTerm(Base, StatisticsBase): # type: ignore[misc,valid-type]
|
||||||
|
"""Short term statistics."""
|
||||||
|
|
||||||
|
duration = timedelta(minutes=5)
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
# Used for fetching statistics for a certain entity at a specific time
|
||||||
|
Index(
|
||||||
|
"ix_statistics_short_term_statistic_id_start",
|
||||||
|
"metadata_id",
|
||||||
|
"start",
|
||||||
|
unique=True,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
__tablename__ = TABLE_STATISTICS_SHORT_TERM
|
||||||
|
|
||||||
|
|
||||||
|
class StatisticsMeta(Base): # type: ignore[misc,valid-type]
|
||||||
|
"""Statistics meta data."""
|
||||||
|
|
||||||
|
__table_args__ = (
|
||||||
|
{"mysql_default_charset": "utf8mb4", "mysql_collate": "utf8mb4_unicode_ci"},
|
||||||
|
)
|
||||||
|
__tablename__ = TABLE_STATISTICS_META
|
||||||
|
id = Column(Integer, Identity(), primary_key=True)
|
||||||
|
statistic_id = Column(String(255), index=True, unique=True)
|
||||||
|
source = Column(String(32))
|
||||||
|
unit_of_measurement = Column(String(255))
|
||||||
|
has_mean = Column(Boolean)
|
||||||
|
has_sum = Column(Boolean)
|
||||||
|
name = Column(String(255))
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def from_meta(meta: StatisticMetaData) -> StatisticsMeta:
|
||||||
|
"""Create object from meta data."""
|
||||||
|
return StatisticsMeta(**meta)
|
||||||
|
|
||||||
|
|
||||||
|
class RecorderRuns(Base): # type: ignore[misc,valid-type]
|
||||||
|
"""Representation of recorder run."""
|
||||||
|
|
||||||
|
__table_args__ = (Index("ix_recorder_runs_start_end", "start", "end"),)
|
||||||
|
__tablename__ = TABLE_RECORDER_RUNS
|
||||||
|
run_id = Column(Integer, Identity(), primary_key=True)
|
||||||
|
start = Column(DATETIME_TYPE, default=dt_util.utcnow)
|
||||||
|
end = Column(DATETIME_TYPE)
|
||||||
|
closed_incorrect = Column(Boolean, default=False)
|
||||||
|
created = Column(DATETIME_TYPE, default=dt_util.utcnow)
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
"""Return string representation of instance for debugging."""
|
||||||
|
end = (
|
||||||
|
f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None
|
||||||
|
)
|
||||||
|
return (
|
||||||
|
f"<recorder.RecorderRuns(id={self.run_id},"
|
||||||
|
f" start='{self.start.isoformat(sep=' ', timespec='seconds')}', end={end},"
|
||||||
|
f" closed_incorrect={self.closed_incorrect},"
|
||||||
|
f" created='{self.created.isoformat(sep=' ', timespec='seconds')}')>"
|
||||||
|
)
|
||||||
|
|
||||||
|
def entity_ids(self, point_in_time: datetime | None = None) -> list[str]:
|
||||||
|
"""Return the entity ids that existed in this run.
|
||||||
|
|
||||||
|
Specify point_in_time if you want to know which existed at that point
|
||||||
|
in time inside the run.
|
||||||
|
"""
|
||||||
|
session = Session.object_session(self)
|
||||||
|
|
||||||
|
assert session is not None, "RecorderRuns need to be persisted"
|
||||||
|
|
||||||
|
query = session.query(distinct(States.entity_id)).filter(
|
||||||
|
States.last_updated >= self.start
|
||||||
|
)
|
||||||
|
|
||||||
|
if point_in_time is not None:
|
||||||
|
query = query.filter(States.last_updated < point_in_time)
|
||||||
|
elif self.end is not None:
|
||||||
|
query = query.filter(States.last_updated < self.end)
|
||||||
|
|
||||||
|
return [row[0] for row in query]
|
||||||
|
|
||||||
|
def to_native(self, validate_entity_id: bool = True) -> RecorderRuns:
|
||||||
|
"""Return self, native format is this model."""
|
||||||
|
return self
|
||||||
|
|
||||||
|
|
||||||
|
class SchemaChanges(Base): # type: ignore[misc,valid-type]
|
||||||
|
"""Representation of schema version changes."""
|
||||||
|
|
||||||
|
__tablename__ = TABLE_SCHEMA_CHANGES
|
||||||
|
change_id = Column(Integer, Identity(), primary_key=True)
|
||||||
|
schema_version = Column(Integer)
|
||||||
|
changed = Column(DATETIME_TYPE, default=dt_util.utcnow)
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
"""Return string representation of instance for debugging."""
|
||||||
|
return (
|
||||||
|
"<recorder.SchemaChanges("
|
||||||
|
f"id={self.change_id}, schema_version={self.schema_version}, "
|
||||||
|
f"changed='{self.changed.isoformat(sep=' ', timespec='seconds')}'"
|
||||||
|
")>"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class StatisticsRuns(Base): # type: ignore[misc,valid-type]
|
||||||
|
"""Representation of statistics run."""
|
||||||
|
|
||||||
|
__tablename__ = TABLE_STATISTICS_RUNS
|
||||||
|
run_id = Column(Integer, Identity(), primary_key=True)
|
||||||
|
start = Column(DATETIME_TYPE, index=True)
|
||||||
|
|
||||||
|
def __repr__(self) -> str:
|
||||||
|
"""Return string representation of instance for debugging."""
|
||||||
|
return (
|
||||||
|
f"<recorder.StatisticsRuns(id={self.run_id},"
|
||||||
|
f" start='{self.start.isoformat(sep=' ', timespec='seconds')}', )>"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
EVENT_DATA_JSON = type_coerce(
|
||||||
|
EventData.shared_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
|
||||||
|
)
|
||||||
|
OLD_FORMAT_EVENT_DATA_JSON = type_coerce(
|
||||||
|
Events.event_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
|
||||||
|
)
|
||||||
|
|
||||||
|
SHARED_ATTRS_JSON = type_coerce(
|
||||||
|
StateAttributes.shared_attrs.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
|
||||||
|
)
|
||||||
|
OLD_FORMAT_ATTRS_JSON = type_coerce(
|
||||||
|
States.attributes.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
|
||||||
|
)
|
||||||
|
|
||||||
|
ENTITY_ID_IN_EVENT: Column = EVENT_DATA_JSON["entity_id"]
|
||||||
|
OLD_ENTITY_ID_IN_EVENT: Column = OLD_FORMAT_EVENT_DATA_JSON["entity_id"]
|
||||||
|
DEVICE_ID_IN_EVENT: Column = EVENT_DATA_JSON["device_id"]
|
||||||
|
OLD_STATE = aliased(States, name="old_state")
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def process_timestamp(ts: None) -> None:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
@overload
|
||||||
|
def process_timestamp(ts: datetime) -> datetime:
|
||||||
|
...
|
||||||
|
|
||||||
|
|
||||||
|
def process_timestamp(ts: datetime | None) -> datetime | None:
|
||||||
|
"""Process a timestamp into datetime object."""
|
||||||
|
if ts is None:
|
||||||
|
return None
|
||||||
|
if ts.tzinfo is None:
|
||||||
|
return ts.replace(tzinfo=dt_util.UTC)
|
||||||
|
|
||||||
|
return dt_util.as_utc(ts)
|
@@ -29,7 +29,15 @@ from homeassistant.util import dt, dt as dt_util

def test_from_event_to_db_event() -> None:
    """Test converting event to db event."""
    event = ha.Event("test_event", {"some_data": 15})
    event = ha.Event(
        "test_event",
        {"some_data": 15},
        context=ha.Context(
            id="01EYQZJXZ5Z1Z1Z1Z1Z1Z1Z1Z1",
            parent_id="01EYQZJXZ5Z1Z1Z1Z1Z1Z1Z1Z1",
            user_id="12345678901234567890123456789012",
        ),
    )
    db_event = Events.from_event(event)
    dialect = SupportedDialect.MYSQL
    db_event.event_data = EventData.shared_data_bytes_from_event(event, dialect)

@@ -39,7 +47,15 @@ def test_from_event_to_db_event() -> None:

def test_from_event_to_db_state() -> None:
    """Test converting event to db state."""
    state = ha.State("sensor.temperature", "18")
    state = ha.State(
        "sensor.temperature",
        "18",
        context=ha.Context(
            id="01EYQZJXZ5Z1Z1Z1Z1Z1Z1Z1Z1",
            parent_id="01EYQZJXZ5Z1Z1Z1Z1Z1Z1Z1Z1",
            user_id="12345678901234567890123456789012",
        ),
    )
    event = ha.Event(
        EVENT_STATE_CHANGED,
        {"entity_id": "sensor.temperature", "old_state": None, "new_state": state},
@@ -27,7 +27,7 @@ from tests.common import async_test_home_assistant
ORIG_TZ = dt_util.DEFAULT_TIME_ZONE

CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine"
SCHEMA_MODULE = "tests.components.recorder.db_schema_30"
SCHEMA_MODULE = "tests.components.recorder.db_schema_32"


def _create_engine_test(*args, **kwargs):

@@ -222,3 +222,144 @@ async def test_migrate_times(

    await hass.async_stop()
    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ


async def test_migrate_can_resume_entity_id_post_migration(
    caplog: pytest.LogCaptureFixture, tmpdir: py.path.local
) -> None:
    """Test we resume the entity id post migration after a restart."""
    test_db_file = tmpdir.mkdir("sqlite").join("test_run_info.db")
    dburl = f"{SQLITE_URL_PREFIX}//{test_db_file}"

    importlib.import_module(SCHEMA_MODULE)
    old_db_schema = sys.modules[SCHEMA_MODULE]
    now = dt_util.utcnow()
    one_second_past = now - timedelta(seconds=1)
    mock_state = State(
        "sensor.test",
        "old",
        {"last_reset": now.isoformat()},
        last_changed=one_second_past,
        last_updated=now,
    )
    state_changed_event = Event(
        EVENT_STATE_CHANGED,
        {
            "entity_id": "sensor.test",
            "old_state": None,
            "new_state": mock_state,
        },
        EventOrigin.local,
        time_fired=now,
    )
    custom_event = Event(
        "custom_event",
        {"entity_id": "sensor.custom"},
        EventOrigin.local,
        time_fired=now,
    )
    number_of_migrations = 5

    def _get_states_index_names():
        with session_scope(hass=hass) as session:
            return inspect(session.connection()).get_indexes("states")

    with patch.object(recorder, "db_schema", old_db_schema), patch.object(
        recorder.migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION
    ), patch.object(core, "StatesMeta", old_db_schema.StatesMeta), patch.object(
        core, "EventTypes", old_db_schema.EventTypes
    ), patch.object(
        core, "EventData", old_db_schema.EventData
    ), patch.object(
        core, "States", old_db_schema.States
    ), patch.object(
        core, "Events", old_db_schema.Events
    ), patch(
        CREATE_ENGINE_TARGET, new=_create_engine_test
    ), patch(
        "homeassistant.components.recorder.Recorder._migrate_events_context_ids",
    ), patch(
        "homeassistant.components.recorder.Recorder._migrate_states_context_ids",
    ), patch(
        "homeassistant.components.recorder.Recorder._migrate_event_type_ids",
    ), patch(
        "homeassistant.components.recorder.Recorder._migrate_entity_ids",
    ), patch(
        "homeassistant.components.recorder.Recorder._post_migrate_entity_ids"
    ), patch(
        "homeassistant.components.recorder.Recorder._cleanup_legacy_states_event_ids"
    ):
        hass = await async_test_home_assistant(asyncio.get_running_loop())
        recorder_helper.async_initialize_recorder(hass)
        assert await async_setup_component(
            hass, "recorder", {"recorder": {"db_url": dburl}}
        )
        await hass.async_block_till_done()
        await async_wait_recording_done(hass)
        await async_wait_recording_done(hass)

        def _add_data():
            with session_scope(hass=hass) as session:
                session.add(old_db_schema.Events.from_event(custom_event))
                session.add(old_db_schema.States.from_event(state_changed_event))

        await recorder.get_instance(hass).async_add_executor_job(_add_data)
        await hass.async_block_till_done()
        await recorder.get_instance(hass).async_block_till_done()

        states_indexes = await recorder.get_instance(hass).async_add_executor_job(
            _get_states_index_names
        )
        states_index_names = {index["name"] for index in states_indexes}
        assert recorder.get_instance(hass).use_legacy_events_index is True

        await hass.async_stop()
        await hass.async_block_till_done()

    assert "ix_states_event_id" in states_index_names
    assert "ix_states_entity_id_last_updated_ts" in states_index_names

    with patch("homeassistant.components.recorder.Recorder._post_migrate_entity_ids"):
        hass = await async_test_home_assistant(asyncio.get_running_loop())
        recorder_helper.async_initialize_recorder(hass)
        assert await async_setup_component(
            hass, "recorder", {"recorder": {"db_url": dburl}}
        )
        await hass.async_block_till_done()

        # We need to wait for all the migration tasks to complete
        # before we can check the database.
        for _ in range(number_of_migrations):
            await recorder.get_instance(hass).async_block_till_done()
            await async_wait_recording_done(hass)

        states_indexes = await recorder.get_instance(hass).async_add_executor_job(
            _get_states_index_names
        )
        states_index_names = {index["name"] for index in states_indexes}
        await hass.async_stop()
        await hass.async_block_till_done()

    assert "ix_states_entity_id_last_updated_ts" in states_index_names

    hass = await async_test_home_assistant(asyncio.get_running_loop())
    recorder_helper.async_initialize_recorder(hass)
    assert await async_setup_component(
        hass, "recorder", {"recorder": {"db_url": dburl}}
    )
    await hass.async_block_till_done()

    # We need to wait for all the migration tasks to complete
    # before we can check the database.
    for _ in range(number_of_migrations):
        await recorder.get_instance(hass).async_block_till_done()
        await async_wait_recording_done(hass)

    states_indexes = await recorder.get_instance(hass).async_add_executor_job(
        _get_states_index_names
    )
    states_index_names = {index["name"] for index in states_indexes}
    assert "ix_states_entity_id_last_updated_ts" not in states_index_names

    await hass.async_stop()
    dt_util.DEFAULT_TIME_ZONE = ORIG_TZ
@@ -63,6 +63,24 @@ YAML_CONFIG = {
    }
}

YAML_CONFIG_FULL_TABLE_SCAN = {
    "sql": {
        CONF_NAME: "Get entity_id",
        CONF_QUERY: "SELECT entity_id from states",
        CONF_COLUMN_NAME: "entity_id",
        CONF_UNIQUE_ID: "entity_id_12345",
    }
}


YAML_CONFIG_FULL_TABLE_SCAN_NO_UNIQUE_ID = {
    "sql": {
        CONF_NAME: "Get entity_id",
        CONF_QUERY: "SELECT entity_id from states",
        CONF_COLUMN_NAME: "entity_id",
    }
}

YAML_CONFIG_BINARY = {
    "sql": {
        CONF_DB_URL: "sqlite://",
@@ -11,14 +11,21 @@ from sqlalchemy.exc import SQLAlchemyError

from homeassistant.components.recorder import Recorder
from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
from homeassistant.components.sql.const import DOMAIN
from homeassistant.components.sql.const import CONF_QUERY, DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import STATE_UNKNOWN
from homeassistant.const import CONF_UNIQUE_ID, STATE_UNKNOWN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir
from homeassistant.setup import async_setup_component
from homeassistant.util import dt

from . import YAML_CONFIG, YAML_CONFIG_BINARY, init_integration
from . import (
    YAML_CONFIG,
    YAML_CONFIG_BINARY,
    YAML_CONFIG_FULL_TABLE_SCAN,
    YAML_CONFIG_FULL_TABLE_SCAN_NO_UNIQUE_ID,
    init_integration,
)

from tests.common import MockConfigEntry, async_fire_time_changed

@@ -322,3 +329,48 @@ async def test_binary_data_from_yaml_setup(
    state = hass.states.get("sensor.get_binary_value")
    assert state.state == "0xd34324324230392032"
    assert state.attributes["test_attr"] == "0xd343aa"


async def test_issue_when_using_old_query(
    recorder_mock: Recorder, hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
    """Test we create an issue for an old query that will do a full table scan."""

    assert await async_setup_component(hass, DOMAIN, YAML_CONFIG_FULL_TABLE_SCAN)
    await hass.async_block_till_done()
    assert "Query contains entity_id but does not reference states_meta" in caplog.text

    assert not hass.states.async_all()
    issue_registry = ir.async_get(hass)

    config = YAML_CONFIG_FULL_TABLE_SCAN["sql"]

    unique_id = config[CONF_UNIQUE_ID]

    issue = issue_registry.async_get_issue(
        DOMAIN, f"entity_id_query_does_full_table_scan_{unique_id}"
    )
    assert issue.translation_placeholders == {"query": config[CONF_QUERY]}


async def test_issue_when_using_old_query_without_unique_id(
    recorder_mock: Recorder, hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
    """Test we create an issue for an old query that will do a full table scan."""

    assert await async_setup_component(
        hass, DOMAIN, YAML_CONFIG_FULL_TABLE_SCAN_NO_UNIQUE_ID
    )
    await hass.async_block_till_done()
    assert "Query contains entity_id but does not reference states_meta" in caplog.text

    assert not hass.states.async_all()
    issue_registry = ir.async_get(hass)

    config = YAML_CONFIG_FULL_TABLE_SCAN_NO_UNIQUE_ID["sql"]
    query = config[CONF_QUERY]

    issue = issue_registry.async_get_issue(
        DOMAIN, f"entity_id_query_does_full_table_scan_{query}"
    )
    assert issue.translation_placeholders == {"query": query}
@@ -35,7 +35,7 @@ from homeassistant.const import (
    ATTR_DEVICE_CLASS,
    ATTR_ENTITY_ID,
    ATTR_UNIT_OF_MEASUREMENT,
    EVENT_HOMEASSISTANT_START,
    EVENT_HOMEASSISTANT_STARTED,
    STATE_UNAVAILABLE,
    STATE_UNKNOWN,
    UnitOfEnergy,

@@ -105,7 +105,7 @@ async def test_state(hass: HomeAssistant, yaml_config, config_entry_config) -> N
    await hass.async_block_till_done()
    entity_id = config_entry_config["source"]

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
    await hass.async_block_till_done()

    hass.states.async_set(

@@ -301,7 +301,7 @@ async def test_init(hass: HomeAssistant, yaml_config, config_entry_config) -> No
    await hass.async_block_till_done()
    entity_id = config_entry_config["source"]

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
    await hass.async_block_till_done()

    state = hass.states.get("sensor.energy_bill_onpeak")

@@ -346,7 +346,7 @@ async def test_unique_id(
    assert await async_setup_component(hass, DOMAIN, yaml_config)
    await hass.async_block_till_done()

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
    await hass.async_block_till_done()

    assert len(entity_registry.entities) == 4

@@ -400,7 +400,7 @@ async def test_entity_name(hass: HomeAssistant, yaml_config, entity_id, name) ->
    assert await async_setup_component(hass, DOMAIN, yaml_config)
    await hass.async_block_till_done()

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
    await hass.async_block_till_done()

    state = hass.states.get(entity_id)

@@ -475,7 +475,8 @@ async def test_device_class(
    entity_id_energy = "sensor.energy"
    entity_id_gas = "sensor.gas"

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)

    await hass.async_block_till_done()

    hass.states.async_set(

@@ -657,7 +658,9 @@ async def test_restore_state(
    assert state.state == STATE_UNKNOWN

    # utility_meter is loaded, now set sensors according to utility_meter:
    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)

    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)

    await hass.async_block_till_done()

    state = hass.states.get("select.energy_bill")

@@ -719,7 +722,8 @@ async def test_net_consumption(
    await hass.async_block_till_done()
    entity_id = config_entry_config["source"]

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)

    hass.states.async_set(
        entity_id, 2, {ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR}
    )

@@ -792,7 +796,8 @@ async def test_non_net_consumption(
    await hass.async_block_till_done()
    entity_id = config_entry_config["source"]

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)

    hass.states.async_set(
        entity_id, 2, {ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR}
    )

@@ -817,7 +822,7 @@ async def test_non_net_consumption(
        force_update=True,
    )
    await hass.async_block_till_done()
    assert "Invalid state " in caplog.text
    assert "invalid new state " in caplog.text

    state = hass.states.get("sensor.energy_bill")
    assert state is not None

@@ -882,7 +887,7 @@ async def test_delta_values(
    await hass.async_block_till_done()
    entity_id = config_entry_config["source"]

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)

    async_fire_time_changed(hass, now)
    hass.states.async_set(

@@ -903,7 +908,7 @@ async def test_delta_values(
        force_update=True,
    )
    await hass.async_block_till_done()
    assert "Invalid state None" in caplog.text
    assert "invalid new state from sensor.energy : None" in caplog.text

    now += timedelta(seconds=30)
    with freeze_time(now):

@@ -992,7 +997,7 @@ async def test_non_periodically_resetting(
    await hass.async_block_till_done()
    entity_id = config_entry_config["source"]

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)

    async_fire_time_changed(hass, now)
    hass.states.async_set(

@@ -1120,7 +1125,8 @@ async def test_non_periodically_resetting_meter_with_tariffs(
    await hass.async_block_till_done()
    entity_id = config_entry_config["source"]

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)

    await hass.async_block_till_done()

    hass.states.async_set(

@@ -1226,7 +1232,7 @@ async def _test_self_reset(
    assert await async_setup_component(hass, DOMAIN, config)
    await hass.async_block_till_done()

    hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
    hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
    entity_id = config[DOMAIN]["energy_bill"]["source"]

    async_fire_time_changed(hass, now)