mirror of
https://github.com/home-assistant/core.git
synced 2025-07-21 12:17:07 +00:00
2023.4.3 (#91316)
This commit is contained in:
commit
8ec6afb85a
@ -67,6 +67,13 @@ SCAN_INTERVAL = datetime.timedelta(seconds=60)
|
||||
# Don't support rrules more often than daily
|
||||
VALID_FREQS = {"DAILY", "WEEKLY", "MONTHLY", "YEARLY"}
|
||||
|
||||
# Ensure events created in Home Assistant have a positive duration
|
||||
MIN_NEW_EVENT_DURATION = datetime.timedelta(seconds=1)
|
||||
|
||||
# Events must have a non-negative duration e.g. Google Calendar can create zero
|
||||
# duration events in the UI.
|
||||
MIN_EVENT_DURATION = datetime.timedelta(seconds=0)
|
||||
|
||||
|
||||
def _has_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]:
|
||||
"""Assert that all datetime values have a timezone."""
|
||||
@ -116,17 +123,18 @@ def _as_local_timezone(*keys: Any) -> Callable[[dict[str, Any]], dict[str, Any]]
|
||||
return validate
|
||||
|
||||
|
||||
def _has_duration(
|
||||
start_key: str, end_key: str
|
||||
def _has_min_duration(
|
||||
start_key: str, end_key: str, min_duration: datetime.timedelta
|
||||
) -> Callable[[dict[str, Any]], dict[str, Any]]:
|
||||
"""Verify that the time span between start and end is positive."""
|
||||
"""Verify that the time span between start and end has a minimum duration."""
|
||||
|
||||
def validate(obj: dict[str, Any]) -> dict[str, Any]:
|
||||
"""Test that all keys in the dict are in order."""
|
||||
if (start := obj.get(start_key)) and (end := obj.get(end_key)):
|
||||
duration = end - start
|
||||
if duration.total_seconds() <= 0:
|
||||
raise vol.Invalid(f"Expected positive event duration ({start}, {end})")
|
||||
if duration < min_duration:
|
||||
raise vol.Invalid(
|
||||
f"Expected minimum event duration of {min_duration} ({start}, {end})"
|
||||
)
|
||||
return obj
|
||||
|
||||
return validate
|
||||
@ -204,8 +212,8 @@ CREATE_EVENT_SCHEMA = vol.All(
|
||||
),
|
||||
_has_consistent_timezone(EVENT_START_DATETIME, EVENT_END_DATETIME),
|
||||
_as_local_timezone(EVENT_START_DATETIME, EVENT_END_DATETIME),
|
||||
_has_duration(EVENT_START_DATE, EVENT_END_DATE),
|
||||
_has_duration(EVENT_START_DATETIME, EVENT_END_DATETIME),
|
||||
_has_min_duration(EVENT_START_DATE, EVENT_END_DATE, MIN_NEW_EVENT_DURATION),
|
||||
_has_min_duration(EVENT_START_DATETIME, EVENT_END_DATETIME, MIN_NEW_EVENT_DURATION),
|
||||
)
|
||||
|
||||
WEBSOCKET_EVENT_SCHEMA = vol.Schema(
|
||||
@ -221,7 +229,7 @@ WEBSOCKET_EVENT_SCHEMA = vol.Schema(
|
||||
_has_same_type(EVENT_START, EVENT_END),
|
||||
_has_consistent_timezone(EVENT_START, EVENT_END),
|
||||
_as_local_timezone(EVENT_START, EVENT_END),
|
||||
_has_duration(EVENT_START, EVENT_END),
|
||||
_has_min_duration(EVENT_START, EVENT_END, MIN_NEW_EVENT_DURATION),
|
||||
)
|
||||
)
|
||||
|
||||
@ -238,7 +246,7 @@ CALENDAR_EVENT_SCHEMA = vol.Schema(
|
||||
_has_timezone("start", "end"),
|
||||
_has_consistent_timezone("start", "end"),
|
||||
_as_local_timezone("start", "end"),
|
||||
_has_duration("start", "end"),
|
||||
_has_min_duration("start", "end", MIN_EVENT_DURATION),
|
||||
),
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
@ -346,6 +354,16 @@ class CalendarEvent:
|
||||
f"Failed to validate CalendarEvent: {err}"
|
||||
) from err
|
||||
|
||||
# It is common to set a start an end date to be the same thing for
|
||||
# an all day event, but that is not a valid duration. Fix to have a
|
||||
# duration of one day.
|
||||
if (
|
||||
not isinstance(self.start, datetime.datetime)
|
||||
and not isinstance(self.end, datetime.datetime)
|
||||
and self.start == self.end
|
||||
):
|
||||
self.end = self.start + datetime.timedelta(days=1)
|
||||
|
||||
|
||||
def _event_dict_factory(obj: Iterable[tuple[str, Any]]) -> dict[str, str]:
|
||||
"""Convert CalendarEvent dataclass items to dictionary of attributes."""
|
||||
|
@ -32,6 +32,7 @@ from .const import DEFAULT_EXPOSED_ATTRIBUTES, DEFAULT_EXPOSED_DOMAINS, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_DEFAULT_ERROR_TEXT = "Sorry, I couldn't understand that"
|
||||
_ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]
|
||||
|
||||
REGEX_TYPE = type(re.compile(""))
|
||||
|
||||
@ -450,8 +451,10 @@ class DefaultAgent(AbstractConversationAgent):
|
||||
|
||||
@core.callback
|
||||
def _async_handle_entity_registry_changed(self, event: core.Event) -> None:
|
||||
"""Clear names list cache when an entity changes aliases."""
|
||||
if event.data["action"] == "update" and "aliases" not in event.data["changes"]:
|
||||
"""Clear names list cache when an entity registry entry has changed."""
|
||||
if event.data["action"] == "update" and not any(
|
||||
field in event.data["changes"] for field in _ENTITY_REGISTRY_UPDATE_FIELDS
|
||||
):
|
||||
return
|
||||
self._slot_lists = None
|
||||
|
||||
|
@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/environment_canada",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["env_canada"],
|
||||
"requirements": ["env_canada==0.5.31"]
|
||||
"requirements": ["env_canada==0.5.32"]
|
||||
}
|
||||
|
@ -9,7 +9,9 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN, LOGGER
|
||||
|
||||
DATA_SCHEMA = vol.Schema({vol.Required("username"): str, vol.Required("password"): str})
|
||||
DATA_SCHEMA = vol.Schema(
|
||||
{vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str}
|
||||
)
|
||||
|
||||
|
||||
async def validate_input(hass: core.HomeAssistant, data):
|
||||
@ -20,18 +22,11 @@ async def validate_input(hass: core.HomeAssistant, data):
|
||||
|
||||
session = async_get_clientsession(hass)
|
||||
try:
|
||||
api = await async_get_api(
|
||||
data[CONF_USERNAME], data[CONF_PASSWORD], session=session
|
||||
)
|
||||
await async_get_api(data[CONF_USERNAME], data[CONF_PASSWORD], session=session)
|
||||
except RequestError as request_error:
|
||||
LOGGER.error("Error connecting to the Flo API: %s", request_error)
|
||||
raise CannotConnect from request_error
|
||||
|
||||
user_info = await api.user.get_info()
|
||||
a_location_id = user_info["locations"][0]["id"]
|
||||
location_info = await api.location.get_info(a_location_id)
|
||||
return {"title": location_info["nickname"]}
|
||||
|
||||
|
||||
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for flo."""
|
||||
@ -45,8 +40,10 @@ class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
await self.async_set_unique_id(user_input[CONF_USERNAME])
|
||||
self._abort_if_unique_id_configured()
|
||||
try:
|
||||
info = await validate_input(self.hass, user_input)
|
||||
return self.async_create_entry(title=info["title"], data=user_input)
|
||||
await validate_input(self.hass, user_input)
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_USERNAME], data=user_input
|
||||
)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
|
||||
|
@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20230406.1"]
|
||||
"requirements": ["home-assistant-frontend==20230411.0"]
|
||||
}
|
||||
|
@ -20,7 +20,7 @@ LANG_TO_BROADCAST_COMMAND = {
|
||||
"it": ("Trasmetti {0}", "Trasmetti in {1} {0}"),
|
||||
"ja": ("{0}とブロードキャストして", "{0}と{1}にブロードキャストして"),
|
||||
"ko": ("{0} 라고 방송해 줘", "{0} 라고 {1}에 방송해 줘"),
|
||||
"pt": ("Transmite {0}", "Transmite para {1} {0}"),
|
||||
"pt": ("Transmitir {0}", "Transmitir {0} para {1}"),
|
||||
}
|
||||
|
||||
|
||||
|
@ -7,5 +7,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aiopyarr"],
|
||||
"requirements": ["aiopyarr==22.11.0"]
|
||||
"requirements": ["aiopyarr==23.4.0"]
|
||||
}
|
||||
|
@ -196,7 +196,7 @@ def _get_calendar_event(event: Event) -> CalendarEvent:
|
||||
else:
|
||||
start = event.start
|
||||
end = event.end
|
||||
if (end - start) <= timedelta(days=0):
|
||||
if (end - start) < timedelta(days=0):
|
||||
end = start + timedelta(days=1)
|
||||
|
||||
return CalendarEvent(
|
||||
|
@ -25,7 +25,6 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import Event, HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import (
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
@ -47,7 +46,7 @@ from .const import (
|
||||
DOMAIN,
|
||||
ERROR_STATES,
|
||||
)
|
||||
from .helpers import parse_id
|
||||
from .helpers import NukiWebhookException, parse_id
|
||||
|
||||
_NukiDeviceT = TypeVar("_NukiDeviceT", bound=NukiDevice)
|
||||
|
||||
@ -61,6 +60,87 @@ def _get_bridge_devices(bridge: NukiBridge) -> tuple[list[NukiLock], list[NukiOp
|
||||
return bridge.locks, bridge.openers
|
||||
|
||||
|
||||
async def _create_webhook(
|
||||
hass: HomeAssistant, entry: ConfigEntry, bridge: NukiBridge
|
||||
) -> None:
|
||||
# Create HomeAssistant webhook
|
||||
async def handle_webhook(
|
||||
hass: HomeAssistant, webhook_id: str, request: web.Request
|
||||
) -> web.Response:
|
||||
"""Handle webhook callback."""
|
||||
try:
|
||||
data = await request.json()
|
||||
except ValueError:
|
||||
return web.Response(status=HTTPStatus.BAD_REQUEST)
|
||||
|
||||
locks = hass.data[DOMAIN][entry.entry_id][DATA_LOCKS]
|
||||
openers = hass.data[DOMAIN][entry.entry_id][DATA_OPENERS]
|
||||
|
||||
devices = [x for x in locks + openers if x.nuki_id == data["nukiId"]]
|
||||
if len(devices) == 1:
|
||||
devices[0].update_from_callback(data)
|
||||
|
||||
coordinator = hass.data[DOMAIN][entry.entry_id][DATA_COORDINATOR]
|
||||
coordinator.async_set_updated_data(None)
|
||||
|
||||
return web.Response(status=HTTPStatus.OK)
|
||||
|
||||
webhook.async_register(
|
||||
hass, DOMAIN, entry.title, entry.entry_id, handle_webhook, local_only=True
|
||||
)
|
||||
|
||||
webhook_url = webhook.async_generate_path(entry.entry_id)
|
||||
|
||||
try:
|
||||
hass_url = get_url(
|
||||
hass,
|
||||
allow_cloud=False,
|
||||
allow_external=False,
|
||||
allow_ip=True,
|
||||
require_ssl=False,
|
||||
)
|
||||
except NoURLAvailableError:
|
||||
webhook.async_unregister(hass, entry.entry_id)
|
||||
raise NukiWebhookException(
|
||||
f"Error registering URL for webhook {entry.entry_id}: "
|
||||
"HomeAssistant URL is not available"
|
||||
) from None
|
||||
|
||||
url = f"{hass_url}{webhook_url}"
|
||||
|
||||
if hass_url.startswith("https"):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"https_webhook",
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="https_webhook",
|
||||
translation_placeholders={
|
||||
"base_url": hass_url,
|
||||
"network_link": "https://my.home-assistant.io/redirect/network/",
|
||||
},
|
||||
)
|
||||
else:
|
||||
ir.async_delete_issue(hass, DOMAIN, "https_webhook")
|
||||
|
||||
try:
|
||||
async with async_timeout.timeout(10):
|
||||
await hass.async_add_executor_job(
|
||||
_register_webhook, bridge, entry.entry_id, url
|
||||
)
|
||||
except InvalidCredentialsException as err:
|
||||
webhook.async_unregister(hass, entry.entry_id)
|
||||
raise NukiWebhookException(
|
||||
f"Invalid credentials for Bridge: {err}"
|
||||
) from err
|
||||
except RequestException as err:
|
||||
webhook.async_unregister(hass, entry.entry_id)
|
||||
raise NukiWebhookException(
|
||||
f"Error communicating with Bridge: {err}"
|
||||
) from err
|
||||
|
||||
|
||||
def _register_webhook(bridge: NukiBridge, entry_id: str, url: str) -> bool:
|
||||
# Register HA URL as webhook if not already
|
||||
callbacks = bridge.callback_list()
|
||||
@ -126,79 +206,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
sw_version=info["versions"]["firmwareVersion"],
|
||||
)
|
||||
|
||||
async def handle_webhook(
|
||||
hass: HomeAssistant, webhook_id: str, request: web.Request
|
||||
) -> web.Response:
|
||||
"""Handle webhook callback."""
|
||||
try:
|
||||
data = await request.json()
|
||||
except ValueError:
|
||||
return web.Response(status=HTTPStatus.BAD_REQUEST)
|
||||
|
||||
locks = hass.data[DOMAIN][entry.entry_id][DATA_LOCKS]
|
||||
openers = hass.data[DOMAIN][entry.entry_id][DATA_OPENERS]
|
||||
|
||||
devices = [x for x in locks + openers if x.nuki_id == data["nukiId"]]
|
||||
if len(devices) == 1:
|
||||
devices[0].update_from_callback(data)
|
||||
|
||||
coordinator = hass.data[DOMAIN][entry.entry_id][DATA_COORDINATOR]
|
||||
coordinator.async_set_updated_data(None)
|
||||
|
||||
return web.Response(status=HTTPStatus.OK)
|
||||
|
||||
webhook.async_register(
|
||||
hass, DOMAIN, entry.title, entry.entry_id, handle_webhook, local_only=True
|
||||
)
|
||||
|
||||
webhook_url = webhook.async_generate_path(entry.entry_id)
|
||||
|
||||
try:
|
||||
hass_url = get_url(
|
||||
hass,
|
||||
allow_cloud=False,
|
||||
allow_external=False,
|
||||
allow_ip=True,
|
||||
require_ssl=False,
|
||||
)
|
||||
except NoURLAvailableError:
|
||||
webhook.async_unregister(hass, entry.entry_id)
|
||||
raise ConfigEntryNotReady(
|
||||
f"Error registering URL for webhook {entry.entry_id}: "
|
||||
"HomeAssistant URL is not available"
|
||||
) from None
|
||||
|
||||
url = f"{hass_url}{webhook_url}"
|
||||
|
||||
if hass_url.startswith("https"):
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"https_webhook",
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="https_webhook",
|
||||
translation_placeholders={
|
||||
"base_url": hass_url,
|
||||
"network_link": "https://my.home-assistant.io/redirect/network/",
|
||||
},
|
||||
)
|
||||
else:
|
||||
ir.async_delete_issue(hass, DOMAIN, "https_webhook")
|
||||
|
||||
try:
|
||||
async with async_timeout.timeout(10):
|
||||
await hass.async_add_executor_job(
|
||||
_register_webhook, bridge, entry.entry_id, url
|
||||
)
|
||||
except InvalidCredentialsException as err:
|
||||
webhook.async_unregister(hass, entry.entry_id)
|
||||
raise ConfigEntryNotReady(f"Invalid credentials for Bridge: {err}") from err
|
||||
except RequestException as err:
|
||||
webhook.async_unregister(hass, entry.entry_id)
|
||||
raise ConfigEntryNotReady(
|
||||
f"Error communicating with Bridge: {err}"
|
||||
) from err
|
||||
await _create_webhook(hass, entry, bridge)
|
||||
except NukiWebhookException as err:
|
||||
_LOGGER.warning("Error registering HomeAssistant webhook: %s", err)
|
||||
|
||||
async def _stop_nuki(_: Event):
|
||||
"""Stop and remove the Nuki webhook."""
|
||||
|
@ -13,3 +13,7 @@ class CannotConnect(exceptions.HomeAssistantError):
|
||||
|
||||
class InvalidAuth(exceptions.HomeAssistantError):
|
||||
"""Error to indicate there is invalid auth."""
|
||||
|
||||
|
||||
class NukiWebhookException(exceptions.HomeAssistantError):
|
||||
"""Error to indicate there was an issue with the webhook."""
|
||||
|
@ -7,5 +7,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aiopyarr"],
|
||||
"requirements": ["aiopyarr==22.11.0"]
|
||||
"requirements": ["aiopyarr==23.4.0"]
|
||||
}
|
||||
|
@ -32,6 +32,7 @@ from .const import ( # noqa: F401
|
||||
INTEGRATION_PLATFORM_EXCLUDE_ATTRIBUTES,
|
||||
INTEGRATION_PLATFORMS_LOAD_IN_RECORDER_THREAD,
|
||||
SQLITE_URL_PREFIX,
|
||||
SupportedDialect,
|
||||
)
|
||||
from .core import Recorder
|
||||
from .services import async_register_services
|
||||
|
@ -178,7 +178,7 @@ class ReolinkFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
|
||||
data_schema = data_schema.extend(
|
||||
{
|
||||
vol.Optional(CONF_PORT): cv.positive_int,
|
||||
vol.Optional(CONF_USE_HTTPS): bool,
|
||||
vol.Required(CONF_USE_HTTPS, default=False): bool,
|
||||
}
|
||||
)
|
||||
|
||||
|
@ -7,5 +7,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aiopyarr"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["aiopyarr==22.11.0"]
|
||||
"requirements": ["aiopyarr==23.4.0"]
|
||||
}
|
||||
|
@ -64,6 +64,7 @@ def validate_query(db_url: str, query: str, column: str) -> bool:
|
||||
|
||||
if sess:
|
||||
sess.close()
|
||||
engine.dispose()
|
||||
|
||||
return True
|
||||
|
||||
|
16
homeassistant/components/sql/models.py
Normal file
16
homeassistant/components/sql/models.py
Normal file
@ -0,0 +1,16 @@
|
||||
"""The sql integration models."""
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
|
||||
from sqlalchemy.orm import scoped_session
|
||||
|
||||
from homeassistant.core import CALLBACK_TYPE
|
||||
|
||||
|
||||
@dataclass(slots=True)
|
||||
class SQLData:
|
||||
"""Data for the sql integration."""
|
||||
|
||||
shutdown_event_cancel: CALLBACK_TYPE
|
||||
session_makers_by_db_url: dict[str, scoped_session]
|
@ -13,7 +13,11 @@ from sqlalchemy.orm import Session, scoped_session, sessionmaker
|
||||
from sqlalchemy.sql.lambdas import StatementLambdaElement
|
||||
from sqlalchemy.util import LRUCache
|
||||
|
||||
from homeassistant.components.recorder import CONF_DB_URL, get_instance
|
||||
from homeassistant.components.recorder import (
|
||||
CONF_DB_URL,
|
||||
SupportedDialect,
|
||||
get_instance,
|
||||
)
|
||||
from homeassistant.components.sensor import (
|
||||
CONF_STATE_CLASS,
|
||||
SensorDeviceClass,
|
||||
@ -27,8 +31,9 @@ from homeassistant.const import (
|
||||
CONF_UNIQUE_ID,
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
CONF_VALUE_TEMPLATE,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.helpers import issue_registry as ir
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType
|
||||
@ -38,6 +43,7 @@ from homeassistant.helpers.template import Template
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_COLUMN_NAME, CONF_QUERY, DB_URL_RE, DOMAIN
|
||||
from .models import SQLData
|
||||
from .util import resolve_db_url
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@ -127,6 +133,36 @@ async def async_setup_entry(
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def _async_get_or_init_domain_data(hass: HomeAssistant) -> SQLData:
|
||||
"""Get or initialize domain data."""
|
||||
if DOMAIN in hass.data:
|
||||
sql_data: SQLData = hass.data[DOMAIN]
|
||||
return sql_data
|
||||
|
||||
session_makers_by_db_url: dict[str, scoped_session] = {}
|
||||
|
||||
#
|
||||
# Ensure we dispose of all engines at shutdown
|
||||
# to avoid unclean disconnects
|
||||
#
|
||||
# Shutdown all sessions in the executor since they will
|
||||
# do blocking I/O
|
||||
#
|
||||
def _shutdown_db_engines(event: Event) -> None:
|
||||
"""Shutdown all database engines."""
|
||||
for sessmaker in session_makers_by_db_url.values():
|
||||
sessmaker.connection().engine.dispose()
|
||||
|
||||
cancel_shutdown = hass.bus.async_listen_once(
|
||||
EVENT_HOMEASSISTANT_STOP, _shutdown_db_engines
|
||||
)
|
||||
|
||||
sql_data = SQLData(cancel_shutdown, session_makers_by_db_url)
|
||||
hass.data[DOMAIN] = sql_data
|
||||
return sql_data
|
||||
|
||||
|
||||
async def async_setup_sensor(
|
||||
hass: HomeAssistant,
|
||||
name: str,
|
||||
@ -144,25 +180,39 @@ async def async_setup_sensor(
|
||||
"""Set up the SQL sensor."""
|
||||
instance = get_instance(hass)
|
||||
sessmaker: scoped_session | None
|
||||
if use_database_executor := (db_url == instance.db_url):
|
||||
sql_data = _async_get_or_init_domain_data(hass)
|
||||
uses_recorder_db = db_url == instance.db_url
|
||||
use_database_executor = False
|
||||
if uses_recorder_db and instance.dialect_name == SupportedDialect.SQLITE:
|
||||
use_database_executor = True
|
||||
assert instance.engine is not None
|
||||
sessmaker = scoped_session(sessionmaker(bind=instance.engine, future=True))
|
||||
elif not (
|
||||
sessmaker := await hass.async_add_executor_job(
|
||||
_validate_and_get_session_maker_for_db_url, db_url
|
||||
)
|
||||
# For other databases we need to create a new engine since
|
||||
# we want the connection to use the default timezone and these
|
||||
# database engines will use QueuePool as its only sqlite that
|
||||
# needs our custom pool. If there is already a session maker
|
||||
# for this db_url we can use that so we do not create a new engine
|
||||
# for every sensor.
|
||||
elif db_url in sql_data.session_makers_by_db_url:
|
||||
sessmaker = sql_data.session_makers_by_db_url[db_url]
|
||||
elif sessmaker := await hass.async_add_executor_job(
|
||||
_validate_and_get_session_maker_for_db_url, db_url
|
||||
):
|
||||
sql_data.session_makers_by_db_url[db_url] = sessmaker
|
||||
else:
|
||||
return
|
||||
|
||||
upper_query = query_str.upper()
|
||||
if use_database_executor:
|
||||
if uses_recorder_db:
|
||||
redacted_query = redact_credentials(query_str)
|
||||
|
||||
issue_key = unique_id if unique_id else redacted_query
|
||||
# If the query has a unique id and they fix it we can dismiss the issue
|
||||
# but if it doesn't have a unique id they have to ignore it instead
|
||||
|
||||
if "ENTITY_ID" in upper_query and "STATES_META" not in upper_query:
|
||||
if (
|
||||
"ENTITY_ID," in upper_query or "ENTITY_ID " in upper_query
|
||||
) and "STATES_META" not in upper_query:
|
||||
_LOGGER.error(
|
||||
"The query `%s` contains the keyword `entity_id` but does not "
|
||||
"reference the `states_meta` table. This will cause a full table "
|
||||
|
@ -3,7 +3,11 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.cover import CoverEntity, CoverEntityFeature
|
||||
from homeassistant.components.cover import (
|
||||
DOMAIN as COVER_DOMAIN,
|
||||
CoverEntity,
|
||||
CoverEntityFeature,
|
||||
)
|
||||
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
@ -36,6 +40,7 @@ async def async_setup_entry(
|
||||
CoverSwitch(
|
||||
hass,
|
||||
config_entry.title,
|
||||
COVER_DOMAIN,
|
||||
entity_id,
|
||||
config_entry.entry_id,
|
||||
)
|
||||
|
@ -23,13 +23,15 @@ class BaseEntity(Entity):
|
||||
"""Represents a Switch as an X."""
|
||||
|
||||
_attr_should_poll = False
|
||||
_is_new_entity: bool
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry_title: str,
|
||||
domain: str,
|
||||
switch_entity_id: str,
|
||||
unique_id: str | None,
|
||||
unique_id: str,
|
||||
) -> None:
|
||||
"""Initialize Switch as an X."""
|
||||
registry = er.async_get(hass)
|
||||
@ -41,7 +43,7 @@ class BaseEntity(Entity):
|
||||
|
||||
name: str | None = config_entry_title
|
||||
if wrapped_switch:
|
||||
name = wrapped_switch.name or wrapped_switch.original_name
|
||||
name = wrapped_switch.original_name
|
||||
|
||||
self._device_id = device_id
|
||||
if device_id and (device := device_registry.async_get(device_id)):
|
||||
@ -55,6 +57,10 @@ class BaseEntity(Entity):
|
||||
self._attr_unique_id = unique_id
|
||||
self._switch_entity_id = switch_entity_id
|
||||
|
||||
self._is_new_entity = (
|
||||
registry.async_get_entity_id(domain, SWITCH_AS_X_DOMAIN, unique_id) is None
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_state_changed_listener(self, event: Event | None = None) -> None:
|
||||
"""Handle child updates."""
|
||||
@ -67,7 +73,7 @@ class BaseEntity(Entity):
|
||||
self._attr_available = True
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
"""Register callbacks and copy the wrapped entity's custom name if set."""
|
||||
|
||||
@callback
|
||||
def _async_state_changed_listener(event: Event | None = None) -> None:
|
||||
@ -93,6 +99,15 @@ class BaseEntity(Entity):
|
||||
{"entity_id": self._switch_entity_id},
|
||||
)
|
||||
|
||||
if not self._is_new_entity:
|
||||
return
|
||||
|
||||
wrapped_switch = registry.async_get(self._switch_entity_id)
|
||||
if not wrapped_switch or wrapped_switch.name is None:
|
||||
return
|
||||
|
||||
registry.async_update_entity(self.entity_id, name=wrapped_switch.name)
|
||||
|
||||
|
||||
class BaseToggleEntity(BaseEntity, ToggleEntity):
|
||||
"""Represents a Switch as a ToggleEntity."""
|
||||
|
@ -3,7 +3,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.fan import FanEntity
|
||||
from homeassistant.components.fan import DOMAIN as FAN_DOMAIN, FanEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant
|
||||
@ -29,6 +29,7 @@ async def async_setup_entry(
|
||||
FanSwitch(
|
||||
hass,
|
||||
config_entry.title,
|
||||
FAN_DOMAIN,
|
||||
entity_id,
|
||||
config_entry.entry_id,
|
||||
)
|
||||
|
@ -1,7 +1,11 @@
|
||||
"""Light support for switch entities."""
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.light import ColorMode, LightEntity
|
||||
from homeassistant.components.light import (
|
||||
DOMAIN as LIGHT_DOMAIN,
|
||||
ColorMode,
|
||||
LightEntity,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant
|
||||
@ -27,6 +31,7 @@ async def async_setup_entry(
|
||||
LightSwitch(
|
||||
hass,
|
||||
config_entry.title,
|
||||
LIGHT_DOMAIN,
|
||||
entity_id,
|
||||
config_entry.entry_id,
|
||||
)
|
||||
|
@ -3,7 +3,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.lock import LockEntity
|
||||
from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN, LockEntity
|
||||
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
@ -36,6 +36,7 @@ async def async_setup_entry(
|
||||
LockSwitch(
|
||||
hass,
|
||||
config_entry.title,
|
||||
LOCK_DOMAIN,
|
||||
entity_id,
|
||||
config_entry.entry_id,
|
||||
)
|
||||
|
@ -1,7 +1,11 @@
|
||||
"""Siren support for switch entities."""
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.components.siren import SirenEntity, SirenEntityFeature
|
||||
from homeassistant.components.siren import (
|
||||
DOMAIN as SIREN_DOMAIN,
|
||||
SirenEntity,
|
||||
SirenEntityFeature,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_ENTITY_ID
|
||||
from homeassistant.core import HomeAssistant
|
||||
@ -27,6 +31,7 @@ async def async_setup_entry(
|
||||
SirenSwitch(
|
||||
hass,
|
||||
config_entry.title,
|
||||
SIREN_DOMAIN,
|
||||
entity_id,
|
||||
config_entry.entry_id,
|
||||
)
|
||||
|
@ -53,7 +53,10 @@ SERVICE_SEND_KEYPRESS = "send_keypress"
|
||||
SERVICE_SEND_TEXT = "send_text"
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
) -> bool:
|
||||
"""Set up System Bridge from a config entry."""
|
||||
|
||||
# Check version before initialising
|
||||
@ -64,11 +67,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
session=async_get_clientsession(hass),
|
||||
)
|
||||
try:
|
||||
if not await version.check_supported():
|
||||
raise ConfigEntryNotReady(
|
||||
"You are not running a supported version of System Bridge. Please"
|
||||
f" update to {SUPPORTED_VERSION} or higher."
|
||||
)
|
||||
async with async_timeout.timeout(10):
|
||||
if not await version.check_supported():
|
||||
raise ConfigEntryNotReady(
|
||||
"You are not running a supported version of System Bridge. Please"
|
||||
f" update to {SUPPORTED_VERSION} or higher."
|
||||
)
|
||||
except AuthenticationException as exception:
|
||||
_LOGGER.error("Authentication failed for %s: %s", entry.title, exception)
|
||||
raise ConfigEntryAuthFailed from exception
|
||||
@ -87,7 +91,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry=entry,
|
||||
)
|
||||
try:
|
||||
async with async_timeout.timeout(30):
|
||||
async with async_timeout.timeout(10):
|
||||
await coordinator.async_get_data(MODULES)
|
||||
except AuthenticationException as exception:
|
||||
_LOGGER.error("Authentication failed for %s: %s", entry.title, exception)
|
||||
@ -105,8 +109,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
|
||||
try:
|
||||
# Wait for initial data
|
||||
async with async_timeout.timeout(30):
|
||||
while not coordinator.is_ready():
|
||||
async with async_timeout.timeout(10):
|
||||
while not coordinator.is_ready:
|
||||
_LOGGER.debug(
|
||||
"Waiting for initial data from %s (%s)",
|
||||
entry.title,
|
||||
|
@ -55,7 +55,7 @@ async def _validate_input(
|
||||
data[CONF_API_KEY],
|
||||
)
|
||||
try:
|
||||
async with async_timeout.timeout(30):
|
||||
async with async_timeout.timeout(15):
|
||||
await websocket_client.connect(session=async_get_clientsession(hass))
|
||||
hass.async_create_task(websocket_client.listen())
|
||||
response = await websocket_client.get_data(GetData(modules=["system"]))
|
||||
|
@ -82,6 +82,7 @@ class SystemBridgeDataUpdateCoordinator(
|
||||
hass, LOGGER, name=DOMAIN, update_interval=timedelta(seconds=30)
|
||||
)
|
||||
|
||||
@property
|
||||
def is_ready(self) -> bool:
|
||||
"""Return if the data is ready."""
|
||||
if self.data is None:
|
||||
@ -157,7 +158,7 @@ class SystemBridgeDataUpdateCoordinator(
|
||||
self.last_update_success = False
|
||||
self.async_update_listeners()
|
||||
except (ConnectionClosedException, ConnectionResetError) as exception:
|
||||
self.logger.info(
|
||||
self.logger.debug(
|
||||
"Websocket connection closed for %s. Will retry: %s",
|
||||
self.title,
|
||||
exception,
|
||||
@ -168,7 +169,7 @@ class SystemBridgeDataUpdateCoordinator(
|
||||
self.last_update_success = False
|
||||
self.async_update_listeners()
|
||||
except ConnectionErrorException as exception:
|
||||
self.logger.warning(
|
||||
self.logger.debug(
|
||||
"Connection error occurred for %s. Will retry: %s",
|
||||
self.title,
|
||||
exception,
|
||||
|
@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pytile"],
|
||||
"requirements": ["pytile==2022.02.0"]
|
||||
"requirements": ["pytile==2023.04.0"]
|
||||
}
|
||||
|
@ -423,6 +423,15 @@ class UtilityMeterSensor(RestoreSensor):
|
||||
@callback
|
||||
def async_reading(self, event: Event):
|
||||
"""Handle the sensor state changes."""
|
||||
if (
|
||||
source_state := self.hass.states.get(self._sensor_source_id)
|
||||
) is None or source_state.state == STATE_UNAVAILABLE:
|
||||
self._attr_available = False
|
||||
self.async_write_ha_state()
|
||||
return
|
||||
|
||||
self._attr_available = True
|
||||
|
||||
old_state: State | None = event.data.get("old_state")
|
||||
new_state: State = event.data.get("new_state") # type: ignore[assignment] # a state change event always has a new state
|
||||
|
||||
|
@ -25,7 +25,7 @@ class LightLink(ZigbeeChannel):
|
||||
|
||||
application = self._ch_pool.endpoint.device.application
|
||||
try:
|
||||
coordinator = application.get_device(application.ieee)
|
||||
coordinator = application.get_device(application.state.node_info.ieee)
|
||||
except KeyError:
|
||||
self.warning("Aborting - unable to locate required coordinator device.")
|
||||
return
|
||||
|
@ -251,14 +251,20 @@ class InovelliConfigEntityChannel(ZigbeeChannel):
|
||||
"active_energy_reports": True,
|
||||
"power_type": False,
|
||||
"switch_type": False,
|
||||
"increased_non_neutral_output": True,
|
||||
"button_delay": False,
|
||||
"smart_bulb_mode": False,
|
||||
"double_tap_up_for_max_brightness": True,
|
||||
"double_tap_down_for_min_brightness": True,
|
||||
"double_tap_up_enabled": True,
|
||||
"double_tap_down_enabled": True,
|
||||
"double_tap_up_level": True,
|
||||
"double_tap_down_level": True,
|
||||
"led_color_when_on": True,
|
||||
"led_color_when_off": True,
|
||||
"led_intensity_when_on": True,
|
||||
"led_intensity_when_off": True,
|
||||
"led_scaling_mode": True,
|
||||
"aux_switch_scenes": True,
|
||||
"binding_off_to_on_sync_level": True,
|
||||
"local_protection": False,
|
||||
"output_mode": False,
|
||||
"on_off_led_mode": True,
|
||||
|
@ -363,7 +363,7 @@ class IASZoneChannel(ZigbeeChannel):
|
||||
self.debug("started IASZoneChannel configuration")
|
||||
|
||||
await self.bind()
|
||||
ieee = self.cluster.endpoint.device.application.ieee
|
||||
ieee = self.cluster.endpoint.device.application.state.node_info.ieee
|
||||
|
||||
try:
|
||||
res = await self._cluster.write_attributes({"cie_addr": ieee})
|
||||
|
@ -20,12 +20,12 @@
|
||||
"zigpy_znp"
|
||||
],
|
||||
"requirements": [
|
||||
"bellows==0.35.0",
|
||||
"bellows==0.35.1",
|
||||
"pyserial==3.5",
|
||||
"pyserial-asyncio==0.6",
|
||||
"zha-quirks==0.0.96",
|
||||
"zha-quirks==0.0.97",
|
||||
"zigpy-deconz==0.20.0",
|
||||
"zigpy==0.54.0",
|
||||
"zigpy==0.54.1",
|
||||
"zigpy-xbee==0.17.0",
|
||||
"zigpy-zigate==0.10.3",
|
||||
"zigpy-znp==0.10.0"
|
||||
|
@ -835,6 +835,34 @@ class InovelliDefaultAllLEDOffIntensity(
|
||||
_attr_name: str = "Default all LED off intensity"
|
||||
|
||||
|
||||
@CONFIG_DIAGNOSTIC_MATCH(channel_names=CHANNEL_INOVELLI)
|
||||
class InovelliDoubleTapUpLevel(
|
||||
ZHANumberConfigurationEntity, id_suffix="double_tap_up_level"
|
||||
):
|
||||
"""Inovelli double tap up level configuration entity."""
|
||||
|
||||
_attr_entity_category = EntityCategory.CONFIG
|
||||
_attr_icon: str = ICONS[16]
|
||||
_attr_native_min_value: float = 2
|
||||
_attr_native_max_value: float = 254
|
||||
_zcl_attribute: str = "double_tap_up_level"
|
||||
_attr_name: str = "Double tap up level"
|
||||
|
||||
|
||||
@CONFIG_DIAGNOSTIC_MATCH(channel_names=CHANNEL_INOVELLI)
|
||||
class InovelliDoubleTapDownLevel(
|
||||
ZHANumberConfigurationEntity, id_suffix="double_tap_down_level"
|
||||
):
|
||||
"""Inovelli double tap down level configuration entity."""
|
||||
|
||||
_attr_entity_category = EntityCategory.CONFIG
|
||||
_attr_icon: str = ICONS[16]
|
||||
_attr_native_min_value: float = 0
|
||||
_attr_native_max_value: float = 254
|
||||
_zcl_attribute: str = "double_tap_down_level"
|
||||
_attr_name: str = "Double tap down level"
|
||||
|
||||
|
||||
@CONFIG_DIAGNOSTIC_MATCH(channel_names="opple_cluster", models={"aqara.feeder.acn001"})
|
||||
class AqaraPetFeederServingSize(ZHANumberConfigurationEntity, id_suffix="serving_size"):
|
||||
"""Aqara pet feeder serving size configuration entity."""
|
||||
|
@ -472,9 +472,10 @@ class InovelliOutputModeEntity(ZCLEnumSelectEntity, id_suffix="output_mode"):
|
||||
class InovelliSwitchType(types.enum8):
|
||||
"""Inovelli output mode."""
|
||||
|
||||
Load_Only = 0x00
|
||||
Single_Pole = 0x00
|
||||
Three_Way_Dumb = 0x01
|
||||
Three_Way_AUX = 0x02
|
||||
Single_Pole_Full_Sine = 0x03
|
||||
|
||||
|
||||
@CONFIG_DIAGNOSTIC_MATCH(
|
||||
@ -488,6 +489,44 @@ class InovelliSwitchTypeEntity(ZCLEnumSelectEntity, id_suffix="switch_type"):
|
||||
_attr_name: str = "Switch type"
|
||||
|
||||
|
||||
class InovelliLedScalingMode(types.enum1):
|
||||
"""Inovelli led mode."""
|
||||
|
||||
VZM31SN = 0x00
|
||||
LZW31SN = 0x01
|
||||
|
||||
|
||||
@CONFIG_DIAGNOSTIC_MATCH(
|
||||
channel_names=CHANNEL_INOVELLI,
|
||||
)
|
||||
class InovelliLedScalingModeEntity(ZCLEnumSelectEntity, id_suffix="led_scaling_mode"):
|
||||
"""Inovelli led mode control."""
|
||||
|
||||
_select_attr = "led_scaling_mode"
|
||||
_enum = InovelliLedScalingMode
|
||||
_attr_name: str = "Led scaling mode"
|
||||
|
||||
|
||||
class InovelliNonNeutralOutput(types.enum1):
|
||||
"""Inovelli non neutral output selection."""
|
||||
|
||||
Low = 0x00
|
||||
High = 0x01
|
||||
|
||||
|
||||
@CONFIG_DIAGNOSTIC_MATCH(
|
||||
channel_names=CHANNEL_INOVELLI,
|
||||
)
|
||||
class InovelliNonNeutralOutputEntity(
|
||||
ZCLEnumSelectEntity, id_suffix="increased_non_neutral_output"
|
||||
):
|
||||
"""Inovelli non neutral output control."""
|
||||
|
||||
_select_attr = "increased_non_neutral_output"
|
||||
_enum = InovelliNonNeutralOutput
|
||||
_attr_name: str = "Non neutral output"
|
||||
|
||||
|
||||
class AqaraFeedingMode(types.enum8):
|
||||
"""Feeding mode."""
|
||||
|
||||
|
@ -367,25 +367,49 @@ class InovelliSmartBulbMode(ZHASwitchConfigurationEntity, id_suffix="smart_bulb_
|
||||
@CONFIG_DIAGNOSTIC_MATCH(
|
||||
channel_names=CHANNEL_INOVELLI,
|
||||
)
|
||||
class InovelliDoubleTapForFullBrightness(
|
||||
ZHASwitchConfigurationEntity, id_suffix="double_tap_up_for_max_brightness"
|
||||
class InovelliDoubleTapUpEnabled(
|
||||
ZHASwitchConfigurationEntity, id_suffix="double_tap_up_enabled"
|
||||
):
|
||||
"""Inovelli double tap for full brightness control."""
|
||||
"""Inovelli double tap up enabled."""
|
||||
|
||||
_zcl_attribute: str = "double_tap_up_for_max_brightness"
|
||||
_attr_name: str = "Double tap full brightness"
|
||||
_zcl_attribute: str = "double_tap_up_enabled"
|
||||
_attr_name: str = "Double tap up enabled"
|
||||
|
||||
|
||||
@CONFIG_DIAGNOSTIC_MATCH(
|
||||
channel_names=CHANNEL_INOVELLI,
|
||||
)
|
||||
class InovelliDoubleTapForMinBrightness(
|
||||
ZHASwitchConfigurationEntity, id_suffix="double_tap_down_for_min_brightness"
|
||||
class InovelliDoubleTapDownEnabled(
|
||||
ZHASwitchConfigurationEntity, id_suffix="double_tap_down_enabled"
|
||||
):
|
||||
"""Inovelli double tap down for minimum brightness control."""
|
||||
"""Inovelli double tap down enabled."""
|
||||
|
||||
_zcl_attribute: str = "double_tap_down_for_min_brightness"
|
||||
_attr_name: str = "Double tap minimum brightness"
|
||||
_zcl_attribute: str = "double_tap_down_enabled"
|
||||
_attr_name: str = "Double tap down enabled"
|
||||
|
||||
|
||||
@CONFIG_DIAGNOSTIC_MATCH(
|
||||
channel_names=CHANNEL_INOVELLI,
|
||||
)
|
||||
class InovelliAuxSwitchScenes(
|
||||
ZHASwitchConfigurationEntity, id_suffix="aux_switch_scenes"
|
||||
):
|
||||
"""Inovelli unique aux switch scenes."""
|
||||
|
||||
_zcl_attribute: str = "aux_switch_scenes"
|
||||
_attr_name: str = "Aux switch scenes"
|
||||
|
||||
|
||||
@CONFIG_DIAGNOSTIC_MATCH(
|
||||
channel_names=CHANNEL_INOVELLI,
|
||||
)
|
||||
class InovelliBindingOffToOnSyncLevel(
|
||||
ZHASwitchConfigurationEntity, id_suffix="binding_off_to_on_sync_level"
|
||||
):
|
||||
"""Inovelli send move to level with on/off to bound devices."""
|
||||
|
||||
_zcl_attribute: str = "binding_off_to_on_sync_level"
|
||||
_attr_name: str = "Binding off to on sync level"
|
||||
|
||||
|
||||
@CONFIG_DIAGNOSTIC_MATCH(
|
||||
|
@ -8,7 +8,7 @@ from .backports.enum import StrEnum
|
||||
APPLICATION_NAME: Final = "HomeAssistant"
|
||||
MAJOR_VERSION: Final = 2023
|
||||
MINOR_VERSION: Final = 4
|
||||
PATCH_VERSION: Final = "2"
|
||||
PATCH_VERSION: Final = "3"
|
||||
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
|
||||
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
|
||||
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)
|
||||
|
@ -25,7 +25,7 @@ ha-av==10.0.0
|
||||
hass-nabucasa==0.63.1
|
||||
hassil==1.0.6
|
||||
home-assistant-bluetooth==1.9.3
|
||||
home-assistant-frontend==20230406.1
|
||||
home-assistant-frontend==20230411.0
|
||||
home-assistant-intents==2023.3.29
|
||||
httpx==0.23.3
|
||||
ifaddr==0.1.7
|
||||
@ -46,7 +46,7 @@ requests==2.28.2
|
||||
scapy==2.5.0
|
||||
sqlalchemy==2.0.7
|
||||
typing-extensions>=4.5.0,<5.0
|
||||
ulid-transform==0.6.0
|
||||
ulid-transform==0.6.3
|
||||
voluptuous-serialize==2.6.0
|
||||
voluptuous==0.13.1
|
||||
yarl==1.8.1
|
||||
|
@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "homeassistant"
|
||||
version = "2023.4.2"
|
||||
version = "2023.4.3"
|
||||
license = {text = "Apache-2.0"}
|
||||
description = "Open-source home automation platform running on Python 3."
|
||||
readme = "README.rst"
|
||||
@ -50,7 +50,7 @@ dependencies = [
|
||||
"pyyaml==6.0",
|
||||
"requests==2.28.2",
|
||||
"typing-extensions>=4.5.0,<5.0",
|
||||
"ulid-transform==0.6.0",
|
||||
"ulid-transform==0.6.3",
|
||||
"voluptuous==0.13.1",
|
||||
"voluptuous-serialize==2.6.0",
|
||||
"yarl==1.8.1",
|
||||
|
@ -24,7 +24,7 @@ python-slugify==4.0.1
|
||||
pyyaml==6.0
|
||||
requests==2.28.2
|
||||
typing-extensions>=4.5.0,<5.0
|
||||
ulid-transform==0.6.0
|
||||
ulid-transform==0.6.3
|
||||
voluptuous==0.13.1
|
||||
voluptuous-serialize==2.6.0
|
||||
yarl==1.8.1
|
||||
|
@ -246,7 +246,7 @@ aiopvpc==4.1.0
|
||||
# homeassistant.components.lidarr
|
||||
# homeassistant.components.radarr
|
||||
# homeassistant.components.sonarr
|
||||
aiopyarr==22.11.0
|
||||
aiopyarr==23.4.0
|
||||
|
||||
# homeassistant.components.qnap_qsw
|
||||
aioqsw==0.3.2
|
||||
@ -422,7 +422,7 @@ beautifulsoup4==4.11.1
|
||||
# beewi_smartclim==0.0.10
|
||||
|
||||
# homeassistant.components.zha
|
||||
bellows==0.35.0
|
||||
bellows==0.35.1
|
||||
|
||||
# homeassistant.components.bmw_connected_drive
|
||||
bimmer_connected==0.13.0
|
||||
@ -661,7 +661,7 @@ enocean==0.50
|
||||
enturclient==0.2.4
|
||||
|
||||
# homeassistant.components.environment_canada
|
||||
env_canada==0.5.31
|
||||
env_canada==0.5.32
|
||||
|
||||
# homeassistant.components.enphase_envoy
|
||||
envoy_reader==0.20.1
|
||||
@ -907,7 +907,7 @@ hole==0.8.0
|
||||
holidays==0.21.13
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20230406.1
|
||||
home-assistant-frontend==20230411.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2023.3.29
|
||||
@ -2127,7 +2127,7 @@ python_opendata_transport==0.3.0
|
||||
pythonegardia==1.0.40
|
||||
|
||||
# homeassistant.components.tile
|
||||
pytile==2022.02.0
|
||||
pytile==2023.04.0
|
||||
|
||||
# homeassistant.components.tomorrowio
|
||||
pytomorrowio==0.3.5
|
||||
@ -2698,7 +2698,7 @@ zeroconf==0.56.0
|
||||
zeversolar==0.3.1
|
||||
|
||||
# homeassistant.components.zha
|
||||
zha-quirks==0.0.96
|
||||
zha-quirks==0.0.97
|
||||
|
||||
# homeassistant.components.zhong_hong
|
||||
zhong_hong_hvac==1.0.9
|
||||
@ -2719,7 +2719,7 @@ zigpy-zigate==0.10.3
|
||||
zigpy-znp==0.10.0
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy==0.54.0
|
||||
zigpy==0.54.1
|
||||
|
||||
# homeassistant.components.zoneminder
|
||||
zm-py==0.5.2
|
||||
|
@ -8,7 +8,6 @@
|
||||
-c homeassistant/package_constraints.txt
|
||||
-r requirements_test_pre_commit.txt
|
||||
astroid==2.15.0
|
||||
codecov==2.1.12
|
||||
coverage==7.2.1
|
||||
freezegun==1.2.2
|
||||
mock-open==1.4.0
|
||||
|
@ -227,7 +227,7 @@ aiopvpc==4.1.0
|
||||
# homeassistant.components.lidarr
|
||||
# homeassistant.components.radarr
|
||||
# homeassistant.components.sonarr
|
||||
aiopyarr==22.11.0
|
||||
aiopyarr==23.4.0
|
||||
|
||||
# homeassistant.components.qnap_qsw
|
||||
aioqsw==0.3.2
|
||||
@ -355,7 +355,7 @@ base36==0.1.1
|
||||
beautifulsoup4==4.11.1
|
||||
|
||||
# homeassistant.components.zha
|
||||
bellows==0.35.0
|
||||
bellows==0.35.1
|
||||
|
||||
# homeassistant.components.bmw_connected_drive
|
||||
bimmer_connected==0.13.0
|
||||
@ -517,7 +517,7 @@ energyzero==0.4.1
|
||||
enocean==0.50
|
||||
|
||||
# homeassistant.components.environment_canada
|
||||
env_canada==0.5.31
|
||||
env_canada==0.5.32
|
||||
|
||||
# homeassistant.components.enphase_envoy
|
||||
envoy_reader==0.20.1
|
||||
@ -693,7 +693,7 @@ hole==0.8.0
|
||||
holidays==0.21.13
|
||||
|
||||
# homeassistant.components.frontend
|
||||
home-assistant-frontend==20230406.1
|
||||
home-assistant-frontend==20230411.0
|
||||
|
||||
# homeassistant.components.conversation
|
||||
home-assistant-intents==2023.3.29
|
||||
@ -1520,7 +1520,7 @@ python-telegram-bot==13.1
|
||||
python_awair==0.2.4
|
||||
|
||||
# homeassistant.components.tile
|
||||
pytile==2022.02.0
|
||||
pytile==2023.04.0
|
||||
|
||||
# homeassistant.components.tomorrowio
|
||||
pytomorrowio==0.3.5
|
||||
@ -1932,7 +1932,7 @@ zeroconf==0.56.0
|
||||
zeversolar==0.3.1
|
||||
|
||||
# homeassistant.components.zha
|
||||
zha-quirks==0.0.96
|
||||
zha-quirks==0.0.97
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy-deconz==0.20.0
|
||||
@ -1947,7 +1947,7 @@ zigpy-zigate==0.10.3
|
||||
zigpy-znp==0.10.0
|
||||
|
||||
# homeassistant.components.zha
|
||||
zigpy==0.54.0
|
||||
zigpy==0.54.1
|
||||
|
||||
# homeassistant.components.zwave_js
|
||||
zwave-js-server-python==0.47.3
|
||||
|
@ -254,6 +254,32 @@ DTEND;TZID=Europe/London:20221127T003000
|
||||
SUMMARY:Event with a provided Timezone
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
""",
|
||||
"""BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
PRODID:-//Global Corp.//CalDAV Client//EN
|
||||
BEGIN:VEVENT
|
||||
UID:16
|
||||
DTSTAMP:20171125T000000Z
|
||||
DTSTART:20171127
|
||||
DTEND:20171128
|
||||
SUMMARY:All day event with same start and end
|
||||
LOCATION:Hamburg
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
""",
|
||||
"""BEGIN:VCALENDAR
|
||||
VERSION:2.0
|
||||
PRODID:-//Global Corp.//CalDAV Client//EN
|
||||
BEGIN:VEVENT
|
||||
UID:17
|
||||
DTSTAMP:20171125T000000Z
|
||||
DTSTART:20171127T010000
|
||||
DTEND:20171127T010000
|
||||
SUMMARY:Event with no duration
|
||||
LOCATION:Hamburg
|
||||
END:VEVENT
|
||||
END:VCALENDAR
|
||||
""",
|
||||
]
|
||||
|
||||
@ -1001,7 +1027,7 @@ async def test_get_events(hass: HomeAssistant, calendar, get_api_events) -> None
|
||||
await hass.async_block_till_done()
|
||||
|
||||
events = await get_api_events("calendar.private")
|
||||
assert len(events) == 16
|
||||
assert len(events) == 18
|
||||
assert calendar.call
|
||||
|
||||
|
||||
|
@ -324,7 +324,7 @@ async def test_unsupported_create_event_service(hass: HomeAssistant) -> None:
|
||||
"end_date_time": "2022-04-01T06:00:00",
|
||||
},
|
||||
vol.error.MultipleInvalid,
|
||||
"Expected positive event duration",
|
||||
"Expected minimum event duration",
|
||||
),
|
||||
(
|
||||
{
|
||||
@ -332,7 +332,7 @@ async def test_unsupported_create_event_service(hass: HomeAssistant) -> None:
|
||||
"end_date": "2022-04-01",
|
||||
},
|
||||
vol.error.MultipleInvalid,
|
||||
"Expected positive event duration",
|
||||
"Expected minimum event duration",
|
||||
),
|
||||
(
|
||||
{
|
||||
@ -340,7 +340,7 @@ async def test_unsupported_create_event_service(hass: HomeAssistant) -> None:
|
||||
"end_date": "2022-04-01",
|
||||
},
|
||||
vol.error.MultipleInvalid,
|
||||
"Expected positive event duration",
|
||||
"Expected minimum event duration",
|
||||
),
|
||||
],
|
||||
ids=[
|
||||
|
@ -147,7 +147,7 @@ async def test_http_processing_intent_target_ha_agent(
|
||||
}
|
||||
|
||||
|
||||
async def test_http_processing_intent_entity_added(
|
||||
async def test_http_processing_intent_entity_added_removed(
|
||||
hass: HomeAssistant,
|
||||
init_components,
|
||||
hass_client: ClientSessionGenerator,
|
||||
@ -197,7 +197,7 @@ async def test_http_processing_intent_entity_added(
|
||||
"conversation_id": None,
|
||||
}
|
||||
|
||||
# Add an alias
|
||||
# Add an entity
|
||||
entity_registry.async_get_or_create(
|
||||
"light", "demo", "5678", suggested_object_id="late"
|
||||
)
|
||||
@ -293,6 +293,288 @@ async def test_http_processing_intent_entity_added(
|
||||
}
|
||||
|
||||
|
||||
async def test_http_processing_intent_alias_added_removed(
|
||||
hass: HomeAssistant,
|
||||
init_components,
|
||||
hass_client: ClientSessionGenerator,
|
||||
hass_admin_user: MockUser,
|
||||
entity_registry: er.EntityRegistry,
|
||||
) -> None:
|
||||
"""Test processing intent via HTTP API with aliases added later.
|
||||
|
||||
We want to ensure that adding an alias later busts the cache
|
||||
so that the new alias is available.
|
||||
"""
|
||||
entity_registry.async_get_or_create(
|
||||
"light", "demo", "1234", suggested_object_id="kitchen"
|
||||
)
|
||||
hass.states.async_set("light.kitchen", "off", {"friendly_name": "kitchen light"})
|
||||
|
||||
calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on")
|
||||
client = await hass_client()
|
||||
resp = await client.post(
|
||||
"/api/conversation/process", json={"text": "turn on kitchen light"}
|
||||
)
|
||||
|
||||
assert resp.status == HTTPStatus.OK
|
||||
assert len(calls) == 1
|
||||
data = await resp.json()
|
||||
|
||||
assert data == {
|
||||
"response": {
|
||||
"response_type": "action_done",
|
||||
"card": {},
|
||||
"speech": {
|
||||
"plain": {
|
||||
"extra_data": None,
|
||||
"speech": "Turned on light",
|
||||
}
|
||||
},
|
||||
"language": hass.config.language,
|
||||
"data": {
|
||||
"targets": [],
|
||||
"success": [
|
||||
{"id": "light.kitchen", "name": "kitchen light", "type": "entity"}
|
||||
],
|
||||
"failed": [],
|
||||
},
|
||||
},
|
||||
"conversation_id": None,
|
||||
}
|
||||
|
||||
# Add an alias
|
||||
entity_registry.async_update_entity("light.kitchen", aliases={"late added alias"})
|
||||
|
||||
client = await hass_client()
|
||||
resp = await client.post(
|
||||
"/api/conversation/process", json={"text": "turn on late added alias"}
|
||||
)
|
||||
|
||||
assert resp.status == HTTPStatus.OK
|
||||
data = await resp.json()
|
||||
|
||||
assert data == {
|
||||
"response": {
|
||||
"response_type": "action_done",
|
||||
"card": {},
|
||||
"speech": {
|
||||
"plain": {
|
||||
"extra_data": None,
|
||||
"speech": "Turned on light",
|
||||
}
|
||||
},
|
||||
"language": hass.config.language,
|
||||
"data": {
|
||||
"targets": [],
|
||||
"success": [
|
||||
{"id": "light.kitchen", "name": "kitchen light", "type": "entity"}
|
||||
],
|
||||
"failed": [],
|
||||
},
|
||||
},
|
||||
"conversation_id": None,
|
||||
}
|
||||
|
||||
# Now remove the alieas
|
||||
entity_registry.async_update_entity("light.kitchen", aliases={})
|
||||
|
||||
client = await hass_client()
|
||||
resp = await client.post(
|
||||
"/api/conversation/process", json={"text": "turn on late added alias"}
|
||||
)
|
||||
|
||||
assert resp.status == HTTPStatus.OK
|
||||
data = await resp.json()
|
||||
assert data == {
|
||||
"conversation_id": None,
|
||||
"response": {
|
||||
"card": {},
|
||||
"data": {"code": "no_intent_match"},
|
||||
"language": hass.config.language,
|
||||
"response_type": "error",
|
||||
"speech": {
|
||||
"plain": {
|
||||
"extra_data": None,
|
||||
"speech": "Sorry, I couldn't understand that",
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
async def test_http_processing_intent_entity_renamed(
|
||||
hass: HomeAssistant,
|
||||
init_components,
|
||||
hass_client: ClientSessionGenerator,
|
||||
hass_admin_user: MockUser,
|
||||
entity_registry: er.EntityRegistry,
|
||||
enable_custom_integrations: None,
|
||||
) -> None:
|
||||
"""Test processing intent via HTTP API with entities renamed later.
|
||||
|
||||
We want to ensure that renaming an entity later busts the cache
|
||||
so that the new name is used.
|
||||
"""
|
||||
platform = getattr(hass.components, "test.light")
|
||||
platform.init(empty=True)
|
||||
|
||||
entity = platform.MockLight("kitchen light", "on")
|
||||
entity._attr_unique_id = "1234"
|
||||
entity.entity_id = "light.kitchen"
|
||||
platform.ENTITIES.append(entity)
|
||||
assert await async_setup_component(
|
||||
hass,
|
||||
LIGHT_DOMAIN,
|
||||
{LIGHT_DOMAIN: [{"platform": "test"}]},
|
||||
)
|
||||
|
||||
calls = async_mock_service(hass, LIGHT_DOMAIN, "turn_on")
|
||||
client = await hass_client()
|
||||
resp = await client.post(
|
||||
"/api/conversation/process", json={"text": "turn on kitchen light"}
|
||||
)
|
||||
|
||||
assert resp.status == HTTPStatus.OK
|
||||
assert len(calls) == 1
|
||||
data = await resp.json()
|
||||
|
||||
assert data == {
|
||||
"response": {
|
||||
"response_type": "action_done",
|
||||
"card": {},
|
||||
"speech": {
|
||||
"plain": {
|
||||
"extra_data": None,
|
||||
"speech": "Turned on light",
|
||||
}
|
||||
},
|
||||
"language": hass.config.language,
|
||||
"data": {
|
||||
"targets": [],
|
||||
"success": [
|
||||
{"id": "light.kitchen", "name": "kitchen light", "type": "entity"}
|
||||
],
|
||||
"failed": [],
|
||||
},
|
||||
},
|
||||
"conversation_id": None,
|
||||
}
|
||||
|
||||
# Rename the entity
|
||||
entity_registry.async_update_entity("light.kitchen", name="renamed light")
|
||||
await hass.async_block_till_done()
|
||||
|
||||
client = await hass_client()
|
||||
resp = await client.post(
|
||||
"/api/conversation/process", json={"text": "turn on renamed light"}
|
||||
)
|
||||
|
||||
assert resp.status == HTTPStatus.OK
|
||||
data = await resp.json()
|
||||
|
||||
assert data == {
|
||||
"response": {
|
||||
"response_type": "action_done",
|
||||
"card": {},
|
||||
"speech": {
|
||||
"plain": {
|
||||
"extra_data": None,
|
||||
"speech": "Turned on light",
|
||||
}
|
||||
},
|
||||
"language": hass.config.language,
|
||||
"data": {
|
||||
"targets": [],
|
||||
"success": [
|
||||
{"id": "light.kitchen", "name": "renamed light", "type": "entity"}
|
||||
],
|
||||
"failed": [],
|
||||
},
|
||||
},
|
||||
"conversation_id": None,
|
||||
}
|
||||
|
||||
client = await hass_client()
|
||||
resp = await client.post(
|
||||
"/api/conversation/process", json={"text": "turn on kitchen light"}
|
||||
)
|
||||
|
||||
assert resp.status == HTTPStatus.OK
|
||||
data = await resp.json()
|
||||
assert data == {
|
||||
"conversation_id": None,
|
||||
"response": {
|
||||
"card": {},
|
||||
"data": {"code": "no_intent_match"},
|
||||
"language": hass.config.language,
|
||||
"response_type": "error",
|
||||
"speech": {
|
||||
"plain": {
|
||||
"extra_data": None,
|
||||
"speech": "Sorry, I couldn't understand that",
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
# Now clear the custom name
|
||||
entity_registry.async_update_entity("light.kitchen", name=None)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
client = await hass_client()
|
||||
resp = await client.post(
|
||||
"/api/conversation/process", json={"text": "turn on kitchen light"}
|
||||
)
|
||||
|
||||
assert resp.status == HTTPStatus.OK
|
||||
data = await resp.json()
|
||||
|
||||
assert data == {
|
||||
"response": {
|
||||
"response_type": "action_done",
|
||||
"card": {},
|
||||
"speech": {
|
||||
"plain": {
|
||||
"extra_data": None,
|
||||
"speech": "Turned on light",
|
||||
}
|
||||
},
|
||||
"language": hass.config.language,
|
||||
"data": {
|
||||
"targets": [],
|
||||
"success": [
|
||||
{"id": "light.kitchen", "name": "kitchen light", "type": "entity"}
|
||||
],
|
||||
"failed": [],
|
||||
},
|
||||
},
|
||||
"conversation_id": None,
|
||||
}
|
||||
|
||||
client = await hass_client()
|
||||
resp = await client.post(
|
||||
"/api/conversation/process", json={"text": "turn on renamed light"}
|
||||
)
|
||||
|
||||
assert resp.status == HTTPStatus.OK
|
||||
data = await resp.json()
|
||||
assert data == {
|
||||
"conversation_id": None,
|
||||
"response": {
|
||||
"card": {},
|
||||
"data": {"code": "no_intent_match"},
|
||||
"language": hass.config.language,
|
||||
"response_type": "error",
|
||||
"speech": {
|
||||
"plain": {
|
||||
"extra_data": None,
|
||||
"speech": "Sorry, I couldn't understand that",
|
||||
}
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize("agent_id", AGENT_ID_OPTIONS)
|
||||
@pytest.mark.parametrize("sentence", ("turn on kitchen", "turn kitchen on"))
|
||||
async def test_turn_on_intent(
|
||||
|
@ -31,7 +31,7 @@ async def test_form(hass: HomeAssistant, aioclient_mock_fixture) -> None:
|
||||
)
|
||||
|
||||
assert result2["type"] == "create_entry"
|
||||
assert result2["title"] == "Home"
|
||||
assert result2["title"] == TEST_USER_ID
|
||||
assert result2["data"] == {"username": TEST_USER_ID, "password": TEST_PASSWORD}
|
||||
await hass.async_block_till_done()
|
||||
assert len(mock_setup_entry.mock_calls) == 1
|
||||
|
@ -1238,3 +1238,60 @@ async def test_reader_in_progress_event(
|
||||
"location": event["location"],
|
||||
"description": event["description"],
|
||||
}
|
||||
|
||||
|
||||
async def test_all_day_event_without_duration(
|
||||
hass: HomeAssistant, mock_events_list_items, component_setup
|
||||
) -> None:
|
||||
"""Test that an all day event without a duration is adjusted to have a duration of one day."""
|
||||
week_from_today = dt_util.now().date() + datetime.timedelta(days=7)
|
||||
event = {
|
||||
**TEST_EVENT,
|
||||
"start": {"date": week_from_today.isoformat()},
|
||||
"end": {"date": week_from_today.isoformat()},
|
||||
}
|
||||
mock_events_list_items([event])
|
||||
|
||||
assert await component_setup()
|
||||
|
||||
expected_end_event = week_from_today + datetime.timedelta(days=1)
|
||||
|
||||
state = hass.states.get(TEST_ENTITY)
|
||||
assert state.name == TEST_ENTITY_NAME
|
||||
assert state.state == STATE_OFF
|
||||
assert dict(state.attributes) == {
|
||||
"friendly_name": TEST_ENTITY_NAME,
|
||||
"message": event["summary"],
|
||||
"all_day": True,
|
||||
"offset_reached": False,
|
||||
"start_time": week_from_today.strftime(DATE_STR_FORMAT),
|
||||
"end_time": expected_end_event.strftime(DATE_STR_FORMAT),
|
||||
"location": event["location"],
|
||||
"description": event["description"],
|
||||
"supported_features": 3,
|
||||
}
|
||||
|
||||
|
||||
async def test_event_without_duration(
|
||||
hass: HomeAssistant, mock_events_list_items, component_setup
|
||||
) -> None:
|
||||
"""Google calendar UI allows creating events without a duration."""
|
||||
one_hour_from_now = dt_util.now() + datetime.timedelta(minutes=30)
|
||||
event = {
|
||||
**TEST_EVENT,
|
||||
"start": {"dateTime": one_hour_from_now.isoformat()},
|
||||
"end": {"dateTime": one_hour_from_now.isoformat()},
|
||||
}
|
||||
mock_events_list_items([event])
|
||||
|
||||
assert await component_setup()
|
||||
|
||||
state = hass.states.get(TEST_ENTITY)
|
||||
assert state.name == TEST_ENTITY_NAME
|
||||
assert state.state == STATE_OFF
|
||||
# Confirm the event is parsed successfully, but we don't assert on the
|
||||
# specific end date as the client library may adjust it
|
||||
assert state.attributes.get("message") == event["summary"]
|
||||
assert state.attributes.get("start_time") == one_hour_from_now.strftime(
|
||||
DATE_STR_FORMAT
|
||||
)
|
||||
|
@ -81,6 +81,23 @@ YAML_CONFIG_FULL_TABLE_SCAN_NO_UNIQUE_ID = {
|
||||
}
|
||||
}
|
||||
|
||||
YAML_CONFIG_FULL_TABLE_SCAN_WITH_MULTIPLE_COLUMNS = {
|
||||
"sql": {
|
||||
CONF_NAME: "Get entity_id",
|
||||
CONF_QUERY: "SELECT entity_id,state_id from states",
|
||||
CONF_COLUMN_NAME: "entity_id",
|
||||
}
|
||||
}
|
||||
|
||||
YAML_CONFIG_WITH_VIEW_THAT_CONTAINS_ENTITY_ID = {
|
||||
"sql": {
|
||||
CONF_NAME: "Get entity_id",
|
||||
CONF_QUERY: "SELECT value from view_sensor_db_unique_entity_ids;",
|
||||
CONF_COLUMN_NAME: "value",
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
YAML_CONFIG_BINARY = {
|
||||
"sql": {
|
||||
CONF_DB_URL: "sqlite://",
|
||||
|
@ -24,6 +24,8 @@ from . import (
|
||||
YAML_CONFIG_BINARY,
|
||||
YAML_CONFIG_FULL_TABLE_SCAN,
|
||||
YAML_CONFIG_FULL_TABLE_SCAN_NO_UNIQUE_ID,
|
||||
YAML_CONFIG_FULL_TABLE_SCAN_WITH_MULTIPLE_COLUMNS,
|
||||
YAML_CONFIG_WITH_VIEW_THAT_CONTAINS_ENTITY_ID,
|
||||
init_integration,
|
||||
)
|
||||
|
||||
@ -353,24 +355,105 @@ async def test_issue_when_using_old_query(
|
||||
assert issue.translation_placeholders == {"query": config[CONF_QUERY]}
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"yaml_config",
|
||||
[
|
||||
YAML_CONFIG_FULL_TABLE_SCAN_NO_UNIQUE_ID,
|
||||
YAML_CONFIG_FULL_TABLE_SCAN_WITH_MULTIPLE_COLUMNS,
|
||||
],
|
||||
)
|
||||
async def test_issue_when_using_old_query_without_unique_id(
|
||||
recorder_mock: Recorder, hass: HomeAssistant, caplog: pytest.LogCaptureFixture
|
||||
recorder_mock: Recorder,
|
||||
hass: HomeAssistant,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
yaml_config: dict[str, Any],
|
||||
) -> None:
|
||||
"""Test we create an issue for an old query that will do a full table scan."""
|
||||
|
||||
assert await async_setup_component(
|
||||
hass, DOMAIN, YAML_CONFIG_FULL_TABLE_SCAN_NO_UNIQUE_ID
|
||||
)
|
||||
assert await async_setup_component(hass, DOMAIN, yaml_config)
|
||||
await hass.async_block_till_done()
|
||||
assert "Query contains entity_id but does not reference states_meta" in caplog.text
|
||||
|
||||
assert not hass.states.async_all()
|
||||
issue_registry = ir.async_get(hass)
|
||||
|
||||
config = YAML_CONFIG_FULL_TABLE_SCAN_NO_UNIQUE_ID["sql"]
|
||||
config = yaml_config["sql"]
|
||||
query = config[CONF_QUERY]
|
||||
|
||||
issue = issue_registry.async_get_issue(
|
||||
DOMAIN, f"entity_id_query_does_full_table_scan_{query}"
|
||||
)
|
||||
assert issue.translation_placeholders == {"query": query}
|
||||
|
||||
|
||||
async def test_no_issue_when_view_has_the_text_entity_id_in_it(
|
||||
recorder_mock: Recorder, hass: HomeAssistant, caplog: pytest.LogCaptureFixture
|
||||
) -> None:
|
||||
"""Test we do not trigger the full table scan issue for a custom view."""
|
||||
|
||||
with patch(
|
||||
"homeassistant.components.sql.sensor.scoped_session",
|
||||
):
|
||||
await init_integration(
|
||||
hass, YAML_CONFIG_WITH_VIEW_THAT_CONTAINS_ENTITY_ID["sql"]
|
||||
)
|
||||
async_fire_time_changed(
|
||||
hass,
|
||||
dt.utcnow() + timedelta(minutes=1),
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert (
|
||||
"Query contains entity_id but does not reference states_meta" not in caplog.text
|
||||
)
|
||||
assert hass.states.get("sensor.get_entity_id") is not None
|
||||
|
||||
|
||||
async def test_multiple_sensors_using_same_db(
|
||||
recorder_mock: Recorder, hass: HomeAssistant
|
||||
) -> None:
|
||||
"""Test multiple sensors using the same db."""
|
||||
config = {
|
||||
"db_url": "sqlite:///",
|
||||
"query": "SELECT 5 as value",
|
||||
"column": "value",
|
||||
"name": "Select value SQL query",
|
||||
}
|
||||
config2 = {
|
||||
"db_url": "sqlite:///",
|
||||
"query": "SELECT 5 as value",
|
||||
"column": "value",
|
||||
"name": "Select value SQL query 2",
|
||||
}
|
||||
await init_integration(hass, config)
|
||||
await init_integration(hass, config2, entry_id="2")
|
||||
|
||||
state = hass.states.get("sensor.select_value_sql_query")
|
||||
assert state.state == "5"
|
||||
assert state.attributes["value"] == 5
|
||||
|
||||
state = hass.states.get("sensor.select_value_sql_query_2")
|
||||
assert state.state == "5"
|
||||
assert state.attributes["value"] == 5
|
||||
|
||||
|
||||
async def test_engine_is_disposed_at_stop(
|
||||
recorder_mock: Recorder, hass: HomeAssistant
|
||||
) -> None:
|
||||
"""Test we dispose of the engine at stop."""
|
||||
config = {
|
||||
"db_url": "sqlite:///",
|
||||
"query": "SELECT 5 as value",
|
||||
"column": "value",
|
||||
"name": "Select value SQL query",
|
||||
}
|
||||
await init_integration(hass, config)
|
||||
|
||||
state = hass.states.get("sensor.select_value_sql_query")
|
||||
assert state.state == "5"
|
||||
assert state.attributes["value"] == 5
|
||||
|
||||
with patch("sqlalchemy.engine.base.Engine.dispose") as mock_engine_dispose:
|
||||
await hass.async_stop()
|
||||
|
||||
assert mock_engine_dispose.call_count == 2
|
||||
|
@ -534,7 +534,143 @@ async def test_entity_name(
|
||||
assert entity_entry
|
||||
assert entity_entry.device_id == switch_entity_entry.device_id
|
||||
assert entity_entry.has_entity_name is True
|
||||
assert entity_entry.name is None
|
||||
assert entity_entry.original_name is None
|
||||
assert entity_entry.options == {
|
||||
DOMAIN: {"entity_id": switch_entity_entry.entity_id}
|
||||
}
|
||||
|
||||
|
||||
@pytest.mark.parametrize("target_domain", PLATFORMS_TO_TEST)
async def test_custom_name_1(
    hass: HomeAssistant,
    target_domain: Platform,
) -> None:
    """Test the source entity has a custom name."""
    registry = er.async_get(hass)
    device_registry = dr.async_get(hass)

    switch_config_entry = MockConfigEntry()

    # Create a device so the wrapped entity (and therefore the switch_as_x
    # entity) is attached to it.
    device_entry = device_registry.async_get_or_create(
        config_entry_id=switch_config_entry.entry_id,
        connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
        name="Device name",
    )

    # Register the source switch with both an original (integration-provided)
    # name and a user-defined custom name.
    switch_entity_entry = registry.async_get_or_create(
        "switch",
        "test",
        "unique",
        device_id=device_entry.id,
        has_entity_name=True,
        original_name="Original entity name",
    )
    switch_entity_entry = registry.async_update_entity(
        switch_entity_entry.entity_id,
        config_entry_id=switch_config_entry.entry_id,
        name="Custom entity name",
    )

    # Add the config entry
    switch_as_x_config_entry = MockConfigEntry(
        data={},
        domain=DOMAIN,
        options={
            CONF_ENTITY_ID: switch_entity_entry.id,
            CONF_TARGET_DOMAIN: target_domain,
        },
        title="ABC",
    )
    switch_as_x_config_entry.add_to_hass(hass)

    assert await hass.config_entries.async_setup(switch_as_x_config_entry.entry_id)
    await hass.async_block_till_done()

    # The switch_as_x entity mirrors the source: same device, entity naming
    # enabled, and both the custom and original names copied over.
    entity_entry = registry.async_get(
        f"{target_domain}.device_name_original_entity_name"
    )
    assert entity_entry
    assert entity_entry.device_id == switch_entity_entry.device_id
    assert entity_entry.has_entity_name is True
    assert entity_entry.name == "Custom entity name"
    assert entity_entry.original_name == "Original entity name"
    assert entity_entry.options == {
        DOMAIN: {"entity_id": switch_entity_entry.entity_id}
    }
|
||||
|
||||
|
||||
@pytest.mark.parametrize("target_domain", PLATFORMS_TO_TEST)
async def test_custom_name_2(
    hass: HomeAssistant,
    target_domain: Platform,
) -> None:
    """Test the source entity has a custom name.

    This tests the custom name is only copied from the source device when the config
    switch_as_x config entry is setup the first time.
    """
    registry = er.async_get(hass)
    device_registry = dr.async_get(hass)

    switch_config_entry = MockConfigEntry()

    # Create a device for the wrapped entity to belong to.
    device_entry = device_registry.async_get_or_create(
        config_entry_id=switch_config_entry.entry_id,
        connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
        name="Device name",
    )

    # Source switch with an original name and a *newer* custom name than the
    # one already stored on the switch_as_x entity below.
    switch_entity_entry = registry.async_get_or_create(
        "switch",
        "test",
        "unique",
        device_id=device_entry.id,
        has_entity_name=True,
        original_name="Original entity name",
    )
    switch_entity_entry = registry.async_update_entity(
        switch_entity_entry.entity_id,
        config_entry_id=switch_config_entry.entry_id,
        name="New custom entity name",
    )

    # Add the config entry
    switch_as_x_config_entry = MockConfigEntry(
        data={},
        domain=DOMAIN,
        options={
            CONF_ENTITY_ID: switch_entity_entry.id,
            CONF_TARGET_DOMAIN: target_domain,
        },
        title="ABC",
    )
    switch_as_x_config_entry.add_to_hass(hass)

    # Pre-register the switch_as_x entity with an older custom name, simulating
    # a config entry that was already set up once before.
    switch_as_x_entity_entry = registry.async_get_or_create(
        target_domain,
        "switch_as_x",
        switch_as_x_config_entry.entry_id,
        suggested_object_id="device_name_original_entity_name",
    )
    switch_as_x_entity_entry = registry.async_update_entity(
        switch_as_x_entity_entry.entity_id,
        config_entry_id=switch_config_entry.entry_id,
        name="Old custom entity name",
    )

    assert await hass.config_entries.async_setup(switch_as_x_config_entry.entry_id)
    await hass.async_block_till_done()

    # The pre-existing custom name must be preserved — the source's newer
    # custom name is NOT copied on a repeat setup.
    entity_entry = registry.async_get(
        f"{target_domain}.device_name_original_entity_name"
    )
    assert entity_entry
    assert entity_entry.entity_id == switch_as_x_entity_entry.entity_id
    assert entity_entry.device_id == switch_entity_entry.device_id
    assert entity_entry.has_entity_name is True
    assert entity_entry.name == "Old custom entity name"
    assert entity_entry.original_name == "Original entity name"
    assert entity_entry.options == {
        DOMAIN: {"entity_id": switch_entity_entry.entity_id}
    }
|
||||
|
@ -217,20 +217,19 @@ async def test_state(hass: HomeAssistant, yaml_config, config_entry_config) -> N
|
||||
entity_id, "*", {ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR}
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
state = hass.states.get("sensor.energy_bill_midpeak")
|
||||
state = hass.states.get("sensor.energy_bill_offpeak")
|
||||
assert state is not None
|
||||
assert state.state == "0.123"
|
||||
assert state.state == "3"
|
||||
|
||||
# test unavailable source
|
||||
hass.states.async_set(
|
||||
entity_id,
|
||||
STATE_UNAVAILABLE,
|
||||
{ATTR_UNIT_OF_MEASUREMENT: UnitOfEnergy.KILO_WATT_HOUR},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
state = hass.states.get("sensor.energy_bill_midpeak")
|
||||
state = hass.states.get("sensor.energy_bill_offpeak")
|
||||
assert state is not None
|
||||
assert state.state == "0.123"
|
||||
assert state.state == "unavailable"
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
|
Loading…
x
Reference in New Issue
Block a user