commit 9962e9b67e
Author: Paulus Schoutsen
Date:   2023-02-07 20:21:44 -05:00 (committed by GitHub)
48 changed files with 602 additions and 298 deletions


@@ -3,7 +3,7 @@
   "name": "Abode",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/abode",
-  "requirements": ["jaraco.abode==3.2.1"],
+  "requirements": ["jaraco.abode==3.3.0"],
   "codeowners": ["@shred86"],
   "homekit": {
     "models": ["Abode", "Iota"]


@@ -9,7 +9,7 @@
       "connectable": false
     }
   ],
-  "requirements": ["bluemaestro-ble==0.2.1"],
+  "requirements": ["bluemaestro-ble==0.2.3"],
   "dependencies": ["bluetooth_adapters"],
   "codeowners": ["@bdraco"],
   "iot_class": "local_push"


@@ -2,7 +2,7 @@
   "domain": "bmw_connected_drive",
   "name": "BMW Connected Drive",
   "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
-  "requirements": ["bimmer_connected==0.12.0"],
+  "requirements": ["bimmer_connected==0.12.1"],
   "codeowners": ["@gerard33", "@rikroe"],
   "config_flow": true,
   "iot_class": "cloud_polling",


@@ -634,77 +634,87 @@ def get_entity_state_dict(config: Config, entity: State) -> dict[str, Any]:
         # Remove the now stale cached entry.
         config.cached_states.pop(entity.entity_id)
 
-    data: dict[str, Any] = {
-        STATE_ON: False,
-        STATE_BRIGHTNESS: None,
-        STATE_HUE: None,
-        STATE_SATURATION: None,
-        STATE_COLOR_TEMP: None,
-    }
-
-    if cached_state is None:
-        data[STATE_ON] = entity.state != STATE_OFF
-
-        if data[STATE_ON]:
-            data[STATE_BRIGHTNESS] = hass_to_hue_brightness(
-                entity.attributes.get(ATTR_BRIGHTNESS, 0)
-            )
-            hue_sat = entity.attributes.get(ATTR_HS_COLOR)
-            if hue_sat is not None:
-                hue = hue_sat[0]
-                sat = hue_sat[1]
-                # Convert hass hs values back to hue hs values
-                data[STATE_HUE] = int((hue / 360.0) * HUE_API_STATE_HUE_MAX)
-                data[STATE_SATURATION] = int((sat / 100.0) * HUE_API_STATE_SAT_MAX)
-            else:
-                data[STATE_HUE] = HUE_API_STATE_HUE_MIN
-                data[STATE_SATURATION] = HUE_API_STATE_SAT_MIN
-            data[STATE_COLOR_TEMP] = entity.attributes.get(ATTR_COLOR_TEMP, 0)
-        else:
-            data[STATE_BRIGHTNESS] = 0
-            data[STATE_HUE] = 0
-            data[STATE_SATURATION] = 0
-            data[STATE_COLOR_TEMP] = 0
-
-        if entity.domain == climate.DOMAIN:
-            temperature = entity.attributes.get(ATTR_TEMPERATURE, 0)
-            # Convert 0-100 to 0-254
-            data[STATE_BRIGHTNESS] = round(temperature * HUE_API_STATE_BRI_MAX / 100)
-        elif entity.domain == humidifier.DOMAIN:
-            humidity = entity.attributes.get(ATTR_HUMIDITY, 0)
-            # Convert 0-100 to 0-254
-            data[STATE_BRIGHTNESS] = round(humidity * HUE_API_STATE_BRI_MAX / 100)
-        elif entity.domain == media_player.DOMAIN:
-            level = entity.attributes.get(
-                ATTR_MEDIA_VOLUME_LEVEL, 1.0 if data[STATE_ON] else 0.0
-            )
-            # Convert 0.0-1.0 to 0-254
-            data[STATE_BRIGHTNESS] = round(min(1.0, level) * HUE_API_STATE_BRI_MAX)
-        elif entity.domain == fan.DOMAIN:
-            percentage = entity.attributes.get(ATTR_PERCENTAGE) or 0
-            # Convert 0-100 to 0-254
-            data[STATE_BRIGHTNESS] = round(percentage * HUE_API_STATE_BRI_MAX / 100)
-        elif entity.domain == cover.DOMAIN:
-            level = entity.attributes.get(ATTR_CURRENT_POSITION, 0)
-            data[STATE_BRIGHTNESS] = round(level / 100 * HUE_API_STATE_BRI_MAX)
-    else:
-        data = cached_state
-
-    # Make sure brightness is valid
-    if data[STATE_BRIGHTNESS] is None:
-        data[STATE_BRIGHTNESS] = HUE_API_STATE_BRI_MAX if data[STATE_ON] else 0
-
-    # Make sure hue/saturation are valid
-    if (data[STATE_HUE] is None) or (data[STATE_SATURATION] is None):
-        data[STATE_HUE] = 0
-        data[STATE_SATURATION] = 0
-
-    # If the light is off, set the color to off
-    if data[STATE_BRIGHTNESS] == 0:
-        data[STATE_HUE] = 0
-        data[STATE_SATURATION] = 0
-
-    # Clamp brightness, hue, saturation, and color temp to valid values
+    if cached_state is None:
+        return _build_entity_state_dict(entity)
+
+    data: dict[str, Any] = cached_state
+    # Make sure brightness is valid
+    if data[STATE_BRIGHTNESS] is None:
+        data[STATE_BRIGHTNESS] = HUE_API_STATE_BRI_MAX if data[STATE_ON] else 0
+
+    # Make sure hue/saturation are valid
+    if (data[STATE_HUE] is None) or (data[STATE_SATURATION] is None):
+        data[STATE_HUE] = 0
+        data[STATE_SATURATION] = 0
+
+    # If the light is off, set the color to off
+    if data[STATE_BRIGHTNESS] == 0:
+        data[STATE_HUE] = 0
+        data[STATE_SATURATION] = 0
+
+    _clamp_values(data)
+    return data
+
+
+@lru_cache(maxsize=512)
+def _build_entity_state_dict(entity: State) -> dict[str, Any]:
+    """Build a state dict for an entity."""
+    data: dict[str, Any] = {
+        STATE_ON: entity.state != STATE_OFF,
+        STATE_BRIGHTNESS: None,
+        STATE_HUE: None,
+        STATE_SATURATION: None,
+        STATE_COLOR_TEMP: None,
+    }
+    if data[STATE_ON]:
+        data[STATE_BRIGHTNESS] = hass_to_hue_brightness(
+            entity.attributes.get(ATTR_BRIGHTNESS, 0)
+        )
+        hue_sat = entity.attributes.get(ATTR_HS_COLOR)
+        if hue_sat is not None:
+            hue = hue_sat[0]
+            sat = hue_sat[1]
+            # Convert hass hs values back to hue hs values
+            data[STATE_HUE] = int((hue / 360.0) * HUE_API_STATE_HUE_MAX)
+            data[STATE_SATURATION] = int((sat / 100.0) * HUE_API_STATE_SAT_MAX)
+        else:
+            data[STATE_HUE] = HUE_API_STATE_HUE_MIN
+            data[STATE_SATURATION] = HUE_API_STATE_SAT_MIN
+        data[STATE_COLOR_TEMP] = entity.attributes.get(ATTR_COLOR_TEMP, 0)
+    else:
+        data[STATE_BRIGHTNESS] = 0
+        data[STATE_HUE] = 0
+        data[STATE_SATURATION] = 0
+        data[STATE_COLOR_TEMP] = 0
+
+    if entity.domain == climate.DOMAIN:
+        temperature = entity.attributes.get(ATTR_TEMPERATURE, 0)
+        # Convert 0-100 to 0-254
+        data[STATE_BRIGHTNESS] = round(temperature * HUE_API_STATE_BRI_MAX / 100)
+    elif entity.domain == humidifier.DOMAIN:
+        humidity = entity.attributes.get(ATTR_HUMIDITY, 0)
+        # Convert 0-100 to 0-254
+        data[STATE_BRIGHTNESS] = round(humidity * HUE_API_STATE_BRI_MAX / 100)
+    elif entity.domain == media_player.DOMAIN:
+        level = entity.attributes.get(
+            ATTR_MEDIA_VOLUME_LEVEL, 1.0 if data[STATE_ON] else 0.0
+        )
+        # Convert 0.0-1.0 to 0-254
+        data[STATE_BRIGHTNESS] = round(min(1.0, level) * HUE_API_STATE_BRI_MAX)
+    elif entity.domain == fan.DOMAIN:
+        percentage = entity.attributes.get(ATTR_PERCENTAGE) or 0
+        # Convert 0-100 to 0-254
+        data[STATE_BRIGHTNESS] = round(percentage * HUE_API_STATE_BRI_MAX / 100)
+    elif entity.domain == cover.DOMAIN:
+        level = entity.attributes.get(ATTR_CURRENT_POSITION, 0)
+        data[STATE_BRIGHTNESS] = round(level / 100 * HUE_API_STATE_BRI_MAX)
+
+    _clamp_values(data)
+    return data
+
+
+def _clamp_values(data: dict[str, Any]) -> None:
+    """Clamp brightness, hue, saturation, and color temp to valid values."""
     for key, v_min, v_max in (
         (STATE_BRIGHTNESS, HUE_API_STATE_BRI_MIN, HUE_API_STATE_BRI_MAX),
         (STATE_HUE, HUE_API_STATE_HUE_MIN, HUE_API_STATE_HUE_MAX),
@@ -714,8 +724,6 @@ def get_entity_state_dict(config: Config, entity: State) -> dict[str, Any]:
         if data[key] is not None:
             data[key] = max(v_min, min(data[key], v_max))
 
-    return data
-
 
 @lru_cache(maxsize=1024)
 def _entity_unique_id(entity_id: str) -> str:
@@ -831,6 +839,7 @@ def create_hue_success_response(
 def create_config_model(config: Config, request: web.Request) -> dict[str, Any]:
     """Create a config resource."""
     return {
+        "name": "HASS BRIDGE",
         "mac": "00:00:00:00:00:00",
         "swversion": "01003542",
         "apiversion": "1.17.0",
@@ -842,10 +851,18 @@ def create_config_model(config: Config, request: web.Request) -> dict[str, Any]:
 def create_list_of_entities(config: Config, request: web.Request) -> dict[str, Any]:
     """Create a list of all entities."""
-    json_response: dict[str, Any] = {
-        config.entity_id_to_number(state.entity_id): state_to_json(config, state)
-        for state in config.get_exposed_states()
-    }
+    hass: core.HomeAssistant = request.app["hass"]
+
+    json_response: dict[str, Any] = {}
+    for cached_state in config.get_exposed_states():
+        entity_id = cached_state.entity_id
+        state = hass.states.get(entity_id)
+        assert state is not None
+        json_response[config.entity_id_to_number(entity_id)] = state_to_json(
+            config, state
+        )
+
     return json_response
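
Note on the emulated_hue change above: the cache-miss path now lives in _build_entity_state_dict, memoized with functools.lru_cache. This works because Home Assistant State objects are hashable, so repeated polls against an unchanged state reuse the previously built result. A minimal sketch of the pattern, using a hypothetical frozen dataclass as a stand-in for State (and returning an immutable tuple to sidestep shared-mutable-result concerns):

from dataclasses import dataclass
from functools import lru_cache


@dataclass(frozen=True)
class FakeState:  # illustrative stand-in, not Home Assistant's State
    entity_id: str
    state: str


@lru_cache(maxsize=512)
def build_state_dict(entity: FakeState) -> tuple[tuple[str, object], ...]:
    # The expensive conversion runs once per distinct (hashable) state object;
    # identical lookups are served from the cache.
    return (("on", entity.state != "off"),)


s = FakeState("light.kitchen", "on")
assert build_state_dict(s) is build_state_dict(s)  # second call is a cache hit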


@@ -2,7 +2,7 @@
   "domain": "environment_canada",
   "name": "Environment Canada",
   "documentation": "https://www.home-assistant.io/integrations/environment_canada",
-  "requirements": ["env_canada==0.5.27"],
+  "requirements": ["env_canada==0.5.28"],
   "codeowners": ["@gwww", "@michaeldavie"],
   "config_flow": true,
   "iot_class": "cloud_polling",


@@ -10,7 +10,7 @@
     { "local_name": "xBBQ*", "connectable": false },
     { "local_name": "tps", "connectable": false }
   ],
-  "requirements": ["inkbird-ble==0.5.5"],
+  "requirements": ["inkbird-ble==0.5.6"],
   "dependencies": ["bluetooth_adapters"],
   "codeowners": ["@bdraco"],
   "iot_class": "local_push"


@@ -7,6 +7,7 @@ from urllib.parse import urlparse
 from aiohttp import CookieJar
 import async_timeout
 from pyisy import ISY, ISYConnectionError, ISYInvalidAuthError, ISYResponseParseError
+from pyisy.constants import CONFIG_NETWORKING, CONFIG_PORTAL
 import voluptuous as vol
 
 from homeassistant import config_entries
@@ -43,7 +44,6 @@ from .const import (
     ISY_CONF_FIRMWARE,
     ISY_CONF_MODEL,
     ISY_CONF_NAME,
-    ISY_CONF_NETWORKING,
     MANUFACTURER,
     PLATFORMS,
     SCHEME_HTTP,
@@ -220,9 +220,11 @@ async def async_setup_entry(
     numbers = isy_data.variables[Platform.NUMBER]
     for vtype, _, vid in isy.variables.children:
         numbers.append(isy.variables[vtype][vid])
-    if isy.conf[ISY_CONF_NETWORKING]:
+    if (
+        isy.conf[CONFIG_NETWORKING] or isy.conf[CONFIG_PORTAL]
+    ) and isy.networking.nobjs:
         isy_data.devices[CONF_NETWORK] = _create_service_device_info(
-            isy, name=ISY_CONF_NETWORKING, unique_id=CONF_NETWORK
+            isy, name=CONFIG_NETWORKING, unique_id=CONF_NETWORK
         )
         for resource in isy.networking.nobjs:
             isy_data.net_resources.append(resource)


@@ -118,7 +118,6 @@ SUPPORTED_BIN_SENS_CLASSES = ["moisture", "opening", "motion", "climate"]
 # (they can turn off, and report their state)
 ISY_GROUP_PLATFORM = Platform.SWITCH
 
-ISY_CONF_NETWORKING = "Networking Module"
 ISY_CONF_UUID = "uuid"
 ISY_CONF_NAME = "name"
 ISY_CONF_MODEL = "model"


@@ -3,7 +3,7 @@
   "name": "Universal Devices ISY/IoX",
   "integration_type": "hub",
   "documentation": "https://www.home-assistant.io/integrations/isy994",
-  "requirements": ["pyisy==3.1.11"],
+  "requirements": ["pyisy==3.1.13"],
   "codeowners": ["@bdraco", "@shbatm"],
   "config_flow": true,
   "ssdp": [


@@ -23,7 +23,7 @@ import homeassistant.helpers.entity_registry as er
 from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
 from homeassistant.helpers.service import entity_service_call
 
-from .const import _LOGGER, CONF_NETWORK, DOMAIN, ISY_CONF_NAME, ISY_CONF_NETWORKING
+from .const import _LOGGER, CONF_NETWORK, DOMAIN, ISY_CONF_NAME
 from .util import _async_cleanup_registry_entries
 
 # Common Services for All Platforms:
@@ -233,7 +233,7 @@ def async_setup_services(hass: HomeAssistant) -> None:  # noqa: C901
             isy = isy_data.root
             if isy_name and isy_name != isy.conf[ISY_CONF_NAME]:
                 continue
-            if isy.networking is None or not isy.conf[ISY_CONF_NETWORKING]:
+            if isy.networking is None:
                 continue
             command = None
             if address:


@@ -6,7 +6,19 @@
   "requirements": ["bluetooth-data-tools==0.3.1", "ld2410-ble==0.1.1"],
   "dependencies": ["bluetooth_adapters"],
   "codeowners": ["@930913"],
-  "bluetooth": [{ "local_name": "HLK-LD2410B_*" }],
+  "bluetooth": [
+    {
+      "local_name": "HLK-LD2410B_*"
+    },
+    {
+      "local_name": "HLK-LD2410_*"
+    },
+    {
+      "manufacturer_id": 256,
+      "manufacturer_data_start": [7, 1],
+      "service_uuid": "0000af30-0000-1000-8000-00805f9b34fb"
+    }
+  ],
   "integration_type": "device",
   "iot_class": "local_push"
 }
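
Note: besides the second local-name pattern, the manifest above adds discovery by manufacturer data. A hedged sketch of how a "manufacturer_data_start" prefix match can be evaluated against an advertisement; the names here (matches_prefix, adv) are illustrative and are not Home Assistant's actual matcher code:

def matches_prefix(
    manufacturer_data: dict[int, bytes], manufacturer_id: int, prefix: list[int]
) -> bool:
    # Look up the payload advertised for this manufacturer id and compare
    # its leading bytes against the configured prefix.
    data = manufacturer_data.get(manufacturer_id)
    return data is not None and data.startswith(bytes(prefix))


adv = {256: bytes([7, 1, 0x10, 0x20])}  # manufacturer_id 256 -> payload
assert matches_prefix(adv, 256, [7, 1])      # matches the manifest entry
assert not matches_prefix(adv, 256, [7, 2])  # different prefix, no match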


@@ -2,7 +2,7 @@
   "domain": "lupusec",
   "name": "Lupus Electronics LUPUSEC",
   "documentation": "https://www.home-assistant.io/integrations/lupusec",
-  "requirements": ["lupupy==0.2.5"],
+  "requirements": ["lupupy==0.2.7"],
   "codeowners": ["@majuss"],
   "iot_class": "local_polling",
   "loggers": ["lupupy"]


@@ -32,7 +32,7 @@ from .addon import get_addon_manager
 from .api import async_register_api
 from .const import CONF_INTEGRATION_CREATED_ADDON, CONF_USE_ADDON, DOMAIN, LOGGER
 from .device_platform import DEVICE_PLATFORM
-from .helpers import MatterEntryData, get_matter
+from .helpers import MatterEntryData, get_matter, get_node_from_device_entry
 
 CONNECT_TIMEOUT = 10
 LISTEN_READY_TIMEOUT = 30
@@ -192,23 +192,13 @@ async def async_remove_config_entry_device(
     hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry
 ) -> bool:
     """Remove a config entry from a device."""
-    unique_id = None
-    for ident in device_entry.identifiers:
-        if ident[0] == DOMAIN:
-            unique_id = ident[1]
-            break
-
-    if not unique_id:
+    node = await get_node_from_device_entry(hass, device_entry)
+    if node is None:
         return True
 
-    matter_entry_data: MatterEntryData = hass.data[DOMAIN][config_entry.entry_id]
-    matter_client = matter_entry_data.adapter.matter_client
-
-    for node in await matter_client.get_nodes():
-        if node.unique_id == unique_id:
-            await matter_client.remove_node(node.node_id)
-            break
+    matter = get_matter(hass)
+    await matter.matter_client.remove_node(node.node_id)
 
     return True


@@ -11,8 +11,7 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr
 
-from .const import DOMAIN, ID_TYPE_DEVICE_ID
-from .helpers import get_device_id, get_matter
+from .helpers import get_matter, get_node_from_device_entry
 
 ATTRIBUTES_TO_REDACT = {"chip.clusters.Objects.BasicInformation.Attributes.Location"}
@@ -53,28 +52,14 @@ async def async_get_device_diagnostics(
 ) -> dict[str, Any]:
     """Return diagnostics for a device."""
     matter = get_matter(hass)
-    device_id_type_prefix = f"{ID_TYPE_DEVICE_ID}_"
-    device_id_full = next(
-        identifier[1]
-        for identifier in device.identifiers
-        if identifier[0] == DOMAIN and identifier[1].startswith(device_id_type_prefix)
-    )
-    device_id = device_id_full.lstrip(device_id_type_prefix)
-
     server_diagnostics = await matter.matter_client.get_diagnostics()
-
-    node = next(
-        node
-        for node in await matter.matter_client.get_nodes()
-        for node_device in node.node_devices
-        if get_device_id(server_diagnostics.info, node_device) == device_id
-    )
+    node = await get_node_from_device_entry(hass, device)
 
     return {
         "server_info": remove_serialization_type(
             dataclass_to_dict(server_diagnostics.info)
         ),
         "node": redact_matter_attributes(
-            remove_serialization_type(dataclass_to_dict(node))
+            remove_serialization_type(dataclass_to_dict(node) if node else {})
         ),
     }


@@ -6,8 +6,9 @@ from dataclasses import dataclass
 from typing import TYPE_CHECKING
 
 from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers import device_registry as dr
 
-from .const import DOMAIN
+from .const import DOMAIN, ID_TYPE_DEVICE_ID
 
 if TYPE_CHECKING:
     from matter_server.common.models.node import MatterNode
@@ -58,3 +59,42 @@ def get_device_id(
     # Append nodedevice(type) to differentiate between a root node
     # and bridge within Home Assistant devices.
     return f"{operational_instance_id}-{node_device.__class__.__name__}"
+
+
+async def get_node_from_device_entry(
+    hass: HomeAssistant, device: dr.DeviceEntry
+) -> MatterNode | None:
+    """Return MatterNode from device entry."""
+    matter = get_matter(hass)
+    device_id_type_prefix = f"{ID_TYPE_DEVICE_ID}_"
+    device_id_full = next(
+        (
+            identifier[1]
+            for identifier in device.identifiers
+            if identifier[0] == DOMAIN
+            and identifier[1].startswith(device_id_type_prefix)
+        ),
+        None,
+    )
+
+    if device_id_full is None:
+        raise ValueError(f"Device {device.id} is not a Matter device")
+
+    device_id = device_id_full.lstrip(device_id_type_prefix)
+    matter_client = matter.matter_client
+    server_info = matter_client.server_info
+
+    if server_info is None:
+        raise RuntimeError("Matter server information is not available")
+
+    node = next(
+        (
+            node
+            for node in await matter_client.get_nodes()
+            for node_device in node.node_devices
+            if get_device_id(server_info, node_device) == device_id
+        ),
+        None,
+    )
+
+    return node
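
Note: get_node_from_device_entry consolidates the lookup previously duplicated between __init__.py and diagnostics.py, and both next(...) calls now pass a default so a missing match yields None instead of raising StopIteration. The idiom in isolation:

# next(iterable, default) returns the first item, or the default when the
# generator is exhausted, instead of raising StopIteration.
values = [1, 3, 5]
first_even = next((v for v in values if v % 2 == 0), None)
assert first_even is None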


@@ -9,7 +9,7 @@ An overview of the areas and the devices in this smart home:
 {%- for area in areas %}
   {%- set area_info = namespace(printed=false) %}
   {%- for device in area_devices(area.name) -%}
-    {%- if not device_attr(device, "disabled_by") and not device_attr(device, "entry_type") %}
+    {%- if not device_attr(device, "disabled_by") and not device_attr(device, "entry_type") and device_attr(device, "name") %}
       {%- if not area_info.printed %}
 
 {{ area.name }}:


@@ -8,7 +8,7 @@
       "manufacturer_id": 220
     }
   ],
-  "requirements": ["oralb-ble==0.17.2"],
+  "requirements": ["oralb-ble==0.17.4"],
   "dependencies": ["bluetooth_adapters"],
   "codeowners": ["@bdraco", "@Lash-L"],
   "iot_class": "local_push"


@@ -2,7 +2,6 @@
 from __future__ import annotations
 
-import asyncio
 from dataclasses import dataclass
 import datetime
 import logging
@@ -84,27 +83,18 @@ class RainbirdUpdateCoordinator(DataUpdateCoordinator[RainbirdDeviceState]):
             raise UpdateFailed(f"Error communicating with Device: {err}") from err
 
     async def _fetch_data(self) -> RainbirdDeviceState:
-        """Fetch data from the Rain Bird device."""
-        (zones, states, rain, rain_delay) = await asyncio.gather(
-            self._fetch_zones(),
-            self._controller.get_zone_states(),
-            self._controller.get_rain_sensor_state(),
-            self._controller.get_rain_delay(),
-        )
+        """Fetch data from the Rain Bird device.
+
+        Rainbird devices can only reliably handle a single request at a time,
+        so the requests are sent serially.
+        """
+        available_stations = await self._controller.get_available_stations()
+        states = await self._controller.get_zone_states()
+        rain = await self._controller.get_rain_sensor_state()
+        rain_delay = await self._controller.get_rain_delay()
         return RainbirdDeviceState(
-            zones=set(zones),
-            active_zones={zone for zone in zones if states.active(zone)},
+            zones=available_stations.active_set,
+            active_zones=states.active_set,
             rain=rain,
             rain_delay=rain_delay,
         )
-
-    async def _fetch_zones(self) -> set[int]:
-        """Fetch the zones from the device, caching the results."""
-        if self._zones is None:
-            available_stations = await self._controller.get_available_stations()
-            self._zones = {
-                zone
-                for zone in range(1, available_stations.stations.count + 1)
-                if available_stations.stations.active(zone)
-            }
-        return self._zones
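
Note on the coordinator change above: the asyncio.gather fan-out is replaced with sequential awaits because, per the new docstring, Rain Bird devices only reliably handle one request at a time. A hedged sketch of a related general pattern (serializing shared device I/O behind an asyncio.Lock); SerializedClient is illustrative and not part of pyrainbird:

import asyncio


class SerializedClient:
    def __init__(self) -> None:
        self._lock = asyncio.Lock()

    async def request(self, command: str) -> str:
        async with self._lock:  # only one in-flight request at a time
            await asyncio.sleep(0.01)  # stand-in for device I/O
            return f"ok:{command}"


async def main() -> None:
    client = SerializedClient()
    # Even if callers gather, the lock forces one-at-a-time execution.
    results = await asyncio.gather(*(client.request(c) for c in ("a", "b")))
    print(results)


asyncio.run(main())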


@@ -3,7 +3,7 @@
   "name": "Rain Bird",
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/rainbird",
-  "requirements": ["pyrainbird==1.1.0"],
+  "requirements": ["pyrainbird==2.0.0"],
   "codeowners": ["@konikvranik", "@allenporter"],
   "iot_class": "local_polling",
   "loggers": ["pyrainbird"]


@@ -14,7 +14,6 @@ import time
 from typing import Any, TypeVar, cast
 
 import async_timeout
-from awesomeversion import AwesomeVersion
 from lru import LRU  # pylint: disable=no-name-in-module
 from sqlalchemy import create_engine, event as sqlalchemy_event, exc, func, select
 from sqlalchemy.engine import Engine
@@ -67,6 +66,7 @@ from .db_schema import (
 )
 from .executor import DBInterruptibleThreadPoolExecutor
 from .models import (
+    DatabaseEngine,
     StatisticData,
     StatisticMetaData,
     UnsupportedDialect,
@@ -173,7 +173,7 @@ class Recorder(threading.Thread):
         self.db_url = uri
         self.db_max_retries = db_max_retries
         self.db_retry_wait = db_retry_wait
-        self.engine_version: AwesomeVersion | None = None
+        self.database_engine: DatabaseEngine | None = None
         # Database connection is ready, but non-live migration may be in progress
         db_connected: asyncio.Future[bool] = hass.data[DOMAIN].db_connected
         self.async_db_connected: asyncio.Future[bool] = db_connected
@@ -1125,13 +1125,13 @@ class Recorder(threading.Thread):
     ) -> None:
         """Dbapi specific connection settings."""
         assert self.engine is not None
-        if version := setup_connection_for_dialect(
+        if database_engine := setup_connection_for_dialect(
             self,
             self.engine.dialect.name,
             dbapi_connection,
             not self._completed_first_database_setup,
         ):
-            self.engine_version = version
+            self.database_engine = database_engine
         self._completed_first_database_setup = True
 
         if self.db_url == SQLITE_URL_PREFIX or ":memory:" in self.db_url:


@@ -519,48 +519,52 @@ def state_changes_during_period(
 
 
 def _get_last_state_changes_stmt(
-    schema_version: int, number_of_states: int, entity_id: str | None
+    schema_version: int, number_of_states: int, entity_id: str
 ) -> StatementLambdaElement:
     stmt, join_attributes = lambda_stmt_and_join_attributes(
         schema_version, False, include_last_changed=False
     )
     if schema_version >= 31:
-        stmt += lambda q: q.filter(
-            (States.last_changed_ts == States.last_updated_ts)
-            | States.last_changed_ts.is_(None)
+        stmt += lambda q: q.where(
+            States.state_id
+            == (
+                select(States.state_id)
+                .filter(States.entity_id == entity_id)
+                .order_by(States.last_updated_ts.desc())
+                .limit(number_of_states)
+                .subquery()
+            ).c.state_id
         )
     else:
-        stmt += lambda q: q.filter(
-            (States.last_changed == States.last_updated) | States.last_changed.is_(None)
+        stmt += lambda q: q.where(
+            States.state_id
+            == (
+                select(States.state_id)
+                .filter(States.entity_id == entity_id)
+                .order_by(States.last_updated.desc())
+                .limit(number_of_states)
+                .subquery()
+            ).c.state_id
         )
-    if entity_id:
-        stmt += lambda q: q.filter(States.entity_id == entity_id)
     if join_attributes:
         stmt += lambda q: q.outerjoin(
             StateAttributes, States.attributes_id == StateAttributes.attributes_id
         )
-    if schema_version >= 31:
-        stmt += lambda q: q.order_by(
-            States.entity_id, States.last_updated_ts.desc()
-        ).limit(number_of_states)
-    else:
-        stmt += lambda q: q.order_by(
-            States.entity_id, States.last_updated.desc()
-        ).limit(number_of_states)
+    stmt += lambda q: q.order_by(States.state_id.desc())
     return stmt
 
 
 def get_last_state_changes(
-    hass: HomeAssistant, number_of_states: int, entity_id: str | None
+    hass: HomeAssistant, number_of_states: int, entity_id: str
 ) -> MutableMapping[str, list[State]]:
     """Return the last number_of_states."""
-    start_time = dt_util.utcnow()
-    entity_id = entity_id.lower() if entity_id is not None else None
-    entity_ids = [entity_id] if entity_id is not None else None
+    entity_id_lower = entity_id.lower()
+    entity_ids = [entity_id_lower]
 
     with session_scope(hass=hass) as session:
         stmt = _get_last_state_changes_stmt(
-            _schema_version(hass), number_of_states, entity_id
+            _schema_version(hass), number_of_states, entity_id_lower
        )
         states = list(execute_stmt_lambda_element(session, stmt))
         return cast(
@@ -569,7 +573,7 @@ def get_last_state_changes(
             hass,
             session,
             reversed(states),
-            start_time,
+            dt_util.utcnow(),
             entity_ids,
             include_start_time_state=False,
         ),
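
Note: the rewritten statement pins the outer query to the newest number_of_states rows for the entity via a subquery on state_id, instead of ordering and limiting the outer query itself. Roughly the SQL shape SQLAlchemy should emit for the schema-31 branch (simplified, column list elided; for illustration only):

# Approximate rendering of the new lambda statement; the real query also
# joins state attributes when join_attributes is true.
SQL_SKETCH = """
SELECT states.*
FROM states,
     (SELECT states.state_id AS state_id
      FROM states
      WHERE states.entity_id = :entity_id
      ORDER BY states.last_updated_ts DESC
      LIMIT :number_of_states) AS anon_1
WHERE states.state_id = anon_1.state_id
ORDER BY states.state_id DESC
"""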


@@ -1,10 +1,12 @@
 """Models for Recorder."""
 from __future__ import annotations
 
+from dataclasses import dataclass
 from datetime import datetime, timedelta
 import logging
 from typing import Any, Literal, TypedDict, overload
 
+from awesomeversion import AwesomeVersion
 from sqlalchemy.engine.row import Row
 
 from homeassistant.const import (
@@ -17,6 +19,8 @@ from homeassistant.core import Context, State
 from homeassistant.helpers.json import json_loads
 import homeassistant.util.dt as dt_util
 
+from .const import SupportedDialect
+
 # pylint: disable=invalid-name
 _LOGGER = logging.getLogger(__name__)
@@ -443,3 +447,27 @@ class StatisticPeriod(TypedDict, total=False):
     calendar: CalendarStatisticPeriod
     fixed_period: FixedStatisticPeriod
     rolling_window: RollingWindowStatisticPeriod
+
+
+@dataclass
+class DatabaseEngine:
+    """Properties of the database engine."""
+
+    dialect: SupportedDialect
+    optimizer: DatabaseOptimizer
+    version: AwesomeVersion | None
+
+
+@dataclass
+class DatabaseOptimizer:
+    """Properties of the database optimizer for the configured database engine."""
+
+    # Some MariaDB versions have a bug that causes a slow query when using
+    # a range in a select statement with an IN clause.
+    #
+    # https://jira.mariadb.org/browse/MDEV-25020
+    #
+    # Historically, we have applied this logic to PostgreSQL as well, but
+    # it may not be necessary. We should revisit this in the future
+    # when we have more data.
+    slow_range_in_select: bool
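
Note: with these models, callers branch on a capability flag (optimizer.slow_range_in_select) rather than on the dialect name. An illustrative composition with example values, assuming the recorder's own imports shown in this diff (DatabaseEngine, DatabaseOptimizer, SupportedDialect, AwesomeVersion):

engine = DatabaseEngine(
    dialect=SupportedDialect.MYSQL,
    version=AwesomeVersion("10.6.4"),
    optimizer=DatabaseOptimizer(slow_range_in_select=True),
)
if not engine.optimizer.slow_range_in_select:
    ...  # safe to use the fast IN/DISTINCT purge-query path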


@@ -14,13 +14,14 @@ from sqlalchemy.sql.expression import distinct
 from homeassistant.const import EVENT_STATE_CHANGED
 import homeassistant.util.dt as dt_util
 
-from .const import MAX_ROWS_TO_PURGE, SupportedDialect
+from .const import MAX_ROWS_TO_PURGE
 from .db_schema import Events, StateAttributes, States
+from .models import DatabaseEngine
 from .queries import (
     attributes_ids_exist_in_states,
-    attributes_ids_exist_in_states_sqlite,
+    attributes_ids_exist_in_states_with_fast_in_distinct,
     data_ids_exist_in_events,
-    data_ids_exist_in_events_sqlite,
+    data_ids_exist_in_events_with_fast_in_distinct,
     delete_event_data_rows,
     delete_event_rows,
     delete_recorder_runs_rows,
@@ -83,8 +84,6 @@ def purge_old_data(
         "Purging states and events before target %s",
         purge_before.isoformat(sep=" ", timespec="seconds"),
     )
-    using_sqlite = instance.dialect_name == SupportedDialect.SQLITE
-
     with session_scope(session=instance.get_session()) as session:
         # Purge a max of MAX_ROWS_TO_PURGE, based on the oldest states or events record
         has_more_to_purge = False
@@ -93,9 +92,7 @@ def purge_old_data(
                 "Purge running in legacy format as there are states with event_id"
                 " remaining"
             )
-            has_more_to_purge |= _purge_legacy_format(
-                instance, session, purge_before, using_sqlite
-            )
+            has_more_to_purge |= _purge_legacy_format(instance, session, purge_before)
         else:
             _LOGGER.debug(
                 "Purge running in new format as there are NO states with event_id"
@@ -103,10 +100,10 @@ def purge_old_data(
             )
             # Once we are done purging legacy rows, we use the new method
             has_more_to_purge |= _purge_states_and_attributes_ids(
-                instance, session, states_batch_size, purge_before, using_sqlite
+                instance, session, states_batch_size, purge_before
             )
             has_more_to_purge |= _purge_events_and_data_ids(
-                instance, session, events_batch_size, purge_before, using_sqlite
+                instance, session, events_batch_size, purge_before
             )
 
         statistics_runs = _select_statistics_runs_to_purge(session, purge_before)
@@ -140,7 +137,7 @@ def _purging_legacy_format(session: Session) -> bool:
 def _purge_legacy_format(
-    instance: Recorder, session: Session, purge_before: datetime, using_sqlite: bool
+    instance: Recorder, session: Session, purge_before: datetime
 ) -> bool:
     """Purge rows that are still linked by the event_ids."""
     (
@@ -153,10 +150,10 @@ def _purge_legacy_format(
     )
     if state_ids:
         _purge_state_ids(instance, session, state_ids)
-    _purge_unused_attributes_ids(instance, session, attributes_ids, using_sqlite)
+    _purge_unused_attributes_ids(instance, session, attributes_ids)
     if event_ids:
         _purge_event_ids(session, event_ids)
-    _purge_unused_data_ids(instance, session, data_ids, using_sqlite)
+    _purge_unused_data_ids(instance, session, data_ids)
     return bool(event_ids or state_ids or attributes_ids or data_ids)
@@ -165,12 +162,13 @@ def _purge_states_and_attributes_ids(
     session: Session,
     states_batch_size: int,
     purge_before: datetime,
-    using_sqlite: bool,
 ) -> bool:
     """Purge states and linked attributes id in a batch.
 
     Returns true if there are more states to purge.
     """
+    database_engine = instance.database_engine
+    assert database_engine is not None
     has_remaining_state_ids_to_purge = True
     # There are more states relative to attributes_ids so
     # we purge enough state_ids to try to generate a full
@@ -187,7 +185,7 @@ def _purge_states_and_attributes_ids(
             _purge_state_ids(instance, session, state_ids)
             attributes_ids_batch = attributes_ids_batch | attributes_ids
 
-    _purge_unused_attributes_ids(instance, session, attributes_ids_batch, using_sqlite)
+    _purge_unused_attributes_ids(instance, session, attributes_ids_batch)
     _LOGGER.debug(
         "After purging states and attributes_ids remaining=%s",
         has_remaining_state_ids_to_purge,
@@ -200,7 +198,6 @@ def _purge_events_and_data_ids(
     session: Session,
     events_batch_size: int,
     purge_before: datetime,
-    using_sqlite: bool,
 ) -> bool:
     """Purge states and linked attributes id in a batch.
@@ -220,7 +217,7 @@ def _purge_events_and_data_ids(
             _purge_event_ids(session, event_ids)
             data_ids_batch = data_ids_batch | data_ids
 
-    _purge_unused_data_ids(instance, session, data_ids_batch, using_sqlite)
+    _purge_unused_data_ids(instance, session, data_ids_batch)
     _LOGGER.debug(
         "After purging event and data_ids remaining=%s",
         has_remaining_event_ids_to_purge,
@@ -267,13 +264,13 @@ def _select_event_data_ids_to_purge(
 
 
 def _select_unused_attributes_ids(
-    session: Session, attributes_ids: set[int], using_sqlite: bool
+    session: Session, attributes_ids: set[int], database_engine: DatabaseEngine
 ) -> set[int]:
     """Return a set of attributes ids that are not used by any states in the db."""
     if not attributes_ids:
         return set()
 
-    if using_sqlite:
+    if not database_engine.optimizer.slow_range_in_select:
         #
         # SQLite has a superior query optimizer for the distinct query below as it uses
         # the covering index without having to examine the rows directly for both of the
@@ -290,7 +287,7 @@ def _select_unused_attributes_ids(
         seen_ids = {
             state[0]
             for state in session.execute(
-                attributes_ids_exist_in_states_sqlite(attributes_ids)
+                attributes_ids_exist_in_states_with_fast_in_distinct(attributes_ids)
             ).all()
         }
     else:
@@ -340,16 +337,18 @@ def _purge_unused_attributes_ids(
     instance: Recorder,
     session: Session,
     attributes_ids_batch: set[int],
-    using_sqlite: bool,
 ) -> None:
+    """Purge unused attributes ids."""
+    database_engine = instance.database_engine
+    assert database_engine is not None
     if unused_attribute_ids_set := _select_unused_attributes_ids(
-        session, attributes_ids_batch, using_sqlite
+        session, attributes_ids_batch, database_engine
     ):
         _purge_batch_attributes_ids(instance, session, unused_attribute_ids_set)
 
 
 def _select_unused_event_data_ids(
-    session: Session, data_ids: set[int], using_sqlite: bool
+    session: Session, data_ids: set[int], database_engine: DatabaseEngine
 ) -> set[int]:
     """Return a set of event data ids that are not used by any events in the db."""
     if not data_ids:
@@ -357,11 +356,11 @@ def _select_unused_event_data_ids(
 
     # See _select_unused_attributes_ids for why this function
     # branches for non-sqlite databases.
-    if using_sqlite:
+    if not database_engine.optimizer.slow_range_in_select:
         seen_ids = {
             state[0]
             for state in session.execute(
-                data_ids_exist_in_events_sqlite(data_ids)
+                data_ids_exist_in_events_with_fast_in_distinct(data_ids)
             ).all()
         }
     else:
@@ -381,10 +380,12 @@ def _select_unused_event_data_ids(
 
 
 def _purge_unused_data_ids(
-    instance: Recorder, session: Session, data_ids_batch: set[int], using_sqlite: bool
+    instance: Recorder, session: Session, data_ids_batch: set[int]
 ) -> None:
+    database_engine = instance.database_engine
+    assert database_engine is not None
     if unused_data_ids_set := _select_unused_event_data_ids(
-        session, data_ids_batch, using_sqlite
+        session, data_ids_batch, database_engine
     ):
         _purge_batch_data_ids(instance, session, unused_data_ids_set)
@@ -582,7 +583,8 @@ def _purge_old_recorder_runs(
 def _purge_filtered_data(instance: Recorder, session: Session) -> bool:
     """Remove filtered states and events that shouldn't be in the database."""
     _LOGGER.debug("Cleanup filtered data")
-    using_sqlite = instance.dialect_name == SupportedDialect.SQLITE
+    database_engine = instance.database_engine
+    assert database_engine is not None
 
     # Check if excluded entity_ids are in database
     excluded_entity_ids: list[str] = [
@@ -591,7 +593,7 @@ def _purge_filtered_data(instance: Recorder, session: Session) -> bool:
         if not instance.entity_filter(entity_id)
     ]
     if len(excluded_entity_ids) > 0:
-        _purge_filtered_states(instance, session, excluded_entity_ids, using_sqlite)
+        _purge_filtered_states(instance, session, excluded_entity_ids, database_engine)
         return False
 
     # Check if excluded event_types are in database
@@ -611,7 +613,7 @@ def _purge_filtered_states(
     instance: Recorder,
     session: Session,
     excluded_entity_ids: list[str],
-    using_sqlite: bool,
+    database_engine: DatabaseEngine,
 ) -> None:
     """Remove filtered states and linked events."""
     state_ids: list[int]
@@ -632,7 +634,7 @@ def _purge_filtered_states(
     _purge_state_ids(instance, session, set(state_ids))
     _purge_event_ids(session, event_ids)
     unused_attribute_ids_set = _select_unused_attributes_ids(
-        session, {id_ for id_ in attributes_ids if id_ is not None}, using_sqlite
+        session, {id_ for id_ in attributes_ids if id_ is not None}, database_engine
     )
     _purge_batch_attributes_ids(instance, session, unused_attribute_ids_set)
@@ -641,7 +643,8 @@ def _purge_filtered_events(
     instance: Recorder, session: Session, excluded_event_types: list[str]
 ) -> None:
     """Remove filtered events and linked states."""
-    using_sqlite = instance.dialect_name == SupportedDialect.SQLITE
+    database_engine = instance.database_engine
+    assert database_engine is not None
     event_ids, data_ids = zip(
         *(
             session.query(Events.event_id, Events.data_id)
@@ -660,7 +663,7 @@ def _purge_filtered_events(
     _purge_state_ids(instance, session, state_ids)
     _purge_event_ids(session, event_ids)
     if unused_data_ids_set := _select_unused_event_data_ids(
-        session, set(data_ids), using_sqlite
+        session, set(data_ids), database_engine
     ):
         _purge_batch_data_ids(instance, session, unused_data_ids_set)
     if EVENT_STATE_CHANGED in excluded_event_types:
@@ -671,7 +674,8 @@ def _purge_filtered_events(
 @retryable_database_job("purge")
 def purge_entity_data(instance: Recorder, entity_filter: Callable[[str], bool]) -> bool:
     """Purge states and events of specified entities."""
-    using_sqlite = instance.dialect_name == SupportedDialect.SQLITE
+    database_engine = instance.database_engine
+    assert database_engine is not None
     with session_scope(session=instance.get_session()) as session:
         selected_entity_ids: list[str] = [
             entity_id
@@ -682,7 +686,9 @@ def purge_entity_data(instance: Recorder, entity_filter: Callable[[str], bool])
         if len(selected_entity_ids) > 0:
             # Purge a max of MAX_ROWS_TO_PURGE, based on the oldest states
             # or events record.
-            _purge_filtered_states(instance, session, selected_entity_ids, using_sqlite)
+            _purge_filtered_states(
+                instance, session, selected_entity_ids, database_engine
+            )
             _LOGGER.debug("Purging entity data hasn't fully completed yet")
             return False

View File

@@ -45,7 +45,7 @@ def _state_attrs_exist(attr: int | None) -> Select:
     return select(func.min(States.attributes_id)).where(States.attributes_id == attr)
 
 
-def attributes_ids_exist_in_states_sqlite(
+def attributes_ids_exist_in_states_with_fast_in_distinct(
     attributes_ids: Iterable[int],
 ) -> StatementLambdaElement:
     """Find attributes ids that exist in the states table."""
@@ -268,7 +268,7 @@ def attributes_ids_exist_in_states(
     )
 
 
-def data_ids_exist_in_events_sqlite(
+def data_ids_exist_in_events_with_fast_in_distinct(
     data_ids: Iterable[int],
 ) -> StatementLambdaElement:
     """Find data ids that exist in the events table."""


@@ -64,8 +64,13 @@ class RunHistory:
     @property
     def current(self) -> RecorderRuns:
         """Get the current run."""
-        assert self._current_run_info is not None
-        return self._current_run_info
+        # If start has not been called yet because the recorder is
+        # still starting up we want history to use the current time
+        # as the created time to ensure we can still return results
+        # and we do not try to pull data from the previous run.
+        return self._current_run_info or RecorderRuns(
+            start=self.recording_start, created=dt_util.utcnow()
+        )
 
     def get(self, start: datetime) -> RecorderRuns | None:
         """Return the recorder run that started before or at start.


@@ -49,8 +49,8 @@ def _async_get_db_engine_info(instance: Recorder) -> dict[str, Any]:
     db_engine_info: dict[str, Any] = {}
     if dialect_name := instance.dialect_name:
         db_engine_info["database_engine"] = dialect_name.value
-    if engine_version := instance.engine_version:
-        db_engine_info["database_version"] = str(engine_version)
+    if database_engine := instance.database_engine:
+        db_engine_info["database_version"] = str(database_engine.version)
     return db_engine_info


@@ -36,7 +36,13 @@ from .db_schema import (
     TABLES_TO_CHECK,
     RecorderRuns,
 )
-from .models import StatisticPeriod, UnsupportedDialect, process_timestamp
+from .models import (
+    DatabaseEngine,
+    DatabaseOptimizer,
+    StatisticPeriod,
+    UnsupportedDialect,
+    process_timestamp,
+)
 
 if TYPE_CHECKING:
     from . import Recorder
@@ -51,44 +57,33 @@ QUERY_RETRY_WAIT = 0.1
 SQLITE3_POSTFIXES = ["", "-wal", "-shm"]
 DEFAULT_YIELD_STATES_ROWS = 32768
 
+
+def _simple_version(version: str) -> AwesomeVersion:
+    """Return a simple version."""
+    return AwesomeVersion(version, ensure_strategy=AwesomeVersionStrategy.SIMPLEVER)
+
+
 # Our minimum versions for each database
 #
 # Older MariaDB suffers https://jira.mariadb.org/browse/MDEV-25020
 # which is fixed in 10.5.17, 10.6.9, 10.7.5, 10.8.4
 #
-MIN_VERSION_MARIA_DB = AwesomeVersion(
-    "10.3.0", ensure_strategy=AwesomeVersionStrategy.SIMPLEVER
-)
-RECOMMENDED_MIN_VERSION_MARIA_DB = AwesomeVersion(
-    "10.5.17", ensure_strategy=AwesomeVersionStrategy.SIMPLEVER
-)
-MARIA_DB_106 = AwesomeVersion(
-    "10.6.0", ensure_strategy=AwesomeVersionStrategy.SIMPLEVER
-)
-RECOMMENDED_MIN_VERSION_MARIA_DB_106 = AwesomeVersion(
-    "10.6.9", ensure_strategy=AwesomeVersionStrategy.SIMPLEVER
-)
-MARIA_DB_107 = AwesomeVersion(
-    "10.7.0", ensure_strategy=AwesomeVersionStrategy.SIMPLEVER
-)
-RECOMMENDED_MIN_VERSION_MARIA_DB_107 = AwesomeVersion(
-    "10.7.5", ensure_strategy=AwesomeVersionStrategy.SIMPLEVER
-)
-MARIA_DB_108 = AwesomeVersion(
-    "10.8.0", ensure_strategy=AwesomeVersionStrategy.SIMPLEVER
-)
-RECOMMENDED_MIN_VERSION_MARIA_DB_108 = AwesomeVersion(
-    "10.8.4", ensure_strategy=AwesomeVersionStrategy.SIMPLEVER
-)
-MIN_VERSION_MYSQL = AwesomeVersion(
-    "8.0.0", ensure_strategy=AwesomeVersionStrategy.SIMPLEVER
-)
-MIN_VERSION_PGSQL = AwesomeVersion(
-    "12.0", ensure_strategy=AwesomeVersionStrategy.SIMPLEVER
-)
-MIN_VERSION_SQLITE = AwesomeVersion(
-    "3.31.0", ensure_strategy=AwesomeVersionStrategy.SIMPLEVER
-)
+MIN_VERSION_MARIA_DB = _simple_version("10.3.0")
+RECOMMENDED_MIN_VERSION_MARIA_DB = _simple_version("10.5.17")
+MARIADB_WITH_FIXED_IN_QUERIES_105 = _simple_version("10.5.17")
+MARIA_DB_106 = _simple_version("10.6.0")
+MARIADB_WITH_FIXED_IN_QUERIES_106 = _simple_version("10.6.9")
+RECOMMENDED_MIN_VERSION_MARIA_DB_106 = _simple_version("10.6.9")
+MARIA_DB_107 = _simple_version("10.7.0")
+RECOMMENDED_MIN_VERSION_MARIA_DB_107 = _simple_version("10.7.5")
+MARIADB_WITH_FIXED_IN_QUERIES_107 = _simple_version("10.7.5")
+MARIA_DB_108 = _simple_version("10.8.0")
+RECOMMENDED_MIN_VERSION_MARIA_DB_108 = _simple_version("10.8.4")
+MARIADB_WITH_FIXED_IN_QUERIES_108 = _simple_version("10.8.4")
+MIN_VERSION_MYSQL = _simple_version("8.0.0")
+MIN_VERSION_PGSQL = _simple_version("12.0")
+MIN_VERSION_SQLITE = _simple_version("3.31.0")
 
 # This is the maximum time after the recorder ends the session
 # before we no longer consider startup to be a "restart" and we
@@ -467,10 +462,12 @@ def setup_connection_for_dialect(
     dialect_name: str,
     dbapi_connection: Any,
     first_connection: bool,
-) -> AwesomeVersion | None:
+) -> DatabaseEngine | None:
     """Execute statements needed for dialect connection."""
     version: AwesomeVersion | None = None
+    slow_range_in_select = True
     if dialect_name == SupportedDialect.SQLITE:
+        slow_range_in_select = False
         if first_connection:
             old_isolation = dbapi_connection.isolation_level
             dbapi_connection.isolation_level = None
@@ -536,7 +533,19 @@ def setup_connection_for_dialect(
                 version or version_string, "MySQL", MIN_VERSION_MYSQL
             )
 
+        slow_range_in_select = bool(
+            not version
+            or version < MARIADB_WITH_FIXED_IN_QUERIES_105
+            or MARIA_DB_106 <= version < MARIADB_WITH_FIXED_IN_QUERIES_106
+            or MARIA_DB_107 <= version < MARIADB_WITH_FIXED_IN_QUERIES_107
+            or MARIA_DB_108 <= version < MARIADB_WITH_FIXED_IN_QUERIES_108
+        )
     elif dialect_name == SupportedDialect.POSTGRESQL:
+        # Historically we have marked PostgreSQL as having slow range in select
+        # but this may not be true for all versions. We should investigate
+        # this further when we have more data and remove this if possible
+        # in the future so we can use the simpler purge SQL query for
+        # _select_unused_attributes_ids and _select_unused_events_ids
         if first_connection:
             # server_version_num was added in 2006
             result = query_on_connection(dbapi_connection, "SHOW server_version")
@@ -550,7 +559,14 @@ def setup_connection_for_dialect(
     else:
         _fail_unsupported_dialect(dialect_name)
 
-    return version
+    if not first_connection:
+        return None
+
+    return DatabaseEngine(
+        dialect=SupportedDialect(dialect_name),
+        version=version,
+        optimizer=DatabaseOptimizer(slow_range_in_select=slow_range_in_select),
+    )
 
 
 def end_incomplete_runs(session: Session, start_time: datetime) -> None:
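
Note: the MariaDB gating above flags any version that falls before the MDEV-25020 fix within its series (10.5.x before 10.5.17, 10.6.x before 10.6.9, 10.7.x before 10.7.5, 10.8.x before 10.8.4). A quick, self-contained illustration of the same expression with sample versions:

from awesomeversion import AwesomeVersion, AwesomeVersionStrategy


def _simple_version(version: str) -> AwesomeVersion:
    return AwesomeVersion(version, ensure_strategy=AwesomeVersionStrategy.SIMPLEVER)


def is_slow(version: AwesomeVersion | None) -> bool:
    # Mirrors the slow_range_in_select gating; unknown versions are
    # treated as slow to stay on the safe query path.
    return bool(
        not version
        or version < _simple_version("10.5.17")
        or _simple_version("10.6.0") <= version < _simple_version("10.6.9")
        or _simple_version("10.7.0") <= version < _simple_version("10.7.5")
        or _simple_version("10.8.0") <= version < _simple_version("10.8.4")
    )


assert is_slow(_simple_version("10.6.4"))      # 10.6 series before the fix
assert not is_slow(_simple_version("10.6.9"))  # fixed release
assert not is_slow(_simple_version("10.9.2"))  # newer series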


@@ -15,7 +15,7 @@
       "connectable": false
     }
   ],
-  "requirements": ["sensorpro-ble==0.5.1"],
+  "requirements": ["sensorpro-ble==0.5.3"],
   "dependencies": ["bluetooth_adapters"],
   "codeowners": ["@bdraco"],
   "iot_class": "local_push"


@@ -9,7 +9,7 @@
       "connectable": false
     }
   ],
-  "requirements": ["sensorpush-ble==1.5.2"],
+  "requirements": ["sensorpush-ble==1.5.5"],
   "dependencies": ["bluetooth_adapters"],
   "codeowners": ["@bdraco"],
   "iot_class": "local_push"


@@ -2,7 +2,7 @@
   "domain": "synology_dsm",
   "name": "Synology DSM",
   "documentation": "https://www.home-assistant.io/integrations/synology_dsm",
-  "requirements": ["py-synologydsm-api==2.1.2"],
+  "requirements": ["py-synologydsm-api==2.1.4"],
   "codeowners": ["@hacf-fr", "@Quentame", "@mib1185"],
   "config_flow": true,
   "ssdp": [


@@ -8,7 +8,7 @@
     { "local_name": "TP39*", "connectable": false }
   ],
   "dependencies": ["bluetooth_adapters"],
-  "requirements": ["thermopro-ble==0.4.3"],
+  "requirements": ["thermopro-ble==0.4.5"],
   "codeowners": ["@bdraco"],
   "iot_class": "local_push"
 }


@@ -154,6 +154,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = (
         device_class=SensorDeviceClass.TIMESTAMP,
         entity_category=EntityCategory.DIAGNOSTIC,
         has_entity_name=True,
+        entity_registry_enabled_default=False,
         allowed_fn=lambda controller, _: controller.option_allow_uptime_sensors,
         api_handler_fn=lambda api: api.clients,
         available_fn=lambda controller, obj_id: controller.available,


@@ -1,13 +1,19 @@
 {
   "domain": "velbus",
   "name": "Velbus",
-  "documentation": "https://www.home-assistant.io/integrations/velbus",
-  "requirements": ["velbus-aio==2022.12.0"],
-  "config_flow": true,
   "codeowners": ["@Cereal2nd", "@brefra"],
+  "config_flow": true,
   "dependencies": ["usb"],
+  "documentation": "https://www.home-assistant.io/integrations/velbus",
   "integration_type": "hub",
   "iot_class": "local_push",
+  "loggers": [
+    "velbus-parser",
+    "velbus-module",
+    "velbus-packet",
+    "velbus-protocol"
+  ],
+  "requirements": ["velbus-aio==2022.12.0"],
   "usb": [
     {
       "vid": "10CF",
@@ -25,6 +31,5 @@
       "vid": "10CF",
       "pid": "0518"
     }
-  ],
-  "loggers": ["velbusaio"]
+  ]
 }
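Aside from replacing the loggers list, the velbus hunk is a re-sort: keys now follow the manifest convention of "domain" and "name" first, then alphabetical order. A hypothetical checker for that ordering; the real enforcement lives in Home Assistant's hassfest tooling:

import json


def manifest_keys_ordered(path: str) -> bool:
    # "domain" and "name" lead; every remaining key is alphabetical.
    with open(path, encoding="utf-8") as file:
        keys = list(json.load(file))
    return keys[:2] == ["domain", "name"] and keys[2:] == sorted(keys[2:])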

View File

@@ -14,7 +14,7 @@
     }
   ],
   "dependencies": ["bluetooth_adapters"],
-  "requirements": ["xiaomi-ble==0.15.0"],
+  "requirements": ["xiaomi-ble==0.16.1"],
   "codeowners": ["@Jc2k", "@Ernst79"],
   "iot_class": "local_push"
 }

View File

@@ -8,7 +8,7 @@ from .backports.enum import StrEnum
 APPLICATION_NAME: Final = "HomeAssistant"
 MAJOR_VERSION: Final = 2023
 MINOR_VERSION: Final = 2
-PATCH_VERSION: Final = "2"
+PATCH_VERSION: Final = "3"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)
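Only PATCH_VERSION changes here; the full version string is derived from the three parts, so a single edit yields the new release tag. Worked out standalone:

MAJOR_VERSION = 2023
MINOR_VERSION = 2
PATCH_VERSION = "3"

__short_version__ = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__ = f"{__short_version__}.{PATCH_VERSION}"

assert __version__ == "2023.2.3"  # matches the pyproject.toml bump below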

View File

@@ -225,6 +225,19 @@ BLUETOOTH: list[dict[str, bool | str | int | list[int]]] = [
         "domain": "ld2410_ble",
         "local_name": "HLK-LD2410B_*",
     },
+    {
+        "domain": "ld2410_ble",
+        "local_name": "HLK-LD2410_*",
+    },
+    {
+        "domain": "ld2410_ble",
+        "manufacturer_data_start": [
+            7,
+            1,
+        ],
+        "manufacturer_id": 256,
+        "service_uuid": "0000af30-0000-1000-8000-00805f9b34fb",
+    },
     {
         "domain": "led_ble",
         "local_name": "LEDnet*",

View File

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "homeassistant"
-version = "2023.2.2"
+version = "2023.2.3"
 license = {text = "Apache-2.0"}
 description = "Open-source home automation platform running on Python 3."
 readme = "README.rst"

View File

@@ -425,7 +425,7 @@ beautifulsoup4==4.11.1
 bellows==0.34.7
 
 # homeassistant.components.bmw_connected_drive
-bimmer_connected==0.12.0
+bimmer_connected==0.12.1
 
 # homeassistant.components.bizkaibus
 bizkaibus==0.1.1
@@ -449,7 +449,7 @@ blinkstick==1.2.0
 blockchain==1.4.4
 
 # homeassistant.components.bluemaestro
-bluemaestro-ble==0.2.1
+bluemaestro-ble==0.2.3
 
 # homeassistant.components.decora
 # homeassistant.components.zengge
@@ -658,7 +658,7 @@ enocean==0.50
 enturclient==0.2.4
 
 # homeassistant.components.environment_canada
-env_canada==0.5.27
+env_canada==0.5.28
 
 # homeassistant.components.enphase_envoy
 envoy_reader==0.20.1
@@ -976,7 +976,7 @@ influxdb-client==1.24.0
 influxdb==5.3.1
 
 # homeassistant.components.inkbird
-inkbird-ble==0.5.5
+inkbird-ble==0.5.6
 
 # homeassistant.components.insteon
 insteon-frontend-home-assistant==0.2.0
@@ -997,7 +997,7 @@ ismartgate==4.0.4
 janus==1.0.0
 
 # homeassistant.components.abode
-jaraco.abode==3.2.1
+jaraco.abode==3.3.0
 
 # homeassistant.components.jellyfin
 jellyfin-apiclient-python==1.9.2
@@ -1081,7 +1081,7 @@ london-tube-status==0.5
 luftdaten==0.7.4
 
 # homeassistant.components.lupusec
-lupupy==0.2.5
+lupupy==0.2.7
 
 # homeassistant.components.lw12wifi
 lw12==0.9.2
@@ -1299,7 +1299,7 @@ openwrt-luci-rpc==1.1.11
 openwrt-ubus-rpc==0.0.2
 
 # homeassistant.components.oralb
-oralb-ble==0.17.2
+oralb-ble==0.17.4
 
 # homeassistant.components.oru
 oru==0.1.11
@@ -1442,7 +1442,7 @@ py-schluter==0.1.7
 py-sucks==0.9.8
 
 # homeassistant.components.synology_dsm
-py-synologydsm-api==2.1.2
+py-synologydsm-api==2.1.4
 
 # homeassistant.components.zabbix
 py-zabbix==1.1.7
@@ -1702,7 +1702,7 @@ pyirishrail==0.0.2
 pyiss==1.0.1
 
 # homeassistant.components.isy994
-pyisy==3.1.11
+pyisy==3.1.13
 
 # homeassistant.components.itach
 pyitachip2ir==0.0.7
@@ -1896,7 +1896,7 @@ pyqwikswitch==0.93
 pyrail==0.0.3
 
 # homeassistant.components.rainbird
-pyrainbird==1.1.0
+pyrainbird==2.0.0
 
 # homeassistant.components.recswitch
 pyrecswitch==1.0.2
@@ -2312,10 +2312,10 @@ sense_energy==0.11.1
 sensirion-ble==0.0.1
 
 # homeassistant.components.sensorpro
-sensorpro-ble==0.5.1
+sensorpro-ble==0.5.3
 
 # homeassistant.components.sensorpush
-sensorpush-ble==1.5.2
+sensorpush-ble==1.5.5
 
 # homeassistant.components.sentry
 sentry-sdk==1.13.0
@@ -2481,7 +2481,7 @@ tesla-wall-connector==1.0.2
 thermobeacon-ble==0.6.0
 
 # homeassistant.components.thermopro
-thermopro-ble==0.4.3
+thermopro-ble==0.4.5
 
 # homeassistant.components.thermoworks_smoke
 thermoworks_smoke==0.1.8
@@ -2637,7 +2637,7 @@ xbox-webapi==2.0.11
 xboxapi==2.0.1
 
 # homeassistant.components.xiaomi_ble
-xiaomi-ble==0.15.0
+xiaomi-ble==0.16.1
 
 # homeassistant.components.knx
 xknx==2.3.0

View File

@@ -355,7 +355,7 @@ beautifulsoup4==4.11.1
 bellows==0.34.7
 
 # homeassistant.components.bmw_connected_drive
-bimmer_connected==0.12.0
+bimmer_connected==0.12.1
 
 # homeassistant.components.bluetooth
 bleak-retry-connector==2.13.0
@@ -370,7 +370,7 @@ blebox_uniapi==2.1.4
 blinkpy==0.19.2
 
 # homeassistant.components.bluemaestro
-bluemaestro-ble==0.2.1
+bluemaestro-ble==0.2.3
 
 # homeassistant.components.bluetooth
 bluetooth-adapters==0.15.2
@@ -511,7 +511,7 @@ energyzero==0.3.1
 enocean==0.50
 
 # homeassistant.components.environment_canada
-env_canada==0.5.27
+env_canada==0.5.28
 
 # homeassistant.components.enphase_envoy
 envoy_reader==0.20.1
@@ -735,7 +735,7 @@ influxdb-client==1.24.0
 influxdb==5.3.1
 
 # homeassistant.components.inkbird
-inkbird-ble==0.5.5
+inkbird-ble==0.5.6
 
 # homeassistant.components.insteon
 insteon-frontend-home-assistant==0.2.0
@@ -753,7 +753,7 @@ ismartgate==4.0.4
 janus==1.0.0
 
 # homeassistant.components.abode
-jaraco.abode==3.2.1
+jaraco.abode==3.3.0
 
 # homeassistant.components.jellyfin
 jellyfin-apiclient-python==1.9.2
@@ -947,7 +947,7 @@ openai==0.26.2
 openerz-api==0.2.0
 
 # homeassistant.components.oralb
-oralb-ble==0.17.2
+oralb-ble==0.17.4
 
 # homeassistant.components.ovo_energy
 ovoenergy==1.2.0
@@ -1051,7 +1051,7 @@ py-melissa-climate==2.1.4
 py-nightscout==1.2.2
 
 # homeassistant.components.synology_dsm
-py-synologydsm-api==2.1.2
+py-synologydsm-api==2.1.4
 
 # homeassistant.components.seventeentrack
 py17track==2021.12.2
@@ -1221,7 +1221,7 @@ pyiqvia==2022.04.0
 pyiss==1.0.1
 
 # homeassistant.components.isy994
-pyisy==3.1.11
+pyisy==3.1.13
 
 # homeassistant.components.kaleidescape
 pykaleidescape==1.0.1
@@ -1367,7 +1367,7 @@ pyps4-2ndscreen==1.3.1
 pyqwikswitch==0.93
 
 # homeassistant.components.rainbird
-pyrainbird==1.1.0
+pyrainbird==2.0.0
 
 # homeassistant.components.risco
 pyrisco==0.5.7
@@ -1627,10 +1627,10 @@ sense_energy==0.11.1
 sensirion-ble==0.0.1
 
 # homeassistant.components.sensorpro
-sensorpro-ble==0.5.1
+sensorpro-ble==0.5.3
 
 # homeassistant.components.sensorpush
-sensorpush-ble==1.5.2
+sensorpush-ble==1.5.5
 
 # homeassistant.components.sentry
 sentry-sdk==1.13.0
@@ -1745,7 +1745,7 @@ tesla-wall-connector==1.0.2
 thermobeacon-ble==0.6.0
 
 # homeassistant.components.thermopro
-thermopro-ble==0.4.3
+thermopro-ble==0.4.5
 
 # homeassistant.components.tilt_ble
 tilt-ble==0.2.3
@@ -1862,7 +1862,7 @@ wolf_smartset==0.1.11
 xbox-webapi==2.0.11
 
 # homeassistant.components.xiaomi_ble
-xiaomi-ble==0.15.0
+xiaomi-ble==0.16.1
 
 # homeassistant.components.knx
 xknx==2.3.0

View File

@@ -301,6 +301,7 @@ async def test_discover_lights(hass, hue_client):
     await hass.async_block_till_done()
     result_json = await async_get_lights(hue_client)
 
+    assert "1" not in result_json.keys()
     devices = {val["uniqueid"] for val in result_json.values()}
     assert "00:2f:d2:31:ce:c5:55:cc-ee" not in devices  # light.ceiling_lights
@@ -308,8 +309,16 @@ async def test_discover_lights(hass, hue_client):
     hass.states.async_set("light.ceiling_lights", STATE_ON)
     await hass.async_block_till_done()
     result_json = await async_get_lights(hue_client)
-    devices = {val["uniqueid"] for val in result_json.values()}
-    assert "00:2f:d2:31:ce:c5:55:cc-ee" in devices  # light.ceiling_lights
+    device = result_json["1"]  # Test that light ID did not change
+    assert device["uniqueid"] == "00:2f:d2:31:ce:c5:55:cc-ee"  # light.ceiling_lights
+    assert device["state"][HUE_API_STATE_ON] is True
+
+    # Test that returned value is fresh and not cached
+    hass.states.async_set("light.ceiling_lights", STATE_OFF)
+    await hass.async_block_till_done()
+    result_json = await async_get_lights(hue_client)
+    device = result_json["1"]
+    assert device["state"][HUE_API_STATE_ON] is False
 
 
 async def test_light_without_brightness_supported(hass_hue, hue_client):
@@ -465,8 +474,9 @@ async def test_discover_full_state(hue_client):
     # Make sure array is correct size
     assert len(result_json) == 2
-    assert len(config_json) == 6
+    assert len(config_json) == 7
     assert len(lights_json) >= 1
+    assert "name" in config_json
 
     # Make sure the config wrapper added to the config is there
     assert "mac" in config_json
@@ -505,7 +515,8 @@ async def test_discover_config(hue_client):
     config_json = await result.json()
 
     # Make sure array is correct size
-    assert len(config_json) == 6
+    assert len(config_json) == 7
+    assert "name" in config_json
 
     # Make sure the config wrapper added to the config is there
     assert "mac" in config_json

View File

@@ -3,11 +3,20 @@ from __future__ import annotations
 
 from unittest.mock import MagicMock
 
-from homeassistant.components.matter.helpers import get_device_id
+import pytest
+
+from homeassistant.components.matter.const import DOMAIN
+from homeassistant.components.matter.helpers import (
+    get_device_id,
+    get_node_from_device_entry,
+)
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import device_registry as dr
 
 from .common import setup_integration_with_node_fixture
 
+from tests.common import MockConfigEntry
+
 
 async def test_get_device_id(
     hass: HomeAssistant,
@@ -20,3 +29,42 @@ async def test_get_device_id(
     device_id = get_device_id(matter_client.server_info, node.node_devices[0])
 
     assert device_id == "00000000000004D2-0000000000000005-MatterNodeDevice"
+
+
+async def test_get_node_from_device_entry(
+    hass: HomeAssistant,
+    matter_client: MagicMock,
+) -> None:
+    """Test get_node_from_device_entry."""
+    device_registry = dr.async_get(hass)
+    other_domain = "other_domain"
+    other_config_entry = MockConfigEntry(domain=other_domain)
+    other_device_entry = device_registry.async_get_or_create(
+        config_entry_id=other_config_entry.entry_id,
+        identifiers={(other_domain, "1234")},
+    )
+    node = await setup_integration_with_node_fixture(
+        hass, "device_diagnostics", matter_client
+    )
+    config_entry = hass.config_entries.async_entries(DOMAIN)[0]
+    device_entry = dr.async_entries_for_config_entry(
+        device_registry, config_entry.entry_id
+    )[0]
+
+    assert device_entry
+
+    node_from_device_entry = await get_node_from_device_entry(hass, device_entry)
+
+    assert node_from_device_entry is node
+
+    with pytest.raises(ValueError) as value_error:
+        await get_node_from_device_entry(hass, other_device_entry)
+
+    assert f"Device {other_device_entry.id} is not a Matter device" in str(
+        value_error.value
+    )
+
+    matter_client.server_info = None
+
+    with pytest.raises(RuntimeError) as runtime_error:
+        node_from_device_entry = await get_node_from_device_entry(hass, device_entry)
+
+    assert "Matter server information is not available" in str(runtime_error.value)

View File

@@ -2,9 +2,10 @@
 from __future__ import annotations
 
 import asyncio
-from collections.abc import Generator
+from collections.abc import Awaitable, Callable, Generator
 from unittest.mock import AsyncMock, MagicMock, call, patch
 
+from aiohttp import ClientWebSocketResponse
 from matter_server.client.exceptions import CannotConnect, InvalidServerVersion
 from matter_server.common.helpers.util import dataclass_from_dict
 from matter_server.common.models.error import MatterError
@@ -16,9 +17,14 @@ from homeassistant.components.matter.const import DOMAIN
 from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState
 from homeassistant.const import STATE_UNAVAILABLE
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers import issue_registry as ir
+from homeassistant.helpers import (
+    device_registry as dr,
+    entity_registry as er,
+    issue_registry as ir,
+)
+from homeassistant.setup import async_setup_component
 
-from .common import load_and_parse_node_fixture
+from .common import load_and_parse_node_fixture, setup_integration_with_node_fixture
 
 from tests.common import MockConfigEntry
@@ -587,3 +593,76 @@ async def test_remove_entry(
     assert entry.state is ConfigEntryState.NOT_LOADED
     assert len(hass.config_entries.async_entries(DOMAIN)) == 0
     assert "Failed to uninstall the Matter Server add-on" in caplog.text
+
+
+async def test_remove_config_entry_device(
+    hass: HomeAssistant,
+    matter_client: MagicMock,
+    hass_ws_client: Callable[[HomeAssistant], Awaitable[ClientWebSocketResponse]],
+) -> None:
+    """Test that a device can be removed ok."""
+    assert await async_setup_component(hass, "config", {})
+    await setup_integration_with_node_fixture(hass, "device_diagnostics", matter_client)
+    await hass.async_block_till_done()
+
+    config_entry = hass.config_entries.async_entries(DOMAIN)[0]
+    device_registry = dr.async_get(hass)
+    device_entry = dr.async_entries_for_config_entry(
+        device_registry, config_entry.entry_id
+    )[0]
+    entity_registry = er.async_get(hass)
+    entity_id = "light.m5stamp_lighting_app"
+
+    assert device_entry
+    assert entity_registry.async_get(entity_id)
+    assert hass.states.get(entity_id)
+
+    client = await hass_ws_client(hass)
+    await client.send_json(
+        {
+            "id": 5,
+            "type": "config/device_registry/remove_config_entry",
+            "config_entry_id": config_entry.entry_id,
+            "device_id": device_entry.id,
+        }
+    )
+    response = await client.receive_json()
+
+    assert response["success"]
+    await hass.async_block_till_done()
+
+    assert not device_registry.async_get(device_entry.id)
+    assert not entity_registry.async_get(entity_id)
+    assert not hass.states.get(entity_id)
+
+
+async def test_remove_config_entry_device_no_node(
+    hass: HomeAssistant,
+    matter_client: MagicMock,
+    integration: MockConfigEntry,
+    hass_ws_client: Callable[[HomeAssistant], Awaitable[ClientWebSocketResponse]],
+) -> None:
+    """Test that a device can be removed ok without an existing node."""
+    assert await async_setup_component(hass, "config", {})
+    config_entry = integration
+    device_registry = dr.async_get(hass)
+    device_entry = device_registry.async_get_or_create(
+        config_entry_id=config_entry.entry_id,
+        identifiers={
+            (DOMAIN, "deviceid_00000000000004D2-0000000000000005-MatterNodeDevice")
+        },
+    )
+
+    client = await hass_ws_client(hass)
+    await client.send_json(
+        {
+            "id": 5,
+            "type": "config/device_registry/remove_config_entry",
+            "config_entry_id": config_entry.entry_id,
+            "device_id": device_entry.id,
+        }
+    )
+    response = await client.receive_json()
+
+    assert response["success"]
+    await hass.async_block_till_done()
+
+    assert not device_registry.async_get(device_entry.id)
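A removal via the config/device_registry/remove_config_entry WebSocket command only succeeds when the integration implements the removal hook in its __init__.py. The signature below is the standard Home Assistant hook; the always-permit body is a minimal guess, not Matter's actual logic:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr


async def async_remove_config_entry_device(
    hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry
) -> bool:
    """Return True if the registry may delete the device (sketch: always permit)."""
    return True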

View File

@@ -67,6 +67,13 @@ async def test_default_prompt(hass, mock_init_component):
     device_reg.async_update_device(
         device.id, disabled_by=device_registry.DeviceEntryDisabler.USER
     )
+    device = device_reg.async_get_or_create(
+        config_entry_id="1234",
+        connections={("test", "9876-no-name")},
+        manufacturer="Test Manufacturer NoName",
+        model="Test Model NoName",
+        suggested_area="Test Area 2",
+    )
 
     with patch("openai.Completion.create") as mock_create:
         result = await conversation.async_converse(hass, "hello", None, Context())

View File

@@ -324,7 +324,7 @@ def test_get_last_state_changes(hass_recorder):
     start = dt_util.utcnow() - timedelta(minutes=2)
     point = start + timedelta(minutes=1)
-    point2 = point + timedelta(minutes=1)
+    point2 = point + timedelta(minutes=1, seconds=1)
 
     with patch(
         "homeassistant.components.recorder.core.dt_util.utcnow", return_value=start

View File

@@ -249,7 +249,7 @@ def test_get_last_state_changes(hass_recorder):
     start = dt_util.utcnow() - timedelta(minutes=2)
     point = start + timedelta(minutes=1)
-    point2 = point + timedelta(minutes=1)
+    point2 = point + timedelta(minutes=1, seconds=1)
 
     with patch(
         "homeassistant.components.recorder.core.dt_util.utcnow", return_value=start

View File

@@ -45,3 +45,14 @@ async def test_run_history(recorder_mock, hass):
         process_timestamp(instance.run_history.get(now).start)
         == instance.run_history.recording_start
     )
+
+
+async def test_run_history_during_schema_migration(recorder_mock, hass):
+    """Test the run history during schema migration."""
+    instance = recorder.get_instance(hass)
+    run_history = instance.run_history
+    assert run_history.current.start == run_history.recording_start
+    with instance.get_session() as session:
+        run_history.start(session)
+        assert run_history.current.start == run_history.recording_start
+        assert run_history.current.created >= run_history.recording_start

View File

@@ -231,7 +231,12 @@ def test_setup_connection_for_dialect_sqlite(sqlite_version):
 
     dbapi_connection = MagicMock(cursor=_make_cursor_mock)
 
-    util.setup_connection_for_dialect(instance_mock, "sqlite", dbapi_connection, True)
+    assert (
+        util.setup_connection_for_dialect(
+            instance_mock, "sqlite", dbapi_connection, True
+        )
+        is not None
+    )
 
     assert len(execute_args) == 5
     assert execute_args[0] == "PRAGMA journal_mode=WAL"
@@ -241,7 +246,12 @@ def test_setup_connection_for_dialect_sqlite(sqlite_version):
     assert execute_args[4] == "PRAGMA foreign_keys=ON"
 
     execute_args = []
-    util.setup_connection_for_dialect(instance_mock, "sqlite", dbapi_connection, False)
+    assert (
+        util.setup_connection_for_dialect(
+            instance_mock, "sqlite", dbapi_connection, False
+        )
+        is None
+    )
 
     assert len(execute_args) == 3
     assert execute_args[0] == "PRAGMA cache_size = -16384"
@@ -276,7 +286,12 @@ def test_setup_connection_for_dialect_sqlite_zero_commit_interval(
 
     dbapi_connection = MagicMock(cursor=_make_cursor_mock)
 
-    util.setup_connection_for_dialect(instance_mock, "sqlite", dbapi_connection, True)
+    assert (
+        util.setup_connection_for_dialect(
+            instance_mock, "sqlite", dbapi_connection, True
+        )
+        is not None
+    )
 
     assert len(execute_args) == 5
     assert execute_args[0] == "PRAGMA journal_mode=WAL"
@@ -286,7 +301,12 @@ def test_setup_connection_for_dialect_sqlite_zero_commit_interval(
     assert execute_args[4] == "PRAGMA foreign_keys=ON"
 
     execute_args = []
-    util.setup_connection_for_dialect(instance_mock, "sqlite", dbapi_connection, False)
+    assert (
+        util.setup_connection_for_dialect(
+            instance_mock, "sqlite", dbapi_connection, False
+        )
+        is None
+    )
 
     assert len(execute_args) == 3
     assert execute_args[0] == "PRAGMA cache_size = -16384"
@@ -444,11 +464,13 @@ def test_supported_pgsql(caplog, pgsql_version):
 
     dbapi_connection = MagicMock(cursor=_make_cursor_mock)
 
-    util.setup_connection_for_dialect(
+    database_engine = util.setup_connection_for_dialect(
         instance_mock, "postgresql", dbapi_connection, True
     )
 
     assert "minimum supported version" not in caplog.text
+    assert database_engine is not None
+    assert database_engine.optimizer.slow_range_in_select is True
@@ -525,9 +547,13 @@ def test_supported_sqlite(caplog, sqlite_version):
 
     dbapi_connection = MagicMock(cursor=_make_cursor_mock)
 
-    util.setup_connection_for_dialect(instance_mock, "sqlite", dbapi_connection, True)
+    database_engine = util.setup_connection_for_dialect(
+        instance_mock, "sqlite", dbapi_connection, True
+    )
 
     assert "minimum supported version" not in caplog.text
+    assert database_engine is not None
+    assert database_engine.optimizer.slow_range_in_select is False
@@ -599,7 +625,7 @@ async def test_issue_for_mariadb_with_MDEV_25020(
 
     dbapi_connection = MagicMock(cursor=_make_cursor_mock)
 
-    await hass.async_add_executor_job(
+    database_engine = await hass.async_add_executor_job(
         util.setup_connection_for_dialect,
         instance_mock,
         "mysql",
@@ -613,6 +639,9 @@ async def test_issue_for_mariadb_with_MDEV_25020(
     assert issue is not None
     assert issue.translation_placeholders == {"min_version": min_version}
 
+    assert database_engine is not None
+    assert database_engine.optimizer.slow_range_in_select is True
+
 
 @pytest.mark.parametrize(
     "mysql_version",
@@ -649,7 +678,7 @@ async def test_no_issue_for_mariadb_with_MDEV_25020(hass, caplog, mysql_version)
 
     dbapi_connection = MagicMock(cursor=_make_cursor_mock)
 
-    await hass.async_add_executor_job(
+    database_engine = await hass.async_add_executor_job(
         util.setup_connection_for_dialect,
         instance_mock,
         "mysql",
@@ -662,6 +691,9 @@ async def test_no_issue_for_mariadb_with_MDEV_25020(hass, caplog, mysql_version)
     issue = registry.async_get_issue(DOMAIN, "maria_db_range_index_regression")
     assert issue is None
 
+    assert database_engine is not None
+    assert database_engine.optimizer.slow_range_in_select is False
+
 
 def test_basic_sanity_check(hass_recorder, recorder_db_url):
     """Test the basic sanity checks with a missing table."""

View File

@@ -193,6 +193,7 @@ async def test_uptime_sensors(
     hass,
     aioclient_mock,
     mock_unifi_websocket,
+    entity_registry_enabled_by_default,
     initial_uptime,
     event_uptime,
     new_uptime,
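Because the uptime sensors are now disabled by default (see the entity_registry_enabled_default=False hunk earlier in this commit), these tests opt back in via the entity_registry_enabled_by_default fixture. One plausible shape for such a fixture; the real one ships in Home Assistant's shared test conftest:

from collections.abc import Generator
from unittest.mock import PropertyMock, patch

import pytest


@pytest.fixture
def entity_registry_enabled_by_default() -> Generator[None, None, None]:
    # Force every entity to report enabled-by-default for the test's duration.
    with patch(
        "homeassistant.helpers.entity.Entity.entity_registry_enabled_default",
        new_callable=PropertyMock,
        return_value=True,
    ):
        yield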
@@ -263,7 +264,9 @@ async def test_uptime_sensors(
     assert hass.states.get("sensor.client1_uptime") is None
 
 
-async def test_remove_sensors(hass, aioclient_mock, mock_unifi_websocket):
+async def test_remove_sensors(
+    hass, aioclient_mock, mock_unifi_websocket, entity_registry_enabled_by_default
+):
     """Verify removing of clients work as expected."""
     wired_client = {
         "hostname": "Wired client",