Compare commits

...

53 Commits

Author SHA1 Message Date
Erik
4fcebddf23 Make attributes of area registry items immutable 2025-11-24 11:05:24 +01:00
Retha Runolfsson
c0e59c4508 Add support for switchbot s20 (#156368) 2025-11-11 13:55:50 +01:00
Erik Montnemery
cd379aadbf Use pytest.mark.freeze_time in sensibo tests (#156348) 2025-11-11 13:52:19 +01:00
antoniocifu
ccdd54b187 Fix support for Hyperion 2.1.1 (#156343)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-11-11 13:18:35 +01:00
Marc Mueller
3f22dbaa2e Update pytest to 9.0.0 (#156365) 2025-11-11 13:18:09 +01:00
Retha Runolfsson
c18dc0a9ab Add support for Switchbot Smart thermostat radiator (#155123) 2025-11-11 13:12:39 +01:00
Erik Montnemery
f0e4296d93 Use pytest.mark.freeze_time in sensor tests (#156349) 2025-11-11 13:05:52 +01:00
Erik Montnemery
b3750109c6 Use pytest.mark.freeze_time in playstation_network tests (#156347) 2025-11-11 13:05:38 +01:00
Erik Montnemery
93025c9845 Use pytest.mark.freeze_time in pglab tests (#156346) 2025-11-11 13:05:17 +01:00
Erik Montnemery
df348644b1 Use pytest.mark.freeze_time in openai_conversation tests (#156345) 2025-11-11 13:05:02 +01:00
Erik Montnemery
8749b0d750 Use pytest.mark.freeze_time in smhi tests (#156352) 2025-11-11 13:02:21 +01:00
Erik Montnemery
a6a1519c06 Use pytest.mark.freeze_time in snoo tests (#156353) 2025-11-11 13:02:01 +01:00
Erik Montnemery
3068e19843 Use pytest.mark.freeze_time in telegram_bot tests (#156354) 2025-11-11 13:01:34 +01:00
Erik Montnemery
55feb1e735 Use pytest.mark.freeze_time in tomorrowio tests (#156355) 2025-11-11 13:01:29 +01:00
Erik Montnemery
bb7dc69131 Use pytest.mark.freeze_time in yale_smart_alarm tests (#156359) 2025-11-11 12:06:22 +01:00
Erik Montnemery
aa9003a524 Use pytest.mark.freeze_time in wake_word tests (#156360) 2025-11-11 12:06:12 +01:00
Erik Montnemery
4e9da5249d Use pytest.mark.freeze_time in utility_meter tests (#156361) 2025-11-11 12:05:58 +01:00
Erik Montnemery
f502739df2 Use pytest.mark.freeze_time in zha tests (#156358) 2025-11-11 12:04:59 +01:00
Erik Montnemery
0f2ff29378 Use pytest.mark.freeze_time in sleep_as_android tests (#156351) 2025-11-11 12:04:40 +01:00
Erik Montnemery
2921e7ed3c Use pytest.mark.freeze_time in plaato tests (#156362) 2025-11-11 12:04:31 +01:00
Christopher Fenner
25d44e8d37 Enhance compressor phase with state translations in ViCare integration (#156238) 2025-11-11 11:20:27 +01:00
Will Moss
0a480a26a3 Remove import of config_entry_oauth2_flow in scaffold in favor of direct imports (#156302) 2025-11-11 11:17:31 +01:00
Khole
d5da64dd8d Bump pyhive to 1.0.7 (#156309) 2025-11-11 11:16:11 +01:00
wollew
92adcd8635 add the velux KLF 200 gateway as device (#155434)
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2025-11-11 11:13:18 +01:00
Joost Lekkerkerker
ee0c4b15c2 Make certain fields required for subentry flows (#156251) 2025-11-11 09:42:51 +01:00
Erik Montnemery
507f54198e Use pytest.mark.freeze_time in habitica tests (#156332) 2025-11-11 09:37:17 +01:00
epenet
0ed342b433 Use dpcode_wrapper in tuya alarm control panel platform (#156306) 2025-11-11 09:36:09 +01:00
cdnninja
363c86faf3 Add remove entity to vesync (#156213) 2025-11-11 09:35:19 +01:00
dependabot[bot]
095a7ad060 Bump actions/dependency-review-action from 4.8.1 to 4.8.2 (#156322)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-11 09:34:38 +01:00
Åke Strandberg
ab5981bbbd Use common string for OAuth2 implementation error in myuplink (#156338) 2025-11-11 09:33:59 +01:00
Erik Montnemery
ac2fb53dfd Fix typo in recorder statistics_meta table manager (#156326) 2025-11-11 09:33:30 +01:00
Erik Montnemery
02ff5de1ff Use pytest.mark.freeze_time in ntfy tests (#156336) 2025-11-11 09:33:21 +01:00
Erik Montnemery
5cd5d480d9 Check collation of statistics_meta DB table (#156327) 2025-11-11 09:31:43 +01:00
Erik Montnemery
a3c7d772fc Use pytest.mark.freeze_time in conversation tests (#156329) 2025-11-11 09:29:46 +01:00
micha91
fe0c69dba7 Update aiomusiccast to 0.15 (#156325) 2025-11-11 09:26:16 +01:00
Artur Pragacz
e5365234c3 Add myself as codeowner to music assistant (#156324) 2025-11-11 09:24:09 +01:00
Erik Montnemery
1531175bd3 Use pytest.mark.freeze_time in google tests (#156330) 2025-11-11 09:22:48 +01:00
Erik Montnemery
62add59ff4 Use pytest.mark.freeze_time in google_generative_ai_conversation tests (#156331) 2025-11-11 09:21:52 +01:00
Erik Montnemery
d8daca657b Use pytest.mark.freeze_time in intellifire tests (#156333) 2025-11-11 10:17:58 +02:00
Erik Montnemery
1891da46ea Use pytest.mark.freeze_time in knx tests (#156335) 2025-11-11 08:52:39 +01:00
Marc Mueller
22ae894745 Update pytest-asyncio to 1.3.0 (#156315) 2025-11-10 22:07:02 -08:00
Will Moss
160810c69d Move oauth2_implementation_unavailable string to top level (#156299) 2025-11-11 06:58:24 +01:00
epenet
2ae23b920a Use dpcode_wrapper in tuya siren platform (#156284) 2025-11-10 23:06:14 +01:00
Artur Pragacz
a7edfb082f Move config intents to manager (#154903) 2025-11-10 16:04:25 -06:00
Ludovic BOUÉ
3ac203b05f Add Matter Aqara W100 fixture (#156305)
- Adds JSON fixture file containing Matter node data for the Aqara W100 sensor
- Updates test configuration to include the new fixture in parametrized tests
- Adds snapshot test data for sensor and button entities created by this device
2025-11-10 21:58:18 +01:00
Jan Bouwhuis
7c3eb19fc4 Fix issues() template method returns non active issues (#156274) 2025-11-10 21:56:57 +01:00
kingy444
70c6fac743 Move hunterdouglas_powerview data class to upstream library (#156228)
Co-authored-by: J. Nick Koston <nick@koston.org>
2025-11-10 14:49:00 -06:00
Åke Strandberg
e19d7250d5 Adjust user-facing string for miele (#156280) 2025-11-10 20:42:42 +01:00
Maikel Punie
a850d5dba7 Bump velbusaio to 2025.11.0 (#156293) 2025-11-10 21:25:00 +02:00
Erik Montnemery
0cf0f10654 Correct migration to recorder schema 51 (#156267)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2025-11-10 20:14:25 +01:00
Ludovic BOUÉ
8429f154ca Fix status checks in Matter binary sensors (#156276)
This PR fixes bitmap bit checking logic in Matter binary sensors by replacing equality comparisons with bitwise AND operations. The changes correct how the integration checks if specific bits are set in bitmap fields.

Key changes:

Changed equality checks (==) to bitwise AND operations (&) for checking bitmap bits
Wrapped bitwise operations with bool() to ensure boolean return values
Applied fixes consistently across PumpStatus, DishwasherAlarm, and RefrigeratorAlarm bitmaps
2025-11-10 19:45:17 +01:00
Assaf Inbal
7b4f5ad362 Ituran: Don't cache properties (#156281) 2025-11-10 19:24:58 +02:00
David Rapan
583b439557 Add Shelly number translation (#156156)
Signed-off-by: David Rapan <david@rapan.cz>
2025-11-10 19:15:16 +02:00
115 changed files with 4186 additions and 727 deletions

View File

@@ -622,7 +622,7 @@ jobs:
steps:
- *checkout
- name: Dependency review
uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # v4.8.1
uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
with:
license-check: false # We use our own license audit checks

4
CODEOWNERS generated
View File

@@ -1017,8 +1017,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/msteams/ @peroyvind
/homeassistant/components/mullvad/ @meichthys
/tests/components/mullvad/ @meichthys
/homeassistant/components/music_assistant/ @music-assistant
/tests/components/music_assistant/ @music-assistant
/homeassistant/components/music_assistant/ @music-assistant @arturpragacz
/tests/components/music_assistant/ @music-assistant @arturpragacz
/homeassistant/components/mutesync/ @currentoor
/tests/components/mutesync/ @currentoor
/homeassistant/components/my/ @home-assistant/core

View File

@@ -4,7 +4,7 @@ from __future__ import annotations
from collections.abc import Callable
import logging
from typing import Literal
from typing import Any, Literal
from hassil.recognize import RecognizeResult
import voluptuous as vol
@@ -21,6 +21,7 @@ from homeassistant.core import (
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_validation as cv, intent
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.reload import async_integration_yaml_config
from homeassistant.helpers.typing import ConfigType
from homeassistant.loader import bind_hass
@@ -52,6 +53,8 @@ from .const import (
DATA_COMPONENT,
DOMAIN,
HOME_ASSISTANT_AGENT,
METADATA_CUSTOM_FILE,
METADATA_CUSTOM_SENTENCE,
SERVICE_PROCESS,
SERVICE_RELOAD,
ConversationEntityFeature,
@@ -266,10 +269,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
hass.data[DATA_COMPONENT] = entity_component
agent_config = config.get(DOMAIN, {})
await async_setup_default_agent(
hass, entity_component, config_intents=agent_config.get("intents", {})
)
manager = get_agent_manager(hass)
hass_config_path = hass.config.path()
config_intents = _get_config_intents(config, hass_config_path)
manager.update_config_intents(config_intents)
await async_setup_default_agent(hass, entity_component)
async def handle_process(service: ServiceCall) -> ServiceResponse:
"""Parse text into commands."""
@@ -294,9 +300,16 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
async def handle_reload(service: ServiceCall) -> None:
"""Reload intents."""
agent = get_agent_manager(hass).default_agent
language = service.data.get(ATTR_LANGUAGE)
if language is None:
conf = await async_integration_yaml_config(hass, DOMAIN)
if conf is not None:
config_intents = _get_config_intents(conf, hass_config_path)
manager.update_config_intents(config_intents)
agent = manager.default_agent
if agent is not None:
await agent.async_reload(language=service.data.get(ATTR_LANGUAGE))
await agent.async_reload(language=language)
hass.services.async_register(
DOMAIN,
@@ -313,6 +326,27 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
return True
def _get_config_intents(config: ConfigType, hass_config_path: str) -> dict[str, Any]:
"""Return config intents."""
intents = config.get(DOMAIN, {}).get("intents", {})
return {
"intents": {
intent_name: {
"data": [
{
"sentences": sentences,
"metadata": {
METADATA_CUSTOM_SENTENCE: True,
METADATA_CUSTOM_FILE: hass_config_path,
},
}
]
}
for intent_name, sentences in intents.items()
}
}
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
return await hass.data[DATA_COMPONENT].async_setup_entry(entry)

View File

@@ -147,6 +147,7 @@ class AgentManager:
self.hass = hass
self._agents: dict[str, AbstractConversationAgent] = {}
self.default_agent: DefaultAgent | None = None
self.config_intents: dict[str, Any] = {}
self.triggers_details: list[TriggerDetails] = []
@callback
@@ -199,9 +200,16 @@ class AgentManager:
async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
"""Set up the default agent."""
agent.update_config_intents(self.config_intents)
agent.update_triggers(self.triggers_details)
self.default_agent = agent
def update_config_intents(self, intents: dict[str, Any]) -> None:
"""Update config intents."""
self.config_intents = intents
if self.default_agent is not None:
self.default_agent.update_config_intents(intents)
def register_trigger(self, trigger_details: TriggerDetails) -> CALLBACK_TYPE:
"""Register a trigger."""
self.triggers_details.append(trigger_details)

View File

@@ -30,3 +30,7 @@ class ConversationEntityFeature(IntFlag):
"""Supported features of the conversation entity."""
CONTROL = 1
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"

View File

@@ -77,7 +77,12 @@ from homeassistant.util.json import JsonObjectType, json_loads_object
from .agent_manager import get_agent_manager
from .chat_log import AssistantContent, ChatLog
from .const import DOMAIN, ConversationEntityFeature
from .const import (
DOMAIN,
METADATA_CUSTOM_FILE,
METADATA_CUSTOM_SENTENCE,
ConversationEntityFeature,
)
from .entity import ConversationEntity
from .models import ConversationInput, ConversationResult
from .trace import ConversationTraceEventType, async_conversation_trace_append
@@ -91,8 +96,6 @@ _ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]
_DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
METADATA_FUZZY_MATCH = "hass_fuzzy_match"
ERROR_SENTINEL = object()
@@ -202,10 +205,9 @@ class IntentCache:
async def async_setup_default_agent(
hass: HomeAssistant,
entity_component: EntityComponent[ConversationEntity],
config_intents: dict[str, Any],
) -> None:
"""Set up entity registry listener for the default agent."""
agent = DefaultAgent(hass, config_intents)
agent = DefaultAgent(hass)
await entity_component.async_add_entities([agent])
await get_agent_manager(hass).async_setup_default_agent(agent)
@@ -230,14 +232,14 @@ class DefaultAgent(ConversationEntity):
_attr_name = "Home Assistant"
_attr_supported_features = ConversationEntityFeature.CONTROL
def __init__(self, hass: HomeAssistant, config_intents: dict[str, Any]) -> None:
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize the default agent."""
self.hass = hass
self._lang_intents: dict[str, LanguageIntents | object] = {}
self._load_intents_lock = asyncio.Lock()
# intent -> [sentences]
self._config_intents: dict[str, Any] = config_intents
# Intents from common conversation config
self._config_intents: dict[str, Any] = {}
# Sentences that will trigger a callback (skipping intent recognition)
self._triggers_details: list[TriggerDetails] = []
@@ -1035,6 +1037,14 @@ class DefaultAgent(ConversationEntity):
# Intents have changed, so we must clear the cache
self._intent_cache.clear()
@callback
def update_config_intents(self, intents: dict[str, Any]) -> None:
"""Update config intents."""
self._config_intents = intents
# Intents have changed, so we must clear the cache
self._intent_cache.clear()
async def async_prepare(self, language: str | None = None) -> None:
"""Load intents for a language."""
if language is None:
@@ -1159,33 +1169,10 @@ class DefaultAgent(ConversationEntity):
custom_sentences_path,
)
# Load sentences from HA config for default language only
if self._config_intents and (
self.hass.config.language in (language, language_variant)
):
hass_config_path = self.hass.config.path()
merge_dict(
intents_dict,
{
"intents": {
intent_name: {
"data": [
{
"sentences": sentences,
"metadata": {
METADATA_CUSTOM_SENTENCE: True,
METADATA_CUSTOM_FILE: hass_config_path,
},
}
]
}
for intent_name, sentences in self._config_intents.items()
}
},
)
_LOGGER.debug(
"Loaded intents from configuration.yaml",
)
merge_dict(
intents_dict,
self._config_intents,
)
if not intents_dict:
return None

View File

@@ -9,5 +9,5 @@
},
"iot_class": "cloud_polling",
"loggers": ["apyhiveapi"],
"requirements": ["pyhive-integration==1.0.6"]
"requirements": ["pyhive-integration==1.0.7"]
}

View File

@@ -1237,7 +1237,7 @@
"message": "Error obtaining data from the API: {error}"
},
"oauth2_implementation_unavailable": {
"message": "OAuth2 implementation temporarily unavailable, will retry"
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
},
"pause_program": {
"message": "Error pausing program: {error}"

View File

@@ -4,6 +4,7 @@ import logging
from typing import TYPE_CHECKING
from aiopvapi.resources.model import PowerviewData
from aiopvapi.resources.shade_data import PowerviewShadeData
from aiopvapi.rooms import Rooms
from aiopvapi.scenes import Scenes
from aiopvapi.shades import Shades
@@ -16,7 +17,6 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er
from .const import DOMAIN, HUB_EXCEPTIONS, MANUFACTURER
from .coordinator import PowerviewShadeUpdateCoordinator
from .model import PowerviewConfigEntry, PowerviewEntryData
from .shade_data import PowerviewShadeData
from .util import async_connect_hub
PARALLEL_UPDATES = 1

View File

@@ -8,6 +8,7 @@ import logging
from aiopvapi.helpers.aiorequest import PvApiMaintenance
from aiopvapi.hub import Hub
from aiopvapi.resources.shade_data import PowerviewShadeData
from aiopvapi.shades import Shades
from homeassistant.config_entries import ConfigEntry
@@ -15,7 +16,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import HUB_EXCEPTIONS
from .shade_data import PowerviewShadeData
_LOGGER = logging.getLogger(__name__)

View File

@@ -208,13 +208,13 @@ class PowerViewShadeBase(ShadeEntity, CoverEntity):
async def _async_execute_move(self, move: ShadePosition) -> None:
"""Execute a move that can affect multiple positions."""
_LOGGER.debug("Move request %s: %s", self.name, move)
# Store the requested positions so subsequent move
# requests contain the secondary shade positions
self.data.update_shade_position(self._shade.id, move)
async with self.coordinator.radio_operation_lock:
response = await self._shade.move(move)
_LOGGER.debug("Move response %s: %s", self.name, response)
# Process the response from the hub (including new positions)
self.data.update_shade_position(self._shade.id, response)
async def _async_set_cover_position(self, target_hass_position: int) -> None:
"""Move the shade to a position."""
target_hass_position = self._clamp_cover_limit(target_hass_position)

View File

@@ -3,6 +3,7 @@
import logging
from aiopvapi.resources.shade import BaseShade, ShadePosition
from aiopvapi.resources.shade_data import PowerviewShadeData
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
@@ -11,7 +12,6 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .coordinator import PowerviewShadeUpdateCoordinator
from .model import PowerviewDeviceInfo
from .shade_data import PowerviewShadeData
_LOGGER = logging.getLogger(__name__)

View File

@@ -1,80 +0,0 @@
"""Shade data for the Hunter Douglas PowerView integration."""
from __future__ import annotations
from dataclasses import fields
from typing import Any
from aiopvapi.resources.model import PowerviewData
from aiopvapi.resources.shade import BaseShade, ShadePosition
from .util import async_map_data_by_id
POSITION_FIELDS = [field for field in fields(ShadePosition) if field.name != "velocity"]
def copy_position_data(source: ShadePosition, target: ShadePosition) -> ShadePosition:
"""Copy position data from source to target for None values only."""
for field in POSITION_FIELDS:
if (value := getattr(source, field.name)) is not None:
setattr(target, field.name, value)
class PowerviewShadeData:
"""Coordinate shade data between multiple api calls."""
def __init__(self) -> None:
"""Init the shade data."""
self._raw_data_by_id: dict[int, dict[str | int, Any]] = {}
self._shade_group_data_by_id: dict[int, BaseShade] = {}
self.positions: dict[int, ShadePosition] = {}
def get_raw_data(self, shade_id: int) -> dict[str | int, Any]:
"""Get data for the shade."""
return self._raw_data_by_id[shade_id]
def get_all_raw_data(self) -> dict[int, dict[str | int, Any]]:
"""Get data for all shades."""
return self._raw_data_by_id
def get_shade(self, shade_id: int) -> BaseShade:
"""Get specific shade from the coordinator."""
return self._shade_group_data_by_id[shade_id]
def get_shade_position(self, shade_id: int) -> ShadePosition:
"""Get positions for a shade."""
if shade_id not in self.positions:
shade_position = ShadePosition()
# If we have the group data, use it to populate the initial position
if shade := self._shade_group_data_by_id.get(shade_id):
copy_position_data(shade.current_position, shade_position)
self.positions[shade_id] = shade_position
return self.positions[shade_id]
def update_from_group_data(self, shade_id: int) -> None:
"""Process an update from the group data."""
data = self._shade_group_data_by_id[shade_id]
copy_position_data(data.current_position, self.get_shade_position(data.id))
def store_group_data(self, shade_data: PowerviewData) -> None:
"""Store data from the all shades endpoint.
This does not update the shades or positions (self.positions)
as the data may be stale. update_from_group_data
with a shade_id will update a specific shade
from the group data.
"""
self._shade_group_data_by_id = shade_data.processed
self._raw_data_by_id = async_map_data_by_id(shade_data.raw)
def update_shade_position(self, shade_id: int, new_position: ShadePosition) -> None:
"""Update a single shades position."""
copy_position_data(new_position, self.get_shade_position(shade_id))
def update_shade_velocity(self, shade_id: int, shade_data: ShadePosition) -> None:
"""Update a single shades velocity."""
# the hub will always return a velocity of 0 on initial connect,
# separate definition to store consistent value in HA
# this value is purely driven from HA
if shade_data.velocity is not None:
self.get_shade_position(shade_id).velocity = shade_data.velocity

View File

@@ -2,25 +2,15 @@
from __future__ import annotations
from collections.abc import Iterable
from typing import Any
from aiopvapi.helpers.aiorequest import AioRequest
from aiopvapi.helpers.constants import ATTR_ID
from aiopvapi.hub import Hub
from homeassistant.core import HomeAssistant, callback
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .model import PowerviewAPI, PowerviewDeviceInfo
@callback
def async_map_data_by_id(data: Iterable[dict[str | int, Any]]):
"""Return a dict with the key being the id for a list of entries."""
return {entry[ATTR_ID]: entry for entry in data}
async def async_connect_hub(
hass: HomeAssistant, address: str, api_version: int | None = None
) -> PowerviewAPI:

View File

@@ -13,6 +13,7 @@ from typing import Any
from aiohttp import web
from hyperion import client
from hyperion.const import (
KEY_DATA,
KEY_IMAGE,
KEY_IMAGE_STREAM,
KEY_LEDCOLORS,
@@ -155,7 +156,8 @@ class HyperionCamera(Camera):
"""Update Hyperion components."""
if not img:
return
img_data = img.get(KEY_RESULT, {}).get(KEY_IMAGE)
# Prefer KEY_DATA (Hyperion server >= 2.1.1); fall back to KEY_RESULT for older server versions
img_data = img.get(KEY_DATA, img.get(KEY_RESULT, {})).get(KEY_IMAGE)
if not img_data or not img_data.startswith(IMAGE_STREAM_JPG_SENTINEL):
return
async with self._image_cond:

View File

@@ -5,7 +5,6 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from propcache.api import cached_property
from pyituran import Vehicle
from homeassistant.components.binary_sensor import (
@@ -69,7 +68,7 @@ class IturanBinarySensor(IturanBaseEntity, BinarySensorEntity):
super().__init__(coordinator, license_plate, description.key)
self.entity_description = description
@cached_property
@property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
return self.entity_description.value_fn(self.vehicle)

View File

@@ -2,8 +2,6 @@
from __future__ import annotations
from propcache.api import cached_property
from homeassistant.components.device_tracker import TrackerEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -40,12 +38,12 @@ class IturanDeviceTracker(IturanBaseEntity, TrackerEntity):
"""Initialize the device tracker."""
super().__init__(coordinator, license_plate, "device_tracker")
@cached_property
@property
def latitude(self) -> float | None:
"""Return latitude value of the device."""
return self.vehicle.gps_coordinates[0]
@cached_property
@property
def longitude(self) -> float | None:
"""Return longitude value of the device."""
return self.vehicle.gps_coordinates[1]

View File

@@ -6,7 +6,6 @@ from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from propcache.api import cached_property
from pyituran import Vehicle
from homeassistant.components.sensor import (
@@ -133,7 +132,7 @@ class IturanSensor(IturanBaseEntity, SensorEntity):
super().__init__(coordinator, license_plate, description.key)
self.entity_description = description
@cached_property
@property
def native_value(self) -> StateType | datetime:
"""Return the state of the device."""
return self.entity_description.value_fn(self.vehicle)

View File

@@ -353,17 +353,13 @@ DISCOVERY_SCHEMAS = [
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
# DeviceFault or SupplyFault bit enabled
device_to_ha={
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kDeviceFault: True,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSupplyFault: True,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSpeedLow: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSpeedHigh: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kLocalOverride: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemotePressure: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemoteFlow: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemoteTemperature: False,
}.get,
device_to_ha=lambda x: bool(
x
& (
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kDeviceFault
| clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSupplyFault
)
),
),
entity_class=MatterBinarySensor,
required_attributes=(
@@ -377,9 +373,9 @@ DISCOVERY_SCHEMAS = [
key="PumpStatusRunning",
translation_key="pump_running",
device_class=BinarySensorDeviceClass.RUNNING,
device_to_ha=lambda x: (
device_to_ha=lambda x: bool(
x
== clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning
& clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning
),
),
entity_class=MatterBinarySensor,
@@ -395,8 +391,8 @@ DISCOVERY_SCHEMAS = [
translation_key="dishwasher_alarm_inflow",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
device_to_ha=lambda x: (
x == clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kInflowError
device_to_ha=lambda x: bool(
x & clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kInflowError
),
),
entity_class=MatterBinarySensor,
@@ -410,8 +406,8 @@ DISCOVERY_SCHEMAS = [
translation_key="alarm_door",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
device_to_ha=lambda x: (
x == clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kDoorError
device_to_ha=lambda x: bool(
x & clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kDoorError
),
),
entity_class=MatterBinarySensor,
@@ -481,8 +477,8 @@ DISCOVERY_SCHEMAS = [
translation_key="alarm_door",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
device_to_ha=lambda x: (
x == clusters.RefrigeratorAlarm.Bitmaps.AlarmBitmap.kDoorOpen
device_to_ha=lambda x: bool(
x & clusters.RefrigeratorAlarm.Bitmaps.AlarmBitmap.kDoorOpen
),
),
entity_class=MatterBinarySensor,

View File

@@ -1009,7 +1009,7 @@
"cleaning_care_program": "Cleaning/care program",
"maintenance_program": "Maintenance program",
"normal_operation_mode": "Normal operation mode",
"own_program": "Own program"
"own_program": "Program"
}
},
"remaining_time": {
@@ -1089,7 +1089,7 @@
"message": "Invalid device targeted."
},
"oauth2_implementation_unavailable": {
"message": "OAuth2 implementation unavailable, will retry"
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
},
"set_program_error": {
"message": "'Set program' action failed: {status} / {message}"

View File

@@ -2,7 +2,7 @@
"domain": "music_assistant",
"name": "Music Assistant",
"after_dependencies": ["media_source", "media_player"],
"codeowners": ["@music-assistant"],
"codeowners": ["@music-assistant", "@arturpragacz"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/music_assistant",
"iot_class": "local_push",

View File

@@ -57,7 +57,7 @@
"message": "Error while loading the integration."
},
"implementation_unavailable": {
"message": "OAuth2 implementation is not available, will retry."
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
},
"incorrect_oauth2_scope": {
"message": "Stored permissions are invalid. Please login again to update permissions."

View File

@@ -26,6 +26,9 @@ def validate_db_schema(instance: Recorder) -> set[str]:
schema_errors |= validate_table_schema_supports_utf8(
instance, StatisticsMeta, (StatisticsMeta.statistic_id,)
)
schema_errors |= validate_table_schema_has_correct_collation(
instance, StatisticsMeta
)
for table in (Statistics, StatisticsShortTerm):
schema_errors |= validate_db_schema_precision(instance, table)
schema_errors |= validate_table_schema_has_correct_collation(instance, table)

View File

@@ -54,7 +54,7 @@ CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36
EVENT_TYPE_IDS_SCHEMA_VERSION = 37
STATES_META_SCHEMA_VERSION = 38
CIRCULAR_MEAN_SCHEMA_VERSION = 49
UNIT_CLASS_SCHEMA_VERSION = 51
UNIT_CLASS_SCHEMA_VERSION = 52
LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28
LEGACY_STATES_EVENT_FOREIGN_KEYS_FIXED_SCHEMA_VERSION = 43

View File

@@ -71,7 +71,7 @@ class LegacyBase(DeclarativeBase):
"""Base class for tables, used for schema migration."""
SCHEMA_VERSION = 51
SCHEMA_VERSION = 52
_LOGGER = logging.getLogger(__name__)

View File

@@ -13,7 +13,15 @@ from typing import TYPE_CHECKING, Any, TypedDict, cast, final
from uuid import UUID
import sqlalchemy
from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text, update
from sqlalchemy import (
ForeignKeyConstraint,
MetaData,
Table,
cast as cast_,
func,
text,
update,
)
from sqlalchemy.engine import CursorResult, Engine
from sqlalchemy.exc import (
DatabaseError,
@@ -26,8 +34,9 @@ from sqlalchemy.exc import (
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.orm.session import Session
from sqlalchemy.schema import AddConstraint, CreateTable, DropConstraint
from sqlalchemy.sql.expression import true
from sqlalchemy.sql.expression import and_, true
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.types import BINARY
from homeassistant.core import HomeAssistant
from homeassistant.util.enum import try_parse_enum
@@ -2044,14 +2053,74 @@ class _SchemaVersion50Migrator(_SchemaVersionMigrator, target_version=50):
class _SchemaVersion51Migrator(_SchemaVersionMigrator, target_version=51):
def _apply_update(self) -> None:
"""Version specific update method."""
# Add unit class column to StatisticsMeta
# Replaced with version 52 which corrects issues with MySQL string comparisons.
class _SchemaVersion52Migrator(_SchemaVersionMigrator, target_version=52):
def _apply_update(self) -> None:
"""Version specific update method."""
if self.engine.dialect.name == SupportedDialect.MYSQL:
self._apply_update_mysql()
else:
self._apply_update_postgresql_sqlite()
def _apply_update_mysql(self) -> None:
"""Version specific update method for mysql."""
_add_columns(self.session_maker, "statistics_meta", ["unit_class VARCHAR(255)"])
with session_scope(session=self.session_maker()) as session:
connection = session.connection()
for conv in _PRIMARY_UNIT_CONVERTERS:
case_sensitive_units = {
u.encode("utf-8") if u else u for u in conv.VALID_UNITS
}
# Reset unit_class to None for entries that do not match
# the valid units (case sensitive) but matched before due to
# case insensitive comparisons.
connection.execute(
update(StatisticsMeta)
.where(StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS))
.where(
and_(
StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS),
cast_(StatisticsMeta.unit_of_measurement, BINARY).not_in(
case_sensitive_units
),
)
)
.values(unit_class=None)
)
# Do an explicitly case sensitive match (actually binary) to set the
# correct unit_class. This is needed because we use the case sensitive
# utf8mb4_unicode_ci collation.
connection.execute(
update(StatisticsMeta)
.where(
and_(
cast_(StatisticsMeta.unit_of_measurement, BINARY).in_(
case_sensitive_units
),
StatisticsMeta.unit_class.is_(None),
)
)
.values(unit_class=conv.UNIT_CLASS)
)
def _apply_update_postgresql_sqlite(self) -> None:
"""Version specific update method for postgresql and sqlite."""
_add_columns(self.session_maker, "statistics_meta", ["unit_class VARCHAR(255)"])
with session_scope(session=self.session_maker()) as session:
connection = session.connection()
for conv in _PRIMARY_UNIT_CONVERTERS:
# Set the correct unit_class. Unlike MySQL, Postgres and SQLite
# have case sensitive string comparisons by default, so we
# can directly match on the valid units.
connection.execute(
update(StatisticsMeta)
.where(
and_(
StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS),
StatisticsMeta.unit_class.is_(None),
)
)
.values(unit_class=conv.UNIT_CLASS)
)

View File

@@ -26,7 +26,7 @@ CACHE_SIZE = 8192
_LOGGER = logging.getLogger(__name__)
QUERY_STATISTIC_META = (
QUERY_STATISTICS_META = (
StatisticsMeta.id,
StatisticsMeta.statistic_id,
StatisticsMeta.source,
@@ -55,7 +55,7 @@ def _generate_get_metadata_stmt(
Depending on the schema version, either mean_type (added in version 49) or has_mean column is used.
"""
columns: list[InstrumentedAttribute[Any]] = list(QUERY_STATISTIC_META)
columns: list[InstrumentedAttribute[Any]] = list(QUERY_STATISTICS_META)
if schema_version >= CIRCULAR_MEAN_SCHEMA_VERSION:
columns.append(StatisticsMeta.mean_type)
else:

View File

@@ -12,6 +12,7 @@ from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError
from homeassistant.components.number import (
DOMAIN as NUMBER_PLATFORM,
NumberDeviceClass,
NumberEntity,
NumberEntityDescription,
NumberExtraStoredData,
@@ -107,6 +108,9 @@ class RpcNumber(ShellyRpcAttributeEntity, NumberEntity):
if description.mode_fn is not None:
self._attr_mode = description.mode_fn(coordinator.device.config[key])
if hasattr(self, "_attr_name") and description.role != ROLE_GENERIC:
delattr(self, "_attr_name")
@property
def native_value(self) -> float | None:
"""Return value of number."""
@@ -181,7 +185,6 @@ NUMBERS: dict[tuple[str, str], BlockNumberDescription] = {
("device", "valvePos"): BlockNumberDescription(
key="device|valvepos",
translation_key="valve_position",
name="Valve position",
native_unit_of_measurement=PERCENTAGE,
available=lambda block: cast(int, block.valveError) != 1,
entity_category=EntityCategory.CONFIG,
@@ -200,12 +203,12 @@ RPC_NUMBERS: Final = {
key="blutrv",
sub_key="current_C",
translation_key="external_temperature",
name="External temperature",
native_min_value=-50,
native_max_value=50,
native_step=0.1,
mode=NumberMode.BOX,
entity_category=EntityCategory.CONFIG,
device_class=NumberDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
method="blu_trv_set_external_temperature",
entity_class=RpcBluTrvExtTempNumber,
@@ -213,7 +216,7 @@ RPC_NUMBERS: Final = {
"number_generic": RpcNumberDescription(
key="number",
sub_key="value",
removal_condition=lambda config, _status, key: not is_view_for_platform(
removal_condition=lambda config, _, key: not is_view_for_platform(
config, key, NUMBER_PLATFORM
),
max_fn=lambda config: config["max"],
@@ -229,9 +232,11 @@ RPC_NUMBERS: Final = {
"number_current_limit": RpcNumberDescription(
key="number",
sub_key="value",
translation_key="current_limit",
device_class=NumberDeviceClass.CURRENT,
max_fn=lambda config: config["max"],
min_fn=lambda config: config["min"],
mode_fn=lambda config: NumberMode.SLIDER,
mode_fn=lambda _: NumberMode.SLIDER,
step_fn=lambda config: config["meta"]["ui"].get("step"),
unit=get_virtual_component_unit,
method="number_set",
@@ -241,10 +246,11 @@ RPC_NUMBERS: Final = {
"number_position": RpcNumberDescription(
key="number",
sub_key="value",
translation_key="valve_position",
entity_registry_enabled_default=False,
max_fn=lambda config: config["max"],
min_fn=lambda config: config["min"],
mode_fn=lambda config: NumberMode.SLIDER,
mode_fn=lambda _: NumberMode.SLIDER,
step_fn=lambda config: config["meta"]["ui"].get("step"),
unit=get_virtual_component_unit,
method="number_set",
@@ -254,10 +260,12 @@ RPC_NUMBERS: Final = {
"number_target_humidity": RpcNumberDescription(
key="number",
sub_key="value",
translation_key="target_humidity",
device_class=NumberDeviceClass.HUMIDITY,
entity_registry_enabled_default=False,
max_fn=lambda config: config["max"],
min_fn=lambda config: config["min"],
mode_fn=lambda config: NumberMode.SLIDER,
mode_fn=lambda _: NumberMode.SLIDER,
step_fn=lambda config: config["meta"]["ui"].get("step"),
unit=get_virtual_component_unit,
method="number_set",
@@ -267,10 +275,12 @@ RPC_NUMBERS: Final = {
"number_target_temperature": RpcNumberDescription(
key="number",
sub_key="value",
translation_key="target_temperature",
device_class=NumberDeviceClass.TEMPERATURE,
entity_registry_enabled_default=False,
max_fn=lambda config: config["max"],
min_fn=lambda config: config["min"],
mode_fn=lambda config: NumberMode.SLIDER,
mode_fn=lambda _: NumberMode.SLIDER,
step_fn=lambda config: config["meta"]["ui"].get("step"),
unit=get_virtual_component_unit,
method="number_set",
@@ -281,21 +291,20 @@ RPC_NUMBERS: Final = {
key="blutrv",
sub_key="pos",
translation_key="valve_position",
name="Valve position",
native_min_value=0,
native_max_value=100,
native_step=1,
mode=NumberMode.SLIDER,
native_unit_of_measurement=PERCENTAGE,
method="blu_trv_set_valve_position",
removal_condition=lambda config, _status, key: config[key].get("enable", True)
removal_condition=lambda config, _, key: config[key].get("enable", True)
is True,
entity_class=RpcBluTrvNumber,
),
"left_slot_intensity": RpcNumberDescription(
key="cury",
sub_key="slots",
name="Left slot intensity",
translation_key="left_slot_intensity",
value=lambda status, _: status["left"]["intensity"],
native_min_value=0,
native_max_value=100,
@@ -311,7 +320,7 @@ RPC_NUMBERS: Final = {
"right_slot_intensity": RpcNumberDescription(
key="cury",
sub_key="slots",
name="Right slot intensity",
translation_key="right_slot_intensity",
value=lambda status, _: status["right"]["intensity"],
native_min_value=0,
native_max_value=100,
@@ -402,6 +411,9 @@ class BlockSleepingNumber(ShellySleepingBlockAttributeEntity, RestoreNumber):
self.restored_data: NumberExtraStoredData | None = None
super().__init__(coordinator, block, attribute, description, entry)
if hasattr(self, "_attr_name"):
delattr(self, "_attr_name")
async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""
await super().async_added_to_hass()

View File

@@ -188,6 +188,29 @@
}
}
},
"number": {
"current_limit": {
"name": "Current limit"
},
"external_temperature": {
"name": "External temperature"
},
"left_slot_intensity": {
"name": "Left slot intensity"
},
"right_slot_intensity": {
"name": "Right slot intensity"
},
"target_humidity": {
"name": "Target humidity"
},
"target_temperature": {
"name": "Target temperature"
},
"valve_position": {
"name": "Valve position"
}
},
"select": {
"cury_mode": {
"name": "Mode",

View File

@@ -75,6 +75,7 @@ PLATFORMS_BY_TYPE = {
SupportedModels.HUBMINI_MATTER.value: [Platform.SENSOR],
SupportedModels.CIRCULATOR_FAN.value: [Platform.FAN, Platform.SENSOR],
SupportedModels.S10_VACUUM.value: [Platform.VACUUM, Platform.SENSOR],
SupportedModels.S20_VACUUM.value: [Platform.VACUUM, Platform.SENSOR],
SupportedModels.K10_VACUUM.value: [Platform.VACUUM, Platform.SENSOR],
SupportedModels.K10_PRO_VACUUM.value: [Platform.VACUUM, Platform.SENSOR],
SupportedModels.K10_PRO_COMBO_VACUUM.value: [Platform.VACUUM, Platform.SENSOR],
@@ -102,6 +103,10 @@ PLATFORMS_BY_TYPE = {
SupportedModels.RELAY_SWITCH_2PM.value: [Platform.SWITCH, Platform.SENSOR],
SupportedModels.GARAGE_DOOR_OPENER.value: [Platform.COVER, Platform.SENSOR],
SupportedModels.CLIMATE_PANEL.value: [Platform.SENSOR, Platform.BINARY_SENSOR],
SupportedModels.SMART_THERMOSTAT_RADIATOR.value: [
Platform.CLIMATE,
Platform.SENSOR,
],
}
CLASS_BY_DEVICE = {
SupportedModels.CEILING_LIGHT.value: switchbot.SwitchbotCeilingLight,
@@ -119,6 +124,7 @@ CLASS_BY_DEVICE = {
SupportedModels.ROLLER_SHADE.value: switchbot.SwitchbotRollerShade,
SupportedModels.CIRCULATOR_FAN.value: switchbot.SwitchbotFan,
SupportedModels.S10_VACUUM.value: switchbot.SwitchbotVacuum,
SupportedModels.S20_VACUUM.value: switchbot.SwitchbotVacuum,
SupportedModels.K10_VACUUM.value: switchbot.SwitchbotVacuum,
SupportedModels.K10_PRO_VACUUM.value: switchbot.SwitchbotVacuum,
SupportedModels.K10_PRO_COMBO_VACUUM.value: switchbot.SwitchbotVacuum,
@@ -136,6 +142,7 @@ CLASS_BY_DEVICE = {
SupportedModels.PLUG_MINI_EU.value: switchbot.SwitchbotRelaySwitch,
SupportedModels.RELAY_SWITCH_2PM.value: switchbot.SwitchbotRelaySwitch2PM,
SupportedModels.GARAGE_DOOR_OPENER.value: switchbot.SwitchbotGarageDoorOpener,
SupportedModels.SMART_THERMOSTAT_RADIATOR.value: switchbot.SwitchbotSmartThermostatRadiator,
}

View File

@@ -0,0 +1,140 @@
"""Support for Switchbot Climate devices."""
from __future__ import annotations
import logging
from typing import Any
import switchbot
from switchbot import (
ClimateAction as SwitchBotClimateAction,
ClimateMode as SwitchBotClimateMode,
)
from homeassistant.components.climate import (
ClimateEntity,
ClimateEntityFeature,
HVACAction,
HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import SwitchbotConfigEntry
from .entity import SwitchbotEntity, exception_handler
# Map Switchbot library climate modes to Home Assistant HVAC modes.
SWITCHBOT_CLIMATE_TO_HASS_HVAC_MODE = {
    SwitchBotClimateMode.HEAT: HVACMode.HEAT,
    SwitchBotClimateMode.OFF: HVACMode.OFF,
}
# Inverse mapping: Home Assistant HVAC modes back to Switchbot climate modes.
HASS_HVAC_MODE_TO_SWITCHBOT_CLIMATE = {
    HVACMode.HEAT: SwitchBotClimateMode.HEAT,
    HVACMode.OFF: SwitchBotClimateMode.OFF,
}
# Map Switchbot climate actions to Home Assistant HVAC actions.
SWITCHBOT_ACTION_TO_HASS_HVAC_ACTION = {
    SwitchBotClimateAction.HEATING: HVACAction.HEATING,
    SwitchBotClimateAction.IDLE: HVACAction.IDLE,
    SwitchBotClimateAction.OFF: HVACAction.OFF,
}
_LOGGER = logging.getLogger(__name__)
# Platform setting: 0 disables Home Assistant's serialization of entity updates.
PARALLEL_UPDATES = 0
async def async_setup_entry(
    hass: HomeAssistant,
    entry: SwitchbotConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Switchbot climate platform from a config entry."""
    async_add_entities([SwitchBotClimateEntity(entry.runtime_data)])
class SwitchBotClimateEntity(SwitchbotEntity, ClimateEntity):
    """Representation of a Switchbot Climate device.

    Thin wrapper that delegates all state reads and commands to the
    underlying ``switchbot`` device object and translates between the
    library's mode/action enums and Home Assistant's.
    """

    _device: switchbot.SwitchbotDevice
    _attr_supported_features = (
        ClimateEntityFeature.PRESET_MODE
        | ClimateEntityFeature.TARGET_TEMPERATURE
        | ClimateEntityFeature.TURN_OFF
        | ClimateEntityFeature.TURN_ON
    )
    _attr_target_temperature_step = 0.5
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_translation_key = "climate"
    # None: the entity takes the device's name (single-entity device).
    _attr_name = None

    @property
    def min_temp(self) -> float:
        """Return the minimum temperature."""
        return self._device.min_temperature

    @property
    def max_temp(self) -> float:
        """Return the maximum temperature."""
        return self._device.max_temperature

    @property
    def preset_modes(self) -> list[str] | None:
        """Return the list of available preset modes."""
        return self._device.preset_modes

    @property
    def preset_mode(self) -> str | None:
        """Return the current preset mode."""
        return self._device.preset_mode

    @property
    def hvac_mode(self) -> HVACMode | None:
        """Return the current HVAC mode."""
        # Device modes absent from the mapping fall back to OFF.
        return SWITCHBOT_CLIMATE_TO_HASS_HVAC_MODE.get(
            self._device.hvac_mode, HVACMode.OFF
        )

    @property
    def hvac_modes(self) -> list[HVACMode]:
        """Return the list of available HVAC modes."""
        # NOTE(review): direct indexing raises KeyError for any device mode
        # outside HEAT/OFF — assumes the device only reports mapped modes;
        # confirm against the switchbot library.
        return [
            SWITCHBOT_CLIMATE_TO_HASS_HVAC_MODE[mode]
            for mode in self._device.hvac_modes
        ]

    @property
    def hvac_action(self) -> HVACAction | None:
        """Return the current HVAC action."""
        # Device actions absent from the mapping fall back to OFF.
        return SWITCHBOT_ACTION_TO_HASS_HVAC_ACTION.get(
            self._device.hvac_action, HVACAction.OFF
        )

    @property
    def current_temperature(self) -> float | None:
        """Return the current temperature."""
        return self._device.current_temperature

    @property
    def target_temperature(self) -> float | None:
        """Return the temperature we try to reach."""
        return self._device.target_temperature

    @exception_handler
    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set new HVAC mode."""
        return await self._device.set_hvac_mode(
            HASS_HVAC_MODE_TO_SWITCHBOT_CLIMATE[hvac_mode]
        )

    @exception_handler
    async def async_set_preset_mode(self, preset_mode: str) -> None:
        """Set new preset mode."""
        return await self._device.set_preset_mode(preset_mode)

    @exception_handler
    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperature."""
        # ATTR_TEMPERATURE may be missing, in which case None is forwarded
        # to the device — TODO confirm the library tolerates None here.
        temperature = kwargs.get(ATTR_TEMPERATURE)
        return await self._device.set_target_temperature(temperature)

View File

@@ -58,6 +58,8 @@ class SupportedModels(StrEnum):
K11_PLUS_VACUUM = "k11+_vacuum"
GARAGE_DOOR_OPENER = "garage_door_opener"
CLIMATE_PANEL = "climate_panel"
SMART_THERMOSTAT_RADIATOR = "smart_thermostat_radiator"
S20_VACUUM = "s20_vacuum"
CONNECTABLE_SUPPORTED_MODEL_TYPES = {
@@ -78,6 +80,7 @@ CONNECTABLE_SUPPORTED_MODEL_TYPES = {
SwitchbotModel.CIRCULATOR_FAN: SupportedModels.CIRCULATOR_FAN,
SwitchbotModel.K20_VACUUM: SupportedModels.K20_VACUUM,
SwitchbotModel.S10_VACUUM: SupportedModels.S10_VACUUM,
SwitchbotModel.S20_VACUUM: SupportedModels.S20_VACUUM,
SwitchbotModel.K10_VACUUM: SupportedModels.K10_VACUUM,
SwitchbotModel.K10_PRO_VACUUM: SupportedModels.K10_PRO_VACUUM,
SwitchbotModel.K10_PRO_COMBO_VACUUM: SupportedModels.K10_PRO_COMBO_VACUUM,
@@ -95,6 +98,7 @@ CONNECTABLE_SUPPORTED_MODEL_TYPES = {
SwitchbotModel.K11_VACUUM: SupportedModels.K11_PLUS_VACUUM,
SwitchbotModel.GARAGE_DOOR_OPENER: SupportedModels.GARAGE_DOOR_OPENER,
SwitchbotModel.CLIMATE_PANEL: SupportedModels.CLIMATE_PANEL,
SwitchbotModel.SMART_THERMOSTAT_RADIATOR: SupportedModels.SMART_THERMOSTAT_RADIATOR,
}
NON_CONNECTABLE_SUPPORTED_MODEL_TYPES = {
@@ -132,6 +136,7 @@ ENCRYPTED_MODELS = {
SwitchbotModel.PLUG_MINI_EU,
SwitchbotModel.RELAY_SWITCH_2PM,
SwitchbotModel.GARAGE_DOOR_OPENER,
SwitchbotModel.SMART_THERMOSTAT_RADIATOR,
}
ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[
@@ -153,6 +158,7 @@ ENCRYPTED_SWITCHBOT_MODEL_TO_CLASS: dict[
SwitchbotModel.PLUG_MINI_EU: switchbot.SwitchbotRelaySwitch,
SwitchbotModel.RELAY_SWITCH_2PM: switchbot.SwitchbotRelaySwitch2PM,
SwitchbotModel.GARAGE_DOOR_OPENER: switchbot.SwitchbotRelaySwitch,
SwitchbotModel.SMART_THERMOSTAT_RADIATOR: switchbot.SwitchbotSmartThermostatRadiator,
}
HASS_SENSOR_TYPE_TO_SWITCHBOT_MODEL = {

View File

@@ -1,5 +1,18 @@
{
"entity": {
"climate": {
"climate": {
"state_attributes": {
"preset_mode": {
"state": {
"manual": "mdi:hand-back-right",
"off": "mdi:hvac-off",
"schedule": "mdi:calendar-clock"
}
}
}
}
},
"fan": {
"air_purifier": {
"default": "mdi:air-purifier",

View File

@@ -100,6 +100,19 @@
"name": "Unlocked alarm"
}
},
"climate": {
"climate": {
"state_attributes": {
"preset_mode": {
"state": {
"manual": "[%key:common::state::manual%]",
"off": "[%key:common::state::off%]",
"schedule": "Schedule"
}
}
}
}
},
"cover": {
"cover": {
"state_attributes": {

View File

@@ -84,6 +84,7 @@
"abort": {
"already_configured": "Chat already configured"
},
"entry_type": "Allowed chat ID",
"error": {
"chat_not_found": "Chat not found"
},

View File

@@ -19,9 +19,9 @@ from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode
from .entity import TuyaEntity
from .models import EnumTypeData, find_dpcode
from .models import DPCodeEnumWrapper
from .util import get_dpcode
@@ -85,9 +85,21 @@ async def async_setup_entry(
device = manager.device_map[device_id]
if descriptions := ALARM.get(device.category):
entities.extend(
TuyaAlarmEntity(device, manager, description)
TuyaAlarmEntity(
device,
manager,
description,
action_dpcode_wrapper=action_dpcode_wrapper,
state_dpcode_wrapper=DPCodeEnumWrapper.find_dpcode(
device, description.master_state
),
)
for description in descriptions
if description.key in device.status
if (
action_dpcode_wrapper := DPCodeEnumWrapper.find_dpcode(
device, description.key, prefer_function=True
)
)
)
async_add_entities(entities)
@@ -103,7 +115,6 @@ class TuyaAlarmEntity(TuyaEntity, AlarmControlPanelEntity):
_attr_name = None
_attr_code_arm_required = False
_master_state: EnumTypeData | None = None
_alarm_msg_dpcode: DPCode | None = None
def __init__(
@@ -111,33 +122,24 @@ class TuyaAlarmEntity(TuyaEntity, AlarmControlPanelEntity):
device: CustomerDevice,
device_manager: Manager,
description: TuyaAlarmControlPanelEntityDescription,
*,
action_dpcode_wrapper: DPCodeEnumWrapper,
state_dpcode_wrapper: DPCodeEnumWrapper | None,
) -> None:
"""Init Tuya Alarm."""
super().__init__(device, device_manager)
self.entity_description = description
self._attr_unique_id = f"{super().unique_id}{description.key}"
self._action_dpcode_wrapper = action_dpcode_wrapper
self._state_dpcode_wrapper = state_dpcode_wrapper
# Determine supported modes
if supported_modes := find_dpcode(
self.device, description.key, dptype=DPType.ENUM, prefer_function=True
):
if Mode.HOME in supported_modes.range:
self._attr_supported_features |= AlarmControlPanelEntityFeature.ARM_HOME
if Mode.ARM in supported_modes.range:
self._attr_supported_features |= AlarmControlPanelEntityFeature.ARM_AWAY
if Mode.SOS in supported_modes.range:
self._attr_supported_features |= AlarmControlPanelEntityFeature.TRIGGER
# Determine master state
if enum_type := find_dpcode(
self.device,
description.master_state,
dptype=DPType.ENUM,
prefer_function=True,
):
self._master_state = enum_type
if Mode.HOME in action_dpcode_wrapper.type_information.range:
self._attr_supported_features |= AlarmControlPanelEntityFeature.ARM_HOME
if Mode.ARM in action_dpcode_wrapper.type_information.range:
self._attr_supported_features |= AlarmControlPanelEntityFeature.ARM_AWAY
if Mode.SOS in action_dpcode_wrapper.type_information.range:
self._attr_supported_features |= AlarmControlPanelEntityFeature.TRIGGER
# Determine alarm message
if dp_code := get_dpcode(self.device, description.alarm_msg):
@@ -149,8 +151,8 @@ class TuyaAlarmEntity(TuyaEntity, AlarmControlPanelEntity):
# When the alarm is triggered, only its 'state' is changing. From 'normal' to 'alarm'.
# The 'mode' doesn't change, and stays as 'arm' or 'home'.
if (
self._master_state is not None
and self.device.status.get(self._master_state.dpcode) == State.ALARM
self._state_dpcode_wrapper is not None
and self.device.status.get(self._state_dpcode_wrapper.dpcode) == State.ALARM
):
# Only report as triggered if NOT a battery warning
if (
@@ -166,28 +168,26 @@ class TuyaAlarmEntity(TuyaEntity, AlarmControlPanelEntity):
def changed_by(self) -> str | None:
"""Last change triggered by."""
if (
self._master_state is not None
self._state_dpcode_wrapper is not None
and self._alarm_msg_dpcode is not None
and self.device.status.get(self._master_state.dpcode) == State.ALARM
and self.device.status.get(self._state_dpcode_wrapper.dpcode) == State.ALARM
and (encoded_msg := self.device.status.get(self._alarm_msg_dpcode))
):
return b64decode(encoded_msg).decode("utf-16be")
return None
def alarm_disarm(self, code: str | None = None) -> None:
async def async_alarm_disarm(self, code: str | None = None) -> None:
"""Send Disarm command."""
self._send_command(
[{"code": self.entity_description.key, "value": Mode.DISARMED}]
)
await self._async_send_dpcode_update(self._action_dpcode_wrapper, Mode.DISARMED)
def alarm_arm_home(self, code: str | None = None) -> None:
async def async_alarm_arm_home(self, code: str | None = None) -> None:
"""Send Home command."""
self._send_command([{"code": self.entity_description.key, "value": Mode.HOME}])
await self._async_send_dpcode_update(self._action_dpcode_wrapper, Mode.HOME)
def alarm_arm_away(self, code: str | None = None) -> None:
async def async_alarm_arm_away(self, code: str | None = None) -> None:
"""Send Arm command."""
self._send_command([{"code": self.entity_description.key, "value": Mode.ARM}])
await self._async_send_dpcode_update(self._action_dpcode_wrapper, Mode.ARM)
def alarm_trigger(self, code: str | None = None) -> None:
async def async_alarm_trigger(self, code: str | None = None) -> None:
"""Send SOS command."""
self._send_command([{"code": self.entity_description.key, "value": Mode.SOS}])
await self._async_send_dpcode_update(self._action_dpcode_wrapper, Mode.SOS)

View File

@@ -196,7 +196,7 @@ class DPCodeTypeInformationWrapper[T: TypeInformation](DPCodeWrapper):
def find_dpcode(
cls,
device: CustomerDevice,
dpcodes: str | DPCode | tuple[DPCode, ...],
dpcodes: str | DPCode | tuple[DPCode, ...] | None,
*,
prefer_function: bool = False,
) -> Self | None:

View File

@@ -19,6 +19,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode
from .entity import TuyaEntity
from .models import DPCodeBooleanWrapper
SIRENS: dict[DeviceCategory, tuple[SirenEntityDescription, ...]] = {
DeviceCategory.CO2BJ: (
@@ -64,9 +65,13 @@ async def async_setup_entry(
device = manager.device_map[device_id]
if descriptions := SIRENS.get(device.category):
entities.extend(
TuyaSirenEntity(device, manager, description)
TuyaSirenEntity(device, manager, description, dpcode_wrapper)
for description in descriptions
if description.key in device.status
if (
dpcode_wrapper := DPCodeBooleanWrapper.find_dpcode(
device, description.key, prefer_function=True
)
)
)
async_add_entities(entities)
@@ -89,21 +94,23 @@ class TuyaSirenEntity(TuyaEntity, SirenEntity):
device: CustomerDevice,
device_manager: Manager,
description: SirenEntityDescription,
dpcode_wrapper: DPCodeBooleanWrapper,
) -> None:
"""Init Tuya Siren."""
super().__init__(device, device_manager)
self.entity_description = description
self._attr_unique_id = f"{super().unique_id}{description.key}"
self._dpcode_wrapper = dpcode_wrapper
@property
def is_on(self) -> bool:
def is_on(self) -> bool | None:
"""Return true if siren is on."""
return self.device.status.get(self.entity_description.key, False)
return self._dpcode_wrapper.read_device_status(self.device)
def turn_on(self, **kwargs: Any) -> None:
async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the siren on."""
self._send_command([{"code": self.entity_description.key, "value": True}])
await self._async_send_dpcode_update(self._dpcode_wrapper, True)
def turn_off(self, **kwargs: Any) -> None:
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the siren off."""
self._send_command([{"code": self.entity_description.key, "value": False}])
await self._async_send_dpcode_update(self._dpcode_wrapper, False)

View File

@@ -14,7 +14,7 @@
"velbus-protocol"
],
"quality_scale": "bronze",
"requirements": ["velbus-aio==2025.8.0"],
"requirements": ["velbus-aio==2025.11.0"],
"usb": [
{
"pid": "0B1B",

View File

@@ -1,17 +1,20 @@
"""Support for VELUX KLF 200 devices."""
from __future__ import annotations
from pyvlx import PyVLX, PyVLXException
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import device_registry as dr
from .const import DOMAIN, LOGGER, PLATFORMS
type VeluxConfigEntry = ConfigEntry[PyVLX]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_setup_entry(hass: HomeAssistant, entry: VeluxConfigEntry) -> bool:
"""Set up the velux component."""
host = entry.data[CONF_HOST]
password = entry.data[CONF_PASSWORD]
@@ -27,6 +30,21 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry.runtime_data = pyvlx
device_registry = dr.async_get(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, f"gateway_{entry.entry_id}")},
name="KLF 200 Gateway",
manufacturer="Velux",
model="KLF 200",
hw_version=(
str(pyvlx.klf200.version.hardwareversion) if pyvlx.klf200.version else None
),
sw_version=(
str(pyvlx.klf200.version.softwareversion) if pyvlx.klf200.version else None
),
)
async def on_hass_stop(event):
"""Close connection when hass stops."""
LOGGER.debug("Velux interface terminated")
@@ -46,6 +64,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: VeluxConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)

View File

@@ -24,14 +24,14 @@ SCAN_INTERVAL = timedelta(minutes=5) # Use standard polling
async def async_setup_entry(
hass: HomeAssistant,
config: VeluxConfigEntry,
config_entry: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up rain sensor(s) for Velux platform."""
pyvlx = config.runtime_data
pyvlx = config_entry.runtime_data
async_add_entities(
VeluxRainSensor(node, config.entry_id)
VeluxRainSensor(node, config_entry.entry_id)
for node in pyvlx.nodes
if isinstance(node, Window) and node.rain_sensor
)

View File

@@ -32,13 +32,13 @@ PARALLEL_UPDATES = 1
async def async_setup_entry(
hass: HomeAssistant,
config: VeluxConfigEntry,
config_entry: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up cover(s) for Velux platform."""
pyvlx = config.runtime_data
pyvlx = config_entry.runtime_data
async_add_entities(
VeluxCover(node, config.entry_id)
VeluxCover(node, config_entry.entry_id)
for node in pyvlx.nodes
if isinstance(node, OpeningDevice)
)

View File

@@ -18,22 +18,23 @@ class VeluxEntity(Entity):
def __init__(self, node: Node, config_entry_id: str) -> None:
"""Initialize the Velux device."""
self.node = node
self._attr_unique_id = (
unique_id = (
node.serial_number
if node.serial_number
else f"{config_entry_id}_{node.node_id}"
)
self._attr_unique_id = unique_id
self._attr_device_info = DeviceInfo(
identifiers={
(
DOMAIN,
node.serial_number
if node.serial_number
else f"{config_entry_id}_{node.node_id}",
unique_id,
)
},
name=node.name if node.name else f"#{node.node_id}",
serial_number=node.serial_number,
via_device=(DOMAIN, f"gateway_{config_entry_id}"),
)
@callback

View File

@@ -18,13 +18,13 @@ PARALLEL_UPDATES = 1
async def async_setup_entry(
hass: HomeAssistant,
config: VeluxConfigEntry,
config_entry: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up light(s) for Velux platform."""
pyvlx = config.runtime_data
pyvlx = config_entry.runtime_data
async_add_entities(
VeluxLight(node, config.entry_id)
VeluxLight(node, config_entry.entry_id)
for node in pyvlx.nodes
if isinstance(node, LighteningDevice)
)

View File

@@ -15,11 +15,11 @@ PARALLEL_UPDATES = 1
async def async_setup_entry(
hass: HomeAssistant,
config: VeluxConfigEntry,
config_entry: VeluxConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the scenes for Velux platform."""
pyvlx = config.runtime_data
pyvlx = config_entry.runtime_data
entities = [VeluxScene(scene) for scene in pyvlx.scenes]
async_add_entities(entities)

View File

@@ -11,6 +11,7 @@ from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .const import DOMAIN, SERVICE_UPDATE_DEVS, VS_COORDINATOR, VS_MANAGER
@@ -121,3 +122,21 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) ->
hass.config_entries.async_update_entry(config_entry, minor_version=2)
return True
async def async_remove_config_entry_device(
    hass: HomeAssistant, config_entry: ConfigEntry, device_entry: DeviceEntry
) -> bool:
    """Remove a config entry from a device.

    Returns False when the device is still present in the VeSync manager's
    device list (Home Assistant then refuses the removal), True otherwise.
    """
    manager = hass.data[DOMAIN][VS_MANAGER]
    # Refresh so the check runs against the manager's current device list.
    await manager.get_devices()

    # The registry identifier is loop-invariant: resolve it once up front
    # instead of on every iteration. No identifier -> nothing can match.
    identifier = next(iter(device_entry.identifiers), None)
    if identifier is None:
        return True

    for dev in manager.devices:
        # Sub-devices are registered as parent cid + sub device number.
        if isinstance(dev.sub_device_no, int):
            device_id = f"{dev.cid}{dev.sub_device_no!s}"
        else:
            device_id = dev.cid
        if device_id == identifier[1]:
            return False
    return True

View File

@@ -58,6 +58,7 @@ from .utils import (
get_compressors,
get_device_serial,
is_supported,
normalize_state,
)
_LOGGER = logging.getLogger(__name__)
@@ -1086,7 +1087,7 @@ COMPRESSOR_SENSORS: tuple[ViCareSensorEntityDescription, ...] = (
ViCareSensorEntityDescription(
key="compressor_phase",
translation_key="compressor_phase",
value_getter=lambda api: api.getPhase(),
value_getter=lambda api: normalize_state(api.getPhase()),
entity_category=EntityCategory.DIAGNOSTIC,
),
)

View File

@@ -213,7 +213,18 @@
"name": "Compressor hours load class 5"
},
"compressor_phase": {
"name": "Compressor phase"
"name": "Compressor phase",
"state": {
"cooling": "[%key:component::climate::entity_component::_::state_attributes::hvac_action::state::cooling%]",
"defrost": "[%key:component::climate::entity_component::_::state_attributes::hvac_action::state::defrosting%]",
"heating": "[%key:component::climate::entity_component::_::state_attributes::hvac_action::state::heating%]",
"off": "[%key:common::state::off%]",
"passive_defrost": "Passive defrosting",
"pause": "[%key:common::state::idle%]",
"preparing": "Preparing",
"preparing_defrost": "Preparing defrost",
"ready": "[%key:common::state::idle%]"
}
},
"compressor_starts": {
"name": "Compressor starts"

View File

@@ -133,3 +133,8 @@ def get_compressors(device: PyViCareDevice) -> list[PyViCareHeatingDeviceCompone
def filter_state(state: str) -> str | None:
"""Return the state if not 'nothing' or 'unknown'."""
return None if state in ("nothing", "unknown") else state
def normalize_state(state: str) -> str:
    """Return *state* with every hyphen replaced by an underscore."""
    return "_".join(state.split("-"))

View File

@@ -99,7 +99,7 @@
},
"exceptions": {
"oauth2_implementation_unavailable": {
"message": "OAuth2 implementation temporarily unavailable, will retry"
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
},
"request_exception": {
"message": "Failed to connect to Xbox Network"

View File

@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/yamaha_musiccast",
"iot_class": "local_push",
"loggers": ["aiomusiccast"],
"requirements": ["aiomusiccast==0.14.8"],
"requirements": ["aiomusiccast==0.15.0"],
"ssdp": [
{
"manufacturer": "Yamaha Corporation"

View File

@@ -134,7 +134,7 @@
"message": "Config entry not found or not loaded!"
},
"oauth2_implementation_unavailable": {
"message": "OAuth2 implementation temporarily unavailable, will retry"
"message": "[%key:common::exceptions::oauth2_implementation_unavailable::message%]"
},
"valve_inoperable_currently": {
"message": "The Valve cannot be operated currently."

View File

@@ -76,12 +76,12 @@ class EventAreaRegistryUpdatedData(TypedDict):
class AreaEntry(NormalizedNameBaseRegistryEntry):
"""Area Registry Entry."""
aliases: set[str]
aliases: frozenset[str]
floor_id: str | None
humidity_entity_id: str | None
icon: str | None
id: str
labels: set[str] = field(default_factory=set)
labels: frozenset[str] = field(default_factory=frozenset)
picture: str | None
temperature_entity_id: str | None
_cache: dict[str, Any] = field(default_factory=dict, compare=False, init=False)
@@ -295,12 +295,12 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]):
_validate_temperature_entity(self.hass, temperature_entity_id)
area = AreaEntry(
aliases=aliases or set(),
aliases=frozenset(aliases) if aliases else frozenset(),
floor_id=floor_id,
humidity_entity_id=humidity_entity_id,
icon=icon,
id=self._generate_id(name),
labels=labels or set(),
labels=frozenset(labels) if labels else frozenset(),
name=name,
picture=picture,
temperature_entity_id=temperature_entity_id,
@@ -338,11 +338,11 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]):
self,
area_id: str,
*,
aliases: set[str] | UndefinedType = UNDEFINED,
aliases: frozenset[str] | set[str] | UndefinedType = UNDEFINED,
floor_id: str | None | UndefinedType = UNDEFINED,
humidity_entity_id: str | None | UndefinedType = UNDEFINED,
icon: str | None | UndefinedType = UNDEFINED,
labels: set[str] | UndefinedType = UNDEFINED,
labels: frozenset[str] | set[str] | UndefinedType = UNDEFINED,
name: str | UndefinedType = UNDEFINED,
picture: str | None | UndefinedType = UNDEFINED,
temperature_entity_id: str | None | UndefinedType = UNDEFINED,
@@ -374,11 +374,11 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]):
self,
area_id: str,
*,
aliases: set[str] | UndefinedType = UNDEFINED,
aliases: frozenset[str] | set[str] | UndefinedType = UNDEFINED,
floor_id: str | None | UndefinedType = UNDEFINED,
humidity_entity_id: str | None | UndefinedType = UNDEFINED,
icon: str | None | UndefinedType = UNDEFINED,
labels: set[str] | UndefinedType = UNDEFINED,
labels: frozenset[str] | set[str] | UndefinedType = UNDEFINED,
name: str | UndefinedType = UNDEFINED,
picture: str | None | UndefinedType = UNDEFINED,
temperature_entity_id: str | None | UndefinedType = UNDEFINED,
@@ -389,17 +389,23 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]):
new_values: dict[str, Any] = {
attr_name: value
for attr_name, value in (
("aliases", aliases),
("floor_id", floor_id),
("humidity_entity_id", humidity_entity_id),
("icon", icon),
("labels", labels),
("picture", picture),
("temperature_entity_id", temperature_entity_id),
)
if value is not UNDEFINED and value != getattr(old, attr_name)
}
for attr_name, value in (
("aliases", aliases),
("labels", labels),
):
if value is UNDEFINED or value == getattr(old, attr_name):
continue
new_values[attr_name] = frozenset(value)
if "humidity_entity_id" in new_values and humidity_entity_id is not None:
_validate_humidity_entity(self.hass, new_values["humidity_entity_id"])
@@ -432,12 +438,12 @@ class AreaRegistry(BaseRegistry[AreasRegistryStoreData]):
for area in data["areas"]:
assert area["name"] is not None and area["id"] is not None
areas[area["id"]] = AreaEntry(
aliases=set(area["aliases"]),
aliases=frozenset(area["aliases"]),
floor_id=area["floor_id"],
humidity_entity_id=area["humidity_entity_id"],
icon=area["icon"],
id=area["id"],
labels=set(area["labels"]),
labels=frozenset(area["labels"]),
name=area["name"],
picture=area["picture"],
temperature_entity_id=area["temperature_entity_id"],

View File

@@ -1304,7 +1304,11 @@ def issues(hass: HomeAssistant) -> dict[tuple[str, str], dict[str, Any]]:
"""Return all open issues."""
current_issues = ir.async_get(hass).issues
# Use JSON for safe representation
return {k: v.to_json() for (k, v) in current_issues.items()}
return {
key: issue_entry.to_json()
for (key, issue_entry) in current_issues.items()
if issue_entry.active
}
def issue(hass: HomeAssistant, domain: str, issue_id: str) -> dict[str, Any] | None:

View File

@@ -115,6 +115,11 @@
"turned_on": "{entity_name} turned on"
}
},
"exceptions": {
"oauth2_implementation_unavailable": {
"message": "OAuth2 implementation unavailable, will retry"
}
},
"generic": {
"model": "Model",
"ui_managed": "Managed via UI"

6
requirements_all.txt generated
View File

@@ -321,7 +321,7 @@ aiomealie==1.1.0
aiomodernforms==0.1.8
# homeassistant.components.yamaha_musiccast
aiomusiccast==0.14.8
aiomusiccast==0.15.0
# homeassistant.components.nanoleaf
aionanoleaf==0.2.1
@@ -2050,7 +2050,7 @@ pyhaversion==22.8.0
pyheos==1.0.6
# homeassistant.components.hive
pyhive-integration==1.0.6
pyhive-integration==1.0.7
# homeassistant.components.homematic
pyhomematic==0.1.77
@@ -3076,7 +3076,7 @@ vegehub==0.1.26
vehicle==2.2.2
# homeassistant.components.velbus
velbus-aio==2025.8.0
velbus-aio==2025.11.0
# homeassistant.components.venstar
venstarcolortouch==0.21

View File

@@ -21,7 +21,7 @@ pydantic==2.12.2
pylint==4.0.1
pylint-per-file-ignores==1.4.0
pipdeptree==2.26.1
pytest-asyncio==1.2.0
pytest-asyncio==1.3.0
pytest-aiohttp==1.1.0
pytest-cov==7.0.0
pytest-freezer==0.4.9
@@ -32,7 +32,7 @@ pytest-timeout==2.4.0
pytest-unordered==0.7.0
pytest-picked==0.5.1
pytest-xdist==3.8.0
pytest==8.4.2
pytest==9.0.0
requests-mock==1.12.1
respx==0.22.0
syrupy==5.0.0

View File

@@ -303,7 +303,7 @@ aiomealie==1.1.0
aiomodernforms==0.1.8
# homeassistant.components.yamaha_musiccast
aiomusiccast==0.14.8
aiomusiccast==0.15.0
# homeassistant.components.nanoleaf
aionanoleaf==0.2.1
@@ -1709,7 +1709,7 @@ pyhaversion==22.8.0
pyheos==1.0.6
# homeassistant.components.hive
pyhive-integration==1.0.6
pyhive-integration==1.0.7
# homeassistant.components.homematic
pyhomematic==0.1.77
@@ -2543,7 +2543,7 @@ vegehub==0.1.26
vehicle==2.2.2
# homeassistant.components.velbus
velbus-aio==2025.8.0
velbus-aio==2025.11.0
# homeassistant.components.venstar
venstarcolortouch==0.21

View File

@@ -174,6 +174,7 @@ def gen_data_entry_schema(
flow_title: int,
require_step_title: bool,
mandatory_description: str | None = None,
subentry_flow: bool = False,
) -> vol.All:
"""Generate a data entry schema."""
step_title_class = vol.Required if require_step_title else vol.Optional
@@ -206,9 +207,13 @@ def gen_data_entry_schema(
vol.Optional("abort"): {str: translation_value_validator},
vol.Optional("progress"): {str: translation_value_validator},
vol.Optional("create_entry"): {str: translation_value_validator},
vol.Optional("initiate_flow"): {str: translation_value_validator},
vol.Optional("entry_type"): translation_value_validator,
}
if subentry_flow:
schema[vol.Required("entry_type")] = translation_value_validator
schema[vol.Required("initiate_flow")] = {
vol.Required("user"): translation_value_validator,
str: translation_value_validator,
}
if flow_title == REQUIRED:
schema[vol.Required("title")] = translation_value_validator
elif flow_title == REMOVED:
@@ -314,6 +319,7 @@ def gen_strings_schema(config: Config, integration: Integration) -> vol.Schema:
integration=integration,
flow_title=REMOVED,
require_step_title=False,
subentry_flow=True,
),
slug_validator=vol.Any("_", cv.slug),
),

View File

@@ -6,9 +6,11 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_entry_oauth2_flow
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.config_entry_oauth2_flow import (
ImplementationUnavailableError,
OAuth2Session,
async_get_config_entry_implementation,
)
from . import api
@@ -26,17 +28,13 @@ type New_NameConfigEntry = ConfigEntry[api.AsyncConfigEntryAuth]
async def async_setup_entry(hass: HomeAssistant, entry: New_NameConfigEntry) -> bool:
"""Set up NEW_NAME from a config entry."""
try:
implementation = (
await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, entry
)
)
implementation = await async_get_config_entry_implementation(hass, entry)
except ImplementationUnavailableError as err:
raise ConfigEntryNotReady(
"OAuth2 implementation temporarily unavailable, will retry"
) from err
session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
session = OAuth2Session(hass, entry, implementation)
# If using a requests-based API lib
entry.runtime_data = api.ConfigEntryAuth(hass, session)

View File

@@ -1,5 +1,5 @@
# serializer version: 1
# name: test_alarm_control_panel[None-amax_3000][alarm_control_panel.area1-entry]
# name: test_alarm_control_panel[amax_3000-None][alarm_control_panel.area1-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -34,7 +34,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_alarm_control_panel[None-amax_3000][alarm_control_panel.area1-state]
# name: test_alarm_control_panel[amax_3000-None][alarm_control_panel.area1-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'changed_by': None,
@@ -51,7 +51,7 @@
'state': 'disarmed',
})
# ---
# name: test_alarm_control_panel[None-b5512][alarm_control_panel.area1-entry]
# name: test_alarm_control_panel[b5512-None][alarm_control_panel.area1-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -86,7 +86,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_alarm_control_panel[None-b5512][alarm_control_panel.area1-state]
# name: test_alarm_control_panel[b5512-None][alarm_control_panel.area1-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'changed_by': None,
@@ -103,7 +103,7 @@
'state': 'disarmed',
})
# ---
# name: test_alarm_control_panel[None-solution_3000][alarm_control_panel.area1-entry]
# name: test_alarm_control_panel[solution_3000-None][alarm_control_panel.area1-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -138,7 +138,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_alarm_control_panel[None-solution_3000][alarm_control_panel.area1-state]
# name: test_alarm_control_panel[solution_3000-None][alarm_control_panel.area1-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'changed_by': None,

View File

@@ -1,5 +1,5 @@
# serializer version: 1
# name: test_sensor[None-amax_3000][sensor.area1_burglary_alarm_issues-entry]
# name: test_sensor[amax_3000-None][sensor.area1_burglary_alarm_issues-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -34,7 +34,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_sensor[None-amax_3000][sensor.area1_burglary_alarm_issues-state]
# name: test_sensor[amax_3000-None][sensor.area1_burglary_alarm_issues-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Area1 Burglary alarm issues',
@@ -47,7 +47,7 @@
'state': 'no_issues',
})
# ---
# name: test_sensor[None-amax_3000][sensor.area1_faulting_points-entry]
# name: test_sensor[amax_3000-None][sensor.area1_faulting_points-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -82,7 +82,7 @@
'unit_of_measurement': 'points',
})
# ---
# name: test_sensor[None-amax_3000][sensor.area1_faulting_points-state]
# name: test_sensor[amax_3000-None][sensor.area1_faulting_points-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Area1 Faulting points',
@@ -96,7 +96,7 @@
'state': '0',
})
# ---
# name: test_sensor[None-amax_3000][sensor.area1_fire_alarm_issues-entry]
# name: test_sensor[amax_3000-None][sensor.area1_fire_alarm_issues-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -131,7 +131,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_sensor[None-amax_3000][sensor.area1_fire_alarm_issues-state]
# name: test_sensor[amax_3000-None][sensor.area1_fire_alarm_issues-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Area1 Fire alarm issues',
@@ -144,7 +144,7 @@
'state': 'no_issues',
})
# ---
# name: test_sensor[None-amax_3000][sensor.area1_gas_alarm_issues-entry]
# name: test_sensor[amax_3000-None][sensor.area1_gas_alarm_issues-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -179,7 +179,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_sensor[None-amax_3000][sensor.area1_gas_alarm_issues-state]
# name: test_sensor[amax_3000-None][sensor.area1_gas_alarm_issues-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Area1 Gas alarm issues',
@@ -192,7 +192,7 @@
'state': 'no_issues',
})
# ---
# name: test_sensor[None-b5512][sensor.area1_burglary_alarm_issues-entry]
# name: test_sensor[b5512-None][sensor.area1_burglary_alarm_issues-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -227,7 +227,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_sensor[None-b5512][sensor.area1_burglary_alarm_issues-state]
# name: test_sensor[b5512-None][sensor.area1_burglary_alarm_issues-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Area1 Burglary alarm issues',
@@ -240,7 +240,7 @@
'state': 'no_issues',
})
# ---
# name: test_sensor[None-b5512][sensor.area1_faulting_points-entry]
# name: test_sensor[b5512-None][sensor.area1_faulting_points-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -275,7 +275,7 @@
'unit_of_measurement': 'points',
})
# ---
# name: test_sensor[None-b5512][sensor.area1_faulting_points-state]
# name: test_sensor[b5512-None][sensor.area1_faulting_points-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Area1 Faulting points',
@@ -289,7 +289,7 @@
'state': '0',
})
# ---
# name: test_sensor[None-b5512][sensor.area1_fire_alarm_issues-entry]
# name: test_sensor[b5512-None][sensor.area1_fire_alarm_issues-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -324,7 +324,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_sensor[None-b5512][sensor.area1_fire_alarm_issues-state]
# name: test_sensor[b5512-None][sensor.area1_fire_alarm_issues-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Area1 Fire alarm issues',
@@ -337,7 +337,7 @@
'state': 'no_issues',
})
# ---
# name: test_sensor[None-b5512][sensor.area1_gas_alarm_issues-entry]
# name: test_sensor[b5512-None][sensor.area1_gas_alarm_issues-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -372,7 +372,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_sensor[None-b5512][sensor.area1_gas_alarm_issues-state]
# name: test_sensor[b5512-None][sensor.area1_gas_alarm_issues-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Area1 Gas alarm issues',
@@ -385,7 +385,7 @@
'state': 'no_issues',
})
# ---
# name: test_sensor[None-solution_3000][sensor.area1_burglary_alarm_issues-entry]
# name: test_sensor[solution_3000-None][sensor.area1_burglary_alarm_issues-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -420,7 +420,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_sensor[None-solution_3000][sensor.area1_burglary_alarm_issues-state]
# name: test_sensor[solution_3000-None][sensor.area1_burglary_alarm_issues-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Area1 Burglary alarm issues',
@@ -433,7 +433,7 @@
'state': 'no_issues',
})
# ---
# name: test_sensor[None-solution_3000][sensor.area1_faulting_points-entry]
# name: test_sensor[solution_3000-None][sensor.area1_faulting_points-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -468,7 +468,7 @@
'unit_of_measurement': 'points',
})
# ---
# name: test_sensor[None-solution_3000][sensor.area1_faulting_points-state]
# name: test_sensor[solution_3000-None][sensor.area1_faulting_points-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Area1 Faulting points',
@@ -482,7 +482,7 @@
'state': '0',
})
# ---
# name: test_sensor[None-solution_3000][sensor.area1_fire_alarm_issues-entry]
# name: test_sensor[solution_3000-None][sensor.area1_fire_alarm_issues-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -517,7 +517,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_sensor[None-solution_3000][sensor.area1_fire_alarm_issues-state]
# name: test_sensor[solution_3000-None][sensor.area1_fire_alarm_issues-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Area1 Fire alarm issues',
@@ -530,7 +530,7 @@
'state': 'no_issues',
})
# ---
# name: test_sensor[None-solution_3000][sensor.area1_gas_alarm_issues-entry]
# name: test_sensor[solution_3000-None][sensor.area1_gas_alarm_issues-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -565,7 +565,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_sensor[None-solution_3000][sensor.area1_gas_alarm_issues-state]
# name: test_sensor[solution_3000-None][sensor.area1_gas_alarm_issues-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Area1 Gas alarm issues',

View File

@@ -1,5 +1,5 @@
# serializer version: 1
# name: test_switch[None-amax_3000][switch.main_door_locked-entry]
# name: test_switch[amax_3000-None][switch.main_door_locked-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -34,7 +34,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[None-amax_3000][switch.main_door_locked-state]
# name: test_switch[amax_3000-None][switch.main_door_locked-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Main Door Locked',
@@ -47,7 +47,7 @@
'state': 'on',
})
# ---
# name: test_switch[None-amax_3000][switch.main_door_momentarily_unlocked-entry]
# name: test_switch[amax_3000-None][switch.main_door_momentarily_unlocked-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -82,7 +82,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[None-amax_3000][switch.main_door_momentarily_unlocked-state]
# name: test_switch[amax_3000-None][switch.main_door_momentarily_unlocked-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Main Door Momentarily unlocked',
@@ -95,7 +95,7 @@
'state': 'off',
})
# ---
# name: test_switch[None-amax_3000][switch.main_door_secured-entry]
# name: test_switch[amax_3000-None][switch.main_door_secured-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -130,7 +130,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[None-amax_3000][switch.main_door_secured-state]
# name: test_switch[amax_3000-None][switch.main_door_secured-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Main Door Secured',
@@ -143,7 +143,7 @@
'state': 'off',
})
# ---
# name: test_switch[None-amax_3000][switch.output_a-entry]
# name: test_switch[amax_3000-None][switch.output_a-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -178,7 +178,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[None-amax_3000][switch.output_a-state]
# name: test_switch[amax_3000-None][switch.output_a-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Output A',
@@ -191,7 +191,7 @@
'state': 'off',
})
# ---
# name: test_switch[None-b5512][switch.main_door_locked-entry]
# name: test_switch[b5512-None][switch.main_door_locked-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -226,7 +226,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[None-b5512][switch.main_door_locked-state]
# name: test_switch[b5512-None][switch.main_door_locked-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Main Door Locked',
@@ -239,7 +239,7 @@
'state': 'on',
})
# ---
# name: test_switch[None-b5512][switch.main_door_momentarily_unlocked-entry]
# name: test_switch[b5512-None][switch.main_door_momentarily_unlocked-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -274,7 +274,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[None-b5512][switch.main_door_momentarily_unlocked-state]
# name: test_switch[b5512-None][switch.main_door_momentarily_unlocked-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Main Door Momentarily unlocked',
@@ -287,7 +287,7 @@
'state': 'off',
})
# ---
# name: test_switch[None-b5512][switch.main_door_secured-entry]
# name: test_switch[b5512-None][switch.main_door_secured-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -322,7 +322,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[None-b5512][switch.main_door_secured-state]
# name: test_switch[b5512-None][switch.main_door_secured-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Main Door Secured',
@@ -335,7 +335,7 @@
'state': 'off',
})
# ---
# name: test_switch[None-b5512][switch.output_a-entry]
# name: test_switch[b5512-None][switch.output_a-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -370,7 +370,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[None-b5512][switch.output_a-state]
# name: test_switch[b5512-None][switch.output_a-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Output A',
@@ -383,7 +383,7 @@
'state': 'off',
})
# ---
# name: test_switch[None-solution_3000][switch.main_door_locked-entry]
# name: test_switch[solution_3000-None][switch.main_door_locked-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -418,7 +418,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[None-solution_3000][switch.main_door_locked-state]
# name: test_switch[solution_3000-None][switch.main_door_locked-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Main Door Locked',
@@ -431,7 +431,7 @@
'state': 'on',
})
# ---
# name: test_switch[None-solution_3000][switch.main_door_momentarily_unlocked-entry]
# name: test_switch[solution_3000-None][switch.main_door_momentarily_unlocked-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -466,7 +466,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[None-solution_3000][switch.main_door_momentarily_unlocked-state]
# name: test_switch[solution_3000-None][switch.main_door_momentarily_unlocked-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Main Door Momentarily unlocked',
@@ -479,7 +479,7 @@
'state': 'off',
})
# ---
# name: test_switch[None-solution_3000][switch.main_door_secured-entry]
# name: test_switch[solution_3000-None][switch.main_door_secured-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -514,7 +514,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[None-solution_3000][switch.main_door_secured-state]
# name: test_switch[solution_3000-None][switch.main_door_secured-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Main Door Secured',
@@ -527,7 +527,7 @@
'state': 'off',
})
# ---
# name: test_switch[None-solution_3000][switch.output_a-entry]
# name: test_switch[solution_3000-None][switch.output_a-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -562,7 +562,7 @@
'unit_of_measurement': None,
})
# ---
# name: test_switch[None-solution_3000][switch.output_a-state]
# name: test_switch[solution_3000-None][switch.output_a-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Output A',

View File

@@ -3,7 +3,6 @@
from datetime import datetime
from unittest.mock import patch
from freezegun import freeze_time
import pytest
from homeassistant.components import (
@@ -453,7 +452,7 @@ async def test_todo_add_item_fr(
assert intent_obj.slots.get("item", {}).get("value", "").strip() == "farine"
@freeze_time(
@pytest.mark.freeze_time(
datetime(
year=2013,
month=9,

View File

@@ -144,7 +144,7 @@ async def test_custom_agent(
@pytest.mark.usefixtures("init_components")
async def test_prepare_reload(hass: HomeAssistant) -> None:
async def test_reload(hass: HomeAssistant) -> None:
"""Test calling the reload service."""
language = hass.config.language
agent = async_get_agent(hass)
@@ -154,20 +154,39 @@ async def test_prepare_reload(hass: HomeAssistant) -> None:
# Confirm intents are loaded
assert agent._lang_intents.get(language)
# Confirm config intents are empty
assert not agent._config_intents["intents"]
# Try to clear for a different language
await hass.services.async_call("conversation", "reload", {"language": "elvish"})
await hass.async_block_till_done()
await hass.services.async_call(
"conversation", "reload", {"language": "elvish"}, blocking=True
)
# Confirm intents are still loaded
assert agent._lang_intents.get(language)
# Confirm config intents are still empty
assert not agent._config_intents["intents"]
# Clear cache for all languages
await hass.services.async_call("conversation", "reload", {})
await hass.async_block_till_done()
# Reload from a changed configuration file
hass_config_new = {
"conversation": {
"intents": {
"TestIntent": [
"Test intent phrase",
"Another test intent phrase",
]
}
}
}
with patch(
"homeassistant.config.load_yaml_config_file", return_value=hass_config_new
):
await hass.services.async_call("conversation", "reload", {}, blocking=True)
# Confirm intent cache is cleared
assert not agent._lang_intents.get(language)
# Confirm new config intents are loaded
assert agent._config_intents["intents"]
@pytest.mark.usefixtures("init_components")

View File

@@ -4,17 +4,14 @@ from collections.abc import Callable
import time
from typing import Any
from aiohttp.test_utils import TestClient
from freezegun import freeze_time
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.auth.models import Credentials
from homeassistant.core import HomeAssistant
from .conftest import TEST_EVENT, ApiResult, ComponentSetup
from tests.common import CLIENT_ID, MockConfigEntry, MockUser
from tests.common import MockConfigEntry
from tests.components.diagnostics import get_diagnostics_for_config_entry
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.typing import ClientSessionGenerator
@@ -29,41 +26,13 @@ def mock_test_setup(
mock_calendars_list({"items": [test_api_calendar]})
async def generate_new_hass_access_token(
hass: HomeAssistant, hass_admin_user: MockUser, hass_admin_credential: Credentials
) -> str:
"""Return an access token to access Home Assistant."""
await hass.auth.async_link_user(hass_admin_user, hass_admin_credential)
refresh_token = await hass.auth.async_create_refresh_token(
hass_admin_user, CLIENT_ID, credential=hass_admin_credential
)
return hass.auth.async_create_access_token(refresh_token)
def _get_test_client_generator(
hass: HomeAssistant, aiohttp_client: ClientSessionGenerator, new_token: str
):
"""Return a test client generator.""."""
async def auth_client() -> TestClient:
return await aiohttp_client(
hass.http.app, headers={"Authorization": f"Bearer {new_token}"}
)
return auth_client
@freeze_time("2023-03-13 12:05:00-07:00")
@pytest.mark.usefixtures("socket_enabled")
@pytest.mark.freeze_time("2023-03-13 12:05:00-07:00")
async def test_diagnostics(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
component_setup: ComponentSetup,
mock_events_list_items: Callable[[list[dict[str, Any]]], None],
hass_admin_user: MockUser,
hass_admin_credential: Credentials,
config_entry: MockConfigEntry,
aiohttp_client: ClientSessionGenerator,
snapshot: SnapshotAssertion,
aioclient_mock: AiohttpClientMocker,
) -> None:
@@ -103,13 +72,5 @@ async def test_diagnostics(
assert await component_setup()
# Since we are freezing time only when we enter this test, we need to
# manually create a new token and clients since the token created by
# the fixtures would not be valid.
new_token = await generate_new_hass_access_token(
hass, hass_admin_user, hass_admin_credential
)
data = await get_diagnostics_for_config_entry(
hass, _get_test_client_generator(hass, aiohttp_client, new_token), config_entry
)
data = await get_diagnostics_for_config_entry(hass, hass_client, config_entry)
assert data == snapshot

View File

@@ -3,7 +3,6 @@
from pathlib import Path
from unittest.mock import AsyncMock, Mock, patch
from freezegun import freeze_time
from google.genai.types import File, FileState, GenerateContentResponse
import pytest
import voluptuous as vol
@@ -223,7 +222,7 @@ async def test_generate_data(
@pytest.mark.usefixtures("mock_init_component")
@freeze_time("2025-06-14 22:59:00")
@pytest.mark.freeze_time("2025-06-14 22:59:00")
async def test_generate_image(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,

View File

@@ -3,7 +3,6 @@
from collections.abc import Generator
from unittest.mock import patch
from freezegun.api import freeze_time
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -33,7 +32,7 @@ async def set_tz(hass: HomeAssistant) -> None:
@pytest.mark.usefixtures("habitica")
@freeze_time("2024-09-20T22:00:00.000Z")
@pytest.mark.freeze_time("2024-09-20T22:00:00.000Z")
async def test_calendar_platform(
hass: HomeAssistant,
config_entry: MockConfigEntry,

View File

@@ -7,7 +7,7 @@ from unittest.mock import AsyncMock, patch
from uuid import UUID
from aiohttp import ClientError
from freezegun.api import FrozenDateTimeFactory, freeze_time
from freezegun.api import FrozenDateTimeFactory
from habiticalib import HabiticaGroupMembersResponse
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -82,7 +82,7 @@ async def test_notify_platform(
),
],
)
@freeze_time("2025-08-13T00:00:00+00:00")
@pytest.mark.freeze_time("2025-08-13T00:00:00+00:00")
async def test_send_message(
hass: HomeAssistant,
config_entry: MockConfigEntry,

View File

@@ -7,7 +7,6 @@ from unittest.mock import AsyncMock, patch
from uuid import UUID
from aiohttp import ClientError
from freezegun.api import freeze_time
from habiticalib import (
Checklist,
Direction,
@@ -1845,7 +1844,7 @@ async def test_create_todo(
],
)
@pytest.mark.usefixtures("mock_uuid4")
@freeze_time("2025-02-25T22:00:00.000Z")
@pytest.mark.freeze_time("2025-02-25T22:00:00.000Z")
async def test_update_daily(
hass: HomeAssistant,
config_entry: MockConfigEntry,
@@ -2023,7 +2022,7 @@ async def test_update_daily(
],
)
@pytest.mark.usefixtures("mock_uuid4")
@freeze_time("2025-02-25T22:00:00.000Z")
@pytest.mark.freeze_time("2025-02-25T22:00:00.000Z")
async def test_create_daily(
hass: HomeAssistant,
config_entry: MockConfigEntry,
@@ -2064,7 +2063,7 @@ async def test_create_daily(
],
)
@pytest.mark.usefixtures("mock_uuid4")
@freeze_time("2025-02-25T22:00:00.000Z")
@pytest.mark.freeze_time("2025-02-25T22:00:00.000Z")
async def test_update_daily_service_validation_errors(
hass: HomeAssistant,
config_entry: MockConfigEntry,

View File

@@ -2,7 +2,6 @@
from unittest.mock import patch
from freezegun import freeze_time
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -15,7 +14,7 @@ from . import setup_integration
from tests.common import MockConfigEntry, snapshot_platform
@freeze_time("2021-01-01T12:00:00Z")
@pytest.mark.freeze_time("2021-01-01T12:00:00Z")
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_all_sensor_entities(
hass: HomeAssistant,

View File

@@ -2,7 +2,6 @@
from unittest.mock import AsyncMock, patch
from freezegun import freeze_time
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -15,7 +14,7 @@ from . import setup_integration
from tests.common import MockConfigEntry, snapshot_platform
@freeze_time("2021-01-01T12:00:00Z")
@pytest.mark.freeze_time("2021-01-01T12:00:00Z")
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_all_sensor_entities(
hass: HomeAssistant,

View File

@@ -2,7 +2,6 @@
from datetime import timedelta
from freezegun import freeze_time
from freezegun.api import FrozenDateTimeFactory
import pytest
@@ -348,7 +347,7 @@ async def test_expose_conversion_exception(
)
@freeze_time("2022-1-7 9:13:14") # UTC -> +1h = Vienna in winter (9 -> 0xA)
@pytest.mark.freeze_time("2022-1-7 9:13:14") # UTC -> +1h = Vienna in winter (9 -> 0xA)
@pytest.mark.parametrize(
("time_type", "raw"),
[

View File

@@ -79,6 +79,7 @@ async def integration_fixture(
"aqara_door_window_p2",
"aqara_motion_p2",
"aqara_presence_fp300",
"aqara_sensor_w100",
"aqara_thermostat_w500",
"aqara_u200",
"battery_storage",

View File

@@ -0,0 +1,528 @@
{
"node_id": 75,
"date_commissioned": "2025-06-07T15:30:15.263101",
"last_interview": "2025-06-07T15:30:15.263113",
"interview_version": 6,
"available": true,
"is_bridge": false,
"attributes": {
"0/29/0": [
{
"0": 18,
"1": 1
},
{
"0": 22,
"1": 3
}
],
"0/29/1": [29, 31, 40, 42, 48, 49, 51, 52, 53, 60, 62, 63, 70],
"0/29/2": [41],
"0/29/3": [1, 2, 3, 4, 5, 6],
"0/29/65532": 0,
"0/29/65533": 2,
"0/29/65528": [],
"0/29/65529": [],
"0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"0/31/0": [
{
"1": 5,
"2": 2,
"3": [112233],
"4": null,
"254": 4
}
],
"0/31/1": [],
"0/31/2": 4,
"0/31/3": 3,
"0/31/4": 4,
"0/31/65532": 0,
"0/31/65533": 1,
"0/31/65528": [],
"0/31/65529": [],
"0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
"0/40/0": 17,
"0/40/1": "Aqara",
"0/40/2": 4447,
"0/40/3": "Aqara Climate Sensor W100",
"0/40/4": 8196,
"0/40/5": "Climate Sensor W100",
"0/40/6": "**REDACTED**",
"0/40/7": 12,
"0/40/8": "0.0.1.2",
"0/40/9": 1010,
"0/40/10": "1.0.1.0",
"0/40/11": "20250108",
"0/40/12": "AA016",
"0/40/13": "https://www.aqara.com/en/products.html",
"0/40/14": "Aqara Climate Sensor W100",
"0/40/15": "***************",
"0/40/16": false,
"0/40/18": "***************",
"0/40/19": {
"0": 3,
"1": 3
},
"0/40/21": 16973824,
"0/40/22": 1,
"0/40/65532": 0,
"0/40/65533": 3,
"0/40/65528": [],
"0/40/65529": [],
"0/40/65531": [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 21, 22,
65528, 65529, 65531, 65532, 65533
],
"0/42/0": [],
"0/42/1": true,
"0/42/2": 1,
"0/42/3": null,
"0/42/65532": 0,
"0/42/65533": 1,
"0/42/65528": [],
"0/42/65529": [0],
"0/42/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"0/48/0": 0,
"0/48/1": {
"0": 60,
"1": 900
},
"0/48/2": 0,
"0/48/3": 0,
"0/48/4": true,
"0/48/65532": 0,
"0/48/65533": 1,
"0/48/65528": [1, 3, 5],
"0/48/65529": [0, 2, 4],
"0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
"0/49/0": 1,
"0/49/1": [
{
"0": "aFq/aOcqMFo=",
"1": true
}
],
"0/49/2": 10,
"0/49/3": 20,
"0/49/4": true,
"0/49/5": 0,
"0/49/6": "aFq/aOcqMFo=",
"0/49/7": null,
"0/49/9": 4,
"0/49/10": 4,
"0/49/65532": 2,
"0/49/65533": 2,
"0/49/65528": [1, 5, 7],
"0/49/65529": [0, 3, 4, 6, 8],
"0/49/65531": [
0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 65528, 65529, 65531, 65532, 65533
],
"0/51/0": [
{
"0": "AqaraHome-0123",
"1": true,
"2": null,
"3": null,
"4": "piylcw37nWM=",
"5": [],
"6": [
"/RXRKakLAAFKcohVnCFKow==",
"/Z4/qUibGFsAAAD//gAcAg==",
"/Z4/qUibGFsYCaOd1Hp6Vg==",
"/oAAAAAAAACkLKVzDfudYw=="
],
"7": 4
}
],
"0/51/1": 1,
"0/51/2": 299,
"0/51/4": 6,
"0/51/5": [],
"0/51/8": false,
"0/51/65532": 0,
"0/51/65533": 2,
"0/51/65528": [2],
"0/51/65529": [0, 1],
"0/51/65531": [0, 1, 2, 4, 5, 8, 65528, 65529, 65531, 65532, 65533],
"0/52/0": [
{
"0": 2,
"1": "sys_evt",
"3": 1952
},
{
"0": 11,
"1": "Bluetoot",
"3": 1438
},
{
"0": 3,
"1": "THREAD",
"3": 1651
},
{
"0": 1,
"1": "Bluetoot",
"3": 306
},
{
"0": 10,
"1": "Bluetoot",
"3": 107
},
{
"0": 7,
"1": "Tmr Svc",
"3": 943
},
{
"0": 8,
"1": "app",
"3": 748
},
{
"0": 6,
"1": "IDLE",
"3": 231
},
{
"0": 4,
"1": "CHIP",
"3": 305
}
],
"0/52/1": 46224,
"0/52/2": 35696,
"0/52/3": 56048,
"0/52/65532": 1,
"0/52/65533": 1,
"0/52/65528": [],
"0/52/65529": [0],
"0/52/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"0/53/0": 11,
"0/53/1": 2,
"0/53/2": "AqaraHome-0123",
"0/53/3": 23343,
"0/53/4": 7519532985124270170,
"0/53/5": "QP2eP6lImxhb",
"0/53/6": 0,
"0/53/7": [
{
"0": 17151429082474872369,
"1": 284,
"2": 7168,
"3": 295817,
"4": 111774,
"5": 3,
"6": -74,
"7": -74,
"8": 37,
"9": 0,
"10": true,
"11": true,
"12": true,
"13": false
}
],
"0/53/8": [
{
"0": 17151429082474872369,
"1": 7168,
"2": 7,
"3": 0,
"4": 0,
"5": 3,
"6": 3,
"7": 28,
"8": true,
"9": true
}
],
"0/53/9": 405350277,
"0/53/22": 2799,
"0/53/23": 2797,
"0/53/24": 2,
"0/53/39": 503,
"0/53/40": 503,
"0/53/41": 0,
"0/53/65532": 15,
"0/53/65533": 2,
"0/53/65528": [],
"0/53/65529": [0],
"0/53/65531": [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 22, 23, 24, 39, 40, 41, 65528, 65529, 65531,
65532, 65533
],
"0/60/0": 0,
"0/60/1": null,
"0/60/2": null,
"0/60/65532": 1,
"0/60/65533": 1,
"0/60/65528": [],
"0/60/65529": [0, 1, 2],
"0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"0/62/0": [
{
"1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRSxgkBwEkCAEwCUEEL5gmAVxeNTcndwbt1d1SNaICqrmw8Mk3fQ7CkQlM0XhpLv0XzjnnmI+jorFA31RvWDYa0URByx588JSq6G/d7DcKNQEoARgkAgE2AwQCBAEYMAQUPES5ZFkTssoDCAkEz+kBgkL3jMcwBRRT9HTfU5Nds+HA8j+/MRP+0pVyIxgwC0B5OoI+cs5wwGlxvfMdinguUmA+VEWBZjQP6rEvd929qf4zpgpkfyjX7LFYCvoqqKJCOW052dLhgfYGUOqCfo7AGA==",
"2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEyT62Yt4qMI+MorlmQ/Hxh2CpLetznVknlAbhvYAwTexpSxp9GnhR09SrcUhz3mOb0eZa2TylqcnPBhHJ2Ih2RTcKNQEpARgkAmAwBBRT9HTfU5Nds+HA8j+/MRP+0pVyIzAFFOMCO8Jk7ZCknJquFGPtPzJiNqsDGDALQI/Kc38hQyK7AkT7/pN4hiYW3LoWKT3NA43+ssMJoVpDcaZ989GXBQKIbHKbBEXzUQ1J8wfL7l2pL0Z8Lso9JwgY",
"254": 4
}
],
"0/62/1": [
{
"1": "BIrruNo7r0gX6j6lq1dDi5zeK3jxcTavjt2o4adCCSCYtbxOakfb7C3GXqgV4LzulFSinbewmYkdqFBHqm5pxvU=",
"2": 4939,
"3": 2,
"4": 75,
"5": "",
"254": 4
}
],
"0/62/2": 5,
"0/62/3": 4,
"0/62/4": [
"FTABAQAkAgE3AyYUyakYCSYVj6gLsxgmBGoW1y8kBQA3BiYUyakYCSYVj6gLsxgkBwEkCAEwCUEEgYwxrTB+tyiEGfrRwjlXTG34MiQtJXbg5Qqd0ohdRW7MfwYY7vZiX/0h9hI8MqUralFaVPcnghAP0MSJm1YrqTcKNQEpARgkAmAwBBS3BS9aJzt+p6i28Nj+trB2Uu+vdzAFFLcFL1onO36nqLbw2P62sHZS7693GDALQMvassZTgvO/snCPohEojdKdGb2IpuRpSsu4HkM1JJQ9yFwhkyl0OOS2kvOVUNlfb2YnoJaH4L2jz0G9GVclBIgY",
"FTABAQAkAgE3AycUQhmZbaIbYjokFQIYJgRWZLcqJAUANwYnFEIZmW2iG2I6JBUCGCQHASQIATAJQQT2AlKGW/kOMjqayzeO0md523/fuhrhGEUU91uQpTiKo0I7wcPpKnmrwfQNPX6g0kEQl+VGaXa3e22lzfu5Tzp0Nwo1ASkBGCQCYDAEFOOMk13ScMKuT2hlaydi1yEJnhTqMAUU44yTXdJwwq5PaGVrJ2LXIQmeFOoYMAtAv2jJd1qd5miXbYesH1XrJ+vgyY0hzGuZ78N6Jw4Cb1oN1sLSpA+PNM0u7+hsEqcSvvn2eSV8EaRR+hg5YQjHDxg=",
"FTABD38O1NiPyscyxScZaN7uECQCATcDJhSoQfl2GCYEIqqfLyYFImy36zcGJhSoQfl2GCQHASQIATAJQQT5WrI2v6EgLRXdxlmZLlXX3rxeBe1C3NN/x9QV0tMVF+gH/FPSyq69dZKuoyskx0UOHcN20wdPffFuqgy/4uiaNwo1ASkBGCQCYDAEFM8XoLF/WKnSeqflSO5TQBQz4ObIMAUUzxegsX9YqdJ6p+VI7lNAFDPg5sgYMAtAHTWpsQPPwqR9gCqBGcDbPu2gusKeVuytcD5v7qK1/UjVr2/WGjMw3SYM10HWKdPTQZa2f3JI3uxv1nFnlcQpDBg=",
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEiuu42juvSBfqPqWrV0OLnN4rePFxNq+O3ajhp0IJIJi1vE5qR9vsLcZeqBXgvO6UVKKdt7CZiR2oUEeqbmnG9TcKNQEpARgkAmAwBBTjAjvCZO2QpJyarhRj7T8yYjarAzAFFOMCO8Jk7ZCknJquFGPtPzJiNqsDGDALQE7hTxTRg92QOxwA1hK3xv8DaxvxL71r6ZHcNRzug9wNnonJ+NC84SFKvKDxwcBxHYqFdIyDiDgwJNTQIBgasmIY"
],
"0/62/5": 4,
"0/62/65532": 0,
"0/62/65533": 1,
"0/62/65528": [1, 3, 5, 8],
"0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11],
"0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533],
"0/63/0": [],
"0/63/1": [],
"0/63/2": 4,
"0/63/3": 3,
"0/63/65532": 0,
"0/63/65533": 2,
"0/63/65528": [2, 5],
"0/63/65529": [0, 1, 3, 4],
"0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"0/70/0": 300,
"0/70/1": 0,
"0/70/2": 1000,
"0/70/65532": 0,
"0/70/65533": 2,
"0/70/65528": [],
"0/70/65529": [],
"0/70/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"1/3/0": 0,
"1/3/1": 4,
"1/3/65532": 0,
"1/3/65533": 4,
"1/3/65528": [],
"1/3/65529": [0],
"1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"1/29/0": [
{
"0": 770,
"1": 1
}
],
"1/29/1": [3, 29, 1026],
"1/29/2": [],
"1/29/3": [],
"1/29/65532": 0,
"1/29/65533": 2,
"1/29/65528": [],
"1/29/65529": [],
"1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"1/1026/0": 2773,
"1/1026/1": -4000,
"1/1026/2": 12500,
"1/1026/65532": 0,
"1/1026/65533": 4,
"1/1026/65528": [],
"1/1026/65529": [],
"1/1026/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"2/3/0": 0,
"2/3/1": 4,
"2/3/65532": 0,
"2/3/65533": 4,
"2/3/65528": [],
"2/3/65529": [0],
"2/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"2/29/0": [
{
"0": 775,
"1": 1
}
],
"2/29/1": [3, 29, 1029],
"2/29/2": [],
"2/29/3": [],
"2/29/65532": 0,
"2/29/65533": 2,
"2/29/65528": [],
"2/29/65529": [],
"2/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"2/1029/0": 4472,
"2/1029/1": 0,
"2/1029/2": 10000,
"2/1029/65532": 0,
"2/1029/65533": 3,
"2/1029/65528": [],
"2/1029/65529": [],
"2/1029/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"3/3/0": 0,
"3/3/1": 4,
"3/3/65532": 0,
"3/3/65533": 4,
"3/3/65528": [],
"3/3/65529": [0],
"3/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"3/29/0": [
{
"0": 15,
"1": 3
}
],
"3/29/1": [3, 29, 59],
"3/29/2": [],
"3/29/3": [],
"3/29/4": [
{
"0": null,
"1": 7,
"2": 1
},
{
"0": null,
"1": 8,
"2": 2
}
],
"3/29/65532": 1,
"3/29/65533": 2,
"3/29/65528": [],
"3/29/65529": [],
"3/29/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
"3/59/0": 2,
"3/59/1": 0,
"3/59/2": 2,
"3/59/65532": 30,
"3/59/65533": 1,
"3/59/65528": [],
"3/59/65529": [],
"3/59/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"4/3/0": 0,
"4/3/1": 4,
"4/3/65532": 0,
"4/3/65533": 4,
"4/3/65528": [],
"4/3/65529": [0],
"4/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"4/29/0": [
{
"0": 15,
"1": 3
}
],
"4/29/1": [3, 29, 59],
"4/29/2": [],
"4/29/3": [],
"4/29/4": [
{
"0": null,
"1": 7,
"2": 2
},
{
"0": null,
"1": 8,
"2": 4
}
],
"4/29/65532": 1,
"4/29/65533": 2,
"4/29/65528": [],
"4/29/65529": [],
"4/29/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
"4/59/0": 2,
"4/59/1": 0,
"4/59/2": 2,
"4/59/65532": 30,
"4/59/65533": 1,
"4/59/65528": [],
"4/59/65529": [],
"4/59/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"5/3/0": 0,
"5/3/1": 4,
"5/3/65532": 0,
"5/3/65533": 4,
"5/3/65528": [],
"5/3/65529": [0],
"5/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"5/29/0": [
{
"0": 15,
"1": 3
}
],
"5/29/1": [3, 29, 59],
"5/29/2": [],
"5/29/3": [],
"5/29/4": [
{
"0": null,
"1": 7,
"2": 3
},
{
"0": null,
"1": 8,
"2": 3
}
],
"5/29/65532": 1,
"5/29/65533": 2,
"5/29/65528": [],
"5/29/65529": [],
"5/29/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
"5/59/0": 2,
"5/59/1": 0,
"5/59/2": 2,
"5/59/65532": 30,
"5/59/65533": 1,
"5/59/65528": [],
"5/59/65529": [],
"5/59/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"6/29/0": [
{
"0": 17,
"1": 1
}
],
"6/29/1": [29, 47],
"6/29/2": [],
"6/29/3": [],
"6/29/65532": 0,
"6/29/65533": 2,
"6/29/65528": [],
"6/29/65529": [],
"6/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"6/47/0": 1,
"6/47/1": 0,
"6/47/2": "Battery",
"6/47/11": 3120,
"6/47/12": 200,
"6/47/14": 0,
"6/47/15": false,
"6/47/16": 2,
"6/47/19": "CR2450",
"6/47/25": 2,
"6/47/31": [],
"6/47/65532": 10,
"6/47/65533": 2,
"6/47/65528": [],
"6/47/65529": [],
"6/47/65531": [
0, 1, 2, 11, 12, 14, 15, 16, 19, 25, 31, 65528, 65529, 65531, 65532, 65533
]
},
"attribute_subscriptions": []
}

View File

@@ -438,6 +438,251 @@
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_sensor_w100][button.climate_sensor_w100_identify_1-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.climate_sensor_w100_identify_1',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (1)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-1-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[aqara_sensor_w100][button.climate_sensor_w100_identify_1-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Climate Sensor W100 Identify (1)',
}),
'context': <ANY>,
'entity_id': 'button.climate_sensor_w100_identify_1',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_sensor_w100][button.climate_sensor_w100_identify_2-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.climate_sensor_w100_identify_2',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (2)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-2-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[aqara_sensor_w100][button.climate_sensor_w100_identify_2-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Climate Sensor W100 Identify (2)',
}),
'context': <ANY>,
'entity_id': 'button.climate_sensor_w100_identify_2',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_sensor_w100][button.climate_sensor_w100_identify_3-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.climate_sensor_w100_identify_3',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (3)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-3-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[aqara_sensor_w100][button.climate_sensor_w100_identify_3-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Climate Sensor W100 Identify (3)',
}),
'context': <ANY>,
'entity_id': 'button.climate_sensor_w100_identify_3',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_sensor_w100][button.climate_sensor_w100_identify_4-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.climate_sensor_w100_identify_4',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (4)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-4-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[aqara_sensor_w100][button.climate_sensor_w100_identify_4-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Climate Sensor W100 Identify (4)',
}),
'context': <ANY>,
'entity_id': 'button.climate_sensor_w100_identify_4',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_sensor_w100][button.climate_sensor_w100_identify_5-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.climate_sensor_w100_identify_5',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (5)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-5-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[aqara_sensor_w100][button.climate_sensor_w100_identify_5-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Climate Sensor W100 Identify (5)',
}),
'context': <ANY>,
'entity_id': 'button.climate_sensor_w100_identify_5',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_thermostat_w500][button.floor_heating_thermostat_identify_1-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -1,4 +1,193 @@
# serializer version: 1
# name: test_events[aqara_sensor_w100][event.climate_sensor_w100_button_3-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'event_types': list([
'multi_press_1',
'multi_press_2',
'long_press',
'long_release',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'event',
'entity_category': None,
'entity_id': 'event.climate_sensor_w100_button_3',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <EventDeviceClass.BUTTON: 'button'>,
'original_icon': None,
'original_name': 'Button (3)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'button',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-3-GenericSwitch-59-1',
'unit_of_measurement': None,
})
# ---
# name: test_events[aqara_sensor_w100][event.climate_sensor_w100_button_3-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'button',
'event_type': None,
'event_types': list([
'multi_press_1',
'multi_press_2',
'long_press',
'long_release',
]),
'friendly_name': 'Climate Sensor W100 Button (3)',
}),
'context': <ANY>,
'entity_id': 'event.climate_sensor_w100_button_3',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_events[aqara_sensor_w100][event.climate_sensor_w100_button_4-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'event_types': list([
'multi_press_1',
'multi_press_2',
'long_press',
'long_release',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'event',
'entity_category': None,
'entity_id': 'event.climate_sensor_w100_button_4',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <EventDeviceClass.BUTTON: 'button'>,
'original_icon': None,
'original_name': 'Button (4)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'button',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-4-GenericSwitch-59-1',
'unit_of_measurement': None,
})
# ---
# name: test_events[aqara_sensor_w100][event.climate_sensor_w100_button_4-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'button',
'event_type': None,
'event_types': list([
'multi_press_1',
'multi_press_2',
'long_press',
'long_release',
]),
'friendly_name': 'Climate Sensor W100 Button (4)',
}),
'context': <ANY>,
'entity_id': 'event.climate_sensor_w100_button_4',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_events[aqara_sensor_w100][event.climate_sensor_w100_button_5-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'event_types': list([
'multi_press_1',
'multi_press_2',
'long_press',
'long_release',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'event',
'entity_category': None,
'entity_id': 'event.climate_sensor_w100_button_5',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <EventDeviceClass.BUTTON: 'button'>,
'original_icon': None,
'original_name': 'Button (5)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'button',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-5-GenericSwitch-59-1',
'unit_of_measurement': None,
})
# ---
# name: test_events[aqara_sensor_w100][event.climate_sensor_w100_button_5-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'button',
'event_type': None,
'event_types': list([
'multi_press_1',
'multi_press_2',
'long_press',
'long_release',
]),
'friendly_name': 'Climate Sensor W100 Button (5)',
}),
'context': <ANY>,
'entity_id': 'event.climate_sensor_w100_button_5',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_events[generic_switch][event.mock_generic_switch_button-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -1944,6 +1944,428 @@
'state': '27.94',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_battery-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.climate_sensor_w100_battery',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <SensorDeviceClass.BATTERY: 'battery'>,
'original_icon': None,
'original_name': 'Battery',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-6-PowerSource-47-12',
'unit_of_measurement': '%',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_battery-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'battery',
'friendly_name': 'Climate Sensor W100 Battery',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_battery',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '100',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_battery_type-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.climate_sensor_w100_battery_type',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Battery type',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'battery_replacement_description',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-6-PowerSourceBatReplacementDescription-47-19',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_battery_type-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Climate Sensor W100 Battery type',
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_battery_type',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'CR2450',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_battery_voltage-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.climate_sensor_w100_battery_voltage',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
'sensor': dict({
'suggested_display_precision': 0,
}),
'sensor.private': dict({
'suggested_unit_of_measurement': <UnitOfElectricPotential.VOLT: 'V'>,
}),
}),
'original_device_class': <SensorDeviceClass.VOLTAGE: 'voltage'>,
'original_icon': None,
'original_name': 'Battery voltage',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'battery_voltage',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-6-PowerSourceBatVoltage-47-11',
'unit_of_measurement': <UnitOfElectricPotential.VOLT: 'V'>,
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_battery_voltage-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'voltage',
'friendly_name': 'Climate Sensor W100 Battery voltage',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': <UnitOfElectricPotential.VOLT: 'V'>,
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_battery_voltage',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '3.12',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_current_switch_position_3-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.climate_sensor_w100_current_switch_position_3',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Current switch position (3)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'switch_current_position',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-3-SwitchCurrentPosition-59-1',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_current_switch_position_3-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Climate Sensor W100 Current switch position (3)',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_current_switch_position_3',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_current_switch_position_4-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.climate_sensor_w100_current_switch_position_4',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Current switch position (4)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'switch_current_position',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-4-SwitchCurrentPosition-59-1',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_current_switch_position_4-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Climate Sensor W100 Current switch position (4)',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_current_switch_position_4',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_current_switch_position_5-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.climate_sensor_w100_current_switch_position_5',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Current switch position (5)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'switch_current_position',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-5-SwitchCurrentPosition-59-1',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_current_switch_position_5-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Climate Sensor W100 Current switch position (5)',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_current_switch_position_5',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_humidity-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.climate_sensor_w100_humidity',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <SensorDeviceClass.HUMIDITY: 'humidity'>,
'original_icon': None,
'original_name': 'Humidity',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-2-HumiditySensor-1029-0',
'unit_of_measurement': '%',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_humidity-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'humidity',
'friendly_name': 'Climate Sensor W100 Humidity',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_humidity',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '44.72',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_temperature-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.climate_sensor_w100_temperature',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
'sensor': dict({
'suggested_display_precision': 1,
}),
}),
'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
'original_icon': None,
'original_name': 'Temperature',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-1-TemperatureSensor-1026-0',
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_temperature-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'temperature',
'friendly_name': 'Climate Sensor W100 Temperature',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_temperature',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '27.73',
})
# ---
# name: test_sensors[aqara_thermostat_w500][sensor.floor_heating_thermostat_energy-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -239,11 +239,12 @@ async def test_pump(
assert state
assert state.state == "off"
# PumpStatus --> DeviceFault bit
# Initial state: kRunning bit only (no fault bits) should be off
state = hass.states.get("binary_sensor.mock_pump_problem")
assert state
assert state.state == "unknown"
assert state.state == "off"
# Set DeviceFault bit
set_node_attribute(matter_node, 1, 512, 16, 1)
await trigger_subscription_callback(hass, matter_client)
@@ -251,7 +252,14 @@ async def test_pump(
assert state
assert state.state == "on"
# PumpStatus --> SupplyFault bit
# Clear all bits - problem sensor should be off
set_node_attribute(matter_node, 1, 512, 16, 0)
await trigger_subscription_callback(hass, matter_client)
state = hass.states.get("binary_sensor.mock_pump_problem")
assert state
assert state.state == "off"
# Set SupplyFault bit
set_node_attribute(matter_node, 1, 512, 16, 2)
await trigger_subscription_callback(hass, matter_client)
@@ -270,6 +278,7 @@ async def test_dishwasher_alarm(
state = hass.states.get("binary_sensor.dishwasher_door_alarm")
assert state
# set DoorAlarm alarm
set_node_attribute(matter_node, 1, 93, 2, 4)
await trigger_subscription_callback(hass, matter_client)
@@ -277,6 +286,22 @@ async def test_dishwasher_alarm(
assert state
assert state.state == "on"
# clear DoorAlarm alarm
set_node_attribute(matter_node, 1, 93, 2, 0)
await trigger_subscription_callback(hass, matter_client)
state = hass.states.get("binary_sensor.dishwasher_inflow_alarm")
assert state
assert state.state == "off"
# set InflowError alarm
set_node_attribute(matter_node, 1, 93, 2, 1)
await trigger_subscription_callback(hass, matter_client)
state = hass.states.get("binary_sensor.dishwasher_inflow_alarm")
assert state
assert state.state == "on"
@pytest.mark.parametrize("node_fixture", ["valve"])
async def test_water_valve(

View File

@@ -52,7 +52,7 @@
'event_types': list([
'Title: Hello',
]),
'expires': datetime.datetime(2025, 3, 29, 5, 58, 46, tzinfo=datetime.timezone.utc),
'expires': HAFakeDatetime(2025, 3, 29, 5, 58, 46, tzinfo=datetime.timezone.utc),
'friendly_name': 'mytopic',
'icon': 'https://example.com/icon.png',
'id': 'h6Y2hKA5sy0U',
@@ -61,7 +61,7 @@
'tags': list([
'octopus',
]),
'time': datetime.datetime(2025, 3, 28, 17, 58, 46, tzinfo=datetime.timezone.utc),
'time': HAFakeDatetime(2025, 3, 28, 17, 58, 46, tzinfo=datetime.timezone.utc),
'title': 'Title',
'topic': 'mytopic',
}),

View File

@@ -13,7 +13,7 @@ from aiontfy.exceptions import (
NtfyTimeoutError,
NtfyUnauthorizedAuthenticationError,
)
from freezegun.api import FrozenDateTimeFactory, freeze_time
from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -44,7 +44,7 @@ async def event_only() -> AsyncGenerator[None]:
@pytest.mark.usefixtures("mock_aiontfy")
@freeze_time("2025-09-03T22:00:00.000Z")
@pytest.mark.freeze_time("2025-09-03T22:00:00.000Z")
async def test_event_platform(
hass: HomeAssistant,
config_entry: MockConfigEntry,

View File

@@ -9,7 +9,6 @@ from aiontfy.exceptions import (
NtfyHTTPError,
NtfyUnauthorizedAuthenticationError,
)
from freezegun.api import freeze_time
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -57,7 +56,7 @@ async def test_notify_platform(
await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id)
@freeze_time("2025-01-09T12:00:00+00:00")
@pytest.mark.freeze_time("2025-01-09T12:00:00+00:00")
async def test_send_message(
hass: HomeAssistant,
config_entry: MockConfigEntry,

View File

@@ -3,7 +3,6 @@
from pathlib import Path
from unittest.mock import AsyncMock, patch
from freezegun import freeze_time
import httpx
from openai import PermissionDeniedError
import pytest
@@ -212,7 +211,7 @@ async def test_generate_data_with_attachments(
@pytest.mark.usefixtures("mock_init_component")
@freeze_time("2025-06-14 22:59:00")
@pytest.mark.freeze_time("2025-06-14 22:59:00")
@pytest.mark.parametrize("image_model", ["gpt-image-1", "gpt-image-1-mini"])
async def test_generate_image(
hass: HomeAssistant,

View File

@@ -2,7 +2,6 @@
import json
from freezegun import freeze_time
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -14,7 +13,7 @@ from tests.common import async_fire_mqtt_message
from tests.typing import MqttMockHAClient
@freeze_time("2024-02-26 01:21:34")
@pytest.mark.freeze_time("2024-02-26 01:21:34")
@pytest.mark.parametrize(
"sensor_suffix",
[

View File

@@ -1,55 +1 @@
"""Tests for the Plaato integration."""
from unittest.mock import patch
from freezegun import freeze_time
from pyplaato.models.airlock import PlaatoAirlock
from pyplaato.models.device import PlaatoDeviceType
from pyplaato.models.keg import PlaatoKeg
from homeassistant.components.plaato.const import (
CONF_DEVICE_NAME,
CONF_DEVICE_TYPE,
CONF_USE_WEBHOOK,
DOMAIN,
)
from homeassistant.const import CONF_TOKEN
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
# Note: It would be good to replace this test data
# with actual data from the API
AIRLOCK_DATA = {}
KEG_DATA = {}
@freeze_time("2024-05-24 12:00:00", tz_offset=0)
async def init_integration(
hass: HomeAssistant, device_type: PlaatoDeviceType
) -> MockConfigEntry:
"""Mock integration setup."""
with (
patch(
"homeassistant.components.plaato.coordinator.Plaato.get_airlock_data",
return_value=PlaatoAirlock(AIRLOCK_DATA),
),
patch(
"homeassistant.components.plaato.coordinator.Plaato.get_keg_data",
return_value=PlaatoKeg(KEG_DATA),
),
):
entry = MockConfigEntry(
domain=DOMAIN,
data={
CONF_USE_WEBHOOK: False,
CONF_TOKEN: "valid_token",
CONF_DEVICE_TYPE: device_type,
CONF_DEVICE_NAME: "device_name",
},
entry_id="123456",
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
return entry

View File

@@ -0,0 +1,62 @@
"""Test fixtures for the Plaato integration."""
from collections.abc import AsyncGenerator
from unittest.mock import patch
from pyplaato.models.airlock import PlaatoAirlock
from pyplaato.models.device import PlaatoDeviceType
from pyplaato.models.keg import PlaatoKeg
import pytest
from homeassistant.components.plaato.const import (
CONF_DEVICE_NAME,
CONF_DEVICE_TYPE,
CONF_USE_WEBHOOK,
DOMAIN,
)
from homeassistant.const import CONF_TOKEN, Platform
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
# Note: It would be good to replace this test data
# with actual data from the API
AIRLOCK_DATA = {}
KEG_DATA = {}
@pytest.fixture
async def init_integration(
hass: HomeAssistant,
device_type: PlaatoDeviceType,
platform: Platform,
) -> AsyncGenerator[MockConfigEntry]:
"""Mock integration setup."""
with (
patch(
"homeassistant.components.plaato.PLATFORMS",
[platform],
),
patch(
"homeassistant.components.plaato.coordinator.Plaato.get_airlock_data",
return_value=PlaatoAirlock(AIRLOCK_DATA),
),
patch(
"homeassistant.components.plaato.coordinator.Plaato.get_keg_data",
return_value=PlaatoKeg(KEG_DATA),
),
):
entry = MockConfigEntry(
domain=DOMAIN,
data={
CONF_USE_WEBHOOK: False,
CONF_TOKEN: "valid_token",
CONF_DEVICE_TYPE: device_type,
CONF_DEVICE_NAME: "device_name",
},
entry_id="123456",
)
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
yield entry

View File

@@ -1,7 +1,5 @@
"""Tests for the plaato binary sensors."""
from unittest.mock import patch
from pyplaato.models.device import PlaatoDeviceType
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -10,24 +8,23 @@ from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import init_integration
from tests.common import MockConfigEntry, snapshot_platform
from tests.common import snapshot_platform
@pytest.fixture
def platform() -> Platform:
"""Fixture to specify platform."""
return Platform.BINARY_SENSOR
# note: PlaatoDeviceType.Airlock does not provide binary sensors
@pytest.mark.parametrize("device_type", [PlaatoDeviceType.Keg])
@pytest.mark.freeze_time("2024-05-24 12:00:00", tz_offset=0)
async def test_binary_sensors(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
init_integration: MockConfigEntry,
snapshot: SnapshotAssertion,
device_type: PlaatoDeviceType,
) -> None:
"""Test binary sensors."""
with patch(
"homeassistant.components.plaato.PLATFORMS",
[Platform.BINARY_SENSOR],
):
entry = await init_integration(hass, device_type)
await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id)
await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id)

View File

@@ -1,7 +1,5 @@
"""Tests for the plaato sensors."""
from unittest.mock import patch
from pyplaato.models.device import PlaatoDeviceType
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -10,25 +8,24 @@ from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import init_integration
from tests.common import MockConfigEntry, snapshot_platform
from tests.common import snapshot_platform
@pytest.fixture
def platform() -> Platform:
"""Fixture to specify platform."""
return Platform.SENSOR
@pytest.mark.parametrize(
"device_type", [PlaatoDeviceType.Airlock, PlaatoDeviceType.Keg]
)
@pytest.mark.freeze_time("2024-05-24 12:00:00", tz_offset=0)
async def test_sensors(
hass: HomeAssistant,
entity_registry: er.EntityRegistry,
init_integration: MockConfigEntry,
snapshot: SnapshotAssertion,
device_type: PlaatoDeviceType,
) -> None:
"""Test sensors."""
with patch(
"homeassistant.components.plaato.PLATFORMS",
[Platform.SENSOR],
):
entry = await init_integration(hass, device_type)
await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id)
await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id)

View File

@@ -3,7 +3,6 @@
from collections.abc import AsyncGenerator
from unittest.mock import MagicMock, patch
from freezegun.api import freeze_time
from psnawp_api.core.psnawp_exceptions import (
PSNAWPClientError,
PSNAWPForbiddenError,
@@ -63,7 +62,7 @@ async def test_notify_platform(
"notify.testuser_direct_message_publicuniversalfriend",
],
)
@freeze_time("2025-07-28T00:00:00+00:00")
@pytest.mark.freeze_time("2025-07-28T00:00:00+00:00")
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_send_message(
hass: HomeAssistant,

View File

@@ -0,0 +1,893 @@
"""Models for SQLAlchemy.
This file contains the model definitions for schema version 51.
It is used to test the schema migration logic.
"""
from __future__ import annotations
from collections.abc import Callable
from datetime import datetime, timedelta
import logging
import time
from typing import Any, Final, Protocol, Self
import ciso8601
from fnv_hash_fast import fnv1a_32
from sqlalchemy import (
CHAR,
JSON,
BigInteger,
Boolean,
ColumnElement,
DateTime,
Float,
ForeignKey,
Identity,
Index,
Integer,
LargeBinary,
SmallInteger,
String,
Text,
case,
type_coerce,
)
from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite
from sqlalchemy.engine.interfaces import Dialect
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.orm import DeclarativeBase, Mapped, aliased, mapped_column, relationship
from sqlalchemy.types import TypeDecorator
from homeassistant.components.recorder.const import (
ALL_DOMAIN_EXCLUDE_ATTRS,
SupportedDialect,
)
from homeassistant.components.recorder.models import (
StatisticData,
StatisticDataTimestamp,
StatisticMeanType,
StatisticMetaData,
datetime_to_timestamp_or_none,
process_timestamp,
ulid_to_bytes_or_none,
uuid_hex_to_bytes_or_none,
)
from homeassistant.components.sensor import ATTR_STATE_CLASS
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_FRIENDLY_NAME,
ATTR_UNIT_OF_MEASUREMENT,
MATCH_ALL,
MAX_LENGTH_EVENT_EVENT_TYPE,
MAX_LENGTH_STATE_ENTITY_ID,
MAX_LENGTH_STATE_STATE,
)
from homeassistant.core import Event, EventStateChangedData
from homeassistant.helpers.json import JSON_DUMP, json_bytes, json_bytes_strip_null
from homeassistant.util import dt as dt_util
# SQLAlchemy Schema
# Registry root for the current (v51) schema tables defined in this module.
class Base(DeclarativeBase):
    """Base class for tables."""
# Separate registry for legacy table variants (same __tablename__ as the
# current tables) so both shapes can coexist during migration tests.
class LegacyBase(DeclarativeBase):
    """Base class for tables, used for schema migration."""
# Schema version this fixture module reproduces; migration tests compare
# against this frozen definition.
SCHEMA_VERSION = 51
_LOGGER = logging.getLogger(__name__)
# Table names.
TABLE_EVENTS = "events"
TABLE_EVENT_DATA = "event_data"
TABLE_EVENT_TYPES = "event_types"
TABLE_STATES = "states"
TABLE_STATE_ATTRIBUTES = "state_attributes"
TABLE_STATES_META = "states_meta"
TABLE_RECORDER_RUNS = "recorder_runs"
TABLE_SCHEMA_CHANGES = "schema_changes"
TABLE_STATISTICS = "statistics"
TABLE_STATISTICS_META = "statistics_meta"
TABLE_STATISTICS_RUNS = "statistics_runs"
TABLE_STATISTICS_SHORT_TERM = "statistics_short_term"
TABLE_MIGRATION_CHANGES = "migration_changes"
STATISTICS_TABLES = ("statistics", "statistics_short_term")
# Payload size caps: oversized attribute/event JSON is dropped (stored as "{}").
MAX_STATE_ATTRS_BYTES = 16384
MAX_EVENT_DATA_BYTES = 32768
PSQL_DIALECT = SupportedDialect.POSTGRESQL
ALL_TABLES = [
    TABLE_STATES,
    TABLE_STATE_ATTRIBUTES,
    TABLE_EVENTS,
    TABLE_EVENT_DATA,
    TABLE_EVENT_TYPES,
    TABLE_RECORDER_RUNS,
    TABLE_SCHEMA_CHANGES,
    TABLE_MIGRATION_CHANGES,
    TABLE_STATES_META,
    TABLE_STATISTICS,
    TABLE_STATISTICS_META,
    TABLE_STATISTICS_RUNS,
    TABLE_STATISTICS_SHORT_TERM,
]
TABLES_TO_CHECK = [
    TABLE_STATES,
    TABLE_EVENTS,
    TABLE_RECORDER_RUNS,
    TABLE_SCHEMA_CHANGES,
]
# Index names, shared with the migration code that creates/drops them.
LAST_UPDATED_INDEX_TS = "ix_states_last_updated_ts"
METADATA_ID_LAST_UPDATED_INDEX_TS = "ix_states_metadata_id_last_updated_ts"
EVENTS_CONTEXT_ID_BIN_INDEX = "ix_events_context_id_bin"
STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin"
LEGACY_STATES_EVENT_ID_INDEX = "ix_states_event_id"
LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX = "ix_states_entity_id_last_updated_ts"
LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID: Final = 36
# Binary (ULID/UUID derived) context ids are at most 16 bytes.
CONTEXT_ID_BIN_MAX_LENGTH = 16
# MySQL/MariaDB table options applied to every table.
MYSQL_COLLATE = "utf8mb4_unicode_ci"
MYSQL_DEFAULT_CHARSET = "utf8mb4"
MYSQL_ENGINE = "InnoDB"
_DEFAULT_TABLE_ARGS = {
    "mysql_default_charset": MYSQL_DEFAULT_CHARSET,
    "mysql_collate": MYSQL_COLLATE,
    "mysql_engine": MYSQL_ENGINE,
    "mariadb_default_charset": MYSQL_DEFAULT_CHARSET,
    "mariadb_collate": MYSQL_COLLATE,
    "mariadb_engine": MYSQL_ENGINE,
}
# Attributes that stay recorded even when an entity excludes MATCH_ALL.
_MATCH_ALL_KEEP = {
    ATTR_DEVICE_CLASS,
    ATTR_STATE_CLASS,
    ATTR_UNIT_OF_MEASUREMENT,
    ATTR_FRIENDLY_NAME,
}
# Placeholder type for retired datetime columns; compiled to CHAR(0)/CHAR(1)
# below so the column keeps its slot in the schema at minimal storage cost.
class UnusedDateTime(DateTime):
    """An unused column type that behaves like a datetime."""
# Placeholder type for retired string columns; see the @compiles hooks below
# for the dialect-specific CHAR(0)/CHAR(1) rendering.
class Unused(CHAR):
    """An unused column type that behaves like a string."""
# Both placeholder types share one compilation hook on these dialects; the
# stacked decorators register the same function for each type.
@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite")
@compiles(Unused, "mysql", "mariadb", "sqlite")
def compile_char_zero(type_: TypeDecorator, compiler: Any, **kw: Any) -> str:
    """Compile UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite."""
    return "CHAR(0)"  # Uses 1 byte on MySQL (no change on sqlite)
# PostgreSQL rejects CHAR(0), so the string placeholder uses CHAR(1) there.
@compiles(Unused, "postgresql")
def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str:
    """Compile Unused as CHAR(1) on postgresql."""
    return "CHAR(1)"  # Uses 1 byte
class FAST_PYSQLITE_DATETIME(sqlite.DATETIME):
    """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex."""

    def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None:
        """Offload the datetime parsing to ciso8601."""

        def process(value: Any) -> datetime | None:
            # NULL columns pass through; anything else is an ISO-8601 string.
            if value is None:
                return None
            return ciso8601.parse_datetime(value)

        return process
class NativeLargeBinary(LargeBinary):
    """A faster version of LargeBinary for engines that support python bytes natively."""

    # Returning None tells SQLAlchemy no per-row result conversion is needed.
    def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None:
        """No conversion needed for engines that support native bytes."""
        return None
# Although all integers are same in SQLite, it does not allow an identity column to be BIGINT
# https://sqlite.org/forum/info/2dfa968a702e1506e885cb06d92157d492108b22bf39459506ab9f7125bca7fd
ID_TYPE = BigInteger().with_variant(sqlite.INTEGER, "sqlite")
# For MariaDB and MySQL we can use an unsigned integer type since it will fit 2**32
# for sqlite and postgresql we use a bigint
UINT_32_TYPE = BigInteger().with_variant(
    mysql.INTEGER(unsigned=True),  # type: ignore[no-untyped-call]
    "mysql",
    "mariadb",
)
# JSON casts stored as TEXT everywhere except PostgreSQL, which has native types.
JSON_VARIANT_CAST = Text().with_variant(
    postgresql.JSON(none_as_null=True),
    "postgresql",
)
JSONB_VARIANT_CAST = Text().with_variant(
    postgresql.JSONB(none_as_null=True),
    "postgresql",
)
DATETIME_TYPE = (
    DateTime(timezone=True)
    .with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql", "mariadb")  # type: ignore[no-untyped-call]
    .with_variant(FAST_PYSQLITE_DATETIME(), "sqlite")  # type: ignore[no-untyped-call]
)
DOUBLE_TYPE = (
    Float()
    .with_variant(mysql.DOUBLE(asdecimal=False), "mysql", "mariadb")  # type: ignore[no-untyped-call]
    .with_variant(oracle.DOUBLE_PRECISION(), "oracle")
    .with_variant(postgresql.DOUBLE_PRECISION(), "postgresql")
)
# Reusable placeholder column types for retired schema columns (see Unused above).
UNUSED_LEGACY_COLUMN = Unused(0)
UNUSED_LEGACY_DATETIME_COLUMN = UnusedDateTime(timezone=True)
UNUSED_LEGACY_INTEGER_COLUMN = SmallInteger()
DOUBLE_PRECISION_TYPE_SQL = "DOUBLE PRECISION"
BIG_INTEGER_SQL = "BIGINT"
# Context ids are fixed-size binary; use the no-conversion variant where possible.
CONTEXT_BINARY_TYPE = LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH).with_variant(
    NativeLargeBinary(CONTEXT_ID_BIN_MAX_LENGTH), "mysql", "mariadb", "sqlite"
)
# Timestamps are stored as float seconds since the epoch.
TIMESTAMP_TYPE = DOUBLE_TYPE
class _LiteralProcessorType(Protocol):
    """Structural type for a SQLAlchemy literal processor callable."""

    def __call__(self, value: Any) -> str: ...
class JSONLiteral(JSON):
    """Teach SA how to literalize json."""

    def literal_processor(self, dialect: Dialect) -> _LiteralProcessorType:
        """Processor to convert a value to JSON."""

        def _to_json(value: Any) -> str:
            """Dump json."""
            return JSON_DUMP(value)

        return _to_json
class Events(Base):
    """Event history data."""
    __table_args__ = (
        # Used for fetching events at a specific time
        # see logbook
        Index(
            "ix_events_event_type_id_time_fired_ts", "event_type_id", "time_fired_ts"
        ),
        # Binary context id lookups; index length capped for MySQL/MariaDB.
        Index(
            EVENTS_CONTEXT_ID_BIN_INDEX,
            "context_id_bin",
            mysql_length=CONTEXT_ID_BIN_MAX_LENGTH,
            mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH,
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_EVENTS
    event_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # Retired columns kept only for schema compatibility; new rows write None
    # here and store the data in the *_ts / *_bin / *_id columns instead.
    event_type: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    event_data: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    origin: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    origin_idx: Mapped[int | None] = mapped_column(SmallInteger)
    time_fired: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    time_fired_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True)
    context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    data_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("event_data.data_id"), index=True
    )
    # Binary-encoded context ids (ULID/UUID bytes).
    context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    event_type_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("event_types.event_type_id")
    )
    event_data_rel: Mapped[EventData | None] = relationship("EventData")
    event_type_rel: Mapped[EventTypes | None] = relationship("EventTypes")
    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.Events("
            f"id={self.event_id}, event_type_id='{self.event_type_id}', "
            f"origin_idx='{self.origin_idx}', time_fired='{self._time_fired_isotime}'"
            f", data_id={self.data_id})>"
        )
    @property
    def _time_fired_isotime(self) -> str | None:
        """Return time_fired as an isotime string."""
        date_time: datetime | None
        # Prefer the timestamp column; fall back to the legacy datetime column.
        if self.time_fired_ts is not None:
            date_time = dt_util.utc_from_timestamp(self.time_fired_ts)
        else:
            date_time = process_timestamp(self.time_fired)
        if date_time is None:
            return None
        return date_time.isoformat(sep=" ", timespec="seconds")
    @staticmethod
    def from_event(event: Event) -> Events:
        """Create an event database object from a native event."""
        context = event.context
        # Legacy columns are explicitly written as None; only the replacement
        # columns carry data in this schema version.
        return Events(
            event_type=None,
            event_data=None,
            origin_idx=event.origin.idx,
            time_fired=None,
            time_fired_ts=event.time_fired_timestamp,
            context_id=None,
            context_id_bin=ulid_to_bytes_or_none(context.id),
            context_user_id=None,
            context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id),
            context_parent_id=None,
            context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
        )
# Pre-migration shape of the events table: same table name, but with the
# string context_id column still indexed. Registered on LegacyBase so it can
# coexist with Events above.
class LegacyEvents(LegacyBase):
    """Event history data with event_id, used for schema migration."""
    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_EVENTS
    event_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    context_id: Mapped[str | None] = mapped_column(
        String(LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID), index=True
    )
class EventData(Base):
    """Event data history."""
    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_EVENT_DATA
    data_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # FNV-1a hash of shared_data; indexed for deduplication lookups.
    hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True)
    # Note that this is not named attributes to avoid confusion with the states table
    shared_data: Mapped[str | None] = mapped_column(
        Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb")
    )
    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.EventData("
            f"id={self.data_id}, hash='{self.hash}', data='{self.shared_data}'"
            ")>"
        )
    @staticmethod
    def shared_data_bytes_from_event(
        event: Event, dialect: SupportedDialect | None
    ) -> bytes:
        """Create shared_data from an event."""
        # PostgreSQL cannot store NUL characters in JSON text, so nulls are
        # stripped there; other dialects use the plain encoder.
        encoder = json_bytes_strip_null if dialect == PSQL_DIALECT else json_bytes
        bytes_result = encoder(event.data)
        # Oversized payloads are dropped (replaced with an empty object) to
        # protect database performance.
        if len(bytes_result) > MAX_EVENT_DATA_BYTES:
            _LOGGER.warning(
                "Event data for %s exceed maximum size of %s bytes. "
                "This can cause database performance issues; Event data "
                "will not be stored",
                event.event_type,
                MAX_EVENT_DATA_BYTES,
            )
            return b"{}"
        return bytes_result
    @staticmethod
    def hash_shared_data_bytes(shared_data_bytes: bytes) -> int:
        """Return the hash of json encoded shared data."""
        return fnv1a_32(shared_data_bytes)
# Normalization table: event type strings are stored once and referenced from
# Events by event_type_id.
class EventTypes(Base):
    """Event type history."""
    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_EVENT_TYPES
    event_type_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    event_type: Mapped[str | None] = mapped_column(
        String(MAX_LENGTH_EVENT_EVENT_TYPE), index=True, unique=True
    )
    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.EventTypes("
            f"id={self.event_type_id}, event_type='{self.event_type}'"
            ")>"
        )
class States(Base):
    """State change history."""
    __table_args__ = (
        # Used for fetching the state of entities at a specific time
        # (get_states in history.py)
        Index(METADATA_ID_LAST_UPDATED_INDEX_TS, "metadata_id", "last_updated_ts"),
        # Binary context id lookups; index length capped for MySQL/MariaDB.
        Index(
            STATES_CONTEXT_ID_BIN_INDEX,
            "context_id_bin",
            mysql_length=CONTEXT_ID_BIN_MAX_LENGTH,
            mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH,
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_STATES
    state_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # Retired columns kept only for schema compatibility; new rows write None
    # here and use the *_ts / *_bin / metadata_id replacements instead.
    entity_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    state: Mapped[str | None] = mapped_column(String(MAX_LENGTH_STATE_STATE))
    attributes: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    event_id: Mapped[int | None] = mapped_column(UNUSED_LEGACY_INTEGER_COLUMN)
    last_changed: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    # last_changed_ts / last_reported_ts are stored as None when equal to
    # last_updated_ts (see from_event below) to save space.
    last_changed_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE)
    last_reported_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE)
    last_updated: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    last_updated_ts: Mapped[float | None] = mapped_column(
        TIMESTAMP_TYPE, default=time.time, index=True
    )
    # Self-referential link to the previous state row for this entity.
    old_state_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("states.state_id"), index=True
    )
    attributes_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("state_attributes.attributes_id"), index=True
    )
    context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    origin_idx: Mapped[int | None] = mapped_column(
        SmallInteger
    )  # 0 is local, 1 is remote
    old_state: Mapped[States | None] = relationship("States", remote_side=[state_id])
    state_attributes: Mapped[StateAttributes | None] = relationship("StateAttributes")
    context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    metadata_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("states_meta.metadata_id")
    )
    states_meta_rel: Mapped[StatesMeta | None] = relationship("StatesMeta")
    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.States(id={self.state_id}, entity_id='{self.entity_id}'"
            f" metadata_id={self.metadata_id},"
            f" state='{self.state}', event_id='{self.event_id}',"
            f" last_updated='{self._last_updated_isotime}',"
            f" old_state_id={self.old_state_id}, attributes_id={self.attributes_id})>"
        )
    @property
    def _last_updated_isotime(self) -> str | None:
        """Return last_updated as an isotime string."""
        date_time: datetime | None
        # Prefer the timestamp column; fall back to the legacy datetime column.
        if self.last_updated_ts is not None:
            date_time = dt_util.utc_from_timestamp(self.last_updated_ts)
        else:
            date_time = process_timestamp(self.last_updated)
        if date_time is None:
            return None
        return date_time.isoformat(sep=" ", timespec="seconds")
    @staticmethod
    def from_event(event: Event[EventStateChangedData]) -> States:
        """Create object from a state_changed event."""
        state = event.data["new_state"]
        # None state means the state was removed from the state machine
        if state is None:
            state_value = ""
            last_updated_ts = event.time_fired_timestamp
            last_changed_ts = None
            last_reported_ts = None
        else:
            state_value = state.state
            last_updated_ts = state.last_updated_timestamp
            # Store None when the value duplicates last_updated to save space.
            if state.last_updated == state.last_changed:
                last_changed_ts = None
            else:
                last_changed_ts = state.last_changed_timestamp
            if state.last_updated == state.last_reported:
                last_reported_ts = None
            else:
                last_reported_ts = state.last_reported_timestamp
        context = event.context
        # Legacy columns are explicitly written as None; only the replacement
        # columns carry data in this schema version.
        return States(
            state=state_value,
            entity_id=None,
            attributes=None,
            context_id=None,
            context_id_bin=ulid_to_bytes_or_none(context.id),
            context_user_id=None,
            context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id),
            context_parent_id=None,
            context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
            origin_idx=event.origin.idx,
            last_updated=None,
            last_changed=None,
            last_updated_ts=last_updated_ts,
            last_changed_ts=last_changed_ts,
            last_reported_ts=last_reported_ts,
        )
# Pre-migration shape of the states table, including the old composite
# (entity_id, last_updated_ts) index and the string context_id column.
class LegacyStates(LegacyBase):
    """State change history with entity_id, used for schema migration."""
    __table_args__ = (
        Index(
            LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX,
            "entity_id",
            "last_updated_ts",
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_STATES
    state_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    entity_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    last_updated_ts: Mapped[float | None] = mapped_column(
        TIMESTAMP_TYPE, default=time.time, index=True
    )
    context_id: Mapped[str | None] = mapped_column(
        String(LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID), index=True
    )
class StateAttributes(Base):
    """State attribute change history."""
    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_STATE_ATTRIBUTES
    attributes_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # FNV-1a hash of shared_attrs; indexed for deduplication lookups.
    hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True)
    # Note that this is not named attributes to avoid confusion with the states table
    shared_attrs: Mapped[str | None] = mapped_column(
        Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb")
    )
    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.StateAttributes(id={self.attributes_id}, hash='{self.hash}',"
            f" attributes='{self.shared_attrs}')>"
        )
    @staticmethod
    def shared_attrs_bytes_from_event(
        event: Event[EventStateChangedData],
        dialect: SupportedDialect | None,
    ) -> bytes:
        """Create shared_attrs from a state_changed event."""
        # None state means the state was removed from the state machine
        if (state := event.data["new_state"]) is None:
            return b"{}"
        if state_info := state.state_info:
            unrecorded_attributes = state_info["unrecorded_attributes"]
            exclude_attrs = {
                *ALL_DOMAIN_EXCLUDE_ATTRS,
                *unrecorded_attributes,
            }
            if MATCH_ALL in unrecorded_attributes:
                # Don't exclude device class, state class, unit of measurement
                # or friendly name when using the MATCH_ALL exclude constant
                exclude_attrs.update(state.attributes)
                exclude_attrs -= _MATCH_ALL_KEEP
        else:
            exclude_attrs = ALL_DOMAIN_EXCLUDE_ATTRS
        # PostgreSQL cannot store NUL characters in JSON text, so nulls are
        # stripped there; other dialects use the plain encoder.
        encoder = json_bytes_strip_null if dialect == PSQL_DIALECT else json_bytes
        bytes_result = encoder(
            {k: v for k, v in state.attributes.items() if k not in exclude_attrs}
        )
        # Oversized payloads are dropped (replaced with an empty object) to
        # protect database performance.
        if len(bytes_result) > MAX_STATE_ATTRS_BYTES:
            _LOGGER.warning(
                "State attributes for %s exceed maximum size of %s bytes. "
                "This can cause database performance issues; Attributes "
                "will not be stored",
                state.entity_id,
                MAX_STATE_ATTRS_BYTES,
            )
            return b"{}"
        return bytes_result
    @staticmethod
    def hash_shared_attrs_bytes(shared_attrs_bytes: bytes) -> int:
        """Return the hash of json encoded shared attributes."""
        return fnv1a_32(shared_attrs_bytes)
# Normalization table: entity id strings are stored once and referenced from
# States by metadata_id.
class StatesMeta(Base):
    """Metadata for states."""
    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_STATES_META
    metadata_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    entity_id: Mapped[str | None] = mapped_column(
        String(MAX_LENGTH_STATE_ENTITY_ID), index=True, unique=True
    )
    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.StatesMeta("
            f"id={self.metadata_id}, entity_id='{self.entity_id}'"
            ")>"
        )
# Mixin with the columns shared by the long-term and short-term statistics
# tables; the concrete tables set __tablename__ and their duration.
class StatisticsBase:
    """Statistics base class."""
    id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    created: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    created_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, default=time.time)
    metadata_id: Mapped[int | None] = mapped_column(
        ID_TYPE,
        ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"),
    )
    start: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    start_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True)
    mean: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    mean_weight: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    min: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    max: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    last_reset: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    last_reset_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE)
    state: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    sum: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    # Length of the period each row covers; set by the concrete subclass.
    duration: timedelta
    @classmethod
    def from_stats(
        cls, metadata_id: int, stats: StatisticData, now_timestamp: float | None = None
    ) -> Self:
        """Create object from a statistics with datetime objects."""
        # NOTE(review): `now_timestamp or time.time()` also falls back when
        # now_timestamp is 0.0; mirrors the production schema behavior.
        return cls(  # type: ignore[call-arg]
            metadata_id=metadata_id,
            created=None,
            created_ts=now_timestamp or time.time(),
            start=None,
            start_ts=stats["start"].timestamp(),
            mean=stats.get("mean"),
            mean_weight=stats.get("mean_weight"),
            min=stats.get("min"),
            max=stats.get("max"),
            last_reset=None,
            last_reset_ts=datetime_to_timestamp_or_none(stats.get("last_reset")),
            state=stats.get("state"),
            sum=stats.get("sum"),
        )
    @classmethod
    def from_stats_ts(
        cls,
        metadata_id: int,
        stats: StatisticDataTimestamp,
        now_timestamp: float | None = None,
    ) -> Self:
        """Create object from a statistics with timestamps."""
        return cls(  # type: ignore[call-arg]
            metadata_id=metadata_id,
            created=None,
            created_ts=now_timestamp or time.time(),
            start=None,
            start_ts=stats["start_ts"],
            mean=stats.get("mean"),
            mean_weight=stats.get("mean_weight"),
            min=stats.get("min"),
            max=stats.get("max"),
            last_reset=None,
            last_reset_ts=stats.get("last_reset_ts"),
            state=stats.get("state"),
            sum=stats.get("sum"),
        )
class Statistics(Base, StatisticsBase):
    """Long term statistics."""

    # Each long-term statistics row covers a one hour period.
    duration = timedelta(hours=1)

    __table_args__ = (
        # Used for fetching statistics for a certain entity at a specific time
        Index(
            "ix_statistics_statistic_id_start_ts",
            "metadata_id",
            "start_ts",
            unique=True,
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_STATISTICS
class _StatisticsShortTerm(StatisticsBase):
    """Short term statistics.

    Mixin shared by the current and legacy (32-bit index) table classes below.
    """

    # Each short-term statistics row covers a five minute period.
    duration = timedelta(minutes=5)

    __tablename__ = TABLE_STATISTICS_SHORT_TERM
class StatisticsShortTerm(Base, _StatisticsShortTerm):
    """Short term statistics."""

    __table_args__ = (
        # Used for fetching statistics for a certain entity at a specific time
        Index(
            "ix_statistics_short_term_statistic_id_start_ts",
            "metadata_id",
            "start_ts",
            unique=True,
        ),
        _DEFAULT_TABLE_ARGS,
    )
class LegacyStatisticsShortTerm(LegacyBase, _StatisticsShortTerm):
    """Short term statistics with 32-bit index, used for schema migration."""

    __table_args__ = (
        # Used for fetching statistics for a certain entity at a specific time
        Index(
            "ix_statistics_short_term_statistic_id_start_ts",
            "metadata_id",
            "start_ts",
            unique=True,
        ),
        _DEFAULT_TABLE_ARGS,
    )

    # Overrides the mixin column with a 32-bit Integer (instead of ID_TYPE) to
    # match the pre-migration schema; use_existing_column reuses the mapped
    # column inherited from StatisticsBase rather than declaring a second one.
    metadata_id: Mapped[int | None] = mapped_column(
        Integer,
        ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"),
        use_existing_column=True,
    )
class _StatisticsMeta:
    """Statistics meta data.

    Mixin shared by the current and legacy (32-bit index) table classes below.
    """

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_STATISTICS_META

    id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # The statistic's identifier, e.g. an entity id; one metadata row each.
    statistic_id: Mapped[str | None] = mapped_column(
        String(255), index=True, unique=True
    )
    source: Mapped[str | None] = mapped_column(String(32))
    unit_of_measurement: Mapped[str | None] = mapped_column(String(255))
    unit_class: Mapped[str | None] = mapped_column(String(255))
    has_mean: Mapped[bool | None] = mapped_column(Boolean)
    has_sum: Mapped[bool | None] = mapped_column(Boolean)
    name: Mapped[str | None] = mapped_column(String(255))
    mean_type: Mapped[StatisticMeanType] = mapped_column(
        SmallInteger, nullable=False, default=StatisticMeanType.NONE.value
    )  # See StatisticMeanType

    @staticmethod
    def from_meta(meta: StatisticMetaData) -> StatisticsMeta:
        """Create object from meta data.

        The keys of *meta* map one-to-one onto the columns of StatisticsMeta.
        """
        return StatisticsMeta(**meta)
class StatisticsMeta(Base, _StatisticsMeta):
    """Statistics meta data."""
class LegacyStatisticsMeta(LegacyBase, _StatisticsMeta):
    """Statistics meta data with 32-bit index, used for schema migration."""

    # Overrides the mixin primary key with a 32-bit Integer (instead of
    # ID_TYPE) to match the pre-migration schema; use_existing_column reuses
    # the inherited mapped column rather than declaring a second one.
    id: Mapped[int] = mapped_column(
        Integer,
        Identity(),
        primary_key=True,
        use_existing_column=True,
    )
class RecorderRuns(Base):
    """Representation of recorder run."""

    __table_args__ = (
        # Used for finding the run covering a given point in time.
        Index("ix_recorder_runs_start_end", "start", "end"),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_RECORDER_RUNS

    run_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    start: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)
    # None while the run is still open; set when the run is closed.
    end: Mapped[datetime | None] = mapped_column(DATETIME_TYPE)
    # True when the run was detected to have been closed improperly
    # (e.g. after an unclean shutdown).
    closed_incorrect: Mapped[bool] = mapped_column(Boolean, default=False)
    created: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        # end may be None for the currently open run, so format it separately.
        end = (
            f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None
        )
        return (
            f"<recorder.RecorderRuns(id={self.run_id},"
            f" start='{self.start.isoformat(sep=' ', timespec='seconds')}', end={end},"
            f" closed_incorrect={self.closed_incorrect},"
            f" created='{self.created.isoformat(sep=' ', timespec='seconds')}')>"
        )
class MigrationChanges(Base):
    """Representation of migration changes.

    Tracks, per data migration, the schema version it has been applied up to.
    """

    __tablename__ = TABLE_MIGRATION_CHANGES
    __table_args__ = (_DEFAULT_TABLE_ARGS,)

    # Identifier of the data migration this row tracks.
    migration_id: Mapped[str] = mapped_column(String(255), primary_key=True)
    version: Mapped[int] = mapped_column(SmallInteger)
class SchemaChanges(Base):
    """Representation of schema version changes.

    One row is added each time the database schema is migrated.
    """

    __tablename__ = TABLE_SCHEMA_CHANGES
    __table_args__ = (_DEFAULT_TABLE_ARGS,)

    change_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    schema_version: Mapped[int | None] = mapped_column(Integer)
    changed: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.SchemaChanges("
            f"id={self.change_id}, schema_version={self.schema_version}, "
            f"changed='{self.changed.isoformat(sep=' ', timespec='seconds')}'"
            ")>"
        )
class StatisticsRuns(Base):
    """Representation of statistics run."""

    __tablename__ = TABLE_STATISTICS_RUNS
    __table_args__ = (_DEFAULT_TABLE_ARGS,)

    run_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # Start of the period the statistics run covers (indexed for lookups).
    start: Mapped[datetime] = mapped_column(DATETIME_TYPE, index=True)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.StatisticsRuns(id={self.run_id},"
            f" start='{self.start.isoformat(sep=' ', timespec='seconds')}', )>"
        )
# Reusable SQL expression fragments for querying event/state payloads that are
# stored as serialized JSON text. type_coerce casts the text column to the
# dialect's JSON(B) variant so JSON path operators can be used in queries.

# Event payload stored in the shared EventData table (current schema).
EVENT_DATA_JSON = type_coerce(
    EventData.shared_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
)
# Event payload stored inline on the Events row (pre-EventData schema).
OLD_FORMAT_EVENT_DATA_JSON = type_coerce(
    Events.event_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
)

# State attributes stored in the shared StateAttributes table (current schema).
SHARED_ATTRS_JSON = type_coerce(
    StateAttributes.shared_attrs.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
)
# State attributes stored inline on the States row (legacy schema).
OLD_FORMAT_ATTRS_JSON = type_coerce(
    States.attributes.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
)

# JSON path accessors for fields commonly filtered on in event queries.
ENTITY_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["entity_id"]
OLD_ENTITY_ID_IN_EVENT: ColumnElement = OLD_FORMAT_EVENT_DATA_JSON["entity_id"]
DEVICE_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["device_id"]

# Alias of the States table used to join a state row to its previous state.
OLD_STATE = aliased(States, name="old_state")

# Prefer the shared attributes/data row; fall back to the legacy inline column
# when no shared row exists (rows written before the schema split).
SHARED_ATTR_OR_LEGACY_ATTRIBUTES = case(
    (StateAttributes.shared_attrs.is_(None), States.attributes),
    else_=StateAttributes.shared_attrs,
).label("attributes")
SHARED_DATA_OR_LEGACY_EVENT_DATA = case(
    (EventData.shared_data.is_(None), Events.event_data), else_=EventData.shared_data
).label("event_data")

View File

@@ -1,4 +1,4 @@
"""The tests for the recorder filter matching the EntityFilter component."""
"""Test for migration from DB schema version 50."""
import importlib
import sys
@@ -134,6 +134,26 @@ async def test_migrate_statistics_meta(
name="Test 3",
mean_type=StatisticMeanType.NONE,
),
# Wrong case
old_db_schema.StatisticsMeta(
statistic_id="sensor.test4",
source="recorder",
unit_of_measurement="l/min",
has_mean=None,
has_sum=True,
name="Test 4",
mean_type=StatisticMeanType.NONE,
),
# Wrong encoding
old_db_schema.StatisticsMeta(
statistic_id="sensor.test5",
source="recorder",
unit_of_measurement="",
has_mean=None,
has_sum=True,
name="Test 5",
mean_type=StatisticMeanType.NONE,
),
)
)
@@ -251,6 +271,28 @@ async def test_migrate_statistics_meta(
"statistics_unit_of_measurement": "ppm",
"unit_class": "unitless",
},
{
"display_unit_of_measurement": "l/min",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 4",
"source": "recorder",
"statistic_id": "sensor.test4",
"statistics_unit_of_measurement": "l/min",
"unit_class": None,
},
{
"display_unit_of_measurement": "",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 5",
"source": "recorder",
"statistic_id": "sensor.test5",
"statistics_unit_of_measurement": "",
"unit_class": None,
},
]
)
assert post_migration_metadata_db == {
@@ -287,5 +329,27 @@ async def test_migrate_statistics_meta(
"unit_class": "unitless",
"unit_of_measurement": "ppm",
},
"sensor.test4": {
"has_mean": None,
"has_sum": True,
"id": 4,
"mean_type": 0,
"name": "Test 4",
"source": "recorder",
"statistic_id": "sensor.test4",
"unit_class": None,
"unit_of_measurement": "l/min",
},
"sensor.test5": {
"has_mean": None,
"has_sum": True,
"id": 5,
"mean_type": 0,
"name": "Test 5",
"source": "recorder",
"statistic_id": "sensor.test5",
"unit_class": None,
"unit_of_measurement": "",
},
}
assert post_migration_metadata_api == unordered(pre_migration_metadata_api)

View File

@@ -0,0 +1,456 @@
"""Test for migration from DB schema version 51."""
import importlib
import sys
import threading
from unittest.mock import patch
import pytest
from pytest_unordered import unordered
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session
from homeassistant.components import recorder
from homeassistant.components.recorder import core, migration, statistics
from homeassistant.components.recorder.const import UNIT_CLASS_SCHEMA_VERSION
from homeassistant.components.recorder.db_schema import StatisticsMeta
from homeassistant.components.recorder.models import StatisticMeanType
from homeassistant.components.recorder.util import session_scope
from homeassistant.core import HomeAssistant
from .common import (
async_recorder_block_till_done,
async_wait_recording_done,
get_patched_live_version,
)
from .conftest import instrument_migration
from tests.common import async_test_home_assistant
from tests.typing import RecorderInstanceContextManager
CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine"
SCHEMA_MODULE_51 = "tests.components.recorder.db_schema_51"
@pytest.fixture
async def mock_recorder_before_hass(
async_test_recorder: RecorderInstanceContextManager,
) -> None:
"""Set up recorder."""
async def _async_wait_migration_done(hass: HomeAssistant) -> None:
"""Wait for the migration to be done."""
await recorder.get_instance(hass).async_block_till_done()
await async_recorder_block_till_done(hass)
def _create_engine_test(*args, **kwargs):
"""Test version of create_engine that initializes with old schema.
This simulates an existing db with the old schema.
"""
importlib.import_module(SCHEMA_MODULE_51)
old_db_schema = sys.modules[SCHEMA_MODULE_51]
engine = create_engine(*args, **kwargs)
old_db_schema.Base.metadata.create_all(engine)
with Session(engine) as session:
session.add(
recorder.db_schema.StatisticsRuns(start=statistics.get_start_time())
)
session.add(
recorder.db_schema.SchemaChanges(
schema_version=old_db_schema.SCHEMA_VERSION
)
)
session.commit()
return engine
@pytest.fixture
def db_schema_51():
"""Fixture to initialize the db with the old schema."""
importlib.import_module(SCHEMA_MODULE_51)
old_db_schema = sys.modules[SCHEMA_MODULE_51]
with (
patch.object(recorder, "db_schema", old_db_schema),
patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION),
patch.object(
migration,
"LIVE_MIGRATION_MIN_SCHEMA_VERSION",
get_patched_live_version(old_db_schema),
),
patch.object(migration, "non_live_data_migration_needed", return_value=False),
patch.object(core, "StatesMeta", old_db_schema.StatesMeta),
patch.object(core, "EventTypes", old_db_schema.EventTypes),
patch.object(core, "EventData", old_db_schema.EventData),
patch.object(core, "States", old_db_schema.States),
patch.object(core, "Events", old_db_schema.Events),
patch.object(core, "StateAttributes", old_db_schema.StateAttributes),
patch(CREATE_ENGINE_TARGET, new=_create_engine_test),
):
yield
@pytest.mark.parametrize(
("persistent_database", "expected_unit_class"),
[
(
True,
{
# MariaDB/MySQL should correct unit class of sensor.test4 + sensor.test5
"mysql": {
"sensor.test1": "energy",
"sensor.test2": "power",
"sensor.test3": "unitless",
"sensor.test4": None,
"sensor.test5": None,
},
# PostgreSQL is not modified by the migration
"postgresql": {
"sensor.test1": "energy",
"sensor.test2": "power",
"sensor.test3": "unitless",
"sensor.test4": "volume_flow_rate",
"sensor.test5": "area",
},
# SQLite is not modified by the migration
"sqlite": {
"sensor.test1": "energy",
"sensor.test2": "power",
"sensor.test3": "unitless",
"sensor.test4": "volume_flow_rate",
"sensor.test5": "area",
},
},
),
],
)
@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage
async def test_migrate_statistics_meta(
async_test_recorder: RecorderInstanceContextManager,
caplog: pytest.LogCaptureFixture,
expected_unit_class: dict[str, dict[str, str | None]],
) -> None:
"""Test we can fix bad migration to version 51."""
importlib.import_module(SCHEMA_MODULE_51)
old_db_schema = sys.modules[SCHEMA_MODULE_51]
def _insert_metadata():
with session_scope(hass=hass) as session:
session.add_all(
(
old_db_schema.StatisticsMeta(
statistic_id="sensor.test1",
source="recorder",
unit_of_measurement="kWh",
has_mean=None,
has_sum=True,
name="Test 1",
mean_type=StatisticMeanType.NONE,
unit_class="energy",
),
# Unexpected, but will not be changed by migration
old_db_schema.StatisticsMeta(
statistic_id="sensor.test2",
source="recorder",
unit_of_measurement="cats",
has_mean=None,
has_sum=True,
name="Test 2",
mean_type=StatisticMeanType.NONE,
unit_class="power",
),
# This will be updated to "unitless" when migration runs again
old_db_schema.StatisticsMeta(
statistic_id="sensor.test3",
source="recorder",
unit_of_measurement="ppm",
has_mean=None,
has_sum=True,
name="Test 3",
mean_type=StatisticMeanType.NONE,
unit_class=None,
),
# Wrong case
old_db_schema.StatisticsMeta(
statistic_id="sensor.test4",
source="recorder",
unit_of_measurement="l/min",
has_mean=None,
has_sum=True,
name="Test 4",
mean_type=StatisticMeanType.NONE,
unit_class="volume_flow_rate",
),
# Wrong encoding
old_db_schema.StatisticsMeta(
statistic_id="sensor.test5",
source="recorder",
unit_of_measurement="",
has_mean=None,
has_sum=True,
name="Test 5",
mean_type=StatisticMeanType.NONE,
unit_class="area",
),
)
)
# Create database with old schema
with (
patch.object(recorder, "db_schema", old_db_schema),
patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION),
patch.object(
migration,
"LIVE_MIGRATION_MIN_SCHEMA_VERSION",
get_patched_live_version(old_db_schema),
),
patch.object(migration.EventsContextIDMigration, "migrate_data"),
patch(CREATE_ENGINE_TARGET, new=_create_engine_test),
):
async with (
async_test_home_assistant() as hass,
async_test_recorder(hass) as instance,
):
await instance.async_add_executor_job(_insert_metadata)
await async_wait_recording_done(hass)
await _async_wait_migration_done(hass)
await hass.async_stop()
await hass.async_block_till_done()
def _object_as_dict(obj):
return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs}
def _fetch_metadata():
with session_scope(hass=hass) as session:
metadatas = session.query(StatisticsMeta).all()
return {
metadata.statistic_id: _object_as_dict(metadata)
for metadata in metadatas
}
# Run again with new schema, let migration run
async with async_test_home_assistant() as hass:
with (
instrument_migration(hass) as instrumented_migration,
):
# Stall migration when the last non-live schema migration is done
instrumented_migration.stall_on_schema_version = UNIT_CLASS_SCHEMA_VERSION
async with async_test_recorder(
hass, wait_recorder=False, wait_recorder_setup=False
) as instance:
engine_name = instance.engine.dialect.name
# Wait for migration to reach migration of unit class
await hass.async_add_executor_job(
instrumented_migration.apply_update_stalled.wait
)
# Check that it's possible to read metadata via the API, this will
# stop working when version 50 is migrated off line
pre_migration_metadata_api = await instance.async_add_executor_job(
statistics.list_statistic_ids,
hass,
None,
None,
)
instrumented_migration.migration_stall.set()
instance.recorder_and_worker_thread_ids.add(threading.get_ident())
await hass.async_block_till_done()
await async_wait_recording_done(hass)
await async_wait_recording_done(hass)
post_migration_metadata_db = await instance.async_add_executor_job(
_fetch_metadata
)
post_migration_metadata_api = await instance.async_add_executor_job(
statistics.list_statistic_ids,
hass,
None,
None,
)
await hass.async_stop()
await hass.async_block_till_done()
assert pre_migration_metadata_api == unordered(
[
{
"display_unit_of_measurement": "kWh",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 1",
"source": "recorder",
"statistic_id": "sensor.test1",
"statistics_unit_of_measurement": "kWh",
"unit_class": "energy",
},
{
"display_unit_of_measurement": "cats",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 2",
"source": "recorder",
"statistic_id": "sensor.test2",
"statistics_unit_of_measurement": "cats",
"unit_class": None,
},
{
"display_unit_of_measurement": "ppm",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 3",
"source": "recorder",
"statistic_id": "sensor.test3",
"statistics_unit_of_measurement": "ppm",
"unit_class": "unitless",
},
{
"display_unit_of_measurement": "l/min",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 4",
"source": "recorder",
"statistic_id": "sensor.test4",
"statistics_unit_of_measurement": "l/min",
"unit_class": None,
},
{
"display_unit_of_measurement": "",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 5",
"source": "recorder",
"statistic_id": "sensor.test5",
"statistics_unit_of_measurement": "",
"unit_class": None,
},
]
)
assert post_migration_metadata_db == {
"sensor.test1": {
"has_mean": None,
"has_sum": True,
"id": 1,
"mean_type": 0,
"name": "Test 1",
"source": "recorder",
"statistic_id": "sensor.test1",
"unit_class": expected_unit_class[engine_name]["sensor.test1"],
"unit_of_measurement": "kWh",
},
"sensor.test2": {
"has_mean": None,
"has_sum": True,
"id": 2,
"mean_type": 0,
"name": "Test 2",
"source": "recorder",
"statistic_id": "sensor.test2",
"unit_class": expected_unit_class[engine_name]["sensor.test2"],
"unit_of_measurement": "cats",
},
"sensor.test3": {
"has_mean": None,
"has_sum": True,
"id": 3,
"mean_type": 0,
"name": "Test 3",
"source": "recorder",
"statistic_id": "sensor.test3",
"unit_class": expected_unit_class[engine_name]["sensor.test3"],
"unit_of_measurement": "ppm",
},
"sensor.test4": {
"has_mean": None,
"has_sum": True,
"id": 4,
"mean_type": 0,
"name": "Test 4",
"source": "recorder",
"statistic_id": "sensor.test4",
"unit_class": expected_unit_class[engine_name]["sensor.test4"],
"unit_of_measurement": "l/min",
},
"sensor.test5": {
"has_mean": None,
"has_sum": True,
"id": 5,
"mean_type": 0,
"name": "Test 5",
"source": "recorder",
"statistic_id": "sensor.test5",
"unit_class": expected_unit_class[engine_name]["sensor.test5"],
"unit_of_measurement": "",
},
}
assert post_migration_metadata_api == unordered(
[
{
"display_unit_of_measurement": "kWh",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 1",
"source": "recorder",
"statistic_id": "sensor.test1",
"statistics_unit_of_measurement": "kWh",
"unit_class": expected_unit_class[engine_name]["sensor.test1"],
},
{
"display_unit_of_measurement": "cats",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 2",
"source": "recorder",
"statistic_id": "sensor.test2",
"statistics_unit_of_measurement": "cats",
"unit_class": expected_unit_class[engine_name]["sensor.test2"],
},
{
"display_unit_of_measurement": "ppm",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 3",
"source": "recorder",
"statistic_id": "sensor.test3",
"statistics_unit_of_measurement": "ppm",
"unit_class": expected_unit_class[engine_name]["sensor.test3"],
},
{
"display_unit_of_measurement": "l/min",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 4",
"source": "recorder",
"statistic_id": "sensor.test4",
"statistics_unit_of_measurement": "l/min",
"unit_class": expected_unit_class[engine_name]["sensor.test4"],
},
{
"display_unit_of_measurement": "",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 5",
"source": "recorder",
"statistic_id": "sensor.test5",
"statistics_unit_of_measurement": "",
"unit_class": expected_unit_class[engine_name]["sensor.test5"],
},
]
)

View File

@@ -5,7 +5,6 @@ from __future__ import annotations
from datetime import timedelta
from unittest.mock import MagicMock
from freezegun import freeze_time
from freezegun.api import FrozenDateTimeFactory
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -27,7 +26,7 @@ from homeassistant.util import dt as dt_util
from tests.common import async_fire_time_changed, snapshot_platform
@freeze_time("2022-03-12T15:24:26+00:00")
@pytest.mark.freeze_time("2022-03-12T15:24:26+00:00")
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
@pytest.mark.parametrize(
"load_platforms",

View File

@@ -9,7 +9,6 @@ import math
from typing import Any
from unittest.mock import patch
from freezegun.api import freeze_time
import pytest
from homeassistant.components import sensor
@@ -477,7 +476,7 @@ async def test_restore_sensor_save_state(
assert type(extra_data["native_value"]) is native_value_type
@freeze_time("2020-02-08 15:00:00")
@pytest.mark.freeze_time("2020-02-08 15:00:00")
async def test_restore_sensor_save_state_frozen_time_datetime(
hass: HomeAssistant,
hass_storage: dict[str, Any],
@@ -505,7 +504,7 @@ async def test_restore_sensor_save_state_frozen_time_datetime(
assert type(extra_data["native_value"]) is dict
@freeze_time("2020-02-08 15:00:00")
@pytest.mark.freeze_time("2020-02-08 15:00:00")
async def test_restore_sensor_save_state_frozen_time_date(
hass: HomeAssistant,
hass_storage: dict[str, Any],

View File

@@ -181,7 +181,7 @@
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': 'left_slot_intensity',
'unique_id': '123456789ABC-cury:0-left_slot_intensity',
'unit_of_measurement': '%',
})
@@ -239,7 +239,7 @@
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': 'right_slot_intensity',
'unique_id': '123456789ABC-cury:0-right_slot_intensity',
'unit_of_measurement': '%',
})

View File

@@ -27,7 +27,7 @@
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_device_class': <NumberDeviceClass.TEMPERATURE: 'temperature'>,
'original_icon': None,
'original_name': 'External temperature',
'platform': 'shelly',
@@ -42,6 +42,7 @@
# name: test_blu_trv_number_entity[number.trv_name_external_temperature-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'temperature',
'friendly_name': 'TRV-Name External temperature',
'max': 50,
'min': -50,
@@ -150,7 +151,7 @@
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': 'left_slot_intensity',
'unique_id': '123456789ABC-cury:0-left_slot_intensity',
'unit_of_measurement': '%',
})
@@ -208,7 +209,7 @@
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': 'right_slot_intensity',
'unique_id': '123456789ABC-cury:0-right_slot_intensity',
'unit_of_measurement': '%',
})

View File

@@ -4,7 +4,6 @@ from collections.abc import Generator
from http import HTTPStatus
from unittest.mock import patch
from freezegun.api import freeze_time
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -28,7 +27,7 @@ def event_only() -> Generator[None]:
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
@freeze_time("2025-01-01T03:30:00.000Z")
@pytest.mark.freeze_time("2025-01-01T03:30:00.000Z")
async def test_setup(
hass: HomeAssistant,
config_entry: MockConfigEntry,
@@ -129,7 +128,7 @@ async def test_setup(
],
)
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
@freeze_time("2025-01-01T03:30:00.000+00:00")
@pytest.mark.freeze_time("2025-01-01T03:30:00.000+00:00")
async def test_webhook_event(
hass: HomeAssistant,
config_entry: MockConfigEntry,

View File

@@ -3,7 +3,6 @@
from datetime import datetime, timedelta
from unittest.mock import MagicMock
from freezegun import freeze_time
from freezegun.api import FrozenDateTimeFactory
from pysmhi import (
SMHIFirePointForecast,
@@ -66,7 +65,7 @@ async def test_setup_hass(
"to_load",
[1],
)
@freeze_time(datetime(2023, 8, 7, 1, tzinfo=dt_util.UTC))
@pytest.mark.freeze_time(datetime(2023, 8, 7, 1, tzinfo=dt_util.UTC))
async def test_clear_night(
hass: HomeAssistant,
mock_client: SMHIPointForecast,

View File

@@ -2,7 +2,7 @@
from unittest.mock import AsyncMock
from freezegun import freeze_time
import pytest
from homeassistant.const import STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
@@ -11,7 +11,7 @@ from . import async_init_integration, find_update_callback
from .const import MOCK_SNOO_DATA
@freeze_time("2025-01-01 12:00:00")
@pytest.mark.freeze_time("2025-01-01 12:00:00")
async def test_events(hass: HomeAssistant, bypass_api: AsyncMock) -> None:
"""Test events and check test values are correctly set."""
await async_init_integration(hass)
@@ -26,7 +26,7 @@ async def test_events(hass: HomeAssistant, bypass_api: AsyncMock) -> None:
)
@freeze_time("2025-01-01 12:00:00")
@pytest.mark.freeze_time("2025-01-01 12:00:00")
async def test_events_data_on_startup(
hass: HomeAssistant, bypass_api: AsyncMock
) -> None:

Some files were not shown because too many files have changed in this diff Show More