Compare commits

..

1 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
c68b0dcb7d Initial plan 2025-11-10 11:21:26 +00:00
117 changed files with 5810 additions and 7727 deletions

View File

@@ -25,7 +25,7 @@ from .const import (
RECOMMENDED_CHAT_MODEL,
)
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
PLATFORMS = (Platform.CONVERSATION,)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]

View File

@@ -1,80 +0,0 @@
"""AI Task integration for Anthropic."""
from __future__ import annotations
from json import JSONDecodeError
import logging
from homeassistant.components import ai_task, conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.json import json_loads
from .entity import AnthropicBaseLLMEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up AI Task entities."""
for subentry in config_entry.subentries.values():
if subentry.subentry_type != "ai_task_data":
continue
async_add_entities(
[AnthropicTaskEntity(config_entry, subentry)],
config_subentry_id=subentry.subentry_id,
)
class AnthropicTaskEntity(
ai_task.AITaskEntity,
AnthropicBaseLLMEntity,
):
"""Anthropic AI Task entity."""
_attr_supported_features = (
ai_task.AITaskEntityFeature.GENERATE_DATA
| ai_task.AITaskEntityFeature.SUPPORT_ATTACHMENTS
)
async def _async_generate_data(
self,
task: ai_task.GenDataTask,
chat_log: conversation.ChatLog,
) -> ai_task.GenDataTaskResult:
"""Handle a generate data task."""
await self._async_handle_chat_log(chat_log, task.name, task.structure)
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
raise HomeAssistantError(
"Last content in chat log is not an AssistantContent"
)
text = chat_log.content[-1].content or ""
if not task.structure:
return ai_task.GenDataTaskResult(
conversation_id=chat_log.conversation_id,
data=text,
)
try:
data = json_loads(text)
except JSONDecodeError as err:
_LOGGER.error(
"Failed to parse JSON response: %s. Response: %s",
err,
text,
)
raise HomeAssistantError("Error with Claude structured response") from err
return ai_task.GenDataTaskResult(
conversation_id=chat_log.conversation_id,
data=data,
)

View File

@@ -53,7 +53,6 @@ from .const import (
CONF_WEB_SEARCH_REGION,
CONF_WEB_SEARCH_TIMEZONE,
CONF_WEB_SEARCH_USER_LOCATION,
DEFAULT_AI_TASK_NAME,
DEFAULT_CONVERSATION_NAME,
DOMAIN,
NON_THINKING_MODELS,
@@ -75,16 +74,12 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
}
)
RECOMMENDED_CONVERSATION_OPTIONS = {
RECOMMENDED_OPTIONS = {
CONF_RECOMMENDED: True,
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
}
RECOMMENDED_AI_TASK_OPTIONS = {
CONF_RECOMMENDED: True,
}
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
"""Validate the user input allows us to connect.
@@ -107,7 +102,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
errors = {}
if user_input is not None:
self._async_abort_entries_match(user_input)
@@ -135,16 +130,10 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
subentries=[
{
"subentry_type": "conversation",
"data": RECOMMENDED_CONVERSATION_OPTIONS,
"data": RECOMMENDED_OPTIONS,
"title": DEFAULT_CONVERSATION_NAME,
"unique_id": None,
},
{
"subentry_type": "ai_task_data",
"data": RECOMMENDED_AI_TASK_OPTIONS,
"title": DEFAULT_AI_TASK_NAME,
"unique_id": None,
},
}
],
)
@@ -158,10 +147,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
cls, config_entry: ConfigEntry
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this integration."""
return {
"conversation": ConversationSubentryFlowHandler,
"ai_task_data": ConversationSubentryFlowHandler,
}
return {"conversation": ConversationSubentryFlowHandler}
class ConversationSubentryFlowHandler(ConfigSubentryFlow):
@@ -178,10 +164,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Add a subentry."""
if self._subentry_type == "ai_task_data":
self.options = RECOMMENDED_AI_TASK_OPTIONS.copy()
else:
self.options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
self.options = RECOMMENDED_OPTIONS.copy()
return await self.async_step_init()
async def async_step_reconfigure(
@@ -215,29 +198,23 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
errors: dict[str, str] = {}
if self._is_new:
if self._subentry_type == "ai_task_data":
default_name = DEFAULT_AI_TASK_NAME
else:
default_name = DEFAULT_CONVERSATION_NAME
step_schema[vol.Required(CONF_NAME, default=default_name)] = str
if self._subentry_type == "conversation":
step_schema.update(
{
vol.Optional(CONF_PROMPT): TemplateSelector(),
vol.Optional(
CONF_LLM_HASS_API,
): SelectSelector(
SelectSelectorConfig(options=hass_apis, multiple=True)
),
}
step_schema[vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME)] = (
str
)
step_schema[
vol.Required(
CONF_RECOMMENDED, default=self.options.get(CONF_RECOMMENDED, False)
)
] = bool
step_schema.update(
{
vol.Optional(CONF_PROMPT): TemplateSelector(),
vol.Optional(
CONF_LLM_HASS_API,
): SelectSelector(
SelectSelectorConfig(options=hass_apis, multiple=True)
),
vol.Required(
CONF_RECOMMENDED, default=self.options.get(CONF_RECOMMENDED, False)
): bool,
}
)
if user_input is not None:
if not user_input.get(CONF_LLM_HASS_API):
@@ -321,14 +298,10 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
if not model.startswith(tuple(NON_THINKING_MODELS)):
step_schema[
vol.Optional(CONF_THINKING_BUDGET, default=RECOMMENDED_THINKING_BUDGET)
] = vol.All(
NumberSelector(
NumberSelectorConfig(
min=0,
max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
)
),
vol.Coerce(int),
] = NumberSelector(
NumberSelectorConfig(
min=0, max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS)
)
)
else:
self.options.pop(CONF_THINKING_BUDGET, None)

View File

@@ -6,7 +6,6 @@ DOMAIN = "anthropic"
LOGGER = logging.getLogger(__package__)
DEFAULT_CONVERSATION_NAME = "Claude conversation"
DEFAULT_AI_TASK_NAME = "Claude AI Task"
CONF_RECOMMENDED = "recommended"
CONF_PROMPT = "prompt"

View File

@@ -1,24 +1,17 @@
"""Base entity for Anthropic."""
import base64
from collections.abc import AsyncGenerator, Callable, Iterable
from dataclasses import dataclass, field
import json
from mimetypes import guess_file_type
from pathlib import Path
from typing import Any
import anthropic
from anthropic import AsyncStream
from anthropic.types import (
Base64ImageSourceParam,
Base64PDFSourceParam,
CitationsDelta,
CitationsWebSearchResultLocation,
CitationWebSearchResultLocationParam,
ContentBlockParam,
DocumentBlockParam,
ImageBlockParam,
InputJSONDelta,
MessageDeltaUsage,
MessageParam,
@@ -44,9 +37,6 @@ from anthropic.types import (
ThinkingConfigDisabledParam,
ThinkingConfigEnabledParam,
ThinkingDelta,
ToolChoiceAnyParam,
ToolChoiceAutoParam,
ToolChoiceToolParam,
ToolParam,
ToolResultBlockParam,
ToolUnionParam,
@@ -60,16 +50,13 @@ from anthropic.types import (
WebSearchToolResultError,
)
from anthropic.types.message_create_params import MessageCreateParamsStreaming
import voluptuous as vol
from voluptuous_openapi import convert
from homeassistant.components import conversation
from homeassistant.config_entries import ConfigSubentry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, llm
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
from . import AnthropicConfigEntry
from .const import (
@@ -334,7 +321,6 @@ def _convert_content(
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
chat_log: conversation.ChatLog,
stream: AsyncStream[MessageStreamEvent],
output_tool: str | None = None,
) -> AsyncGenerator[
conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
]:
@@ -395,16 +381,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
input="",
)
current_tool_args = ""
if response.content_block.name == output_tool:
if first_block or content_details.has_content():
if content_details.has_citations():
content_details.delete_empty()
yield {"native": content_details}
content_details = ContentDetails()
content_details.add_citation_detail()
yield {"role": "assistant"}
has_native = False
first_block = False
elif isinstance(response.content_block, TextBlock):
if ( # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead.
first_block
@@ -495,16 +471,7 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
first_block = True
elif isinstance(response, RawContentBlockDeltaEvent):
if isinstance(response.delta, InputJSONDelta):
if (
current_tool_block is not None
and current_tool_block["name"] == output_tool
):
content_details.citation_details[-1].length += len(
response.delta.partial_json
)
yield {"content": response.delta.partial_json}
else:
current_tool_args += response.delta.partial_json
current_tool_args += response.delta.partial_json
elif isinstance(response.delta, TextDelta):
content_details.citation_details[-1].length += len(response.delta.text)
yield {"content": response.delta.text}
@@ -523,9 +490,6 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
content_details.add_citation(response.delta.citation)
elif isinstance(response, RawContentBlockStopEvent):
if current_tool_block is not None:
if current_tool_block["name"] == output_tool:
current_tool_block = None
continue
tool_args = json.loads(current_tool_args) if current_tool_args else {}
current_tool_block["input"] = tool_args
yield {
@@ -593,8 +557,6 @@ class AnthropicBaseLLMEntity(Entity):
async def _async_handle_chat_log(
self,
chat_log: conversation.ChatLog,
structure_name: str | None = None,
structure: vol.Schema | None = None,
) -> None:
"""Generate an answer for the chat log."""
options = self.subentry.data
@@ -651,74 +613,6 @@ class AnthropicBaseLLMEntity(Entity):
}
tools.append(web_search)
# Handle attachments by adding them to the last user message
last_content = chat_log.content[-1]
if last_content.role == "user" and last_content.attachments:
last_message = messages[-1]
if last_message["role"] != "user":
raise HomeAssistantError(
"Last message must be a user message to add attachments"
)
if isinstance(last_message["content"], str):
last_message["content"] = [
TextBlockParam(type="text", text=last_message["content"])
]
last_message["content"].extend( # type: ignore[union-attr]
await async_prepare_files_for_prompt(
self.hass, [(a.path, a.mime_type) for a in last_content.attachments]
)
)
if structure and structure_name:
structure_name = slugify(structure_name)
if model_args["thinking"]["type"] == "disabled":
if not tools:
# Simplest case: no tools and no extended thinking
# Add a tool and force its use
model_args["tool_choice"] = ToolChoiceToolParam(
type="tool",
name=structure_name,
)
else:
# Second case: tools present but no extended thinking
# Allow the model to use any tool but not text response
# The model should know to use the right tool by its description
model_args["tool_choice"] = ToolChoiceAnyParam(
type="any",
)
else:
# Extended thinking is enabled. With extended thinking, we cannot
# force tool use or disable text responses, so we add a hint to the
# system prompt instead. With extended thinking, the model should be
# smart enough to use the tool.
model_args["tool_choice"] = ToolChoiceAutoParam(
type="auto",
)
if isinstance(model_args["system"], str):
model_args["system"] = [
TextBlockParam(type="text", text=model_args["system"])
]
model_args["system"].append( # type: ignore[union-attr]
TextBlockParam(
type="text",
text=f"Claude MUST use the '{structure_name}' tool to provide the final answer instead of plain text.",
)
)
tools.append(
ToolParam(
name=structure_name,
description="Use this tool to reply to the user",
input_schema=convert(
structure,
custom_serializer=chat_log.llm_api.custom_serializer
if chat_log.llm_api
else llm.selector_serializer,
),
)
)
if tools:
model_args["tools"] = tools
@@ -735,11 +629,7 @@ class AnthropicBaseLLMEntity(Entity):
content
async for content in chat_log.async_add_delta_content_stream(
self.entity_id,
_transform_stream(
chat_log,
stream,
output_tool=structure_name if structure else None,
),
_transform_stream(chat_log, stream),
)
]
)
@@ -751,59 +641,3 @@ class AnthropicBaseLLMEntity(Entity):
if not chat_log.unresponded_tool_results:
break
async def async_prepare_files_for_prompt(
hass: HomeAssistant, files: list[tuple[Path, str | None]]
) -> Iterable[ImageBlockParam | DocumentBlockParam]:
"""Append files to a prompt.
Caller needs to ensure that the files are allowed.
"""
def append_files_to_content() -> Iterable[ImageBlockParam | DocumentBlockParam]:
content: list[ImageBlockParam | DocumentBlockParam] = []
for file_path, mime_type in files:
if not file_path.exists():
raise HomeAssistantError(f"`{file_path}` does not exist")
if mime_type is None:
mime_type = guess_file_type(file_path)[0]
if not mime_type or not mime_type.startswith(("image/", "application/pdf")):
raise HomeAssistantError(
"Only images and PDF are supported by the Anthropic API,"
f"`{file_path}` is not an image file or PDF"
)
if mime_type == "image/jpg":
mime_type = "image/jpeg"
base64_file = base64.b64encode(file_path.read_bytes()).decode("utf-8")
if mime_type.startswith("image/"):
content.append(
ImageBlockParam(
type="image",
source=Base64ImageSourceParam(
type="base64",
media_type=mime_type, # type: ignore[typeddict-item]
data=base64_file,
),
)
)
elif mime_type.startswith("application/pdf"):
content.append(
DocumentBlockParam(
type="document",
source=Base64PDFSourceParam(
type="base64",
media_type=mime_type, # type: ignore[typeddict-item]
data=base64_file,
),
)
)
return content
return await hass.async_add_executor_job(append_files_to_content)

View File

@@ -18,49 +18,6 @@
}
},
"config_subentries": {
"ai_task_data": {
"abort": {
"entry_not_loaded": "[%key:component::anthropic::config_subentries::conversation::abort::entry_not_loaded%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"entry_type": "AI task",
"initiate_flow": {
"reconfigure": "Reconfigure AI task",
"user": "Add AI task"
},
"step": {
"advanced": {
"data": {
"chat_model": "[%key:common::generic::model%]",
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
},
"init": {
"data": {
"name": "[%key:common::config_flow::data::name%]",
"recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data::recommended%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::init::title%]"
},
"model": {
"data": {
"thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_budget%]",
"user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data::user_location%]",
"web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search%]",
"web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search_max_uses%]"
},
"data_description": {
"thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_budget%]",
"user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::user_location%]",
"web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search%]",
"web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search_max_uses%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::model::title%]"
}
}
},
"conversation": {
"abort": {
"entry_not_loaded": "Cannot add things while the configuration is disabled.",
@@ -89,8 +46,7 @@
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template."
},
"title": "Basic settings"
}
},
"model": {
"data": {

View File

@@ -37,6 +37,13 @@ USER_SCHEMA = vol.Schema(
}
)
STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
STEP_RECONFIGURE = vol.Schema(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
}
)
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
@@ -168,55 +175,36 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
) -> ConfigFlowResult:
"""Handle reconfiguration of the device."""
reconfigure_entry = self._get_reconfigure_entry()
if not user_input:
return self.async_show_form(
step_id="reconfigure", data_schema=STEP_RECONFIGURE
)
updated_host = user_input[CONF_HOST]
self._async_abort_entries_match({CONF_HOST: updated_host})
errors: dict[str, str] = {}
if user_input is not None:
updated_host = user_input[CONF_HOST]
self._async_abort_entries_match({CONF_HOST: updated_host})
try:
data_to_validate = {
CONF_HOST: updated_host,
CONF_PORT: user_input[CONF_PORT],
CONF_PIN: user_input[CONF_PIN],
CONF_TYPE: reconfigure_entry.data.get(CONF_TYPE, BRIDGE),
}
await validate_input(self.hass, data_to_validate)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
data_updates = {
CONF_HOST: updated_host,
CONF_PORT: user_input[CONF_PORT],
CONF_PIN: user_input[CONF_PIN],
}
return self.async_update_reload_and_abort(
reconfigure_entry, data_updates=data_updates
)
schema = vol.Schema(
{
vol.Required(
CONF_HOST, default=reconfigure_entry.data[CONF_HOST]
): cv.string,
vol.Required(
CONF_PORT, default=reconfigure_entry.data[CONF_PORT]
): cv.port,
vol.Optional(CONF_PIN): cv.string,
}
)
try:
await validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except InvalidPin:
errors["base"] = "invalid_pin"
except Exception: # noqa: BLE001
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
reconfigure_entry, data_updates={CONF_HOST: updated_host}
)
return self.async_show_form(
step_id="reconfigure",
data_schema=schema,
data_schema=STEP_RECONFIGURE,
errors=errors,
)

View File

@@ -116,28 +116,20 @@ class FoscamCoordinator(DataUpdateCoordinator[FoscamDeviceInfo]):
is_open_wdr = None
is_open_hdr = None
reserve3 = product_info.get("reserve4")
model = product_info.get("model")
model_int = int(model) if model is not None else 7002
if model_int > 7001:
reserve3_int = int(reserve3) if reserve3 is not None else 0
supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
if supports_wdr_adjustment_val:
ret_wdr, is_open_wdr_data = self.session.getWdrMode()
mode = (
is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
)
is_open_wdr = bool(int(mode))
elif supports_hdr_adjustment_val:
ret_hdr, is_open_hdr_data = self.session.getHdrMode()
mode = (
is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
)
is_open_hdr = bool(int(mode))
else:
supports_wdr_adjustment_val = False
supports_hdr_adjustment_val = False
reserve3_int = int(reserve3) if reserve3 is not None else 0
supports_wdr_adjustment_val = bool(int(reserve3_int & 256))
supports_hdr_adjustment_val = bool(int(reserve3_int & 128))
if supports_wdr_adjustment_val:
ret_wdr, is_open_wdr_data = self.session.getWdrMode()
mode = is_open_wdr_data["mode"] if ret_wdr == 0 and is_open_wdr_data else 0
is_open_wdr = bool(int(mode))
elif supports_hdr_adjustment_val:
ret_hdr, is_open_hdr_data = self.session.getHdrMode()
mode = is_open_hdr_data["mode"] if ret_hdr == 0 and is_open_hdr_data else 0
is_open_hdr = bool(int(mode))
ret_sw, software_capabilities = self.session.getSWCapabilities()
supports_speak_volume_adjustment_val = (
bool(int(software_capabilities.get("swCapabilities1")) & 32)
if ret_sw == 0

View File

@@ -0,0 +1,77 @@
"""Support for the Hive alarm."""
from __future__ import annotations
from datetime import timedelta
from homeassistant.components.alarm_control_panel import (
AlarmControlPanelEntity,
AlarmControlPanelEntityFeature,
AlarmControlPanelState,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import HiveConfigEntry
from .entity import HiveEntity
PARALLEL_UPDATES = 0
SCAN_INTERVAL = timedelta(seconds=15)
HIVETOHA = {
"home": AlarmControlPanelState.DISARMED,
"asleep": AlarmControlPanelState.ARMED_NIGHT,
"away": AlarmControlPanelState.ARMED_AWAY,
"sos": AlarmControlPanelState.TRIGGERED,
}
async def async_setup_entry(
hass: HomeAssistant,
entry: HiveConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Hive thermostat based on a config entry."""
hive = entry.runtime_data
if devices := hive.session.deviceList.get("alarm_control_panel"):
async_add_entities(
[HiveAlarmControlPanelEntity(hive, dev) for dev in devices], True
)
class HiveAlarmControlPanelEntity(HiveEntity, AlarmControlPanelEntity):
"""Representation of a Hive alarm."""
_attr_supported_features = (
AlarmControlPanelEntityFeature.ARM_NIGHT
| AlarmControlPanelEntityFeature.ARM_AWAY
| AlarmControlPanelEntityFeature.TRIGGER
)
_attr_code_arm_required = False
async def async_alarm_disarm(self, code: str | None = None) -> None:
"""Send disarm command."""
await self.hive.alarm.setMode(self.device, "home")
async def async_alarm_arm_night(self, code: str | None = None) -> None:
"""Send arm night command."""
await self.hive.alarm.setMode(self.device, "asleep")
async def async_alarm_arm_away(self, code: str | None = None) -> None:
"""Send arm away command."""
await self.hive.alarm.setMode(self.device, "away")
async def async_alarm_trigger(self, code: str | None = None) -> None:
"""Send alarm trigger command."""
await self.hive.alarm.setMode(self.device, "sos")
async def async_update(self) -> None:
"""Update all Node data from Hive."""
await self.hive.session.updateData(self.device)
self.device = await self.hive.alarm.getAlarm(self.device)
self._attr_available = self.device["deviceData"].get("online")
if self._attr_available:
if self.device["status"]["state"]:
self._attr_alarm_state = AlarmControlPanelState.TRIGGERED
else:
self._attr_alarm_state = HIVETOHA[self.device["status"]["mode"]]

View File

@@ -11,6 +11,7 @@ CONFIG_ENTRY_VERSION = 1
DEFAULT_NAME = "Hive"
DOMAIN = "hive"
PLATFORMS = [
Platform.ALARM_CONTROL_PANEL,
Platform.BINARY_SENSOR,
Platform.CLIMATE,
Platform.LIGHT,
@@ -19,6 +20,7 @@ PLATFORMS = [
Platform.WATER_HEATER,
]
PLATFORM_LOOKUP = {
Platform.ALARM_CONTROL_PANEL: "alarm_control_panel",
Platform.BINARY_SENSOR: "binary_sensor",
Platform.CLIMATE: "climate",
Platform.LIGHT: "light",

View File

@@ -4,7 +4,6 @@ import logging
from typing import TYPE_CHECKING
from aiopvapi.resources.model import PowerviewData
from aiopvapi.resources.shade_data import PowerviewShadeData
from aiopvapi.rooms import Rooms
from aiopvapi.scenes import Scenes
from aiopvapi.shades import Shades
@@ -17,6 +16,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er
from .const import DOMAIN, HUB_EXCEPTIONS, MANUFACTURER
from .coordinator import PowerviewShadeUpdateCoordinator
from .model import PowerviewConfigEntry, PowerviewEntryData
from .shade_data import PowerviewShadeData
from .util import async_connect_hub
PARALLEL_UPDATES = 1

View File

@@ -8,7 +8,6 @@ import logging
from aiopvapi.helpers.aiorequest import PvApiMaintenance
from aiopvapi.hub import Hub
from aiopvapi.resources.shade_data import PowerviewShadeData
from aiopvapi.shades import Shades
from homeassistant.config_entries import ConfigEntry
@@ -16,6 +15,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import HUB_EXCEPTIONS
from .shade_data import PowerviewShadeData
_LOGGER = logging.getLogger(__name__)

View File

@@ -208,13 +208,13 @@ class PowerViewShadeBase(ShadeEntity, CoverEntity):
async def _async_execute_move(self, move: ShadePosition) -> None:
"""Execute a move that can affect multiple positions."""
_LOGGER.debug("Move request %s: %s", self.name, move)
# Store the requested positions so subsequent move
# requests contain the secondary shade positions
self.data.update_shade_position(self._shade.id, move)
async with self.coordinator.radio_operation_lock:
response = await self._shade.move(move)
_LOGGER.debug("Move response %s: %s", self.name, response)
# Process the response from the hub (including new positions)
self.data.update_shade_position(self._shade.id, response)
async def _async_set_cover_position(self, target_hass_position: int) -> None:
"""Move the shade to a position."""
target_hass_position = self._clamp_cover_limit(target_hass_position)

View File

@@ -3,7 +3,6 @@
import logging
from aiopvapi.resources.shade import BaseShade, ShadePosition
from aiopvapi.resources.shade_data import PowerviewShadeData
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
@@ -12,6 +11,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER
from .coordinator import PowerviewShadeUpdateCoordinator
from .model import PowerviewDeviceInfo
from .shade_data import PowerviewShadeData
_LOGGER = logging.getLogger(__name__)

View File

@@ -18,6 +18,6 @@
},
"iot_class": "local_polling",
"loggers": ["aiopvapi"],
"requirements": ["aiopvapi==3.3.0"],
"requirements": ["aiopvapi==3.2.1"],
"zeroconf": ["_powerview._tcp.local.", "_PowerView-G3._tcp.local."]
}

View File

@@ -0,0 +1,80 @@
"""Shade data for the Hunter Douglas PowerView integration."""
from __future__ import annotations
from dataclasses import fields
from typing import Any
from aiopvapi.resources.model import PowerviewData
from aiopvapi.resources.shade import BaseShade, ShadePosition
from .util import async_map_data_by_id
POSITION_FIELDS = [field for field in fields(ShadePosition) if field.name != "velocity"]
def copy_position_data(source: ShadePosition, target: ShadePosition) -> ShadePosition:
"""Copy position data from source to target for None values only."""
for field in POSITION_FIELDS:
if (value := getattr(source, field.name)) is not None:
setattr(target, field.name, value)
class PowerviewShadeData:
"""Coordinate shade data between multiple api calls."""
def __init__(self) -> None:
"""Init the shade data."""
self._raw_data_by_id: dict[int, dict[str | int, Any]] = {}
self._shade_group_data_by_id: dict[int, BaseShade] = {}
self.positions: dict[int, ShadePosition] = {}
def get_raw_data(self, shade_id: int) -> dict[str | int, Any]:
"""Get data for the shade."""
return self._raw_data_by_id[shade_id]
def get_all_raw_data(self) -> dict[int, dict[str | int, Any]]:
"""Get data for all shades."""
return self._raw_data_by_id
def get_shade(self, shade_id: int) -> BaseShade:
"""Get specific shade from the coordinator."""
return self._shade_group_data_by_id[shade_id]
def get_shade_position(self, shade_id: int) -> ShadePosition:
"""Get positions for a shade."""
if shade_id not in self.positions:
shade_position = ShadePosition()
# If we have the group data, use it to populate the initial position
if shade := self._shade_group_data_by_id.get(shade_id):
copy_position_data(shade.current_position, shade_position)
self.positions[shade_id] = shade_position
return self.positions[shade_id]
def update_from_group_data(self, shade_id: int) -> None:
"""Process an update from the group data."""
data = self._shade_group_data_by_id[shade_id]
copy_position_data(data.current_position, self.get_shade_position(data.id))
def store_group_data(self, shade_data: PowerviewData) -> None:
"""Store data from the all shades endpoint.
This does not update the shades or positions (self.positions)
as the data may be stale. update_from_group_data
with a shade_id will update a specific shade
from the group data.
"""
self._shade_group_data_by_id = shade_data.processed
self._raw_data_by_id = async_map_data_by_id(shade_data.raw)
def update_shade_position(self, shade_id: int, new_position: ShadePosition) -> None:
"""Update a single shades position."""
copy_position_data(new_position, self.get_shade_position(shade_id))
def update_shade_velocity(self, shade_id: int, shade_data: ShadePosition) -> None:
"""Update a single shades velocity."""
# the hub will always return a velocity of 0 on initial connect,
# separate definition to store consistent value in HA
# this value is purely driven from HA
if shade_data.velocity is not None:
self.get_shade_position(shade_id).velocity = shade_data.velocity

View File

@@ -2,15 +2,25 @@
from __future__ import annotations
from collections.abc import Iterable
from typing import Any
from aiopvapi.helpers.aiorequest import AioRequest
from aiopvapi.helpers.constants import ATTR_ID
from aiopvapi.hub import Hub
from homeassistant.core import HomeAssistant
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .model import PowerviewAPI, PowerviewDeviceInfo
@callback
def async_map_data_by_id(data: Iterable[dict[str | int, Any]]):
"""Return a dict with the key being the id for a list of entries."""
return {entry[ATTR_ID]: entry for entry in data}
async def async_connect_hub(
hass: HomeAssistant, address: str, api_version: int | None = None
) -> PowerviewAPI:

View File

@@ -5,6 +5,7 @@ from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from propcache.api import cached_property
from pyituran import Vehicle
from homeassistant.components.binary_sensor import (
@@ -68,7 +69,7 @@ class IturanBinarySensor(IturanBaseEntity, BinarySensorEntity):
super().__init__(coordinator, license_plate, description.key)
self.entity_description = description
@property
@cached_property
def is_on(self) -> bool:
"""Return true if the binary sensor is on."""
return self.entity_description.value_fn(self.vehicle)

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
from propcache.api import cached_property
from homeassistant.components.device_tracker import TrackerEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -38,12 +40,12 @@ class IturanDeviceTracker(IturanBaseEntity, TrackerEntity):
"""Initialize the device tracker."""
super().__init__(coordinator, license_plate, "device_tracker")
@property
@cached_property
def latitude(self) -> float | None:
"""Return latitude value of the device."""
return self.vehicle.gps_coordinates[0]
@property
@cached_property
def longitude(self) -> float | None:
"""Return longitude value of the device."""
return self.vehicle.gps_coordinates[1]

View File

@@ -6,6 +6,7 @@ from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from propcache.api import cached_property
from pyituran import Vehicle
from homeassistant.components.sensor import (
@@ -132,7 +133,7 @@ class IturanSensor(IturanBaseEntity, SensorEntity):
super().__init__(coordinator, license_plate, description.key)
self.entity_description = description
@property
@cached_property
def native_value(self) -> StateType | datetime:
"""Return the state of the device."""
return self.entity_description.value_fn(self.vehicle)

View File

@@ -94,6 +94,28 @@
}
},
"services": {
"address_to_device_id": {
"description": "Converts an LCN address into a device ID.",
"fields": {
"host": {
"description": "Host name as given in the integration panel.",
"name": "Host name"
},
"id": {
"description": "Module or group number of the target.",
"name": "Module or group ID"
},
"segment_id": {
"description": "Segment number of the target.",
"name": "Segment ID"
},
"type": {
"description": "Module type of the target.",
"name": "Type"
}
},
"name": "Address to device ID"
},
"dyn_text": {
"description": "Sends dynamic text to LCN-GTxD displays.",
"fields": {

View File

@@ -234,22 +234,12 @@ class MatterAdapter:
self._create_device_registry(endpoint)
# run platform discovery from device type instances
for entity_info in async_discover_entities(endpoint):
# For entities that should only exist once per device (not per endpoint),
# exclude endpoint_id from the discovery key
if entity_info.discovery_schema.once_per_device:
discovery_key = (
f"{entity_info.platform}_{endpoint.node.node_id}_"
f"{entity_info.primary_attribute.cluster_id}_"
f"{entity_info.primary_attribute.attribute_id}_"
f"{entity_info.entity_description.key}"
)
else:
discovery_key = (
f"{entity_info.platform}_{endpoint.node.node_id}_{endpoint.endpoint_id}_"
f"{entity_info.primary_attribute.cluster_id}_"
f"{entity_info.primary_attribute.attribute_id}_"
f"{entity_info.entity_description.key}"
)
discovery_key = (
f"{entity_info.platform}_{endpoint.node.node_id}_{endpoint.endpoint_id}_"
f"{entity_info.primary_attribute.cluster_id}_"
f"{entity_info.primary_attribute.attribute_id}_"
f"{entity_info.entity_description.key}"
)
if discovery_key in self.discovered_entities:
continue
LOGGER.debug(

View File

@@ -353,13 +353,17 @@ DISCOVERY_SCHEMAS = [
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
# DeviceFault or SupplyFault bit enabled
device_to_ha=lambda x: bool(
x
& (
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kDeviceFault
| clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSupplyFault
)
),
device_to_ha={
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kDeviceFault: True,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSupplyFault: True,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSpeedLow: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kSpeedHigh: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kLocalOverride: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemotePressure: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemoteFlow: False,
clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRemoteTemperature: False,
}.get,
),
entity_class=MatterBinarySensor,
required_attributes=(
@@ -373,9 +377,9 @@ DISCOVERY_SCHEMAS = [
key="PumpStatusRunning",
translation_key="pump_running",
device_class=BinarySensorDeviceClass.RUNNING,
device_to_ha=lambda x: bool(
device_to_ha=lambda x: (
x
& clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning
== clusters.PumpConfigurationAndControl.Bitmaps.PumpStatusBitmap.kRunning
),
),
entity_class=MatterBinarySensor,
@@ -391,8 +395,8 @@ DISCOVERY_SCHEMAS = [
translation_key="dishwasher_alarm_inflow",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
device_to_ha=lambda x: bool(
x & clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kInflowError
device_to_ha=lambda x: (
x == clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kInflowError
),
),
entity_class=MatterBinarySensor,
@@ -406,8 +410,8 @@ DISCOVERY_SCHEMAS = [
translation_key="alarm_door",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
device_to_ha=lambda x: bool(
x & clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kDoorError
device_to_ha=lambda x: (
x == clusters.DishwasherAlarm.Bitmaps.AlarmBitmap.kDoorError
),
),
entity_class=MatterBinarySensor,
@@ -421,10 +425,9 @@ DISCOVERY_SCHEMAS = [
translation_key="valve_fault_general_fault",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
# GeneralFault bit from ValveFault attribute
device_to_ha=lambda x: bool(
device_to_ha=lambda x: (
x
& clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kGeneralFault
== clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kGeneralFault
),
),
entity_class=MatterBinarySensor,
@@ -440,10 +443,9 @@ DISCOVERY_SCHEMAS = [
translation_key="valve_fault_blocked",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
# Blocked bit from ValveFault attribute
device_to_ha=lambda x: bool(
device_to_ha=lambda x: (
x
& clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kBlocked
== clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kBlocked
),
),
entity_class=MatterBinarySensor,
@@ -459,10 +461,9 @@ DISCOVERY_SCHEMAS = [
translation_key="valve_fault_leaking",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
# Leaking bit from ValveFault attribute
device_to_ha=lambda x: bool(
device_to_ha=lambda x: (
x
& clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kLeaking
== clusters.ValveConfigurationAndControl.Bitmaps.ValveFaultBitmap.kLeaking
),
),
entity_class=MatterBinarySensor,
@@ -477,8 +478,8 @@ DISCOVERY_SCHEMAS = [
translation_key="alarm_door",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
device_to_ha=lambda x: bool(
x & clusters.RefrigeratorAlarm.Bitmaps.AlarmBitmap.kDoorOpen
device_to_ha=lambda x: (
x == clusters.RefrigeratorAlarm.Bitmaps.AlarmBitmap.kDoorOpen
),
),
entity_class=MatterBinarySensor,

View File

@@ -65,10 +65,7 @@ DISCOVERY_SCHEMAS = [
entity_class=MatterCommandButton,
required_attributes=(clusters.Identify.Attributes.IdentifyType,),
value_is_not=clusters.Identify.Enums.IdentifyTypeEnum.kNone,
# Only create a single Identify button per device, not one per endpoint.
# The Identify cluster can appear on multiple endpoints; once_per_device=True
# ensures only one button is created for the entire device.
once_per_device=True,
allow_multi=True,
),
MatterDiscoverySchema(
platform=Platform.BUTTON,

View File

@@ -152,7 +152,3 @@ class MatterDiscoverySchema:
# [optional] the secondary (required) attribute value must NOT have this value
# for example to filter out empty lists in list sensor values
secondary_value_is_not: Any = UNSET
# [optional] bool to specify if this entity should only be created once per device
# instead of once per endpoint (useful for device-level entities like identify button)
once_per_device: bool = False

View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "platinum",
"requirements": ["aiomealie==1.1.0"]
"requirements": ["aiomealie==1.0.1"]
}

View File

@@ -1009,7 +1009,7 @@
"cleaning_care_program": "Cleaning/care program",
"maintenance_program": "Maintenance program",
"normal_operation_mode": "Normal operation mode",
"own_program": "Program"
"own_program": "Own program"
}
},
"remaining_time": {

View File

@@ -12,12 +12,7 @@ from homeassistant.helpers import entity_registry as er
from .const import _LOGGER
PLATFORMS: list[Platform] = [
Platform.CLIMATE,
Platform.COVER,
Platform.LIGHT,
Platform.SCENE,
]
PLATFORMS: list[Platform] = [Platform.COVER, Platform.LIGHT, Platform.SCENE]
type NikoHomeControlConfigEntry = ConfigEntry[NHCController]

View File

@@ -1,100 +0,0 @@
"""Support for Niko Home Control thermostats."""
from typing import Any
from nhc.const import THERMOSTAT_MODES, THERMOSTAT_MODES_REVERSE
from nhc.thermostat import NHCThermostat
from homeassistant.components.climate import (
PRESET_ECO,
ClimateEntity,
ClimateEntityFeature,
HVACMode,
)
from homeassistant.components.sensor import UnitOfTemperature
from homeassistant.const import ATTR_TEMPERATURE
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import NikoHomeControlConfigEntry
from .const import (
NIKO_HOME_CONTROL_THERMOSTAT_MODES_MAP,
NikoHomeControlThermostatModes,
)
from .entity import NikoHomeControlEntity
async def async_setup_entry(
hass: HomeAssistant,
entry: NikoHomeControlConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Niko Home Control thermostat entry."""
controller = entry.runtime_data
async_add_entities(
NikoHomeControlClimate(thermostat, controller, entry.entry_id)
for thermostat in controller.thermostats.values()
)
class NikoHomeControlClimate(NikoHomeControlEntity, ClimateEntity):
"""Representation of a Niko Home Control thermostat."""
_attr_supported_features: ClimateEntityFeature = (
ClimateEntityFeature.PRESET_MODE
| ClimateEntityFeature.TARGET_TEMPERATURE
| ClimateEntityFeature.TURN_OFF
)
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_name = None
_action: NHCThermostat
_attr_translation_key = "nhc_thermostat"
_attr_hvac_modes = [HVACMode.OFF, HVACMode.COOL, HVACMode.AUTO]
_attr_preset_modes = [
"day",
"night",
PRESET_ECO,
"prog1",
"prog2",
"prog3",
]
def _get_niko_mode(self, mode: str) -> int:
"""Return the Niko mode."""
return THERMOSTAT_MODES_REVERSE.get(mode, NikoHomeControlThermostatModes.OFF)
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
if ATTR_TEMPERATURE in kwargs:
await self._action.set_temperature(kwargs.get(ATTR_TEMPERATURE))
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
await self._action.set_mode(self._get_niko_mode(preset_mode))
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target hvac mode."""
await self._action.set_mode(NIKO_HOME_CONTROL_THERMOSTAT_MODES_MAP[hvac_mode])
async def async_turn_off(self) -> None:
"""Turn thermostat off."""
await self._action.set_mode(NikoHomeControlThermostatModes.OFF)
def update_state(self) -> None:
"""Update the state of the entity."""
if self._action.state == NikoHomeControlThermostatModes.OFF:
self._attr_hvac_mode = HVACMode.OFF
self._attr_preset_mode = None
elif self._action.state == NikoHomeControlThermostatModes.COOL:
self._attr_hvac_mode = HVACMode.COOL
self._attr_preset_mode = None
else:
self._attr_hvac_mode = HVACMode.AUTO
self._attr_preset_mode = THERMOSTAT_MODES[self._action.state]
self._attr_target_temperature = self._action.setpoint
self._attr_current_temperature = self._action.measured

View File

@@ -1,23 +1,6 @@
"""Constants for niko_home_control integration."""
from enum import IntEnum
import logging
from homeassistant.components.climate import HVACMode
DOMAIN = "niko_home_control"
_LOGGER = logging.getLogger(__name__)
NIKO_HOME_CONTROL_THERMOSTAT_MODES_MAP = {
HVACMode.OFF: 3,
HVACMode.COOL: 4,
HVACMode.AUTO: 5,
}
class NikoHomeControlThermostatModes(IntEnum):
"""Enum for Niko Home Control thermostat modes."""
OFF = 3
COOL = 4
AUTO = 5

View File

@@ -1,20 +0,0 @@
{
"entity": {
"climate": {
"nhc_thermostat": {
"state_attributes": {
"preset_mode": {
"default": "mdi:calendar-clock",
"state": {
"day": "mdi:weather-sunny",
"night": "mdi:weather-night",
"prog1": "mdi:numeric-1",
"prog2": "mdi:numeric-2",
"prog3": "mdi:numeric-3"
}
}
}
}
}
}
}

View File

@@ -26,23 +26,5 @@
"description": "Set up your Niko Home Control instance."
}
}
},
"entity": {
"climate": {
"nhc_thermostat": {
"state_attributes": {
"preset_mode": {
"state": {
"day": "Day",
"eco": "Eco",
"night": "Night",
"prog1": "Program 1",
"prog2": "Program 2",
"prog3": "Program 3"
}
}
}
}
}
}
}

View File

@@ -15,5 +15,5 @@
"documentation": "https://www.home-assistant.io/integrations/palazzetti",
"integration_type": "device",
"iot_class": "local_polling",
"requirements": ["pypalazzetti==0.1.20"]
"requirements": ["pypalazzetti==0.1.19"]
}

View File

@@ -256,7 +256,6 @@ class PlaystationNetworkFriendDataCoordinator(
account_id=self.user.account_id,
presence=self.user.get_presence(),
profile=self.profile,
trophy_summary=self.user.trophy_summary(),
)
except PSNAWPForbiddenError as error:
raise UpdateFailed(

View File

@@ -54,7 +54,7 @@ class PlaystationNetworkSensor(StrEnum):
NOW_PLAYING = "now_playing"
SENSOR_DESCRIPTIONS: tuple[PlaystationNetworkSensorEntityDescription, ...] = (
SENSOR_DESCRIPTIONS_TROPHY: tuple[PlaystationNetworkSensorEntityDescription, ...] = (
PlaystationNetworkSensorEntityDescription(
key=PlaystationNetworkSensor.TROPHY_LEVEL,
translation_key=PlaystationNetworkSensor.TROPHY_LEVEL,
@@ -106,6 +106,8 @@ SENSOR_DESCRIPTIONS: tuple[PlaystationNetworkSensorEntityDescription, ...] = (
else None
),
),
)
SENSOR_DESCRIPTIONS_USER: tuple[PlaystationNetworkSensorEntityDescription, ...] = (
PlaystationNetworkSensorEntityDescription(
key=PlaystationNetworkSensor.ONLINE_ID,
translation_key=PlaystationNetworkSensor.ONLINE_ID,
@@ -150,7 +152,7 @@ async def async_setup_entry(
coordinator = config_entry.runtime_data.user_data
async_add_entities(
PlaystationNetworkSensorEntity(coordinator, description)
for description in SENSOR_DESCRIPTIONS
for description in SENSOR_DESCRIPTIONS_TROPHY + SENSOR_DESCRIPTIONS_USER
)
for (
@@ -164,7 +166,7 @@ async def async_setup_entry(
description,
config_entry.subentries[subentry_id],
)
for description in SENSOR_DESCRIPTIONS
for description in SENSOR_DESCRIPTIONS_USER
],
config_subentry_id=subentry_id,
)

View File

@@ -57,14 +57,12 @@ type SelectType = Literal[
"select_gateway_mode",
"select_regulation_mode",
"select_schedule",
"select_zone_profile",
]
type SelectOptionsType = Literal[
"available_schedules",
"dhw_modes",
"gateway_modes",
"regulation_modes",
"zone_profiles",
"available_schedules",
]
# Default directives
@@ -84,10 +82,3 @@ MASTER_THERMOSTATS: Final[list[str]] = [
"zone_thermometer",
"zone_thermostat",
]
# Select constants
SELECT_DHW_MODE: Final = "select_dhw_mode"
SELECT_GATEWAY_MODE: Final = "select_gateway_mode"
SELECT_REGULATION_MODE: Final = "select_regulation_mode"
SELECT_SCHEDULE: Final = "select_schedule"
SELECT_ZONE_PROFILE: Final = "select_zone_profile"

View File

@@ -8,6 +8,6 @@
"iot_class": "local_polling",
"loggers": ["plugwise"],
"quality_scale": "platinum",
"requirements": ["plugwise==1.10.0"],
"requirements": ["plugwise==1.9.0"],
"zeroconf": ["_plugwise._tcp.local."]
}

View File

@@ -9,15 +9,7 @@ from homeassistant.const import STATE_ON, EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import (
SELECT_DHW_MODE,
SELECT_GATEWAY_MODE,
SELECT_REGULATION_MODE,
SELECT_SCHEDULE,
SELECT_ZONE_PROFILE,
SelectOptionsType,
SelectType,
)
from .const import SelectOptionsType, SelectType
from .coordinator import PlugwiseConfigEntry, PlugwiseDataUpdateCoordinator
from .entity import PlugwiseEntity
from .util import plugwise_command
@@ -35,34 +27,28 @@ class PlugwiseSelectEntityDescription(SelectEntityDescription):
SELECT_TYPES = (
PlugwiseSelectEntityDescription(
key=SELECT_SCHEDULE,
translation_key=SELECT_SCHEDULE,
key="select_schedule",
translation_key="select_schedule",
options_key="available_schedules",
),
PlugwiseSelectEntityDescription(
key=SELECT_REGULATION_MODE,
translation_key=SELECT_REGULATION_MODE,
key="select_regulation_mode",
translation_key="regulation_mode",
entity_category=EntityCategory.CONFIG,
options_key="regulation_modes",
),
PlugwiseSelectEntityDescription(
key=SELECT_DHW_MODE,
translation_key=SELECT_DHW_MODE,
key="select_dhw_mode",
translation_key="dhw_mode",
entity_category=EntityCategory.CONFIG,
options_key="dhw_modes",
),
PlugwiseSelectEntityDescription(
key=SELECT_GATEWAY_MODE,
translation_key=SELECT_GATEWAY_MODE,
key="select_gateway_mode",
translation_key="gateway_mode",
entity_category=EntityCategory.CONFIG,
options_key="gateway_modes",
),
PlugwiseSelectEntityDescription(
key=SELECT_ZONE_PROFILE,
translation_key=SELECT_ZONE_PROFILE,
entity_category=EntityCategory.CONFIG,
options_key="zone_profiles",
),
)

View File

@@ -109,7 +109,7 @@
}
},
"select": {
"select_dhw_mode": {
"dhw_mode": {
"name": "DHW mode",
"state": {
"auto": "[%key:common::state::auto%]",
@@ -118,7 +118,7 @@
"off": "[%key:common::state::off%]"
}
},
"select_gateway_mode": {
"gateway_mode": {
"name": "Gateway mode",
"state": {
"away": "Pause",
@@ -126,7 +126,7 @@
"vacation": "Vacation"
}
},
"select_regulation_mode": {
"regulation_mode": {
"name": "Regulation mode",
"state": {
"bleeding_cold": "Bleeding cold",
@@ -141,14 +141,6 @@
"state": {
"off": "[%key:common::state::off%]"
}
},
"select_zone_profile": {
"name": "Zone profile",
"state": {
"active": "[%key:common::state::active%]",
"off": "[%key:common::state::off%]",
"passive": "Passive"
}
}
},
"sensor": {

View File

@@ -54,7 +54,7 @@ CONTEXT_ID_AS_BINARY_SCHEMA_VERSION = 36
EVENT_TYPE_IDS_SCHEMA_VERSION = 37
STATES_META_SCHEMA_VERSION = 38
CIRCULAR_MEAN_SCHEMA_VERSION = 49
UNIT_CLASS_SCHEMA_VERSION = 52
UNIT_CLASS_SCHEMA_VERSION = 51
LEGACY_STATES_EVENT_ID_INDEX_SCHEMA_VERSION = 28
LEGACY_STATES_EVENT_FOREIGN_KEYS_FIXED_SCHEMA_VERSION = 43

View File

@@ -71,7 +71,7 @@ class LegacyBase(DeclarativeBase):
"""Base class for tables, used for schema migration."""
SCHEMA_VERSION = 52
SCHEMA_VERSION = 51
_LOGGER = logging.getLogger(__name__)

View File

@@ -13,15 +13,7 @@ from typing import TYPE_CHECKING, Any, TypedDict, cast, final
from uuid import UUID
import sqlalchemy
from sqlalchemy import (
ForeignKeyConstraint,
MetaData,
Table,
cast as cast_,
func,
text,
update,
)
from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text, update
from sqlalchemy.engine import CursorResult, Engine
from sqlalchemy.exc import (
DatabaseError,
@@ -34,9 +26,8 @@ from sqlalchemy.exc import (
from sqlalchemy.orm import DeclarativeBase
from sqlalchemy.orm.session import Session
from sqlalchemy.schema import AddConstraint, CreateTable, DropConstraint
from sqlalchemy.sql.expression import and_, true
from sqlalchemy.sql.expression import true
from sqlalchemy.sql.lambdas import StatementLambdaElement
from sqlalchemy.types import BINARY
from homeassistant.core import HomeAssistant
from homeassistant.util.enum import try_parse_enum
@@ -2053,74 +2044,14 @@ class _SchemaVersion50Migrator(_SchemaVersionMigrator, target_version=50):
class _SchemaVersion51Migrator(_SchemaVersionMigrator, target_version=51):
def _apply_update(self) -> None:
"""Version specific update method."""
# Replaced with version 52 which corrects issues with MySQL string comparisons.
class _SchemaVersion52Migrator(_SchemaVersionMigrator, target_version=52):
def _apply_update(self) -> None:
"""Version specific update method."""
if self.engine.dialect.name == SupportedDialect.MYSQL:
self._apply_update_mysql()
else:
self._apply_update_postgresql_sqlite()
def _apply_update_mysql(self) -> None:
"""Version specific update method for mysql."""
# Add unit class column to StatisticsMeta
_add_columns(self.session_maker, "statistics_meta", ["unit_class VARCHAR(255)"])
with session_scope(session=self.session_maker()) as session:
connection = session.connection()
for conv in _PRIMARY_UNIT_CONVERTERS:
case_sensitive_units = {
u.encode("utf-8") if u else u for u in conv.VALID_UNITS
}
# Reset unit_class to None for entries that do not match
# the valid units (case sensitive) but matched before due to
# case insensitive comparisons.
connection.execute(
update(StatisticsMeta)
.where(
and_(
StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS),
cast_(StatisticsMeta.unit_of_measurement, BINARY).not_in(
case_sensitive_units
),
)
)
.values(unit_class=None)
)
# Do an explicitly case sensitive match (actually binary) to set the
# correct unit_class. This is needed because we use the case sensitive
# utf8mb4_unicode_ci collation.
connection.execute(
update(StatisticsMeta)
.where(
and_(
cast_(StatisticsMeta.unit_of_measurement, BINARY).in_(
case_sensitive_units
),
StatisticsMeta.unit_class.is_(None),
)
)
.values(unit_class=conv.UNIT_CLASS)
)
def _apply_update_postgresql_sqlite(self) -> None:
"""Version specific update method for postgresql and sqlite."""
_add_columns(self.session_maker, "statistics_meta", ["unit_class VARCHAR(255)"])
with session_scope(session=self.session_maker()) as session:
connection = session.connection()
for conv in _PRIMARY_UNIT_CONVERTERS:
# Set the correct unit_class. Unlike MySQL, Postgres and SQLite
# have case sensitive string comparisons by default, so we
# can directly match on the valid units.
connection.execute(
update(StatisticsMeta)
.where(
and_(
StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS),
StatisticsMeta.unit_class.is_(None),
)
)
.where(StatisticsMeta.unit_of_measurement.in_(conv.VALID_UNITS))
.values(unit_class=conv.UNIT_CLASS)
)

View File

@@ -2,15 +2,12 @@
from __future__ import annotations
from satel_integra.satel_integra import AsyncSatel
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
)
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -20,7 +17,6 @@ from .const import (
CONF_ZONE_NUMBER,
CONF_ZONE_TYPE,
CONF_ZONES,
DOMAIN,
SIGNAL_OUTPUTS_UPDATED,
SIGNAL_ZONES_UPDATED,
SUBENTRY_TYPE_OUTPUT,
@@ -44,9 +40,9 @@ async def async_setup_entry(
)
for subentry in zone_subentries:
zone_num: int = subentry.data[CONF_ZONE_NUMBER]
zone_type: BinarySensorDeviceClass = subentry.data[CONF_ZONE_TYPE]
zone_name: str = subentry.data[CONF_NAME]
zone_num = subentry.data[CONF_ZONE_NUMBER]
zone_type = subentry.data[CONF_ZONE_TYPE]
zone_name = subentry.data[CONF_NAME]
async_add_entities(
[
@@ -69,9 +65,9 @@ async def async_setup_entry(
)
for subentry in output_subentries:
output_num: int = subentry.data[CONF_OUTPUT_NUMBER]
ouput_type: BinarySensorDeviceClass = subentry.data[CONF_ZONE_TYPE]
output_name: str = subentry.data[CONF_NAME]
output_num = subentry.data[CONF_OUTPUT_NUMBER]
ouput_type = subentry.data[CONF_ZONE_TYPE]
output_name = subentry.data[CONF_NAME]
async_add_entities(
[
@@ -93,48 +89,68 @@ class SatelIntegraBinarySensor(BinarySensorEntity):
"""Representation of an Satel Integra binary sensor."""
_attr_should_poll = False
_attr_has_entity_name = True
_attr_name = None
def __init__(
self,
controller: AsyncSatel,
device_number: int,
device_name: str,
device_class: BinarySensorDeviceClass,
sensor_type: str,
react_to_signal: str,
config_entry_id: str,
) -> None:
controller,
device_number,
device_name,
zone_type,
sensor_type,
react_to_signal,
config_entry_id,
):
"""Initialize the binary_sensor."""
self._device_number = device_number
self._attr_unique_id = f"{config_entry_id}_{sensor_type}_{device_number}"
self._name = device_name
self._zone_type = zone_type
self._state = 0
self._react_to_signal = react_to_signal
self._satel = controller
self._attr_device_class = device_class
self._attr_device_info = DeviceInfo(
name=device_name, identifiers={(DOMAIN, self._attr_unique_id)}
)
async def async_added_to_hass(self) -> None:
"""Register callbacks."""
if self._react_to_signal == SIGNAL_OUTPUTS_UPDATED:
self._attr_is_on = self._device_number in self._satel.violated_outputs
if self._device_number in self._satel.violated_outputs:
self._state = 1
else:
self._state = 0
elif self._device_number in self._satel.violated_zones:
self._state = 1
else:
self._attr_is_on = self._device_number in self._satel.violated_zones
self._state = 0
self.async_on_remove(
async_dispatcher_connect(
self.hass, self._react_to_signal, self._devices_updated
)
)
@property
def name(self):
"""Return the name of the entity."""
return self._name
@property
def icon(self) -> str | None:
"""Icon for device by its type."""
if self._zone_type is BinarySensorDeviceClass.SMOKE:
return "mdi:fire"
return None
@property
def is_on(self):
"""Return true if sensor is on."""
return self._state == 1
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return self._zone_type
@callback
def _devices_updated(self, zones: dict[int, int]):
def _devices_updated(self, zones):
"""Update the zone's state, if needed."""
if self._device_number in zones:
new_state = zones[self._device_number] == 1
if new_state != self._attr_is_on:
self._attr_is_on = new_state
self.async_write_ha_state()
if self._device_number in zones and self._state != zones[self._device_number]:
self._state = zones[self._device_number]
self.async_write_ha_state()

View File

@@ -12,7 +12,6 @@ from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError
from homeassistant.components.number import (
DOMAIN as NUMBER_PLATFORM,
NumberDeviceClass,
NumberEntity,
NumberEntityDescription,
NumberExtraStoredData,
@@ -108,9 +107,6 @@ class RpcNumber(ShellyRpcAttributeEntity, NumberEntity):
if description.mode_fn is not None:
self._attr_mode = description.mode_fn(coordinator.device.config[key])
if hasattr(self, "_attr_name") and description.role != ROLE_GENERIC:
delattr(self, "_attr_name")
@property
def native_value(self) -> float | None:
"""Return value of number."""
@@ -185,6 +181,7 @@ NUMBERS: dict[tuple[str, str], BlockNumberDescription] = {
("device", "valvePos"): BlockNumberDescription(
key="device|valvepos",
translation_key="valve_position",
name="Valve position",
native_unit_of_measurement=PERCENTAGE,
available=lambda block: cast(int, block.valveError) != 1,
entity_category=EntityCategory.CONFIG,
@@ -203,12 +200,12 @@ RPC_NUMBERS: Final = {
key="blutrv",
sub_key="current_C",
translation_key="external_temperature",
name="External temperature",
native_min_value=-50,
native_max_value=50,
native_step=0.1,
mode=NumberMode.BOX,
entity_category=EntityCategory.CONFIG,
device_class=NumberDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
method="blu_trv_set_external_temperature",
entity_class=RpcBluTrvExtTempNumber,
@@ -216,7 +213,7 @@ RPC_NUMBERS: Final = {
"number_generic": RpcNumberDescription(
key="number",
sub_key="value",
removal_condition=lambda config, _, key: not is_view_for_platform(
removal_condition=lambda config, _status, key: not is_view_for_platform(
config, key, NUMBER_PLATFORM
),
max_fn=lambda config: config["max"],
@@ -232,11 +229,9 @@ RPC_NUMBERS: Final = {
"number_current_limit": RpcNumberDescription(
key="number",
sub_key="value",
translation_key="current_limit",
device_class=NumberDeviceClass.CURRENT,
max_fn=lambda config: config["max"],
min_fn=lambda config: config["min"],
mode_fn=lambda _: NumberMode.SLIDER,
mode_fn=lambda config: NumberMode.SLIDER,
step_fn=lambda config: config["meta"]["ui"].get("step"),
unit=get_virtual_component_unit,
method="number_set",
@@ -246,11 +241,10 @@ RPC_NUMBERS: Final = {
"number_position": RpcNumberDescription(
key="number",
sub_key="value",
translation_key="valve_position",
entity_registry_enabled_default=False,
max_fn=lambda config: config["max"],
min_fn=lambda config: config["min"],
mode_fn=lambda _: NumberMode.SLIDER,
mode_fn=lambda config: NumberMode.SLIDER,
step_fn=lambda config: config["meta"]["ui"].get("step"),
unit=get_virtual_component_unit,
method="number_set",
@@ -260,12 +254,10 @@ RPC_NUMBERS: Final = {
"number_target_humidity": RpcNumberDescription(
key="number",
sub_key="value",
translation_key="target_humidity",
device_class=NumberDeviceClass.HUMIDITY,
entity_registry_enabled_default=False,
max_fn=lambda config: config["max"],
min_fn=lambda config: config["min"],
mode_fn=lambda _: NumberMode.SLIDER,
mode_fn=lambda config: NumberMode.SLIDER,
step_fn=lambda config: config["meta"]["ui"].get("step"),
unit=get_virtual_component_unit,
method="number_set",
@@ -275,12 +267,10 @@ RPC_NUMBERS: Final = {
"number_target_temperature": RpcNumberDescription(
key="number",
sub_key="value",
translation_key="target_temperature",
device_class=NumberDeviceClass.TEMPERATURE,
entity_registry_enabled_default=False,
max_fn=lambda config: config["max"],
min_fn=lambda config: config["min"],
mode_fn=lambda _: NumberMode.SLIDER,
mode_fn=lambda config: NumberMode.SLIDER,
step_fn=lambda config: config["meta"]["ui"].get("step"),
unit=get_virtual_component_unit,
method="number_set",
@@ -291,20 +281,21 @@ RPC_NUMBERS: Final = {
key="blutrv",
sub_key="pos",
translation_key="valve_position",
name="Valve position",
native_min_value=0,
native_max_value=100,
native_step=1,
mode=NumberMode.SLIDER,
native_unit_of_measurement=PERCENTAGE,
method="blu_trv_set_valve_position",
removal_condition=lambda config, _, key: config[key].get("enable", True)
removal_condition=lambda config, _status, key: config[key].get("enable", True)
is True,
entity_class=RpcBluTrvNumber,
),
"left_slot_intensity": RpcNumberDescription(
key="cury",
sub_key="slots",
translation_key="left_slot_intensity",
name="Left slot intensity",
value=lambda status, _: status["left"]["intensity"],
native_min_value=0,
native_max_value=100,
@@ -320,7 +311,7 @@ RPC_NUMBERS: Final = {
"right_slot_intensity": RpcNumberDescription(
key="cury",
sub_key="slots",
translation_key="right_slot_intensity",
name="Right slot intensity",
value=lambda status, _: status["right"]["intensity"],
native_min_value=0,
native_max_value=100,
@@ -411,9 +402,6 @@ class BlockSleepingNumber(ShellySleepingBlockAttributeEntity, RestoreNumber):
self.restored_data: NumberExtraStoredData | None = None
super().__init__(coordinator, block, attribute, description, entry)
if hasattr(self, "_attr_name"):
delattr(self, "_attr_name")
async def async_added_to_hass(self) -> None:
"""Handle entity which will be added."""
await super().async_added_to_hass()

View File

@@ -188,29 +188,6 @@
}
}
},
"number": {
"current_limit": {
"name": "Current limit"
},
"external_temperature": {
"name": "External temperature"
},
"left_slot_intensity": {
"name": "Left slot intensity"
},
"right_slot_intensity": {
"name": "Right slot intensity"
},
"target_humidity": {
"name": "Target humidity"
},
"target_temperature": {
"name": "Target temperature"
},
"valve_position": {
"name": "Valve position"
}
},
"select": {
"cury_mode": {
"name": "Mode",

View File

@@ -30,5 +30,5 @@
"iot_class": "cloud_push",
"loggers": ["pysmartthings"],
"quality_scale": "bronze",
"requirements": ["pysmartthings==3.3.2"]
"requirements": ["pysmartthings==3.3.1"]
}

View File

@@ -41,5 +41,5 @@
"iot_class": "local_push",
"loggers": ["switchbot"],
"quality_scale": "gold",
"requirements": ["PySwitchbot==0.73.0"]
"requirements": ["PySwitchbot==0.72.1"]
}

View File

@@ -15,11 +15,11 @@ from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.json import json_loads
from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode, DPType
from .entity import TuyaEntity
from .models import DPCodeBitmapBitWrapper, DPCodeBooleanWrapper, DPCodeWrapper
@dataclass(frozen=True)
@@ -366,48 +366,20 @@ BINARY_SENSORS: dict[DeviceCategory, tuple[TuyaBinarySensorEntityDescription, ..
}
class _CustomDPCodeWrapper(DPCodeWrapper):
"""Custom DPCode Wrapper to check for values in a set."""
_valid_values: set[bool | float | int | str]
def __init__(
self, dpcode: str, valid_values: set[bool | float | int | str]
) -> None:
"""Init CustomDPCodeBooleanWrapper."""
super().__init__(dpcode)
self._valid_values = valid_values
def read_device_status(self, device: CustomerDevice) -> bool | None:
"""Read the device value for the dpcode."""
if (raw_value := self._read_device_status_raw(device)) is None:
return None
return raw_value in self._valid_values
def _get_dpcode_wrapper(
device: CustomerDevice,
description: TuyaBinarySensorEntityDescription,
) -> DPCodeWrapper | None:
"""Get DPCode wrapper for an entity description."""
dpcode = description.dpcode or description.key
if description.bitmap_key is not None:
return DPCodeBitmapBitWrapper.find_dpcode(
device, dpcode, bitmap_key=description.bitmap_key
)
if bool_type := DPCodeBooleanWrapper.find_dpcode(device, dpcode):
return bool_type
# Legacy / compatibility
if dpcode not in device.status:
def _get_bitmap_bit_mask(
device: CustomerDevice, dpcode: str, bitmap_key: str | None
) -> int | None:
"""Get the bit mask for a given bitmap description."""
if (
bitmap_key is None
or (status_range := device.status_range.get(dpcode)) is None
or status_range.type != DPType.BITMAP
or not isinstance(bitmap_values := json_loads(status_range.values), dict)
or not isinstance(bitmap_labels := bitmap_values.get("label"), list)
or bitmap_key not in bitmap_labels
):
return None
return _CustomDPCodeWrapper(
dpcode,
description.on_value
if isinstance(description.on_value, set)
else {description.on_value},
)
return bitmap_labels.index(bitmap_key)
async def async_setup_entry(
@@ -425,11 +397,25 @@ async def async_setup_entry(
for device_id in device_ids:
device = manager.device_map[device_id]
if descriptions := BINARY_SENSORS.get(device.category):
entities.extend(
TuyaBinarySensorEntity(device, manager, description, dpcode_wrapper)
for description in descriptions
if (dpcode_wrapper := _get_dpcode_wrapper(device, description))
)
for description in descriptions:
dpcode = description.dpcode or description.key
if dpcode in device.status:
mask = _get_bitmap_bit_mask(
device, dpcode, description.bitmap_key
)
if (
description.bitmap_key is None # Regular binary sensor
or mask is not None # Bitmap sensor with valid mask
):
entities.append(
TuyaBinarySensorEntity(
device,
manager,
description,
mask,
)
)
async_add_entities(entities)
@@ -450,15 +436,26 @@ class TuyaBinarySensorEntity(TuyaEntity, BinarySensorEntity):
device: CustomerDevice,
device_manager: Manager,
description: TuyaBinarySensorEntityDescription,
dpcode_wrapper: DPCodeWrapper,
bit_mask: int | None = None,
) -> None:
"""Init Tuya binary sensor."""
super().__init__(device, device_manager)
self.entity_description = description
self._attr_unique_id = f"{super().unique_id}{description.key}"
self._dpcode_wrapper = dpcode_wrapper
self._bit_mask = bit_mask
@property
def is_on(self) -> bool | None:
def is_on(self) -> bool:
"""Return true if sensor is on."""
return self._dpcode_wrapper.read_device_status(self.device)
dpcode = self.entity_description.dpcode or self.entity_description.key
if dpcode not in self.device.status:
return False
if self._bit_mask is not None:
# For bitmap sensors, check the specific bit mask
return (self.device.status[dpcode] & (1 << self._bit_mask)) != 0
if isinstance(self.entity_description.on_value, set):
return self.device.status[dpcode] in self.entity_description.on_value
return self.device.status[dpcode] == self.entity_description.on_value

View File

@@ -13,7 +13,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import TuyaConfigEntry
from .const import TUYA_DISCOVERY_NEW, DeviceCategory, DPCode
from .entity import TuyaEntity
from .models import DPCodeBooleanWrapper
BUTTONS: dict[DeviceCategory, tuple[ButtonEntityDescription, ...]] = {
DeviceCategory.HXD: (
@@ -22,19 +21,6 @@ BUTTONS: dict[DeviceCategory, tuple[ButtonEntityDescription, ...]] = {
translation_key="snooze",
),
),
DeviceCategory.MSP: (
ButtonEntityDescription(
key=DPCode.FACTORY_RESET,
translation_key="factory_reset",
entity_category=EntityCategory.DIAGNOSTIC,
entity_registry_enabled_default=False,
),
ButtonEntityDescription(
key=DPCode.MANUAL_CLEAN,
translation_key="manual_clean",
entity_category=EntityCategory.CONFIG,
),
),
DeviceCategory.SD: (
ButtonEntityDescription(
key=DPCode.RESET_DUSTER_CLOTH,
@@ -81,13 +67,9 @@ async def async_setup_entry(
device = manager.device_map[device_id]
if descriptions := BUTTONS.get(device.category):
entities.extend(
TuyaButtonEntity(device, manager, description, dpcode_wrapper)
TuyaButtonEntity(device, manager, description)
for description in descriptions
if (
dpcode_wrapper := DPCodeBooleanWrapper.find_dpcode(
device, description.key, prefer_function=True
)
)
if description.key in device.status
)
async_add_entities(entities)
@@ -107,14 +89,12 @@ class TuyaButtonEntity(TuyaEntity, ButtonEntity):
device: CustomerDevice,
device_manager: Manager,
description: ButtonEntityDescription,
dpcode_wrapper: DPCodeBooleanWrapper,
) -> None:
"""Init Tuya button."""
super().__init__(device, device_manager)
self.entity_description = description
self._attr_unique_id = f"{super().unique_id}{description.key}"
self._dpcode_wrapper = dpcode_wrapper
async def async_press(self) -> None:
def press(self) -> None:
"""Press the button."""
await self._async_send_dpcode_update(self._dpcode_wrapper, True)
self._send_command([{"code": self.entity_description.key, "value": True}])

View File

@@ -704,7 +704,6 @@ class DPCode(StrEnum):
DECIBEL_SWITCH = "decibel_switch"
DEHUMIDITY_SET_ENUM = "dehumidify_set_enum"
DEHUMIDITY_SET_VALUE = "dehumidify_set_value"
DELAY_CLEAN_TIME = "delay_clean_time"
DELAY_SET = "delay_set"
DEW_POINT_TEMP = "dew_point_temp"
DISINFECTION = "disinfection"
@@ -718,7 +717,6 @@ class DPCode(StrEnum):
ELECTRICITY_LEFT = "electricity_left"
EXCRETION_TIME_DAY = "excretion_time_day"
EXCRETION_TIMES_DAY = "excretion_times_day"
FACTORY_RESET = "factory_reset"
FAN_BEEP = "fan_beep" # Sound
FAN_COOL = "fan_cool" # Cool wind
FAN_DIRECTION = "fan_direction" # Fan direction
@@ -775,7 +773,6 @@ class DPCode(StrEnum):
LIQUID_STATE = "liquid_state"
LOCK = "lock" # Lock / Child lock
MACH_OPERATE = "mach_operate"
MANUAL_CLEAN = "manual_clean"
MANUAL_FEED = "manual_feed"
MASTER_MODE = "master_mode" # alarm mode
MASTER_STATE = "master_state" # alarm state

View File

@@ -240,13 +240,6 @@ LIGHTS: dict[DeviceCategory, tuple[TuyaLightEntityDescription, ...]] = {
color_data=DPCode.COLOUR_DATA,
),
),
DeviceCategory.MSP: (
TuyaLightEntityDescription(
key=DPCode.LIGHT,
translation_key="light",
entity_category=EntityCategory.CONFIG,
),
),
DeviceCategory.QJDCZ: (
TuyaLightEntityDescription(
key=DPCode.SWITCH_LED,

View File

@@ -22,18 +22,17 @@ class TypeInformation:
As provided by the SDK, from `device.function` / `device.status_range`.
"""
dpcode: DPCode
@classmethod
def from_json(cls, dpcode: DPCode, data: str) -> Self | None:
"""Load JSON string and return a TypeInformation object."""
return cls(dpcode)
raise NotImplementedError("from_json is not implemented for this type")
@dataclass
class IntegerTypeData(TypeInformation):
"""Integer Type Data."""
dpcode: DPCode
min: int
max: int
scale: float
@@ -101,24 +100,11 @@ class IntegerTypeData(TypeInformation):
)
@dataclass
class BitmapTypeInformation(TypeInformation):
"""Bitmap type information."""
label: list[str]
@classmethod
def from_json(cls, dpcode: DPCode, data: str) -> Self | None:
"""Load JSON string and return a BitmapTypeInformation object."""
if not (parsed := json.loads(data)):
return None
return cls(dpcode, **parsed)
@dataclass
class EnumTypeData(TypeInformation):
"""Enum Type Data."""
dpcode: DPCode
range: list[str]
@classmethod
@@ -130,8 +116,6 @@ class EnumTypeData(TypeInformation):
_TYPE_INFORMATION_MAPPINGS: dict[DPType, type[TypeInformation]] = {
DPType.BITMAP: BitmapTypeInformation,
DPType.BOOLEAN: TypeInformation,
DPType.ENUM: EnumTypeData,
DPType.INTEGER: IntegerTypeData,
}
@@ -162,13 +146,13 @@ class DPCodeWrapper(ABC):
The raw device status is converted to a Home Assistant value.
"""
@abstractmethod
def _convert_value_to_raw_value(self, device: CustomerDevice, value: Any) -> Any:
"""Convert a Home Assistant value back to a raw device value.
This is called by `get_update_command` to prepare the value for sending
back to the device, and should be implemented in concrete classes if needed.
back to the device, and should be implemented in concrete classes.
"""
raise NotImplementedError
def get_update_command(self, device: CustomerDevice, value: Any) -> dict[str, Any]:
"""Get the update command for the dpcode.
@@ -181,6 +165,29 @@ class DPCodeWrapper(ABC):
}
class DPCodeBooleanWrapper(DPCodeWrapper):
"""Simple wrapper for boolean values.
Supports True/False only.
"""
def read_device_status(self, device: CustomerDevice) -> bool | None:
"""Read the device value for the dpcode."""
if (raw_value := self._read_device_status_raw(device)) in (True, False):
return raw_value
return None
def _convert_value_to_raw_value(
self, device: CustomerDevice, value: Any
) -> Any | None:
"""Convert a Home Assistant value back to a raw device value."""
if value in (True, False):
return value
# Currently only called with boolean values
# Safety net in case of future changes
raise ValueError(f"Invalid boolean value `{value}`")
class DPCodeTypeInformationWrapper[T: TypeInformation](DPCodeWrapper):
"""Base DPCode wrapper with Type Information."""
@@ -210,31 +217,6 @@ class DPCodeTypeInformationWrapper[T: TypeInformation](DPCodeWrapper):
return None
class DPCodeBooleanWrapper(DPCodeTypeInformationWrapper[TypeInformation]):
"""Simple wrapper for boolean values.
Supports True/False only.
"""
DPTYPE = DPType.BOOLEAN
def read_device_status(self, device: CustomerDevice) -> bool | None:
"""Read the device value for the dpcode."""
if (raw_value := self._read_device_status_raw(device)) in (True, False):
return raw_value
return None
def _convert_value_to_raw_value(
self, device: CustomerDevice, value: Any
) -> Any | None:
"""Convert a Home Assistant value back to a raw device value."""
if value in (True, False):
return value
# Currently only called with boolean values
# Safety net in case of future changes
raise ValueError(f"Invalid boolean value `{value}`")
class DPCodeEnumWrapper(DPCodeTypeInformationWrapper[EnumTypeData]):
"""Simple wrapper for EnumTypeData values."""
@@ -290,48 +272,6 @@ class DPCodeIntegerWrapper(DPCodeTypeInformationWrapper[IntegerTypeData]):
)
class DPCodeBitmapBitWrapper(DPCodeWrapper):
"""Simple wrapper for a specific bit in bitmap values."""
def __init__(self, dpcode: str, mask: int) -> None:
"""Init DPCodeBitmapWrapper."""
super().__init__(dpcode)
self._mask = mask
def read_device_status(self, device: CustomerDevice) -> bool | None:
"""Read the device value for the dpcode."""
if (raw_value := self._read_device_status_raw(device)) is None:
return None
return (raw_value & (1 << self._mask)) != 0
@classmethod
def find_dpcode(
cls,
device: CustomerDevice,
dpcodes: str | DPCode | tuple[DPCode, ...],
*,
bitmap_key: str,
) -> Self | None:
"""Find and return a DPCodeBitmapBitWrapper for the given DP codes."""
if (
type_information := find_dpcode(device, dpcodes, dptype=DPType.BITMAP)
) and bitmap_key in type_information.label:
return cls(
type_information.dpcode, type_information.label.index(bitmap_key)
)
return None
@overload
def find_dpcode(
device: CustomerDevice,
dpcodes: str | DPCode | tuple[DPCode, ...] | None,
*,
prefer_function: bool = False,
dptype: Literal[DPType.BITMAP],
) -> BitmapTypeInformation | None: ...
@overload
def find_dpcode(
device: CustomerDevice,

View File

@@ -180,14 +180,6 @@ NUMBERS: dict[DeviceCategory, tuple[NumberEntityDescription, ...]] = {
entity_category=EntityCategory.CONFIG,
),
),
DeviceCategory.MSP: (
NumberEntityDescription(
key=DPCode.DELAY_CLEAN_TIME,
translation_key="delay_clean_time",
device_class=NumberDeviceClass.DURATION,
entity_category=EntityCategory.CONFIG,
),
),
DeviceCategory.MZJ: (
NumberEntityDescription(
key=DPCode.COOK_TEMPERATURE,

View File

@@ -77,12 +77,6 @@
}
},
"button": {
"factory_reset": {
"name": "Factory reset"
},
"manual_clean": {
"name": "Manual clean"
},
"reset_duster_cloth": {
"name": "Reset duster cloth"
},
@@ -172,9 +166,6 @@
"cook_time": {
"name": "Cooking time"
},
"delay_clean_time": {
"name": "Delay clean time"
},
"down_delay": {
"name": "Down delay"
},

View File

@@ -946,13 +946,14 @@ async def async_setup_entry(
device = manager.device_map[device_id]
if descriptions := SWITCHES.get(device.category):
entities.extend(
TuyaSwitchEntity(device, manager, description, dpcode_wrapper)
for description in descriptions
if (
dpcode_wrapper := DPCodeBooleanWrapper.find_dpcode(
device, description.key, prefer_function=True
)
TuyaSwitchEntity(
device,
manager,
description,
DPCodeBooleanWrapper(description.key),
)
for description in descriptions
if description.key in device.status
and _check_deprecation(
hass,
device,

View File

@@ -94,13 +94,14 @@ async def async_setup_entry(
device = manager.device_map[device_id]
if descriptions := VALVES.get(device.category):
entities.extend(
TuyaValveEntity(device, manager, description, dpcode_wrapper)
for description in descriptions
if (
dpcode_wrapper := DPCodeBooleanWrapper.find_dpcode(
device, description.key, prefer_function=True
)
TuyaValveEntity(
device,
manager,
description,
DPCodeBooleanWrapper(description.key),
)
for description in descriptions
if description.key in device.status
)
async_add_entities(entities)

View File

@@ -14,7 +14,7 @@
"velbus-protocol"
],
"quality_scale": "bronze",
"requirements": ["velbus-aio==2025.11.0"],
"requirements": ["velbus-aio==2025.8.0"],
"usb": [
{
"pid": "0B1B",

View File

@@ -35,7 +35,6 @@ class VeluxLight(VeluxEntity, LightEntity):
_attr_supported_color_modes = {ColorMode.BRIGHTNESS}
_attr_color_mode = ColorMode.BRIGHTNESS
_attr_name = None
node: LighteningDevice

View File

@@ -22,7 +22,6 @@ from homeassistant.const import (
UnitOfElectricPotential,
UnitOfEnergy,
UnitOfPower,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
@@ -140,15 +139,6 @@ SENSORS: tuple[VeSyncSensorEntityDescription, ...] = (
value_fn=lambda device: device.state.humidity,
exists_fn=is_humidifier,
),
VeSyncSensorEntityDescription(
key="temperature",
device_class=SensorDeviceClass.TEMPERATURE,
native_unit_of_measurement=UnitOfTemperature.FAHRENHEIT,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda device: device.state.temperature,
exists_fn=lambda device: is_humidifier(device)
and device.state.temperature is not None,
),
)

View File

@@ -16,7 +16,6 @@ from homeassistant.exceptions import (
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.config_entry_oauth2_flow import (
ImplementationUnavailableError,
OAuth2Session,
async_get_config_entry_implementation,
)
@@ -66,13 +65,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: VolvoConfigEntry) -> bo
async def _async_auth_and_create_api(
hass: HomeAssistant, entry: VolvoConfigEntry
) -> VolvoCarsApi:
try:
implementation = await async_get_config_entry_implementation(hass, entry)
except ImplementationUnavailableError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="oauth2_implementation_unavailable",
) from err
implementation = await async_get_config_entry_implementation(hass, entry)
oauth_session = OAuth2Session(hass, entry, implementation)
web_session = async_get_clientsession(hass)
auth = VolvoAuth(web_session, oauth_session)

View File

@@ -362,9 +362,6 @@
"no_vehicle": {
"message": "Unable to retrieve vehicle details."
},
"oauth2_implementation_unavailable": {
"message": "OAuth2 implementation unavailable, will retry"
},
"unauthorized": {
"message": "Authentication failed. {message}"
},

View File

@@ -54,7 +54,7 @@ _PING_TIMEOUT: Final = 5
_PING_SEND_DELAY: Final = 2
_PIPELINE_FINISH_TIMEOUT: Final = 1
_TTS_SAMPLE_RATE: Final = 22050
_AUDIO_CHUNK_BYTES: Final = 2048 # 1024 samples
_ANNOUNCE_CHUNK_BYTES: Final = 2048 # 1024 samples
_TTS_TIMEOUT_EXTRA: Final = 1.0
# Wyoming stage -> Assist stage
@@ -360,7 +360,7 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
)
assert proc.stdout is not None
while True:
chunk_bytes = await proc.stdout.read(_AUDIO_CHUNK_BYTES)
chunk_bytes = await proc.stdout.read(_ANNOUNCE_CHUNK_BYTES)
if not chunk_bytes:
break
@@ -782,22 +782,17 @@ class WyomingAssistSatellite(WyomingSatelliteEntity, AssistSatelliteEntity):
assert sample_width is not None
assert sample_channels is not None
data_chunk_idx = 0
while data_chunk_idx < len(data_chunk):
audio_chunk = AudioChunk(
rate=sample_rate,
width=sample_width,
channels=sample_channels,
audio=data_chunk[
data_chunk_idx : data_chunk_idx + _AUDIO_CHUNK_BYTES
],
timestamp=timestamp,
)
audio_chunk = AudioChunk(
rate=sample_rate,
width=sample_width,
channels=sample_channels,
audio=data_chunk,
timestamp=timestamp,
)
await self._client.write_event(audio_chunk.event())
timestamp += audio_chunk.milliseconds
total_seconds += audio_chunk.seconds
data_chunk_idx += _AUDIO_CHUNK_BYTES
await self._client.write_event(audio_chunk.event())
timestamp += audio_chunk.milliseconds
total_seconds += audio_chunk.seconds
await self._client.write_event(AudioStop(timestamp=timestamp).event())
_LOGGER.debug("TTS streaming complete")

View File

@@ -1304,11 +1304,7 @@ def issues(hass: HomeAssistant) -> dict[tuple[str, str], dict[str, Any]]:
"""Return all open issues."""
current_issues = ir.async_get(hass).issues
# Use JSON for safe representation
return {
key: issue_entry.to_json()
for (key, issue_entry) in current_issues.items()
if issue_entry.active
}
return {k: v.to_json() for (k, v) in current_issues.items()}
def issue(hass: HomeAssistant, domain: str, issue_id: str) -> dict[str, Any] | None:

14
requirements_all.txt generated
View File

@@ -83,7 +83,7 @@ PyRMVtransport==0.3.3
PySrDaliGateway==0.13.1
# homeassistant.components.switchbot
PySwitchbot==0.73.0
PySwitchbot==0.72.1
# homeassistant.components.switchmate
PySwitchmate==0.5.1
@@ -315,7 +315,7 @@ aiolookin==1.0.0
aiolyric==2.0.2
# homeassistant.components.mealie
aiomealie==1.1.0
aiomealie==1.0.1
# homeassistant.components.modern_forms
aiomodernforms==0.1.8
@@ -354,7 +354,7 @@ aiopulse==0.4.6
aiopurpleair==2025.08.1
# homeassistant.components.hunterdouglas_powerview
aiopvapi==3.3.0
aiopvapi==3.2.1
# homeassistant.components.pvpc_hourly_pricing
aiopvpc==4.2.2
@@ -1719,7 +1719,7 @@ plexauth==0.0.6
plexwebsocket==0.0.14
# homeassistant.components.plugwise
plugwise==1.10.0
plugwise==1.9.0
# homeassistant.components.serial_pm
pmsensor==0.4
@@ -2259,7 +2259,7 @@ pyotp==2.9.0
pyoverkiz==1.19.0
# homeassistant.components.palazzetti
pypalazzetti==0.1.20
pypalazzetti==0.1.19
# homeassistant.components.paperless_ngx
pypaperless==4.1.1
@@ -2380,7 +2380,7 @@ pysmappee==0.2.29
pysmarlaapi==0.9.2
# homeassistant.components.smartthings
pysmartthings==3.3.2
pysmartthings==3.3.1
# homeassistant.components.smarty
pysmarty2==0.10.3
@@ -3076,7 +3076,7 @@ vegehub==0.1.26
vehicle==2.2.2
# homeassistant.components.velbus
velbus-aio==2025.11.0
velbus-aio==2025.8.0
# homeassistant.components.venstar
venstarcolortouch==0.21

View File

@@ -80,7 +80,7 @@ PyRMVtransport==0.3.3
PySrDaliGateway==0.13.1
# homeassistant.components.switchbot
PySwitchbot==0.73.0
PySwitchbot==0.72.1
# homeassistant.components.syncthru
PySyncThru==0.8.0
@@ -297,7 +297,7 @@ aiolookin==1.0.0
aiolyric==2.0.2
# homeassistant.components.mealie
aiomealie==1.1.0
aiomealie==1.0.1
# homeassistant.components.modern_forms
aiomodernforms==0.1.8
@@ -336,7 +336,7 @@ aiopulse==0.4.6
aiopurpleair==2025.08.1
# homeassistant.components.hunterdouglas_powerview
aiopvapi==3.3.0
aiopvapi==3.2.1
# homeassistant.components.pvpc_hourly_pricing
aiopvpc==4.2.2
@@ -1456,7 +1456,7 @@ plexauth==0.0.6
plexwebsocket==0.0.14
# homeassistant.components.plugwise
plugwise==1.10.0
plugwise==1.9.0
# homeassistant.components.poolsense
poolsense==0.0.8
@@ -1885,7 +1885,7 @@ pyotp==2.9.0
pyoverkiz==1.19.0
# homeassistant.components.palazzetti
pypalazzetti==0.1.20
pypalazzetti==0.1.19
# homeassistant.components.paperless_ngx
pypaperless==4.1.1
@@ -1982,7 +1982,7 @@ pysmappee==0.2.29
pysmarlaapi==0.9.2
# homeassistant.components.smartthings
pysmartthings==3.3.2
pysmartthings==3.3.1
# homeassistant.components.smarty
pysmarty2==0.10.3
@@ -2543,7 +2543,7 @@ vegehub==0.1.26
vehicle==2.2.2
# homeassistant.components.velbus
velbus-aio==2025.11.0
velbus-aio==2025.8.0
# homeassistant.components.venstar
venstarcolortouch==0.21

View File

@@ -1,11 +1,11 @@
"""Test Ambient Weather Network sensors."""
from datetime import timedelta
from datetime import datetime, timedelta
from unittest.mock import AsyncMock, patch
from aioambient import OpenAPI
from aioambient.errors import RequestError
from freezegun.api import FrozenDateTimeFactory
from freezegun import freeze_time
import pytest
from syrupy.assertion import SnapshotAssertion
@@ -18,7 +18,7 @@ from .conftest import setup_platform
from tests.common import async_fire_time_changed, snapshot_platform
@pytest.mark.freeze_time("2023-11-9")
@freeze_time("2023-11-9")
@pytest.mark.parametrize(
"config_entry",
["AA:AA:AA:AA:AA:AA", "CC:CC:CC:CC:CC:CC", "DD:DD:DD:DD:DD:DD"],
@@ -54,43 +54,45 @@ async def test_sensors_with_no_data(
@pytest.mark.parametrize("config_entry", ["AA:AA:AA:AA:AA:AA"], indirect=True)
@pytest.mark.freeze_time("2023-11-8")
async def test_sensors_disappearing(
hass: HomeAssistant,
open_api: OpenAPI,
aioambient: AsyncMock,
config_entry: ConfigEntry,
caplog: pytest.LogCaptureFixture,
freezer: FrozenDateTimeFactory,
) -> None:
"""Test that we log errors properly."""
# Normal state, sensor is available.
await setup_platform(True, hass, config_entry)
sensor = hass.states.get("sensor.station_a_relative_pressure")
assert sensor is not None
assert float(sensor.state) == pytest.approx(1001.89694313129)
# Sensor becomes unavailable if the network is unavailable. Log message
# should only show up once.
for _ in range(5):
with patch.object(open_api, "get_device_details", side_effect=RequestError()):
freezer.tick(timedelta(minutes=10))
async_fire_time_changed(hass)
await hass.async_block_till_done()
sensor = hass.states.get("sensor.station_a_relative_pressure")
assert sensor is not None
assert sensor.state == "unavailable"
assert caplog.text.count("Cannot connect to Ambient Network") == 1
# Network comes back. Sensor should start reporting again. Log message
# should only show up once.
for _ in range(5):
freezer.tick(timedelta(minutes=10))
async_fire_time_changed(hass)
await hass.async_block_till_done()
initial_datetime = datetime(year=2023, month=11, day=8)
with freeze_time(initial_datetime) as frozen_datetime:
# Normal state, sensor is available.
await setup_platform(True, hass, config_entry)
sensor = hass.states.get("sensor.station_a_relative_pressure")
assert sensor is not None
assert float(sensor.state) == pytest.approx(1001.89694313129)
assert caplog.text.count("Fetching ambient_network data recovered") == 1
# Sensor becomes unavailable if the network is unavailable. Log message
# should only show up once.
for _ in range(5):
with patch.object(
open_api, "get_device_details", side_effect=RequestError()
):
frozen_datetime.tick(timedelta(minutes=10))
async_fire_time_changed(hass)
await hass.async_block_till_done()
sensor = hass.states.get("sensor.station_a_relative_pressure")
assert sensor is not None
assert sensor.state == "unavailable"
assert caplog.text.count("Cannot connect to Ambient Network") == 1
# Network comes back. Sensor should start reporting again. Log message
# should only show up once.
for _ in range(5):
frozen_datetime.tick(timedelta(minutes=10))
async_fire_time_changed(hass)
await hass.async_block_till_done()
sensor = hass.states.get("sensor.station_a_relative_pressure")
assert sensor is not None
assert float(sensor.state) == pytest.approx(1001.89694313129)
assert caplog.text.count("Fetching ambient_network data recovered") == 1

View File

@@ -1,168 +1 @@
"""Tests for the Anthropic integration."""
from anthropic.types import (
CitationsDelta,
InputJSONDelta,
RawContentBlockDeltaEvent,
RawContentBlockStartEvent,
RawContentBlockStopEvent,
RawMessageStreamEvent,
RedactedThinkingBlock,
ServerToolUseBlock,
SignatureDelta,
TextBlock,
TextCitation,
TextDelta,
ThinkingBlock,
ThinkingDelta,
ToolUseBlock,
WebSearchResultBlock,
WebSearchToolResultBlock,
)
def create_content_block(
index: int, text_parts: list[str], citations: list[TextCitation] | None = None
) -> list[RawMessageStreamEvent]:
"""Create a text content block with the specified deltas."""
return [
RawContentBlockStartEvent(
type="content_block_start",
content_block=TextBlock(
text="", type="text", citations=[] if citations else None
),
index=index,
),
*[
RawContentBlockDeltaEvent(
delta=CitationsDelta(citation=citation, type="citations_delta"),
index=index,
type="content_block_delta",
)
for citation in (citations or [])
],
*[
RawContentBlockDeltaEvent(
delta=TextDelta(text=text_part, type="text_delta"),
index=index,
type="content_block_delta",
)
for text_part in text_parts
],
RawContentBlockStopEvent(index=index, type="content_block_stop"),
]
def create_thinking_block(
index: int, thinking_parts: list[str]
) -> list[RawMessageStreamEvent]:
"""Create a thinking block with the specified deltas."""
return [
RawContentBlockStartEvent(
type="content_block_start",
content_block=ThinkingBlock(signature="", thinking="", type="thinking"),
index=index,
),
*[
RawContentBlockDeltaEvent(
delta=ThinkingDelta(thinking=thinking_part, type="thinking_delta"),
index=index,
type="content_block_delta",
)
for thinking_part in thinking_parts
],
RawContentBlockDeltaEvent(
delta=SignatureDelta(
signature="ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/N"
"oB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ"
"4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo"
"21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==",
type="signature_delta",
),
index=index,
type="content_block_delta",
),
RawContentBlockStopEvent(index=index, type="content_block_stop"),
]
def create_redacted_thinking_block(index: int) -> list[RawMessageStreamEvent]:
"""Create a redacted thinking block."""
return [
RawContentBlockStartEvent(
type="content_block_start",
content_block=RedactedThinkingBlock(
data="EroBCkYIARgCKkBJDytPJhw//4vy3t7aE+LfIkxvkAh51cBPrAvBCo6AjgI57Zt9K"
"WPnUVV50OQJ0KZzUFoGZG5sxg95zx4qMwkoEgz43Su3myJKckvj03waDBZLIBSeoAeRUeV"
"sJCIwQ5edQN0sa+HNeB/KUBkoMUwV+IT0eIhcpFxnILdvxUAKM4R1o4KG3x+yO0eo/kyOK"
"iKfrCPFQhvBVmTZPFhgA2Ow8L9gGDVipcz6x3Uu9YETGEny",
type="redacted_thinking",
),
index=index,
),
RawContentBlockStopEvent(index=index, type="content_block_stop"),
]
def create_tool_use_block(
index: int, tool_id: str, tool_name: str, json_parts: list[str]
) -> list[RawMessageStreamEvent]:
"""Create a tool use content block with the specified deltas."""
return [
RawContentBlockStartEvent(
type="content_block_start",
content_block=ToolUseBlock(
id=tool_id, name=tool_name, input={}, type="tool_use"
),
index=index,
),
*[
RawContentBlockDeltaEvent(
delta=InputJSONDelta(partial_json=json_part, type="input_json_delta"),
index=index,
type="content_block_delta",
)
for json_part in json_parts
],
RawContentBlockStopEvent(index=index, type="content_block_stop"),
]
def create_web_search_block(
index: int, id: str, query_parts: list[str]
) -> list[RawMessageStreamEvent]:
"""Create a server tool use block for web search."""
return [
RawContentBlockStartEvent(
type="content_block_start",
content_block=ServerToolUseBlock(
type="server_tool_use", id=id, input={}, name="web_search"
),
index=index,
),
*[
RawContentBlockDeltaEvent(
delta=InputJSONDelta(type="input_json_delta", partial_json=query_part),
index=index,
type="content_block_delta",
)
for query_part in query_parts
],
RawContentBlockStopEvent(index=index, type="content_block_stop"),
]
def create_web_search_result_block(
index: int, id: str, results: list[WebSearchResultBlock]
) -> list[RawMessageStreamEvent]:
"""Create a server tool result block for web search results."""
return [
RawContentBlockStartEvent(
type="content_block_start",
content_block=WebSearchToolResultBlock(
type="web_search_tool_result", tool_use_id=id, content=results
),
index=index,
),
RawContentBlockStopEvent(index=index, type="content_block_stop"),
]

View File

@@ -1,20 +1,8 @@
"""Tests helpers."""
from collections.abc import AsyncGenerator, Generator, Iterable
from unittest.mock import AsyncMock, patch
from collections.abc import AsyncGenerator
from unittest.mock import patch
from anthropic.types import (
Message,
MessageDeltaUsage,
RawContentBlockStartEvent,
RawMessageDeltaEvent,
RawMessageStartEvent,
RawMessageStopEvent,
RawMessageStreamEvent,
ToolUseBlock,
Usage,
)
from anthropic.types.raw_message_delta_event import Delta
import pytest
from homeassistant.components.anthropic import CONF_CHAT_MODEL
@@ -26,7 +14,6 @@ from homeassistant.components.anthropic.const import (
CONF_WEB_SEARCH_REGION,
CONF_WEB_SEARCH_TIMEZONE,
CONF_WEB_SEARCH_USER_LOCATION,
DEFAULT_AI_TASK_NAME,
DEFAULT_CONVERSATION_NAME,
)
from homeassistant.const import CONF_LLM_HASS_API
@@ -53,13 +40,7 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
"subentry_type": "conversation",
"title": DEFAULT_CONVERSATION_NAME,
"unique_id": None,
},
{
"data": {},
"subentry_type": "ai_task_data",
"title": DEFAULT_AI_TASK_NAME,
"unique_id": None,
},
}
],
)
entry.add_to_hass(hass)
@@ -133,61 +114,3 @@ async def mock_init_component(
async def setup_ha(hass: HomeAssistant) -> None:
"""Set up Home Assistant."""
assert await async_setup_component(hass, "homeassistant", {})
@pytest.fixture
def mock_create_stream() -> Generator[AsyncMock]:
"""Mock stream response."""
async def mock_generator(events: Iterable[RawMessageStreamEvent], **kwargs):
"""Create a stream of messages with the specified content blocks."""
stop_reason = "end_turn"
refusal_magic_string = "ANTHROPIC_MAGIC_STRING_TRIGGER_REFUSAL_1FAEFB6177B4672DEE07F9D3AFC62588CCD2631EDCF22E8CCC1FB35B501C9C86"
for message in kwargs.get("messages"):
if message["role"] != "user":
continue
if isinstance(message["content"], str):
if refusal_magic_string in message["content"]:
stop_reason = "refusal"
break
else:
for content in message["content"]:
if content.get(
"type"
) == "text" and refusal_magic_string in content.get("text", ""):
stop_reason = "refusal"
break
yield RawMessageStartEvent(
message=Message(
type="message",
id="msg_1234567890ABCDEFGHIJKLMN",
content=[],
role="assistant",
model="claude-3-5-sonnet-20240620",
usage=Usage(input_tokens=0, output_tokens=0),
),
type="message_start",
)
for event in events:
if isinstance(event, RawContentBlockStartEvent) and isinstance(
event.content_block, ToolUseBlock
):
stop_reason = "tool_use"
yield event
yield RawMessageDeltaEvent(
type="message_delta",
delta=Delta(stop_reason=stop_reason, stop_sequence=""),
usage=MessageDeltaUsage(output_tokens=0),
)
yield RawMessageStopEvent(type="message_stop")
with patch(
"anthropic.resources.messages.AsyncMessages.create",
new_callable=AsyncMock,
) as mock_create:
mock_create.side_effect = lambda **kwargs: mock_generator(
mock_create.return_value.pop(0), **kwargs
)
yield mock_create

View File

@@ -1,211 +0,0 @@
"""Tests for the Anthropic integration."""
from pathlib import Path
from unittest.mock import AsyncMock, patch
import pytest
import voluptuous as vol
from homeassistant.components import ai_task, media_source
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er, selector
from . import create_content_block, create_tool_use_block
from tests.common import MockConfigEntry
async def test_generate_data(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_init_component,
    mock_create_stream: AsyncMock,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test AI Task data generation."""
    entity_id = "ai_task.claude_ai_task"

    # The entity must be registered against the config entry's
    # ai_task_data subentry.
    registry_entry = entity_registry.async_get(entity_id)
    assert registry_entry is not None
    ai_task_subentry = next(
        subentry
        for subentry in mock_config_entry.subentries.values()
        if subentry.subentry_type == "ai_task_data"
    )
    assert registry_entry.config_entry_id == mock_config_entry.entry_id
    assert registry_entry.config_subentry_id == ai_task_subentry.subentry_id

    mock_create_stream.return_value = [create_content_block(0, ["The test data"])]

    result = await ai_task.async_generate_data(
        hass,
        task_name="Test Task",
        entity_id=entity_id,
        instructions="Generate test data",
    )

    assert result.data == "The test data"
async def test_generate_structured_data(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_init_component,
    mock_create_stream: AsyncMock,
) -> None:
    """Test AI Task structured data generation."""
    # The tool-use block streams the JSON payload split across deltas.
    mock_create_stream.return_value = [
        create_tool_use_block(
            1,
            "toolu_0123456789AbCdEfGhIjKlM",
            "test_task",
            ['{"charac', 'ters": ["Mario', '", "Luigi"]}'],
        ),
    ]

    structure = vol.Schema(
        {
            vol.Required("characters"): selector.selector(
                {"text": {"multiple": True}}
            )
        },
    )
    result = await ai_task.async_generate_data(
        hass,
        task_name="Test Task",
        entity_id="ai_task.claude_ai_task",
        instructions="Generate test data",
        structure=structure,
    )

    assert result.data == {"characters": ["Mario", "Luigi"]}
async def test_generate_invalid_structured_data(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_init_component,
    mock_create_stream: AsyncMock,
) -> None:
    """Test AI Task raising when the tool response is not valid JSON."""
    # Pass the payload as a list of stream parts, matching how
    # create_tool_use_block is invoked by the sibling tests; a bare string
    # would be iterated character by character instead of as one chunk.
    mock_create_stream.return_value = [
        create_tool_use_block(
            1,
            "toolu_0123456789AbCdEfGhIjKlM",
            "test_task",
            ["INVALID JSON RESPONSE"],
        )
    ]

    with pytest.raises(
        HomeAssistantError, match="Error with Claude structured response"
    ):
        await ai_task.async_generate_data(
            hass,
            task_name="Test Task",
            entity_id="ai_task.claude_ai_task",
            instructions="Generate test data",
            structure=vol.Schema(
                {
                    vol.Required("characters"): selector.selector(
                        {
                            "text": {
                                "multiple": True,
                            }
                        }
                    )
                },
            ),
        )
async def test_generate_data_with_attachments(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_init_component,
    mock_create_stream: AsyncMock,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test AI Task data generation with image and PDF attachments."""
    entity_id = "ai_task.claude_ai_task"

    mock_create_stream.return_value = [create_content_block(0, ["Hi there!"])]

    # Resolve two media-source attachments and stub all filesystem access.
    with (
        patch(
            "homeassistant.components.media_source.async_resolve_media",
            side_effect=[
                media_source.PlayMedia(
                    url="http://example.com/doorbell_snapshot.jpg",
                    mime_type="image/jpeg",
                    path=Path("doorbell_snapshot.jpg"),
                ),
                media_source.PlayMedia(
                    url="http://example.com/context.pdf",
                    mime_type="application/pdf",
                    path=Path("context.pdf"),
                ),
            ],
        ),
        patch("pathlib.Path.exists", return_value=True),
        patch(
            # Fix: patch guess_file_type in the module that actually looks
            # it up. The original targeted
            # homeassistant.components.openai_conversation.entity, which the
            # Anthropic integration never calls, so the patch had no effect.
            "homeassistant.components.anthropic.entity.guess_file_type",
            return_value=("image/jpeg", None),
        ),
        patch("pathlib.Path.read_bytes", return_value=b"fake_image_data"),
    ):
        result = await ai_task.async_generate_data(
            hass,
            task_name="Test Task",
            entity_id=entity_id,
            instructions="Test prompt",
            attachments=[
                {"media_content_id": "media-source://media/doorbell_snapshot.jpg"},
                {"media_content_id": "media-source://media/context.pdf"},
            ],
        )

    assert result.data == "Hi there!"

    # Verify the stream was called with the attachments in the user message.
    call_args = mock_create_stream.call_args
    assert call_args is not None

    input_messages = call_args[1]["messages"]
    assert len(input_messages) > 0

    # The user message carrying the attachments is second to last
    # (the assistant reply is appended after it).
    user_message_with_attachments = input_messages[-2]

    assert user_message_with_attachments is not None
    assert isinstance(user_message_with_attachments["content"], list)
    assert len(user_message_with_attachments["content"]) == 3  # Text + attachments
    # "ZmFrZV9pbWFnZV9kYXRh" is base64 of b"fake_image_data".
    assert user_message_with_attachments["content"] == [
        {"type": "text", "text": "Test prompt"},
        {
            "type": "image",
            "source": {
                "data": "ZmFrZV9pbWFnZV9kYXRh",
                "media_type": "image/jpeg",
                "type": "base64",
            },
        },
        {
            "type": "document",
            "source": {
                "data": "ZmFrZV9pbWFnZV9kYXRh",
                "media_type": "application/pdf",
                "type": "base64",
            },
        },
    ]

View File

@@ -15,10 +15,7 @@ from httpx import URL, Request, Response
import pytest
from homeassistant import config_entries
from homeassistant.components.anthropic.config_flow import (
RECOMMENDED_AI_TASK_OPTIONS,
RECOMMENDED_CONVERSATION_OPTIONS,
)
from homeassistant.components.anthropic.config_flow import RECOMMENDED_OPTIONS
from homeassistant.components.anthropic.const import (
CONF_CHAT_MODEL,
CONF_MAX_TOKENS,
@@ -33,7 +30,6 @@ from homeassistant.components.anthropic.const import (
CONF_WEB_SEARCH_REGION,
CONF_WEB_SEARCH_TIMEZONE,
CONF_WEB_SEARCH_USER_LOCATION,
DEFAULT_AI_TASK_NAME,
DEFAULT_CONVERSATION_NAME,
DOMAIN,
RECOMMENDED_CHAT_MODEL,
@@ -78,6 +74,7 @@ async def test_form(hass: HomeAssistant) -> None:
"api_key": "bla",
},
)
await hass.async_block_till_done()
assert result2["type"] is FlowResultType.CREATE_ENTRY
assert result2["data"] == {
@@ -87,16 +84,10 @@ async def test_form(hass: HomeAssistant) -> None:
assert result2["subentries"] == [
{
"subentry_type": "conversation",
"data": RECOMMENDED_CONVERSATION_OPTIONS,
"data": RECOMMENDED_OPTIONS,
"title": DEFAULT_CONVERSATION_NAME,
"unique_id": None,
},
{
"subentry_type": "ai_task_data",
"data": RECOMMENDED_AI_TASK_OPTIONS,
"title": DEFAULT_AI_TASK_NAME,
"unique_id": None,
},
}
]
assert len(mock_setup_entry.mock_calls) == 1
@@ -144,13 +135,14 @@ async def test_creating_conversation_subentry(
result2 = await hass.config_entries.subentries.async_configure(
result["flow_id"],
{CONF_NAME: "Mock name", **RECOMMENDED_CONVERSATION_OPTIONS},
{CONF_NAME: "Mock name", **RECOMMENDED_OPTIONS},
)
await hass.async_block_till_done()
assert result2["type"] is FlowResultType.CREATE_ENTRY
assert result2["title"] == "Mock name"
processed_options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
processed_options = RECOMMENDED_OPTIONS.copy()
processed_options[CONF_PROMPT] = processed_options[CONF_PROMPT].strip()
assert result2["data"] == processed_options
@@ -310,6 +302,7 @@ async def test_subentry_web_search_user_location(
"user_location": True,
},
)
await hass.async_block_till_done()
assert (
mock_create.call_args.kwargs["messages"][0]["content"] == "Where are the "
@@ -564,122 +557,3 @@ async def test_subentry_options_switching(
assert subentry_flow["type"] is FlowResultType.ABORT
assert subentry_flow["reason"] == "reconfigure_successful"
assert subentry.data == expected_options
async def test_creating_ai_task_subentry(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_init_component,
) -> None:
    """Test creating an AI task subentry."""
    existing_ids = set(mock_config_entry.subentries)
    # Original conversation + original ai_task
    assert len(mock_config_entry.subentries) == 2

    flow = await hass.config_entries.subentries.async_init(
        (mock_config_entry.entry_id, "ai_task_data"),
        context={"source": config_entries.SOURCE_USER},
    )
    assert flow.get("type") is FlowResultType.FORM
    assert flow.get("step_id") == "init"
    assert not flow.get("errors")

    created = await hass.config_entries.subentries.async_configure(
        flow["flow_id"],
        {
            "name": "Custom AI Task",
            CONF_RECOMMENDED: True,
        },
    )
    assert created.get("type") is FlowResultType.CREATE_ENTRY
    assert created.get("title") == "Custom AI Task"
    assert created.get("data") == {
        CONF_RECOMMENDED: True,
    }

    # Original conversation + original ai_task + new ai_task
    assert len(mock_config_entry.subentries) == 3
    (added_id,) = set(mock_config_entry.subentries) - existing_ids
    added_subentry = mock_config_entry.subentries[added_id]
    assert added_subentry.subentry_type == "ai_task_data"
    assert added_subentry.title == "Custom AI Task"
async def test_ai_task_subentry_not_loaded(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test creating an AI task subentry when entry is not loaded."""
    # mock_init_component is deliberately not requested, so the config
    # entry stays in the not-loaded state.
    flow_result = await hass.config_entries.subentries.async_init(
        (mock_config_entry.entry_id, "ai_task_data"),
        context={"source": config_entries.SOURCE_USER},
    )

    assert flow_result.get("type") is FlowResultType.ABORT
    assert flow_result.get("reason") == "entry_not_loaded"
async def test_creating_ai_task_subentry_advanced(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_init_component,
) -> None:
    """Test creating an AI task subentry with advanced settings."""
    flow = await hass.config_entries.subentries.async_init(
        (mock_config_entry.entry_id, "ai_task_data"),
        context={"source": config_entries.SOURCE_USER},
    )
    assert flow.get("type") is FlowResultType.FORM
    assert flow.get("step_id") == "init"

    # Step 1: opt out of recommended settings to reach the advanced form.
    advanced_form = await hass.config_entries.subentries.async_configure(
        flow["flow_id"],
        {
            "name": "Advanced AI Task",
            CONF_RECOMMENDED: False,
        },
    )
    assert advanced_form.get("type") is FlowResultType.FORM
    assert advanced_form.get("step_id") == "advanced"

    # Step 2: fill in the advanced (sampling) settings.
    model_form = await hass.config_entries.subentries.async_configure(
        flow["flow_id"],
        {
            CONF_CHAT_MODEL: "claude-sonnet-4-5",
            CONF_MAX_TOKENS: 200,
            CONF_TEMPERATURE: 0.5,
        },
    )
    assert model_form.get("type") is FlowResultType.FORM
    assert model_form.get("step_id") == "model"

    # Step 3: finish on the model-specific options form.
    created = await hass.config_entries.subentries.async_configure(
        flow["flow_id"],
        {
            CONF_WEB_SEARCH: False,
        },
    )
    assert created.get("type") is FlowResultType.CREATE_ENTRY
    assert created.get("title") == "Advanced AI Task"
    assert created.get("data") == {
        CONF_RECOMMENDED: False,
        CONF_CHAT_MODEL: "claude-sonnet-4-5",
        CONF_MAX_TOKENS: 200,
        CONF_TEMPERATURE: 0.5,
        CONF_WEB_SEARCH: False,
        CONF_WEB_SEARCH_MAX_USES: 5,
        CONF_WEB_SEARCH_USER_LOCATION: False,
        CONF_THINKING_BUDGET: 0,
    }

File diff suppressed because it is too large Load Diff

View File

@@ -79,7 +79,6 @@ async def integration_fixture(
"aqara_door_window_p2",
"aqara_motion_p2",
"aqara_presence_fp300",
"aqara_sensor_w100",
"aqara_thermostat_w500",
"aqara_u200",
"battery_storage",

View File

@@ -1,528 +0,0 @@
{
"node_id": 75,
"date_commissioned": "2025-06-07T15:30:15.263101",
"last_interview": "2025-06-07T15:30:15.263113",
"interview_version": 6,
"available": true,
"is_bridge": false,
"attributes": {
"0/29/0": [
{
"0": 18,
"1": 1
},
{
"0": 22,
"1": 3
}
],
"0/29/1": [29, 31, 40, 42, 48, 49, 51, 52, 53, 60, 62, 63, 70],
"0/29/2": [41],
"0/29/3": [1, 2, 3, 4, 5, 6],
"0/29/65532": 0,
"0/29/65533": 2,
"0/29/65528": [],
"0/29/65529": [],
"0/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"0/31/0": [
{
"1": 5,
"2": 2,
"3": [112233],
"4": null,
"254": 4
}
],
"0/31/1": [],
"0/31/2": 4,
"0/31/3": 3,
"0/31/4": 4,
"0/31/65532": 0,
"0/31/65533": 1,
"0/31/65528": [],
"0/31/65529": [],
"0/31/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
"0/40/0": 17,
"0/40/1": "Aqara",
"0/40/2": 4447,
"0/40/3": "Aqara Climate Sensor W100",
"0/40/4": 8196,
"0/40/5": "Climate Sensor W100",
"0/40/6": "**REDACTED**",
"0/40/7": 12,
"0/40/8": "0.0.1.2",
"0/40/9": 1010,
"0/40/10": "1.0.1.0",
"0/40/11": "20250108",
"0/40/12": "AA016",
"0/40/13": "https://www.aqara.com/en/products.html",
"0/40/14": "Aqara Climate Sensor W100",
"0/40/15": "***************",
"0/40/16": false,
"0/40/18": "***************",
"0/40/19": {
"0": 3,
"1": 3
},
"0/40/21": 16973824,
"0/40/22": 1,
"0/40/65532": 0,
"0/40/65533": 3,
"0/40/65528": [],
"0/40/65529": [],
"0/40/65531": [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 21, 22,
65528, 65529, 65531, 65532, 65533
],
"0/42/0": [],
"0/42/1": true,
"0/42/2": 1,
"0/42/3": null,
"0/42/65532": 0,
"0/42/65533": 1,
"0/42/65528": [],
"0/42/65529": [0],
"0/42/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"0/48/0": 0,
"0/48/1": {
"0": 60,
"1": 900
},
"0/48/2": 0,
"0/48/3": 0,
"0/48/4": true,
"0/48/65532": 0,
"0/48/65533": 1,
"0/48/65528": [1, 3, 5],
"0/48/65529": [0, 2, 4],
"0/48/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
"0/49/0": 1,
"0/49/1": [
{
"0": "aFq/aOcqMFo=",
"1": true
}
],
"0/49/2": 10,
"0/49/3": 20,
"0/49/4": true,
"0/49/5": 0,
"0/49/6": "aFq/aOcqMFo=",
"0/49/7": null,
"0/49/9": 4,
"0/49/10": 4,
"0/49/65532": 2,
"0/49/65533": 2,
"0/49/65528": [1, 5, 7],
"0/49/65529": [0, 3, 4, 6, 8],
"0/49/65531": [
0, 1, 2, 3, 4, 5, 6, 7, 9, 10, 65528, 65529, 65531, 65532, 65533
],
"0/51/0": [
{
"0": "AqaraHome-0123",
"1": true,
"2": null,
"3": null,
"4": "piylcw37nWM=",
"5": [],
"6": [
"/RXRKakLAAFKcohVnCFKow==",
"/Z4/qUibGFsAAAD//gAcAg==",
"/Z4/qUibGFsYCaOd1Hp6Vg==",
"/oAAAAAAAACkLKVzDfudYw=="
],
"7": 4
}
],
"0/51/1": 1,
"0/51/2": 299,
"0/51/4": 6,
"0/51/5": [],
"0/51/8": false,
"0/51/65532": 0,
"0/51/65533": 2,
"0/51/65528": [2],
"0/51/65529": [0, 1],
"0/51/65531": [0, 1, 2, 4, 5, 8, 65528, 65529, 65531, 65532, 65533],
"0/52/0": [
{
"0": 2,
"1": "sys_evt",
"3": 1952
},
{
"0": 11,
"1": "Bluetoot",
"3": 1438
},
{
"0": 3,
"1": "THREAD",
"3": 1651
},
{
"0": 1,
"1": "Bluetoot",
"3": 306
},
{
"0": 10,
"1": "Bluetoot",
"3": 107
},
{
"0": 7,
"1": "Tmr Svc",
"3": 943
},
{
"0": 8,
"1": "app",
"3": 748
},
{
"0": 6,
"1": "IDLE",
"3": 231
},
{
"0": 4,
"1": "CHIP",
"3": 305
}
],
"0/52/1": 46224,
"0/52/2": 35696,
"0/52/3": 56048,
"0/52/65532": 1,
"0/52/65533": 1,
"0/52/65528": [],
"0/52/65529": [0],
"0/52/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"0/53/0": 11,
"0/53/1": 2,
"0/53/2": "AqaraHome-0123",
"0/53/3": 23343,
"0/53/4": 7519532985124270170,
"0/53/5": "QP2eP6lImxhb",
"0/53/6": 0,
"0/53/7": [
{
"0": 17151429082474872369,
"1": 284,
"2": 7168,
"3": 295817,
"4": 111774,
"5": 3,
"6": -74,
"7": -74,
"8": 37,
"9": 0,
"10": true,
"11": true,
"12": true,
"13": false
}
],
"0/53/8": [
{
"0": 17151429082474872369,
"1": 7168,
"2": 7,
"3": 0,
"4": 0,
"5": 3,
"6": 3,
"7": 28,
"8": true,
"9": true
}
],
"0/53/9": 405350277,
"0/53/22": 2799,
"0/53/23": 2797,
"0/53/24": 2,
"0/53/39": 503,
"0/53/40": 503,
"0/53/41": 0,
"0/53/65532": 15,
"0/53/65533": 2,
"0/53/65528": [],
"0/53/65529": [0],
"0/53/65531": [
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 22, 23, 24, 39, 40, 41, 65528, 65529, 65531,
65532, 65533
],
"0/60/0": 0,
"0/60/1": null,
"0/60/2": null,
"0/60/65532": 1,
"0/60/65533": 1,
"0/60/65528": [],
"0/60/65529": [0, 1, 2],
"0/60/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"0/62/0": [
{
"1": "FTABAQEkAgE3AyQTAhgmBIAigScmBYAlTTo3BiQVAiQRSxgkBwEkCAEwCUEEL5gmAVxeNTcndwbt1d1SNaICqrmw8Mk3fQ7CkQlM0XhpLv0XzjnnmI+jorFA31RvWDYa0URByx588JSq6G/d7DcKNQEoARgkAgE2AwQCBAEYMAQUPES5ZFkTssoDCAkEz+kBgkL3jMcwBRRT9HTfU5Nds+HA8j+/MRP+0pVyIxgwC0B5OoI+cs5wwGlxvfMdinguUmA+VEWBZjQP6rEvd929qf4zpgpkfyjX7LFYCvoqqKJCOW052dLhgfYGUOqCfo7AGA==",
"2": "FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQTAhgkBwEkCAEwCUEEyT62Yt4qMI+MorlmQ/Hxh2CpLetznVknlAbhvYAwTexpSxp9GnhR09SrcUhz3mOb0eZa2TylqcnPBhHJ2Ih2RTcKNQEpARgkAmAwBBRT9HTfU5Nds+HA8j+/MRP+0pVyIzAFFOMCO8Jk7ZCknJquFGPtPzJiNqsDGDALQI/Kc38hQyK7AkT7/pN4hiYW3LoWKT3NA43+ssMJoVpDcaZ989GXBQKIbHKbBEXzUQ1J8wfL7l2pL0Z8Lso9JwgY",
"254": 4
}
],
"0/62/1": [
{
"1": "BIrruNo7r0gX6j6lq1dDi5zeK3jxcTavjt2o4adCCSCYtbxOakfb7C3GXqgV4LzulFSinbewmYkdqFBHqm5pxvU=",
"2": 4939,
"3": 2,
"4": 75,
"5": "",
"254": 4
}
],
"0/62/2": 5,
"0/62/3": 4,
"0/62/4": [
"FTABAQAkAgE3AyYUyakYCSYVj6gLsxgmBGoW1y8kBQA3BiYUyakYCSYVj6gLsxgkBwEkCAEwCUEEgYwxrTB+tyiEGfrRwjlXTG34MiQtJXbg5Qqd0ohdRW7MfwYY7vZiX/0h9hI8MqUralFaVPcnghAP0MSJm1YrqTcKNQEpARgkAmAwBBS3BS9aJzt+p6i28Nj+trB2Uu+vdzAFFLcFL1onO36nqLbw2P62sHZS7693GDALQMvassZTgvO/snCPohEojdKdGb2IpuRpSsu4HkM1JJQ9yFwhkyl0OOS2kvOVUNlfb2YnoJaH4L2jz0G9GVclBIgY",
"FTABAQAkAgE3AycUQhmZbaIbYjokFQIYJgRWZLcqJAUANwYnFEIZmW2iG2I6JBUCGCQHASQIATAJQQT2AlKGW/kOMjqayzeO0md523/fuhrhGEUU91uQpTiKo0I7wcPpKnmrwfQNPX6g0kEQl+VGaXa3e22lzfu5Tzp0Nwo1ASkBGCQCYDAEFOOMk13ScMKuT2hlaydi1yEJnhTqMAUU44yTXdJwwq5PaGVrJ2LXIQmeFOoYMAtAv2jJd1qd5miXbYesH1XrJ+vgyY0hzGuZ78N6Jw4Cb1oN1sLSpA+PNM0u7+hsEqcSvvn2eSV8EaRR+hg5YQjHDxg=",
"FTABD38O1NiPyscyxScZaN7uECQCATcDJhSoQfl2GCYEIqqfLyYFImy36zcGJhSoQfl2GCQHASQIATAJQQT5WrI2v6EgLRXdxlmZLlXX3rxeBe1C3NN/x9QV0tMVF+gH/FPSyq69dZKuoyskx0UOHcN20wdPffFuqgy/4uiaNwo1ASkBGCQCYDAEFM8XoLF/WKnSeqflSO5TQBQz4ObIMAUUzxegsX9YqdJ6p+VI7lNAFDPg5sgYMAtAHTWpsQPPwqR9gCqBGcDbPu2gusKeVuytcD5v7qK1/UjVr2/WGjMw3SYM10HWKdPTQZa2f3JI3uxv1nFnlcQpDBg=",
"FTABAQEkAgE3AyQUARgmBIAigScmBYAlTTo3BiQUARgkBwEkCAEwCUEEiuu42juvSBfqPqWrV0OLnN4rePFxNq+O3ajhp0IJIJi1vE5qR9vsLcZeqBXgvO6UVKKdt7CZiR2oUEeqbmnG9TcKNQEpARgkAmAwBBTjAjvCZO2QpJyarhRj7T8yYjarAzAFFOMCO8Jk7ZCknJquFGPtPzJiNqsDGDALQE7hTxTRg92QOxwA1hK3xv8DaxvxL71r6ZHcNRzug9wNnonJ+NC84SFKvKDxwcBxHYqFdIyDiDgwJNTQIBgasmIY"
],
"0/62/5": 4,
"0/62/65532": 0,
"0/62/65533": 1,
"0/62/65528": [1, 3, 5, 8],
"0/62/65529": [0, 2, 4, 6, 7, 9, 10, 11],
"0/62/65531": [0, 1, 2, 3, 4, 5, 65528, 65529, 65531, 65532, 65533],
"0/63/0": [],
"0/63/1": [],
"0/63/2": 4,
"0/63/3": 3,
"0/63/65532": 0,
"0/63/65533": 2,
"0/63/65528": [2, 5],
"0/63/65529": [0, 1, 3, 4],
"0/63/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"0/70/0": 300,
"0/70/1": 0,
"0/70/2": 1000,
"0/70/65532": 0,
"0/70/65533": 2,
"0/70/65528": [],
"0/70/65529": [],
"0/70/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"1/3/0": 0,
"1/3/1": 4,
"1/3/65532": 0,
"1/3/65533": 4,
"1/3/65528": [],
"1/3/65529": [0],
"1/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"1/29/0": [
{
"0": 770,
"1": 1
}
],
"1/29/1": [3, 29, 1026],
"1/29/2": [],
"1/29/3": [],
"1/29/65532": 0,
"1/29/65533": 2,
"1/29/65528": [],
"1/29/65529": [],
"1/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"1/1026/0": 2773,
"1/1026/1": -4000,
"1/1026/2": 12500,
"1/1026/65532": 0,
"1/1026/65533": 4,
"1/1026/65528": [],
"1/1026/65529": [],
"1/1026/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"2/3/0": 0,
"2/3/1": 4,
"2/3/65532": 0,
"2/3/65533": 4,
"2/3/65528": [],
"2/3/65529": [0],
"2/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"2/29/0": [
{
"0": 775,
"1": 1
}
],
"2/29/1": [3, 29, 1029],
"2/29/2": [],
"2/29/3": [],
"2/29/65532": 0,
"2/29/65533": 2,
"2/29/65528": [],
"2/29/65529": [],
"2/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"2/1029/0": 4472,
"2/1029/1": 0,
"2/1029/2": 10000,
"2/1029/65532": 0,
"2/1029/65533": 3,
"2/1029/65528": [],
"2/1029/65529": [],
"2/1029/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"3/3/0": 0,
"3/3/1": 4,
"3/3/65532": 0,
"3/3/65533": 4,
"3/3/65528": [],
"3/3/65529": [0],
"3/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"3/29/0": [
{
"0": 15,
"1": 3
}
],
"3/29/1": [3, 29, 59],
"3/29/2": [],
"3/29/3": [],
"3/29/4": [
{
"0": null,
"1": 7,
"2": 1
},
{
"0": null,
"1": 8,
"2": 2
}
],
"3/29/65532": 1,
"3/29/65533": 2,
"3/29/65528": [],
"3/29/65529": [],
"3/29/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
"3/59/0": 2,
"3/59/1": 0,
"3/59/2": 2,
"3/59/65532": 30,
"3/59/65533": 1,
"3/59/65528": [],
"3/59/65529": [],
"3/59/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"4/3/0": 0,
"4/3/1": 4,
"4/3/65532": 0,
"4/3/65533": 4,
"4/3/65528": [],
"4/3/65529": [0],
"4/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"4/29/0": [
{
"0": 15,
"1": 3
}
],
"4/29/1": [3, 29, 59],
"4/29/2": [],
"4/29/3": [],
"4/29/4": [
{
"0": null,
"1": 7,
"2": 2
},
{
"0": null,
"1": 8,
"2": 4
}
],
"4/29/65532": 1,
"4/29/65533": 2,
"4/29/65528": [],
"4/29/65529": [],
"4/29/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
"4/59/0": 2,
"4/59/1": 0,
"4/59/2": 2,
"4/59/65532": 30,
"4/59/65533": 1,
"4/59/65528": [],
"4/59/65529": [],
"4/59/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"5/3/0": 0,
"5/3/1": 4,
"5/3/65532": 0,
"5/3/65533": 4,
"5/3/65528": [],
"5/3/65529": [0],
"5/3/65531": [0, 1, 65528, 65529, 65531, 65532, 65533],
"5/29/0": [
{
"0": 15,
"1": 3
}
],
"5/29/1": [3, 29, 59],
"5/29/2": [],
"5/29/3": [],
"5/29/4": [
{
"0": null,
"1": 7,
"2": 3
},
{
"0": null,
"1": 8,
"2": 3
}
],
"5/29/65532": 1,
"5/29/65533": 2,
"5/29/65528": [],
"5/29/65529": [],
"5/29/65531": [0, 1, 2, 3, 4, 65528, 65529, 65531, 65532, 65533],
"5/59/0": 2,
"5/59/1": 0,
"5/59/2": 2,
"5/59/65532": 30,
"5/59/65533": 1,
"5/59/65528": [],
"5/59/65529": [],
"5/59/65531": [0, 1, 2, 65528, 65529, 65531, 65532, 65533],
"6/29/0": [
{
"0": 17,
"1": 1
}
],
"6/29/1": [29, 47],
"6/29/2": [],
"6/29/3": [],
"6/29/65532": 0,
"6/29/65533": 2,
"6/29/65528": [],
"6/29/65529": [],
"6/29/65531": [0, 1, 2, 3, 65528, 65529, 65531, 65532, 65533],
"6/47/0": 1,
"6/47/1": 0,
"6/47/2": "Battery",
"6/47/11": 3120,
"6/47/12": 200,
"6/47/14": 0,
"6/47/15": false,
"6/47/16": 2,
"6/47/19": "CR2450",
"6/47/25": 2,
"6/47/31": [],
"6/47/65532": 10,
"6/47/65533": 2,
"6/47/65528": [],
"6/47/65529": [],
"6/47/65531": [
0, 1, 2, 11, 12, 14, 15, 16, 19, 25, 31, 65528, 65529, 65531, 65532, 65533
]
},
"attribute_subscriptions": []
}

View File

@@ -193,6 +193,55 @@
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_motion_p2][button.aqara_motion_and_light_sensor_p2_identify_2-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.aqara_motion_and_light_sensor_p2_identify_2',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (2)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-0000000000000053-MatterNodeDevice-2-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[aqara_motion_p2][button.aqara_motion_and_light_sensor_p2_identify_2-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Aqara Motion and Light Sensor P2 Identify (2)',
}),
'context': <ANY>,
'entity_id': 'button.aqara_motion_and_light_sensor_p2_identify_2',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_presence_fp300][button.presence_multi_sensor_fp300_1_identify_1-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
@@ -242,7 +291,7 @@
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_sensor_w100][button.climate_sensor_w100_identify_1-entry]
# name: test_buttons[aqara_presence_fp300][button.presence_multi_sensor_fp300_1_identify_2-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
@@ -255,7 +304,7 @@
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.climate_sensor_w100_identify_1',
'entity_id': 'button.presence_multi_sensor_fp300_1_identify_2',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
@@ -267,24 +316,122 @@
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (1)',
'original_name': 'Identify (2)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-1-IdentifyButton-3-1',
'unique_id': '00000000000004D2-00000000000000CD-MatterNodeDevice-2-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[aqara_sensor_w100][button.climate_sensor_w100_identify_1-state]
# name: test_buttons[aqara_presence_fp300][button.presence_multi_sensor_fp300_1_identify_2-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Climate Sensor W100 Identify (1)',
'friendly_name': 'Presence Multi-Sensor FP300 1 Identify (2)',
}),
'context': <ANY>,
'entity_id': 'button.climate_sensor_w100_identify_1',
'entity_id': 'button.presence_multi_sensor_fp300_1_identify_2',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_presence_fp300][button.presence_multi_sensor_fp300_1_identify_3-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.presence_multi_sensor_fp300_1_identify_3',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (3)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-00000000000000CD-MatterNodeDevice-3-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[aqara_presence_fp300][button.presence_multi_sensor_fp300_1_identify_3-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Presence Multi-Sensor FP300 1 Identify (3)',
}),
'context': <ANY>,
'entity_id': 'button.presence_multi_sensor_fp300_1_identify_3',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_presence_fp300][button.presence_multi_sensor_fp300_1_identify_4-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.presence_multi_sensor_fp300_1_identify_4',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (4)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-00000000000000CD-MatterNodeDevice-4-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[aqara_presence_fp300][button.presence_multi_sensor_fp300_1_identify_4-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Presence Multi-Sensor FP300 1 Identify (4)',
}),
'context': <ANY>,
'entity_id': 'button.presence_multi_sensor_fp300_1_identify_4',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
@@ -340,6 +487,55 @@
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_thermostat_w500][button.floor_heating_thermostat_identify_2-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.floor_heating_thermostat_identify_2',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (2)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-0000000000000064-MatterNodeDevice-2-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[aqara_thermostat_w500][button.floor_heating_thermostat_identify_2-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Floor Heating Thermostat Identify (2)',
}),
'context': <ANY>,
'entity_id': 'button.floor_heating_thermostat_identify_2',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[aqara_u200][button.aqara_smart_lock_u200_identify-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
@@ -634,6 +830,55 @@
'state': 'unknown',
})
# ---
# name: test_buttons[eve_energy_20ecn4101][button.eve_energy_20ecn4101_identify_bottom-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.eve_energy_20ecn4101_identify_bottom',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (bottom)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-00000000000000C7-MatterNodeDevice-2-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[eve_energy_20ecn4101][button.eve_energy_20ecn4101_identify_bottom-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Eve Energy 20ECN4101 Identify (bottom)',
}),
'context': <ANY>,
'entity_id': 'button.eve_energy_20ecn4101_identify_bottom',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[eve_energy_20ecn4101][button.eve_energy_20ecn4101_identify_top-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
@@ -879,6 +1124,55 @@
'state': 'unknown',
})
# ---
# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_2-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.eve_weather_identify_2',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (2)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000001D-MatterNodeDevice-2-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[eve_weather_sensor][button.eve_weather_identify_2-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Eve Weather Identify (2)',
}),
'context': <ANY>,
'entity_id': 'button.eve_weather_identify_2',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[extended_color_light][button.mock_extended_color_light_identify-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
@@ -1653,6 +1947,251 @@
'state': 'unknown',
})
# ---
# name: test_buttons[multi_endpoint_light][button.inovelli_identify_2-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.inovelli_identify_2',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (2)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-2-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[multi_endpoint_light][button.inovelli_identify_2-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Inovelli Identify (2)',
}),
'context': <ANY>,
'entity_id': 'button.inovelli_identify_2',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[multi_endpoint_light][button.inovelli_identify_6-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.inovelli_identify_6',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (6)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-6-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[multi_endpoint_light][button.inovelli_identify_6-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Inovelli Identify (6)',
}),
'context': <ANY>,
'entity_id': 'button.inovelli_identify_6',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[multi_endpoint_light][button.inovelli_identify_config-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.inovelli_identify_config',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (Config)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-5-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[multi_endpoint_light][button.inovelli_identify_config-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Inovelli Identify (Config)',
}),
'context': <ANY>,
'entity_id': 'button.inovelli_identify_config',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[multi_endpoint_light][button.inovelli_identify_down-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.inovelli_identify_down',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (Down)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-4-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[multi_endpoint_light][button.inovelli_identify_down-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Inovelli Identify (Down)',
}),
'context': <ANY>,
'entity_id': 'button.inovelli_identify_down',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[multi_endpoint_light][button.inovelli_identify_up-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'button',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'button.inovelli_identify_up',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <ButtonDeviceClass.IDENTIFY: 'identify'>,
'original_icon': None,
'original_name': 'Identify (Up)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-00000000000000C5-MatterNodeDevice-3-IdentifyButton-3-1',
'unit_of_measurement': None,
})
# ---
# name: test_buttons[multi_endpoint_light][button.inovelli_identify_up-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'identify',
'friendly_name': 'Inovelli Identify (Up)',
}),
'context': <ANY>,
'entity_id': 'button.inovelli_identify_up',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_buttons[occupancy_sensor][button.mock_occupancy_sensor_identify-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -1,193 +1,4 @@
# serializer version: 1
# name: test_events[aqara_sensor_w100][event.climate_sensor_w100_button_3-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'event_types': list([
'multi_press_1',
'multi_press_2',
'long_press',
'long_release',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'event',
'entity_category': None,
'entity_id': 'event.climate_sensor_w100_button_3',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <EventDeviceClass.BUTTON: 'button'>,
'original_icon': None,
'original_name': 'Button (3)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'button',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-3-GenericSwitch-59-1',
'unit_of_measurement': None,
})
# ---
# name: test_events[aqara_sensor_w100][event.climate_sensor_w100_button_3-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'button',
'event_type': None,
'event_types': list([
'multi_press_1',
'multi_press_2',
'long_press',
'long_release',
]),
'friendly_name': 'Climate Sensor W100 Button (3)',
}),
'context': <ANY>,
'entity_id': 'event.climate_sensor_w100_button_3',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_events[aqara_sensor_w100][event.climate_sensor_w100_button_4-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'event_types': list([
'multi_press_1',
'multi_press_2',
'long_press',
'long_release',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'event',
'entity_category': None,
'entity_id': 'event.climate_sensor_w100_button_4',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <EventDeviceClass.BUTTON: 'button'>,
'original_icon': None,
'original_name': 'Button (4)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'button',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-4-GenericSwitch-59-1',
'unit_of_measurement': None,
})
# ---
# name: test_events[aqara_sensor_w100][event.climate_sensor_w100_button_4-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'button',
'event_type': None,
'event_types': list([
'multi_press_1',
'multi_press_2',
'long_press',
'long_release',
]),
'friendly_name': 'Climate Sensor W100 Button (4)',
}),
'context': <ANY>,
'entity_id': 'event.climate_sensor_w100_button_4',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_events[aqara_sensor_w100][event.climate_sensor_w100_button_5-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'event_types': list([
'multi_press_1',
'multi_press_2',
'long_press',
'long_release',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'event',
'entity_category': None,
'entity_id': 'event.climate_sensor_w100_button_5',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <EventDeviceClass.BUTTON: 'button'>,
'original_icon': None,
'original_name': 'Button (5)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'button',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-5-GenericSwitch-59-1',
'unit_of_measurement': None,
})
# ---
# name: test_events[aqara_sensor_w100][event.climate_sensor_w100_button_5-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'button',
'event_type': None,
'event_types': list([
'multi_press_1',
'multi_press_2',
'long_press',
'long_release',
]),
'friendly_name': 'Climate Sensor W100 Button (5)',
}),
'context': <ANY>,
'entity_id': 'event.climate_sensor_w100_button_5',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'unknown',
})
# ---
# name: test_events[generic_switch][event.mock_generic_switch_button-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -1944,428 +1944,6 @@
'state': '27.94',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_battery-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.climate_sensor_w100_battery',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <SensorDeviceClass.BATTERY: 'battery'>,
'original_icon': None,
'original_name': 'Battery',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-6-PowerSource-47-12',
'unit_of_measurement': '%',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_battery-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'battery',
'friendly_name': 'Climate Sensor W100 Battery',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_battery',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '100',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_battery_type-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.climate_sensor_w100_battery_type',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Battery type',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'battery_replacement_description',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-6-PowerSourceBatReplacementDescription-47-19',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_battery_type-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Climate Sensor W100 Battery type',
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_battery_type',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'CR2450',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_battery_voltage-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.climate_sensor_w100_battery_voltage',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
'sensor': dict({
'suggested_display_precision': 0,
}),
'sensor.private': dict({
'suggested_unit_of_measurement': <UnitOfElectricPotential.VOLT: 'V'>,
}),
}),
'original_device_class': <SensorDeviceClass.VOLTAGE: 'voltage'>,
'original_icon': None,
'original_name': 'Battery voltage',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'battery_voltage',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-6-PowerSourceBatVoltage-47-11',
'unit_of_measurement': <UnitOfElectricPotential.VOLT: 'V'>,
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_battery_voltage-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'voltage',
'friendly_name': 'Climate Sensor W100 Battery voltage',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': <UnitOfElectricPotential.VOLT: 'V'>,
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_battery_voltage',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '3.12',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_current_switch_position_3-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.climate_sensor_w100_current_switch_position_3',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Current switch position (3)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'switch_current_position',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-3-SwitchCurrentPosition-59-1',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_current_switch_position_3-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Climate Sensor W100 Current switch position (3)',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_current_switch_position_3',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_current_switch_position_4-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.climate_sensor_w100_current_switch_position_4',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Current switch position (4)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'switch_current_position',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-4-SwitchCurrentPosition-59-1',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_current_switch_position_4-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Climate Sensor W100 Current switch position (4)',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_current_switch_position_4',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_current_switch_position_5-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': <EntityCategory.DIAGNOSTIC: 'diagnostic'>,
'entity_id': 'sensor.climate_sensor_w100_current_switch_position_5',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Current switch position (5)',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'switch_current_position',
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-5-SwitchCurrentPosition-59-1',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_current_switch_position_5-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Climate Sensor W100 Current switch position (5)',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_current_switch_position_5',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '0',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_humidity-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.climate_sensor_w100_humidity',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <SensorDeviceClass.HUMIDITY: 'humidity'>,
'original_icon': None,
'original_name': 'Humidity',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-2-HumiditySensor-1029-0',
'unit_of_measurement': '%',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_humidity-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'humidity',
'friendly_name': 'Climate Sensor W100 Humidity',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_humidity',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '44.72',
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_temperature-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.climate_sensor_w100_temperature',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
'sensor': dict({
'suggested_display_precision': 1,
}),
}),
'original_device_class': <SensorDeviceClass.TEMPERATURE: 'temperature'>,
'original_icon': None,
'original_name': 'Temperature',
'platform': 'matter',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '00000000000004D2-000000000000004B-MatterNodeDevice-1-TemperatureSensor-1026-0',
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
})
# ---
# name: test_sensors[aqara_sensor_w100][sensor.climate_sensor_w100_temperature-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'temperature',
'friendly_name': 'Climate Sensor W100 Temperature',
'state_class': <SensorStateClass.MEASUREMENT: 'measurement'>,
'unit_of_measurement': <UnitOfTemperature.CELSIUS: '°C'>,
}),
'context': <ANY>,
'entity_id': 'sensor.climate_sensor_w100_temperature',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '27.73',
})
# ---
# name: test_sensors[aqara_thermostat_w500][sensor.floor_heating_thermostat_energy-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -239,12 +239,11 @@ async def test_pump(
assert state
assert state.state == "off"
# Initial state: kRunning bit only (no fault bits) should be off
# PumpStatus --> DeviceFault bit
state = hass.states.get("binary_sensor.mock_pump_problem")
assert state
assert state.state == "off"
assert state.state == "unknown"
# Set DeviceFault bit
set_node_attribute(matter_node, 1, 512, 16, 1)
await trigger_subscription_callback(hass, matter_client)
@@ -252,14 +251,7 @@ async def test_pump(
assert state
assert state.state == "on"
# Clear all bits - problem sensor should be off
set_node_attribute(matter_node, 1, 512, 16, 0)
await trigger_subscription_callback(hass, matter_client)
state = hass.states.get("binary_sensor.mock_pump_problem")
assert state
assert state.state == "off"
# Set SupplyFault bit
# PumpStatus --> SupplyFault bit
set_node_attribute(matter_node, 1, 512, 16, 2)
await trigger_subscription_callback(hass, matter_client)
@@ -278,7 +270,6 @@ async def test_dishwasher_alarm(
state = hass.states.get("binary_sensor.dishwasher_door_alarm")
assert state
# set DoorAlarm alarm
set_node_attribute(matter_node, 1, 93, 2, 4)
await trigger_subscription_callback(hass, matter_client)
@@ -286,22 +277,6 @@ async def test_dishwasher_alarm(
assert state
assert state.state == "on"
# clear DoorAlarm alarm
set_node_attribute(matter_node, 1, 93, 2, 0)
await trigger_subscription_callback(hass, matter_client)
state = hass.states.get("binary_sensor.dishwasher_inflow_alarm")
assert state
assert state.state == "off"
# set InflowError alarm
set_node_attribute(matter_node, 1, 93, 2, 1)
await trigger_subscription_callback(hass, matter_client)
state = hass.states.get("binary_sensor.dishwasher_inflow_alarm")
assert state
assert state.state == "on"
@pytest.mark.parametrize("node_fixture", ["valve"])
async def test_water_valve(
@@ -323,7 +298,7 @@ async def test_water_valve(
assert state
assert state.state == "off"
# ValveFault general_fault test (bit 0)
# ValveFault general_fault test
set_node_attribute(matter_node, 1, 129, 9, 1)
await trigger_subscription_callback(hass, matter_client)
@@ -339,7 +314,7 @@ async def test_water_valve(
assert state
assert state.state == "off"
# ValveFault valve_blocked test (bit 1)
# ValveFault valve_blocked test
set_node_attribute(matter_node, 1, 129, 9, 2)
await trigger_subscription_callback(hass, matter_client)
@@ -355,7 +330,7 @@ async def test_water_valve(
assert state
assert state.state == "off"
# ValveFault valve_leaking test (bit 2)
# ValveFault valve_leaking test
set_node_attribute(matter_node, 1, 129, 9, 4)
await trigger_subscription_callback(hass, matter_client)
@@ -371,22 +346,6 @@ async def test_water_valve(
assert state
assert state.state == "on"
# ValveFault multiple faults test (bits 0 and 2)
set_node_attribute(matter_node, 1, 129, 9, 5)
await trigger_subscription_callback(hass, matter_client)
state = hass.states.get("binary_sensor.valve_general_fault")
assert state
assert state.state == "on"
state = hass.states.get("binary_sensor.valve_valve_blocked")
assert state
assert state.state == "off"
state = hass.states.get("binary_sensor.valve_valve_leaking")
assert state
assert state.state == "on"
@pytest.mark.parametrize("node_fixture", ["thermostat"])
async def test_thermostat_occupancy(

View File

@@ -63,7 +63,7 @@
"recipeCategory": [],
"tags": [],
"tools": [],
"rating": 5.0,
"rating": null,
"orgURL": "https://tastesbetterfromscratch.com/roast-chicken/",
"dateAdded": "2024-01-21",
"dateUpdated": "2024-01-21T15:29:25.664540",

View File

@@ -71,7 +71,7 @@
"recipeCategory": [],
"tags": [],
"tools": [],
"rating": 5.0,
"rating": null,
"orgURL": "https://www.papillesetpupilles.fr/2018/10/patates-douces-au-four.html/",
"dateAdded": "2024-01-21",
"dateUpdated": "2024-01-21T10:27:39.409746",

View File

@@ -25,7 +25,6 @@
'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/',
'perform_time': '1 Hour 20 Minutes',
'prep_time': '15 Minutes',
'rating': 5.0,
'recipe_id': '5b055066-d57d-4fd0-8dfd-a2c2f07b36f1',
'recipe_yield': '6 servings',
'slug': 'roast-chicken',
@@ -56,7 +55,6 @@
'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93',
'recipe_yield': '2 servings',
'slug': 'zoete-aardappel-curry-traybake',
@@ -85,7 +83,6 @@
'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno',
'perform_time': '50 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': 'f79f7e9d-4b58-4930-a586-2b127f16ee34',
'recipe_yield': '6 servings',
'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno-1',
@@ -114,7 +111,6 @@
'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/',
'perform_time': '20 Minutes',
'prep_time': '40 Minutes',
'rating': None,
'recipe_id': '47595e4c-52bc-441d-b273-3edf4258806d',
'recipe_yield': '4 servings',
'slug': 'greek-turkey-meatballs-with-lemon-orzo-creamy-feta-yogurt-sauce',
@@ -143,7 +139,6 @@
'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963',
'perform_time': '1 Hour',
'prep_time': '15 Minutes',
'rating': 3.0,
'recipe_id': '92635fd0-f2dc-4e78-a6e4-ecd556ad361f',
'recipe_yield': '12 servings',
'slug': 'pampered-chef-double-chocolate-mocha-trifle',
@@ -172,7 +167,6 @@
'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/',
'perform_time': '22 Minutes',
'prep_time': '8 Minutes',
'rating': 5.0,
'recipe_id': '8bdd3656-5e7e-45d3-a3c4-557390846a22',
'recipe_yield': '24 servings',
'slug': 'cheeseburger-sliders-easy-30-min-recipe',
@@ -201,7 +195,6 @@
'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe',
'perform_time': '3 Hours 10 Minutes',
'prep_time': '5 Minutes',
'rating': None,
'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e',
'recipe_yield': '6 servings',
'slug': 'all-american-beef-stew-recipe',
@@ -230,7 +223,6 @@
'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/',
'perform_time': '20 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159',
'recipe_yield': '4 servings',
'slug': 'einfacher-nudelauflauf-mit-brokkoli',
@@ -259,7 +251,6 @@
'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/',
'perform_time': '15 Minutes',
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': '25b814f2-d9bf-4df0-b40d-d2f2457b4317',
'recipe_yield': '2 servings',
'slug': 'miso-udon-noodles-with-spinach-and-tofu',
@@ -288,7 +279,6 @@
'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon',
'perform_time': '2 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': '55c88810-4cf1-4d86-ae50-63b15fd173fb',
'recipe_yield': '12 servings',
'slug': 'mousse-de-saumon',
@@ -333,7 +323,6 @@
'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos',
'perform_time': '7 Minutes',
'prep_time': '3 Minutes',
'rating': None,
'recipe_id': 'e360a0cc-18b0-4a84-a91b-8aa59e2451c9',
'recipe_yield': '2 servings',
'slug': 'receta-de-pollo-al-curry-en-10-minutos-con-video-incluido',
@@ -362,7 +351,6 @@
'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx',
'perform_time': '4 Hours',
'prep_time': '1 Hour',
'rating': None,
'recipe_id': '9c7b8aee-c93c-4b1b-ab48-2625d444743a',
'recipe_yield': '4 servings',
'slug': 'boeuf-bourguignon-la-vraie-recette-2',
@@ -391,7 +379,6 @@
'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe',
'perform_time': '3 Hours 10 Minutes',
'prep_time': '5 Minutes',
'rating': None,
'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e',
'recipe_yield': '6 servings',
'slug': 'all-american-beef-stew-recipe',
@@ -422,7 +409,6 @@
'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/',
'perform_time': '20 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159',
'recipe_yield': '4 servings',
'slug': 'einfacher-nudelauflauf-mit-brokkoli',

View File

@@ -12,7 +12,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'e82f5449-c33b-437c-b712-337587199264',
'recipe_yield': None,
'slug': 'tu6y',
@@ -28,7 +27,6 @@
'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno',
'perform_time': '50 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': 'f79f7e9d-4b58-4930-a586-2b127f16ee34',
'recipe_yield': '6 servings',
'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno-1',
@@ -44,7 +42,6 @@
'original_url': 'https://www.papillesetpupilles.fr/2018/10/patates-douces-au-four.html/',
'perform_time': None,
'prep_time': None,
'rating': 5.0,
'recipe_id': '90097c8b-9d80-468a-b497-73957ac0cd8b',
'recipe_yield': '',
'slug': 'patates-douces-au-four-1',
@@ -60,7 +57,6 @@
'original_url': 'https://www.papillesetpupilles.fr/2018/10/patates-douces-au-four.html/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '98845807-9365-41fd-acd1-35630b468c27',
'recipe_yield': '',
'slug': 'sweet-potatoes',
@@ -76,7 +72,6 @@
'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno',
'perform_time': '50 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': '40c227e0-3c7e-41f7-866d-5de04eaecdd7',
'recipe_yield': '6 servings',
'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno',
@@ -92,7 +87,6 @@
'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx',
'perform_time': '4 Hours',
'prep_time': '1 Hour',
'rating': None,
'recipe_id': '9c7b8aee-c93c-4b1b-ab48-2625d444743a',
'recipe_yield': '4 servings',
'slug': 'boeuf-bourguignon-la-vraie-recette-2',
@@ -108,7 +102,6 @@
'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx',
'perform_time': '4 Hours',
'prep_time': '1 Hour',
'rating': None,
'recipe_id': 'fc42c7d1-7b0f-4e04-b88a-dbd80b81540b',
'recipe_yield': '4 servings',
'slug': 'boeuf-bourguignon-la-vraie-recette-1',
@@ -124,7 +117,6 @@
'original_url': 'https://biancazapatka.com/de/erdnussbutter-schoko-bananenbrot/',
'perform_time': '55 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': '89e63d72-7a51-4cef-b162-2e45035d0a91',
'recipe_yield': '14 servings',
'slug': 'veganes-marmor-bananenbrot-mit-erdnussbutter',
@@ -140,7 +132,6 @@
'original_url': 'https://kuechenchaotin.de/pasta-mit-tomaten-knoblauch-basilikum/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'eab64457-97ba-4d6c-871c-cb1c724ccb51',
'recipe_yield': '',
'slug': 'pasta-mit-tomaten-knoblauch-und-basilikum-einfach-und-genial-kuechenchaotin',
@@ -156,7 +147,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '12439e3d-3c1c-4dcc-9c6e-4afcea2a0542',
'recipe_yield': None,
'slug': 'test123',
@@ -172,7 +162,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '6567f6ec-e410-49cb-a1a5-d08517184e78',
'recipe_yield': None,
'slug': 'bureeto',
@@ -188,7 +177,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'f7737d17-161c-4008-88d4-dd2616778cd0',
'recipe_yield': None,
'slug': 'subway-double-cookies',
@@ -204,7 +192,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '1904b717-4a8b-4de9-8909-56958875b5f4',
'recipe_yield': None,
'slug': 'qwerty12345',
@@ -220,7 +207,6 @@
'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/',
'perform_time': '22 Minutes',
'prep_time': '8 Minutes',
'rating': 5.0,
'recipe_id': '8bdd3656-5e7e-45d3-a3c4-557390846a22',
'recipe_yield': '24 servings',
'slug': 'cheeseburger-sliders-easy-30-min-recipe',
@@ -236,7 +222,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '8a30d31d-aa14-411e-af0c-6b61a94f5291',
'recipe_yield': '4',
'slug': 'meatloaf',
@@ -252,7 +237,6 @@
'original_url': 'https://www.chefkoch.de/rezepte/937641199437984/Richtig-rheinischer-Sauerbraten.html',
'perform_time': '2 Hours 20 Minutes',
'prep_time': '1 Hour',
'rating': 3.0,
'recipe_id': 'f2f7880b-1136-436f-91b7-129788d8c117',
'recipe_yield': '4 servings',
'slug': 'richtig-rheinischer-sauerbraten',
@@ -268,7 +252,6 @@
'original_url': 'https://www.chefkoch.de/rezepte/2307761368177614/Orientalischer-Gemuese-Haehnchen-Eintopf.html',
'perform_time': '20 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': 'cf634591-0f82-4254-8e00-2f7e8b0c9022',
'recipe_yield': '6 servings',
'slug': 'orientalischer-gemuse-hahnchen-eintopf',
@@ -284,7 +267,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '05208856-d273-4cc9-bcfa-e0215d57108d',
'recipe_yield': '4',
'slug': 'test-20240121',
@@ -300,7 +282,6 @@
'original_url': 'https://www.lekkerensimpel.com/loempia-bowl/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '145eeb05-781a-4eb0-a656-afa8bc8c0164',
'recipe_yield': '',
'slug': 'loempia-bowl',
@@ -316,7 +297,6 @@
'original_url': 'https://thehappypear.ie/aquafaba-chocolate-mousse/',
'perform_time': None,
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': '5c6532aa-ad84-424c-bc05-c32d50430fe4',
'recipe_yield': '6 servings',
'slug': '5-ingredient-chocolate-mousse',
@@ -332,7 +312,6 @@
'original_url': 'https://www.chefkoch.de/rezepte/1208161226570428/Der-perfekte-Pfannkuchen-gelingt-einfach-immer.html',
'perform_time': '10 Minutes',
'prep_time': '5 Minutes',
'rating': None,
'recipe_id': 'f2e684f2-49e0-45ee-90de-951344472f1c',
'recipe_yield': '4 servings',
'slug': 'der-perfekte-pfannkuchen-gelingt-einfach-immer',
@@ -348,7 +327,6 @@
'original_url': 'https://www.besondersgut.ch/dinkel-sauerteigbrot/',
'perform_time': '35min',
'prep_time': '1h',
'rating': None,
'recipe_id': 'cf239441-b75d-4dea-a48e-9d99b7cb5842',
'recipe_yield': '1',
'slug': 'dinkel-sauerteigbrot',
@@ -364,7 +342,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '2673eb90-6d78-4b95-af36-5db8c8a6da37',
'recipe_yield': None,
'slug': 'test-234234',
@@ -380,7 +357,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '0a723c54-af53-40e9-a15f-c87aae5ac688',
'recipe_yield': None,
'slug': 'test-243',
@@ -396,7 +372,6 @@
'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/',
'perform_time': '20 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159',
'recipe_yield': '4 servings',
'slug': 'einfacher-nudelauflauf-mit-brokkoli',
@@ -423,7 +398,6 @@
'original_url': 'https://www.przepisy.pl/przepis/tarta-cytrynowa-z-beza',
'perform_time': None,
'prep_time': '1 Hour',
'rating': None,
'recipe_id': '9d3cb303-a996-4144-948a-36afaeeef554',
'recipe_yield': '8 servings',
'slug': 'tarta-cytrynowa-z-beza',
@@ -439,7 +413,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '77f05a49-e869-4048-aa62-0d8a1f5a8f1c',
'recipe_yield': None,
'slug': 'martins-test-recipe',
@@ -455,7 +428,6 @@
'original_url': 'https://aniagotuje.pl/przepis/muffinki-czekoladowe',
'perform_time': '30 Minutes',
'prep_time': '25 Minutes',
'rating': None,
'recipe_id': '75a90207-9c10-4390-a265-c47a4b67fd69',
'recipe_yield': '12',
'slug': 'muffinki-czekoladowe',
@@ -471,7 +443,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '4320ba72-377b-4657-8297-dce198f24cdf',
'recipe_yield': None,
'slug': 'my-test-recipe',
@@ -487,7 +458,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '98dac844-31ee-426a-b16c-fb62a5dd2816',
'recipe_yield': None,
'slug': 'my-test-receipe',
@@ -503,7 +473,6 @@
'original_url': 'https://www.papillesetpupilles.fr/2018/10/patates-douces-au-four.html/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'c3c8f207-c704-415d-81b1-da9f032cf52f',
'recipe_yield': '',
'slug': 'patates-douces-au-four',
@@ -519,7 +488,6 @@
'original_url': 'https://sallysbakingaddiction.com/homemade-pizza-crust-recipe/',
'perform_time': '15 Minutes',
'prep_time': '2 Hours 15 Minutes',
'rating': None,
'recipe_id': '1edb2f6e-133c-4be0-b516-3c23625a97ec',
'recipe_yield': '2 servings',
'slug': 'easy-homemade-pizza-dough',
@@ -535,7 +503,6 @@
'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe',
'perform_time': '3 Hours 10 Minutes',
'prep_time': '5 Minutes',
'rating': None,
'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e',
'recipe_yield': '6 servings',
'slug': 'all-american-beef-stew-recipe',
@@ -551,7 +518,6 @@
'original_url': 'https://www.seriouseats.com/serious-eats-halal-cart-style-chicken-and-rice-white-sauce-recipe',
'perform_time': '55 Minutes',
'prep_time': '20 Minutes',
'rating': 5.0,
'recipe_id': '6530ea6e-401e-4304-8a7a-12162ddf5b9c',
'recipe_yield': '4 servings',
'slug': 'serious-eats-halal-cart-style-chicken-and-rice-with-white-sauce',
@@ -567,7 +533,6 @@
'original_url': 'https://www.chefkoch.de/rezepte/1062121211526182/Schnelle-Kaesespaetzle.html',
'perform_time': '30 Minutes',
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': 'c496cf9c-1ece-448a-9d3f-ef772f078a4e',
'recipe_yield': '4 servings',
'slug': 'schnelle-kasespatzle',
@@ -583,7 +548,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '49aa6f42-6760-4adf-b6cd-59592da485c3',
'recipe_yield': None,
'slug': 'taco',
@@ -599,7 +563,6 @@
'original_url': 'https://www.ica.se/recept/vodkapasta-729011/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '6402a253-2baa-460d-bf4f-b759bb655588',
'recipe_yield': '4 servings',
'slug': 'vodkapasta',
@@ -615,7 +578,6 @@
'original_url': 'https://www.ica.se/recept/vodkapasta-729011/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '4f54e9e1-f21d-40ec-a135-91e633dfb733',
'recipe_yield': '4 servings',
'slug': 'vodkapasta2',
@@ -631,7 +593,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'e1a3edb0-49a0-49a3-83e3-95554e932670',
'recipe_yield': '1',
'slug': 'rub',
@@ -647,7 +608,6 @@
'original_url': 'https://www.justapinch.com/recipes/dessert/cookies/banana-bread-chocolate-chip-cookies.html',
'perform_time': '15 Minutes',
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': '1a0f4e54-db5b-40f1-ab7e-166dab5f6523',
'recipe_yield': '',
'slug': 'banana-bread-chocolate-chip-cookies',
@@ -663,7 +623,6 @@
'original_url': 'https://chefjeanpierre.com/recipes/soups/creamy-cauliflower-bisque/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '447acae6-3424-4c16-8c26-c09040ad8041',
'recipe_yield': '',
'slug': 'cauliflower-bisque-recipe-with-cheddar-cheese',
@@ -679,7 +638,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '864136a3-27b0-4f3b-a90f-486f42d6df7a',
'recipe_yield': '',
'slug': 'prova',
@@ -695,7 +653,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'c7ccf4c7-c5f4-4191-a79b-1a49d068f6a4',
'recipe_yield': None,
'slug': 'pate-au-beurre-1',
@@ -711,7 +668,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'd01865c3-0f18-4e8d-84c0-c14c345fdf9c',
'recipe_yield': None,
'slug': 'pate-au-beurre',
@@ -727,7 +683,6 @@
'original_url': 'https://saltpepperskillet.com/recipes/sous-vide-cheesecake/',
'perform_time': '1 Hour 30 Minutes',
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': '2cec2bb2-19b6-40b8-a36c-1a76ea29c517',
'recipe_yield': '4 servings',
'slug': 'sous-vide-cheesecake-recipe',
@@ -743,7 +698,6 @@
'original_url': 'https://recipes.anovaculinary.com/recipe/the-bomb-cheesecakes',
'perform_time': None,
'prep_time': '30 Minutes',
'rating': None,
'recipe_id': '8e0e4566-9caf-4c2e-a01c-dcead23db86b',
'recipe_yield': '10 servings',
'slug': 'the-bomb-mini-cheesecakes',
@@ -759,7 +713,6 @@
'original_url': 'https://www.chefkoch.de/rezepte/2109501340136606/Tagliatelle-al-Salmone.html',
'perform_time': '15 Minutes',
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': 'a051eafd-9712-4aee-a8e5-0cd10a6772ee',
'recipe_yield': '4 servings',
'slug': 'tagliatelle-al-salmone',
@@ -775,7 +728,6 @@
'original_url': 'https://www.backenmachtgluecklich.de/rezepte/death-by-chocolate-kuchen.html',
'perform_time': '25 Minutes',
'prep_time': '25 Minutes',
'rating': None,
'recipe_id': '093d51e9-0823-40ad-8e0e-a1d5790dd627',
'recipe_yield': '1 serving',
'slug': 'death-by-chocolate',
@@ -791,7 +743,6 @@
'original_url': 'https://www.fernweh-koch.de/palak-dal-indischer-spinat-linsen-rezept/',
'perform_time': '20 Minutes',
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': '2d1f62ec-4200-4cfd-987e-c75755d7607c',
'recipe_yield': '4 servings',
'slug': 'palak-dal-rezept-aus-indien',
@@ -807,7 +758,6 @@
'original_url': 'https://www.chefkoch.de/rezepte/74441028021809/Tortelline-a-la-Romana.html',
'perform_time': None,
'prep_time': '30 Minutes',
'rating': None,
'recipe_id': '973dc36d-1661-49b4-ad2d-0b7191034fb3',
'recipe_yield': '4 servings',
'slug': 'tortelline-a-la-romana',
@@ -831,7 +781,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'e82f5449-c33b-437c-b712-337587199264',
'recipe_yield': None,
'slug': 'tu6y',
@@ -847,7 +796,6 @@
'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno',
'perform_time': '50 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': 'f79f7e9d-4b58-4930-a586-2b127f16ee34',
'recipe_yield': '6 servings',
'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno-1',
@@ -863,7 +811,6 @@
'original_url': 'https://www.papillesetpupilles.fr/2018/10/patates-douces-au-four.html/',
'perform_time': None,
'prep_time': None,
'rating': 5.0,
'recipe_id': '90097c8b-9d80-468a-b497-73957ac0cd8b',
'recipe_yield': '',
'slug': 'patates-douces-au-four-1',
@@ -879,7 +826,6 @@
'original_url': 'https://www.papillesetpupilles.fr/2018/10/patates-douces-au-four.html/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '98845807-9365-41fd-acd1-35630b468c27',
'recipe_yield': '',
'slug': 'sweet-potatoes',
@@ -895,7 +841,6 @@
'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno',
'perform_time': '50 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': '40c227e0-3c7e-41f7-866d-5de04eaecdd7',
'recipe_yield': '6 servings',
'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno',
@@ -911,7 +856,6 @@
'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx',
'perform_time': '4 Hours',
'prep_time': '1 Hour',
'rating': None,
'recipe_id': '9c7b8aee-c93c-4b1b-ab48-2625d444743a',
'recipe_yield': '4 servings',
'slug': 'boeuf-bourguignon-la-vraie-recette-2',
@@ -927,7 +871,6 @@
'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx',
'perform_time': '4 Hours',
'prep_time': '1 Hour',
'rating': None,
'recipe_id': 'fc42c7d1-7b0f-4e04-b88a-dbd80b81540b',
'recipe_yield': '4 servings',
'slug': 'boeuf-bourguignon-la-vraie-recette-1',
@@ -943,7 +886,6 @@
'original_url': 'https://biancazapatka.com/de/erdnussbutter-schoko-bananenbrot/',
'perform_time': '55 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': '89e63d72-7a51-4cef-b162-2e45035d0a91',
'recipe_yield': '14 servings',
'slug': 'veganes-marmor-bananenbrot-mit-erdnussbutter',
@@ -959,7 +901,6 @@
'original_url': 'https://kuechenchaotin.de/pasta-mit-tomaten-knoblauch-basilikum/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'eab64457-97ba-4d6c-871c-cb1c724ccb51',
'recipe_yield': '',
'slug': 'pasta-mit-tomaten-knoblauch-und-basilikum-einfach-und-genial-kuechenchaotin',
@@ -975,7 +916,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '12439e3d-3c1c-4dcc-9c6e-4afcea2a0542',
'recipe_yield': None,
'slug': 'test123',
@@ -991,7 +931,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '6567f6ec-e410-49cb-a1a5-d08517184e78',
'recipe_yield': None,
'slug': 'bureeto',
@@ -1007,7 +946,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'f7737d17-161c-4008-88d4-dd2616778cd0',
'recipe_yield': None,
'slug': 'subway-double-cookies',
@@ -1023,7 +961,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '1904b717-4a8b-4de9-8909-56958875b5f4',
'recipe_yield': None,
'slug': 'qwerty12345',
@@ -1039,7 +976,6 @@
'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/',
'perform_time': '22 Minutes',
'prep_time': '8 Minutes',
'rating': 5.0,
'recipe_id': '8bdd3656-5e7e-45d3-a3c4-557390846a22',
'recipe_yield': '24 servings',
'slug': 'cheeseburger-sliders-easy-30-min-recipe',
@@ -1055,7 +991,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '8a30d31d-aa14-411e-af0c-6b61a94f5291',
'recipe_yield': '4',
'slug': 'meatloaf',
@@ -1071,7 +1006,6 @@
'original_url': 'https://www.chefkoch.de/rezepte/937641199437984/Richtig-rheinischer-Sauerbraten.html',
'perform_time': '2 Hours 20 Minutes',
'prep_time': '1 Hour',
'rating': 3.0,
'recipe_id': 'f2f7880b-1136-436f-91b7-129788d8c117',
'recipe_yield': '4 servings',
'slug': 'richtig-rheinischer-sauerbraten',
@@ -1087,7 +1021,6 @@
'original_url': 'https://www.chefkoch.de/rezepte/2307761368177614/Orientalischer-Gemuese-Haehnchen-Eintopf.html',
'perform_time': '20 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': 'cf634591-0f82-4254-8e00-2f7e8b0c9022',
'recipe_yield': '6 servings',
'slug': 'orientalischer-gemuse-hahnchen-eintopf',
@@ -1103,7 +1036,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '05208856-d273-4cc9-bcfa-e0215d57108d',
'recipe_yield': '4',
'slug': 'test-20240121',
@@ -1119,7 +1051,6 @@
'original_url': 'https://www.lekkerensimpel.com/loempia-bowl/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '145eeb05-781a-4eb0-a656-afa8bc8c0164',
'recipe_yield': '',
'slug': 'loempia-bowl',
@@ -1135,7 +1066,6 @@
'original_url': 'https://thehappypear.ie/aquafaba-chocolate-mousse/',
'perform_time': None,
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': '5c6532aa-ad84-424c-bc05-c32d50430fe4',
'recipe_yield': '6 servings',
'slug': '5-ingredient-chocolate-mousse',
@@ -1151,7 +1081,6 @@
'original_url': 'https://www.chefkoch.de/rezepte/1208161226570428/Der-perfekte-Pfannkuchen-gelingt-einfach-immer.html',
'perform_time': '10 Minutes',
'prep_time': '5 Minutes',
'rating': None,
'recipe_id': 'f2e684f2-49e0-45ee-90de-951344472f1c',
'recipe_yield': '4 servings',
'slug': 'der-perfekte-pfannkuchen-gelingt-einfach-immer',
@@ -1167,7 +1096,6 @@
'original_url': 'https://www.besondersgut.ch/dinkel-sauerteigbrot/',
'perform_time': '35min',
'prep_time': '1h',
'rating': None,
'recipe_id': 'cf239441-b75d-4dea-a48e-9d99b7cb5842',
'recipe_yield': '1',
'slug': 'dinkel-sauerteigbrot',
@@ -1183,7 +1111,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '2673eb90-6d78-4b95-af36-5db8c8a6da37',
'recipe_yield': None,
'slug': 'test-234234',
@@ -1199,7 +1126,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '0a723c54-af53-40e9-a15f-c87aae5ac688',
'recipe_yield': None,
'slug': 'test-243',
@@ -1215,7 +1141,6 @@
'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/',
'perform_time': '20 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159',
'recipe_yield': '4 servings',
'slug': 'einfacher-nudelauflauf-mit-brokkoli',
@@ -1242,7 +1167,6 @@
'original_url': 'https://www.przepisy.pl/przepis/tarta-cytrynowa-z-beza',
'perform_time': None,
'prep_time': '1 Hour',
'rating': None,
'recipe_id': '9d3cb303-a996-4144-948a-36afaeeef554',
'recipe_yield': '8 servings',
'slug': 'tarta-cytrynowa-z-beza',
@@ -1258,7 +1182,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '77f05a49-e869-4048-aa62-0d8a1f5a8f1c',
'recipe_yield': None,
'slug': 'martins-test-recipe',
@@ -1274,7 +1197,6 @@
'original_url': 'https://aniagotuje.pl/przepis/muffinki-czekoladowe',
'perform_time': '30 Minutes',
'prep_time': '25 Minutes',
'rating': None,
'recipe_id': '75a90207-9c10-4390-a265-c47a4b67fd69',
'recipe_yield': '12',
'slug': 'muffinki-czekoladowe',
@@ -1290,7 +1212,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '4320ba72-377b-4657-8297-dce198f24cdf',
'recipe_yield': None,
'slug': 'my-test-recipe',
@@ -1306,7 +1227,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '98dac844-31ee-426a-b16c-fb62a5dd2816',
'recipe_yield': None,
'slug': 'my-test-receipe',
@@ -1322,7 +1242,6 @@
'original_url': 'https://www.papillesetpupilles.fr/2018/10/patates-douces-au-four.html/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'c3c8f207-c704-415d-81b1-da9f032cf52f',
'recipe_yield': '',
'slug': 'patates-douces-au-four',
@@ -1338,7 +1257,6 @@
'original_url': 'https://sallysbakingaddiction.com/homemade-pizza-crust-recipe/',
'perform_time': '15 Minutes',
'prep_time': '2 Hours 15 Minutes',
'rating': None,
'recipe_id': '1edb2f6e-133c-4be0-b516-3c23625a97ec',
'recipe_yield': '2 servings',
'slug': 'easy-homemade-pizza-dough',
@@ -1354,7 +1272,6 @@
'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe',
'perform_time': '3 Hours 10 Minutes',
'prep_time': '5 Minutes',
'rating': None,
'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e',
'recipe_yield': '6 servings',
'slug': 'all-american-beef-stew-recipe',
@@ -1370,7 +1287,6 @@
'original_url': 'https://www.seriouseats.com/serious-eats-halal-cart-style-chicken-and-rice-white-sauce-recipe',
'perform_time': '55 Minutes',
'prep_time': '20 Minutes',
'rating': 5.0,
'recipe_id': '6530ea6e-401e-4304-8a7a-12162ddf5b9c',
'recipe_yield': '4 servings',
'slug': 'serious-eats-halal-cart-style-chicken-and-rice-with-white-sauce',
@@ -1386,7 +1302,6 @@
'original_url': 'https://www.chefkoch.de/rezepte/1062121211526182/Schnelle-Kaesespaetzle.html',
'perform_time': '30 Minutes',
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': 'c496cf9c-1ece-448a-9d3f-ef772f078a4e',
'recipe_yield': '4 servings',
'slug': 'schnelle-kasespatzle',
@@ -1402,7 +1317,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '49aa6f42-6760-4adf-b6cd-59592da485c3',
'recipe_yield': None,
'slug': 'taco',
@@ -1418,7 +1332,6 @@
'original_url': 'https://www.ica.se/recept/vodkapasta-729011/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '6402a253-2baa-460d-bf4f-b759bb655588',
'recipe_yield': '4 servings',
'slug': 'vodkapasta',
@@ -1434,7 +1347,6 @@
'original_url': 'https://www.ica.se/recept/vodkapasta-729011/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '4f54e9e1-f21d-40ec-a135-91e633dfb733',
'recipe_yield': '4 servings',
'slug': 'vodkapasta2',
@@ -1450,7 +1362,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'e1a3edb0-49a0-49a3-83e3-95554e932670',
'recipe_yield': '1',
'slug': 'rub',
@@ -1466,7 +1377,6 @@
'original_url': 'https://www.justapinch.com/recipes/dessert/cookies/banana-bread-chocolate-chip-cookies.html',
'perform_time': '15 Minutes',
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': '1a0f4e54-db5b-40f1-ab7e-166dab5f6523',
'recipe_yield': '',
'slug': 'banana-bread-chocolate-chip-cookies',
@@ -1482,7 +1392,6 @@
'original_url': 'https://chefjeanpierre.com/recipes/soups/creamy-cauliflower-bisque/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '447acae6-3424-4c16-8c26-c09040ad8041',
'recipe_yield': '',
'slug': 'cauliflower-bisque-recipe-with-cheddar-cheese',
@@ -1498,7 +1407,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': '864136a3-27b0-4f3b-a90f-486f42d6df7a',
'recipe_yield': '',
'slug': 'prova',
@@ -1514,7 +1422,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'c7ccf4c7-c5f4-4191-a79b-1a49d068f6a4',
'recipe_yield': None,
'slug': 'pate-au-beurre-1',
@@ -1530,7 +1437,6 @@
'original_url': None,
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'd01865c3-0f18-4e8d-84c0-c14c345fdf9c',
'recipe_yield': None,
'slug': 'pate-au-beurre',
@@ -1546,7 +1452,6 @@
'original_url': 'https://saltpepperskillet.com/recipes/sous-vide-cheesecake/',
'perform_time': '1 Hour 30 Minutes',
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': '2cec2bb2-19b6-40b8-a36c-1a76ea29c517',
'recipe_yield': '4 servings',
'slug': 'sous-vide-cheesecake-recipe',
@@ -1562,7 +1467,6 @@
'original_url': 'https://recipes.anovaculinary.com/recipe/the-bomb-cheesecakes',
'perform_time': None,
'prep_time': '30 Minutes',
'rating': None,
'recipe_id': '8e0e4566-9caf-4c2e-a01c-dcead23db86b',
'recipe_yield': '10 servings',
'slug': 'the-bomb-mini-cheesecakes',
@@ -1578,7 +1482,6 @@
'original_url': 'https://www.chefkoch.de/rezepte/2109501340136606/Tagliatelle-al-Salmone.html',
'perform_time': '15 Minutes',
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': 'a051eafd-9712-4aee-a8e5-0cd10a6772ee',
'recipe_yield': '4 servings',
'slug': 'tagliatelle-al-salmone',
@@ -1594,7 +1497,6 @@
'original_url': 'https://www.backenmachtgluecklich.de/rezepte/death-by-chocolate-kuchen.html',
'perform_time': '25 Minutes',
'prep_time': '25 Minutes',
'rating': None,
'recipe_id': '093d51e9-0823-40ad-8e0e-a1d5790dd627',
'recipe_yield': '1 serving',
'slug': 'death-by-chocolate',
@@ -1610,7 +1512,6 @@
'original_url': 'https://www.fernweh-koch.de/palak-dal-indischer-spinat-linsen-rezept/',
'perform_time': '20 Minutes',
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': '2d1f62ec-4200-4cfd-987e-c75755d7607c',
'recipe_yield': '4 servings',
'slug': 'palak-dal-rezept-aus-indien',
@@ -1626,7 +1527,6 @@
'original_url': 'https://www.chefkoch.de/rezepte/74441028021809/Tortelline-a-la-Romana.html',
'perform_time': None,
'prep_time': '30 Minutes',
'rating': None,
'recipe_id': '973dc36d-1661-49b4-ad2d-0b7191034fb3',
'recipe_yield': '4 servings',
'slug': 'tortelline-a-la-romana',
@@ -1786,7 +1686,6 @@
'original_url': 'https://www.sacher.com/en/original-sacher-torte/recipe/',
'perform_time': '1 hour',
'prep_time': '1 hour 30 minutes',
'rating': None,
'recipe_id': 'fada9582-709b-46aa-b384-d5952123ad93',
'recipe_yield': '4 servings',
'slug': 'original-sacher-torte-2',
@@ -1851,7 +1750,6 @@
'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93',
'recipe_yield': '2 servings',
'slug': 'zoete-aardappel-curry-traybake',
@@ -1877,7 +1775,6 @@
'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/',
'perform_time': '1 Hour 20 Minutes',
'prep_time': '15 Minutes',
'rating': 5.0,
'recipe_id': '5b055066-d57d-4fd0-8dfd-a2c2f07b36f1',
'recipe_yield': '6 servings',
'slug': 'roast-chicken',
@@ -1903,7 +1800,6 @@
'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos',
'perform_time': '7 Minutes',
'prep_time': '3 Minutes',
'rating': None,
'recipe_id': 'e360a0cc-18b0-4a84-a91b-8aa59e2451c9',
'recipe_yield': '2 servings',
'slug': 'receta-de-pollo-al-curry-en-10-minutos-con-video-incluido',
@@ -1929,7 +1825,6 @@
'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx',
'perform_time': '4 Hours',
'prep_time': '1 Hour',
'rating': None,
'recipe_id': '9c7b8aee-c93c-4b1b-ab48-2625d444743a',
'recipe_yield': '4 servings',
'slug': 'boeuf-bourguignon-la-vraie-recette-2',
@@ -1955,7 +1850,6 @@
'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno',
'perform_time': '50 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': 'f79f7e9d-4b58-4930-a586-2b127f16ee34',
'recipe_yield': '6 servings',
'slug': 'eukole-makaronada-me-kephtedakia-ston-phourno-1',
@@ -1981,7 +1875,6 @@
'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/',
'perform_time': '20 Minutes',
'prep_time': '40 Minutes',
'rating': None,
'recipe_id': '47595e4c-52bc-441d-b273-3edf4258806d',
'recipe_yield': '4 servings',
'slug': 'greek-turkey-meatballs-with-lemon-orzo-creamy-feta-yogurt-sauce',
@@ -2007,7 +1900,6 @@
'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/',
'perform_time': '20 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159',
'recipe_yield': '4 servings',
'slug': 'einfacher-nudelauflauf-mit-brokkoli',
@@ -2033,7 +1925,6 @@
'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963',
'perform_time': '1 Hour',
'prep_time': '15 Minutes',
'rating': 3.0,
'recipe_id': '92635fd0-f2dc-4e78-a6e4-ecd556ad361f',
'recipe_yield': '12 servings',
'slug': 'pampered-chef-double-chocolate-mocha-trifle',
@@ -2059,7 +1950,6 @@
'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/',
'perform_time': '22 Minutes',
'prep_time': '8 Minutes',
'rating': 5.0,
'recipe_id': '8bdd3656-5e7e-45d3-a3c4-557390846a22',
'recipe_yield': '24 servings',
'slug': 'cheeseburger-sliders-easy-30-min-recipe',
@@ -2085,7 +1975,6 @@
'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe',
'perform_time': '3 Hours 10 Minutes',
'prep_time': '5 Minutes',
'rating': None,
'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e',
'recipe_yield': '6 servings',
'slug': 'all-american-beef-stew-recipe',
@@ -2111,7 +2000,6 @@
'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe',
'perform_time': '3 Hours 10 Minutes',
'prep_time': '5 Minutes',
'rating': None,
'recipe_id': '48f39d27-4b8e-4c14-bf36-4e1e6497e75e',
'recipe_yield': '6 servings',
'slug': 'all-american-beef-stew-recipe',
@@ -2137,7 +2025,6 @@
'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/',
'perform_time': '20 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': '9d553779-607e-471b-acf3-84e6be27b159',
'recipe_yield': '4 servings',
'slug': 'einfacher-nudelauflauf-mit-brokkoli',
@@ -2163,7 +2050,6 @@
'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/',
'perform_time': '15 Minutes',
'prep_time': '10 Minutes',
'rating': None,
'recipe_id': '25b814f2-d9bf-4df0-b40d-d2f2457b4317',
'recipe_yield': '2 servings',
'slug': 'miso-udon-noodles-with-spinach-and-tofu',
@@ -2189,7 +2075,6 @@
'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon',
'perform_time': '2 Minutes',
'prep_time': '15 Minutes',
'rating': None,
'recipe_id': '55c88810-4cf1-4d86-ae50-63b15fd173fb',
'recipe_yield': '12 servings',
'slug': 'mousse-de-saumon',
@@ -2362,7 +2247,6 @@
'original_url': 'https://www.sacher.com/en/original-sacher-torte/recipe/',
'perform_time': '1 hour',
'prep_time': '1 hour 30 minutes',
'rating': None,
'recipe_id': 'fada9582-709b-46aa-b384-d5952123ad93',
'recipe_yield': '4 servings',
'slug': 'original-sacher-torte-2',
@@ -2426,7 +2310,6 @@
'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93',
'recipe_yield': '2 servings',
'slug': 'zoete-aardappel-curry-traybake',
@@ -2456,7 +2339,6 @@
'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93',
'recipe_yield': '2 servings',
'slug': 'zoete-aardappel-curry-traybake',
@@ -2486,7 +2368,6 @@
'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93',
'recipe_yield': '2 servings',
'slug': 'zoete-aardappel-curry-traybake',
@@ -2516,7 +2397,6 @@
'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/',
'perform_time': None,
'prep_time': None,
'rating': None,
'recipe_id': 'c5f00a93-71a2-4e48-900f-d9ad0bb9de93',
'recipe_yield': '2 servings',
'slug': 'zoete-aardappel-curry-traybake',

View File

@@ -6,7 +6,6 @@ from unittest.mock import AsyncMock, patch
from nhc.cover import NHCCover
from nhc.light import NHCLight
from nhc.scene import NHCScene
from nhc.thermostat import NHCThermostat
import pytest
from homeassistant.components.niko_home_control.const import DOMAIN
@@ -63,22 +62,6 @@ def cover() -> NHCCover:
return mock
@pytest.fixture
def climate() -> NHCThermostat:
"""Return a thermostat mock."""
mock = AsyncMock(spec=NHCThermostat)
mock.id = 5
mock.name = "thermostat"
mock.suggested_area = "room"
mock.state = 0
mock.measured = 180
mock.setpoint = 200
mock.overrule = 0
mock.overruletime = 0
mock.ecosave = 0
return mock
@pytest.fixture
def scene() -> NHCScene:
"""Return a scene mock."""
@@ -93,11 +76,7 @@ def scene() -> NHCScene:
@pytest.fixture
def mock_niko_home_control_connection(
light: NHCLight,
dimmable_light: NHCLight,
cover: NHCCover,
climate: NHCThermostat,
scene: NHCScene,
light: NHCLight, dimmable_light: NHCLight, cover: NHCCover, scene: NHCScene
) -> Generator[AsyncMock]:
"""Mock a NHC client."""
with (
@@ -113,7 +92,6 @@ def mock_niko_home_control_connection(
client = mock_client.return_value
client.lights = [light, dimmable_light]
client.covers = [cover]
client.thermostats = {"thermostat-5": climate}
client.scenes = [scene]
client.connect = AsyncMock(return_value=True)
yield client

View File

@@ -1,84 +0,0 @@
# serializer version: 1
# name: test_entities[climate.thermostat-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'hvac_modes': list([
<HVACMode.OFF: 'off'>,
<HVACMode.COOL: 'cool'>,
<HVACMode.AUTO: 'auto'>,
]),
'max_temp': 35,
'min_temp': 7,
'preset_modes': list([
'day',
'night',
'eco',
'prog1',
'prog2',
'prog3',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'climate',
'entity_category': None,
'entity_id': 'climate.thermostat',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': None,
'platform': 'niko_home_control',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': <ClimateEntityFeature: 145>,
'translation_key': 'nhc_thermostat',
'unique_id': '01JFN93M7KRA38V5AMPCJ2JYYV-5',
'unit_of_measurement': None,
})
# ---
# name: test_entities[climate.thermostat-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'current_temperature': 180,
'friendly_name': 'thermostat',
'hvac_modes': list([
<HVACMode.OFF: 'off'>,
<HVACMode.COOL: 'cool'>,
<HVACMode.AUTO: 'auto'>,
]),
'max_temp': 35,
'min_temp': 7,
'preset_mode': 'day',
'preset_modes': list([
'day',
'night',
'eco',
'prog1',
'prog2',
'prog3',
]),
'supported_features': <ClimateEntityFeature: 145>,
'temperature': 200,
}),
'context': <ANY>,
'entity_id': 'climate.thermostat',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'auto',
})
# ---

View File

@@ -1,116 +0,0 @@
"""Tests for the Niko Home Control Climate platform."""
from typing import Any
from unittest.mock import AsyncMock, patch
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.climate import ATTR_HVAC_MODE, ATTR_PRESET_MODE, HVACMode
from homeassistant.const import ATTR_ENTITY_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import find_update_callback, setup_integration
from tests.common import MockConfigEntry, snapshot_platform
async def test_entities(
hass: HomeAssistant,
snapshot: SnapshotAssertion,
mock_niko_home_control_connection: AsyncMock,
mock_config_entry: MockConfigEntry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test all entities."""
with patch(
"homeassistant.components.niko_home_control.PLATFORMS", [Platform.CLIMATE]
):
await setup_integration(hass, mock_config_entry)
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
@pytest.mark.parametrize(
("service", "service_parameters", "api_method", "api_parameters"),
[
("set_temperature", {"temperature": 25}, "set_temperature", (25,)),
("set_preset_mode", {ATTR_PRESET_MODE: "eco"}, "set_mode", (2,)),
("set_hvac_mode", {ATTR_HVAC_MODE: HVACMode.COOL}, "set_mode", (4,)),
("set_hvac_mode", {ATTR_HVAC_MODE: HVACMode.OFF}, "set_mode", (3,)),
("set_hvac_mode", {ATTR_HVAC_MODE: HVACMode.AUTO}, "set_mode", (5,)),
],
)
async def test_set(
hass: HomeAssistant,
mock_niko_home_control_connection: AsyncMock,
mock_config_entry: MockConfigEntry,
climate: AsyncMock,
service: str,
service_parameters: dict[str, Any],
api_method: str,
api_parameters: tuple[Any, ...],
) -> None:
"""Test setting a value on the climate entity."""
await setup_integration(hass, mock_config_entry)
await hass.services.async_call(
"climate",
service,
{ATTR_ENTITY_ID: "climate.thermostat"} | service_parameters,
blocking=True,
)
getattr(
mock_niko_home_control_connection.thermostats["thermostat-5"],
api_method,
).assert_called_once_with(*api_parameters)
async def test_updating(
hass: HomeAssistant,
mock_niko_home_control_connection: AsyncMock,
mock_config_entry: MockConfigEntry,
climate: AsyncMock,
) -> None:
"""Test updating the thermostat."""
await setup_integration(hass, mock_config_entry)
climate.state = 0
await find_update_callback(mock_niko_home_control_connection, 5)(0)
assert hass.states.get("climate.thermostat").attributes.get("preset_mode") == "day"
assert hass.states.get("climate.thermostat").state == "auto"
climate.state = 1
await find_update_callback(mock_niko_home_control_connection, 5)(1)
assert (
hass.states.get("climate.thermostat").attributes.get("preset_mode") == "night"
)
assert hass.states.get("climate.thermostat").state == "auto"
climate.state = 2
await find_update_callback(mock_niko_home_control_connection, 5)(2)
assert hass.states.get("climate.thermostat").state == "auto"
assert hass.states.get("climate.thermostat").attributes["preset_mode"] == "eco"
climate.state = 3
await find_update_callback(mock_niko_home_control_connection, 5)(3)
assert hass.states.get("climate.thermostat").state == "off"
climate.state = 4
await find_update_callback(mock_niko_home_control_connection, 5)(4)
assert hass.states.get("climate.thermostat").state == "cool"
climate.state = 5
await find_update_callback(mock_niko_home_control_connection, 5)(5)
assert hass.states.get("climate.thermostat").state == "auto"
assert hass.states.get("climate.thermostat").attributes["preset_mode"] == "prog1"
climate.state = 6
await find_update_callback(mock_niko_home_control_connection, 5)(6)
assert hass.states.get("climate.thermostat").state == "auto"
assert hass.states.get("climate.thermostat").attributes["preset_mode"] == "prog2"
climate.state = 7
await find_update_callback(mock_niko_home_control_connection, 5)(7)
assert hass.states.get("climate.thermostat").state == "auto"
assert hass.states.get("climate.thermostat").attributes["preset_mode"] == "prog3"

View File

@@ -185,9 +185,7 @@ def mock_psnawpapi(mock_user: MagicMock) -> Generator[MagicMock]:
spec=User, account_id="fren-psn-id", online_id="PublicUniversalFriend"
)
fren.get_presence.return_value = mock_user.get_presence.return_value
fren.trophy_summary.return_value = TrophySummary(
"fren-psn-id", 420, 20, 5, TrophySet(4782, 1245, 437, 96)
)
client.user.return_value.friends_list.return_value = [fren]
yield client

View File

@@ -1,102 +1,4 @@
# serializer version: 1
# name: test_sensors[sensor.publicuniversalfriend_bronze_trophies-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.publicuniversalfriend_bronze_trophies',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Bronze trophies',
'platform': 'playstation_network',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': <PlaystationNetworkSensor.EARNED_TROPHIES_BRONZE: 'earned_trophies_bronze'>,
'unique_id': 'fren-psn-id_earned_trophies_bronze',
'unit_of_measurement': 'trophies',
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_bronze_trophies-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'PublicUniversalFriend Bronze trophies',
'unit_of_measurement': 'trophies',
}),
'context': <ANY>,
'entity_id': 'sensor.publicuniversalfriend_bronze_trophies',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '4782',
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_gold_trophies-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.publicuniversalfriend_gold_trophies',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Gold trophies',
'platform': 'playstation_network',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': <PlaystationNetworkSensor.EARNED_TROPHIES_GOLD: 'earned_trophies_gold'>,
'unique_id': 'fren-psn-id_earned_trophies_gold',
'unit_of_measurement': 'trophies',
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_gold_trophies-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'PublicUniversalFriend Gold trophies',
'unit_of_measurement': 'trophies',
}),
'context': <ANY>,
'entity_id': 'sensor.publicuniversalfriend_gold_trophies',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '437',
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_last_online-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
@@ -146,55 +48,6 @@
'state': '2025-06-30T01:42:15+00:00',
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_next_level-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.publicuniversalfriend_next_level',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Next level',
'platform': 'playstation_network',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': <PlaystationNetworkSensor.TROPHY_LEVEL_PROGRESS: 'trophy_level_progress'>,
'unique_id': 'fren-psn-id_trophy_level_progress',
'unit_of_measurement': '%',
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_next_level-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'PublicUniversalFriend Next level',
'unit_of_measurement': '%',
}),
'context': <ANY>,
'entity_id': 'sensor.publicuniversalfriend_next_level',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '20',
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_now_playing-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
@@ -353,152 +206,6 @@
'state': 'availabletoplay',
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_platinum_trophies-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.publicuniversalfriend_platinum_trophies',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Platinum trophies',
'platform': 'playstation_network',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': <PlaystationNetworkSensor.EARNED_TROPHIES_PLATINUM: 'earned_trophies_platinum'>,
'unique_id': 'fren-psn-id_earned_trophies_platinum',
'unit_of_measurement': 'trophies',
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_platinum_trophies-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'PublicUniversalFriend Platinum trophies',
'unit_of_measurement': 'trophies',
}),
'context': <ANY>,
'entity_id': 'sensor.publicuniversalfriend_platinum_trophies',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '96',
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_silver_trophies-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.publicuniversalfriend_silver_trophies',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Silver trophies',
'platform': 'playstation_network',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': <PlaystationNetworkSensor.EARNED_TROPHIES_SILVER: 'earned_trophies_silver'>,
'unique_id': 'fren-psn-id_earned_trophies_silver',
'unit_of_measurement': 'trophies',
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_silver_trophies-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'PublicUniversalFriend Silver trophies',
'unit_of_measurement': 'trophies',
}),
'context': <ANY>,
'entity_id': 'sensor.publicuniversalfriend_silver_trophies',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '1245',
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_trophy_level-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'sensor',
'entity_category': None,
'entity_id': 'sensor.publicuniversalfriend_trophy_level',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Trophy level',
'platform': 'playstation_network',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': <PlaystationNetworkSensor.TROPHY_LEVEL: 'trophy_level'>,
'unique_id': 'fren-psn-id_trophy_level',
'unit_of_measurement': None,
})
# ---
# name: test_sensors[sensor.publicuniversalfriend_trophy_level-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'PublicUniversalFriend Trophy level',
}),
'context': <ANY>,
'entity_id': 'sensor.publicuniversalfriend_trophy_level',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '420',
})
# ---
# name: test_sensors[sensor.testuser_bronze_trophies-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

View File

@@ -25,32 +25,6 @@
"dhw_cm_switch": false
}
},
"14df5c4dc8cb4ba69f9d1ac0eaf7c5c6": {
"available": true,
"binary_sensors": {
"low_battery": false
},
"dev_class": "zone_thermostat",
"hardware": "1",
"location": "f2bf9048bef64cc5b6d5110154e33c81",
"model": "Emma Pro",
"model_id": "170-01",
"name": "Emma",
"sensors": {
"battery": 100,
"humidity": 65.0,
"setpoint": 23.5,
"temperature": 24.2
},
"temperature_offset": {
"lower_bound": -2.0,
"resolution": 0.1,
"setpoint": 0.0,
"upper_bound": 2.0
},
"vendor": "Plugwise",
"zigbee_mac_address": "60EFABFFFE89CBA0"
},
"1772a4ea304041adb83f357b751341ff": {
"available": true,
"binary_sensors": {
@@ -64,11 +38,11 @@
"model_id": "106-03",
"name": "Tom Badkamer",
"sensors": {
"battery": 60,
"setpoint": 25.0,
"temperature": 24.8,
"temperature_difference": -0.4,
"valve_position": 100.0
"battery": 99,
"setpoint": 18.0,
"temperature": 21.6,
"temperature_difference": -0.2,
"valve_position": 100
},
"temperature_offset": {
"lower_bound": -2.0,
@@ -77,9 +51,10 @@
"upper_bound": 2.0
},
"vendor": "Plugwise",
"zigbee_mac_address": "000D6F000C8FCBA0"
"zigbee_mac_address": "000D6F000C8FF5EE"
},
"ad4838d7d35c4d6ea796ee12ae5aedf8": {
"available": true,
"dev_class": "thermostat",
"location": "f2bf9048bef64cc5b6d5110154e33c81",
"model": "ThermoTouch",
@@ -87,7 +62,7 @@
"name": "Anna",
"sensors": {
"setpoint": 23.5,
"temperature": 24.0
"temperature": 25.8
},
"vendor": "Plugwise"
},
@@ -96,20 +71,20 @@
"plugwise_notification": false
},
"dev_class": "gateway",
"firmware": "3.9.0",
"firmware": "3.7.8",
"gateway_modes": ["away", "full", "vacation"],
"hardware": "AME Smile 2.0 board",
"location": "bc93488efab249e5bc54fd7e175a6f91",
"mac_address": "D40FB201CBA0",
"mac_address": "012345679891",
"model": "Gateway",
"model_id": "smile_open_therm",
"name": "Adam",
"notifications": {},
"regulation_modes": [
"bleeding_cold",
"heating",
"off",
"bleeding_hot",
"bleeding_cold",
"off",
"heating",
"cooling"
],
"select_gateway_mode": "full",
@@ -118,33 +93,12 @@
"outdoor_temperature": 29.65
},
"vendor": "Plugwise",
"zigbee_mac_address": "000D6F000D5ACBA0"
},
"da575e9e09b947e281fb6e3ebce3b174": {
"available": true,
"binary_sensors": {
"low_battery": false
},
"dev_class": "zone_thermometer",
"firmware": "2020-09-01T02:00:00+02:00",
"hardware": "1",
"location": "f2bf9048bef64cc5b6d5110154e33c81",
"model": "Jip",
"model_id": "168-01",
"name": "Jip",
"sensors": {
"battery": 100,
"humidity": 65.8,
"setpoint": 23.5,
"temperature": 23.8
},
"vendor": "Plugwise",
"zigbee_mac_address": "70AC08FFFEE1CBA0"
"zigbee_mac_address": "000D6F000D5A168D"
},
"e2f4322d57924fa090fbbc48b3a140dc": {
"available": true,
"binary_sensors": {
"low_battery": false
"low_battery": true
},
"dev_class": "zone_thermostat",
"firmware": "2016-10-10T02:00:00+02:00",
@@ -154,9 +108,9 @@
"model_id": "158-01",
"name": "Lisa Badkamer",
"sensors": {
"battery": 71,
"setpoint": 25.0,
"temperature": 25.6
"battery": 14,
"setpoint": 23.5,
"temperature": 23.9
},
"temperature_offset": {
"lower_bound": -2.0,
@@ -165,7 +119,7 @@
"upper_bound": 2.0
},
"vendor": "Plugwise",
"zigbee_mac_address": "000D6F000C86CBA0"
"zigbee_mac_address": "000D6F000C869B61"
},
"e8ef2a01ed3b4139a53bf749204fe6b4": {
"dev_class": "switching",
@@ -184,9 +138,9 @@
"active_preset": "home",
"available_schedules": [
"Badkamer",
"Test",
"Vakantie",
"Weekschema",
"Test",
"off"
],
"climate_mode": "cool",
@@ -194,13 +148,12 @@
"dev_class": "climate",
"model": "ThermoZone",
"name": "Living room",
"preset_modes": ["vacation", "no_frost", "asleep", "home", "away"],
"preset_modes": ["no_frost", "asleep", "vacation", "home", "away"],
"select_schedule": "off",
"select_zone_profile": "active",
"sensors": {
"electricity_consumed": 60.8,
"electricity_consumed": 149.9,
"electricity_produced": 0.0,
"temperature": 24.2
"temperature": 25.8
},
"thermostat": {
"lower_bound": 1.0,
@@ -209,23 +162,18 @@
"upper_bound": 35.0
},
"thermostats": {
"primary": [
"ad4838d7d35c4d6ea796ee12ae5aedf8",
"14df5c4dc8cb4ba69f9d1ac0eaf7c5c6",
"da575e9e09b947e281fb6e3ebce3b174"
],
"primary": ["ad4838d7d35c4d6ea796ee12ae5aedf8"],
"secondary": []
},
"vendor": "Plugwise",
"zone_profiles": ["active", "off", "passive"]
"vendor": "Plugwise"
},
"f871b8c4d63549319221e294e4f88074": {
"active_preset": "vacation",
"active_preset": "home",
"available_schedules": [
"Badkamer",
"Test",
"Vakantie",
"Weekschema",
"Test",
"off"
],
"climate_mode": "auto",
@@ -233,13 +181,12 @@
"dev_class": "climate",
"model": "ThermoZone",
"name": "Bathroom",
"preset_modes": ["vacation", "no_frost", "asleep", "home", "away"],
"select_schedule": "off",
"select_zone_profile": "passive",
"preset_modes": ["no_frost", "asleep", "vacation", "home", "away"],
"select_schedule": "Badkamer",
"sensors": {
"electricity_consumed": 0.0,
"electricity_produced": 0.0,
"temperature": 25.8
"temperature": 23.9
},
"thermostat": {
"lower_bound": 0.0,
@@ -251,7 +198,6 @@
"primary": ["e2f4322d57924fa090fbbc48b3a140dc"],
"secondary": ["1772a4ea304041adb83f357b751341ff"]
},
"vendor": "Plugwise",
"zone_profiles": ["active", "off", "passive"]
"vendor": "Plugwise"
}
}

View File

@@ -30,32 +30,6 @@
"dhw_cm_switch": false
}
},
"14df5c4dc8cb4ba69f9d1ac0eaf7c5c6": {
"available": true,
"binary_sensors": {
"low_battery": false
},
"dev_class": "zone_thermostat",
"hardware": "1",
"location": "f2bf9048bef64cc5b6d5110154e33c81",
"model": "Emma Pro",
"model_id": "170-01",
"name": "Emma",
"sensors": {
"battery": 100,
"humidity": 65.0,
"setpoint": 20.0,
"temperature": 19.5
},
"temperature_offset": {
"lower_bound": -2.0,
"resolution": 0.1,
"setpoint": 0.0,
"upper_bound": 2.0
},
"vendor": "Plugwise",
"zigbee_mac_address": "60EFABFFFE89CBA0"
},
"1772a4ea304041adb83f357b751341ff": {
"available": true,
"binary_sensors": {
@@ -69,11 +43,11 @@
"model_id": "106-03",
"name": "Tom Badkamer",
"sensors": {
"battery": 60,
"setpoint": 25.0,
"battery": 99,
"setpoint": 18.0,
"temperature": 18.6,
"temperature_difference": -0.4,
"valve_position": 100.0
"temperature_difference": -0.2,
"valve_position": 100
},
"temperature_offset": {
"lower_bound": -2.0,
@@ -82,9 +56,10 @@
"upper_bound": 2.0
},
"vendor": "Plugwise",
"zigbee_mac_address": "000D6F000C8FCBA0"
"zigbee_mac_address": "000D6F000C8FF5EE"
},
"ad4838d7d35c4d6ea796ee12ae5aedf8": {
"available": true,
"dev_class": "thermostat",
"location": "f2bf9048bef64cc5b6d5110154e33c81",
"model": "ThermoTouch",
@@ -101,49 +76,28 @@
"plugwise_notification": false
},
"dev_class": "gateway",
"firmware": "3.9.0",
"firmware": "3.7.8",
"gateway_modes": ["away", "full", "vacation"],
"hardware": "AME Smile 2.0 board",
"location": "bc93488efab249e5bc54fd7e175a6f91",
"mac_address": "D40FB201CBA0",
"mac_address": "012345679891",
"model": "Gateway",
"model_id": "smile_open_therm",
"name": "Adam",
"notifications": {},
"regulation_modes": ["bleeding_cold", "heating", "off", "bleeding_hot"],
"regulation_modes": ["bleeding_hot", "bleeding_cold", "off", "heating"],
"select_gateway_mode": "full",
"select_regulation_mode": "heating",
"sensors": {
"outdoor_temperature": -1.25
},
"vendor": "Plugwise",
"zigbee_mac_address": "000D6F000D5ACBA0"
},
"da575e9e09b947e281fb6e3ebce3b174": {
"available": true,
"binary_sensors": {
"low_battery": false
},
"dev_class": "zone_thermometer",
"firmware": "2020-09-01T02:00:00+02:00",
"hardware": "1",
"location": "f2bf9048bef64cc5b6d5110154e33c81",
"model": "Jip",
"model_id": "168-01",
"name": "Jip",
"sensors": {
"battery": 100,
"humidity": 65.8,
"setpoint": 20.0,
"temperature": 19.3
},
"vendor": "Plugwise",
"zigbee_mac_address": "70AC08FFFEE1CBA0"
"zigbee_mac_address": "000D6F000D5A168D"
},
"e2f4322d57924fa090fbbc48b3a140dc": {
"available": true,
"binary_sensors": {
"low_battery": false
"low_battery": true
},
"dev_class": "zone_thermostat",
"firmware": "2016-10-10T02:00:00+02:00",
@@ -153,7 +107,7 @@
"model_id": "158-01",
"name": "Lisa Badkamer",
"sensors": {
"battery": 71,
"battery": 14,
"setpoint": 15.0,
"temperature": 17.9
},
@@ -164,7 +118,7 @@
"upper_bound": 2.0
},
"vendor": "Plugwise",
"zigbee_mac_address": "000D6F000C86CBA0"
"zigbee_mac_address": "000D6F000C869B61"
},
"e8ef2a01ed3b4139a53bf749204fe6b4": {
"dev_class": "switching",
@@ -183,9 +137,9 @@
"active_preset": "home",
"available_schedules": [
"Badkamer",
"Test",
"Vakantie",
"Weekschema",
"Test",
"off"
],
"climate_mode": "heat",
@@ -193,11 +147,10 @@
"dev_class": "climate",
"model": "ThermoZone",
"name": "Living room",
"preset_modes": ["vacation", "no_frost", "asleep", "home", "away"],
"preset_modes": ["no_frost", "asleep", "vacation", "home", "away"],
"select_schedule": "off",
"select_zone_profile": "active",
"sensors": {
"electricity_consumed": 60.8,
"electricity_consumed": 149.9,
"electricity_produced": 0.0,
"temperature": 19.1
},
@@ -208,23 +161,18 @@
"upper_bound": 35.0
},
"thermostats": {
"primary": [
"ad4838d7d35c4d6ea796ee12ae5aedf8",
"14df5c4dc8cb4ba69f9d1ac0eaf7c5c6",
"da575e9e09b947e281fb6e3ebce3b174"
],
"primary": ["ad4838d7d35c4d6ea796ee12ae5aedf8"],
"secondary": []
},
"vendor": "Plugwise",
"zone_profiles": ["active", "off", "passive"]
"vendor": "Plugwise"
},
"f871b8c4d63549319221e294e4f88074": {
"active_preset": "vacation",
"active_preset": "home",
"available_schedules": [
"Badkamer",
"Test",
"Vakantie",
"Weekschema",
"Test",
"off"
],
"climate_mode": "auto",
@@ -232,9 +180,8 @@
"dev_class": "climate",
"model": "ThermoZone",
"name": "Bathroom",
"preset_modes": ["vacation", "no_frost", "asleep", "home", "away"],
"select_schedule": "off",
"select_zone_profile": "passive",
"preset_modes": ["no_frost", "asleep", "vacation", "home", "away"],
"select_schedule": "Badkamer",
"sensors": {
"electricity_consumed": 0.0,
"electricity_produced": 0.0,
@@ -250,7 +197,6 @@
"primary": ["e2f4322d57924fa090fbbc48b3a140dc"],
"secondary": ["1772a4ea304041adb83f357b751341ff"]
},
"vendor": "Plugwise",
"zone_profiles": ["active", "off", "passive"]
"vendor": "Plugwise"
}
}

View File

@@ -8,7 +8,6 @@
"name": "Slaapkamer",
"preset_modes": ["home", "asleep", "away", "vacation", "no_frost"],
"select_schedule": null,
"select_zone_profile": "active",
"sensors": {
"temperature": 24.2
},
@@ -22,8 +21,7 @@
"primary": ["1346fbd8498d4dbcab7e18d51b771f3d"],
"secondary": ["356b65335e274d769c338223e7af9c33"]
},
"vendor": "Plugwise",
"zone_profiles": ["active", "off", "passive"]
"vendor": "Plugwise"
},
"13228dab8ce04617af318a2888b3c548": {
"active_preset": "home",
@@ -35,7 +33,6 @@
"name": "Woonkamer",
"preset_modes": ["home", "asleep", "away", "vacation", "no_frost"],
"select_schedule": null,
"select_zone_profile": "active",
"sensors": {
"temperature": 27.4
},
@@ -49,8 +46,7 @@
"primary": ["f61f1a2535f54f52ad006a3d18e459ca"],
"secondary": ["833de10f269c4deab58fb9df69901b4e"]
},
"vendor": "Plugwise",
"zone_profiles": ["active", "off", "passive"]
"vendor": "Plugwise"
},
"1346fbd8498d4dbcab7e18d51b771f3d": {
"available": true,
@@ -252,7 +248,6 @@
"name": "Kinderkamer",
"preset_modes": ["home", "asleep", "away", "vacation", "no_frost"],
"select_schedule": null,
"select_zone_profile": "active",
"sensors": {
"temperature": 30.0
},
@@ -266,8 +261,7 @@
"primary": ["6f3e9d7084214c21b9dfa46f6eeb8700"],
"secondary": ["d4496250d0e942cfa7aea3476e9070d5"]
},
"vendor": "Plugwise",
"zone_profiles": ["active", "off", "passive"]
"vendor": "Plugwise"
},
"d4496250d0e942cfa7aea3476e9070d5": {
"available": true,
@@ -303,7 +297,6 @@
"name": "Logeerkamer",
"preset_modes": ["home", "asleep", "away", "vacation", "no_frost"],
"select_schedule": null,
"select_zone_profile": "active",
"sensors": {
"temperature": 30.0
},
@@ -317,8 +310,7 @@
"primary": ["a6abc6a129ee499c88a4d420cc413b47"],
"secondary": ["1da4d325838e4ad8aac12177214505c9"]
},
"vendor": "Plugwise",
"zone_profiles": ["active", "off", "passive"]
"vendor": "Plugwise"
},
"e4684553153b44afbef2200885f379dc": {
"available": true,

View File

@@ -13,9 +13,9 @@
'max_temp': 35.0,
'min_temp': 0.0,
'preset_modes': list([
'vacation',
'no_frost',
'asleep',
'vacation',
'home',
'away',
]),
@@ -63,11 +63,11 @@
]),
'max_temp': 35.0,
'min_temp': 0.0,
'preset_mode': 'vacation',
'preset_mode': 'home',
'preset_modes': list([
'vacation',
'no_frost',
'asleep',
'vacation',
'home',
'away',
]),
@@ -97,9 +97,9 @@
'max_temp': 35.0,
'min_temp': 1.0,
'preset_modes': list([
'vacation',
'no_frost',
'asleep',
'vacation',
'home',
'away',
]),
@@ -149,9 +149,9 @@
'min_temp': 1.0,
'preset_mode': 'home',
'preset_modes': list([
'vacation',
'no_frost',
'asleep',
'vacation',
'home',
'away',
]),

View File

@@ -35,7 +35,7 @@
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'select_gateway_mode',
'translation_key': 'gateway_mode',
'unique_id': 'da224107914542988a88561b4452b0f6-select_gateway_mode',
'unit_of_measurement': None,
})
@@ -65,10 +65,10 @@
'area_id': None,
'capabilities': dict({
'options': list([
'bleeding_cold',
'heating',
'off',
'bleeding_hot',
'bleeding_cold',
'off',
'heating',
'cooling',
]),
}),
@@ -96,7 +96,7 @@
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'select_regulation_mode',
'translation_key': 'regulation_mode',
'unique_id': 'da224107914542988a88561b4452b0f6-select_regulation_mode',
'unit_of_measurement': None,
})
@@ -106,10 +106,10 @@
'attributes': ReadOnlyDict({
'friendly_name': 'Adam Regulation mode',
'options': list([
'bleeding_cold',
'heating',
'off',
'bleeding_hot',
'bleeding_cold',
'off',
'heating',
'cooling',
]),
}),
@@ -129,9 +129,9 @@
'capabilities': dict({
'options': list([
'Badkamer',
'Test',
'Vakantie',
'Weekschema',
'Test',
'off',
]),
}),
@@ -170,9 +170,9 @@
'friendly_name': 'Bathroom Thermostat schedule',
'options': list([
'Badkamer',
'Test',
'Vakantie',
'Weekschema',
'Test',
'off',
]),
}),
@@ -181,66 +181,7 @@
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---
# name: test_adam_2_select_entities[platforms0-True-m_adam_cooling][select.bathroom_zone_profile-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
'active',
'off',
'passive',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'select',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'select.bathroom_zone_profile',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Zone profile',
'platform': 'plugwise',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'select_zone_profile',
'unique_id': 'f871b8c4d63549319221e294e4f88074-select_zone_profile',
'unit_of_measurement': None,
})
# ---
# name: test_adam_2_select_entities[platforms0-True-m_adam_cooling][select.bathroom_zone_profile-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Bathroom Zone profile',
'options': list([
'active',
'off',
'passive',
]),
}),
'context': <ANY>,
'entity_id': 'select.bathroom_zone_profile',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'passive',
'state': 'Badkamer',
})
# ---
# name: test_adam_2_select_entities[platforms0-True-m_adam_cooling][select.living_room_thermostat_schedule-entry]
@@ -251,9 +192,9 @@
'capabilities': dict({
'options': list([
'Badkamer',
'Test',
'Vakantie',
'Weekschema',
'Test',
'off',
]),
}),
@@ -292,9 +233,9 @@
'friendly_name': 'Living room Thermostat schedule',
'options': list([
'Badkamer',
'Test',
'Vakantie',
'Weekschema',
'Test',
'off',
]),
}),
@@ -306,65 +247,6 @@
'state': 'off',
})
# ---
# name: test_adam_2_select_entities[platforms0-True-m_adam_cooling][select.living_room_zone_profile-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': dict({
'options': list([
'active',
'off',
'passive',
]),
}),
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'select',
'entity_category': <EntityCategory.CONFIG: 'config'>,
'entity_id': 'select.living_room_zone_profile',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Zone profile',
'platform': 'plugwise',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'select_zone_profile',
'unique_id': 'f2bf9048bef64cc5b6d5110154e33c81-select_zone_profile',
'unit_of_measurement': None,
})
# ---
# name: test_adam_2_select_entities[platforms0-True-m_adam_cooling][select.living_room_zone_profile-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Living room Zone profile',
'options': list([
'active',
'off',
'passive',
]),
}),
'context': <ANY>,
'entity_id': 'select.living_room_zone_profile',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'active',
})
# ---
# name: test_adam_select_entities[platforms0][select.badkamer_thermostat_schedule-entry]
EntityRegistryEntrySnapshot({
'aliases': set({

File diff suppressed because it is too large Load Diff

View File

@@ -257,7 +257,7 @@ async def test_update_device(
entity_registry, mock_config_entry.entry_id
)
)
== 51
== 38
)
assert (
len(
@@ -265,7 +265,7 @@ async def test_update_device(
device_registry, mock_config_entry.entry_id
)
)
== 10
== 8
)
# Add a 2nd Tom/Floor
@@ -289,7 +289,7 @@ async def test_update_device(
entity_registry, mock_config_entry.entry_id
)
)
== 58
== 45
)
assert (
len(
@@ -297,7 +297,7 @@ async def test_update_device(
device_registry, mock_config_entry.entry_id
)
)
== 11
== 9
)
item_list: list[str] = []
for device_entry in list(device_registry.devices.values()):
@@ -320,7 +320,7 @@ async def test_update_device(
entity_registry, mock_config_entry.entry_id
)
)
== 51
== 38
)
assert (
len(
@@ -328,7 +328,7 @@ async def test_update_device(
device_registry, mock_config_entry.entry_id
)
)
== 10
== 8
)
item_list: list[str] = []
for device_entry in list(device_registry.devices.values()):

View File

@@ -5,11 +5,6 @@ from unittest.mock import MagicMock
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.plugwise.const import (
SELECT_REGULATION_MODE,
SELECT_SCHEDULE,
SELECT_ZONE_PROFILE,
)
from homeassistant.components.select import (
ATTR_OPTION,
DOMAIN as SELECT_DOMAIN,
@@ -50,9 +45,10 @@ async def test_adam_change_select_entity(
},
blocking=True,
)
assert mock_smile_adam.set_select.call_count == 1
mock_smile_adam.set_select.assert_called_with(
SELECT_SCHEDULE,
"select_schedule",
"c50f167537524366a5af7aa3942feb1e",
"Badkamer Schema",
"on",
@@ -96,39 +92,13 @@ async def test_adam_select_regulation_mode(
)
assert mock_smile_adam_heat_cool.set_select.call_count == 1
mock_smile_adam_heat_cool.set_select.assert_called_with(
SELECT_REGULATION_MODE,
"select_regulation_mode",
"bc93488efab249e5bc54fd7e175a6f91",
"heating",
"on",
)
@pytest.mark.parametrize("chosen_env", ["m_adam_heating"], indirect=True)
@pytest.mark.parametrize("cooling_present", [True], indirect=True)
async def test_adam_select_zone_profile(
hass: HomeAssistant,
mock_smile_adam_heat_cool: MagicMock,
init_integration: MockConfigEntry,
) -> None:
"""Test changing the zone_profile select."""
await hass.services.async_call(
SELECT_DOMAIN,
SERVICE_SELECT_OPTION,
{
ATTR_ENTITY_ID: "select.living_room_zone_profile",
ATTR_OPTION: "passive",
},
blocking=True,
)
assert mock_smile_adam_heat_cool.set_select.call_count == 1
mock_smile_adam_heat_cool.set_select.assert_called_with(
SELECT_ZONE_PROFILE,
"f2bf9048bef64cc5b6d5110154e33c81",
"passive",
"on",
)
async def test_legacy_anna_select_entities(
hass: HomeAssistant,
mock_smile_legacy_anna: MagicMock,

View File

@@ -13,13 +13,11 @@ from homeassistant.helpers import entity_registry as er
from tests.common import MockConfigEntry, snapshot_platform
@pytest.mark.parametrize("chosen_env", ["m_adam_heating"], indirect=True)
@pytest.mark.parametrize("cooling_present", [False], indirect=True)
@pytest.mark.parametrize("platforms", [(SENSOR_DOMAIN,)])
@pytest.mark.usefixtures("entity_registry_enabled_by_default")
async def test_adam_sensor_snapshot(
hass: HomeAssistant,
mock_smile_adam_heat_cool: MagicMock,
mock_smile_adam: MagicMock,
snapshot: SnapshotAssertion,
entity_registry: er.EntityRegistry,
setup_platform: MockConfigEntry,

View File

@@ -1,893 +0,0 @@
"""Models for SQLAlchemy.
This file contains the model definitions for schema version 51.
It is used to test the schema migration logic.
"""
from __future__ import annotations
from collections.abc import Callable
from datetime import datetime, timedelta
import logging
import time
from typing import Any, Final, Protocol, Self
import ciso8601
from fnv_hash_fast import fnv1a_32
from sqlalchemy import (
CHAR,
JSON,
BigInteger,
Boolean,
ColumnElement,
DateTime,
Float,
ForeignKey,
Identity,
Index,
Integer,
LargeBinary,
SmallInteger,
String,
Text,
case,
type_coerce,
)
from sqlalchemy.dialects import mysql, oracle, postgresql, sqlite
from sqlalchemy.engine.interfaces import Dialect
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.orm import DeclarativeBase, Mapped, aliased, mapped_column, relationship
from sqlalchemy.types import TypeDecorator
from homeassistant.components.recorder.const import (
ALL_DOMAIN_EXCLUDE_ATTRS,
SupportedDialect,
)
from homeassistant.components.recorder.models import (
StatisticData,
StatisticDataTimestamp,
StatisticMeanType,
StatisticMetaData,
datetime_to_timestamp_or_none,
process_timestamp,
ulid_to_bytes_or_none,
uuid_hex_to_bytes_or_none,
)
from homeassistant.components.sensor import ATTR_STATE_CLASS
from homeassistant.const import (
ATTR_DEVICE_CLASS,
ATTR_FRIENDLY_NAME,
ATTR_UNIT_OF_MEASUREMENT,
MATCH_ALL,
MAX_LENGTH_EVENT_EVENT_TYPE,
MAX_LENGTH_STATE_ENTITY_ID,
MAX_LENGTH_STATE_STATE,
)
from homeassistant.core import Event, EventStateChangedData
from homeassistant.helpers.json import JSON_DUMP, json_bytes, json_bytes_strip_null
from homeassistant.util import dt as dt_util
# SQLAlchemy Schema
class Base(DeclarativeBase):
    """Base class for tables (current schema v51 layout)."""
class LegacyBase(DeclarativeBase):
    """Base class for tables, used for schema migration.

    Separate registry so legacy variants of the same table name can coexist
    with the current models during migration tests.
    """
# Schema version this frozen module replicates; migration tests create a
# database at this version before upgrading it.
SCHEMA_VERSION = 51

_LOGGER = logging.getLogger(__name__)

# Canonical table names.
TABLE_EVENTS = "events"
TABLE_EVENT_DATA = "event_data"
TABLE_EVENT_TYPES = "event_types"
TABLE_STATES = "states"
TABLE_STATE_ATTRIBUTES = "state_attributes"
TABLE_STATES_META = "states_meta"
TABLE_RECORDER_RUNS = "recorder_runs"
TABLE_SCHEMA_CHANGES = "schema_changes"
TABLE_STATISTICS = "statistics"
TABLE_STATISTICS_META = "statistics_meta"
TABLE_STATISTICS_RUNS = "statistics_runs"
TABLE_STATISTICS_SHORT_TERM = "statistics_short_term"
TABLE_MIGRATION_CHANGES = "migration_changes"

STATISTICS_TABLES = ("statistics", "statistics_short_term")

# Payloads larger than these limits are dropped (replaced with "{}") to
# protect database performance; see EventData / StateAttributes below.
MAX_STATE_ATTRS_BYTES = 16384
MAX_EVENT_DATA_BYTES = 32768

PSQL_DIALECT = SupportedDialect.POSTGRESQL

ALL_TABLES = [
    TABLE_STATES,
    TABLE_STATE_ATTRIBUTES,
    TABLE_EVENTS,
    TABLE_EVENT_DATA,
    TABLE_EVENT_TYPES,
    TABLE_RECORDER_RUNS,
    TABLE_SCHEMA_CHANGES,
    TABLE_MIGRATION_CHANGES,
    TABLE_STATES_META,
    TABLE_STATISTICS,
    TABLE_STATISTICS_META,
    TABLE_STATISTICS_RUNS,
    TABLE_STATISTICS_SHORT_TERM,
]

TABLES_TO_CHECK = [
    TABLE_STATES,
    TABLE_EVENTS,
    TABLE_RECORDER_RUNS,
    TABLE_SCHEMA_CHANGES,
]

# Index names referenced by the models and by migration code.
LAST_UPDATED_INDEX_TS = "ix_states_last_updated_ts"
METADATA_ID_LAST_UPDATED_INDEX_TS = "ix_states_metadata_id_last_updated_ts"
EVENTS_CONTEXT_ID_BIN_INDEX = "ix_events_context_id_bin"
STATES_CONTEXT_ID_BIN_INDEX = "ix_states_context_id_bin"
LEGACY_STATES_EVENT_ID_INDEX = "ix_states_event_id"
LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX = "ix_states_entity_id_last_updated_ts"

LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID: Final = 36
CONTEXT_ID_BIN_MAX_LENGTH = 16

MYSQL_COLLATE = "utf8mb4_unicode_ci"
MYSQL_DEFAULT_CHARSET = "utf8mb4"
MYSQL_ENGINE = "InnoDB"

# Shared __table_args__ applied to every table (MySQL/MariaDB charset,
# collation and engine settings).
_DEFAULT_TABLE_ARGS = {
    "mysql_default_charset": MYSQL_DEFAULT_CHARSET,
    "mysql_collate": MYSQL_COLLATE,
    "mysql_engine": MYSQL_ENGINE,
    "mariadb_default_charset": MYSQL_DEFAULT_CHARSET,
    "mariadb_collate": MYSQL_COLLATE,
    "mariadb_engine": MYSQL_ENGINE,
}

# Attributes still recorded even when MATCH_ALL excludes everything else
# (see StateAttributes.shared_attrs_bytes_from_event).
_MATCH_ALL_KEEP = {
    ATTR_DEVICE_CLASS,
    ATTR_STATE_CLASS,
    ATTR_UNIT_OF_MEASUREMENT,
    ATTR_FRIENDLY_NAME,
}
class UnusedDateTime(DateTime):
    """An unused column type that behaves like a datetime.

    Compiled to CHAR(0) on mysql/mariadb/sqlite (see compile_char_zero) so
    retired datetime columns take minimal storage.
    """
class Unused(CHAR):
    """An unused column type that behaves like a string.

    Compiled to CHAR(0) on mysql/mariadb/sqlite and CHAR(1) on postgresql
    (see compile_char_zero / compile_char_one).
    """
@compiles(UnusedDateTime, "mysql", "mariadb", "sqlite")
@compiles(Unused, "mysql", "mariadb", "sqlite")
def compile_char_zero(type_: TypeDecorator, compiler: Any, **kw: Any) -> str:
    """Compile UnusedDateTime and Unused as CHAR(0) on mysql, mariadb, and sqlite."""
    return "CHAR(0)"  # Uses 1 byte on MySQL (no change on sqlite)
@compiles(Unused, "postgresql")
def compile_char_one(type_: TypeDecorator, compiler: Any, **kw: Any) -> str:
"""Compile Unused as CHAR(1) on postgresql."""
return "CHAR(1)" # Uses 1 byte
class FAST_PYSQLITE_DATETIME(sqlite.DATETIME):
    """Use ciso8601 to parse datetimes instead of sqlalchemy built-in regex."""

    def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None:
        """Offload the datetime parsing to ciso8601 (NULL passes through as None)."""
        return lambda value: None if value is None else ciso8601.parse_datetime(value)
class NativeLargeBinary(LargeBinary):
    """A faster version of LargeBinary for engines that support python bytes natively."""

    def result_processor(self, dialect: Dialect, coltype: Any) -> Callable | None:
        """No conversion needed for engines that support native bytes."""
        return None
# Although all integers are same in SQLite, it does not allow an identity column to be BIGINT
# https://sqlite.org/forum/info/2dfa968a702e1506e885cb06d92157d492108b22bf39459506ab9f7125bca7fd
ID_TYPE = BigInteger().with_variant(sqlite.INTEGER, "sqlite")

# For MariaDB and MySQL we can use an unsigned integer type since it will fit 2**32
# for sqlite and postgresql we use a bigint
UINT_32_TYPE = BigInteger().with_variant(
    mysql.INTEGER(unsigned=True),  # type: ignore[no-untyped-call]
    "mysql",
    "mariadb",
)

# Text on most engines, native JSON/JSONB on postgresql; used when casting
# stored JSON text for querying (see EVENT_DATA_JSON et al. below).
JSON_VARIANT_CAST = Text().with_variant(
    postgresql.JSON(none_as_null=True),
    "postgresql",
)
JSONB_VARIANT_CAST = Text().with_variant(
    postgresql.JSONB(none_as_null=True),
    "postgresql",
)

DATETIME_TYPE = (
    DateTime(timezone=True)
    .with_variant(mysql.DATETIME(timezone=True, fsp=6), "mysql", "mariadb")  # type: ignore[no-untyped-call]
    .with_variant(FAST_PYSQLITE_DATETIME(), "sqlite")  # type: ignore[no-untyped-call]
)
DOUBLE_TYPE = (
    Float()
    .with_variant(mysql.DOUBLE(asdecimal=False), "mysql", "mariadb")  # type: ignore[no-untyped-call]
    .with_variant(oracle.DOUBLE_PRECISION(), "oracle")
    .with_variant(postgresql.DOUBLE_PRECISION(), "postgresql")
)

# Placeholder types for retired columns that are kept only for schema
# compatibility; they are always written as None (see the from_event helpers).
UNUSED_LEGACY_COLUMN = Unused(0)
UNUSED_LEGACY_DATETIME_COLUMN = UnusedDateTime(timezone=True)
UNUSED_LEGACY_INTEGER_COLUMN = SmallInteger()

DOUBLE_PRECISION_TYPE_SQL = "DOUBLE PRECISION"
BIG_INTEGER_SQL = "BIGINT"

# 16-byte binary column for ULID/UUID context ids.
CONTEXT_BINARY_TYPE = LargeBinary(CONTEXT_ID_BIN_MAX_LENGTH).with_variant(
    NativeLargeBinary(CONTEXT_ID_BIN_MAX_LENGTH), "mysql", "mariadb", "sqlite"
)
TIMESTAMP_TYPE = DOUBLE_TYPE
class _LiteralProcessorType(Protocol):
    """Callable that renders a Python value as a SQL literal string."""

    def __call__(self, value: Any) -> str: ...
class JSONLiteral(JSON):
    """Teach SA how to literalize json."""

    def literal_processor(self, dialect: Dialect) -> _LiteralProcessorType:
        """Processor to convert a value to JSON."""

        def process(value: Any) -> str:
            """Dump json."""
            return JSON_DUMP(value)

        return process
class Events(Base):
    """Event history data."""

    __table_args__ = (
        # Used for fetching events at a specific time
        # see logbook
        Index(
            "ix_events_event_type_id_time_fired_ts", "event_type_id", "time_fired_ts"
        ),
        Index(
            EVENTS_CONTEXT_ID_BIN_INDEX,
            "context_id_bin",
            mysql_length=CONTEXT_ID_BIN_MAX_LENGTH,
            mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH,
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_EVENTS

    event_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # Retired columns kept for schema compatibility — from_event always
    # writes None; live data goes to the *_ts / *_bin / *_id columns.
    event_type: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    event_data: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    origin: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    origin_idx: Mapped[int | None] = mapped_column(SmallInteger)
    time_fired: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    time_fired_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True)
    context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    data_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("event_data.data_id"), index=True
    )
    context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    event_type_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("event_types.event_type_id")
    )
    event_data_rel: Mapped[EventData | None] = relationship("EventData")
    event_type_rel: Mapped[EventTypes | None] = relationship("EventTypes")

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.Events("
            f"id={self.event_id}, event_type_id='{self.event_type_id}', "
            f"origin_idx='{self.origin_idx}', time_fired='{self._time_fired_isotime}'"
            f", data_id={self.data_id})>"
        )

    @property
    def _time_fired_isotime(self) -> str | None:
        """Return time_fired as an isotime string.

        Prefers the timestamp column; falls back to the legacy datetime
        column for rows written before the *_ts migration.
        """
        date_time: datetime | None
        if self.time_fired_ts is not None:
            date_time = dt_util.utc_from_timestamp(self.time_fired_ts)
        else:
            date_time = process_timestamp(self.time_fired)
        if date_time is None:
            return None
        return date_time.isoformat(sep=" ", timespec="seconds")

    @staticmethod
    def from_event(event: Event) -> Events:
        """Create an event database object from a native event.

        Legacy columns are explicitly set to None; context ids are stored
        in their binary (ULID/UUID) forms.
        """
        context = event.context
        return Events(
            event_type=None,
            event_data=None,
            origin_idx=event.origin.idx,
            time_fired=None,
            time_fired_ts=event.time_fired_timestamp,
            context_id=None,
            context_id_bin=ulid_to_bytes_or_none(context.id),
            context_user_id=None,
            context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id),
            context_parent_id=None,
            context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
        )
class LegacyEvents(LegacyBase):
    """Event history data with event_id, used for schema migration."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_EVENTS

    event_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # Legacy string context id (36-char ULID/UUID text form).
    context_id: Mapped[str | None] = mapped_column(
        String(LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID), index=True
    )
class EventData(Base):
    """Event data history (deduplicated JSON payloads shared across events)."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_EVENT_DATA

    data_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # 32-bit FNV-1a hash of shared_data, indexed for fast dedup lookups.
    hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True)
    # Note that this is not named attributes to avoid confusion with the states table
    shared_data: Mapped[str | None] = mapped_column(
        Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb")
    )

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.EventData("
            f"id={self.data_id}, hash='{self.hash}', data='{self.shared_data}'"
            ")>"
        )

    @staticmethod
    def shared_data_bytes_from_event(
        event: Event, dialect: SupportedDialect | None
    ) -> bytes:
        """Create shared_data from an event.

        PostgreSQL cannot store NUL in JSON text, so nulls are stripped
        there. Oversized payloads are replaced with b"{}" and a warning.
        """
        encoder = json_bytes_strip_null if dialect == PSQL_DIALECT else json_bytes
        bytes_result = encoder(event.data)
        if len(bytes_result) > MAX_EVENT_DATA_BYTES:
            _LOGGER.warning(
                "Event data for %s exceed maximum size of %s bytes. "
                "This can cause database performance issues; Event data "
                "will not be stored",
                event.event_type,
                MAX_EVENT_DATA_BYTES,
            )
            return b"{}"
        return bytes_result

    @staticmethod
    def hash_shared_data_bytes(shared_data_bytes: bytes) -> int:
        """Return the hash of json encoded shared data."""
        return fnv1a_32(shared_data_bytes)
class EventTypes(Base):
    """Event type history (normalized event-type strings)."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_EVENT_TYPES

    event_type_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    event_type: Mapped[str | None] = mapped_column(
        String(MAX_LENGTH_EVENT_EVENT_TYPE), index=True, unique=True
    )

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.EventTypes("
            f"id={self.event_type_id}, event_type='{self.event_type}'"
            ")>"
        )
class States(Base):
    """State change history."""

    __table_args__ = (
        # Used for fetching the state of entities at a specific time
        # (get_states in history.py)
        Index(METADATA_ID_LAST_UPDATED_INDEX_TS, "metadata_id", "last_updated_ts"),
        Index(
            STATES_CONTEXT_ID_BIN_INDEX,
            "context_id_bin",
            mysql_length=CONTEXT_ID_BIN_MAX_LENGTH,
            mariadb_length=CONTEXT_ID_BIN_MAX_LENGTH,
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_STATES

    state_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # Retired columns kept for schema compatibility — from_event always
    # writes None; live data goes to the *_ts / *_bin / metadata_id columns.
    entity_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    state: Mapped[str | None] = mapped_column(String(MAX_LENGTH_STATE_STATE))
    attributes: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    event_id: Mapped[int | None] = mapped_column(UNUSED_LEGACY_INTEGER_COLUMN)
    last_changed: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    # last_changed_ts / last_reported_ts stay NULL when equal to
    # last_updated_ts (see from_event), saving storage.
    last_changed_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE)
    last_reported_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE)
    last_updated: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    last_updated_ts: Mapped[float | None] = mapped_column(
        TIMESTAMP_TYPE, default=time.time, index=True
    )
    old_state_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("states.state_id"), index=True
    )
    attributes_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("state_attributes.attributes_id"), index=True
    )
    context_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_user_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    context_parent_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    origin_idx: Mapped[int | None] = mapped_column(
        SmallInteger
    )  # 0 is local, 1 is remote
    old_state: Mapped[States | None] = relationship("States", remote_side=[state_id])
    state_attributes: Mapped[StateAttributes | None] = relationship("StateAttributes")
    context_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_user_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    context_parent_id_bin: Mapped[bytes | None] = mapped_column(CONTEXT_BINARY_TYPE)
    metadata_id: Mapped[int | None] = mapped_column(
        ID_TYPE, ForeignKey("states_meta.metadata_id")
    )
    states_meta_rel: Mapped[StatesMeta | None] = relationship("StatesMeta")

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.States(id={self.state_id}, entity_id='{self.entity_id}'"
            f" metadata_id={self.metadata_id},"
            f" state='{self.state}', event_id='{self.event_id}',"
            f" last_updated='{self._last_updated_isotime}',"
            f" old_state_id={self.old_state_id}, attributes_id={self.attributes_id})>"
        )

    @property
    def _last_updated_isotime(self) -> str | None:
        """Return last_updated as an isotime string.

        Prefers the timestamp column; falls back to the legacy datetime
        column for rows written before the *_ts migration.
        """
        date_time: datetime | None
        if self.last_updated_ts is not None:
            date_time = dt_util.utc_from_timestamp(self.last_updated_ts)
        else:
            date_time = process_timestamp(self.last_updated)
        if date_time is None:
            return None
        return date_time.isoformat(sep=" ", timespec="seconds")

    @staticmethod
    def from_event(event: Event[EventStateChangedData]) -> States:
        """Create object from a state_changed event."""
        state = event.data["new_state"]
        # None state means the state was removed from the state machine
        if state is None:
            state_value = ""
            last_updated_ts = event.time_fired_timestamp
            last_changed_ts = None
            last_reported_ts = None
        else:
            state_value = state.state
            last_updated_ts = state.last_updated_timestamp
            if state.last_updated == state.last_changed:
                last_changed_ts = None
            else:
                last_changed_ts = state.last_changed_timestamp
            if state.last_updated == state.last_reported:
                last_reported_ts = None
            else:
                last_reported_ts = state.last_reported_timestamp
        context = event.context
        return States(
            state=state_value,
            entity_id=None,
            attributes=None,
            context_id=None,
            context_id_bin=ulid_to_bytes_or_none(context.id),
            context_user_id=None,
            context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id),
            context_parent_id=None,
            context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id),
            origin_idx=event.origin.idx,
            last_updated=None,
            last_changed=None,
            last_updated_ts=last_updated_ts,
            last_changed_ts=last_changed_ts,
            last_reported_ts=last_reported_ts,
        )
class LegacyStates(LegacyBase):
    """State change history with entity_id, used for schema migration."""

    __table_args__ = (
        Index(
            LEGACY_STATES_ENTITY_ID_LAST_UPDATED_TS_INDEX,
            "entity_id",
            "last_updated_ts",
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_STATES

    state_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    entity_id: Mapped[str | None] = mapped_column(UNUSED_LEGACY_COLUMN)
    last_updated_ts: Mapped[float | None] = mapped_column(
        TIMESTAMP_TYPE, default=time.time, index=True
    )
    # Legacy string context id (36-char ULID/UUID text form).
    context_id: Mapped[str | None] = mapped_column(
        String(LEGACY_MAX_LENGTH_EVENT_CONTEXT_ID), index=True
    )
class StateAttributes(Base):
    """State attribute change history (deduplicated JSON payloads)."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_STATE_ATTRIBUTES

    attributes_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # 32-bit FNV-1a hash of shared_attrs, indexed for fast dedup lookups.
    hash: Mapped[int | None] = mapped_column(UINT_32_TYPE, index=True)
    # Note that this is not named attributes to avoid confusion with the states table
    shared_attrs: Mapped[str | None] = mapped_column(
        Text().with_variant(mysql.LONGTEXT, "mysql", "mariadb")
    )

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.StateAttributes(id={self.attributes_id}, hash='{self.hash}',"
            f" attributes='{self.shared_attrs}')>"
        )

    @staticmethod
    def shared_attrs_bytes_from_event(
        event: Event[EventStateChangedData],
        dialect: SupportedDialect | None,
    ) -> bytes:
        """Create shared_attrs from a state_changed event.

        Filters out globally-excluded attributes plus the entity's declared
        unrecorded attributes; MATCH_ALL excludes everything except the
        keys in _MATCH_ALL_KEEP. Oversized results become b"{}".
        """
        # None state means the state was removed from the state machine
        if (state := event.data["new_state"]) is None:
            return b"{}"
        if state_info := state.state_info:
            unrecorded_attributes = state_info["unrecorded_attributes"]
            exclude_attrs = {
                *ALL_DOMAIN_EXCLUDE_ATTRS,
                *unrecorded_attributes,
            }
            if MATCH_ALL in unrecorded_attributes:
                # Don't exclude device class, state class, unit of measurement
                # or friendly name when using the MATCH_ALL exclude constant
                exclude_attrs.update(state.attributes)
                exclude_attrs -= _MATCH_ALL_KEEP
        else:
            exclude_attrs = ALL_DOMAIN_EXCLUDE_ATTRS
        # PostgreSQL cannot store NUL characters in JSON text.
        encoder = json_bytes_strip_null if dialect == PSQL_DIALECT else json_bytes
        bytes_result = encoder(
            {k: v for k, v in state.attributes.items() if k not in exclude_attrs}
        )
        if len(bytes_result) > MAX_STATE_ATTRS_BYTES:
            _LOGGER.warning(
                "State attributes for %s exceed maximum size of %s bytes. "
                "This can cause database performance issues; Attributes "
                "will not be stored",
                state.entity_id,
                MAX_STATE_ATTRS_BYTES,
            )
            return b"{}"
        return bytes_result

    @staticmethod
    def hash_shared_attrs_bytes(shared_attrs_bytes: bytes) -> int:
        """Return the hash of json encoded shared attributes."""
        return fnv1a_32(shared_attrs_bytes)
class StatesMeta(Base):
    """Metadata for states (normalized entity_id strings)."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_STATES_META

    metadata_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    entity_id: Mapped[str | None] = mapped_column(
        String(MAX_LENGTH_STATE_ENTITY_ID), index=True, unique=True
    )

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.StatesMeta("
            f"id={self.metadata_id}, entity_id='{self.entity_id}'"
            ")>"
        )
class StatisticsBase:
    """Statistics base class.

    Mixin with the columns shared by the long-term (Statistics) and
    short-term (StatisticsShortTerm) tables; subclasses set `duration`.
    """

    id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    # Legacy datetime columns kept for schema compatibility — always
    # written as None by the from_stats* constructors.
    created: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    created_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, default=time.time)
    metadata_id: Mapped[int | None] = mapped_column(
        ID_TYPE,
        ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"),
    )
    start: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    start_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE, index=True)
    mean: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    mean_weight: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    min: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    max: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    last_reset: Mapped[datetime | None] = mapped_column(UNUSED_LEGACY_DATETIME_COLUMN)
    last_reset_ts: Mapped[float | None] = mapped_column(TIMESTAMP_TYPE)
    state: Mapped[float | None] = mapped_column(DOUBLE_TYPE)
    sum: Mapped[float | None] = mapped_column(DOUBLE_TYPE)

    # Statistics period length; set by each concrete subclass.
    duration: timedelta

    @classmethod
    def from_stats(
        cls, metadata_id: int, stats: StatisticData, now_timestamp: float | None = None
    ) -> Self:
        """Create object from a statistics with datetime objects."""
        return cls(  # type: ignore[call-arg]
            metadata_id=metadata_id,
            created=None,
            created_ts=now_timestamp or time.time(),
            start=None,
            start_ts=stats["start"].timestamp(),
            mean=stats.get("mean"),
            mean_weight=stats.get("mean_weight"),
            min=stats.get("min"),
            max=stats.get("max"),
            last_reset=None,
            last_reset_ts=datetime_to_timestamp_or_none(stats.get("last_reset")),
            state=stats.get("state"),
            sum=stats.get("sum"),
        )

    @classmethod
    def from_stats_ts(
        cls,
        metadata_id: int,
        stats: StatisticDataTimestamp,
        now_timestamp: float | None = None,
    ) -> Self:
        """Create object from a statistics with timestamps."""
        return cls(  # type: ignore[call-arg]
            metadata_id=metadata_id,
            created=None,
            created_ts=now_timestamp or time.time(),
            start=None,
            start_ts=stats["start_ts"],
            mean=stats.get("mean"),
            mean_weight=stats.get("mean_weight"),
            min=stats.get("min"),
            max=stats.get("max"),
            last_reset=None,
            last_reset_ts=stats.get("last_reset_ts"),
            state=stats.get("state"),
            sum=stats.get("sum"),
        )
class Statistics(Base, StatisticsBase):
    """Long term statistics (one row per hour per metadata_id)."""

    duration = timedelta(hours=1)

    __table_args__ = (
        # Used for fetching statistics for a certain entity at a specific time
        Index(
            "ix_statistics_statistic_id_start_ts",
            "metadata_id",
            "start_ts",
            unique=True,
        ),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_STATISTICS
class _StatisticsShortTerm(StatisticsBase):
    """Short term statistics (5-minute periods); shared by the current and
    legacy table variants below."""

    duration = timedelta(minutes=5)

    __tablename__ = TABLE_STATISTICS_SHORT_TERM
class StatisticsShortTerm(Base, _StatisticsShortTerm):
    """Short term statistics."""

    __table_args__ = (
        # Used for fetching statistics for a certain entity at a specific time
        Index(
            "ix_statistics_short_term_statistic_id_start_ts",
            "metadata_id",
            "start_ts",
            unique=True,
        ),
        _DEFAULT_TABLE_ARGS,
    )
class LegacyStatisticsShortTerm(LegacyBase, _StatisticsShortTerm):
    """Short term statistics with 32-bit index, used for schema migration."""

    __table_args__ = (
        # Used for fetching statistics for a certain entity at a specific time
        Index(
            "ix_statistics_short_term_statistic_id_start_ts",
            "metadata_id",
            "start_ts",
            unique=True,
        ),
        _DEFAULT_TABLE_ARGS,
    )

    # Overrides the mixin's BigInteger FK with a 32-bit Integer.
    metadata_id: Mapped[int | None] = mapped_column(
        Integer,
        ForeignKey(f"{TABLE_STATISTICS_META}.id", ondelete="CASCADE"),
        use_existing_column=True,
    )
class _StatisticsMeta:
    """Statistics meta data (mixin shared by current and legacy variants)."""

    __table_args__ = (_DEFAULT_TABLE_ARGS,)
    __tablename__ = TABLE_STATISTICS_META

    id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    statistic_id: Mapped[str | None] = mapped_column(
        String(255), index=True, unique=True
    )
    source: Mapped[str | None] = mapped_column(String(32))
    unit_of_measurement: Mapped[str | None] = mapped_column(String(255))
    unit_class: Mapped[str | None] = mapped_column(String(255))
    has_mean: Mapped[bool | None] = mapped_column(Boolean)
    has_sum: Mapped[bool | None] = mapped_column(Boolean)
    name: Mapped[str | None] = mapped_column(String(255))
    mean_type: Mapped[StatisticMeanType] = mapped_column(
        SmallInteger, nullable=False, default=StatisticMeanType.NONE.value
    )  # See StatisticMeanType

    @staticmethod
    def from_meta(meta: StatisticMetaData) -> StatisticsMeta:
        """Create object from meta data."""
        return StatisticsMeta(**meta)
class StatisticsMeta(Base, _StatisticsMeta):
    """Statistics meta data (current schema variant)."""
class LegacyStatisticsMeta(LegacyBase, _StatisticsMeta):
    """Statistics meta data with 32-bit index, used for schema migration."""

    # Overrides the mixin's BigInteger primary key with a 32-bit Integer.
    id: Mapped[int] = mapped_column(
        Integer,
        Identity(),
        primary_key=True,
        use_existing_column=True,
    )
class RecorderRuns(Base):
    """Representation of recorder run (one row per recorder start/stop)."""

    __table_args__ = (
        Index("ix_recorder_runs_start_end", "start", "end"),
        _DEFAULT_TABLE_ARGS,
    )
    __tablename__ = TABLE_RECORDER_RUNS

    run_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    start: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)
    # NULL end means the run is still open (or was never closed).
    end: Mapped[datetime | None] = mapped_column(DATETIME_TYPE)
    closed_incorrect: Mapped[bool] = mapped_column(Boolean, default=False)
    created: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        end = (
            f"'{self.end.isoformat(sep=' ', timespec='seconds')}'" if self.end else None
        )
        return (
            f"<recorder.RecorderRuns(id={self.run_id},"
            f" start='{self.start.isoformat(sep=' ', timespec='seconds')}', end={end},"
            f" closed_incorrect={self.closed_incorrect},"
            f" created='{self.created.isoformat(sep=' ', timespec='seconds')}')>"
        )
class MigrationChanges(Base):
    """Representation of migration changes (per-migration applied version)."""

    __tablename__ = TABLE_MIGRATION_CHANGES
    __table_args__ = (_DEFAULT_TABLE_ARGS,)

    migration_id: Mapped[str] = mapped_column(String(255), primary_key=True)
    version: Mapped[int] = mapped_column(SmallInteger)
class SchemaChanges(Base):
    """Representation of schema version changes."""

    __tablename__ = TABLE_SCHEMA_CHANGES
    __table_args__ = (_DEFAULT_TABLE_ARGS,)

    change_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    schema_version: Mapped[int | None] = mapped_column(Integer)
    changed: Mapped[datetime] = mapped_column(DATETIME_TYPE, default=dt_util.utcnow)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            "<recorder.SchemaChanges("
            f"id={self.change_id}, schema_version={self.schema_version}, "
            f"changed='{self.changed.isoformat(sep=' ', timespec='seconds')}'"
            ")>"
        )
class StatisticsRuns(Base):
    """Representation of statistics run (one row per completed stats period)."""

    __tablename__ = TABLE_STATISTICS_RUNS
    __table_args__ = (_DEFAULT_TABLE_ARGS,)

    run_id: Mapped[int] = mapped_column(ID_TYPE, Identity(), primary_key=True)
    start: Mapped[datetime] = mapped_column(DATETIME_TYPE, index=True)

    def __repr__(self) -> str:
        """Return string representation of instance for debugging."""
        return (
            f"<recorder.StatisticsRuns(id={self.run_id},"
            f" start='{self.start.isoformat(sep=' ', timespec='seconds')}', )>"
        )
# Reusable query expressions: cast stored JSON text to queryable JSON so
# keys can be extracted in SQL (e.g. EVENT_DATA_JSON["entity_id"]).
EVENT_DATA_JSON = type_coerce(
    EventData.shared_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
)
OLD_FORMAT_EVENT_DATA_JSON = type_coerce(
    Events.event_data.cast(JSONB_VARIANT_CAST), JSONLiteral(none_as_null=True)
)

SHARED_ATTRS_JSON = type_coerce(
    StateAttributes.shared_attrs.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
)
OLD_FORMAT_ATTRS_JSON = type_coerce(
    States.attributes.cast(JSON_VARIANT_CAST), JSON(none_as_null=True)
)

ENTITY_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["entity_id"]
OLD_ENTITY_ID_IN_EVENT: ColumnElement = OLD_FORMAT_EVENT_DATA_JSON["entity_id"]
DEVICE_ID_IN_EVENT: ColumnElement = EVENT_DATA_JSON["device_id"]

# Alias for self-joining states on old_state_id.
OLD_STATE = aliased(States, name="old_state")

# Prefer the deduplicated shared column; fall back to the legacy inline
# column for rows written before the dedup migration.
SHARED_ATTR_OR_LEGACY_ATTRIBUTES = case(
    (StateAttributes.shared_attrs.is_(None), States.attributes),
    else_=StateAttributes.shared_attrs,
).label("attributes")
SHARED_DATA_OR_LEGACY_EVENT_DATA = case(
    (EventData.shared_data.is_(None), Events.event_data), else_=EventData.shared_data
).label("event_data")

View File

@@ -1,4 +1,4 @@
"""Test for migration from DB schema version 50."""
"""The tests for the recorder filter matching the EntityFilter component."""
import importlib
import sys
@@ -134,26 +134,6 @@ async def test_migrate_statistics_meta(
name="Test 3",
mean_type=StatisticMeanType.NONE,
),
# Wrong case
old_db_schema.StatisticsMeta(
statistic_id="sensor.test4",
source="recorder",
unit_of_measurement="l/min",
has_mean=None,
has_sum=True,
name="Test 4",
mean_type=StatisticMeanType.NONE,
),
# Wrong encoding
old_db_schema.StatisticsMeta(
statistic_id="sensor.test5",
source="recorder",
unit_of_measurement="",
has_mean=None,
has_sum=True,
name="Test 5",
mean_type=StatisticMeanType.NONE,
),
)
)
@@ -271,28 +251,6 @@ async def test_migrate_statistics_meta(
"statistics_unit_of_measurement": "ppm",
"unit_class": "unitless",
},
{
"display_unit_of_measurement": "l/min",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 4",
"source": "recorder",
"statistic_id": "sensor.test4",
"statistics_unit_of_measurement": "l/min",
"unit_class": None,
},
{
"display_unit_of_measurement": "",
"has_mean": False,
"has_sum": True,
"mean_type": StatisticMeanType.NONE,
"name": "Test 5",
"source": "recorder",
"statistic_id": "sensor.test5",
"statistics_unit_of_measurement": "",
"unit_class": None,
},
]
)
assert post_migration_metadata_db == {
@@ -329,27 +287,5 @@ async def test_migrate_statistics_meta(
"unit_class": "unitless",
"unit_of_measurement": "ppm",
},
"sensor.test4": {
"has_mean": None,
"has_sum": True,
"id": 4,
"mean_type": 0,
"name": "Test 4",
"source": "recorder",
"statistic_id": "sensor.test4",
"unit_class": None,
"unit_of_measurement": "l/min",
},
"sensor.test5": {
"has_mean": None,
"has_sum": True,
"id": 5,
"mean_type": 0,
"name": "Test 5",
"source": "recorder",
"statistic_id": "sensor.test5",
"unit_class": None,
"unit_of_measurement": "",
},
}
assert post_migration_metadata_api == unordered(pre_migration_metadata_api)

View File

@@ -1,456 +0,0 @@
"""Test for migration from DB schema version 51."""
import importlib
import sys
import threading
from unittest.mock import patch
import pytest
from pytest_unordered import unordered
from sqlalchemy import create_engine, inspect
from sqlalchemy.orm import Session
from homeassistant.components import recorder
from homeassistant.components.recorder import core, migration, statistics
from homeassistant.components.recorder.const import UNIT_CLASS_SCHEMA_VERSION
from homeassistant.components.recorder.db_schema import StatisticsMeta
from homeassistant.components.recorder.models import StatisticMeanType
from homeassistant.components.recorder.util import session_scope
from homeassistant.core import HomeAssistant
from .common import (
async_recorder_block_till_done,
async_wait_recording_done,
get_patched_live_version,
)
from .conftest import instrument_migration
from tests.common import async_test_home_assistant
from tests.typing import RecorderInstanceContextManager
CREATE_ENGINE_TARGET = "homeassistant.components.recorder.core.create_engine"
SCHEMA_MODULE_51 = "tests.components.recorder.db_schema_51"
@pytest.fixture
async def mock_recorder_before_hass(
    async_test_recorder: RecorderInstanceContextManager,
) -> None:
    """Set up recorder.

    Intentionally empty: requesting ``async_test_recorder`` here only forces
    the recorder fixture to be initialized before the test ``hass`` instance.
    """
async def _async_wait_migration_done(hass: HomeAssistant) -> None:
    """Wait for the migration to be done.

    Blocks first on the recorder instance's own task queue, then on the
    recorder test helper, so both the migration and any queued commits
    have completed when this returns.
    """
    # Drain the recorder's internal queue (includes migration tasks).
    await recorder.get_instance(hass).async_block_till_done()
    # Then wait for the recorder test harness to confirm it is idle.
    await async_recorder_block_till_done(hass)
def _create_engine_test(*args, **kwargs):
    """Test version of create_engine that initializes with old schema.

    This simulates an existing db with the old schema.

    Creates all tables from the schema-51 test module, then records a
    ``StatisticsRuns`` row and a ``SchemaChanges`` row claiming schema
    version 51 so the recorder treats the database as pre-existing and
    runs its migration path against it.
    """
    # Import (or re-use) the frozen copy of the v51 schema shipped with the tests.
    importlib.import_module(SCHEMA_MODULE_51)
    old_db_schema = sys.modules[SCHEMA_MODULE_51]
    engine = create_engine(*args, **kwargs)
    # Build the old-schema tables on the fresh engine.
    old_db_schema.Base.metadata.create_all(engine)
    with Session(engine) as session:
        session.add(
            recorder.db_schema.StatisticsRuns(start=statistics.get_start_time())
        )
        # Mark the database as being at the old schema version so migration triggers.
        session.add(
            recorder.db_schema.SchemaChanges(
                schema_version=old_db_schema.SCHEMA_VERSION
            )
        )
        session.commit()
    return engine
@pytest.fixture
def db_schema_51():
    """Fixture to initialize the db with the old schema.

    Patches the recorder package, the migration module, and the core table
    classes to the frozen schema-51 definitions, and swaps in
    ``_create_engine_test`` so any engine created during the test starts
    from an old-schema database. All patches are reverted on teardown.
    """
    importlib.import_module(SCHEMA_MODULE_51)
    old_db_schema = sys.modules[SCHEMA_MODULE_51]

    with (
        patch.object(recorder, "db_schema", old_db_schema),
        patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION),
        patch.object(
            migration,
            "LIVE_MIGRATION_MIN_SCHEMA_VERSION",
            get_patched_live_version(old_db_schema),
        ),
        # Skip the non-live (offline) data migration check entirely.
        patch.object(migration, "non_live_data_migration_needed", return_value=False),
        # Point the core table references at the old-schema ORM classes.
        patch.object(core, "StatesMeta", old_db_schema.StatesMeta),
        patch.object(core, "EventTypes", old_db_schema.EventTypes),
        patch.object(core, "EventData", old_db_schema.EventData),
        patch.object(core, "States", old_db_schema.States),
        patch.object(core, "Events", old_db_schema.Events),
        patch.object(core, "StateAttributes", old_db_schema.StateAttributes),
        patch(CREATE_ENGINE_TARGET, new=_create_engine_test),
    ):
        yield
@pytest.mark.parametrize(
    ("persistent_database", "expected_unit_class"),
    [
        (
            True,
            {
                # MariaDB/MySQL should correct unit class of sensor.test4 + sensor.test5
                "mysql": {
                    "sensor.test1": "energy",
                    "sensor.test2": "power",
                    "sensor.test3": "unitless",
                    "sensor.test4": None,
                    "sensor.test5": None,
                },
                # PostgreSQL is not modified by the migration
                "postgresql": {
                    "sensor.test1": "energy",
                    "sensor.test2": "power",
                    "sensor.test3": "unitless",
                    "sensor.test4": "volume_flow_rate",
                    "sensor.test5": "area",
                },
                # SQLite is not modified by the migration
                "sqlite": {
                    "sensor.test1": "energy",
                    "sensor.test2": "power",
                    "sensor.test3": "unitless",
                    "sensor.test4": "volume_flow_rate",
                    "sensor.test5": "area",
                },
            },
        ),
    ],
)
@pytest.mark.usefixtures("hass_storage")  # Prevent test hass from writing to storage
async def test_migrate_statistics_meta(
    async_test_recorder: RecorderInstanceContextManager,
    caplog: pytest.LogCaptureFixture,
    expected_unit_class: dict[str, dict[str, str | None]],
) -> None:
    """Test we can fix bad migration to version 51.

    Phase 1: create a database at the frozen v51 schema and seed five
    statistics_meta rows, some with intentionally wrong ``unit_class``
    values. Phase 2: restart on the current schema, stall the migration at
    the unit-class step to read the pre-migration API view, then let it
    finish and assert the DB rows and API results per database engine.

    NOTE(review): expected values differ by engine (see the parametrize
    table) — only MySQL/MariaDB is expected to correct the bad rows.
    """
    importlib.import_module(SCHEMA_MODULE_51)
    old_db_schema = sys.modules[SCHEMA_MODULE_51]

    def _insert_metadata():
        # Seed the old-schema statistics_meta table. Closes over ``hass``,
        # which is assigned later inside the first ``async with`` block.
        with session_scope(hass=hass) as session:
            session.add_all(
                (
                    old_db_schema.StatisticsMeta(
                        statistic_id="sensor.test1",
                        source="recorder",
                        unit_of_measurement="kWh",
                        has_mean=None,
                        has_sum=True,
                        name="Test 1",
                        mean_type=StatisticMeanType.NONE,
                        unit_class="energy",
                    ),
                    # Unexpected, but will not be changed by migration
                    old_db_schema.StatisticsMeta(
                        statistic_id="sensor.test2",
                        source="recorder",
                        unit_of_measurement="cats",
                        has_mean=None,
                        has_sum=True,
                        name="Test 2",
                        mean_type=StatisticMeanType.NONE,
                        unit_class="power",
                    ),
                    # This will be updated to "unitless" when migration runs again
                    old_db_schema.StatisticsMeta(
                        statistic_id="sensor.test3",
                        source="recorder",
                        unit_of_measurement="ppm",
                        has_mean=None,
                        has_sum=True,
                        name="Test 3",
                        mean_type=StatisticMeanType.NONE,
                        unit_class=None,
                    ),
                    # Wrong case
                    old_db_schema.StatisticsMeta(
                        statistic_id="sensor.test4",
                        source="recorder",
                        unit_of_measurement="l/min",
                        has_mean=None,
                        has_sum=True,
                        name="Test 4",
                        mean_type=StatisticMeanType.NONE,
                        unit_class="volume_flow_rate",
                    ),
                    # Wrong encoding
                    old_db_schema.StatisticsMeta(
                        statistic_id="sensor.test5",
                        source="recorder",
                        unit_of_measurement="",
                        has_mean=None,
                        has_sum=True,
                        name="Test 5",
                        mean_type=StatisticMeanType.NONE,
                        unit_class="area",
                    ),
                )
            )

    # Create database with old schema
    with (
        patch.object(recorder, "db_schema", old_db_schema),
        patch.object(migration, "SCHEMA_VERSION", old_db_schema.SCHEMA_VERSION),
        patch.object(
            migration,
            "LIVE_MIGRATION_MIN_SCHEMA_VERSION",
            get_patched_live_version(old_db_schema),
        ),
        patch.object(migration.EventsContextIDMigration, "migrate_data"),
        patch(CREATE_ENGINE_TARGET, new=_create_engine_test),
    ):
        async with (
            async_test_home_assistant() as hass,
            async_test_recorder(hass) as instance,
        ):
            await instance.async_add_executor_job(_insert_metadata)

            await async_wait_recording_done(hass)
            await _async_wait_migration_done(hass)

            await hass.async_stop()
            await hass.async_block_till_done()

    def _object_as_dict(obj):
        # Dump an ORM row to a plain dict of its mapped columns.
        return {c.key: getattr(obj, c.key) for c in inspect(obj).mapper.column_attrs}

    def _fetch_metadata():
        with session_scope(hass=hass) as session:
            metadatas = session.query(StatisticsMeta).all()
            return {
                metadata.statistic_id: _object_as_dict(metadata)
                for metadata in metadatas
            }

    # Run again with new schema, let migration run
    async with async_test_home_assistant() as hass:
        with (
            instrument_migration(hass) as instrumented_migration,
        ):
            # Stall migration when the last non-live schema migration is done
            instrumented_migration.stall_on_schema_version = UNIT_CLASS_SCHEMA_VERSION
            async with async_test_recorder(
                hass, wait_recorder=False, wait_recorder_setup=False
            ) as instance:
                # Dialect name selects the per-engine expectations below.
                engine_name = instance.engine.dialect.name
                # Wait for migration to reach migration of unit class
                await hass.async_add_executor_job(
                    instrumented_migration.apply_update_stalled.wait
                )

                # Check that it's possible to read metadata via the API, this will
                # stop working when version 50 is migrated off line
                pre_migration_metadata_api = await instance.async_add_executor_job(
                    statistics.list_statistic_ids,
                    hass,
                    None,
                    None,
                )
                # Release the stalled migration and let it finish.
                instrumented_migration.migration_stall.set()
                instance.recorder_and_worker_thread_ids.add(threading.get_ident())

                await hass.async_block_till_done()
                await async_wait_recording_done(hass)
                await async_wait_recording_done(hass)

                post_migration_metadata_db = await instance.async_add_executor_job(
                    _fetch_metadata
                )
                post_migration_metadata_api = await instance.async_add_executor_job(
                    statistics.list_statistic_ids,
                    hass,
                    None,
                    None,
                )

                await hass.async_stop()
                await hass.async_block_till_done()

    # Pre-migration API view: unit_class is recomputed from the unit of
    # measurement, so the seeded bad values for test4/test5 read as None.
    assert pre_migration_metadata_api == unordered(
        [
            {
                "display_unit_of_measurement": "kWh",
                "has_mean": False,
                "has_sum": True,
                "mean_type": StatisticMeanType.NONE,
                "name": "Test 1",
                "source": "recorder",
                "statistic_id": "sensor.test1",
                "statistics_unit_of_measurement": "kWh",
                "unit_class": "energy",
            },
            {
                "display_unit_of_measurement": "cats",
                "has_mean": False,
                "has_sum": True,
                "mean_type": StatisticMeanType.NONE,
                "name": "Test 2",
                "source": "recorder",
                "statistic_id": "sensor.test2",
                "statistics_unit_of_measurement": "cats",
                "unit_class": None,
            },
            {
                "display_unit_of_measurement": "ppm",
                "has_mean": False,
                "has_sum": True,
                "mean_type": StatisticMeanType.NONE,
                "name": "Test 3",
                "source": "recorder",
                "statistic_id": "sensor.test3",
                "statistics_unit_of_measurement": "ppm",
                "unit_class": "unitless",
            },
            {
                "display_unit_of_measurement": "l/min",
                "has_mean": False,
                "has_sum": True,
                "mean_type": StatisticMeanType.NONE,
                "name": "Test 4",
                "source": "recorder",
                "statistic_id": "sensor.test4",
                "statistics_unit_of_measurement": "l/min",
                "unit_class": None,
            },
            {
                "display_unit_of_measurement": "",
                "has_mean": False,
                "has_sum": True,
                "mean_type": StatisticMeanType.NONE,
                "name": "Test 5",
                "source": "recorder",
                "statistic_id": "sensor.test5",
                "statistics_unit_of_measurement": "",
                "unit_class": None,
            },
        ]
    )
    # Post-migration DB rows: unit_class outcome depends on the engine.
    assert post_migration_metadata_db == {
        "sensor.test1": {
            "has_mean": None,
            "has_sum": True,
            "id": 1,
            "mean_type": 0,
            "name": "Test 1",
            "source": "recorder",
            "statistic_id": "sensor.test1",
            "unit_class": expected_unit_class[engine_name]["sensor.test1"],
            "unit_of_measurement": "kWh",
        },
        "sensor.test2": {
            "has_mean": None,
            "has_sum": True,
            "id": 2,
            "mean_type": 0,
            "name": "Test 2",
            "source": "recorder",
            "statistic_id": "sensor.test2",
            "unit_class": expected_unit_class[engine_name]["sensor.test2"],
            "unit_of_measurement": "cats",
        },
        "sensor.test3": {
            "has_mean": None,
            "has_sum": True,
            "id": 3,
            "mean_type": 0,
            "name": "Test 3",
            "source": "recorder",
            "statistic_id": "sensor.test3",
            "unit_class": expected_unit_class[engine_name]["sensor.test3"],
            "unit_of_measurement": "ppm",
        },
        "sensor.test4": {
            "has_mean": None,
            "has_sum": True,
            "id": 4,
            "mean_type": 0,
            "name": "Test 4",
            "source": "recorder",
            "statistic_id": "sensor.test4",
            "unit_class": expected_unit_class[engine_name]["sensor.test4"],
            "unit_of_measurement": "l/min",
        },
        "sensor.test5": {
            "has_mean": None,
            "has_sum": True,
            "id": 5,
            "mean_type": 0,
            "name": "Test 5",
            "source": "recorder",
            "statistic_id": "sensor.test5",
            "unit_class": expected_unit_class[engine_name]["sensor.test5"],
            "unit_of_measurement": "",
        },
    }
    # Post-migration API view mirrors the per-engine DB unit_class values.
    assert post_migration_metadata_api == unordered(
        [
            {
                "display_unit_of_measurement": "kWh",
                "has_mean": False,
                "has_sum": True,
                "mean_type": StatisticMeanType.NONE,
                "name": "Test 1",
                "source": "recorder",
                "statistic_id": "sensor.test1",
                "statistics_unit_of_measurement": "kWh",
                "unit_class": expected_unit_class[engine_name]["sensor.test1"],
            },
            {
                "display_unit_of_measurement": "cats",
                "has_mean": False,
                "has_sum": True,
                "mean_type": StatisticMeanType.NONE,
                "name": "Test 2",
                "source": "recorder",
                "statistic_id": "sensor.test2",
                "statistics_unit_of_measurement": "cats",
                "unit_class": expected_unit_class[engine_name]["sensor.test2"],
            },
            {
                "display_unit_of_measurement": "ppm",
                "has_mean": False,
                "has_sum": True,
                "mean_type": StatisticMeanType.NONE,
                "name": "Test 3",
                "source": "recorder",
                "statistic_id": "sensor.test3",
                "statistics_unit_of_measurement": "ppm",
                "unit_class": expected_unit_class[engine_name]["sensor.test3"],
            },
            {
                "display_unit_of_measurement": "l/min",
                "has_mean": False,
                "has_sum": True,
                "mean_type": StatisticMeanType.NONE,
                "name": "Test 4",
                "source": "recorder",
                "statistic_id": "sensor.test4",
                "statistics_unit_of_measurement": "l/min",
                "unit_class": expected_unit_class[engine_name]["sensor.test4"],
            },
            {
                "display_unit_of_measurement": "",
                "has_mean": False,
                "has_sum": True,
                "mean_type": StatisticMeanType.NONE,
                "name": "Test 5",
                "source": "recorder",
                "statistic_id": "sensor.test5",
                "statistics_unit_of_measurement": "",
                "unit_class": expected_unit_class[engine_name]["sensor.test5"],
            },
        ]
    )

View File

@@ -1,161 +0,0 @@
# serializer version: 1
# name: test_binary_sensors[binary_sensor.output-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.output',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <BinarySensorDeviceClass.SAFETY: 'safety'>,
'original_icon': None,
'original_name': None,
'platform': 'satel_integra',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '1234567890_outputs_1',
'unit_of_measurement': None,
})
# ---
# name: test_binary_sensors[binary_sensor.output-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'safety',
'friendly_name': 'Output',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.output',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---
# name: test_binary_sensors[binary_sensor.zone-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'binary_sensor',
'entity_category': None,
'entity_id': 'binary_sensor.zone',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'options': dict({
}),
'original_device_class': <BinarySensorDeviceClass.MOTION: 'motion'>,
'original_icon': None,
'original_name': None,
'platform': 'satel_integra',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '1234567890_zones_1',
'unit_of_measurement': None,
})
# ---
# name: test_binary_sensors[binary_sensor.zone-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'motion',
'friendly_name': 'Zone',
}),
'context': <ANY>,
'entity_id': 'binary_sensor.zone',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---
# name: test_binary_sensors[device-output]
DeviceRegistryEntrySnapshot({
'area_id': None,
'config_entries': <ANY>,
'config_entries_subentries': <ANY>,
'configuration_url': None,
'connections': set({
}),
'disabled_by': None,
'entry_type': None,
'hw_version': None,
'id': <ANY>,
'identifiers': set({
tuple(
'satel_integra',
'1234567890_outputs_1',
),
}),
'labels': set({
}),
'manufacturer': None,
'model': None,
'model_id': None,
'name': 'Output',
'name_by_user': None,
'primary_config_entry': <ANY>,
'serial_number': None,
'sw_version': None,
'via_device_id': None,
})
# ---
# name: test_binary_sensors[device-zone]
DeviceRegistryEntrySnapshot({
'area_id': None,
'config_entries': <ANY>,
'config_entries_subentries': <ANY>,
'configuration_url': None,
'connections': set({
}),
'disabled_by': None,
'entry_type': None,
'hw_version': None,
'id': <ANY>,
'identifiers': set({
tuple(
'satel_integra',
'1234567890_zones_1',
),
}),
'labels': set({
}),
'manufacturer': None,
'model': None,
'model_id': None,
'name': 'Zone',
'name_by_user': None,
'primary_config_entry': <ANY>,
'serial_number': None,
'sw_version': None,
'via_device_id': None,
})
# ---

View File

@@ -1,102 +0,0 @@
"""Test Satel Integra Binary Sensor."""
from collections.abc import AsyncGenerator
from unittest.mock import AsyncMock, patch
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.binary_sensor import STATE_OFF, STATE_ON
from homeassistant.components.satel_integra.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceRegistry
from homeassistant.helpers.entity_registry import EntityRegistry
from . import setup_integration
from tests.common import MockConfigEntry, snapshot_platform
@pytest.fixture(autouse=True)
async def binary_sensor_only() -> AsyncGenerator[None]:
    """Enable only the binary sensor platform.

    Autouse fixture that patches the integration's PLATFORMS list for the
    duration of each test so no other platforms are set up.
    """
    with patch(
        "homeassistant.components.satel_integra.PLATFORMS",
        [Platform.BINARY_SENSOR],
    ):
        yield
@pytest.mark.usefixtures("mock_satel")
async def test_binary_sensors(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    mock_config_entry_with_subentries: MockConfigEntry,
    entity_registry: EntityRegistry,
    device_registry: DeviceRegistry,
) -> None:
    """Test binary sensors correctly being set up.

    Snapshot-compares every entity created by the config entry, then the
    device registry entries for the zone and output devices.
    """
    await setup_integration(hass, mock_config_entry_with_subentries)

    assert mock_config_entry_with_subentries.state is ConfigEntryState.LOADED

    # Snapshot all entities belonging to the config entry.
    await snapshot_platform(
        hass, entity_registry, snapshot, mock_config_entry_with_subentries.entry_id
    )

    device_entry = device_registry.async_get_device(
        identifiers={(DOMAIN, "1234567890_zones_1")}
    )
    assert device_entry == snapshot(name="device-zone")

    device_entry = device_registry.async_get_device(
        identifiers={(DOMAIN, "1234567890_outputs_1")}
    )
    assert device_entry == snapshot(name="device-output")
async def test_binary_sensor_initial_state_on(
    hass: HomeAssistant,
    mock_satel: AsyncMock,
    mock_config_entry_with_subentries: MockConfigEntry,
) -> None:
    """Test binary sensors have a correct initial state ON after initialization."""
    # Mark zone 1 and output 1 as violated before the integration loads,
    # so the entities must pick the state up during setup.
    mock_satel.violated_zones = [1]
    mock_satel.violated_outputs = [1]

    await setup_integration(hass, mock_config_entry_with_subentries)

    assert hass.states.get("binary_sensor.zone").state == STATE_ON
    assert hass.states.get("binary_sensor.output").state == STATE_ON
async def test_binary_sensor_callback(
    hass: HomeAssistant,
    mock_satel: AsyncMock,
    mock_config_entry_with_subentries: MockConfigEntry,
) -> None:
    """Test binary sensors correctly change state after a callback from the panel."""
    await setup_integration(hass, mock_config_entry_with_subentries)

    entity_ids = ("binary_sensor.zone", "binary_sensor.output")
    for entity_id in entity_ids:
        assert hass.states.get(entity_id).state == STATE_OFF

    # Grab the zone/output update callbacks the integration registered
    # with the panel via monitor_status().
    registered_args = mock_satel.monitor_status.call_args_list[0][0]
    zone_callback = registered_args[1]
    output_callback = registered_args[2]

    # A callback for a different number must leave both entities untouched;
    # each sensor only reacts to its own number.
    zone_callback({"zones": {2: 1}})
    output_callback({"outputs": {2: 1}})
    for entity_id in entity_ids:
        assert hass.states.get(entity_id).state == STATE_OFF

    # A callback for the matching number flips each sensor to ON.
    zone_callback({"zones": {1: 1}})
    output_callback({"outputs": {1: 1}})
    for entity_id in entity_ids:
        assert hass.states.get(entity_id).state == STATE_ON

Some files were not shown because too many files have changed in this diff Show More