Mirror of https://github.com/home-assistant/core.git (synced 2025-10-08 11:19:30 +00:00)

Compare commits: water_hier ... dev (27 commits)

Commit SHAs:
cbf1b39edb
142daf5e49
8bd0ff7cca
ac676e12f6
c0ac3292cd
80fd07c128
3701d8859a
6dd26bae88
1a0abe296c
de6c61a4ab
33c677596e
e9b4b8e99b
0525c04c42
d57b502551
9fb708baf4
abdf24b7a0
29bfbd27bb
224553f8d9
7c9f6a061f
8e115d4685
00c189844f
4587c286bb
b46097a7fc
299cb6a2ff
1b7b91b328
01a1480ebd
26b8abb118
@@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = {
 }
 UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
-UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
+UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30)
@@ -16,10 +16,12 @@ from homeassistant.components.sensor import (
 from homeassistant.const import (
     CONCENTRATION_PARTS_PER_BILLION,
     CONCENTRATION_PARTS_PER_MILLION,
+    LIGHT_LUX,
     PERCENTAGE,
     EntityCategory,
     Platform,
     UnitOfPressure,
+    UnitOfSoundPressure,
     UnitOfTemperature,
 )
 from homeassistant.core import HomeAssistant, callback
@@ -112,6 +114,21 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
         state_class=SensorStateClass.MEASUREMENT,
         suggested_display_precision=0,
     ),
+    "lux": SensorEntityDescription(
+        key="lux",
+        device_class=SensorDeviceClass.ILLUMINANCE,
+        native_unit_of_measurement=LIGHT_LUX,
+        state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
+    ),
+    "noise": SensorEntityDescription(
+        key="noise",
+        translation_key="ambient_noise",
+        device_class=SensorDeviceClass.SOUND_PRESSURE,
+        native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A,
+        state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
+    ),
 }
 
 PARALLEL_UPDATES = 0
@@ -41,6 +41,9 @@
       },
       "illuminance": {
         "name": "[%key:component::sensor::entity_component::illuminance::name%]"
      },
+      "ambient_noise": {
+        "name": "Ambient noise"
+      }
     }
   }
@@ -38,9 +38,11 @@ from home_assistant_intents import (
     ErrorKey,
     FuzzyConfig,
     FuzzyLanguageResponses,
+    LanguageScores,
     get_fuzzy_config,
     get_fuzzy_language,
     get_intents,
+    get_language_scores,
     get_languages,
 )
 import yaml
@@ -59,6 +61,7 @@ from homeassistant.core import (
 )
 from homeassistant.helpers import (
     area_registry as ar,
+    config_validation as cv,
     device_registry as dr,
     entity_registry as er,
     floor_registry as fr,
@@ -343,6 +346,81 @@ class DefaultAgent(ConversationEntity):
 
         return result
 
+    async def async_debug_recognize(
+        self, user_input: ConversationInput
+    ) -> dict[str, Any] | None:
+        """Debug recognize from user input."""
+        result_dict: dict[str, Any] | None = None
+
+        if trigger_result := await self.async_recognize_sentence_trigger(user_input):
+            result_dict = {
+                # Matched a user-defined sentence trigger.
+                # We can't provide the response here without executing the
+                # trigger.
+                "match": True,
+                "source": "trigger",
+                "sentence_template": trigger_result.sentence_template or "",
+            }
+        elif intent_result := await self.async_recognize_intent(user_input):
+            successful_match = not intent_result.unmatched_entities
+            result_dict = {
+                # Name of the matching intent (or the closest)
+                "intent": {
+                    "name": intent_result.intent.name,
+                },
+                # Slot values that would be received by the intent
+                "slots": {  # direct access to values
+                    entity_key: entity.text or entity.value
+                    for entity_key, entity in intent_result.entities.items()
+                },
+                # Extra slot details, such as the originally matched text
+                "details": {
+                    entity_key: {
+                        "name": entity.name,
+                        "value": entity.value,
+                        "text": entity.text,
+                    }
+                    for entity_key, entity in intent_result.entities.items()
+                },
+                # Entities/areas/etc. that would be targeted
+                "targets": {},
+                # True if match was successful
+                "match": successful_match,
+                # Text of the sentence template that matched (or was closest)
+                "sentence_template": "",
+                # When match is incomplete, this will contain the best slot guesses
+                "unmatched_slots": _get_unmatched_slots(intent_result),
+                # True if match was not exact
+                "fuzzy_match": False,
+            }
+
+            if successful_match:
+                result_dict["targets"] = {
+                    state.entity_id: {"matched": is_matched}
+                    for state, is_matched in _get_debug_targets(
+                        self.hass, intent_result
+                    )
+                }
+
+            if intent_result.intent_sentence is not None:
+                result_dict["sentence_template"] = intent_result.intent_sentence.text
+
+            if intent_result.intent_metadata:
+                # Inspect metadata to determine if this matched a custom sentence
+                if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
+                    result_dict["source"] = "custom"
+                    result_dict["file"] = intent_result.intent_metadata.get(
+                        METADATA_CUSTOM_FILE
+                    )
+                else:
+                    result_dict["source"] = "builtin"
+
+                result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
+                    METADATA_FUZZY_MATCH, False
+                )
+
+        return result_dict
+
     async def _async_handle_message(
         self,
         user_input: ConversationInput,
@@ -1529,6 +1607,10 @@ class DefaultAgent(ConversationEntity):
             return None
         return response
 
+    async def async_get_language_scores(self) -> dict[str, LanguageScores]:
+        """Get support scores per language."""
+        return await self.hass.async_add_executor_job(get_language_scores)
+
 
 def _make_error_result(
     language: str,
@@ -1725,3 +1807,75 @@ def _collect_list_references(expression: Expression, list_names: set[str]) -> None:
     elif isinstance(expression, ListReference):
         # {list}
         list_names.add(expression.slot_name)
+
+
+def _get_debug_targets(
+    hass: HomeAssistant,
+    result: RecognizeResult,
+) -> Iterable[tuple[State, bool]]:
+    """Yield state/is_matched pairs for a hassil recognition."""
+    entities = result.entities
+
+    name: str | None = None
+    area_name: str | None = None
+    domains: set[str] | None = None
+    device_classes: set[str] | None = None
+    state_names: set[str] | None = None
+
+    if "name" in entities:
+        name = str(entities["name"].value)
+
+    if "area" in entities:
+        area_name = str(entities["area"].value)
+
+    if "domain" in entities:
+        domains = set(cv.ensure_list(entities["domain"].value))
+
+    if "device_class" in entities:
+        device_classes = set(cv.ensure_list(entities["device_class"].value))
+
+    if "state" in entities:
+        # HassGetState only
+        state_names = set(cv.ensure_list(entities["state"].value))
+
+    if (
+        (name is None)
+        and (area_name is None)
+        and (not domains)
+        and (not device_classes)
+        and (not state_names)
+    ):
+        # Avoid "matching" all entities when there is no filter
+        return
+
+    states = intent.async_match_states(
+        hass,
+        name=name,
+        area_name=area_name,
+        domains=domains,
+        device_classes=device_classes,
+    )
+
+    for state in states:
+        # For queries, a target is "matched" based on its state
+        is_matched = (state_names is None) or (state.state in state_names)
+        yield state, is_matched
+
+
+def _get_unmatched_slots(
+    result: RecognizeResult,
+) -> dict[str, str | int | float]:
+    """Return a dict of unmatched text/range slot entities."""
+    unmatched_slots: dict[str, str | int | float] = {}
+    for entity in result.unmatched_entities_list:
+        if isinstance(entity, UnmatchedTextEntity):
+            if entity.text == MISSING_ENTITY:
+                # Don't report <missing> since these are just missing context
+                # slots.
+                continue
+
+            unmatched_slots[entity.name] = entity.text
+        elif isinstance(entity, UnmatchedRangeEntity):
+            unmatched_slots[entity.name] = entity.value
+
+    return unmatched_slots
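For orientation, the dictionary returned by the new async_debug_recognize method above is exactly what the debug websocket forwards to the frontend. A tiny, self-contained sketch of consuming such a payload; the literal values below are illustrative only, not taken from the diff:

# Shape of the debug payload produced above (illustrative values).
result_dict = {
    "match": True,
    "source": "builtin",
    "intent": {"name": "HassTurnOn"},
    "slots": {"name": "kitchen light"},
    "targets": {"light.kitchen": {"matched": True}},
    "sentence_template": "turn on [the] {name}",
    "unmatched_slots": {},
    "fuzzy_match": False,
}

if result_dict["match"]:
    matched = [eid for eid, info in result_dict["targets"].items() if info["matched"]]
    print(f"{result_dict['intent']['name']} via {result_dict['source']}: {matched}")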
@@ -2,21 +2,16 @@
 
 from __future__ import annotations
 
-from collections.abc import Iterable
 from dataclasses import asdict
 from typing import Any
 
 from aiohttp import web
-from hassil.recognize import MISSING_ENTITY, RecognizeResult
-from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
-from home_assistant_intents import get_language_scores
 import voluptuous as vol
 
 from homeassistant.components import http, websocket_api
 from homeassistant.components.http.data_validator import RequestDataValidator
 from homeassistant.const import MATCH_ALL
-from homeassistant.core import HomeAssistant, State, callback
-from homeassistant.helpers import config_validation as cv, intent
+from homeassistant.core import HomeAssistant, callback
 from homeassistant.util import language as language_util
 
 from .agent_manager import (
@@ -26,11 +21,6 @@ from .agent_manager import (
     get_agent_manager,
 )
 from .const import DATA_COMPONENT
-from .default_agent import (
-    METADATA_CUSTOM_FILE,
-    METADATA_CUSTOM_SENTENCE,
-    METADATA_FUZZY_MATCH,
-)
 from .entity import ConversationEntity
 from .models import ConversationInput
@@ -206,150 +196,12 @@ async def websocket_hass_agent_debug(
             language=msg.get("language", hass.config.language),
             agent_id=agent.entity_id,
         )
-        result_dict: dict[str, Any] | None = None
-
-        if trigger_result := await agent.async_recognize_sentence_trigger(user_input):
-            result_dict = {
-                # Matched a user-defined sentence trigger.
-                # We can't provide the response here without executing the
-                # trigger.
-                "match": True,
-                "source": "trigger",
-                "sentence_template": trigger_result.sentence_template or "",
-            }
-        elif intent_result := await agent.async_recognize_intent(user_input):
-            successful_match = not intent_result.unmatched_entities
-            result_dict = {
-                # Name of the matching intent (or the closest)
-                "intent": {
-                    "name": intent_result.intent.name,
-                },
-                # Slot values that would be received by the intent
-                "slots": {  # direct access to values
-                    entity_key: entity.text or entity.value
-                    for entity_key, entity in intent_result.entities.items()
-                },
-                # Extra slot details, such as the originally matched text
-                "details": {
-                    entity_key: {
-                        "name": entity.name,
-                        "value": entity.value,
-                        "text": entity.text,
-                    }
-                    for entity_key, entity in intent_result.entities.items()
-                },
-                # Entities/areas/etc. that would be targeted
-                "targets": {},
-                # True if match was successful
-                "match": successful_match,
-                # Text of the sentence template that matched (or was closest)
-                "sentence_template": "",
-                # When match is incomplete, this will contain the best slot guesses
-                "unmatched_slots": _get_unmatched_slots(intent_result),
-                # True if match was not exact
-                "fuzzy_match": False,
-            }
-
-            if successful_match:
-                result_dict["targets"] = {
-                    state.entity_id: {"matched": is_matched}
-                    for state, is_matched in _get_debug_targets(hass, intent_result)
-                }
-
-            if intent_result.intent_sentence is not None:
-                result_dict["sentence_template"] = intent_result.intent_sentence.text
-
-            if intent_result.intent_metadata:
-                # Inspect metadata to determine if this matched a custom sentence
-                if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
-                    result_dict["source"] = "custom"
-                    result_dict["file"] = intent_result.intent_metadata.get(
-                        METADATA_CUSTOM_FILE
-                    )
-                else:
-                    result_dict["source"] = "builtin"
-
-                result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
-                    METADATA_FUZZY_MATCH, False
-                )
-
+        result_dict = await agent.async_debug_recognize(user_input)
         result_dicts.append(result_dict)
 
     connection.send_result(msg["id"], {"results": result_dicts})
 
 
-def _get_debug_targets(
-    hass: HomeAssistant,
-    result: RecognizeResult,
-) -> Iterable[tuple[State, bool]]:
-    """Yield state/is_matched pairs for a hassil recognition."""
-    entities = result.entities
-
-    name: str | None = None
-    area_name: str | None = None
-    domains: set[str] | None = None
-    device_classes: set[str] | None = None
-    state_names: set[str] | None = None
-
-    if "name" in entities:
-        name = str(entities["name"].value)
-
-    if "area" in entities:
-        area_name = str(entities["area"].value)
-
-    if "domain" in entities:
-        domains = set(cv.ensure_list(entities["domain"].value))
-
-    if "device_class" in entities:
-        device_classes = set(cv.ensure_list(entities["device_class"].value))
-
-    if "state" in entities:
-        # HassGetState only
-        state_names = set(cv.ensure_list(entities["state"].value))
-
-    if (
-        (name is None)
-        and (area_name is None)
-        and (not domains)
-        and (not device_classes)
-        and (not state_names)
-    ):
-        # Avoid "matching" all entities when there is no filter
-        return
-
-    states = intent.async_match_states(
-        hass,
-        name=name,
-        area_name=area_name,
-        domains=domains,
-        device_classes=device_classes,
-    )
-
-    for state in states:
-        # For queries, a target is "matched" based on its state
-        is_matched = (state_names is None) or (state.state in state_names)
-        yield state, is_matched
-
-
-def _get_unmatched_slots(
-    result: RecognizeResult,
-) -> dict[str, str | int | float]:
-    """Return a dict of unmatched text/range slot entities."""
-    unmatched_slots: dict[str, str | int | float] = {}
-    for entity in result.unmatched_entities_list:
-        if isinstance(entity, UnmatchedTextEntity):
-            if entity.text == MISSING_ENTITY:
-                # Don't report <missing> since these are just missing context
-                # slots.
-                continue
-
-            unmatched_slots[entity.name] = entity.text
-        elif isinstance(entity, UnmatchedRangeEntity):
-            unmatched_slots[entity.name] = entity.value
-
-    return unmatched_slots
-
-
 @websocket_api.websocket_command(
     {
         vol.Required("type"): "conversation/agent/homeassistant/language_scores",
@@ -364,10 +216,13 @@ async def websocket_hass_agent_language_scores(
     msg: dict[str, Any],
 ) -> None:
     """Get support scores per language."""
+    agent = get_agent_manager(hass).default_agent
+    assert agent is not None
+
     language = msg.get("language", hass.config.language)
     country = msg.get("country", hass.config.country)
 
-    scores = await hass.async_add_executor_job(get_language_scores)
+    scores = await agent.async_get_language_scores()
     matching_langs = language_util.matches(language, scores.keys(), country=country)
     preferred_lang = matching_langs[0] if matching_langs else language
     result = {
@@ -116,10 +116,6 @@ class WaterSourceType(TypedDict):
     # an EnergyCostSensor will be automatically created
     stat_cost: str | None
 
-    # An optional statistic_id identifying a device
-    # that includes this device's consumption in its total
-    included_in_stat: str | None
-
     # Used to generate costs if stat_cost is set to None
     entity_energy_price: str | None  # entity_id of an entity providing price ($/m³)
     number_energy_price: float | None  # Price for energy ($/m³)
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/environment_canada",
   "iot_class": "cloud_polling",
   "loggers": ["env_canada"],
-  "requirements": ["env-canada==0.11.2"]
+  "requirements": ["env-canada==0.11.3"]
 }
@@ -17,7 +17,7 @@
   "mqtt": ["esphome/discover/#"],
   "quality_scale": "platinum",
   "requirements": [
-    "aioesphomeapi==41.12.0",
+    "aioesphomeapi==41.13.0",
     "esphome-dashboard-api==1.3.0",
     "bleak-esphome==3.4.0"
   ],
@@ -5,5 +5,5 @@
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/holiday",
   "iot_class": "local_polling",
-  "requirements": ["holidays==0.81", "babel==2.15.0"]
+  "requirements": ["holidays==0.82", "babel==2.15.0"]
 }
@@ -37,5 +37,5 @@
   "iot_class": "cloud_push",
   "loggers": ["pylamarzocco"],
   "quality_scale": "platinum",
-  "requirements": ["pylamarzocco==2.1.1"]
+  "requirements": ["pylamarzocco==2.1.2"]
 }
@@ -10,7 +10,11 @@ from mill import Heater, Mill
 from mill_local import Mill as MillLocal
 
 from homeassistant.components.recorder import get_instance
-from homeassistant.components.recorder.models import StatisticData, StatisticMetaData
+from homeassistant.components.recorder.models import (
+    StatisticData,
+    StatisticMeanType,
+    StatisticMetaData,
+)
 from homeassistant.components.recorder.statistics import (
     async_add_external_statistics,
     get_last_statistics,
@@ -147,7 +151,7 @@ class MillHistoricDataUpdateCoordinator(DataUpdateCoordinator):
                 )
             )
             metadata = StatisticMetaData(
-                has_mean=False,
+                mean_type=StatisticMeanType.NONE,
                 has_sum=True,
                 name=f"{heater.name}",
                 source=DOMAIN,
@@ -253,6 +253,7 @@ class ModbusHub:
         self._client: (
             AsyncModbusSerialClient | AsyncModbusTcpClient | AsyncModbusUdpClient | None
         ) = None
+        self._lock = asyncio.Lock()
         self.event_connected = asyncio.Event()
         self.hass = hass
         self.name = client_config[CONF_NAME]
@@ -415,7 +416,9 @@ class ModbusHub:
         """Convert async to sync pymodbus call."""
         if not self._client:
             return None
-        result = await self.low_level_pb_call(unit, address, value, use_call)
-        if self._msg_wait:
-            await asyncio.sleep(self._msg_wait)
-        return result
+        async with self._lock:
+            result = await self.low_level_pb_call(unit, address, value, use_call)
+            if self._msg_wait:
+                # small delay until next request/response
+                await asyncio.sleep(self._msg_wait)
+            return result
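The Modbus hunk above serializes every low-level call behind a single asyncio.Lock so the optional quiet period between messages can no longer overlap with the next request. A minimal, self-contained sketch of that locking pattern; the FakeModbusClient class and the timings are hypothetical stand-ins, not the integration's real objects:

import asyncio

class FakeModbusClient:
    """Stand-in for a pymodbus client, only used to illustrate the pattern."""

    async def read(self, address: int) -> int:
        await asyncio.sleep(0.01)  # pretend bus round-trip
        return address * 2

class SerializedHub:
    """Serialize device I/O and keep a quiet period between messages."""

    def __init__(self, client: FakeModbusClient, msg_wait: float = 0.05) -> None:
        self._client = client
        self._msg_wait = msg_wait
        self._lock = asyncio.Lock()

    async def call(self, address: int) -> int:
        async with self._lock:
            result = await self._client.read(address)
            if self._msg_wait:
                # small delay until next request/response, still inside the lock
                await asyncio.sleep(self._msg_wait)
            return result

async def main() -> None:
    hub = SerializedHub(FakeModbusClient())
    # Concurrent callers are served one at a time, in lock-acquisition order.
    print(await asyncio.gather(*(hub.call(addr) for addr in (1, 2, 3))))

asyncio.run(main())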
@@ -174,7 +174,7 @@ class MotionBaseDevice(MotionCoordinatorEntity, CoverEntity):
 
     _restore_tilt = False
 
-    def __init__(self, coordinator, blind, device_class):
+    def __init__(self, coordinator, blind, device_class) -> None:
         """Initialize the blind."""
         super().__init__(coordinator, blind)
 
@@ -275,7 +275,7 @@ class MotionTiltDevice(MotionPositionDevice):
         """
         if self._blind.angle is None:
             return None
-        return self._blind.angle * 100 / 180
+        return 100 - (self._blind.angle * 100 / 180)
 
     @property
     def is_closed(self) -> bool | None:
@@ -287,14 +287,14 @@ class MotionTiltDevice(MotionPositionDevice):
     async def async_open_cover_tilt(self, **kwargs: Any) -> None:
         """Open the cover tilt."""
         async with self._api_lock:
-            await self.hass.async_add_executor_job(self._blind.Set_angle, 180)
+            await self.hass.async_add_executor_job(self._blind.Set_angle, 0)
 
         await self.async_request_position_till_stop()
 
     async def async_close_cover_tilt(self, **kwargs: Any) -> None:
         """Close the cover tilt."""
         async with self._api_lock:
-            await self.hass.async_add_executor_job(self._blind.Set_angle, 0)
+            await self.hass.async_add_executor_job(self._blind.Set_angle, 180)
 
         await self.async_request_position_till_stop()
@@ -302,7 +302,7 @@ class MotionTiltDevice(MotionPositionDevice):
         """Move the cover tilt to a specific position."""
         angle = kwargs[ATTR_TILT_POSITION] * 180 / 100
         async with self._api_lock:
-            await self.hass.async_add_executor_job(self._blind.Set_angle, angle)
+            await self.hass.async_add_executor_job(self._blind.Set_angle, 180 - angle)
 
         await self.async_request_position_till_stop()
 
@@ -347,9 +347,9 @@ class MotionTiltOnlyDevice(MotionTiltDevice):
         if self._blind.position is None:
             if self._blind.angle is None:
                 return None
-            return self._blind.angle * 100 / 180
+            return 100 - (self._blind.angle * 100 / 180)
 
-        return self._blind.position
+        return 100 - self._blind.position
 
     @property
     def is_closed(self) -> bool | None:
@@ -357,9 +357,9 @@ class MotionTiltOnlyDevice(MotionTiltDevice):
         if self._blind.position is None:
             if self._blind.angle is None:
                 return None
-            return self._blind.angle == 0
+            return self._blind.angle == 180
 
-        return self._blind.position == 0
+        return self._blind.position == 100
 
     async def async_open_cover_tilt(self, **kwargs: Any) -> None:
         """Open the cover tilt."""
@@ -381,10 +381,14 @@ class MotionTiltOnlyDevice(MotionTiltDevice):
         if self._blind.position is None:
             angle = angle * 180 / 100
             async with self._api_lock:
-                await self.hass.async_add_executor_job(self._blind.Set_angle, angle)
+                await self.hass.async_add_executor_job(
+                    self._blind.Set_angle, 180 - angle
+                )
         else:
             async with self._api_lock:
-                await self.hass.async_add_executor_job(self._blind.Set_position, angle)
+                await self.hass.async_add_executor_job(
+                    self._blind.Set_position, 100 - angle
+                )
 
         await self.async_request_position_till_stop()
 
@@ -397,10 +401,14 @@ class MotionTiltOnlyDevice(MotionTiltDevice):
         if self._blind.position is None:
             angle = angle * 180 / 100
             async with self._api_lock:
-                await self.hass.async_add_executor_job(self._blind.Set_angle, angle)
+                await self.hass.async_add_executor_job(
+                    self._blind.Set_angle, 180 - angle
+                )
         else:
             async with self._api_lock:
-                await self.hass.async_add_executor_job(self._blind.Set_position, angle)
+                await self.hass.async_add_executor_job(
+                    self._blind.Set_position, 100 - angle
+                )
 
         await self.async_request_position_till_stop()
 
@@ -408,7 +416,7 @@ class MotionTiltOnlyDevice(MotionTiltDevice):
 class MotionTDBUDevice(MotionBaseDevice):
     """Representation of a Motion Top Down Bottom Up blind Device."""
 
-    def __init__(self, coordinator, blind, device_class, motor):
+    def __init__(self, coordinator, blind, device_class, motor) -> None:
         """Initialize the blind."""
         super().__init__(coordinator, blind, device_class)
         self._motor = motor
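The cover hunks above invert the tilt mapping so that a motor angle of 180 degrees now reads as 0 % tilt position (closed) and 0 degrees as 100 % (open). A small worked sketch of that conversion as plain functions, assuming the same 0-180 degree range used in the diff; these helpers are illustrative, not the integration's entity methods:

def angle_to_tilt_position(angle: float | None) -> float | None:
    """Map a blind motor angle (0-180 degrees) to a 0-100 tilt position."""
    if angle is None:
        return None
    return 100 - (angle * 100 / 180)

def tilt_position_to_angle(position: float) -> float:
    """Map a 0-100 tilt position back to a 0-180 degree motor angle."""
    return 180 - (position * 180 / 100)

# 180 degrees now reads as fully closed (0 %), 0 degrees as fully open (100 %).
assert angle_to_tilt_position(180) == 0
assert angle_to_tilt_position(0) == 100
assert tilt_position_to_angle(50) == 90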
@@ -53,7 +53,7 @@ class NikoHomeControlLight(NikoHomeControlEntity, LightEntity):
 
     async def async_turn_on(self, **kwargs: Any) -> None:
         """Instruct the light to turn on."""
-        await self._action.turn_on(kwargs.get(ATTR_BRIGHTNESS, 255))
+        await self._action.turn_on(kwargs.get(ATTR_BRIGHTNESS))
 
     async def async_turn_off(self, **kwargs: Any) -> None:
         """Instruct the light to turn off."""
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/niko_home_control",
   "iot_class": "local_push",
   "loggers": ["nikohomecontrol"],
-  "requirements": ["nhc==0.4.12"]
+  "requirements": ["nhc==0.6.1"]
 }
@@ -7,5 +7,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["pynintendoparental"],
   "quality_scale": "bronze",
-  "requirements": ["pynintendoparental==1.0.1"]
+  "requirements": ["pynintendoparental==1.1.1"]
 }
@@ -9,5 +9,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "quality_scale": "bronze",
-  "requirements": ["openai==1.99.5", "python-open-router==0.3.1"]
+  "requirements": ["openai==2.2.0", "python-open-router==0.3.1"]
 }
@@ -316,16 +316,23 @@ class OpenAISubentryFlowHandler(ConfigSubentryFlow):
         options = self.options
         errors: dict[str, str] = {}
 
-        step_schema: VolDictType = {
-            vol.Optional(
-                CONF_CODE_INTERPRETER,
-                default=RECOMMENDED_CODE_INTERPRETER,
-            ): bool,
-        }
+        step_schema: VolDictType = {}
 
         model = options[CONF_CHAT_MODEL]
 
+        if model.startswith(("o", "gpt-5")):
+            if not model.startswith(("gpt-5-pro", "gpt-5-codex")):
+                step_schema.update(
+                    {
+                        vol.Optional(
+                            CONF_CODE_INTERPRETER,
+                            default=RECOMMENDED_CODE_INTERPRETER,
+                        ): bool,
+                    }
+                )
+            elif CONF_CODE_INTERPRETER in options:
+                options.pop(CONF_CODE_INTERPRETER)
+
         if model.startswith(("o", "gpt-5")) and not model.startswith("gpt-5-pro"):
             step_schema.update(
                 {
                     vol.Optional(
@@ -468,7 +468,9 @@ class OpenAIBaseLLMEntity(Entity):
             model_args["reasoning"] = {
                 "effort": options.get(
                     CONF_REASONING_EFFORT, RECOMMENDED_REASONING_EFFORT
-                ),
+                )
+                if not model_args["model"].startswith("gpt-5-pro")
+                else "high",  # GPT-5 pro only supports reasoning.effort: high
                 "summary": "auto",
             }
             model_args["include"] = ["reasoning.encrypted_content"]
@@ -487,7 +489,7 @@ class OpenAIBaseLLMEntity(Entity):
 
         if options.get(CONF_WEB_SEARCH):
             web_search = WebSearchToolParam(
-                type="web_search_preview",
+                type="web_search",
                 search_context_size=options.get(
                     CONF_WEB_SEARCH_CONTEXT_SIZE, RECOMMENDED_WEB_SEARCH_CONTEXT_SIZE
                 ),
@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/openai_conversation",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["openai==1.99.5"]
+  "requirements": ["openai==2.2.0"]
 }
@@ -18,7 +18,8 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession
 
 from .coordinator import PortainerCoordinator
 
-_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.BUTTON, Platform.SWITCH]
+_PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]
 
 
 type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
 
@@ -1,5 +1,10 @@
 {
   "entity": {
+    "sensor": {
+      "image": {
+        "default": "mdi:docker"
+      }
+    },
     "switch": {
       "container": {
         "default": "mdi:arrow-down-box",
homeassistant/components/portainer/sensor.py (new file, 83 lines)
@@ -0,0 +1,83 @@
+"""Sensor platform for Portainer integration."""
+
+from __future__ import annotations
+
+from collections.abc import Callable
+from dataclasses import dataclass
+
+from pyportainer.models.docker import DockerContainer
+
+from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from .coordinator import PortainerConfigEntry, PortainerCoordinator
+from .entity import PortainerContainerEntity, PortainerCoordinatorData
+
+
+@dataclass(frozen=True, kw_only=True)
+class PortainerSensorEntityDescription(SensorEntityDescription):
+    """Class to hold Portainer sensor description."""
+
+    value_fn: Callable[[DockerContainer], str | None]
+
+
+CONTAINER_SENSORS: tuple[PortainerSensorEntityDescription, ...] = (
+    PortainerSensorEntityDescription(
+        key="image",
+        translation_key="image",
+        value_fn=lambda data: data.image,
+    ),
+)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: PortainerConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up Portainer sensors based on a config entry."""
+    coordinator = entry.runtime_data
+
+    async_add_entities(
+        PortainerContainerSensor(
+            coordinator,
+            entity_description,
+            container,
+            endpoint,
+        )
+        for endpoint in coordinator.data.values()
+        for container in endpoint.containers.values()
+        for entity_description in CONTAINER_SENSORS
+    )
+
+
+class PortainerContainerSensor(PortainerContainerEntity, SensorEntity):
+    """Representation of a Portainer container sensor."""
+
+    entity_description: PortainerSensorEntityDescription
+
+    def __init__(
+        self,
+        coordinator: PortainerCoordinator,
+        entity_description: PortainerSensorEntityDescription,
+        device_info: DockerContainer,
+        via_device: PortainerCoordinatorData,
+    ) -> None:
+        """Initialize the Portainer container sensor."""
+        self.entity_description = entity_description
+        super().__init__(device_info, coordinator, via_device)
+
+        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{self.device_name}_{entity_description.key}"
+
+    @property
+    def available(self) -> bool:
+        """Return if the device is available."""
+        return super().available and self.endpoint_id in self.coordinator.data
+
+    @property
+    def native_value(self) -> str | None:
+        """Return the state of the sensor."""
+        return self.entity_description.value_fn(
+            self.coordinator.data[self.endpoint_id].containers[self.device_id]
+        )
@@ -46,6 +46,11 @@
           "name": "Status"
         }
       },
+      "sensor": {
+        "image": {
+          "name": "Image"
+        }
+      },
       "switch": {
         "container": {
           "name": "Container"
@@ -9,7 +9,6 @@ from typing import TYPE_CHECKING, Any, Final
 
 from aioshelly.const import BLU_TRV_IDENTIFIER, MODEL_BLU_GATEWAY_G3, RPC_GENERATIONS
 from aioshelly.exceptions import DeviceConnectionError, InvalidAuthError, RpcCallError
-from aioshelly.rpc_device import RpcDevice
 
 from homeassistant.components.button import (
     DOMAIN as BUTTON_PLATFORM,
@@ -24,16 +23,24 @@ from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
-from .const import DOMAIN, LOGGER, SHELLY_GAS_MODELS
+from .const import DOMAIN, LOGGER, MODEL_FRANKEVER_WATER_VALVE, SHELLY_GAS_MODELS
 from .coordinator import ShellyBlockCoordinator, ShellyConfigEntry, ShellyRpcCoordinator
-from .entity import get_entity_block_device_info, get_entity_rpc_device_info
+from .entity import (
+    RpcEntityDescription,
+    ShellyRpcAttributeEntity,
+    async_setup_entry_rpc,
+    get_entity_block_device_info,
+    get_entity_rpc_device_info,
+    rpc_call,
+)
 from .utils import (
     async_remove_orphaned_entities,
     format_ble_addr,
     get_blu_trv_device_info,
     get_device_entry_gen,
     get_rpc_entity_name,
     get_rpc_key_ids,
     get_rpc_key_instances,
+    get_rpc_role_by_key,
     get_virtual_component_ids,
 )
@@ -51,6 +58,11 @@ class ShellyButtonDescription[
     supported: Callable[[_ShellyCoordinatorT], bool] = lambda _: True
 
 
+@dataclass(frozen=True, kw_only=True)
+class RpcButtonDescription(RpcEntityDescription, ButtonEntityDescription):
+    """Class to describe a RPC button."""
+
+
 BUTTONS: Final[list[ShellyButtonDescription[Any]]] = [
     ShellyButtonDescription[ShellyBlockCoordinator | ShellyRpcCoordinator](
         key="reboot",
@@ -96,12 +108,24 @@ BLU_TRV_BUTTONS: Final[list[ShellyButtonDescription]] = [
     ),
 ]
 
-VIRTUAL_BUTTONS: Final[list[ShellyButtonDescription]] = [
-    ShellyButtonDescription[ShellyRpcCoordinator](
+RPC_VIRTUAL_BUTTONS = {
+    "button_generic": RpcButtonDescription(
         key="button",
-        press_action="single_push",
-    )
-]
+        role="generic",
+    ),
+    "button_open": RpcButtonDescription(
+        key="button",
+        entity_registry_enabled_default=False,
+        role="open",
+        models={MODEL_FRANKEVER_WATER_VALVE},
+    ),
+    "button_close": RpcButtonDescription(
+        key="button",
+        entity_registry_enabled_default=False,
+        role="close",
+        models={MODEL_FRANKEVER_WATER_VALVE},
+    ),
+}
 
 
 @callback
@@ -129,8 +153,10 @@ def async_migrate_unique_ids(
                 )
             }
 
+    if not isinstance(coordinator, ShellyRpcCoordinator):
+        return None
+
     if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER):
-        assert isinstance(coordinator.device, RpcDevice)
         for _id in blutrv_key_ids:
             key = f"{BLU_TRV_IDENTIFIER}:{_id}"
             ble_addr: str = coordinator.device.config[key]["addr"]
@@ -149,6 +175,26 @@ def async_migrate_unique_ids(
                 )
             }
 
+    if virtual_button_keys := get_rpc_key_instances(
+        coordinator.device.config, "button"
+    ):
+        for key in virtual_button_keys:
+            old_unique_id = f"{coordinator.mac}-{key}"
+            if entity_entry.unique_id == old_unique_id:
+                role = get_rpc_role_by_key(coordinator.device.config, key)
+                new_unique_id = f"{coordinator.mac}-{key}-button_{role}"
+                LOGGER.debug(
+                    "Migrating unique_id for %s entity from [%s] to [%s]",
+                    entity_entry.entity_id,
+                    old_unique_id,
+                    new_unique_id,
+                )
+                return {
+                    "new_unique_id": entity_entry.unique_id.replace(
+                        old_unique_id, new_unique_id
+                    )
+                }
+
     return None
 
 
@@ -172,7 +218,7 @@ async def async_setup_entry(
         hass, config_entry.entry_id, partial(async_migrate_unique_ids, coordinator)
     )
 
-    entities: list[ShellyButton | ShellyBluTrvButton | ShellyVirtualButton] = []
+    entities: list[ShellyButton | ShellyBluTrvButton] = []
 
     entities.extend(
         ShellyButton(coordinator, button)
@@ -185,12 +231,9 @@ async def async_setup_entry(
         return
 
     # add virtual buttons
-    if virtual_button_ids := get_rpc_key_ids(coordinator.device.status, "button"):
-        entities.extend(
-            ShellyVirtualButton(coordinator, button, id_)
-            for id_ in virtual_button_ids
-            for button in VIRTUAL_BUTTONS
-        )
+    async_setup_entry_rpc(
+        hass, config_entry, async_add_entities, RPC_VIRTUAL_BUTTONS, RpcVirtualButton
+    )
 
     # add BLU TRV buttons
     if blutrv_key_ids := get_rpc_key_ids(coordinator.device.status, BLU_TRV_IDENTIFIER):
@@ -332,30 +375,16 @@ class ShellyBluTrvButton(ShellyBaseButton):
         await method(self._id)
 
 
-class ShellyVirtualButton(ShellyBaseButton):
-    """Defines a Shelly virtual component button."""
+class RpcVirtualButton(ShellyRpcAttributeEntity, ButtonEntity):
+    """Defines a Shelly RPC virtual component button."""
 
-    def __init__(
-        self,
-        coordinator: ShellyRpcCoordinator,
-        description: ShellyButtonDescription,
-        _id: int,
-    ) -> None:
-        """Initialize Shelly virtual component button."""
-        super().__init__(coordinator, description)
+    entity_description: RpcButtonDescription
+    _id: int
 
-        self._attr_unique_id = f"{coordinator.mac}-{description.key}:{_id}"
-        self._attr_device_info = get_entity_rpc_device_info(coordinator)
-        self._attr_name = get_rpc_entity_name(
-            coordinator.device, f"{description.key}:{_id}"
-        )
-        self._id = _id
-
-    async def _press_method(self) -> None:
-        """Press method."""
+    @rpc_call
+    async def async_press(self) -> None:
+        """Triggers the Shelly button press service."""
         if TYPE_CHECKING:
             assert isinstance(self.coordinator, ShellyRpcCoordinator)
 
-        await self.coordinator.device.button_trigger(
-            self._id, self.entity_description.press_action
-        )
+        await self.coordinator.device.button_trigger(self._id, "single_push")
@@ -195,9 +195,11 @@ def async_setup_rpc_attribute_entities(
         ):
             continue
 
-        if description.sub_key not in coordinator.device.status[
-            key
-        ] and not description.supported(coordinator.device.status[key]):
+        if (
+            description.sub_key
+            and description.sub_key not in coordinator.device.status[key]
+            and not description.supported(coordinator.device.status[key])
+        ):
             continue
 
         # Filter and remove entities that according to settings/status
@@ -309,7 +311,7 @@ class RpcEntityDescription(EntityDescription):
     # restrict the type to str.
     name: str = ""
 
-    sub_key: str
+    sub_key: str | None = None
 
     value: Callable[[Any, Any], Any] | None = None
     available: Callable[[dict], bool] | None = None
@@ -4,7 +4,7 @@ from __future__ import annotations
 
 from collections.abc import Callable
 from dataclasses import dataclass
-from typing import Any, cast
+from typing import TYPE_CHECKING, Any, cast
 
 from aioshelly.block_device import Block
 from aioshelly.const import RPC_GENERATIONS
@@ -37,6 +37,7 @@ from .entity import (
     ShellySleepingBlockAttributeEntity,
     async_setup_entry_attribute_entities,
     async_setup_entry_rpc,
+    rpc_call,
 )
 from .utils import (
     async_remove_orphaned_entities,
@@ -78,7 +79,7 @@ class RpcSwitchDescription(RpcEntityDescription, SwitchEntityDescription):
     is_on: Callable[[dict[str, Any]], bool]
     method_on: str
    method_off: str
-    method_params_fn: Callable[[int | None, bool], dict]
+    method_params_fn: Callable[[int | None, bool], tuple]
 
 
 RPC_RELAY_SWITCHES = {
@@ -87,9 +88,9 @@ RPC_RELAY_SWITCHES = {
         sub_key="output",
         removal_condition=is_rpc_exclude_from_relay,
         is_on=lambda status: bool(status["output"]),
-        method_on="Switch.Set",
-        method_off="Switch.Set",
-        method_params_fn=lambda id, value: {"id": id, "on": value},
+        method_on="switch_set",
+        method_off="switch_set",
+        method_params_fn=lambda id, value: (id, value),
     ),
 }
 
@@ -101,9 +102,9 @@ RPC_SWITCHES = {
             config, key, SWITCH_PLATFORM
         ),
         is_on=lambda status: bool(status["value"]),
-        method_on="Boolean.Set",
-        method_off="Boolean.Set",
-        method_params_fn=lambda id, value: {"id": id, "value": value},
+        method_on="boolean_set",
+        method_off="boolean_set",
+        method_params_fn=lambda id, value: (id, value),
        role="generic",
    ),
    "boolean_anti_freeze": RpcSwitchDescription(
@@ -111,9 +112,9 @@ RPC_SWITCHES = {
         sub_key="value",
         entity_registry_enabled_default=False,
         is_on=lambda status: bool(status["value"]),
-        method_on="Boolean.Set",
-        method_off="Boolean.Set",
-        method_params_fn=lambda id, value: {"id": id, "value": value},
+        method_on="boolean_set",
+        method_off="boolean_set",
+        method_params_fn=lambda id, value: (id, value),
        role="anti_freeze",
        models={MODEL_LINKEDGO_ST802_THERMOSTAT, MODEL_LINKEDGO_ST1820_THERMOSTAT},
    ),
@@ -121,9 +122,9 @@ RPC_SWITCHES = {
         key="boolean",
         sub_key="value",
         is_on=lambda status: bool(status["value"]),
-        method_on="Boolean.Set",
-        method_off="Boolean.Set",
-        method_params_fn=lambda id, value: {"id": id, "value": value},
+        method_on="boolean_set",
+        method_off="boolean_set",
+        method_params_fn=lambda id, value: (id, value),
        role="child_lock",
        models={MODEL_LINKEDGO_ST1820_THERMOSTAT},
    ),
@@ -132,9 +133,9 @@ RPC_SWITCHES = {
         sub_key="value",
         entity_registry_enabled_default=False,
         is_on=lambda status: bool(status["value"]),
-        method_on="Boolean.Set",
-        method_off="Boolean.Set",
-        method_params_fn=lambda id, value: {"id": id, "value": value},
+        method_on="boolean_set",
+        method_off="boolean_set",
+        method_params_fn=lambda id, value: (id, value),
        role="enable",
        models={MODEL_LINKEDGO_ST802_THERMOSTAT, MODEL_LINKEDGO_ST1820_THERMOSTAT},
    ),
@@ -142,9 +143,9 @@ RPC_SWITCHES = {
         key="boolean",
         sub_key="value",
         is_on=lambda status: bool(status["value"]),
-        method_on="Boolean.Set",
-        method_off="Boolean.Set",
-        method_params_fn=lambda id, value: {"id": id, "value": value},
+        method_on="boolean_set",
+        method_off="boolean_set",
+        method_params_fn=lambda id, value: (id, value),
        role="start_charging",
        models={MODEL_TOP_EV_CHARGER_EVE01},
    ),
@@ -153,9 +154,9 @@ RPC_SWITCHES = {
         sub_key="value",
         entity_registry_enabled_default=False,
         is_on=lambda status: bool(status["value"]),
-        method_on="Boolean.Set",
-        method_off="Boolean.Set",
-        method_params_fn=lambda id, value: {"id": id, "value": value},
+        method_on="boolean_set",
+        method_off="boolean_set",
+        method_params_fn=lambda id, value: (id, value),
        role="state",
        models={MODEL_NEO_WATER_VALVE},
    ),
@@ -163,9 +164,9 @@ RPC_SWITCHES = {
         key="boolean",
         sub_key="value",
         is_on=lambda status: bool(status["value"]),
-        method_on="Boolean.Set",
-        method_off="Boolean.Set",
-        method_params_fn=lambda id, value: {"id": id, "value": value},
+        method_on="boolean_set",
+        method_off="boolean_set",
+        method_params_fn=lambda id, value: (id, value),
        role="zone0",
        models={MODEL_FRANKEVER_IRRIGATION_CONTROLLER},
    ),
@@ -173,9 +174,9 @@ RPC_SWITCHES = {
         key="boolean",
         sub_key="value",
         is_on=lambda status: bool(status["value"]),
-        method_on="Boolean.Set",
-        method_off="Boolean.Set",
-        method_params_fn=lambda id, value: {"id": id, "value": value},
+        method_on="boolean_set",
+        method_off="boolean_set",
+        method_params_fn=lambda id, value: (id, value),
        role="zone1",
        models={MODEL_FRANKEVER_IRRIGATION_CONTROLLER},
    ),
@@ -183,9 +184,9 @@ RPC_SWITCHES = {
         key="boolean",
         sub_key="value",
         is_on=lambda status: bool(status["value"]),
-        method_on="Boolean.Set",
-        method_off="Boolean.Set",
-        method_params_fn=lambda id, value: {"id": id, "value": value},
+        method_on="boolean_set",
+        method_off="boolean_set",
+        method_params_fn=lambda id, value: (id, value),
        role="zone2",
        models={MODEL_FRANKEVER_IRRIGATION_CONTROLLER},
    ),
@@ -193,9 +194,9 @@ RPC_SWITCHES = {
         key="boolean",
         sub_key="value",
         is_on=lambda status: bool(status["value"]),
-        method_on="Boolean.Set",
-        method_off="Boolean.Set",
-        method_params_fn=lambda id, value: {"id": id, "value": value},
+        method_on="boolean_set",
+        method_off="boolean_set",
+        method_params_fn=lambda id, value: (id, value),
        role="zone3",
        models={MODEL_FRANKEVER_IRRIGATION_CONTROLLER},
    ),
@@ -203,9 +204,9 @@ RPC_SWITCHES = {
         key="boolean",
         sub_key="value",
         is_on=lambda status: bool(status["value"]),
-        method_on="Boolean.Set",
-        method_off="Boolean.Set",
-        method_params_fn=lambda id, value: {"id": id, "value": value},
+        method_on="boolean_set",
+        method_off="boolean_set",
+        method_params_fn=lambda id, value: (id, value),
        role="zone4",
        models={MODEL_FRANKEVER_IRRIGATION_CONTROLLER},
    ),
@@ -213,9 +214,9 @@ RPC_SWITCHES = {
         key="boolean",
         sub_key="value",
         is_on=lambda status: bool(status["value"]),
-        method_on="Boolean.Set",
-        method_off="Boolean.Set",
-        method_params_fn=lambda id, value: {"id": id, "value": value},
+        method_on="boolean_set",
+        method_off="boolean_set",
+        method_params_fn=lambda id, value: (id, value),
        role="zone5",
        models={MODEL_FRANKEVER_IRRIGATION_CONTROLLER},
    ),
@@ -223,9 +224,9 @@ RPC_SWITCHES = {
         key="script",
         sub_key="running",
         is_on=lambda status: bool(status["running"]),
-        method_on="Script.Start",
-        method_off="Script.Stop",
-        method_params_fn=lambda id, _: {"id": id},
+        method_on="script_start",
+        method_off="script_stop",
+        method_params_fn=lambda id, _: (id,),
        entity_registry_enabled_default=False,
        entity_category=EntityCategory.CONFIG,
    ),
@@ -422,19 +423,27 @@ class RpcSwitch(ShellyRpcAttributeEntity, SwitchEntity):
         """If switch is on."""
         return self.entity_description.is_on(self.status)
 
+    @rpc_call
     async def async_turn_on(self, **kwargs: Any) -> None:
-        """Turn on relay."""
-        await self.call_rpc(
-            self.entity_description.method_on,
-            self.entity_description.method_params_fn(self._id, True),
-        )
+        """Turn on switch."""
+        method = getattr(self.coordinator.device, self.entity_description.method_on)
+
+        if TYPE_CHECKING:
+            assert method is not None
+
+        params = self.entity_description.method_params_fn(self._id, True)
+        await method(*params)
 
+    @rpc_call
     async def async_turn_off(self, **kwargs: Any) -> None:
-        """Turn off relay."""
-        await self.call_rpc(
-            self.entity_description.method_off,
-            self.entity_description.method_params_fn(self._id, False),
-        )
+        """Turn off switch."""
+        method = getattr(self.coordinator.device, self.entity_description.method_off)
+
+        if TYPE_CHECKING:
+            assert method is not None
+
+        params = self.entity_description.method_params_fn(self._id, False)
+        await method(*params)
 
 
 class RpcRelaySwitch(RpcSwitch):
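In the switch hunks above, the entity descriptions stop carrying raw RPC method names with parameter dicts and instead name a coroutine on the device object plus a params tuple that gets unpacked into it via getattr. A minimal, runnable sketch of that dispatch pattern; FakeDevice and the descriptions below are hypothetical stand-ins, not aioshelly's real API:

import asyncio
from collections.abc import Callable
from dataclasses import dataclass

class FakeDevice:
    """Hypothetical device exposing typed coroutine methods."""

    async def boolean_set(self, component_id: int, value: bool) -> None:
        print(f"boolean_set(id={component_id}, value={value})")

    async def script_start(self, component_id: int) -> None:
        print(f"script_start(id={component_id})")

@dataclass(frozen=True, kw_only=True)
class SwitchDescription:
    method_on: str
    method_off: str
    method_params_fn: Callable[[int, bool], tuple]

DESCRIPTIONS = {
    "boolean": SwitchDescription(
        method_on="boolean_set",
        method_off="boolean_set",
        method_params_fn=lambda id_, value: (id_, value),
    ),
    "script": SwitchDescription(
        method_on="script_start",
        method_off="script_start",
        method_params_fn=lambda id_, _: (id_,),
    ),
}

async def turn_on(device: FakeDevice, description: SwitchDescription, id_: int) -> None:
    # Look the coroutine up by name and unpack the description's params tuple into it.
    method = getattr(device, description.method_on)
    await method(*description.method_params_fn(id_, True))

asyncio.run(turn_on(FakeDevice(), DESCRIPTIONS["boolean"], 0))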
@@ -34,6 +34,17 @@
     "climate": {
       "air_conditioner": {
         "state_attributes": {
+          "preset_mode": {
+            "state": {
+              "wind_free": "mdi:weather-dust",
+              "wind_free_sleep": "mdi:sleep",
+              "quiet": "mdi:volume-off",
+              "long_wind": "mdi:weather-windy",
+              "smart": "mdi:leaf",
+              "motion_direct": "mdi:account-arrow-left",
+              "motion_indirect": "mdi:account-arrow-right"
+            }
+          },
           "fan_mode": {
             "state": {
               "turbo": "mdi:wind-power"
@@ -87,7 +87,7 @@
             "wind_free_sleep": "WindFree sleep",
             "quiet": "Quiet",
             "long_wind": "Long wind",
-            "smart": "Smart",
+            "smart": "Smart saver",
             "motion_direct": "Motion direct",
             "motion_indirect": "Motion indirect"
           }
@@ -241,7 +241,6 @@ class SuezWaterCoordinator(DataUpdateCoordinator[SuezWaterData]):
     ) -> StatisticMetaData:
         """Build statistics metadata for requested configuration."""
         return StatisticMetaData(
-            has_mean=False,
             mean_type=StatisticMeanType.NONE,
             has_sum=True,
             name=f"Suez water {name} {self._counter_id}",
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/systemmonitor",
   "iot_class": "local_push",
   "loggers": ["psutil"],
-  "requirements": ["psutil-home-assistant==0.0.1", "psutil==7.0.0"],
+  "requirements": ["psutil-home-assistant==0.0.1", "psutil==7.1.0"],
   "single_config_entry": true
 }
@@ -7,5 +7,5 @@
   "iot_class": "local_polling",
   "loggers": ["holidays"],
   "quality_scale": "internal",
-  "requirements": ["holidays==0.81"]
+  "requirements": ["holidays==0.82"]
 }
@@ -744,8 +744,11 @@ class ZhaConfigFlowHandler(BaseZhaFlow, ConfigFlow, domain=DOMAIN):
 
         # Without confirmation, discovery can automatically progress into parts of the
         # config flow logic that interacts with hardware.
+        # Ignore Zeroconf discoveries during onboarding, as they may be in use already.
         if user_input is not None or (
-            not onboarding.async_is_onboarded(self.hass) and not zha_config_entries
+            not onboarding.async_is_onboarded(self.hass)
+            and not zha_config_entries
+            and self.source != SOURCE_ZEROCONF
         ):
             # Probe the radio type if we don't have one yet
             if self._radio_mgr.radio_type is None:
@@ -9,7 +9,6 @@ from typing import TYPE_CHECKING, Final
 from .generated.entity_platforms import EntityPlatforms
 from .helpers.deprecation import (
     DeprecatedConstantEnum,
-    EnumWithDeprecatedMembers,
     all_with_deprecated_constants,
     check_if_deprecated_constant,
     dir_with_deprecated_constants,
@@ -704,35 +703,13 @@ class UnitOfMass(StrEnum):
     STONES = "st"
 
 
-class UnitOfConductivity(
-    StrEnum,
-    metaclass=EnumWithDeprecatedMembers,
-    deprecated={
-        "SIEMENS": ("UnitOfConductivity.SIEMENS_PER_CM", "2025.11.0"),
-        "MICROSIEMENS": ("UnitOfConductivity.MICROSIEMENS_PER_CM", "2025.11.0"),
-        "MILLISIEMENS": ("UnitOfConductivity.MILLISIEMENS_PER_CM", "2025.11.0"),
-    },
-):
+class UnitOfConductivity(StrEnum):
     """Conductivity units."""
 
     SIEMENS_PER_CM = "S/cm"
     MICROSIEMENS_PER_CM = "μS/cm"
     MILLISIEMENS_PER_CM = "mS/cm"
 
-    # Deprecated aliases
-    SIEMENS = "S/cm"
-    """Deprecated: Please use UnitOfConductivity.SIEMENS_PER_CM"""
-    MICROSIEMENS = "μS/cm"
-    """Deprecated: Please use UnitOfConductivity.MICROSIEMENS_PER_CM"""
-    MILLISIEMENS = "mS/cm"
-    """Deprecated: Please use UnitOfConductivity.MILLISIEMENS_PER_CM"""
-
-
-_DEPRECATED_CONDUCTIVITY: Final = DeprecatedConstantEnum(
-    UnitOfConductivity.MICROSIEMENS_PER_CM,
-    "2025.11",
-)
-"""Deprecated: please use UnitOfConductivity.MICROSIEMENS_PER_CM"""
-
 # Light units
 LIGHT_LUX: Final = "lx"
@@ -36,7 +36,7 @@ from homeassistant.core import (
     callback,
     split_entity_id,
 )
-from homeassistant.exceptions import TemplateError
+from homeassistant.exceptions import HomeAssistantError, TemplateError
 from homeassistant.loader import bind_hass
 from homeassistant.util import dt as dt_util
 from homeassistant.util.async_ import run_callback_threadsafe
@@ -1004,12 +1004,9 @@ class TrackTemplateResultInfo:
             if track_template_.template.hass:
                 continue
 
-            frame.report_usage(
-                "calls async_track_template_result with template without hass",
-                core_behavior=frame.ReportBehavior.LOG,
-                breaks_in_ha_version="2025.10",
+            raise HomeAssistantError(
+                "Calls async_track_template_result with template without hass"
             )
-            track_template_.template.hass = hass
 
         self._rate_limit = KeyedRateLimit(hass)
         self._info: dict[Template, RenderInfo] = {}
@@ -130,7 +130,7 @@ multidict>=6.0.2
 backoff>=2.0
 
 # ensure pydantic version does not float since it might have breaking changes
-pydantic==2.11.9
+pydantic==2.12.0
 
 # Required for Python 3.12.4 compatibility (#119223).
 mashumaro>=3.13.1
requirements_all.txt (generated, 16 changed lines)
@@ -247,7 +247,7 @@ aioelectricitymaps==1.1.1
 aioemonitor==1.0.5
 
 # homeassistant.components.esphome
-aioesphomeapi==41.12.0
+aioesphomeapi==41.13.0
 
 # homeassistant.components.flo
 aioflo==2021.11.0
@@ -895,7 +895,7 @@ enocean==0.50
 enturclient==0.2.4
 
 # homeassistant.components.environment_canada
-env-canada==0.11.2
+env-canada==0.11.3
 
 # homeassistant.components.season
 ephem==4.1.6
@@ -1183,7 +1183,7 @@ hole==0.9.0
 
 # homeassistant.components.holiday
 # homeassistant.components.workday
-holidays==0.81
+holidays==0.82
 
 # homeassistant.components.frontend
 home-assistant-frontend==20251001.0
@@ -1545,7 +1545,7 @@ nextcord==3.1.0
 nextdns==4.1.0
 
 # homeassistant.components.niko_home_control
-nhc==0.4.12
+nhc==0.6.1
 
 # homeassistant.components.nibe_heatpump
 nibe==2.19.0
@@ -1628,7 +1628,7 @@ open-meteo==0.3.2
 
 # homeassistant.components.open_router
 # homeassistant.components.openai_conversation
-openai==1.99.5
+openai==2.2.0
 
 # homeassistant.components.openerz
 openerz-api==0.3.0
@@ -1755,7 +1755,7 @@ proxmoxer==2.0.1
 psutil-home-assistant==0.0.1
 
 # homeassistant.components.systemmonitor
-psutil==7.0.0
+psutil==7.1.0
 
 # homeassistant.components.pulseaudio_loopback
 pulsectl==23.5.2
@@ -2135,7 +2135,7 @@ pykwb==0.0.8
 pylacrosse==0.4
 
 # homeassistant.components.lamarzocco
-pylamarzocco==2.1.1
+pylamarzocco==2.1.2
 
 # homeassistant.components.lastfm
 pylast==5.1.0
@@ -2210,7 +2210,7 @@ pynetio==0.1.9.1
 pynina==0.3.6
 
 # homeassistant.components.nintendo_parental
-pynintendoparental==1.0.1
+pynintendoparental==1.1.1
 
 # homeassistant.components.nobo_hub
 pynobo==1.8.1
@@ -15,7 +15,7 @@ license-expression==30.4.3
 mock-open==1.4.0
 mypy-dev==1.19.0a2
 pre-commit==4.2.0
-pydantic==2.11.9
+pydantic==2.12.0
 pylint==3.3.8
 pylint-per-file-ignores==1.4.0
 pipdeptree==2.26.1
requirements_test_all.txt (generated, 16 changed lines)
@@ -235,7 +235,7 @@ aioelectricitymaps==1.1.1
 aioemonitor==1.0.5
 
 # homeassistant.components.esphome
-aioesphomeapi==41.12.0
+aioesphomeapi==41.13.0
 
 # homeassistant.components.flo
 aioflo==2021.11.0
@@ -777,7 +777,7 @@ energyzero==2.1.1
 enocean==0.50
 
 # homeassistant.components.environment_canada
-env-canada==0.11.2
+env-canada==0.11.3
 
 # homeassistant.components.season
 ephem==4.1.6
@@ -1032,7 +1032,7 @@ hole==0.9.0
 
 # homeassistant.components.holiday
 # homeassistant.components.workday
-holidays==0.81
+holidays==0.82
 
 # homeassistant.components.frontend
 home-assistant-frontend==20251001.0
@@ -1328,7 +1328,7 @@ nextcord==3.1.0
 nextdns==4.1.0
 
 # homeassistant.components.niko_home_control
-nhc==0.4.12
+nhc==0.6.1
 
 # homeassistant.components.nibe_heatpump
 nibe==2.19.0
@@ -1399,7 +1399,7 @@ open-meteo==0.3.2
 
 # homeassistant.components.open_router
 # homeassistant.components.openai_conversation
-openai==1.99.5
+openai==2.2.0
 
 # homeassistant.components.openerz
 openerz-api==0.3.0
@@ -1487,7 +1487,7 @@ prowlpy==1.0.2
 psutil-home-assistant==0.0.1
 
 # homeassistant.components.systemmonitor
-psutil==7.0.0
+psutil==7.1.0
 
 # homeassistant.components.pushbullet
 pushbullet.py==0.11.0
@@ -1783,7 +1783,7 @@ pykrakenapi==0.1.8
 pykulersky==0.5.8
 
 # homeassistant.components.lamarzocco
-pylamarzocco==2.1.1
+pylamarzocco==2.1.2
 
 # homeassistant.components.lastfm
 pylast==5.1.0
@@ -1846,7 +1846,7 @@ pynetgear==0.10.10
 pynina==0.3.6
 
 # homeassistant.components.nintendo_parental
-pynintendoparental==1.0.1
+pynintendoparental==1.1.1
 
 # homeassistant.components.nobo_hub
 pynobo==1.8.1
@@ -155,7 +155,7 @@ multidict>=6.0.2
|
||||
backoff>=2.0
|
||||
|
||||
# ensure pydantic version does not float since it might have breaking changes
|
||||
pydantic==2.11.9
|
||||
pydantic==2.12.0
|
||||
|
||||
# Required for Python 3.12.4 compatibility (#119223).
|
||||
mashumaro>=3.13.1
|
||||
|
@@ -9,12 +9,17 @@ from airthings_ble import (
|
||||
AirthingsDevice,
|
||||
AirthingsDeviceType,
|
||||
)
|
||||
from bleak.backends.device import BLEDevice
|
||||
|
||||
from homeassistant.components.airthings_ble.const import DOMAIN
|
||||
from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceRegistry
|
||||
from homeassistant.helpers.device_registry import (
|
||||
CONNECTION_BLUETOOTH,
|
||||
DeviceEntry,
|
||||
DeviceRegistry,
|
||||
)
|
||||
|
||||
from tests.common import MockConfigEntry, MockEntity
|
||||
from tests.components.bluetooth import generate_advertisement_data, generate_ble_device
|
||||
@@ -28,7 +33,15 @@ def patch_async_setup_entry(return_value=True):
|
||||
)
|
||||
|
||||
|
||||
def patch_async_ble_device_from_address(return_value: BluetoothServiceInfoBleak | None):
|
||||
def patch_async_discovered_service_info(return_value: list[BluetoothServiceInfoBleak]):
|
||||
"""Patch async_discovered_service_info to return given list."""
|
||||
return patch(
|
||||
"homeassistant.components.bluetooth.async_discovered_service_info",
|
||||
return_value=return_value,
|
||||
)
|
||||
|
||||
|
||||
def patch_async_ble_device_from_address(return_value: BLEDevice | None):
|
||||
"""Patch async ble device from address to return a given value."""
|
||||
return patch(
|
||||
"homeassistant.components.bluetooth.async_ble_device_from_address",
|
||||
@@ -101,6 +114,27 @@ WAVE_SERVICE_INFO = BluetoothServiceInfoBleak(
|
||||
tx_power=0,
|
||||
)
|
||||
|
||||
WAVE_ENHANCE_SERVICE_INFO = BluetoothServiceInfoBleak(
|
||||
name="cc-cc-cc-cc-cc-cc",
|
||||
address="cc:cc:cc:cc:cc:cc",
|
||||
device=generate_ble_device(
|
||||
address="cc:cc:cc:cc:cc:cc",
|
||||
name="Airthings Wave Enhance",
|
||||
),
|
||||
rssi=-61,
|
||||
manufacturer_data={820: b"\xe4/\xa5\xae\t\x00"},
|
||||
service_data={},
|
||||
service_uuids=[],
|
||||
source="local",
|
||||
advertisement=generate_advertisement_data(
|
||||
manufacturer_data={820: b"\xe4/\xa5\xae\t\x00"},
|
||||
service_uuids=[],
|
||||
),
|
||||
connectable=True,
|
||||
time=0,
|
||||
tx_power=0,
|
||||
)
|
||||
|
||||
VIEW_PLUS_SERVICE_INFO = BluetoothServiceInfoBleak(
|
||||
name="cc-cc-cc-cc-cc-cc",
|
||||
address="cc:cc:cc:cc:cc:cc",
|
||||
@@ -211,6 +245,26 @@ WAVE_DEVICE_INFO = AirthingsDevice(
|
||||
address="cc:cc:cc:cc:cc:cc",
|
||||
)
|
||||
|
||||
WAVE_ENHANCE_DEVICE_INFO = AirthingsDevice(
|
||||
manufacturer="Airthings AS",
|
||||
hw_version="REV X",
|
||||
sw_version="T-SUB-2.6.2-master+0",
|
||||
model=AirthingsDeviceType.WAVE_ENHANCE_EU,
|
||||
name="Airthings Wave Enhance",
|
||||
identifier="123456",
|
||||
sensors={
|
||||
"lux": 25,
|
||||
"battery": 85,
|
||||
"humidity": 60.0,
|
||||
"temperature": 21.0,
|
||||
"co2": 500.0,
|
||||
"voc": 155.0,
|
||||
"pressure": 1020,
|
||||
"noise": 40,
|
||||
},
|
||||
address="cc:cc:cc:cc:cc:cc",
|
||||
)
|
||||
|
||||
TEMPERATURE_V1 = MockEntity(
|
||||
unique_id="Airthings Wave Plus 123456_temperature",
|
||||
name="Airthings Wave Plus 123456 Temperature",
|
||||
@@ -247,23 +301,32 @@ VOC_V3 = MockEntity(
|
||||
)
|
||||
|
||||
|
||||
def create_entry(hass: HomeAssistant) -> MockConfigEntry:
|
||||
def create_entry(
|
||||
hass: HomeAssistant,
|
||||
service_info: BluetoothServiceInfoBleak,
|
||||
device_info: AirthingsDevice,
|
||||
) -> MockConfigEntry:
|
||||
"""Create a config entry."""
|
||||
entry = MockConfigEntry(
|
||||
domain=DOMAIN,
|
||||
unique_id=WAVE_SERVICE_INFO.address,
|
||||
title="Airthings Wave Plus (123456)",
|
||||
unique_id=service_info.address,
|
||||
title=f"{device_info.name} ({device_info.identifier})",
|
||||
)
|
||||
entry.add_to_hass(hass)
|
||||
return entry
|
||||
|
||||
|
||||
def create_device(entry: ConfigEntry, device_registry: DeviceRegistry):
|
||||
def create_device(
|
||||
entry: ConfigEntry,
|
||||
device_registry: DeviceRegistry,
|
||||
service_info: BluetoothServiceInfoBleak,
|
||||
device_info: AirthingsDevice,
|
||||
) -> DeviceEntry:
|
||||
"""Create a device for the given entry."""
|
||||
return device_registry.async_get_or_create(
|
||||
config_entry_id=entry.entry_id,
|
||||
connections={(CONNECTION_BLUETOOTH, WAVE_SERVICE_INFO.address)},
|
||||
connections={(CONNECTION_BLUETOOTH, service_info.address)},
|
||||
manufacturer="Airthings AS",
|
||||
name="Airthings Wave Plus (123456)",
|
||||
model="Wave Plus",
|
||||
name=f"{device_info.name} ({device_info.identifier})",
|
||||
model=device_info.model.product_name,
|
||||
)
|
||||
|
@@ -2,6 +2,8 @@
|
||||
|
||||
import logging
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.airthings_ble.const import DOMAIN
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -16,10 +18,15 @@ from . import (
|
||||
VOC_V2,
|
||||
VOC_V3,
|
||||
WAVE_DEVICE_INFO,
|
||||
WAVE_ENHANCE_DEVICE_INFO,
|
||||
WAVE_ENHANCE_SERVICE_INFO,
|
||||
WAVE_SERVICE_INFO,
|
||||
create_device,
|
||||
create_entry,
|
||||
patch_airthings_ble,
|
||||
patch_airthings_device_update,
|
||||
patch_async_ble_device_from_address,
|
||||
patch_async_discovered_service_info,
|
||||
)
|
||||
|
||||
from tests.components.bluetooth import inject_bluetooth_service_info
|
||||
@@ -33,8 +40,8 @@ async def test_migration_from_v1_to_v3_unique_id(
|
||||
device_registry: dr.DeviceRegistry,
|
||||
) -> None:
|
||||
"""Verify that we can migrate from v1 (pre 2023.9.0) to the latest unique id format."""
|
||||
entry = create_entry(hass)
|
||||
device = create_device(entry, device_registry)
|
||||
entry = create_entry(hass, WAVE_SERVICE_INFO, WAVE_DEVICE_INFO)
|
||||
device = create_device(entry, device_registry, WAVE_SERVICE_INFO, WAVE_DEVICE_INFO)
|
||||
|
||||
assert entry is not None
|
||||
assert device is not None
|
||||
@@ -74,8 +81,8 @@ async def test_migration_from_v2_to_v3_unique_id(
|
||||
device_registry: dr.DeviceRegistry,
|
||||
) -> None:
|
||||
"""Verify that we can migrate from v2 (introduced in 2023.9.0) to the latest unique id format."""
|
||||
entry = create_entry(hass)
|
||||
device = create_device(entry, device_registry)
|
||||
entry = create_entry(hass, WAVE_SERVICE_INFO, WAVE_DEVICE_INFO)
|
||||
device = create_device(entry, device_registry, WAVE_SERVICE_INFO, WAVE_DEVICE_INFO)
|
||||
|
||||
assert entry is not None
|
||||
assert device is not None
|
||||
@@ -115,8 +122,8 @@ async def test_migration_from_v1_and_v2_to_v3_unique_id(
|
||||
device_registry: dr.DeviceRegistry,
|
||||
) -> None:
|
||||
"""Test if migration works when we have both v1 (pre 2023.9.0) and v2 (introduced in 2023.9.0) unique ids."""
|
||||
entry = create_entry(hass)
|
||||
device = create_device(entry, device_registry)
|
||||
entry = create_entry(hass, WAVE_SERVICE_INFO, WAVE_DEVICE_INFO)
|
||||
device = create_device(entry, device_registry, WAVE_SERVICE_INFO, WAVE_DEVICE_INFO)
|
||||
|
||||
assert entry is not None
|
||||
assert device is not None
|
||||
@@ -165,8 +172,8 @@ async def test_migration_with_all_unique_ids(
|
||||
device_registry: dr.DeviceRegistry,
|
||||
) -> None:
|
||||
"""Test if migration works when we have all unique ids."""
|
||||
entry = create_entry(hass)
|
||||
device = create_device(entry, device_registry)
|
||||
entry = create_entry(hass, WAVE_SERVICE_INFO, WAVE_DEVICE_INFO)
|
||||
device = create_device(entry, device_registry, WAVE_SERVICE_INFO, WAVE_DEVICE_INFO)
|
||||
|
||||
assert entry is not None
|
||||
assert device is not None
|
||||
@@ -215,3 +222,48 @@ async def test_migration_with_all_unique_ids(
|
||||
assert entity_registry.async_get(v1.entity_id).unique_id == VOC_V1.unique_id
|
||||
assert entity_registry.async_get(v2.entity_id).unique_id == VOC_V2.unique_id
|
||||
assert entity_registry.async_get(v3.entity_id).unique_id == VOC_V3.unique_id
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("unique_suffix", "expected_sensor_name"),
|
||||
[
|
||||
("lux", "Illuminance"),
|
||||
("noise", "Ambient noise"),
|
||||
],
|
||||
)
|
||||
async def test_translation_keys(
|
||||
hass: HomeAssistant,
|
||||
entity_registry: er.EntityRegistry,
|
||||
device_registry: dr.DeviceRegistry,
|
||||
unique_suffix: str,
|
||||
expected_sensor_name: str,
|
||||
) -> None:
|
||||
"""Test that translated sensor names are correct."""
|
||||
entry = create_entry(hass, WAVE_ENHANCE_SERVICE_INFO, WAVE_DEVICE_INFO)
|
||||
device = create_device(
|
||||
entry, device_registry, WAVE_ENHANCE_SERVICE_INFO, WAVE_ENHANCE_DEVICE_INFO
|
||||
)
|
||||
|
||||
with (
|
||||
patch_async_ble_device_from_address(WAVE_ENHANCE_SERVICE_INFO.device),
|
||||
patch_async_discovered_service_info([WAVE_ENHANCE_SERVICE_INFO]),
|
||||
patch_airthings_ble(WAVE_ENHANCE_DEVICE_INFO),
|
||||
):
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
assert device is not None
|
||||
assert device.name == "Airthings Wave Enhance (123456)"
|
||||
|
||||
unique_id = f"{WAVE_ENHANCE_DEVICE_INFO.address}_{unique_suffix}"
|
||||
entity_id = entity_registry.async_get_entity_id(Platform.SENSOR, DOMAIN, unique_id)
|
||||
assert entity_id is not None
|
||||
|
||||
state = hass.states.get(entity_id)
|
||||
assert state is not None
|
||||
|
||||
expected_value = WAVE_ENHANCE_DEVICE_INFO.sensors[unique_suffix]
|
||||
assert state.state == str(expected_value)
|
||||
|
||||
expected_name = f"Airthings Wave Enhance (123456) {expected_sensor_name}"
|
||||
assert state.attributes.get("friendly_name") == expected_name
|
||||
|
@@ -45,7 +45,7 @@ def dimmable_light() -> NHCLight:
|
||||
mock.is_dimmable = True
|
||||
mock.name = "dimmable light"
|
||||
mock.suggested_area = "room"
|
||||
mock.state = 255
|
||||
mock.state = 100
|
||||
return mock
|
||||
|
||||
|
||||
|
@@ -41,7 +41,7 @@
|
||||
# name: test_entities[light.dimmable_light-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'brightness': 255,
|
||||
'brightness': 100,
|
||||
'color_mode': <ColorMode.BRIGHTNESS: 'brightness'>,
|
||||
'friendly_name': 'dimmable light',
|
||||
'supported_color_modes': list([
|
||||
|
@@ -42,7 +42,7 @@ async def test_entities(
|
||||
@pytest.mark.parametrize(
|
||||
("light_id", "data", "set_brightness"),
|
||||
[
|
||||
(0, {ATTR_ENTITY_ID: "light.light"}, 255),
|
||||
(0, {ATTR_ENTITY_ID: "light.light"}, None),
|
||||
(
|
||||
1,
|
||||
{ATTR_ENTITY_ID: "light.dimmable_light", ATTR_BRIGHTNESS: 50},
|
||||
@@ -119,7 +119,7 @@ async def test_updating(
|
||||
assert hass.states.get("light.light").state == STATE_OFF
|
||||
|
||||
assert hass.states.get("light.dimmable_light").state == STATE_ON
|
||||
assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] == 255
|
||||
assert hass.states.get("light.dimmable_light").attributes[ATTR_BRIGHTNESS] == 100
|
||||
|
||||
dimmable_light.state = 204
|
||||
await find_update_callback(mock_niko_home_control_connection, 2)(204)
|
||||
|
@@ -4,7 +4,6 @@ from collections.abc import Generator
|
||||
from datetime import datetime
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
from pynintendoparental.device import Device
|
||||
import pytest
|
||||
|
||||
from homeassistant.components.nintendo_parental.const import DOMAIN
|
||||
@@ -24,18 +23,6 @@ def mock_config_entry() -> MockConfigEntry:
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_nintendo_device() -> Device:
|
||||
"""Return a mocked device."""
|
||||
mock = AsyncMock(spec=Device)
|
||||
mock.device_id = "testdevid"
|
||||
mock.name = "Home Assistant Test"
|
||||
mock.extra = {"device": {"firmwareVersion": {"displayedVersion": "99.99.99"}}}
|
||||
mock.limit_time = 120
|
||||
mock.today_playing_time = 110
|
||||
return mock
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_nintendo_authenticator() -> Generator[MagicMock]:
|
||||
"""Mock Nintendo Authenticator."""
|
||||
@@ -66,27 +53,6 @@ def mock_nintendo_authenticator() -> Generator[MagicMock]:
|
||||
yield mock_auth
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_nintendo_client(
|
||||
mock_nintendo_device: Device,
|
||||
) -> Generator[AsyncMock]:
|
||||
"""Mock a Nintendo client."""
|
||||
with (
|
||||
patch(
|
||||
"homeassistant.components.nintendo_parental.NintendoParental",
|
||||
autospec=True,
|
||||
) as mock_client,
|
||||
patch(
|
||||
"homeassistant.components.nintendo_parental.config_flow.NintendoParental",
|
||||
new=mock_client,
|
||||
),
|
||||
):
|
||||
client = mock_client.return_value
|
||||
client.update.return_value = True
|
||||
client.devices.return_value = {"testdevid": mock_nintendo_device}
|
||||
yield client
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_setup_entry() -> Generator[AsyncMock]:
|
||||
"""Override async_setup_entry."""
|
||||
|
@@ -77,7 +77,7 @@ async def test_invalid_auth(
|
||||
|
||||
# Simulate invalid authentication by raising an exception
|
||||
mock_nintendo_authenticator.complete_login.side_effect = (
|
||||
InvalidSessionTokenException
|
||||
InvalidSessionTokenException(status_code=401, message="Test")
|
||||
)
|
||||
|
||||
result = await hass.config_entries.flow.async_configure(
|
||||
|
@@ -157,6 +157,7 @@ def create_function_tool_call_item(
|
||||
ResponseFunctionCallArgumentsDoneEvent(
|
||||
arguments="".join(arguments),
|
||||
item_id=id,
|
||||
name=name,
|
||||
output_index=output_index,
|
||||
sequence_number=0,
|
||||
type="response.function_call_arguments.done",
|
||||
|
@@ -569,7 +569,7 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non
|
||||
CONF_PROMPT: "Speak like a pirate",
|
||||
CONF_LLM_HASS_API: ["assist"],
|
||||
CONF_TEMPERATURE: 0.8,
|
||||
CONF_CHAT_MODEL: "o5",
|
||||
CONF_CHAT_MODEL: "gpt-5",
|
||||
CONF_TOP_P: 0.9,
|
||||
CONF_MAX_TOKENS: 1000,
|
||||
CONF_REASONING_EFFORT: "low",
|
||||
@@ -607,6 +607,52 @@ async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> Non
|
||||
CONF_CODE_INTERPRETER: False,
|
||||
},
|
||||
),
|
||||
( # Case 5: code interpreter supported to not supported model
|
||||
{
|
||||
CONF_RECOMMENDED: False,
|
||||
CONF_PROMPT: "Speak like a pirate",
|
||||
CONF_LLM_HASS_API: ["assist"],
|
||||
CONF_TEMPERATURE: 0.8,
|
||||
CONF_CHAT_MODEL: "gpt-5",
|
||||
CONF_TOP_P: 0.9,
|
||||
CONF_MAX_TOKENS: 1000,
|
||||
CONF_REASONING_EFFORT: "low",
|
||||
CONF_CODE_INTERPRETER: True,
|
||||
CONF_VERBOSITY: "medium",
|
||||
CONF_WEB_SEARCH: True,
|
||||
CONF_WEB_SEARCH_CONTEXT_SIZE: "high",
|
||||
CONF_WEB_SEARCH_USER_LOCATION: False,
|
||||
},
|
||||
(
|
||||
{
|
||||
CONF_RECOMMENDED: False,
|
||||
CONF_PROMPT: "Speak like a pirate",
|
||||
},
|
||||
{
|
||||
CONF_TEMPERATURE: 0.8,
|
||||
CONF_CHAT_MODEL: "gpt-5-pro",
|
||||
CONF_TOP_P: 0.9,
|
||||
CONF_MAX_TOKENS: 1000,
|
||||
},
|
||||
{
|
||||
CONF_WEB_SEARCH: True,
|
||||
CONF_WEB_SEARCH_CONTEXT_SIZE: "high",
|
||||
CONF_WEB_SEARCH_USER_LOCATION: False,
|
||||
},
|
||||
),
|
||||
{
|
||||
CONF_RECOMMENDED: False,
|
||||
CONF_PROMPT: "Speak like a pirate",
|
||||
CONF_TEMPERATURE: 0.8,
|
||||
CONF_CHAT_MODEL: "gpt-5-pro",
|
||||
CONF_TOP_P: 0.9,
|
||||
CONF_MAX_TOKENS: 1000,
|
||||
CONF_VERBOSITY: "medium",
|
||||
CONF_WEB_SEARCH: True,
|
||||
CONF_WEB_SEARCH_CONTEXT_SIZE: "high",
|
||||
CONF_WEB_SEARCH_USER_LOCATION: False,
|
||||
},
|
||||
),
|
||||
],
|
||||
)
|
||||
async def test_subentry_switching(
|
||||
|
@@ -474,7 +474,7 @@ async def test_web_search(
|
||||
|
||||
assert mock_create_stream.mock_calls[0][2]["tools"] == [
|
||||
{
|
||||
"type": "web_search_preview",
|
||||
"type": "web_search",
|
||||
"search_context_size": "low",
|
||||
"user_location": {
|
||||
"type": "approximate",
|
||||
|
@@ -80,7 +80,9 @@ async def test_low_battery(hass: HomeAssistant) -> None:
|
||||
async def test_initial_states(hass: HomeAssistant) -> None:
|
||||
"""Test plant initialises attributes if sensor already exists."""
|
||||
hass.states.async_set(
|
||||
MOISTURE_ENTITY, 5, {ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS}
|
||||
MOISTURE_ENTITY,
|
||||
5,
|
||||
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
|
||||
)
|
||||
plant_name = "some_plant"
|
||||
assert await async_setup_component(
|
||||
@@ -101,7 +103,9 @@ async def test_update_states(hass: HomeAssistant) -> None:
|
||||
hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
|
||||
)
|
||||
hass.states.async_set(
|
||||
MOISTURE_ENTITY, 5, {ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS}
|
||||
MOISTURE_ENTITY,
|
||||
5,
|
||||
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
state = hass.states.get(f"plant.{plant_name}")
|
||||
@@ -121,7 +125,7 @@ async def test_unavailable_state(hass: HomeAssistant) -> None:
|
||||
hass.states.async_set(
|
||||
MOISTURE_ENTITY,
|
||||
STATE_UNAVAILABLE,
|
||||
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS},
|
||||
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
state = hass.states.get(f"plant.{plant_name}")
|
||||
@@ -139,7 +143,9 @@ async def test_state_problem_if_unavailable(hass: HomeAssistant) -> None:
|
||||
hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
|
||||
)
|
||||
hass.states.async_set(
|
||||
MOISTURE_ENTITY, 42, {ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS}
|
||||
MOISTURE_ENTITY,
|
||||
42,
|
||||
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
state = hass.states.get(f"plant.{plant_name}")
|
||||
@@ -148,7 +154,7 @@ async def test_state_problem_if_unavailable(hass: HomeAssistant) -> None:
|
||||
hass.states.async_set(
|
||||
MOISTURE_ENTITY,
|
||||
STATE_UNAVAILABLE,
|
||||
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS},
|
||||
{ATTR_UNIT_OF_MEASUREMENT: UnitOfConductivity.MICROSIEMENS_PER_CM},
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
state = hass.states.get(f"plant.{plant_name}")
|
||||
|
241
tests/components/portainer/snapshots/test_sensor.ambr
Normal file
241
tests/components/portainer/snapshots/test_sensor.ambr
Normal file
@@ -0,0 +1,241 @@
|
||||
# serializer version: 1
|
||||
# name: test_all_entities[sensor.focused_einstein_image-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.focused_einstein_image',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Image',
|
||||
'platform': 'portainer',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'image',
|
||||
'unique_id': 'portainer_test_entry_123_focused_einstein_image',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.focused_einstein_image-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'focused_einstein Image',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.focused_einstein_image',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'docker.io/library/redis:7',
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.funny_chatelet_image-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.funny_chatelet_image',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Image',
|
||||
'platform': 'portainer',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'image',
|
||||
'unique_id': 'portainer_test_entry_123_funny_chatelet_image',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.funny_chatelet_image-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'funny_chatelet Image',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.funny_chatelet_image',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'docker.io/library/ubuntu:latest',
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.practical_morse_image-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.practical_morse_image',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Image',
|
||||
'platform': 'portainer',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'image',
|
||||
'unique_id': 'portainer_test_entry_123_practical_morse_image',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.practical_morse_image-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'practical_morse Image',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.practical_morse_image',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'docker.io/library/python:3.13-slim',
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.serene_banach_image-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.serene_banach_image',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Image',
|
||||
'platform': 'portainer',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'image',
|
||||
'unique_id': 'portainer_test_entry_123_serene_banach_image',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.serene_banach_image-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'serene_banach Image',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.serene_banach_image',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'docker.io/library/nginx:latest',
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.stoic_turing_image-entry]
|
||||
EntityRegistryEntrySnapshot({
|
||||
'aliases': set({
|
||||
}),
|
||||
'area_id': None,
|
||||
'capabilities': None,
|
||||
'config_entry_id': <ANY>,
|
||||
'config_subentry_id': <ANY>,
|
||||
'device_class': None,
|
||||
'device_id': <ANY>,
|
||||
'disabled_by': None,
|
||||
'domain': 'sensor',
|
||||
'entity_category': None,
|
||||
'entity_id': 'sensor.stoic_turing_image',
|
||||
'has_entity_name': True,
|
||||
'hidden_by': None,
|
||||
'icon': None,
|
||||
'id': <ANY>,
|
||||
'labels': set({
|
||||
}),
|
||||
'name': None,
|
||||
'options': dict({
|
||||
}),
|
||||
'original_device_class': None,
|
||||
'original_icon': None,
|
||||
'original_name': 'Image',
|
||||
'platform': 'portainer',
|
||||
'previous_unique_id': None,
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': 'image',
|
||||
'unique_id': 'portainer_test_entry_123_stoic_turing_image',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
# name: test_all_entities[sensor.stoic_turing_image-state]
|
||||
StateSnapshot({
|
||||
'attributes': ReadOnlyDict({
|
||||
'friendly_name': 'stoic_turing Image',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
'entity_id': 'sensor.stoic_turing_image',
|
||||
'last_changed': <ANY>,
|
||||
'last_reported': <ANY>,
|
||||
'last_updated': <ANY>,
|
||||
'state': 'docker.io/library/postgres:15',
|
||||
})
|
||||
# ---
|
32
tests/components/portainer/test_sensor.py
Normal file
32
tests/components/portainer/test_sensor.py
Normal file
@@ -0,0 +1,32 @@
|
||||
"""Tests for the Portainer sensor platform."""
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
|
||||
from . import setup_integration
|
||||
|
||||
from tests.common import MockConfigEntry, snapshot_platform
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("mock_portainer_client")
|
||||
async def test_all_entities(
|
||||
hass: HomeAssistant,
|
||||
snapshot: SnapshotAssertion,
|
||||
mock_config_entry: MockConfigEntry,
|
||||
entity_registry: er.EntityRegistry,
|
||||
) -> None:
|
||||
"""Test all entities."""
|
||||
with patch(
|
||||
"homeassistant.components.portainer._PLATFORMS",
|
||||
[Platform.SENSOR],
|
||||
):
|
||||
await setup_integration(hass, mock_config_entry)
|
||||
await snapshot_platform(
|
||||
hass, entity_registry, snapshot, mock_config_entry.entry_id
|
||||
)
|
@@ -1785,7 +1785,7 @@ async def test_unit_conversion_priority_suggested_unit_change_2(
|
||||
UnitOfBloodGlucoseConcentration.MILLIGRAMS_PER_DECILITER,
|
||||
0,
|
||||
),
|
||||
(SensorDeviceClass.CONDUCTIVITY, UnitOfConductivity.MICROSIEMENS, 1),
|
||||
(SensorDeviceClass.CONDUCTIVITY, UnitOfConductivity.MICROSIEMENS_PER_CM, 1),
|
||||
(SensorDeviceClass.CURRENT, UnitOfElectricCurrent.MILLIAMPERE, 0),
|
||||
(SensorDeviceClass.DATA_RATE, UnitOfDataRate.KILOBITS_PER_SECOND, 0),
|
||||
(SensorDeviceClass.DATA_SIZE, UnitOfInformation.KILOBITS, 0),
|
||||
|
@@ -127,7 +127,7 @@
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '123456789ABC-button:200',
|
||||
'unique_id': '123456789ABC-button:200-button_generic',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
@@ -175,7 +175,7 @@
|
||||
'suggested_object_id': None,
|
||||
'supported_features': 0,
|
||||
'translation_key': None,
|
||||
'unique_id': '123456789ABC-button:200',
|
||||
'unique_id': '123456789ABC-button:200-button_generic',
|
||||
'unit_of_measurement': None,
|
||||
})
|
||||
# ---
|
||||
|
@@ -9,7 +9,7 @@ import pytest
|
||||
from syrupy.assertion import SnapshotAssertion
|
||||
|
||||
from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS
|
||||
from homeassistant.components.shelly.const import DOMAIN
|
||||
from homeassistant.components.shelly.const import DOMAIN, MODEL_FRANKEVER_WATER_VALVE
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState
|
||||
from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -17,7 +17,13 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import DeviceRegistry
|
||||
from homeassistant.helpers.entity_registry import EntityRegistry
|
||||
|
||||
from . import init_integration, patch_platforms, register_device, register_entity
|
||||
from . import (
|
||||
MOCK_MAC,
|
||||
init_integration,
|
||||
patch_platforms,
|
||||
register_device,
|
||||
register_entity,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
@@ -417,3 +423,56 @@ async def test_migrate_unique_id_blu_trv(
|
||||
assert entity_entry.unique_id == "F8447725F0DD-blutrv:200-calibrate"
|
||||
|
||||
assert "Migrating unique_id for button.trv_name_calibrate" in caplog.text
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
("old_id", "new_id", "role"),
|
||||
[
|
||||
("button", "button_generic", None),
|
||||
("button", "button_open", "open"),
|
||||
("button", "button_close", "close"),
|
||||
],
|
||||
)
|
||||
async def test_migrate_unique_id_virtual_components_roles(
|
||||
hass: HomeAssistant,
|
||||
mock_rpc_device: Mock,
|
||||
entity_registry: EntityRegistry,
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
old_id: str,
|
||||
new_id: str,
|
||||
role: str | None,
|
||||
) -> None:
|
||||
"""Test migration of unique_id for virtual components to include role."""
|
||||
entry = await init_integration(
|
||||
hass, 3, model=MODEL_FRANKEVER_WATER_VALVE, skip_setup=True
|
||||
)
|
||||
old_unique_id = f"{MOCK_MAC}-{old_id}:200"
|
||||
new_unique_id = f"{old_unique_id}-{new_id}"
|
||||
config = deepcopy(mock_rpc_device.config)
|
||||
if role:
|
||||
config[f"{old_id}:200"] = {
|
||||
"role": role,
|
||||
}
|
||||
else:
|
||||
config[f"{old_id}:200"] = {}
|
||||
monkeypatch.setattr(mock_rpc_device, "config", config)
|
||||
|
||||
entity = entity_registry.async_get_or_create(
|
||||
suggested_object_id="test_name_test_button",
|
||||
disabled_by=None,
|
||||
domain=BUTTON_DOMAIN,
|
||||
platform=DOMAIN,
|
||||
unique_id=old_unique_id,
|
||||
config_entry=entry,
|
||||
)
|
||||
assert entity.unique_id == old_unique_id
|
||||
|
||||
await hass.config_entries.async_setup(entry.entry_id)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
entity_entry = entity_registry.async_get("button.test_name_test_button")
|
||||
assert entity_entry
|
||||
assert entity_entry.unique_id == new_unique_id
|
||||
|
||||
assert "Migrating unique_id for button.test_name_test_button" in caplog.text
|
||||
|
@@ -404,6 +404,7 @@ async def test_rpc_device_services(
|
||||
)
|
||||
assert (state := hass.states.get(entity_id))
|
||||
assert state.state == STATE_ON
|
||||
mock_rpc_device.switch_set.assert_called_once_with(0, True)
|
||||
|
||||
monkeypatch.setitem(mock_rpc_device.status["switch:0"], "output", False)
|
||||
await hass.services.async_call(
|
||||
@@ -415,6 +416,7 @@ async def test_rpc_device_services(
|
||||
mock_rpc_device.mock_update()
|
||||
assert (state := hass.states.get(entity_id))
|
||||
assert state.state == STATE_OFF
|
||||
mock_rpc_device.switch_set.assert_called_with(0, False)
|
||||
|
||||
|
||||
async def test_rpc_device_unique_ids(
|
||||
@@ -507,7 +509,7 @@ async def test_rpc_set_state_errors(
|
||||
monkeypatch: pytest.MonkeyPatch,
|
||||
) -> None:
|
||||
"""Test RPC device set state connection/call errors."""
|
||||
monkeypatch.setattr(mock_rpc_device, "call_rpc", AsyncMock(side_effect=exc))
|
||||
mock_rpc_device.switch_set.side_effect = exc
|
||||
monkeypatch.delitem(mock_rpc_device.status, "cover:0")
|
||||
monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False)
|
||||
await init_integration(hass, 2)
|
||||
@@ -525,11 +527,7 @@ async def test_rpc_auth_error(
|
||||
hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch
|
||||
) -> None:
|
||||
"""Test RPC device set state authentication error."""
|
||||
monkeypatch.setattr(
|
||||
mock_rpc_device,
|
||||
"call_rpc",
|
||||
AsyncMock(side_effect=InvalidAuthError),
|
||||
)
|
||||
mock_rpc_device.switch_set.side_effect = InvalidAuthError
|
||||
monkeypatch.delitem(mock_rpc_device.status, "cover:0")
|
||||
monkeypatch.setitem(mock_rpc_device.status["sys"], "relay_in_thermostat", False)
|
||||
entry = await init_integration(hass, 2)
|
||||
@@ -657,6 +655,7 @@ async def test_rpc_device_virtual_switch(
|
||||
mock_rpc_device.mock_update()
|
||||
assert (state := hass.states.get(entity_id))
|
||||
assert state.state == STATE_OFF
|
||||
mock_rpc_device.boolean_set.assert_called_once_with(200, False)
|
||||
|
||||
monkeypatch.setitem(mock_rpc_device.status["boolean:200"], "value", True)
|
||||
await hass.services.async_call(
|
||||
@@ -668,6 +667,7 @@ async def test_rpc_device_virtual_switch(
|
||||
mock_rpc_device.mock_update()
|
||||
assert (state := hass.states.get(entity_id))
|
||||
assert state.state == STATE_ON
|
||||
mock_rpc_device.boolean_set.assert_called_with(200, True)
|
||||
|
||||
|
||||
@pytest.mark.usefixtures("disable_async_remove_shelly_rpc_entities")
|
||||
@@ -815,6 +815,7 @@ async def test_rpc_device_script_switch(
|
||||
|
||||
assert (state := hass.states.get(entity_id))
|
||||
assert state.state == STATE_OFF
|
||||
mock_rpc_device.script_stop.assert_called_once_with(1)
|
||||
|
||||
monkeypatch.setitem(mock_rpc_device.status[key], "running", True)
|
||||
await hass.services.async_call(
|
||||
@@ -827,3 +828,4 @@ async def test_rpc_device_script_switch(
|
||||
|
||||
assert (state := hass.states.get(entity_id))
|
||||
assert state.state == STATE_ON
|
||||
mock_rpc_device.script_start.assert_called_once_with(1)
|
||||
|
@@ -4,7 +4,7 @@
|
||||
'attributes': ReadOnlyDict({
|
||||
'entity_picture': 'https://i.ytimg.com/vi/wysukDrMdqU/maxresdefault.jpg',
|
||||
'friendly_name': 'Google for Developers Latest upload',
|
||||
'published_at': datetime.datetime(2023, 5, 11, 0, 20, 46, tzinfo=TzInfo(UTC)),
|
||||
'published_at': datetime.datetime(2023, 5, 11, 0, 20, 46, tzinfo=TzInfo(0)),
|
||||
'video_id': 'wysukDrMdqU',
|
||||
}),
|
||||
'context': <ANY>,
|
||||
|
@@ -952,6 +952,33 @@ async def test_zeroconf_discovery_via_socket_already_setup_with_ip_match(
|
||||
assert result["reason"] == "single_instance_allowed"
|
||||
|
||||
|
||||
@patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True))
|
||||
async def test_zeroconf_not_onboarded(hass: HomeAssistant) -> None:
|
||||
"""Test zeroconf discovery needing confirmation when not onboarded."""
|
||||
service_info = ZeroconfServiceInfo(
|
||||
ip_address=ip_address("192.168.1.100"),
|
||||
ip_addresses=[ip_address("192.168.1.100")],
|
||||
hostname="tube-zigbee-gw.local.",
|
||||
name="mock_name",
|
||||
port=6638,
|
||||
properties={"name": "tube_123456"},
|
||||
type="mock_type",
|
||||
)
|
||||
with patch(
|
||||
"homeassistant.components.onboarding.async_is_onboarded", return_value=False
|
||||
):
|
||||
result_create = await hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": config_entries.SOURCE_ZEROCONF},
|
||||
data=service_info,
|
||||
)
|
||||
await hass.async_block_till_done()
|
||||
|
||||
# not automatically confirmed
|
||||
assert result_create["type"] is FlowResultType.FORM
|
||||
assert result_create["step_id"] == "confirm"
|
||||
|
||||
|
||||
@patch(
|
||||
"homeassistant.components.zha.radio_manager.ZhaRadioManager.detect_radio_type",
|
||||
mock_detect_radio_type(radio_type=RadioType.deconz),
|
||||
|
@@ -22,7 +22,7 @@ from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import TemplateError
|
||||
from homeassistant.exceptions import HomeAssistantError, TemplateError
|
||||
from homeassistant.helpers.device_registry import EVENT_DEVICE_REGISTRY_UPDATED
|
||||
from homeassistant.helpers.entity_registry import EVENT_ENTITY_REGISTRY_UPDATED
|
||||
from homeassistant.helpers.event import (
|
||||
@@ -4975,43 +4975,25 @@ async def test_async_track_state_report_change_event(hass: HomeAssistant) -> Non
|
||||
}
|
||||
|
||||
|
||||
async def test_async_track_template_no_hass_deprecated(
|
||||
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
|
||||
) -> None:
|
||||
"""Test async_track_template with a template without hass is deprecated."""
|
||||
message = (
|
||||
"Detected code that calls async_track_template_result with template without "
|
||||
"hass. This will stop working in Home Assistant 2025.10, please "
|
||||
"report this issue"
|
||||
)
|
||||
async def test_async_track_template_no_hass_fails(hass: HomeAssistant) -> None:
|
||||
"""Test async_track_template with a template without hass now fails."""
|
||||
message = "Calls async_track_template_result with template without hass"
|
||||
|
||||
async_track_template(hass, Template("blah"), lambda x, y, z: None)
|
||||
assert message in caplog.text
|
||||
caplog.clear()
|
||||
with pytest.raises(HomeAssistantError, match=message):
|
||||
async_track_template(hass, Template("blah"), lambda x, y, z: None)
|
||||
|
||||
async_track_template(hass, Template("blah", hass), lambda x, y, z: None)
|
||||
assert message not in caplog.text
|
||||
caplog.clear()
|
||||
|
||||
|
||||
async def test_async_track_template_result_no_hass_deprecated(
|
||||
hass: HomeAssistant, caplog: pytest.LogCaptureFixture
|
||||
) -> None:
|
||||
"""Test async_track_template_result with a template without hass is deprecated."""
|
||||
message = (
|
||||
"Detected code that calls async_track_template_result with template without "
|
||||
"hass. This will stop working in Home Assistant 2025.10, please "
|
||||
"report this issue"
|
||||
)
|
||||
async def test_async_track_template_result_no_hass_fails(hass: HomeAssistant) -> None:
|
||||
"""Test async_track_template_result with a template without hass now fails."""
|
||||
message = "Calls async_track_template_result with template without hass"
|
||||
|
||||
async_track_template_result(
|
||||
hass, [TrackTemplate(Template("blah"), None)], lambda x, y, z: None
|
||||
)
|
||||
assert message in caplog.text
|
||||
caplog.clear()
|
||||
with pytest.raises(HomeAssistantError, match=message):
|
||||
async_track_template_result(
|
||||
hass, [TrackTemplate(Template("blah"), None)], lambda x, y, z: None
|
||||
)
|
||||
|
||||
async_track_template_result(
|
||||
hass, [TrackTemplate(Template("blah", hass), None)], lambda x, y, z: None
|
||||
)
|
||||
assert message not in caplog.text
|
||||
caplog.clear()
|
||||
|
@@ -1,19 +1,12 @@
|
||||
"""Test const module."""
|
||||
|
||||
from enum import Enum
|
||||
import logging
|
||||
import sys
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from homeassistant import const
|
||||
|
||||
from .common import (
|
||||
extract_stack_to_frame,
|
||||
help_test_all,
|
||||
import_and_test_deprecated_constant,
|
||||
)
|
||||
from .common import help_test_all, import_and_test_deprecated_constant
|
||||
|
||||
|
||||
def _create_tuples(
|
||||
@@ -48,78 +41,3 @@ def test_deprecated_constant_name_changes(
|
||||
replacement,
|
||||
breaks_in_version,
|
||||
)
|
||||
|
||||
|
||||
def test_deprecated_unit_of_conductivity_alias() -> None:
|
||||
"""Test UnitOfConductivity deprecation."""
|
||||
|
||||
# Test the deprecated members are aliases
|
||||
assert set(const.UnitOfConductivity) == {"S/cm", "μS/cm", "mS/cm"}
|
||||
|
||||
|
||||
def test_deprecated_unit_of_conductivity_members(
|
||||
caplog: pytest.LogCaptureFixture,
|
||||
) -> None:
|
||||
"""Test UnitOfConductivity deprecation."""
|
||||
|
||||
module_name = "config.custom_components.hue.light"
|
||||
filename = f"/home/paulus/{module_name.replace('.', '/')}.py"
|
||||
|
||||
with (
|
||||
patch.dict(sys.modules, {module_name: Mock(__file__=filename)}),
|
||||
patch(
|
||||
"homeassistant.helpers.frame.linecache.getline",
|
||||
return_value="await session.close()",
|
||||
),
|
||||
patch(
|
||||
"homeassistant.helpers.frame.get_current_frame",
|
||||
return_value=extract_stack_to_frame(
|
||||
[
|
||||
Mock(
|
||||
filename="/home/paulus/homeassistant/core.py",
|
||||
lineno="23",
|
||||
line="do_something()",
|
||||
),
|
||||
Mock(
|
||||
filename=filename,
|
||||
lineno="23",
|
||||
line="await session.close()",
|
||||
),
|
||||
Mock(
|
||||
filename="/home/paulus/aiohue/lights.py",
|
||||
lineno="2",
|
||||
line="something()",
|
||||
),
|
||||
]
|
||||
),
|
||||
),
|
||||
):
|
||||
const.UnitOfConductivity.SIEMENS # noqa: B018
|
||||
const.UnitOfConductivity.MICROSIEMENS # noqa: B018
|
||||
const.UnitOfConductivity.MILLISIEMENS # noqa: B018
|
||||
|
||||
assert len(caplog.record_tuples) == 3
|
||||
|
||||
def deprecation_message(member: str, replacement: str) -> str:
|
||||
return (
|
||||
f"The deprecated enum member UnitOfConductivity.{member} was used from hue. "
|
||||
"It will be removed in HA Core 2025.11.0. Use UnitOfConductivity."
|
||||
f"{replacement} instead, please report it to the author of the 'hue' custom"
|
||||
" integration"
|
||||
)
|
||||
|
||||
assert (
|
||||
const.__name__,
|
||||
logging.WARNING,
|
||||
deprecation_message("SIEMENS", "SIEMENS_PER_CM"),
|
||||
) in caplog.record_tuples
|
||||
assert (
|
||||
const.__name__,
|
||||
logging.WARNING,
|
||||
deprecation_message("MICROSIEMENS", "MICROSIEMENS_PER_CM"),
|
||||
) in caplog.record_tuples
|
||||
assert (
|
||||
const.__name__,
|
||||
logging.WARNING,
|
||||
deprecation_message("MILLISIEMENS", "MILLISIEMENS_PER_CM"),
|
||||
) in caplog.record_tuples
|
||||
|
@@ -281,48 +281,6 @@ _CONVERTED_VALUE: dict[
|
||||
),
|
||||
],
|
||||
ConductivityConverter: [
|
||||
# Deprecated to deprecated
|
||||
(5, UnitOfConductivity.SIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS),
|
||||
(5, UnitOfConductivity.SIEMENS, 5e6, UnitOfConductivity.MICROSIEMENS),
|
||||
(5, UnitOfConductivity.MILLISIEMENS, 5e3, UnitOfConductivity.MICROSIEMENS),
|
||||
(5, UnitOfConductivity.MILLISIEMENS, 5e-3, UnitOfConductivity.SIEMENS),
|
||||
(5e6, UnitOfConductivity.MICROSIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS),
|
||||
(5e6, UnitOfConductivity.MICROSIEMENS, 5, UnitOfConductivity.SIEMENS),
|
||||
# Deprecated to new
|
||||
(5, UnitOfConductivity.SIEMENS, 5e3, UnitOfConductivity.MILLISIEMENS_PER_CM),
|
||||
(5, UnitOfConductivity.SIEMENS, 5e6, UnitOfConductivity.MICROSIEMENS_PER_CM),
|
||||
(
|
||||
5,
|
||||
UnitOfConductivity.MILLISIEMENS,
|
||||
5e3,
|
||||
UnitOfConductivity.MICROSIEMENS_PER_CM,
|
||||
),
|
||||
(5, UnitOfConductivity.MILLISIEMENS, 5e-3, UnitOfConductivity.SIEMENS_PER_CM),
|
||||
(
|
||||
5e6,
|
||||
UnitOfConductivity.MICROSIEMENS,
|
||||
5e3,
|
||||
UnitOfConductivity.MILLISIEMENS_PER_CM,
|
||||
),
|
||||
(5e6, UnitOfConductivity.MICROSIEMENS, 5, UnitOfConductivity.SIEMENS_PER_CM),
|
||||
# New to deprecated
|
||||
(5, UnitOfConductivity.SIEMENS_PER_CM, 5e3, UnitOfConductivity.MILLISIEMENS),
|
||||
(5, UnitOfConductivity.SIEMENS_PER_CM, 5e6, UnitOfConductivity.MICROSIEMENS),
|
||||
(
|
||||
5,
|
||||
UnitOfConductivity.MILLISIEMENS_PER_CM,
|
||||
5e3,
|
||||
UnitOfConductivity.MICROSIEMENS,
|
||||
),
|
||||
(5, UnitOfConductivity.MILLISIEMENS_PER_CM, 5e-3, UnitOfConductivity.SIEMENS),
|
||||
(
|
||||
5e6,
|
||||
UnitOfConductivity.MICROSIEMENS_PER_CM,
|
||||
5e3,
|
||||
UnitOfConductivity.MILLISIEMENS,
|
||||
),
|
||||
(5e6, UnitOfConductivity.MICROSIEMENS_PER_CM, 5, UnitOfConductivity.SIEMENS),
|
||||
# New to new
|
||||
(
|
||||
5,
|
||||
UnitOfConductivity.SIEMENS_PER_CM,
|
||||
|
Reference in New Issue
Block a user