Mirror of https://github.com/home-assistant/core.git (synced 2025-04-23 16:57:53 +00:00)

2024.12.4 (#133422)
This commit is contained in: commit a5eb816dcf
@@ -50,6 +50,12 @@ def _check_sleep_call_allowed(mapped_args: dict[str, Any]) -> bool:
    return False


+def _check_load_verify_locations_call_allowed(mapped_args: dict[str, Any]) -> bool:
+    # If only cadata is passed, we can ignore it
+    kwargs = mapped_args.get("kwargs")
+    return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs)


@dataclass(slots=True, frozen=True)
class BlockingCall:
    """Class to hold information about a blocking call."""
@@ -158,7 +164,7 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] = (
        original_func=SSLContext.load_verify_locations,
        object=SSLContext,
        function="load_verify_locations",
-       check_allowed=None,
+       check_allowed=_check_load_verify_locations_call_allowed,
        strict=False,
        strict_core=False,
        skip_for_tests=True,
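The new check exempts only calls that pass certificate material in memory; anything that reads a file or capath from disk is still reported as a blocking call in the event loop. A minimal standalone sketch of that behaviour (the helper is adapted from the hunk above without type hints; the call sites are illustrative, not taken from the codebase):

def _check_load_verify_locations_call_allowed(mapped_args):
    # If only cadata is passed, we can ignore it
    kwargs = mapped_args.get("kwargs")
    return bool(kwargs and len(kwargs) == 1 and "cadata" in kwargs)

# Allowed: the certificate data is already in memory, so no disk I/O happens.
assert _check_load_verify_locations_call_allowed({"kwargs": {"cadata": "PEM data"}})
# Still flagged: loading a CA file from disk would block the event loop.
assert not _check_load_verify_locations_call_allowed({"kwargs": {"cafile": "/etc/ssl/ca.pem"}})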
@@ -28,5 +28,5 @@
  "documentation": "https://www.home-assistant.io/integrations/august",
  "iot_class": "cloud_push",
  "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"]
+  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"]
}
@@ -274,7 +274,9 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
        if isinstance(fibaro_operation_mode, str):
            with suppress(ValueError):
                return HVACMode(fibaro_operation_mode.lower())
-        elif fibaro_operation_mode in OPMODES_HVAC:
+            # when the mode cannot be instantiated a preset_mode is selected
+            return HVACMode.AUTO
+        if fibaro_operation_mode in OPMODES_HVAC:
            return OPMODES_HVAC[fibaro_operation_mode]
        return None

@@ -282,8 +284,6 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
        """Set new target operation mode."""
        if not self._op_mode_device:
            return
-        if self.preset_mode:
-            return

        if "setOperatingMode" in self._op_mode_device.fibaro_device.actions:
            self._op_mode_device.action("setOperatingMode", HA_OPMODES_HVAC[hvac_mode])
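The net effect is that an unknown thermostat mode string no longer falls through to None: when the device reports a vendor-specific preset such as "CustomerSpecific", the entity now reports HVACMode.AUTO and leaves the raw string to preset_mode. A rough standalone sketch of that mapping (names mirror the diff; the HVACMode stand-in and mode table are illustrative only, not the real climate constants):

from enum import StrEnum

class HVACMode(StrEnum):  # minimal stand-in for homeassistant.components.climate.HVACMode
    OFF = "off"
    HEAT = "heat"
    AUTO = "auto"

OPMODES_HVAC = {0: HVACMode.OFF, 1: HVACMode.HEAT}  # illustrative subset

def resolve_hvac_mode(fibaro_operation_mode):
    """Translate a Fibaro mode (string or int) into an HVACMode, preferring AUTO for presets."""
    if isinstance(fibaro_operation_mode, str):
        try:
            return HVACMode(fibaro_operation_mode.lower())
        except ValueError:
            # A vendor preset cannot be instantiated as HVACMode, so report AUTO
            # and let preset_mode carry the detail.
            return HVACMode.AUTO
    if fibaro_operation_mode in OPMODES_HVAC:
        return OPMODES_HVAC[fibaro_operation_mode]
    return None

assert resolve_hvac_mode("Heat") == HVACMode.HEAT
assert resolve_hvac_mode("CustomerSpecific") == HVACMode.AUTO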
@@ -29,6 +29,8 @@ class FlexitNumberEntityDescription(NumberEntityDescription):
    """Describes a Flexit number entity."""

    native_value_fn: Callable[[FlexitBACnet], float]
+   native_max_value_fn: Callable[[FlexitBACnet], int]
+   native_min_value_fn: Callable[[FlexitBACnet], int]
    set_native_value_fn: Callable[[FlexitBACnet], Callable[[int], Awaitable[None]]]

@@ -37,121 +39,121 @@ NUMBERS: tuple[FlexitNumberEntityDescription, ...] = (
        key="away_extract_fan_setpoint",
        translation_key="away_extract_fan_setpoint",
        device_class=NumberDeviceClass.POWER_FACTOR,
        native_min_value=0,
        native_max_value=100,
        native_step=1,
        mode=NumberMode.SLIDER,
        native_value_fn=lambda device: device.fan_setpoint_extract_air_away,
        set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_away,
        native_unit_of_measurement=PERCENTAGE,
        native_max_value_fn=lambda device: int(device.fan_setpoint_extract_air_home),
        native_min_value_fn=lambda _: 30,
    ),
    FlexitNumberEntityDescription(
        key="away_supply_fan_setpoint",
        translation_key="away_supply_fan_setpoint",
        device_class=NumberDeviceClass.POWER_FACTOR,
        native_min_value=0,
        native_max_value=100,
        native_step=1,
        mode=NumberMode.SLIDER,
        native_value_fn=lambda device: device.fan_setpoint_supply_air_away,
        set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_away,
        native_unit_of_measurement=PERCENTAGE,
        native_max_value_fn=lambda device: int(device.fan_setpoint_supply_air_home),
        native_min_value_fn=lambda _: 30,
    ),
    FlexitNumberEntityDescription(
        key="cooker_hood_extract_fan_setpoint",
        translation_key="cooker_hood_extract_fan_setpoint",
        device_class=NumberDeviceClass.POWER_FACTOR,
        native_min_value=0,
        native_max_value=100,
        native_step=1,
        mode=NumberMode.SLIDER,
        native_value_fn=lambda device: device.fan_setpoint_extract_air_cooker,
        set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_cooker,
        native_unit_of_measurement=PERCENTAGE,
        native_max_value_fn=lambda _: 100,
        native_min_value_fn=lambda _: 30,
    ),
    FlexitNumberEntityDescription(
        key="cooker_hood_supply_fan_setpoint",
        translation_key="cooker_hood_supply_fan_setpoint",
        device_class=NumberDeviceClass.POWER_FACTOR,
        native_min_value=0,
        native_max_value=100,
        native_step=1,
        mode=NumberMode.SLIDER,
        native_value_fn=lambda device: device.fan_setpoint_supply_air_cooker,
        set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_cooker,
        native_unit_of_measurement=PERCENTAGE,
        native_max_value_fn=lambda _: 100,
        native_min_value_fn=lambda _: 30,
    ),
    FlexitNumberEntityDescription(
        key="fireplace_extract_fan_setpoint",
        translation_key="fireplace_extract_fan_setpoint",
        device_class=NumberDeviceClass.POWER_FACTOR,
        native_min_value=0,
        native_max_value=100,
        native_step=1,
        mode=NumberMode.SLIDER,
        native_value_fn=lambda device: device.fan_setpoint_extract_air_fire,
        set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_fire,
        native_unit_of_measurement=PERCENTAGE,
        native_max_value_fn=lambda _: 100,
        native_min_value_fn=lambda _: 30,
    ),
    FlexitNumberEntityDescription(
        key="fireplace_supply_fan_setpoint",
        translation_key="fireplace_supply_fan_setpoint",
        device_class=NumberDeviceClass.POWER_FACTOR,
        native_min_value=0,
        native_max_value=100,
        native_step=1,
        mode=NumberMode.SLIDER,
        native_value_fn=lambda device: device.fan_setpoint_supply_air_fire,
        set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_fire,
        native_unit_of_measurement=PERCENTAGE,
        native_max_value_fn=lambda _: 100,
        native_min_value_fn=lambda _: 30,
    ),
    FlexitNumberEntityDescription(
        key="high_extract_fan_setpoint",
        translation_key="high_extract_fan_setpoint",
        device_class=NumberDeviceClass.POWER_FACTOR,
        native_min_value=0,
        native_max_value=100,
        native_step=1,
        mode=NumberMode.SLIDER,
        native_value_fn=lambda device: device.fan_setpoint_extract_air_high,
        set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_high,
        native_unit_of_measurement=PERCENTAGE,
        native_max_value_fn=lambda _: 100,
        native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_home),
    ),
    FlexitNumberEntityDescription(
        key="high_supply_fan_setpoint",
        translation_key="high_supply_fan_setpoint",
        device_class=NumberDeviceClass.POWER_FACTOR,
        native_min_value=0,
        native_max_value=100,
        native_step=1,
        mode=NumberMode.SLIDER,
        native_value_fn=lambda device: device.fan_setpoint_supply_air_high,
        set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_high,
        native_unit_of_measurement=PERCENTAGE,
        native_max_value_fn=lambda _: 100,
        native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_home),
    ),
    FlexitNumberEntityDescription(
        key="home_extract_fan_setpoint",
        translation_key="home_extract_fan_setpoint",
        device_class=NumberDeviceClass.POWER_FACTOR,
        native_min_value=0,
        native_max_value=100,
        native_step=1,
        mode=NumberMode.SLIDER,
        native_value_fn=lambda device: device.fan_setpoint_extract_air_home,
        set_native_value_fn=lambda device: device.set_fan_setpoint_extract_air_home,
        native_unit_of_measurement=PERCENTAGE,
        native_max_value_fn=lambda _: 100,
        native_min_value_fn=lambda device: int(device.fan_setpoint_extract_air_away),
    ),
    FlexitNumberEntityDescription(
        key="home_supply_fan_setpoint",
        translation_key="home_supply_fan_setpoint",
        device_class=NumberDeviceClass.POWER_FACTOR,
        native_min_value=0,
        native_max_value=100,
        native_step=1,
        mode=NumberMode.SLIDER,
        native_value_fn=lambda device: device.fan_setpoint_supply_air_home,
        set_native_value_fn=lambda device: device.set_fan_setpoint_supply_air_home,
        native_unit_of_measurement=PERCENTAGE,
        native_max_value_fn=lambda _: 100,
        native_min_value_fn=lambda device: int(device.fan_setpoint_supply_air_away),
    ),
)

@@ -192,6 +194,16 @@ class FlexitNumber(FlexitEntity, NumberEntity):
        """Return the state of the number."""
        return self.entity_description.native_value_fn(self.coordinator.device)

+   @property
+   def native_max_value(self) -> float:
+       """Return the native max value of the number."""
+       return self.entity_description.native_max_value_fn(self.coordinator.device)
+
+   @property
+   def native_min_value(self) -> float:
+       """Return the native min value of the number."""
+       return self.entity_description.native_min_value_fn(self.coordinator.device)

    async def async_set_native_value(self, value: float) -> None:
        """Update the current value."""
        set_native_value_fn = self.entity_description.set_native_value_fn(
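The pattern here is that each entity description carries callables that derive its limits from other setpoints on the device, and FlexitNumber surfaces them through its native_min_value and native_max_value properties, so for example the Away extract setpoint can never be raised above the Home extract setpoint. A rough standalone sketch of that idea, using a plain stub object instead of the real FlexitBACnet and NumberEntity classes (all names below are illustrative):

from dataclasses import dataclass
from typing import Callable

@dataclass
class DeviceStub:
    # Hypothetical device readings standing in for the BACnet values.
    fan_setpoint_extract_air_home: float = 70
    fan_setpoint_extract_air_away: float = 40

@dataclass(frozen=True)
class BoundedSetpointDescription:
    native_value_fn: Callable[[DeviceStub], float]
    native_min_value_fn: Callable[[DeviceStub], int]
    native_max_value_fn: Callable[[DeviceStub], int]

away_extract = BoundedSetpointDescription(
    native_value_fn=lambda device: device.fan_setpoint_extract_air_away,
    native_min_value_fn=lambda _: 30,
    # The away setpoint may never exceed the current home setpoint.
    native_max_value_fn=lambda device: int(device.fan_setpoint_extract_air_home),
)

device = DeviceStub()
assert away_extract.native_min_value_fn(device) == 30
assert away_extract.native_max_value_fn(device) == 70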
@@ -22,7 +22,7 @@ import homeassistant.util.dt as dt_util

from . import websocket_api
from .const import DOMAIN
-from .helpers import entities_may_have_state_changes_after, has_states_before
+from .helpers import entities_may_have_state_changes_after, has_recorder_run_after

CONF_ORDER = "use_include_order"

@@ -107,10 +107,7 @@ class HistoryPeriodView(HomeAssistantView):
        no_attributes = "no_attributes" in request.query

        if (
-           # has_states_before will return True if there are states older than
-           # end_time. If it's false, we know there are no states in the
-           # database up until end_time.
-           (end_time and not has_states_before(hass, end_time))
+           (end_time and not has_recorder_run_after(hass, end_time))
            or not include_start_time_state
            and entity_ids
            and not entities_may_have_state_changes_after(
@@ -6,6 +6,7 @@ from collections.abc import Iterable
from datetime import datetime as dt

from homeassistant.components.recorder import get_instance
+from homeassistant.components.recorder.models import process_timestamp
from homeassistant.core import HomeAssistant

@@ -25,10 +26,8 @@ def entities_may_have_state_changes_after(
    return False


-def has_states_before(hass: HomeAssistant, run_time: dt) -> bool:
-    """Check if the recorder has states as old or older than run_time.
-
-    Returns True if there may be such states.
-    """
-    oldest_ts = get_instance(hass).states_manager.oldest_ts
-    return oldest_ts is not None and run_time.timestamp() >= oldest_ts
+def has_recorder_run_after(hass: HomeAssistant, run_time: dt) -> bool:
+    """Check if the recorder has any runs after a specific time."""
+    return run_time >= process_timestamp(
+        get_instance(hass).recorder_runs_manager.first.start
+    )
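This swaps the guard the history endpoints use: instead of comparing against the states manager's cached oldest state timestamp, they go back to comparing against the start of the first recorder run. A minimal paraphrase of the two guards, with plain floats standing in for the recorder objects (the function names below are sketches, not the real helpers):

# 2024.12.3 behaviour: is the requested end time at or after the oldest cached state?
def states_before_sketch(oldest_state_ts: float | None, end_time_ts: float) -> bool:
    return oldest_state_ts is not None and end_time_ts >= oldest_state_ts

# 2024.12.4 behaviour: did the first recorder run start at or before the requested end time?
def recorder_run_after_sketch(first_run_start_ts: float, end_time_ts: float) -> bool:
    return end_time_ts >= first_run_start_ts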
@@ -39,7 +39,7 @@ from homeassistant.util.async_ import create_eager_task
import homeassistant.util.dt as dt_util

from .const import EVENT_COALESCE_TIME, MAX_PENDING_HISTORY_STATES
-from .helpers import entities_may_have_state_changes_after, has_states_before
+from .helpers import entities_may_have_state_changes_after, has_recorder_run_after

_LOGGER = logging.getLogger(__name__)

@@ -142,10 +142,7 @@ async def ws_get_history_during_period(
        no_attributes = msg["no_attributes"]

        if (
-           # has_states_before will return True if there are states older than
-           # end_time. If it's false, we know there are no states in the
-           # database up until end_time.
-           (end_time and not has_states_before(hass, end_time))
+           (end_time and not has_recorder_run_after(hass, end_time))
            or not include_start_time_state
            and entity_ids
            and not entities_may_have_state_changes_after(
@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/holiday",
  "iot_class": "local_polling",
-  "requirements": ["holidays==0.62", "babel==2.15.0"]
+  "requirements": ["holidays==0.63", "babel==2.15.0"]
}

@@ -69,6 +69,7 @@ class HomeKitAlarmControlPanelEntity(HomeKitEntity, AlarmControlPanelEntity):
        | AlarmControlPanelEntityFeature.ARM_AWAY
        | AlarmControlPanelEntityFeature.ARM_NIGHT
    )
+   _attr_code_arm_required = False

    def get_characteristic_types(self) -> list[str]:
        """Define the homekit characteristics the entity cares about."""

@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/imgw_pib",
  "iot_class": "cloud_polling",
-  "requirements": ["imgw_pib==1.0.6"]
+  "requirements": ["imgw_pib==1.0.7"]
}

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/incomfort",
  "iot_class": "local_polling",
  "loggers": ["incomfortclient"],
-  "requirements": ["incomfort-client==0.6.3-1"]
+  "requirements": ["incomfort-client==0.6.4"]
}

@@ -23,6 +23,7 @@
    "LIFX Ceiling",
    "LIFX Clean",
    "LIFX Color",
+   "LIFX Colour",
    "LIFX DLCOL",
    "LIFX Dlight",
    "LIFX DLWW",
@@ -35,12 +36,14 @@
    "LIFX Neon",
    "LIFX Nightvision",
    "LIFX PAR38",
+   "LIFX Permanent Outdoor",
    "LIFX Pls",
    "LIFX Plus",
    "LIFX Round",
    "LIFX Square",
    "LIFX String",
    "LIFX Tile",
+   "LIFX Tube",
    "LIFX White",
    "LIFX Z"
  ]
@@ -48,7 +51,7 @@
  "iot_class": "local_polling",
  "loggers": ["aiolifx", "aiolifx_effects", "bitstring"],
  "requirements": [
-    "aiolifx==1.1.1",
+    "aiolifx==1.1.2",
    "aiolifx-effects==0.3.2",
    "aiolifx-themes==0.5.5"
  ]

@@ -8,6 +8,6 @@
  "iot_class": "calculated",
  "loggers": ["yt_dlp"],
  "quality_scale": "internal",
-  "requirements": ["yt-dlp[default]==2024.12.06"],
+  "requirements": ["yt-dlp[default]==2024.12.13"],
  "single_config_entry": true
}
@@ -1,5 +1,6 @@
"""Component to allow running Python scripts."""

+from collections.abc import Mapping, Sequence
import datetime
import glob
import logging
@@ -7,6 +8,7 @@ from numbers import Number
import operator
import os
import time
+import types
from typing import Any

from RestrictedPython import (
@@ -167,6 +169,20 @@ IOPERATOR_TO_OPERATOR = {
}


+def guarded_import(
+    name: str,
+    globals: Mapping[str, object] | None = None,
+    locals: Mapping[str, object] | None = None,
+    fromlist: Sequence[str] = (),
+    level: int = 0,
+) -> types.ModuleType:
+    """Guard imports."""
+    # Allow import of _strptime needed by datetime.datetime.strptime
+    if name == "_strptime":
+        return __import__(name, globals, locals, fromlist, level)
+    raise ScriptError(f"Not allowed to import {name}")


def guarded_inplacevar(op: str, target: Any, operand: Any) -> Any:
    """Implement augmented-assign (+=, -=, etc.) operators for restricted code.

@@ -232,6 +248,7 @@ def execute(hass, filename, source, data=None, return_response=False):
        return getattr(obj, name, default)

    extra_builtins = {
+       "__import__": guarded_import,
        "datetime": datetime,
        "sorted": sorted,
        "time": TimeWrapper(),
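With __import__ mapped to guarded_import, a python_script can now call datetime.datetime.strptime, because the lazy _strptime module import it triggers is the single import allowed through; every other import still raises ScriptError. An illustrative script for a hypothetical <config>/python_scripts/parse_date.py, using only the data, datetime, and logger names the component injects into scripts:

# parse_date.py -- illustrative python_script, saved under python_scripts/
date_text = data.get("date", "2024-04-01")
# strptime internally imports _strptime, which guarded_import now permits
parsed = datetime.datetime.strptime(date_text, "%Y-%m-%d")
logger.info("Parsed date: %s", parsed)
# Any other import, for example `import sys`, still fails with
# "Not allowed to import sys".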
@@ -1431,7 +1431,6 @@ class Recorder(threading.Thread):
        with session_scope(session=self.get_session()) as session:
            end_incomplete_runs(session, self.recorder_runs_manager.recording_start)
            self.recorder_runs_manager.start(session)
-           self.states_manager.load_from_db(session)

            self._open_event_session()
@@ -22,9 +22,9 @@ from homeassistant.core import HomeAssistant, State, split_entity_id
from homeassistant.helpers.recorder import get_instance
import homeassistant.util.dt as dt_util

-from ..db_schema import StateAttributes, States
+from ..db_schema import RecorderRuns, StateAttributes, States
from ..filters import Filters
-from ..models import process_timestamp_to_utc_isoformat
+from ..models import process_timestamp, process_timestamp_to_utc_isoformat
from ..models.legacy import LegacyLazyState, legacy_row_to_compressed_state
from ..util import execute_stmt_lambda_element, session_scope
from .const import (
@@ -436,7 +436,7 @@ def get_last_state_changes(


def _get_states_for_entities_stmt(
-   run_start_ts: float,
+   run_start: datetime,
    utc_point_in_time: datetime,
    entity_ids: list[str],
    no_attributes: bool,
@@ -447,6 +447,7 @@ def _get_states_for_entities_stmt(
    )
    # We got an include-list of entities, accelerate the query by filtering already
    # in the inner query.
+   run_start_ts = process_timestamp(run_start).timestamp()
    utc_point_in_time_ts = dt_util.utc_to_timestamp(utc_point_in_time)
    stmt += lambda q: q.join(
        (
@@ -482,7 +483,7 @@ def _get_rows_with_session(
    session: Session,
    utc_point_in_time: datetime,
    entity_ids: list[str],
-   *,
+   run: RecorderRuns | None = None,
    no_attributes: bool = False,
) -> Iterable[Row]:
    """Return the states at a specific point in time."""
@@ -494,16 +495,17 @@ def _get_rows_with_session(
        ),
    )

-   oldest_ts = get_instance(hass).states_manager.oldest_ts
+   if run is None:
+       run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time)

-   if oldest_ts is None or oldest_ts > utc_point_in_time.timestamp():
-       # We don't have any states for the requested time
+   if run is None or process_timestamp(run.start) > utc_point_in_time:
+       # History did not run before utc_point_in_time
        return []

    # We have more than one entity to look at so we need to do a query on states
    # since the last recorder run started.
    stmt = _get_states_for_entities_stmt(
-       oldest_ts, utc_point_in_time, entity_ids, no_attributes
+       run.start, utc_point_in_time, entity_ids, no_attributes
    )
    return execute_stmt_lambda_element(session, stmt)
@@ -34,6 +34,7 @@ from ..models import (
    LazyState,
    datetime_to_timestamp_or_none,
    extract_metadata_ids,
+   process_timestamp,
    row_to_compressed_state,
)
from ..util import execute_stmt_lambda_element, session_scope
@@ -245,9 +246,9 @@ def get_significant_states_with_session(
        if metadata_id is not None
        and split_entity_id(entity_id)[0] in SIGNIFICANT_DOMAINS
    ]
-   oldest_ts: float | None = None
+   run_start_ts: float | None = None
    if include_start_time_state and not (
-       oldest_ts := _get_oldest_possible_ts(hass, start_time)
+       run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time)
    ):
        include_start_time_state = False
    start_time_ts = dt_util.utc_to_timestamp(start_time)
@@ -263,7 +264,7 @@ def get_significant_states_with_session(
            significant_changes_only,
            no_attributes,
            include_start_time_state,
-           oldest_ts,
+           run_start_ts,
        ),
        track_on=[
            bool(single_metadata_id),
@@ -410,9 +411,9 @@ def state_changes_during_period(
    entity_id_to_metadata_id: dict[str, int | None] = {
        entity_id: single_metadata_id
    }
-   oldest_ts: float | None = None
+   run_start_ts: float | None = None
    if include_start_time_state and not (
-       oldest_ts := _get_oldest_possible_ts(hass, start_time)
+       run_start_ts := _get_run_start_ts_for_utc_point_in_time(hass, start_time)
    ):
        include_start_time_state = False
    start_time_ts = dt_util.utc_to_timestamp(start_time)
@@ -425,7 +426,7 @@ def state_changes_during_period(
            no_attributes,
            limit,
            include_start_time_state,
-           oldest_ts,
+           run_start_ts,
            has_last_reported,
        ),
        track_on=[
@@ -599,17 +600,17 @@ def _get_start_time_state_for_entities_stmt(
    )


-def _get_oldest_possible_ts(
+def _get_run_start_ts_for_utc_point_in_time(
    hass: HomeAssistant, utc_point_in_time: datetime
) -> float | None:
-   """Return the oldest possible timestamp.
-
-   Returns None if there are no states as old as utc_point_in_time.
-   """
-   oldest_ts = get_instance(hass).states_manager.oldest_ts
-   if oldest_ts is not None and oldest_ts < utc_point_in_time.timestamp():
-       return oldest_ts
+   """Return the start time of a run."""
+   run = get_instance(hass).recorder_runs_manager.get(utc_point_in_time)
+   if (
+       run is not None
+       and (run_start := process_timestamp(run.start)) < utc_point_in_time
+   ):
+       return run_start.timestamp()
+   # History did not run before utc_point_in_time but we still
    return None
@@ -123,9 +123,6 @@ def purge_old_data(
            _purge_old_entity_ids(instance, session)

        _purge_old_recorder_runs(instance, session, purge_before)
-   with session_scope(session=instance.get_session(), read_only=True) as session:
-       instance.recorder_runs_manager.load_from_db(session)
-       instance.states_manager.load_from_db(session)
    if repack:
        repack_database(instance)
    return True
@@ -637,15 +637,6 @@ def find_states_to_purge(
    )


-def find_oldest_state() -> StatementLambdaElement:
-    """Find the last_updated_ts of the oldest state."""
-    return lambda_stmt(
-        lambda: select(States.last_updated_ts).where(
-            States.state_id.in_(select(func.min(States.state_id)))
-        )
-    )


def find_short_term_statistics_to_purge(
    purge_before: datetime, max_bind_vars: int
) -> StatementLambdaElement:
@@ -2,15 +2,7 @@

from __future__ import annotations

-from collections.abc import Sequence
-from typing import Any, cast
-
-from sqlalchemy.engine.row import Row
-from sqlalchemy.orm.session import Session

from ..db_schema import States
-from ..queries import find_oldest_state
-from ..util import execute_stmt_lambda_element


class StatesManager:
@@ -21,12 +13,6 @@ class StatesManager:
        self._pending: dict[str, States] = {}
        self._last_committed_id: dict[str, int] = {}
        self._last_reported: dict[int, float] = {}
-       self._oldest_ts: float | None = None
-
-   @property
-   def oldest_ts(self) -> float | None:
-       """Return the oldest timestamp."""
-       return self._oldest_ts

    def pop_pending(self, entity_id: str) -> States | None:
        """Pop a pending state.
@@ -58,8 +44,6 @@ class StatesManager:
        recorder thread.
        """
        self._pending[entity_id] = state
-       if self._oldest_ts is None:
-           self._oldest_ts = state.last_updated_ts

    def update_pending_last_reported(
        self, state_id: int, last_reported_timestamp: float
@@ -90,22 +74,6 @@ class StatesManager:
        """
        self._last_committed_id.clear()
        self._pending.clear()
-       self._oldest_ts = None

-   def load_from_db(self, session: Session) -> None:
-       """Update the cache.
-
-       Must run in the recorder thread.
-       """
-       result = cast(
-           Sequence[Row[Any]],
-           execute_stmt_lambda_element(session, find_oldest_state()),
-       )
-       if not result:
-           ts = None
-       else:
-           ts = result[0].last_updated_ts
-       self._oldest_ts = ts

    def evict_purged_state_ids(self, purged_state_ids: set[int]) -> None:
        """Evict purged states from the committed states.
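Everything removed here is the in-memory oldest-timestamp cache that 2024.12.3 had added to StatesManager; history queries go back to asking the recorder runs manager instead. For reference, the bookkeeping being dropped can be paraphrased as the following minimal sketch (not the real StatesManager API, just the lifecycle of the removed cache):

class OldestTsCacheSketch:
    """Paraphrase of the cache removed in this release."""

    def __init__(self) -> None:
        self._oldest_ts: float | None = None

    def add_pending(self, last_updated_ts: float) -> None:
        # Seeded by the first state written in the current run.
        if self._oldest_ts is None:
            self._oldest_ts = last_updated_ts

    def reset(self) -> None:
        # Cleared on recorder reset; previously refreshed from the database after a purge.
        self._oldest_ts = None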
@@ -120,6 +120,8 @@ class PurgeTask(RecorderTask):
        if purge.purge_old_data(
            instance, self.purge_before, self.repack, self.apply_filter
        ):
+           with instance.get_session() as session:
+               instance.recorder_runs_manager.load_from_db(session)
            # We always need to do the db cleanups after a purge
            # is finished to ensure the WAL checkpoint and other
            # tasks happen after a vacuum.
@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/starlink",
  "iot_class": "local_polling",
-  "requirements": ["starlink-grpc-core==1.2.0"]
+  "requirements": ["starlink-grpc-core==1.2.2"]
}

@@ -7,5 +7,5 @@
  "iot_class": "local_polling",
  "loggers": ["holidays"],
  "quality_scale": "internal",
-  "requirements": ["holidays==0.62"]
+  "requirements": ["holidays==0.63"]
}

@@ -13,5 +13,5 @@
  "documentation": "https://www.home-assistant.io/integrations/yale",
  "iot_class": "cloud_push",
  "loggers": ["socketio", "engineio", "yalexs"],
-  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.2"]
+  "requirements": ["yalexs==8.10.0", "yalexs-ble==2.5.5"]
}

@@ -12,5 +12,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/yalexs_ble",
  "iot_class": "local_push",
-  "requirements": ["yalexs-ble==2.5.2"]
+  "requirements": ["yalexs-ble==2.5.5"]
}
@@ -25,7 +25,7 @@ if TYPE_CHECKING:
APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2024
MINOR_VERSION: Final = 12
-PATCH_VERSION: Final = "3"
+PATCH_VERSION: Final = "4"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0)
@@ -92,6 +92,10 @@ HOMEKIT = {
        "always_discover": True,
        "domain": "lifx",
    },
+   "LIFX Colour": {
+       "always_discover": True,
+       "domain": "lifx",
+   },
    "LIFX DLCOL": {
        "always_discover": True,
        "domain": "lifx",
@@ -140,6 +144,10 @@ HOMEKIT = {
        "always_discover": True,
        "domain": "lifx",
    },
+   "LIFX Permanent Outdoor": {
+       "always_discover": True,
+       "domain": "lifx",
+   },
    "LIFX Pls": {
        "always_discover": True,
        "domain": "lifx",
@@ -164,6 +172,10 @@ HOMEKIT = {
        "always_discover": True,
        "domain": "lifx",
    },
+   "LIFX Tube": {
+       "always_discover": True,
+       "domain": "lifx",
+   },
    "LIFX White": {
        "always_discover": True,
        "domain": "lifx",
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

[project]
name = "homeassistant"
-version = "2024.12.3"
+version = "2024.12.4"
license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3."
readme = "README.rst"
@@ -286,7 +286,7 @@ aiolifx-effects==0.3.2
aiolifx-themes==0.5.5

# homeassistant.components.lifx
-aiolifx==1.1.1
+aiolifx==1.1.2

# homeassistant.components.livisi
aiolivisi==0.0.19
@@ -1127,7 +1127,7 @@ hole==0.8.0

# homeassistant.components.holiday
# homeassistant.components.workday
-holidays==0.62
+holidays==0.63

# homeassistant.components.frontend
home-assistant-frontend==20241127.8
@@ -1189,10 +1189,10 @@ iglo==1.2.7
ihcsdk==2.8.5

# homeassistant.components.imgw_pib
-imgw_pib==1.0.6
+imgw_pib==1.0.7

# homeassistant.components.incomfort
-incomfort-client==0.6.3-1
+incomfort-client==0.6.4

# homeassistant.components.influxdb
influxdb-client==1.24.0
@@ -2734,7 +2734,7 @@ starline==0.1.5
starlingbank==3.2

# homeassistant.components.starlink
-starlink-grpc-core==1.2.0
+starlink-grpc-core==1.2.2

# homeassistant.components.statsd
statsd==3.2.1
@@ -3044,7 +3044,7 @@ yalesmartalarmclient==0.4.3
# homeassistant.components.august
# homeassistant.components.yale
# homeassistant.components.yalexs_ble
-yalexs-ble==2.5.2
+yalexs-ble==2.5.5

# homeassistant.components.august
# homeassistant.components.yale
@@ -3066,7 +3066,7 @@ youless-api==2.1.2
youtubeaio==1.1.5

# homeassistant.components.media_extractor
-yt-dlp[default]==2024.12.06
+yt-dlp[default]==2024.12.13

# homeassistant.components.zamg
zamg==0.3.6
@@ -268,7 +268,7 @@ aiolifx-effects==0.3.2
aiolifx-themes==0.5.5

# homeassistant.components.lifx
-aiolifx==1.1.1
+aiolifx==1.1.2

# homeassistant.components.livisi
aiolivisi==0.0.19
@@ -953,7 +953,7 @@ hole==0.8.0

# homeassistant.components.holiday
# homeassistant.components.workday
-holidays==0.62
+holidays==0.63

# homeassistant.components.frontend
home-assistant-frontend==20241127.8
@@ -1000,10 +1000,10 @@ idasen-ha==2.6.2
ifaddr==0.2.0

# homeassistant.components.imgw_pib
-imgw_pib==1.0.6
+imgw_pib==1.0.7

# homeassistant.components.incomfort
-incomfort-client==0.6.3-1
+incomfort-client==0.6.4

# homeassistant.components.influxdb
influxdb-client==1.24.0
@@ -2183,7 +2183,7 @@ srpenergy==1.3.6
starline==0.1.5

# homeassistant.components.starlink
-starlink-grpc-core==1.2.0
+starlink-grpc-core==1.2.2

# homeassistant.components.statsd
statsd==3.2.1
@@ -2433,7 +2433,7 @@ yalesmartalarmclient==0.4.3
# homeassistant.components.august
# homeassistant.components.yale
# homeassistant.components.yalexs_ble
-yalexs-ble==2.5.2
+yalexs-ble==2.5.5

# homeassistant.components.august
# homeassistant.components.yale
@@ -2452,7 +2452,7 @@ youless-api==2.1.2
youtubeaio==1.1.5

# homeassistant.components.media_extractor
-yt-dlp[default]==2024.12.06
+yt-dlp[default]==2024.12.13

# homeassistant.components.zamg
zamg==0.3.6
@@ -129,6 +129,62 @@ def mock_light() -> Mock:
    return light


@pytest.fixture
def mock_thermostat() -> Mock:
    """Fixture for a thermostat."""
    climate = Mock()
    climate.fibaro_id = 4
    climate.parent_fibaro_id = 0
    climate.name = "Test climate"
    climate.room_id = 1
    climate.dead = False
    climate.visible = True
    climate.enabled = True
    climate.type = "com.fibaro.thermostatDanfoss"
    climate.base_type = "com.fibaro.device"
    climate.properties = {"manufacturer": ""}
    climate.actions = {"setThermostatMode": 1}
    climate.supported_features = {}
    climate.has_supported_thermostat_modes = True
    climate.supported_thermostat_modes = ["Off", "Heat", "CustomerSpecific"]
    climate.has_operating_mode = False
    climate.has_thermostat_mode = True
    climate.thermostat_mode = "CustomerSpecific"
    value_mock = Mock()
    value_mock.has_value = True
    value_mock.int_value.return_value = 20
    climate.value = value_mock
    return climate


@pytest.fixture
def mock_thermostat_with_operating_mode() -> Mock:
    """Fixture for a thermostat."""
    climate = Mock()
    climate.fibaro_id = 4
    climate.parent_fibaro_id = 0
    climate.name = "Test climate"
    climate.room_id = 1
    climate.dead = False
    climate.visible = True
    climate.enabled = True
    climate.type = "com.fibaro.thermostatDanfoss"
    climate.base_type = "com.fibaro.device"
    climate.properties = {"manufacturer": ""}
    climate.actions = {"setOperationMode": 1}
    climate.supported_features = {}
    climate.has_supported_operating_modes = True
    climate.supported_operating_modes = [0, 1, 15]
    climate.has_operating_mode = True
    climate.operating_mode = 15
    climate.has_thermostat_mode = False
    value_mock = Mock()
    value_mock.has_value = True
    value_mock.int_value.return_value = 20
    climate.value = value_mock
    return climate


@pytest.fixture
def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry:
    """Return the default mocked config entry."""
tests/components/fibaro/test_climate.py (new file, 134 lines)
@@ -0,0 +1,134 @@
"""Test the Fibaro climate platform."""

from unittest.mock import Mock, patch

from homeassistant.components.climate import ClimateEntityFeature, HVACMode
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .conftest import init_integration

from tests.common import MockConfigEntry


async def test_climate_setup(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    mock_fibaro_client: Mock,
    mock_config_entry: MockConfigEntry,
    mock_thermostat: Mock,
    mock_room: Mock,
) -> None:
    """Test that the climate creates an entity."""

    # Arrange
    mock_fibaro_client.read_rooms.return_value = [mock_room]
    mock_fibaro_client.read_devices.return_value = [mock_thermostat]

    with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]):
        # Act
        await init_integration(hass, mock_config_entry)
        # Assert
        entry = entity_registry.async_get("climate.room_1_test_climate_4")
        assert entry
        assert entry.unique_id == "hc2_111111.4"
        assert entry.original_name == "Room 1 Test climate"
        assert entry.supported_features == (
            ClimateEntityFeature.TURN_ON
            | ClimateEntityFeature.TURN_OFF
            | ClimateEntityFeature.PRESET_MODE
        )


async def test_hvac_mode_preset(
    hass: HomeAssistant,
    mock_fibaro_client: Mock,
    mock_config_entry: MockConfigEntry,
    mock_thermostat: Mock,
    mock_room: Mock,
) -> None:
    """Test that the climate state is auto when a preset is selected."""

    # Arrange
    mock_fibaro_client.read_rooms.return_value = [mock_room]
    mock_fibaro_client.read_devices.return_value = [mock_thermostat]

    with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]):
        # Act
        await init_integration(hass, mock_config_entry)
        # Assert
        state = hass.states.get("climate.room_1_test_climate_4")
        assert state.state == HVACMode.AUTO
        assert state.attributes["preset_mode"] == "CustomerSpecific"


async def test_hvac_mode_heat(
    hass: HomeAssistant,
    mock_fibaro_client: Mock,
    mock_config_entry: MockConfigEntry,
    mock_thermostat: Mock,
    mock_room: Mock,
) -> None:
    """Test that the preset mode is None if a hvac mode is active."""

    # Arrange
    mock_thermostat.thermostat_mode = "Heat"
    mock_fibaro_client.read_rooms.return_value = [mock_room]
    mock_fibaro_client.read_devices.return_value = [mock_thermostat]

    with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]):
        # Act
        await init_integration(hass, mock_config_entry)
        # Assert
        state = hass.states.get("climate.room_1_test_climate_4")
        assert state.state == HVACMode.HEAT
        assert state.attributes["preset_mode"] is None


async def test_set_hvac_mode(
    hass: HomeAssistant,
    mock_fibaro_client: Mock,
    mock_config_entry: MockConfigEntry,
    mock_thermostat: Mock,
    mock_room: Mock,
) -> None:
    """Test that set_hvac_mode() works."""

    # Arrange
    mock_fibaro_client.read_rooms.return_value = [mock_room]
    mock_fibaro_client.read_devices.return_value = [mock_thermostat]

    with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]):
        # Act
        await init_integration(hass, mock_config_entry)
        await hass.services.async_call(
            "climate",
            "set_hvac_mode",
            {"entity_id": "climate.room_1_test_climate_4", "hvac_mode": HVACMode.HEAT},
            blocking=True,
        )

        # Assert
        mock_thermostat.execute_action.assert_called_once()


async def test_hvac_mode_with_operation_mode_support(
    hass: HomeAssistant,
    mock_fibaro_client: Mock,
    mock_config_entry: MockConfigEntry,
    mock_thermostat_with_operating_mode: Mock,
    mock_room: Mock,
) -> None:
    """Test that operating mode works."""

    # Arrange
    mock_fibaro_client.read_rooms.return_value = [mock_room]
    mock_fibaro_client.read_devices.return_value = [mock_thermostat_with_operating_mode]

    with patch("homeassistant.components.fibaro.PLATFORMS", [Platform.CLIMATE]):
        # Act
        await init_integration(hass, mock_config_entry)
        # Assert
        state = hass.states.get("climate.room_1_test_climate_4")
        assert state.state == HVACMode.AUTO
@@ -68,16 +68,16 @@ def mock_flexit_bacnet() -> Generator[AsyncMock]:
    flexit_bacnet.electric_heater = True

    # Mock fan setpoints
-   flexit_bacnet.fan_setpoint_extract_air_fire = 10
-   flexit_bacnet.fan_setpoint_supply_air_fire = 20
-   flexit_bacnet.fan_setpoint_extract_air_away = 30
-   flexit_bacnet.fan_setpoint_supply_air_away = 40
-   flexit_bacnet.fan_setpoint_extract_air_home = 50
-   flexit_bacnet.fan_setpoint_supply_air_home = 60
-   flexit_bacnet.fan_setpoint_extract_air_high = 70
-   flexit_bacnet.fan_setpoint_supply_air_high = 80
-   flexit_bacnet.fan_setpoint_extract_air_cooker = 90
-   flexit_bacnet.fan_setpoint_supply_air_cooker = 100
+   flexit_bacnet.fan_setpoint_extract_air_fire = 56
+   flexit_bacnet.fan_setpoint_supply_air_fire = 77
+   flexit_bacnet.fan_setpoint_extract_air_away = 40
+   flexit_bacnet.fan_setpoint_supply_air_away = 42
+   flexit_bacnet.fan_setpoint_extract_air_home = 70
+   flexit_bacnet.fan_setpoint_supply_air_home = 74
+   flexit_bacnet.fan_setpoint_extract_air_high = 100
+   flexit_bacnet.fan_setpoint_supply_air_high = 100
+   flexit_bacnet.fan_setpoint_extract_air_cooker = 50
+   flexit_bacnet.fan_setpoint_supply_air_cooker = 70

    yield flexit_bacnet
@@ -5,8 +5,8 @@
  }),
  'area_id': None,
  'capabilities': dict({
-    'max': 100,
-    'min': 0,
+    'max': 70,
+    'min': 30,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
  }),
@@ -42,8 +42,8 @@
  'attributes': ReadOnlyDict({
    'device_class': 'power_factor',
    'friendly_name': 'Device Name Away extract fan setpoint',
-    'max': 100,
-    'min': 0,
+    'max': 70,
+    'min': 30,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
    'unit_of_measurement': '%',
@@ -53,7 +53,7 @@
  'last_changed': <ANY>,
  'last_reported': <ANY>,
  'last_updated': <ANY>,
-  'state': '30',
+  'state': '40',
})
# ---
# name: test_numbers[number.device_name_away_supply_fan_setpoint-entry]
@@ -62,8 +62,8 @@
  }),
  'area_id': None,
  'capabilities': dict({
-    'max': 100,
-    'min': 0,
+    'max': 74,
+    'min': 30,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
  }),
@@ -99,8 +99,8 @@
  'attributes': ReadOnlyDict({
    'device_class': 'power_factor',
    'friendly_name': 'Device Name Away supply fan setpoint',
-    'max': 100,
-    'min': 0,
+    'max': 74,
+    'min': 30,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
    'unit_of_measurement': '%',
@@ -110,7 +110,7 @@
  'last_changed': <ANY>,
  'last_reported': <ANY>,
  'last_updated': <ANY>,
-  'state': '40',
+  'state': '42',
})
# ---
# name: test_numbers[number.device_name_cooker_hood_extract_fan_setpoint-entry]
@@ -120,7 +120,7 @@
  'area_id': None,
  'capabilities': dict({
    'max': 100,
-    'min': 0,
+    'min': 30,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
  }),
@@ -157,7 +157,7 @@
    'device_class': 'power_factor',
    'friendly_name': 'Device Name Cooker hood extract fan setpoint',
    'max': 100,
-    'min': 0,
+    'min': 30,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
    'unit_of_measurement': '%',
@@ -167,7 +167,7 @@
  'last_changed': <ANY>,
  'last_reported': <ANY>,
  'last_updated': <ANY>,
-  'state': '90',
+  'state': '50',
})
# ---
# name: test_numbers[number.device_name_cooker_hood_supply_fan_setpoint-entry]
@@ -177,7 +177,7 @@
  'area_id': None,
  'capabilities': dict({
    'max': 100,
-    'min': 0,
+    'min': 30,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
  }),
@@ -214,7 +214,7 @@
    'device_class': 'power_factor',
    'friendly_name': 'Device Name Cooker hood supply fan setpoint',
    'max': 100,
-    'min': 0,
+    'min': 30,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
    'unit_of_measurement': '%',
@@ -224,7 +224,7 @@
  'last_changed': <ANY>,
  'last_reported': <ANY>,
  'last_updated': <ANY>,
-  'state': '100',
+  'state': '70',
})
# ---
# name: test_numbers[number.device_name_fireplace_extract_fan_setpoint-entry]
@@ -234,7 +234,7 @@
  'area_id': None,
  'capabilities': dict({
    'max': 100,
-    'min': 0,
+    'min': 30,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
  }),
@@ -271,7 +271,7 @@
    'device_class': 'power_factor',
    'friendly_name': 'Device Name Fireplace extract fan setpoint',
    'max': 100,
-    'min': 0,
+    'min': 30,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
    'unit_of_measurement': '%',
@@ -281,7 +281,7 @@
  'last_changed': <ANY>,
  'last_reported': <ANY>,
  'last_updated': <ANY>,
-  'state': '10',
+  'state': '56',
})
# ---
# name: test_numbers[number.device_name_fireplace_supply_fan_setpoint-entry]
@@ -291,7 +291,7 @@
  'area_id': None,
  'capabilities': dict({
    'max': 100,
-    'min': 0,
+    'min': 30,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
  }),
@@ -328,7 +328,7 @@
    'device_class': 'power_factor',
    'friendly_name': 'Device Name Fireplace supply fan setpoint',
    'max': 100,
-    'min': 0,
+    'min': 30,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
    'unit_of_measurement': '%',
@@ -338,7 +338,7 @@
  'last_changed': <ANY>,
  'last_reported': <ANY>,
  'last_updated': <ANY>,
-  'state': '20',
+  'state': '77',
})
# ---
# name: test_numbers[number.device_name_high_extract_fan_setpoint-entry]
@@ -348,7 +348,7 @@
  'area_id': None,
  'capabilities': dict({
    'max': 100,
-    'min': 0,
+    'min': 70,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
  }),
@@ -385,7 +385,7 @@
    'device_class': 'power_factor',
    'friendly_name': 'Device Name High extract fan setpoint',
    'max': 100,
-    'min': 0,
+    'min': 70,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
    'unit_of_measurement': '%',
@@ -395,7 +395,7 @@
  'last_changed': <ANY>,
  'last_reported': <ANY>,
  'last_updated': <ANY>,
-  'state': '70',
+  'state': '100',
})
# ---
# name: test_numbers[number.device_name_high_supply_fan_setpoint-entry]
@@ -405,7 +405,7 @@
  'area_id': None,
  'capabilities': dict({
    'max': 100,
-    'min': 0,
+    'min': 74,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
  }),
@@ -442,7 +442,7 @@
    'device_class': 'power_factor',
    'friendly_name': 'Device Name High supply fan setpoint',
    'max': 100,
-    'min': 0,
+    'min': 74,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
    'unit_of_measurement': '%',
@@ -452,7 +452,7 @@
  'last_changed': <ANY>,
  'last_reported': <ANY>,
  'last_updated': <ANY>,
-  'state': '80',
+  'state': '100',
})
# ---
# name: test_numbers[number.device_name_home_extract_fan_setpoint-entry]
@@ -462,7 +462,7 @@
  'area_id': None,
  'capabilities': dict({
    'max': 100,
-    'min': 0,
+    'min': 40,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
  }),
@@ -499,7 +499,7 @@
    'device_class': 'power_factor',
    'friendly_name': 'Device Name Home extract fan setpoint',
    'max': 100,
-    'min': 0,
+    'min': 40,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
    'unit_of_measurement': '%',
@@ -509,7 +509,7 @@
  'last_changed': <ANY>,
  'last_reported': <ANY>,
  'last_updated': <ANY>,
-  'state': '50',
+  'state': '70',
})
# ---
# name: test_numbers[number.device_name_home_supply_fan_setpoint-entry]
@@ -519,7 +519,7 @@
  'area_id': None,
  'capabilities': dict({
    'max': 100,
-    'min': 0,
+    'min': 42,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
  }),
@@ -556,7 +556,7 @@
    'device_class': 'power_factor',
    'friendly_name': 'Device Name Home supply fan setpoint',
    'max': 100,
-    'min': 0,
+    'min': 42,
    'mode': <NumberMode.SLIDER: 'slider'>,
    'step': 1,
    'unit_of_measurement': '%',
@@ -566,6 +566,6 @@
  'last_changed': <ANY>,
  'last_reported': <ANY>,
  'last_updated': <ANY>,
-  'state': '60',
+  'state': '74',
})
# ---
@@ -64,21 +64,21 @@ async def test_numbers_implementation(
    assert len(mocked_method.mock_calls) == 1
    assert hass.states.get(ENTITY_ID).state == "60"

-   mock_flexit_bacnet.fan_setpoint_supply_air_fire = 10
+   mock_flexit_bacnet.fan_setpoint_supply_air_fire = 40

    await hass.services.async_call(
        NUMBER_DOMAIN,
        SERVICE_SET_VALUE,
        {
            ATTR_ENTITY_ID: ENTITY_ID,
-           ATTR_VALUE: 10,
+           ATTR_VALUE: 40,
        },
        blocking=True,
    )

    mocked_method = getattr(mock_flexit_bacnet, "set_fan_setpoint_supply_air_fire")
    assert len(mocked_method.mock_calls) == 2
-   assert hass.states.get(ENTITY_ID).state == "10"
+   assert hass.states.get(ENTITY_ID).state == "40"

    # Error recovery, when setting the value
    mock_flexit_bacnet.set_fan_setpoint_supply_air_fire.side_effect = DecodingError
@@ -89,7 +89,7 @@ async def test_numbers_implementation(
        SERVICE_SET_VALUE,
        {
            ATTR_ENTITY_ID: ENTITY_ID,
-           ATTR_VALUE: 10,
+           ATTR_VALUE: 40,
        },
        blocking=True,
    )
@@ -1474,7 +1474,7 @@
  'state': dict({
    'attributes': dict({
      'changed_by': None,
-      'code_arm_required': True,
+      'code_arm_required': False,
      'code_format': None,
      'friendly_name': 'Aqara-Hub-E1-00A0 Security System',
      'supported_features': <AlarmControlPanelEntityFeature: 7>,
@@ -1848,7 +1848,7 @@
  'state': dict({
    'attributes': dict({
      'changed_by': None,
-      'code_arm_required': True,
+      'code_arm_required': False,
      'code_format': None,
      'friendly_name': 'Aqara Hub-1563 Security System',
      'supported_features': <AlarmControlPanelEntityFeature: 7>,
@@ -6,6 +6,7 @@ from aiohomekit.model import Accessory
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import ServicesTypes

+from homeassistant.components.alarm_control_panel import ATTR_CODE_ARM_REQUIRED
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

@@ -106,6 +107,7 @@ async def test_switch_read_alarm_state(
    state = await helper.poll_and_get_state()
    assert state.state == "armed_home"
    assert state.attributes["battery_level"] == 50
+   assert state.attributes[ATTR_CODE_ARM_REQUIRED] is False

    await helper.async_update(
        ServicesTypes.SECURITY_SYSTEM,
@@ -688,3 +688,27 @@ async def test_prohibited_augmented_assignment_operations(
    hass.async_add_executor_job(execute, hass, "aug_assign_prohibited.py", case, {})
    await hass.async_block_till_done(wait_background_tasks=True)
    assert error in caplog.text


async def test_import_allow_strptime(
    hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
    """Test calling datetime.datetime.strptime works."""
    source = """
test_date = datetime.datetime.strptime('2024-04-01', '%Y-%m-%d')
logger.info(f'Date {test_date}')
    """
    hass.async_add_executor_job(execute, hass, "test.py", source, {})
    await hass.async_block_till_done(wait_background_tasks=True)
    assert "Error executing script: Not allowed to import _strptime" not in caplog.text
    assert "Date 2024-04-01 00:00:00" in caplog.text


async def test_no_other_imports_allowed(
    hass: HomeAssistant, caplog: pytest.LogCaptureFixture
) -> None:
    """Test imports are not allowed."""
    source = "import sys"
    hass.async_add_executor_job(execute, hass, "test.py", source, {})
    await hass.async_block_till_done(wait_background_tasks=True)
    assert "Error executing script: Not allowed to import sys" in caplog.text
@@ -112,9 +112,6 @@ async def test_purge_big_database(hass: HomeAssistant, recorder_mock: Recorder)

async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None:
    """Test deleting old states."""
-   assert recorder_mock.states_manager.oldest_ts is None
-   oldest_ts = recorder_mock.states_manager.oldest_ts

    await _add_test_states(hass)

    # make sure we start with 6 states
@@ -130,10 +127,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None:
        events = session.query(Events).filter(Events.event_type == "state_changed")
        assert events.count() == 0

-       assert recorder_mock.states_manager.oldest_ts != oldest_ts
-       assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts
-       oldest_ts = recorder_mock.states_manager.oldest_ts

        assert "test.recorder2" in recorder_mock.states_manager._last_committed_id

    purge_before = dt_util.utcnow() - timedelta(days=4)
@@ -147,8 +140,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None:
        repack=False,
    )
    assert not finished
-   # states_manager.oldest_ts is not updated until after the purge is complete
-   assert recorder_mock.states_manager.oldest_ts == oldest_ts

    with session_scope(hass=hass) as session:
        states = session.query(States)
@@ -171,8 +162,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None:

    finished = purge_old_data(recorder_mock, purge_before, repack=False)
    assert finished
-   # states_manager.oldest_ts should now be updated
-   assert recorder_mock.states_manager.oldest_ts != oldest_ts

    with session_scope(hass=hass) as session:
        states = session.query(States)
@@ -180,10 +169,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None:
        assert states.count() == 2
        assert state_attributes.count() == 1

-       assert recorder_mock.states_manager.oldest_ts != oldest_ts
-       assert recorder_mock.states_manager.oldest_ts == states[0].last_updated_ts
-       oldest_ts = recorder_mock.states_manager.oldest_ts

        assert "test.recorder2" in recorder_mock.states_manager._last_committed_id

    # run purge_old_data again
@@ -196,8 +181,6 @@ async def test_purge_old_states(hass: HomeAssistant, recorder_mock: Recorder) -> None:
        repack=False,
    )
    assert not finished
-   # states_manager.oldest_ts is not updated until after the purge is complete
-   assert recorder_mock.states_manager.oldest_ts == oldest_ts

    with session_scope(hass=hass) as session:
        assert states.count() == 0
@@ -429,6 +429,12 @@ async def test_protect_loop_load_verify_locations(
        context.load_verify_locations("/dev/null")
    assert "Detected blocking call to load_verify_locations" in caplog.text

+   # ignore with only cadata
+   caplog.clear()
+   with pytest.raises(ssl.SSLError):
+       context.load_verify_locations(cadata="xxx")
+   assert "Detected blocking call to load_verify_locations" not in caplog.text


async def test_protect_loop_load_cert_chain(
    hass: HomeAssistant, caplog: pytest.LogCaptureFixture