mirror of https://github.com/home-assistant/core.git
synced 2025-08-01 09:38:21 +00:00

Merge pull request #73041 from home-assistant/rc

commit c0482bdbfd
homeassistant/components/bmw_connected_drive/manifest.json
@@ -2,7 +2,7 @@
   "domain": "bmw_connected_drive",
   "name": "BMW Connected Drive",
   "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
-  "requirements": ["bimmer_connected==0.9.3"],
+  "requirements": ["bimmer_connected==0.9.4"],
   "codeowners": ["@gerard33", "@rikroe"],
   "config_flow": true,
   "iot_class": "cloud_polling",
homeassistant/components/fjaraskupan/__init__.py
@@ -9,7 +9,7 @@ import logging
 from bleak import BleakScanner
 from bleak.backends.device import BLEDevice
 from bleak.backends.scanner import AdvertisementData
-from fjaraskupan import DEVICE_NAME, Device, State, device_filter
+from fjaraskupan import Device, State, device_filter

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import Platform
@@ -90,7 +90,7 @@ class EntryState:
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up Fjäråskupan from a config entry."""

-    scanner = BleakScanner(filters={"Pattern": DEVICE_NAME, "DuplicateData": True})
+    scanner = BleakScanner(filters={"DuplicateData": True})

     state = EntryState(scanner, {})
     hass.data.setdefault(DOMAIN, {})
homeassistant/components/fjaraskupan/config_flow.py
@@ -7,7 +7,7 @@ import async_timeout
 from bleak import BleakScanner
 from bleak.backends.device import BLEDevice
 from bleak.backends.scanner import AdvertisementData
-from fjaraskupan import DEVICE_NAME, device_filter
+from fjaraskupan import device_filter

 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.config_entry_flow import register_discovery_flow
@@ -28,7 +28,7 @@ async def _async_has_devices(hass: HomeAssistant) -> bool:

     async with BleakScanner(
         detection_callback=detection,
-        filters={"Pattern": DEVICE_NAME, "DuplicateData": True},
+        filters={"DuplicateData": True},
     ):
         try:
             async with async_timeout.timeout(CONST_WAIT_TIME):
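Note: both Fjäråskupan hunks drop the BlueZ-specific "Pattern" discovery filter (and the now-unused DEVICE_NAME import); matching instead happens in the detection callback via device_filter. A minimal standalone sketch of callback-side filtering with bleak follows — the "COOKERHOOD_FJAR" name and the scan helper are illustrative assumptions, not the integration's code:

    # Sketch: filter advertisements in the callback, which works on every
    # bleak backend, instead of passing backend-specific BlueZ filters.
    import asyncio

    from bleak import BleakScanner
    from bleak.backends.device import BLEDevice
    from bleak.backends.scanner import AdvertisementData

    ASSUMED_DEVICE_NAME = "COOKERHOOD_FJAR"  # illustrative stand-in for fjaraskupan's DEVICE_NAME


    def detection(device: BLEDevice, advertisement: AdvertisementData) -> None:
        # Match on the advertised local name in Python rather than in BlueZ.
        if advertisement.local_name and advertisement.local_name.startswith(
            ASSUMED_DEVICE_NAME
        ):
            print(f"Found candidate device: {device.address}")


    async def scan(seconds: float = 10.0) -> None:
        async with BleakScanner(detection_callback=detection):
            await asyncio.sleep(seconds)


    if __name__ == "__main__":
        asyncio.run(scan())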
homeassistant/components/google/api.py
@@ -5,7 +5,6 @@ from __future__ import annotations
 from collections.abc import Awaitable, Callable
 import datetime
 import logging
-import time
 from typing import Any, cast

 import aiohttp
@@ -50,12 +49,16 @@ class DeviceAuth(AuthImplementation):
     async def async_resolve_external_data(self, external_data: Any) -> dict:
         """Resolve a Google API Credentials object to Home Assistant token."""
         creds: Credentials = external_data[DEVICE_AUTH_CREDS]
+        delta = creds.token_expiry.replace(tzinfo=datetime.timezone.utc) - dt.utcnow()
+        _LOGGER.debug(
+            "Token expires at %s (in %s)", creds.token_expiry, delta.total_seconds()
+        )
         return {
             "access_token": creds.access_token,
             "refresh_token": creds.refresh_token,
             "scope": " ".join(creds.scopes),
             "token_type": "Bearer",
-            "expires_in": creds.token_expiry.timestamp() - time.time(),
+            "expires_in": delta.total_seconds(),
         }

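Note: the expires_in change works around datetime semantics. oauth2client's token_expiry is an offset-naive UTC datetime, and calling .timestamp() on a naive value interprets it as local time, skewing the result by the host's UTC offset. A small self-contained sketch of the pitfall (variable names are illustrative):

    import datetime
    import time

    # token_expiry is offset-naive UTC, as oauth2client returns it
    token_expiry = datetime.datetime.utcnow() + datetime.timedelta(hours=1)

    # Buggy: .timestamp() interprets a naive datetime as *local* time,
    # so the result is skewed by the host's UTC offset.
    buggy_expires_in = token_expiry.timestamp() - time.time()

    # Fixed: mark the datetime as UTC and subtract an aware "now".
    aware_expiry = token_expiry.replace(tzinfo=datetime.timezone.utc)
    delta = aware_expiry - datetime.datetime.now(datetime.timezone.utc)
    fixed_expires_in = delta.total_seconds()

    # On a UTC+2 host this prints roughly -3600 and 3600.
    print(round(buggy_expires_in), round(fixed_expires_in))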
homeassistant/components/history_stats/data.py
@@ -67,9 +67,10 @@ class HistoryStats:
         current_period_end_timestamp = floored_timestamp(current_period_end)
         previous_period_start_timestamp = floored_timestamp(previous_period_start)
         previous_period_end_timestamp = floored_timestamp(previous_period_end)
-        now_timestamp = floored_timestamp(datetime.datetime.now())
+        utc_now = dt_util.utcnow()
+        now_timestamp = floored_timestamp(utc_now)

-        if now_timestamp < current_period_start_timestamp:
+        if current_period_start > utc_now:
             # History cannot tell the future
             self._history_current_period = []
             self._previous_run_before_start = True
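Note: this is the same class of bug as the Google token fix. datetime.datetime.now() returns local, offset-naive time, so flooring it into a timestamp and comparing against UTC-derived period bounds is skewed by the host's UTC offset. A tiny sketch of the aware-to-aware comparison the new code relies on (names are illustrative; dt_util.utcnow() is Home Assistant's aware-UTC helper):

    # Sketch: compare timezone-aware datetimes directly instead of flooring
    # a naive local "now" into a timestamp.
    import datetime

    utc_now = datetime.datetime.now(datetime.timezone.utc)  # aware, like dt_util.utcnow()
    current_period_start = utc_now + datetime.timedelta(minutes=5)  # aware period bound

    if current_period_start > utc_now:
        # History cannot tell the future
        print("period has not started yet; skip querying history")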
homeassistant/components/isy994/climate.py
@@ -117,7 +117,7 @@ class ISYThermostatEntity(ISYNodeEntity, ClimateEntity):
         """Return the current humidity."""
         if not (humidity := self._node.aux_properties.get(PROP_HUMIDITY)):
             return None
-        if humidity == ISY_VALUE_UNKNOWN:
+        if humidity.value == ISY_VALUE_UNKNOWN:
             return None
         return int(humidity.value)

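Note: the ISY994 change fixes a comparison against the wrong object — aux_properties.get(...) returns a property wrapper, so the unknown-value sentinel must be compared with its .value. A minimal sketch of the corrected guard (the NodeProperty stand-in and sentinel value are illustrative, not PyISY's actual classes):

    from __future__ import annotations

    ISY_VALUE_UNKNOWN = -1 * float("inf")  # stand-in sentinel for illustration


    class NodeProperty:
        """Illustrative stand-in for the property wrapper returned by the node."""

        def __init__(self, value: float) -> None:
            self.value = value


    def current_humidity(humidity: NodeProperty | None) -> int | None:
        if humidity is None:
            return None
        # Compare the wrapped value, not the wrapper object itself.
        if humidity.value == ISY_VALUE_UNKNOWN:
            return None
        return int(humidity.value)


    print(current_humidity(NodeProperty(47)))                 # 47
    print(current_humidity(NodeProperty(ISY_VALUE_UNKNOWN)))  # None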
homeassistant/components/lcn/manifest.json
@@ -3,7 +3,7 @@
   "name": "LCN",
   "config_flow": false,
   "documentation": "https://www.home-assistant.io/integrations/lcn",
-  "requirements": ["pypck==0.7.14"],
+  "requirements": ["pypck==0.7.15"],
   "codeowners": ["@alengwenus"],
   "iot_class": "local_push",
   "loggers": ["pypck"]
homeassistant/components/netgear/manifest.json
@@ -2,7 +2,7 @@
   "domain": "netgear",
   "name": "NETGEAR",
   "documentation": "https://www.home-assistant.io/integrations/netgear",
-  "requirements": ["pynetgear==0.10.0"],
+  "requirements": ["pynetgear==0.10.4"],
   "codeowners": ["@hacf-fr", "@Quentame", "@starkillerOG"],
   "iot_class": "local_polling",
   "config_flow": true,
homeassistant/components/recorder/filters.py
@@ -5,7 +5,7 @@ from collections.abc import Callable, Iterable
 import json
 from typing import Any

-from sqlalchemy import JSON, Column, Text, cast, not_, or_
+from sqlalchemy import Column, Text, cast, not_, or_
 from sqlalchemy.sql.elements import ClauseList

 from homeassistant.const import CONF_DOMAINS, CONF_ENTITIES, CONF_EXCLUDE, CONF_INCLUDE
@@ -16,6 +16,7 @@ from .models import ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT, States

 DOMAIN = "history"
 HISTORY_FILTERS = "history_filters"
+JSON_NULL = json.dumps(None)

 GLOB_TO_SQL_CHARS = {
     ord("*"): "%",
@@ -196,7 +197,17 @@ class Filters:
         """Generate the entity filter query."""
         _encoder = json.dumps
         return or_(
-            (ENTITY_ID_IN_EVENT == JSON.NULL) & (OLD_ENTITY_ID_IN_EVENT == JSON.NULL),
+            # sqlalchemy's SQLite json implementation always
+            # wraps everything with JSON_QUOTE so it resolves to 'null'
+            # when its empty
+            #
+            # For MySQL and PostgreSQL it will resolve to a literal
+            # NULL when its empty
+            #
+            ((ENTITY_ID_IN_EVENT == JSON_NULL) | ENTITY_ID_IN_EVENT.is_(None))
+            & (
+                (OLD_ENTITY_ID_IN_EVENT == JSON_NULL) | OLD_ENTITY_ID_IN_EVENT.is_(None)
+            ),
             self._generate_filter_for_columns(
                 (ENTITY_ID_IN_EVENT, OLD_ENTITY_ID_IN_EVENT), _encoder
             ).self_group(),
@@ -208,8 +219,11 @@ def _globs_to_like(
 ) -> ClauseList:
     """Translate glob to sql."""
     matchers = [
-        cast(column, Text()).like(
-            encoder(glob_str).translate(GLOB_TO_SQL_CHARS), escape="\\"
+        (
+            column.is_not(None)
+            & cast(column, Text()).like(
+                encoder(glob_str).translate(GLOB_TO_SQL_CHARS), escape="\\"
+            )
         )
         for glob_str in glob_strs
         for column in columns
@@ -221,7 +235,10 @@ def _entity_matcher(
     entity_ids: Iterable[str], columns: Iterable[Column], encoder: Callable[[Any], Any]
 ) -> ClauseList:
     matchers = [
-        cast(column, Text()).in_([encoder(entity_id) for entity_id in entity_ids])
+        (
+            column.is_not(None)
+            & cast(column, Text()).in_([encoder(entity_id) for entity_id in entity_ids])
+        )
         for column in columns
     ]
     return or_(*matchers) if matchers else or_(False)
@@ -231,7 +248,7 @@ def _domain_matcher(
     domains: Iterable[str], columns: Iterable[Column], encoder: Callable[[Any], Any]
 ) -> ClauseList:
     matchers = [
-        cast(column, Text()).like(encoder(f"{domain}.%"))
+        (column.is_not(None) & cast(column, Text()).like(encoder(f"{domain}.%")))
         for domain in domains
         for column in columns
     ]
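Note: these filter changes deal with SQL three-valued logic. When a column is NULL, cast(column, Text()).like(...) evaluates to SQL NULL rather than FALSE, so a negated exclude filter silently drops those rows; guarding each matcher with column.is_not(None) (and matching the JSON 'null' text on SQLite) keeps NULL rows visible. A self-contained sketch of the effect on a toy table (not Home Assistant's schema):

    from sqlalchemy import Column, Integer, Text, create_engine, not_, select
    from sqlalchemy.orm import Session, declarative_base

    Base = declarative_base()


    class Event(Base):
        __tablename__ = "events"
        id = Column(Integer, primary_key=True)
        entity_id = Column(Text, nullable=True)


    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.add_all([Event(entity_id="light.kitchen"), Event(entity_id=None)])
        session.commit()

        # NULL LIKE 'sensor.%' is NULL, and NOT NULL is still NULL,
        # so the NULL row is excluded by the naive filter.
        naive = not_(Event.entity_id.like("sensor.%"))
        print(len(session.scalars(select(Event).where(naive)).all()))  # 1

        # The IS NOT NULL guard makes the match FALSE for NULL rows,
        # so NOT(match) is TRUE and the NULL row is kept.
        guarded = not_(Event.entity_id.is_not(None) & Event.entity_id.like("sensor.%"))
        print(len(session.scalars(select(Event).where(guarded)).all()))  # 2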
homeassistant/components/recorder/statistics.py
@@ -984,7 +984,6 @@ def _reduce_statistics_per_month(
 def _statistics_during_period_stmt(
     start_time: datetime,
     end_time: datetime | None,
-    statistic_ids: list[str] | None,
     metadata_ids: list[int] | None,
     table: type[Statistics | StatisticsShortTerm],
 ) -> StatementLambdaElement:
@@ -1002,7 +1001,7 @@ def _statistics_during_period_stmt(
     if end_time is not None:
         stmt += lambda q: q.filter(table.start < end_time)

-    if statistic_ids is not None:
+    if metadata_ids:
         stmt += lambda q: q.filter(table.metadata_id.in_(metadata_ids))

     stmt += lambda q: q.order_by(table.metadata_id, table.start)
@@ -1038,9 +1037,7 @@ def statistics_during_period(
     else:
         table = Statistics

-    stmt = _statistics_during_period_stmt(
-        start_time, end_time, statistic_ids, metadata_ids, table
-    )
+    stmt = _statistics_during_period_stmt(start_time, end_time, metadata_ids, table)
     stats = execute_stmt_lambda_element(session, stmt)

     if not stats:
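Note: this fix removes a parameter that no longer influences the query. With SQLAlchemy's lambda statements, each `stmt +=` closure contributes to the cached statement, so the branch guarding the IN filter should key off metadata_ids — the value actually bound into the statement — not the unrelated statistic_ids. A rough sketch of the pattern on a toy model (table and helper names are assumptions, not the recorder's schema):

    from sqlalchemy import Column, DateTime, Integer, lambda_stmt, select
    from sqlalchemy.orm import declarative_base
    from sqlalchemy.sql.lambdas import StatementLambdaElement

    Base = declarative_base()


    class Statistics(Base):
        __tablename__ = "statistics"
        id = Column(Integer, primary_key=True)
        metadata_id = Column(Integer)
        start = Column(DateTime)


    def stats_stmt(metadata_ids: list[int] | None) -> StatementLambdaElement:
        stmt = lambda_stmt(lambda: select(Statistics))
        if metadata_ids:  # branch on the value the filter really uses
            stmt += lambda q: q.filter(Statistics.metadata_id.in_(metadata_ids))
        stmt += lambda q: q.order_by(Statistics.metadata_id, Statistics.start)
        return stmt


    print(stats_stmt([1, 2]))  # SELECT ... WHERE metadata_id IN (...) ORDER BY ...
    print(stats_stmt(None))    # SELECT ... ORDER BY ... (no IN filter)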
homeassistant/components/sonos/media.py
@@ -205,13 +205,15 @@ class SonosMedia:
         self, position_info: dict[str, int], force_update: bool = False
     ) -> None:
         """Update state when playing music tracks."""
-        if (duration := position_info.get(DURATION_SECONDS)) == 0:
+        duration = position_info.get(DURATION_SECONDS)
+        current_position = position_info.get(POSITION_SECONDS)
+
+        if not (duration or current_position):
             self.clear_position()
             return

         should_update = force_update
         self.duration = duration
-        current_position = position_info.get(POSITION_SECONDS)

         # player started reporting position?
         if current_position is not None and self.position is None:
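Note: the Sonos change stops treating a zero duration alone as "no track" — radio streams report duration 0 while still reporting a position. Position data is now cleared only when neither value is present. A tiny illustrative sketch (the dictionary keys are assumptions standing in for DURATION_SECONDS/POSITION_SECONDS):

    def should_clear_position(position_info: dict[str, int]) -> bool:
        """Return True when neither a duration nor a position is reported."""
        duration = position_info.get("track_duration_in_s")    # assumed key
        current_position = position_info.get("position_in_s")  # assumed key
        return not (duration or current_position)


    print(should_clear_position({}))                                               # True
    print(should_clear_position({"track_duration_in_s": 0}))                       # True
    print(should_clear_position({"track_duration_in_s": 0, "position_in_s": 42}))  # False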
homeassistant/const.py
@@ -7,7 +7,7 @@ from .backports.enum import StrEnum

 MAJOR_VERSION: Final = 2022
 MINOR_VERSION: Final = 6
-PATCH_VERSION: Final = "1"
+PATCH_VERSION: Final = "2"
 __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
 __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
 REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 9, 0)
homeassistant/helpers/template.py
@@ -1369,19 +1369,19 @@ def multiply(value, amount, default=_SENTINEL):

 def logarithm(value, base=math.e, default=_SENTINEL):
     """Filter and function to get logarithm of the value with a specific base."""
-    try:
-        value_float = float(value)
-    except (ValueError, TypeError):
-        if default is _SENTINEL:
-            raise_no_default("log", value)
-        return default
     try:
         base_float = float(base)
     except (ValueError, TypeError):
         if default is _SENTINEL:
             raise_no_default("log", base)
         return default
-    return math.log(value_float, base_float)
+    try:
+        value_float = float(value)
+        return math.log(value_float, base_float)
+    except (ValueError, TypeError):
+        if default is _SENTINEL:
+            raise_no_default("log", value)
+        return default


 def sine(value, default=_SENTINEL):
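Note: the logarithm refactor widens the fallback. float("no_number") raises and was already handled, but math.log(0) raises ValueError ("math domain error") after a successful float conversion, which the old code let escape. Moving the math.log call inside the value try block routes domain errors to the default as well. A standalone sketch, with raise_no_default simplified to a bare raise:

    import math

    _SENTINEL = object()


    def logarithm(value, base=math.e, default=_SENTINEL):
        """Sketch of the fixed filter; raise_no_default becomes a bare raise."""
        try:
            base_float = float(base)
        except (ValueError, TypeError):
            if default is _SENTINEL:
                raise
            return default
        try:
            value_float = float(value)
            # math.log raises ValueError for 0 and negative inputs, so it
            # must live inside this try block too.
            return math.log(value_float, base_float)
        except (ValueError, TypeError):
            if default is _SENTINEL:
                raise
            return default


    print(logarithm(8, 2))              # 3.0
    print(logarithm(0, 10, default=1))  # 1 -- domain error falls back to default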
requirements_all.txt
@@ -394,7 +394,7 @@ beautifulsoup4==4.11.1
 bellows==0.30.0

 # homeassistant.components.bmw_connected_drive
-bimmer_connected==0.9.3
+bimmer_connected==0.9.4

 # homeassistant.components.bizkaibus
 bizkaibus==0.1.1
@@ -1673,7 +1673,7 @@ pymyq==3.1.4
 pymysensors==0.22.1

 # homeassistant.components.netgear
-pynetgear==0.10.0
+pynetgear==0.10.4

 # homeassistant.components.netio
 pynetio==0.1.9.1
@@ -1735,7 +1735,7 @@ pyownet==0.10.0.post1
 pypca==0.0.7

 # homeassistant.components.lcn
-pypck==0.7.14
+pypck==0.7.15

 # homeassistant.components.pjlink
 pypjlink2==1.2.1
requirements_test_all.txt
@@ -309,7 +309,7 @@ beautifulsoup4==4.11.1
 bellows==0.30.0

 # homeassistant.components.bmw_connected_drive
-bimmer_connected==0.9.3
+bimmer_connected==0.9.4

 # homeassistant.components.blebox
 blebox_uniapi==1.3.3
@@ -1131,7 +1131,7 @@ pymyq==3.1.4
 pymysensors==0.22.1

 # homeassistant.components.netgear
-pynetgear==0.10.0
+pynetgear==0.10.4

 # homeassistant.components.nina
 pynina==0.1.8
@@ -1178,7 +1178,7 @@ pyowm==3.2.0
 pyownet==0.10.0.post1

 # homeassistant.components.lcn
-pypck==0.7.14
+pypck==0.7.15

 # homeassistant.components.plaato
 pyplaato==0.0.18
setup.cfg
@@ -1,5 +1,5 @@
 [metadata]
-version = 2022.6.1
+version = 2022.6.2
 url = https://www.home-assistant.io/

 [options]
tests/components/google/conftest.py
@@ -16,7 +16,6 @@ from homeassistant.components.google import CONF_TRACK_NEW, DOMAIN
 from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET
 from homeassistant.core import HomeAssistant
 from homeassistant.setup import async_setup_component
-from homeassistant.util.dt import utcnow

 from tests.common import MockConfigEntry
 from tests.test_util.aiohttp import AiohttpClientMocker
@@ -136,7 +135,10 @@ def token_scopes() -> list[str]:
 @pytest.fixture
 def token_expiry() -> datetime.datetime:
     """Expiration time for credentials used in the test."""
-    return utcnow() + datetime.timedelta(days=7)
+    # OAuth library returns an offset-naive timestamp
+    return datetime.datetime.fromtimestamp(
+        datetime.datetime.utcnow().timestamp()
+    ) + datetime.timedelta(hours=1)


 @pytest.fixture
tests/components/google/test_config_flow.py
@@ -8,6 +8,7 @@ from typing import Any
 from unittest.mock import Mock, patch

 from aiohttp.client_exceptions import ClientError
+from freezegun.api import FrozenDateTimeFactory
 from oauth2client.client import (
     FlowExchangeError,
     OAuth2Credentials,
@@ -94,11 +95,13 @@ async def fire_alarm(hass, point_in_time):
     await hass.async_block_till_done()


+@pytest.mark.freeze_time("2022-06-03 15:19:59-00:00")
 async def test_full_flow_yaml_creds(
     hass: HomeAssistant,
     mock_code_flow: Mock,
     mock_exchange: Mock,
     component_setup: ComponentSetup,
+    freezer: FrozenDateTimeFactory,
 ) -> None:
     """Test successful creds setup."""
     assert await component_setup()
@@ -115,8 +118,8 @@ async def test_full_flow_yaml_creds(
         "homeassistant.components.google.async_setup_entry", return_value=True
     ) as mock_setup:
         # Run one tick to invoke the credential exchange check
-        now = utcnow()
-        await fire_alarm(hass, now + CODE_CHECK_ALARM_TIMEDELTA)
+        freezer.tick(CODE_CHECK_ALARM_TIMEDELTA)
+        await fire_alarm(hass, datetime.datetime.utcnow())
         await hass.async_block_till_done()
     result = await hass.config_entries.flow.async_configure(
         flow_id=result["flow_id"]
@@ -127,12 +130,11 @@ async def test_full_flow_yaml_creds(
     assert "data" in result
     data = result["data"]
     assert "token" in data
-    assert 0 < data["token"]["expires_in"] < 8 * 86400
     assert (
-        datetime.datetime.now().timestamp()
-        <= data["token"]["expires_at"]
-        < (datetime.datetime.now() + datetime.timedelta(days=8)).timestamp()
+        data["token"]["expires_in"]
+        == 60 * 60 - CODE_CHECK_ALARM_TIMEDELTA.total_seconds()
     )
+    assert data["token"]["expires_at"] == 1654273199.0
     data["token"].pop("expires_at")
     data["token"].pop("expires_in")
     assert data == {
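Note: the Google test changes pin the wall clock with freezegun so the asserted expires_in/expires_at values are deterministic instead of range-checked. A minimal sketch of the pattern (assuming the pytest plugin that provides the freezer fixture, as used by Home Assistant's test suite):

    import datetime

    import pytest
    from freezegun.api import FrozenDateTimeFactory


    @pytest.mark.freeze_time("2022-06-03 15:19:59-00:00")
    def test_clock_is_deterministic(freezer: FrozenDateTimeFactory) -> None:
        before = datetime.datetime.utcnow()
        freezer.tick(datetime.timedelta(minutes=1))  # advance the frozen clock exactly
        assert datetime.datetime.utcnow() - before == datetime.timedelta(minutes=1)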
tests/components/history/test_init.py
@@ -5,6 +5,7 @@ from http import HTTPStatus
 import json
 from unittest.mock import patch, sentinel

+from freezegun import freeze_time
 import pytest
 from pytest import approx

@@ -928,6 +929,141 @@ async def test_statistics_during_period(
     }


+@pytest.mark.parametrize(
+    "units, attributes, state, value",
+    [
+        (IMPERIAL_SYSTEM, POWER_SENSOR_ATTRIBUTES, 10, 10000),
+        (METRIC_SYSTEM, POWER_SENSOR_ATTRIBUTES, 10, 10000),
+        (IMPERIAL_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, 10, 50),
+        (METRIC_SYSTEM, TEMPERATURE_SENSOR_ATTRIBUTES, 10, 10),
+        (IMPERIAL_SYSTEM, PRESSURE_SENSOR_ATTRIBUTES, 1000, 14.503774389728312),
+        (METRIC_SYSTEM, PRESSURE_SENSOR_ATTRIBUTES, 1000, 100000),
+    ],
+)
+async def test_statistics_during_period_in_the_past(
+    hass, hass_ws_client, recorder_mock, units, attributes, state, value
+):
+    """Test statistics_during_period in the past."""
+    hass.config.set_time_zone("UTC")
+    now = dt_util.utcnow().replace()
+
+    hass.config.units = units
+    await async_setup_component(hass, "history", {})
+    await async_setup_component(hass, "sensor", {})
+    await async_recorder_block_till_done(hass)
+
+    past = now - timedelta(days=3)
+
+    with freeze_time(past):
+        hass.states.async_set("sensor.test", state, attributes=attributes)
+        await async_wait_recording_done(hass)
+
+    sensor_state = hass.states.get("sensor.test")
+    assert sensor_state.last_updated == past
+
+    stats_top_of_hour = past.replace(minute=0, second=0, microsecond=0)
+    stats_start = past.replace(minute=55)
+    do_adhoc_statistics(hass, start=stats_start)
+    await async_wait_recording_done(hass)
+
+    client = await hass_ws_client()
+    await client.send_json(
+        {
+            "id": 1,
+            "type": "history/statistics_during_period",
+            "start_time": now.isoformat(),
+            "end_time": now.isoformat(),
+            "statistic_ids": ["sensor.test"],
+            "period": "hour",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {}
+
+    await client.send_json(
+        {
+            "id": 2,
+            "type": "history/statistics_during_period",
+            "start_time": now.isoformat(),
+            "statistic_ids": ["sensor.test"],
+            "period": "5minute",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {}
+
+    past = now - timedelta(days=3)
+    await client.send_json(
+        {
+            "id": 3,
+            "type": "history/statistics_during_period",
+            "start_time": past.isoformat(),
+            "statistic_ids": ["sensor.test"],
+            "period": "5minute",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "sensor.test": [
+            {
+                "statistic_id": "sensor.test",
+                "start": stats_start.isoformat(),
+                "end": (stats_start + timedelta(minutes=5)).isoformat(),
+                "mean": approx(value),
+                "min": approx(value),
+                "max": approx(value),
+                "last_reset": None,
+                "state": None,
+                "sum": None,
+            }
+        ]
+    }
+
+    start_of_day = stats_top_of_hour.replace(hour=0, minute=0)
+    await client.send_json(
+        {
+            "id": 4,
+            "type": "history/statistics_during_period",
+            "start_time": stats_top_of_hour.isoformat(),
+            "statistic_ids": ["sensor.test"],
+            "period": "day",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {
+        "sensor.test": [
+            {
+                "statistic_id": "sensor.test",
+                "start": start_of_day.isoformat(),
+                "end": (start_of_day + timedelta(days=1)).isoformat(),
+                "mean": approx(value),
+                "min": approx(value),
+                "max": approx(value),
+                "last_reset": None,
+                "state": None,
+                "sum": None,
+            }
+        ]
+    }
+
+    await client.send_json(
+        {
+            "id": 5,
+            "type": "history/statistics_during_period",
+            "start_time": now.isoformat(),
+            "statistic_ids": ["sensor.test"],
+            "period": "5minute",
+        }
+    )
+    response = await client.receive_json()
+    assert response["success"]
+    assert response["result"] == {}
+
+
 async def test_statistics_during_period_bad_start_time(
     hass, hass_ws_client, recorder_mock
 ):
tests/helpers/test_template.py
@@ -447,6 +447,8 @@ def test_logarithm(hass):
     assert render(hass, "{{ 'no_number' | log(10, default=1) }}") == 1
     assert render(hass, "{{ log('no_number', 10, 1) }}") == 1
     assert render(hass, "{{ log('no_number', 10, default=1) }}") == 1
+    assert render(hass, "{{ log(0, 10, 1) }}") == 1
+    assert render(hass, "{{ log(0, 10, default=1) }}") == 1


 def test_sine(hass):