mirror of
https://github.com/home-assistant/core.git
synced 2025-05-03 13:39:16 +00:00

* delete more code * tweak * tweak * wrappers * restore lost performance * restore lost performance * restore lost performance * compact * reduce * fix refactor * DRY * tweak * delete the start time state injector * move away the legacy code * tweak * adjust * adjust * tweak * ignore impossible * fix a bug where the first start was changed to the start time when there was no previous history recorded before * avoid the empty scan most cases * postgresql * fixes * workaround for mariadb < 10.4 * remove unused * remove unused * adjust * bail early * tweak * tweak * fix more tests * fix recorderrun being init in the future in the test * run history tests on schema 30 as well * Revert "run history tests on schema 30 as well" This reverts commit d798b100ac45c7f8c4cee5d284d94beed5e3d454. * reduce * cleanup * tweak * reduce * prune * adjust * adjust * adjust * reverse later is faster because the index is in forward order and the data size we are reversing is much smaller even if we are in python code * Revert "reverse later is faster because the index is in forward order and the data size we are reversing is much smaller even if we are in python code" This reverts commit bf974e103e651a1334493a9594e08d19e51e392b. * fix test * Revert "Revert "reverse later is faster because the index is in forward order and the data size we are reversing is much smaller even if we are in python code"" This reverts commit 119354499ecf7c1025ec40350e97e73d62d3fd4b. * more coverage * adjust * fix for table order * impossible for it to be missing * remove some more legacy from the all states
699 lines
24 KiB
Python
699 lines
24 KiB
Python
"""The tests the History component."""
|
|
# pylint: disable=invalid-name
|
|
from datetime import timedelta
|
|
from http import HTTPStatus
|
|
import json
|
|
from unittest.mock import patch, sentinel
|
|
|
|
import pytest
|
|
|
|
from homeassistant.components import history
|
|
from homeassistant.components.recorder import Recorder
|
|
from homeassistant.components.recorder.history import get_significant_states
|
|
from homeassistant.components.recorder.models import process_timestamp
|
|
from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE
|
|
from homeassistant.core import HomeAssistant
|
|
from homeassistant.helpers.json import JSONEncoder
|
|
from homeassistant.setup import async_setup_component
|
|
import homeassistant.util.dt as dt_util
|
|
|
|
from tests.components.recorder.common import (
|
|
assert_dict_of_states_equal_without_context_and_last_changed,
|
|
assert_multiple_states_equal_without_context,
|
|
assert_multiple_states_equal_without_context_and_last_changed,
|
|
assert_states_equal_without_context,
|
|
async_wait_recording_done,
|
|
wait_recording_done,
|
|
)
|
|
from tests.typing import ClientSessionGenerator
|
|
|
|
|
|
def listeners_without_writes(listeners: dict[str, int]) -> dict[str, int]:
    """Return listeners without final write listeners since we are not testing for these."""
    filtered: dict[str, int] = {}
    for event_type, count in listeners.items():
        # Final-write listeners are an implementation detail of shutdown
        # handling and are deliberately excluded from these assertions.
        if event_type == EVENT_HOMEASSISTANT_FINAL_WRITE:
            continue
        filtered[event_type] = count
    return filtered
|
|
|
|
|
|
@pytest.mark.usefixtures("hass_history")
def test_setup() -> None:
    """Test setup method of history.

    The ``hass_history`` fixture performs the component setup and the
    related assertions; this test only needs to request the fixture.
    """
    # Verification occurs in the fixture
|
|
|
|
|
|
def test_get_significant_states(hass_history) -> None:
    """Test that only significant states are returned.

    Every thermostat change that includes an attribute change must come
    back, while the media player only reports state updates (attribute-only
    changes are not significant and are not returned).
    """
    hass = hass_history
    start, end, expected = record_states(hass)
    hist = get_significant_states(hass, start, end, entity_ids=list(expected))
    assert_dict_of_states_equal_without_context_and_last_changed(expected, hist)
|
|
|
|
|
|
def test_get_significant_states_minimal_response(hass_history) -> None:
    """Test that only significant states are returned.

    When minimal responses is set only the first and
    last states return a complete state.

    We should get back every thermostat change that
    includes an attribute change, but only the state updates for
    media player (attribute changes are not significant and not returned).
    """
    hass = hass_history
    zero, four, states = record_states(hass)
    hist = get_significant_states(
        hass, zero, four, minimal_response=True, entity_ids=list(states)
    )
    entities_with_reducable_states = [
        "media_player.test",
        "media_player.test3",
    ]

    # All states for the reducable entities are reduced down to just
    # last_changed and state when minimal_response is set, except for the
    # first state. We use JSONEncoder to make sure that our pre-encoded
    # last_changed is always the same as what will happen when encoding a
    # native state.
    for entity_id in entities_with_reducable_states:
        entity_states = states[entity_id]
        for state_idx in range(1, len(entity_states)):
            input_state = entity_states[state_idx]
            # Fixed: the original had a duplicated assignment here
            # (`orig_last_changed = orig_last_changed = json.dumps(...)`).
            orig_last_changed = json.dumps(
                process_timestamp(input_state.last_changed),
                cls=JSONEncoder,
            ).replace('"', "")
            orig_state = input_state.state
            entity_states[state_idx] = {
                "last_changed": orig_last_changed,
                "state": orig_state,
            }
    assert len(hist) == len(states)
    assert_states_equal_without_context(
        states["media_player.test"][0], hist["media_player.test"][0]
    )
    assert states["media_player.test"][1] == hist["media_player.test"][1]
    assert states["media_player.test"][2] == hist["media_player.test"][2]

    assert_multiple_states_equal_without_context(
        states["media_player.test2"], hist["media_player.test2"]
    )
    assert_states_equal_without_context(
        states["media_player.test3"][0], hist["media_player.test3"][0]
    )
    assert states["media_player.test3"][1] == hist["media_player.test3"][1]

    assert_multiple_states_equal_without_context(
        states["script.can_cancel_this_one"], hist["script.can_cancel_this_one"]
    )
    assert_multiple_states_equal_without_context_and_last_changed(
        states["thermostat.test"], hist["thermostat.test"]
    )
    assert_multiple_states_equal_without_context_and_last_changed(
        states["thermostat.test2"], hist["thermostat.test2"]
    )
|
|
|
|
|
|
def test_get_significant_states_with_initial(hass_history) -> None:
    """Test that only significant states are returned.

    Thermostat changes with attribute updates are all returned, while the
    media player only reports state changes (attribute-only updates are
    not significant and are not returned).
    """
    hass = hass_history
    zero, four, states = record_states(hass)
    start_time = zero + timedelta(seconds=1.5)
    for entity_id in states:
        if entity_id == "media_player.test":
            states[entity_id] = states[entity_id][1:]
        # A state recorded before the query start time is returned with its
        # last_updated and last_changed clamped to the start time.
        for state in states[entity_id]:
            if state.last_updated < start_time:
                state.last_updated = start_time
                state.last_changed = start_time

    hist = get_significant_states(
        hass, start_time, four, include_start_time_state=True, entity_ids=list(states)
    )
    assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
|
|
|
|
|
|
def test_get_significant_states_without_initial(hass_history) -> None:
    """Test that only significant states are returned.

    Thermostat changes with attribute updates are all returned, while the
    media player only reports state changes (attribute-only updates are
    not significant and are not returned).
    """
    hass = hass_history
    zero, four, states = record_states(hass)
    one = zero + timedelta(seconds=1)
    one_with_microsecond = zero + timedelta(seconds=1, microseconds=1)
    one_and_half = zero + timedelta(seconds=1.5)
    # Drop the states recorded before the query window; without the start
    # time state they must not appear in the result.
    for entity_id in states:
        states[entity_id] = [
            state
            for state in states[entity_id]
            if state.last_changed not in (one, one_with_microsecond)
        ]
    del states["media_player.test2"]

    hist = get_significant_states(
        hass,
        one_and_half,
        four,
        include_start_time_state=False,
        entity_ids=list(states),
    )
    assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
|
|
|
|
|
|
def test_get_significant_states_entity_id(hass_history) -> None:
    """Test that only significant states are returned for one entity."""
    hass = hass_history
    zero, four, states = record_states(hass)
    # Keep only the single entity we query for.
    for removed in (
        "media_player.test2",
        "media_player.test3",
        "thermostat.test",
        "thermostat.test2",
        "script.can_cancel_this_one",
    ):
        del states[removed]

    hist = get_significant_states(hass, zero, four, ["media_player.test"])
    assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
|
|
|
|
|
|
def test_get_significant_states_multiple_entity_ids(hass_history) -> None:
    """Test that only significant states are returned for the queried entities."""
    hass = hass_history
    zero, four, states = record_states(hass)
    # Keep only the two entities we query for.
    for removed in (
        "media_player.test2",
        "media_player.test3",
        "thermostat.test2",
        "script.can_cancel_this_one",
    ):
        del states[removed]

    hist = get_significant_states(
        hass,
        zero,
        four,
        ["media_player.test", "thermostat.test"],
    )
    assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
|
|
|
|
|
|
def test_get_significant_states_are_ordered(hass_history) -> None:
    """Test order of results from get_significant_states.

    When entity ids are given, the results should be returned with the data
    in the same order.
    """
    hass = hass_history
    zero, four, _states = record_states(hass)
    # The result dict must preserve whichever order the caller asked for.
    for requested_order in (
        ["media_player.test", "media_player.test2"],
        ["media_player.test2", "media_player.test"],
    ):
        hist = get_significant_states(hass, zero, four, requested_order)
        assert list(hist) == requested_order
|
|
|
|
|
|
def test_get_significant_states_only(hass_history) -> None:
    """Test significant states when significant_states_only is set.

    Records an attribute-only change, a state-only change and a combined
    change, then verifies the attribute-only change is filtered out when
    significant_changes_only=True and included when it is False.
    """
    hass = hass_history
    entity_id = "sensor.test"

    def set_state(state, **kwargs):
        """Set the state."""
        hass.states.set(entity_id, state, **kwargs)
        wait_recording_done(hass)
        return hass.states.get(entity_id)

    start = dt_util.utcnow() - timedelta(minutes=4)
    points = []
    for i in range(1, 4):
        points.append(start + timedelta(minutes=i))

    states = []
    # Baseline state at the period start; not part of the expected result.
    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow", return_value=start
    ):
        set_state("123", attributes={"attribute": 10.64})

    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow",
        return_value=points[0],
    ):
        # Attributes are different, state not
        states.append(set_state("123", attributes={"attribute": 21.42}))

    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow",
        return_value=points[1],
    ):
        # state is different, attributes not
        states.append(set_state("32", attributes={"attribute": 21.42}))

    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow",
        return_value=points[2],
    ):
        # everything is different
        states.append(set_state("412", attributes={"attribute": 54.23}))

    # With significant_changes_only the attribute-only update (states[0])
    # must be dropped.
    hist = get_significant_states(
        hass,
        start,
        significant_changes_only=True,
        entity_ids=list({state.entity_id for state in states}),
    )

    assert len(hist[entity_id]) == 2
    assert not any(
        state.last_updated == states[0].last_updated for state in hist[entity_id]
    )
    assert any(
        state.last_updated == states[1].last_updated for state in hist[entity_id]
    )
    assert any(
        state.last_updated == states[2].last_updated for state in hist[entity_id]
    )

    # Without the filter all three recorded updates come back.
    hist = get_significant_states(
        hass,
        start,
        significant_changes_only=False,
        entity_ids=list({state.entity_id for state in states}),
    )

    assert len(hist[entity_id]) == 3
    assert_multiple_states_equal_without_context_and_last_changed(
        states, hist[entity_id]
    )
|
|
|
|
|
|
def check_significant_states(hass, zero, four, states, config):
    """Check if significant states are retrieved."""
    # config is accepted for signature compatibility with callers but is
    # not used by the check itself.
    hist = get_significant_states(hass, zero, four)
    assert_dict_of_states_equal_without_context_and_last_changed(states, hist)
|
|
|
|
|
|
def record_states(hass):
    """Record some test states.

    We inject a bunch of state updates from media player, zone and
    thermostat.

    Returns a ``(zero, four, states)`` tuple: the period start, the period
    end, and a dict mapping entity_id to the list of states that history
    queries are expected to return for that entity.
    """
    mp = "media_player.test"
    mp2 = "media_player.test2"
    mp3 = "media_player.test3"
    therm = "thermostat.test"
    therm2 = "thermostat.test2"
    zone = "zone.home"
    script_c = "script.can_cancel_this_one"

    def set_state(entity_id, state, **kwargs):
        """Set the state."""
        hass.states.set(entity_id, state, **kwargs)
        wait_recording_done(hass)
        return hass.states.get(entity_id)

    # One-second-spaced points spanning the recorded period.
    zero = dt_util.utcnow()
    one = zero + timedelta(seconds=1)
    two = one + timedelta(seconds=1)
    three = two + timedelta(seconds=1)
    four = three + timedelta(seconds=1)

    states = {therm: [], therm2: [], mp: [], mp2: [], mp3: [], script_c: []}
    # t = one: initial states for the media players and the thermostat.
    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow", return_value=one
    ):
        states[mp].append(
            set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)})
        )
        states[mp2].append(
            set_state(mp2, "YouTube", attributes={"media_title": str(sentinel.mt2)})
        )
        states[mp3].append(
            set_state(mp3, "idle", attributes={"media_title": str(sentinel.mt1)})
        )
        states[therm].append(
            set_state(therm, 20, attributes={"current_temperature": 19.5})
        )

    # t = one + 1µs: a second media player update within the same second.
    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow",
        return_value=one + timedelta(microseconds=1),
    ):
        states[mp].append(
            set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)})
        )

    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow", return_value=two
    ):
        # This state will be skipped only different in time
        set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt3)})
        # This state will be skipped because domain is excluded
        set_state(zone, "zoning")
        states[script_c].append(
            set_state(script_c, "off", attributes={"can_cancel": True})
        )
        states[therm].append(
            set_state(therm, 21, attributes={"current_temperature": 19.8})
        )
        states[therm2].append(
            set_state(therm2, 20, attributes={"current_temperature": 19})
        )

    # t = three: final round of updates before the period end.
    with patch(
        "homeassistant.components.recorder.core.dt_util.utcnow", return_value=three
    ):
        states[mp].append(
            set_state(mp, "Netflix", attributes={"media_title": str(sentinel.mt4)})
        )
        states[mp3].append(
            set_state(mp3, "Netflix", attributes={"media_title": str(sentinel.mt3)})
        )
        # Attributes changed even though state is the same
        states[therm].append(
            set_state(therm, 21, attributes={"current_temperature": 20})
        )

    return zero, four, states
|
|
|
|
|
|
async def test_fetch_period_api(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test the fetch period view for history."""
    await async_setup_component(hass, "history", {})
    client = await hass_client()
    # Only the status code matters here; an empty result set is fine.
    url = f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=sensor.power"
    response = await client.get(url)
    assert response.status == HTTPStatus.OK
|
|
|
|
|
|
async def test_fetch_period_api_with_use_include_order(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test the fetch period view for history with include order."""
    config = {history.DOMAIN: {history.CONF_ORDER: True}}
    await async_setup_component(hass, "history", config)
    client = await hass_client()
    url = f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=sensor.power"
    response = await client.get(url)
    assert response.status == HTTPStatus.OK
|
|
|
|
|
|
async def test_fetch_period_api_with_minimal_response(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test the fetch period view for history with minimal_response."""
    start_time = dt_util.utcnow()
    await async_setup_component(hass, "history", {})

    # Record 0 and 50, each flushed to the recorder before the next write.
    for value in (0, 50):
        hass.states.async_set("sensor.power", value, {"attr": "any"})
        await async_wait_recording_done(hass)
    # Record 23 twice; the second write does not change last_changed.
    hass.states.async_set("sensor.power", 23, {"attr": "any"})
    last_changed = hass.states.get("sensor.power").last_changed
    await async_wait_recording_done(hass)
    hass.states.async_set("sensor.power", 23, {"attr": "any"})
    await async_wait_recording_done(hass)

    client = await hass_client()
    response = await client.get(
        f"/api/history/period/{start_time.isoformat()}?filter_entity_id=sensor.power&minimal_response&no_attributes"
    )
    assert response.status == HTTPStatus.OK
    state_list = (await response.json())[0]
    assert len(state_list) == 3
    first, second, third = state_list

    # The first state is complete (entity_id + attributes + state).
    assert first["entity_id"] == "sensor.power"
    assert first["attributes"] == {}
    assert first["state"] == "0"

    # Intermediate states are minimal: no entity_id, no attributes.
    assert "attributes" not in second
    assert "entity_id" not in second
    assert second["state"] == "50"

    assert "attributes" not in third
    assert "entity_id" not in third
    assert third["state"] == "23"
    expected_last_changed = json.dumps(
        process_timestamp(last_changed),
        cls=JSONEncoder,
    ).replace('"', "")
    assert third["last_changed"] == expected_last_changed
|
|
|
|
|
|
async def test_fetch_period_api_with_no_timestamp(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test the fetch period view for history with no timestamp."""
    await async_setup_component(hass, "history", {})
    client = await hass_client()
    # Omitting the timestamp segment must still succeed.
    response = await client.get("/api/history/period?filter_entity_id=sensor.power")
    assert response.status == HTTPStatus.OK
|
|
|
|
|
|
async def test_fetch_period_api_with_include_order(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test the fetch period view for history."""
    config = {
        "history": {
            "use_include_order": True,
            "include": {"entities": ["light.kitchen"]},
        }
    }
    await async_setup_component(hass, "history", config)
    client = await hass_client()
    response = await client.get(
        f"/api/history/period/{dt_util.utcnow().isoformat()}",
        params={"filter_entity_id": "non.existing,something.else"},
    )
    assert response.status == HTTPStatus.OK
|
|
|
|
|
|
async def test_entity_ids_limit_via_api(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test limiting history to entity_ids."""
    await async_setup_component(hass, "history", {"history": {}})
    for entity_id in ("light.kitchen", "light.cow", "light.nomatch"):
        hass.states.async_set(entity_id, "on")

    await async_wait_recording_done(hass)

    client = await hass_client()
    response = await client.get(
        f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=light.kitchen,light.cow",
    )
    assert response.status == HTTPStatus.OK
    result = await response.json()
    # Only the two requested entities come back, in request order.
    assert len(result) == 2
    assert result[0][0]["entity_id"] == "light.kitchen"
    assert result[1][0]["entity_id"] == "light.cow"
|
|
|
|
|
|
async def test_entity_ids_limit_via_api_with_skip_initial_state(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test limiting history to entity_ids with skip_initial_state."""
    await async_setup_component(hass, "history", {"history": {}})
    for entity_id in ("light.kitchen", "light.cow", "light.nomatch"):
        hass.states.async_set(entity_id, "on")

    await async_wait_recording_done(hass)

    client = await hass_client()
    # Starting "now": all matching states are initial states and skipped.
    response = await client.get(
        f"/api/history/period/{dt_util.utcnow().isoformat()}?filter_entity_id=light.kitchen,light.cow&skip_initial_state",
    )
    assert response.status == HTTPStatus.OK
    result = await response.json()
    assert len(result) == 0

    # Starting a minute earlier: the recorded states are no longer initial.
    when = dt_util.utcnow() - timedelta(minutes=1)
    response = await client.get(
        f"/api/history/period/{when.isoformat()}?filter_entity_id=light.kitchen,light.cow&skip_initial_state",
    )
    assert response.status == HTTPStatus.OK
    result = await response.json()
    assert len(result) == 2
    assert result[0][0]["entity_id"] == "light.kitchen"
    assert result[1][0]["entity_id"] == "light.cow"
|
|
|
|
|
|
async def test_fetch_period_api_before_history_started(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test the fetch period view for history for the far past."""
    await async_setup_component(hass, "history", {})
    await async_wait_recording_done(hass)
    far_past = dt_util.utcnow() - timedelta(days=365)

    client = await hass_client()
    response = await client.get(
        f"/api/history/period/{far_past.isoformat()}?filter_entity_id=light.kitchen",
    )
    assert response.status == HTTPStatus.OK
    # Nothing was recorded that long ago.
    assert await response.json() == []
|
|
|
|
|
|
async def test_fetch_period_api_far_future(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test the fetch period view for history for the far future."""
    await async_setup_component(hass, "history", {})
    await async_wait_recording_done(hass)
    far_future = dt_util.utcnow() + timedelta(days=365)

    client = await hass_client()
    response = await client.get(
        f"/api/history/period/{far_future.isoformat()}?filter_entity_id=light.kitchen",
    )
    assert response.status == HTTPStatus.OK
    # Nothing can exist in a period that has not happened yet.
    assert await response.json() == []
|
|
|
|
|
|
async def test_fetch_period_api_with_invalid_datetime(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test the fetch period view for history with an invalid date time."""
    await async_setup_component(hass, "history", {})
    await async_wait_recording_done(hass)
    client = await hass_client()
    response = await client.get(
        "/api/history/period/INVALID?filter_entity_id=light.kitchen",
    )
    assert response.status == HTTPStatus.BAD_REQUEST
    assert await response.json() == {"message": "Invalid datetime"}
|
|
|
|
|
|
async def test_fetch_period_api_invalid_end_time(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test the fetch period view for history with an invalid end time."""
    await async_setup_component(hass, "history", {})
    await async_wait_recording_done(hass)
    far_past = dt_util.utcnow() - timedelta(days=365)

    client = await hass_client()
    response = await client.get(
        f"/api/history/period/{far_past.isoformat()}",
        params={"filter_entity_id": "light.kitchen", "end_time": "INVALID"},
    )
    assert response.status == HTTPStatus.BAD_REQUEST
    assert await response.json() == {"message": "Invalid end_time"}
|
|
|
|
|
|
async def test_entity_ids_limit_via_api_with_end_time(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test limiting history to entity_ids with end_time."""
    await async_setup_component(hass, "history", {"history": {}})
    start = dt_util.utcnow()
    for entity_id in ("light.kitchen", "light.cow", "light.nomatch"):
        hass.states.async_set(entity_id, "on")

    await async_wait_recording_done(hass)

    end_time = start + timedelta(minutes=1)
    future_second = dt_util.utcnow() + timedelta(seconds=1)

    client = await hass_client()
    # A window starting after the recorded states yields nothing.
    response = await client.get(
        f"/api/history/period/{future_second.isoformat()}",
        params={
            "filter_entity_id": "light.kitchen,light.cow",
            "end_time": end_time.isoformat(),
        },
    )
    assert response.status == HTTPStatus.OK
    result = await response.json()
    assert len(result) == 0

    # A window covering the recorded states returns both entities.
    when = start - timedelta(minutes=1)
    response = await client.get(
        f"/api/history/period/{when.isoformat()}",
        params={
            "filter_entity_id": "light.kitchen,light.cow",
            "end_time": end_time.isoformat(),
        },
    )
    assert response.status == HTTPStatus.OK
    result = await response.json()
    assert len(result) == 2
    assert result[0][0]["entity_id"] == "light.kitchen"
    assert result[1][0]["entity_id"] == "light.cow"
|
|
|
|
|
|
async def test_fetch_period_api_with_no_entity_ids(
    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
) -> None:
    """Test the fetch period view fails when filter_entity_id is omitted."""
    await async_setup_component(hass, "history", {})
    await async_wait_recording_done(hass)

    yesterday = dt_util.utcnow() - timedelta(days=1)

    client = await hass_client()
    response = await client.get(f"/api/history/period/{yesterday.isoformat()}")
    assert response.status == HTTPStatus.BAD_REQUEST
    assert await response.json() == {"message": "filter_entity_id is missing"}
|