Fix last state in history minimal response when all the states at the end are skipped (#72203)

J. Nick Koston 2022-05-19 22:58:32 -05:00 committed by GitHub
parent 5c2c602686
commit 1001f9e39a
3 changed files with 49 additions and 36 deletions

View File

@@ -695,8 +695,6 @@ def _sorted_states_to_dict(
             prev_state = first_state.state
             ent_results.append(state_class(first_state, attr_cache))

-        initial_state_count = len(ent_results)
-        row = None
         for row in group:
             # With minimal response we do not care about attribute
             # changes so we can filter out duplicate states
@@ -716,12 +714,6 @@ def _sorted_states_to_dict(
             )
             prev_state = state

-        if row and len(ent_results) != initial_state_count:
-            # There was at least one state change
-            # replace the last minimal state with
-            # a full state
-            ent_results[-1] = state_class(row, attr_cache)
-
    # If there are no states beyond the initial state,
    # the state a was never popped from initial_states
    for ent_id, row in initial_states.items():
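The removed block above is what produced the wrong last state. The sketch below is a minimal standalone toy model of the failure mode, with invented names (reduce_states, plain dict rows) rather than the recorder's actual implementation: under minimal response, consecutive duplicate states are skipped, but the old code then overwrote the last reduced entry with a "full" state built from the last row iterated, even when that row had been skipped as a duplicate at the end of the window.

from datetime import datetime, timedelta


def reduce_states(rows, keep_last_full=False):
    """Toy model of the minimal-response reduction (hypothetical helper).

    rows are plain dicts with "state" and "last_updated". Consecutive
    duplicate states are skipped. With keep_last_full=True the removed
    behaviour is emulated: the last reduced entry is overwritten with a
    "full" copy of the last iterated row, even if that row was skipped.
    """
    results = [dict(rows[0], full=True)]  # the first state stays a full state
    prev_state = rows[0]["state"]
    initial_count = len(results)
    row = None
    for row in rows[1:]:
        if row["state"] == prev_state:
            continue  # duplicate under minimal response: skipped
        results.append({"state": row["state"], "last_changed": row["last_updated"]})
        prev_state = row["state"]
    if keep_last_full and row is not None and len(results) != initial_count:
        # Removed behaviour: rebuild the last entry from the last iterated
        # row, which is wrong when that row was a skipped duplicate.
        results[-1] = dict(row, full=True)
    return results


start = datetime(2022, 5, 19, 12, 0)
rows = [
    {"state": "idle", "last_updated": start},
    {"state": "playing", "last_updated": start + timedelta(minutes=1)},
    # trailing duplicates at the end of the window: skipped by the reduction
    {"state": "playing", "last_updated": start + timedelta(minutes=2)},
    {"state": "playing", "last_updated": start + timedelta(minutes=3)},
]

# Old behaviour: the last entry is rebuilt from the 12:03 duplicate row.
print(reduce_states(rows, keep_last_full=True)[-1])
# New behaviour: the last entry keeps the reduced 12:01 state change.
print(reduce_states(rows)[-1])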

View File

@@ -61,23 +61,30 @@ def test_get_significant_states_minimal_response(hass_history):
     hist = get_significant_states(
         hass, zero, four, filters=history.Filters(), minimal_response=True
     )
+    entites_with_reducable_states = [
+        "media_player.test",
+        "media_player.test3",
+    ]

-    # The second media_player.test state is reduced
+    # All states for media_player.test state are reduced
     # down to last_changed and state when minimal_response
+    # is set except for the first state.
     # is set. We use JSONEncoder to make sure that are
     # pre-encoded last_changed is always the same as what
     # will happen with encoding a native state
-    input_state = states["media_player.test"][1]
-    orig_last_changed = json.dumps(
-        process_timestamp(input_state.last_changed),
-        cls=JSONEncoder,
-    ).replace('"', "")
-    orig_state = input_state.state
-    states["media_player.test"][1] = {
-        "last_changed": orig_last_changed,
-        "state": orig_state,
-    }
+    for entity_id in entites_with_reducable_states:
+        entity_states = states[entity_id]
+        for state_idx in range(1, len(entity_states)):
+            input_state = entity_states[state_idx]
+            orig_last_changed = orig_last_changed = json.dumps(
+                process_timestamp(input_state.last_changed),
+                cls=JSONEncoder,
+            ).replace('"', "")
+            orig_state = input_state.state
+            entity_states[state_idx] = {
+                "last_changed": orig_last_changed,
+                "state": orig_state,
+            }

     assert states == hist
@@ -616,6 +623,9 @@ async def test_fetch_period_api_with_minimal_response(hass, recorder_mock, hass_
     hass.states.async_set("sensor.power", 50, {"attr": "any"})
     await async_wait_recording_done(hass)
     hass.states.async_set("sensor.power", 23, {"attr": "any"})
+    last_changed = hass.states.get("sensor.power").last_changed
+    await async_wait_recording_done(hass)
+    hass.states.async_set("sensor.power", 23, {"attr": "any"})
     await async_wait_recording_done(hass)
     client = await hass_client()
     response = await client.get(
@@ -634,9 +644,13 @@ async def test_fetch_period_api_with_minimal_response(hass, recorder_mock, hass_
     assert "entity_id" not in state_list[1]
     assert state_list[1]["state"] == "50"

-    assert state_list[2]["entity_id"] == "sensor.power"
-    assert state_list[2]["attributes"] == {}
+    assert "attributes" not in state_list[2]
+    assert "entity_id" not in state_list[2]
     assert state_list[2]["state"] == "23"
+    assert state_list[2]["last_changed"] == json.dumps(
+        process_timestamp(last_changed),
+        cls=JSONEncoder,
+    ).replace('"', "")


 async def test_fetch_period_api_with_no_timestamp(hass, hass_client, recorder_mock):
@@ -1131,7 +1145,7 @@ async def test_history_during_period(hass, hass_ws_client, recorder_mock):
     assert "lc" not in sensor_test_history[1]  # skipped if the same a last_updated (lu)
     assert sensor_test_history[2]["s"] == "on"
-    assert sensor_test_history[2]["a"] == {}
+    assert "a" not in sensor_test_history[2]

     await client.send_json(
         {
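Both reworked tests compare a native last_changed datetime against the JSON string the history API returns, by pre-encoding the timestamp and stripping the surrounding quotes. The following is a standalone sketch of that pattern using a stand-in encoder, not Home Assistant's actual JSONEncoder or process_timestamp helpers.

import json
from datetime import datetime, timezone


class IsoDatetimeEncoder(json.JSONEncoder):
    """Stand-in for the recorder's JSONEncoder: serialize datetimes as ISO strings."""

    def default(self, o):
        if isinstance(o, datetime):
            return o.isoformat()
        return super().default(o)


last_changed = datetime(2022, 5, 19, 12, 1, tzinfo=timezone.utc)

# json.dumps() yields a quoted string ("2022-05-19T12:01:00+00:00");
# stripping the quotes leaves the plain value found in the reduced
# state dict, so it can be compared with a simple ==.
expected = json.dumps(last_changed, cls=IsoDatetimeEncoder).replace('"', "")
assert expected == "2022-05-19T12:01:00+00:00"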

View File

@@ -388,23 +388,30 @@ def test_get_significant_states_minimal_response(hass_recorder):
     hass = hass_recorder()
     zero, four, states = record_states(hass)
     hist = history.get_significant_states(hass, zero, four, minimal_response=True)
+    entites_with_reducable_states = [
+        "media_player.test",
+        "media_player.test3",
+    ]

-    # The second media_player.test state is reduced
+    # All states for media_player.test state are reduced
     # down to last_changed and state when minimal_response
+    # is set except for the first state.
     # is set. We use JSONEncoder to make sure that are
     # pre-encoded last_changed is always the same as what
     # will happen with encoding a native state
-    input_state = states["media_player.test"][1]
-    orig_last_changed = json.dumps(
-        process_timestamp(input_state.last_changed),
-        cls=JSONEncoder,
-    ).replace('"', "")
-    orig_state = input_state.state
-    states["media_player.test"][1] = {
-        "last_changed": orig_last_changed,
-        "state": orig_state,
-    }
+    for entity_id in entites_with_reducable_states:
+        entity_states = states[entity_id]
+        for state_idx in range(1, len(entity_states)):
+            input_state = entity_states[state_idx]
+            orig_last_changed = orig_last_changed = json.dumps(
+                process_timestamp(input_state.last_changed),
+                cls=JSONEncoder,
+            ).replace('"', "")
+            orig_state = input_state.state
+            entity_states[state_idx] = {
+                "last_changed": orig_last_changed,
+                "state": orig_state,
+            }

     assert states == hist
@@ -565,7 +572,7 @@ def test_get_significant_states_only(hass_recorder):
     assert states == hist[entity_id]


-def record_states(hass):
+def record_states(hass) -> tuple[datetime, datetime, dict[str, list[State]]]:
     """Record some test states.

     We inject a bunch of state updates from media player, zone and