mirror of https://github.com/home-assistant/core.git
Increase test scope of MariaDB + PostgreSQL tests (#87019)
Co-authored-by: J. Nick Koston <nick@koston.org>
commit 4ca0a24f87
parent 026b4f5307
@@ -114,7 +114,10 @@ tests: &tests
   - tests/auth/**
   - tests/backports/**
   - tests/common.py
+  - tests/components/history/**
+  - tests/components/logbook/**
   - tests/components/recorder/**
+  - tests/components/sensor/**
   - tests/conftest.py
   - tests/hassfest/**
   - tests/helpers/**

.github/workflows/ci.yaml (10 changed lines)
@@ -1012,7 +1012,10 @@ jobs:
             --durations=10 \
             -p no:sugar \
             --dburl=mysql://root:password@127.0.0.1/homeassistant-test \
-            tests/components/recorder
+            tests/components/history \
+            tests/components/logbook \
+            tests/components/recorder \
+            tests/components/sensor
       - name: Upload coverage artifact
         uses: actions/upload-artifact@v3.1.2
         with:

@@ -1116,7 +1119,10 @@ jobs:
             --durations-min=10 \
             -p no:sugar \
             --dburl=postgresql://postgres:password@127.0.0.1/homeassistant-test \
-            tests/components/recorder
+            tests/components/history \
+            tests/components/logbook \
+            tests/components/recorder \
+            tests/components/sensor
       - name: Upload coverage artifact
         uses: actions/upload-artifact@v3.1.0
         with:

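Both database jobs now run the same four test suites; only the --dburl value differs. As a rough local equivalent, the sketch below invokes pytest with the same selection and the --dburl option used above. It is a minimal approximation only: it assumes a MariaDB server is already reachable with the workflow's credentials, it is run from the repository root, and it omits the CI-only wrapper flags (timeouts, xdist workers, coverage).

# Minimal sketch of a local run mirroring the expanded MariaDB job above.
# Swap the URL for the postgresql:// variant to mirror the PostgreSQL job.
# Assumes a reachable database with the workflow's credentials; this is not
# the exact CI invocation.
import sys

import pytest

args = [
    "-p",
    "no:sugar",
    "--dburl=mysql://root:password@127.0.0.1/homeassistant-test",
    "tests/components/history",
    "tests/components/logbook",
    "tests/components/recorder",
    "tests/components/sensor",
]
sys.exit(pytest.main(args))
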
@@ -166,10 +166,15 @@ def test_get_significant_states_without_initial(hass_history) -> None:
     hass = hass_history
     zero, four, states = record_states(hass)
     one = zero + timedelta(seconds=1)
+    one_with_microsecond = zero + timedelta(seconds=1, microseconds=1)
     one_and_half = zero + timedelta(seconds=1.5)
     for entity_id in states:
         states[entity_id] = list(
-            filter(lambda s: s.last_changed != one, states[entity_id])
+            filter(
+                lambda s: s.last_changed != one
+                and s.last_changed != one_with_microsecond,
+                states[entity_id],
+            )
         )
     del states["media_player.test2"]

@@ -587,9 +592,6 @@ def record_states(hass):
         states[mp].append(
             set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)})
         )
-        states[mp].append(
-            set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)})
-        )
         states[mp2].append(
             set_state(mp2, "YouTube", attributes={"media_title": str(sentinel.mt2)})
         )

@@ -600,6 +602,14 @@ def record_states(hass):
             set_state(therm, 20, attributes={"current_temperature": 19.5})
         )

+    with patch(
+        "homeassistant.components.recorder.core.dt_util.utcnow",
+        return_value=one + timedelta(microseconds=1),
+    ):
+        states[mp].append(
+            set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)})
+        )
+
     with patch(
         "homeassistant.components.recorder.core.dt_util.utcnow", return_value=two
     ):

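Taken together, the hunks above add one more recorded state one microsecond after `one` (the extra patched utcnow block in record_states) and widen the filter in the significant-states test so that both timestamps are excluded from the expected data. The standalone sketch below illustrates why filtering on the exact `one` timestamp alone would leave the microsecond-offset state behind; the FakeState class and the sample values are invented for the illustration and are not part of the test fixtures.

# Standalone illustration of the widened filter; FakeState and the sample
# timestamps are made up for this sketch and are not from the test suite.
from dataclasses import dataclass
from datetime import datetime, timedelta


@dataclass
class FakeState:
    state: str
    last_changed: datetime


zero = datetime(2023, 1, 1)
one = zero + timedelta(seconds=1)
one_with_microsecond = zero + timedelta(seconds=1, microseconds=1)

states = [
    FakeState("idle", one),
    FakeState("YouTube", one_with_microsecond),
    FakeState("Netflix", zero + timedelta(seconds=2)),
]

# Filtering on the exact timestamp only: the state recorded one microsecond
# later slips through.
only_one = [s for s in states if s.last_changed != one]
assert [s.state for s in only_one] == ["YouTube", "Netflix"]

# Filtering on both timestamps, as the updated tests do.
both = [
    s
    for s in states
    if s.last_changed != one and s.last_changed != one_with_microsecond
]
assert [s.state for s in both] == ["Netflix"]
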
@@ -176,12 +176,13 @@ def test_get_significant_states_with_initial(hass_history) -> None:
     hass = hass_history
     zero, four, states = record_states(hass)
     one = zero + timedelta(seconds=1)
+    one_with_microsecond = zero + timedelta(seconds=1, microseconds=1)
     one_and_half = zero + timedelta(seconds=1.5)
     for entity_id in states:
         if entity_id == "media_player.test":
             states[entity_id] = states[entity_id][1:]
         for state in states[entity_id]:
-            if state.last_changed == one:
+            if state.last_changed == one or state.last_changed == one_with_microsecond:
                 state.last_changed = one_and_half
                 state.last_updated = one_and_half

@@ -205,10 +206,15 @@ def test_get_significant_states_without_initial(hass_history) -> None:
     hass = hass_history
     zero, four, states = record_states(hass)
     one = zero + timedelta(seconds=1)
+    one_with_microsecond = zero + timedelta(seconds=1, microseconds=1)
     one_and_half = zero + timedelta(seconds=1.5)
     for entity_id in states:
         states[entity_id] = list(
-            filter(lambda s: s.last_changed != one, states[entity_id])
+            filter(
+                lambda s: s.last_changed != one
+                and s.last_changed != one_with_microsecond,
+                states[entity_id],
+            )
         )
     del states["media_player.test2"]

@@ -626,9 +632,6 @@ def record_states(hass):
         states[mp].append(
             set_state(mp, "idle", attributes={"media_title": str(sentinel.mt1)})
         )
-        states[mp].append(
-            set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)})
-        )
         states[mp2].append(
             set_state(mp2, "YouTube", attributes={"media_title": str(sentinel.mt2)})
         )

@@ -639,6 +642,14 @@ def record_states(hass):
             set_state(therm, 20, attributes={"current_temperature": 19.5})
         )

+    with patch(
+        "homeassistant.components.recorder.core.dt_util.utcnow",
+        return_value=one + timedelta(microseconds=1),
+    ):
+        states[mp].append(
+            set_state(mp, "YouTube", attributes={"media_title": str(sentinel.mt2)})
+        )
+
     with patch(
         "homeassistant.components.recorder.core.dt_util.utcnow", return_value=two
     ):

@@ -2324,15 +2324,16 @@ async def test_recorder_is_far_behind(
    assert msg["id"] == 7
    assert msg["type"] == "event"
    assert msg["event"]["events"] == []

    hass.bus.async_fire("mock_event", {"device_id": device.id, "message": "1"})
    await hass.async_block_till_done()
    await async_wait_recording_done(hass)

    msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
    assert msg["id"] == 7
    assert msg["type"] == "event"
    assert msg["event"]["events"] == []

    hass.bus.async_fire("mock_event", {"device_id": device.id, "message": "1"})
    await hass.async_block_till_done()

    msg = await asyncio.wait_for(websocket_client.receive_json(), 2)
    assert msg["id"] == 7
    assert msg["type"] == "event"

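The hunk above combines `await hass.async_block_till_done()` with `await async_wait_recording_done(hass)`. The first only flushes pending work on the event loop; the recorder commits to its database separately, and that gap is wider when the database is a real MariaDB or PostgreSQL server. The toy example below uses stand-in coroutines (not Home Assistant APIs) to show why a test that asserts on committed data needs the second kind of wait.

# Toy illustration only: a fake "recorder" task that commits queued events
# after a delay, standing in for the real recorder thread. Nothing here is a
# Home Assistant API.
import asyncio


async def main() -> None:
    committed: list[str] = []
    queue: asyncio.Queue[str] = asyncio.Queue()

    async def fake_recorder() -> None:
        # Stand-in for the recorder: commits queued events after a short delay.
        while True:
            event = await queue.get()
            await asyncio.sleep(0.05)  # simulated database commit latency
            committed.append(event)
            queue.task_done()

    writer = asyncio.create_task(fake_recorder())

    queue.put_nowait("mock_event")
    await asyncio.sleep(0)  # roughly async_block_till_done: let pending tasks run
    assert committed == []  # dispatched, but not yet committed

    await queue.join()  # roughly async_wait_recording_done: wait for the commit
    assert committed == ["mock_event"]

    writer.cancel()


asyncio.run(main())
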
@@ -1438,7 +1438,7 @@ def test_delete_metadata_duplicates_many(caplog, tmpdir):
         session.add(
             recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1)
         )
-        for _ in range(3000):
+        for _ in range(1100):
             session.add(
                 recorder.db_schema.StatisticsMeta.from_meta(
                     external_energy_metadata_1
@@ -1468,15 +1468,15 @@ def test_delete_metadata_duplicates_many(caplog, tmpdir):
     wait_recording_done(hass)
     wait_recording_done(hass)

-    assert "Deleted 3002 duplicated statistics_meta rows" in caplog.text
+    assert "Deleted 1102 duplicated statistics_meta rows" in caplog.text
     with session_scope(hass=hass) as session:
         tmp = session.query(recorder.db_schema.StatisticsMeta).all()
         assert len(tmp) == 3
-        assert tmp[0].id == 3001
+        assert tmp[0].id == 1101
         assert tmp[0].statistic_id == "test:total_energy_import_tariff_1"
-        assert tmp[1].id == 3003
+        assert tmp[1].id == 1103
         assert tmp[1].statistic_id == "test:total_energy_import_tariff_2"
-        assert tmp[2].id == 3005
+        assert tmp[2].id == 1105
         assert tmp[2].statistic_id == "test:fossil_percentage"

     hass.stop()

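The new numbers follow from the smaller loop, assuming the test inserts one statistics_meta row for the first tariff plus the looped duplicates, then one row and one duplicate for each of the other two statistic ids, and that deduplication keeps the last-inserted row per id. The helper below is purely illustrative (it is not part of the test) and re-derives the asserted ids and the deleted-row count for both loop sizes.

# Illustrative re-derivation of the asserted values, under the assumptions
# stated above; not code from the test.
def expected_ids_and_deleted(n_duplicates: int) -> tuple[list[int], int]:
    last_tariff_1 = 1 + n_duplicates  # original row plus its duplicates
    ids_kept = [
        last_tariff_1,      # last tariff_1 row
        last_tariff_1 + 2,  # last tariff_2 row
        last_tariff_1 + 4,  # last fossil_percentage row
    ]
    total_rows = last_tariff_1 + 4
    return ids_kept, total_rows - 3  # everything except the 3 kept rows


assert expected_ids_and_deleted(3000) == ([3001, 3003, 3005], 3002)  # old values
assert expected_ids_and_deleted(1100) == ([1101, 1103, 1105], 1102)  # new values
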
@@ -3094,7 +3094,7 @@ def test_compile_statistics_hourly_daily_monthly_summary(hass_recorder, caplog):
             sum_attributes,
             seq[j : j + 1],
         )
-        start_meter = start + timedelta(minutes=1)
+        start_meter += timedelta(minutes=1)
         states["sensor.test4"] += _states["sensor.test4"]
         last_state = last_states["sensor.test4"]
         expected_states["sensor.test4"].append(seq[-1])
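The final hunk is a one-line fix: the surrounding `seq[j : j + 1]` context suggests the assignment runs inside a loop, where `start_meter = start + timedelta(minutes=1)` recomputes the same instant from `start` on every pass, while `start_meter += timedelta(minutes=1)` keeps advancing. The sketch below shows that difference with plain datetimes; it is not taken from the test.

# Standalone sketch of the behavioural difference in the changed line,
# assuming it executes repeatedly while `start` stays fixed; not test code.
from datetime import datetime, timedelta

start = datetime(2023, 1, 1)

# Old form: recomputed from `start`, so every pass lands on the same instant.
old_values = []
start_meter = start
for _ in range(3):
    start_meter = start + timedelta(minutes=1)
    old_values.append(start_meter)
assert old_values == [start + timedelta(minutes=1)] * 3

# New form: accumulates, so each pass advances by another minute.
new_values = []
start_meter = start
for _ in range(3):
    start_meter += timedelta(minutes=1)
    new_values.append(start_meter)
assert new_values == [start + timedelta(minutes=m) for m in (1, 2, 3)]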