Mirror of https://github.com/home-assistant/core.git (synced 2025-04-23 08:47:57 +00:00)
Fail CI on lingering tasks (#88905)
This commit is contained in:
parent 85bcf11aeb
commit c51bde9a26
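The hunks below repeat the same two added lines ahead of each affected test: a reminder comment and a parametrize marker that opts the test into the temporary lingering-task bypass. A minimal usage sketch of that opt-in, assuming a pytest-asyncio style runner; the test name and body are placeholders and not part of the commit:

import pytest


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_example_that_still_leaves_tasks() -> None:
    """Placeholder body; the commit only adds the two lines above each real test."""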
@@ -3,6 +3,7 @@
import json

from aiogithubapi import GitHubException
import pytest

from homeassistant.components.github.const import CONF_REPOSITORIES, DOMAIN
from homeassistant.core import HomeAssistant
@@ -15,6 +16,8 @@ from tests.test_util.aiohttp import AiohttpClientMocker
from tests.typing import ClientSessionGenerator


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_entry_diagnostics(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
@@ -54,6 +57,8 @@ async def test_entry_diagnostics(
)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_entry_diagnostics_exception(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,

@@ -11,6 +11,8 @@ from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_device_registry_cleanup(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
@@ -46,6 +48,8 @@ async def test_device_registry_cleanup(
assert len(devices) == 0


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_subscription_setup(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
@@ -61,6 +65,8 @@ async def test_subscription_setup(
)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_subscription_setup_polling_disabled(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,

@@ -1,6 +1,8 @@
"""Test GitHub sensor."""
import json

import pytest

from homeassistant.components.github.const import DOMAIN, FALLBACK_UPDATE_INTERVAL
from homeassistant.core import HomeAssistant
from homeassistant.util import dt
@@ -13,6 +15,8 @@ from tests.test_util.aiohttp import AiohttpClientMocker
TEST_SENSOR_ENTITY = "sensor.octocat_hello_world_latest_release"


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_sensor_updates_with_empty_release_array(
hass: HomeAssistant,
init_integration: MockConfigEntry,

@@ -69,6 +69,8 @@ def _aldb_dict(mem_addr):
}


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_get_aldb(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data
) -> None:
@@ -85,6 +87,8 @@ async def test_get_aldb(
assert len(result) == 5


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_change_aldb_record(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data
) -> None:
@@ -108,6 +112,8 @@ async def test_change_aldb_record(
_compare_records(rec, change_rec)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_create_aldb_record(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data
) -> None:
@@ -131,6 +137,8 @@ async def test_create_aldb_record(
_compare_records(rec, new_rec)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_write_aldb(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data
) -> None:
@@ -152,6 +160,8 @@ async def test_write_aldb(
assert devices.async_save.call_count == 1


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_load_aldb(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data
) -> None:
@@ -172,6 +182,8 @@ async def test_load_aldb(
assert devices.async_save.call_count == 1


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_reset_aldb(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data
) -> None:
@@ -203,6 +215,8 @@ async def test_reset_aldb(
assert not devices["33.33.33"].aldb.pending_changes


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_default_links(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data
) -> None:
@@ -224,6 +238,8 @@ async def test_default_links(
assert devices.async_save.call_count == 1


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_notify_on_aldb_status(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data
) -> None:
@@ -247,6 +263,8 @@ async def test_notify_on_aldb_status(
assert not msg["event"]["is_loading"]


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_notify_on_aldb_record_added(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data
) -> None:
@@ -274,6 +292,8 @@ async def test_notify_on_aldb_record_added(
assert msg["event"]["type"] == "record_loaded"


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_bad_address(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data
) -> None:

@@ -5,6 +5,7 @@ from unittest.mock import patch
from pyinsteon.constants import DeviceAction
from pyinsteon.topics import DEVICE_LIST_CHANGED
from pyinsteon.utils import publish_topic
import pytest

from homeassistant.components import insteon
from homeassistant.components.insteon.api import async_load_api
@@ -154,6 +155,8 @@ async def test_get_ha_device_name(
assert name == ""


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_add_device_api(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator
) -> None:

@@ -59,6 +59,8 @@ async def _setup(hass, hass_ws_client, scene_data):
return ws_client, devices


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_get_scenes(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data
) -> None:
@@ -73,6 +75,8 @@ async def test_get_scenes(
assert len(result["20"]) == 3


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_get_scene(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data
) -> None:
@@ -86,6 +90,8 @@ async def test_get_scene(
assert len(result["devices"]) == 3


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_save_scene(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json
) -> None:
@@ -115,6 +121,8 @@ async def test_save_scene(
assert result["scene_id"] == 20


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_save_new_scene(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json
) -> None:
@@ -144,6 +152,8 @@ async def test_save_new_scene(
assert result["scene_id"] == 21


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_save_scene_error(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json
) -> None:
@@ -173,6 +183,8 @@ async def test_save_scene_error(
assert result["scene_id"] == 20


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_delete_scene(
hass: HomeAssistant, hass_ws_client: WebSocketGenerator, scene_data, remove_json
) -> None:

@@ -1,4 +1,6 @@
"""Test KNX climate."""
import pytest

from homeassistant.components.climate import PRESET_ECO, PRESET_SLEEP, HVACMode
from homeassistant.components.knx.schema import ClimateSchema
from homeassistant.const import CONF_NAME, STATE_IDLE
@@ -15,6 +17,8 @@ RAW_FLOAT_21_0 = (0x0C, 0x1A)
RAW_FLOAT_22_0 = (0x0C, 0x4C)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_climate_basic_temperature_set(
hass: HomeAssistant, knx: KNXTestKit
) -> None:
@@ -54,6 +58,8 @@ async def test_climate_basic_temperature_set(
assert len(events) == 1


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_climate_hvac_mode(hass: HomeAssistant, knx: KNXTestKit) -> None:
"""Test KNX climate hvac mode."""
events = async_capture_events(hass, "state_changed")
@@ -107,6 +113,8 @@ async def test_climate_hvac_mode(hass: HomeAssistant, knx: KNXTestKit) -> None:
await knx.assert_write("1/2/6", (0x01,))


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_climate_preset_mode(
hass: HomeAssistant, knx: KNXTestKit, entity_registry: er.EntityRegistry
) -> None:
@@ -174,6 +182,8 @@ async def test_climate_preset_mode(
assert len(knx.xknx.devices) == 0


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_update_entity(hass: HomeAssistant, knx: KNXTestKit) -> None:
"""Test update climate entity for KNX."""
events = async_capture_events(hass, "state_changed")
@@ -219,6 +229,8 @@ async def test_update_entity(hass: HomeAssistant, knx: KNXTestKit) -> None:
await knx.assert_read("1/2/7")


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_command_value_idle_mode(hass: HomeAssistant, knx: KNXTestKit) -> None:
"""Test KNX climate command_value."""
await knx.setup_integration(

@@ -1,4 +1,6 @@
"""Test KNX cover."""
import pytest

from homeassistant.components.knx.schema import CoverSchema
from homeassistant.const import CONF_NAME, STATE_CLOSING
from homeassistant.core import HomeAssistant
@@ -8,6 +10,8 @@ from .conftest import KNXTestKit
from tests.common import async_capture_events


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_cover_basic(hass: HomeAssistant, knx: KNXTestKit) -> None:
"""Test KNX cover basic."""
events = async_capture_events(hass, "state_changed")

@@ -2,6 +2,7 @@
from pypck.inputs import Input, ModSendKeysHost, ModStatusAccessControl
from pypck.lcn_addr import LcnAddr
from pypck.lcn_defs import AccessControlPeriphery, KeyAction, SendKeyCommand
import pytest

from homeassistant.core import HomeAssistant

@@ -137,6 +138,8 @@ async def test_dont_fire_on_non_module_input(
assert len(events) == 0


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_dont_fire_on_unknown_module(hass: HomeAssistant, lcn_connection) -> None:
"""Test for no event is fired if an input from an unknown module is received."""
inp = ModStatusAccessControl(

@@ -15,6 +15,8 @@ from homeassistant.helpers import device_registry as dr
from .common import load_and_parse_node_fixture, setup_integration_with_node_fixture


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_device_registry_single_node_device(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -42,6 +44,8 @@ async def test_device_registry_single_node_device(
assert entry.sw_version == "v1.0"


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_device_registry_single_node_device_alt(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -113,6 +117,8 @@ async def test_device_registry_bridge(
assert device2_entry.sw_version == "1.49.1"


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_node_added_subscription(
hass: HomeAssistant,
matter_client: MagicMock,

@@ -4,6 +4,7 @@ from unittest.mock import MagicMock, call

from aiohttp import ClientWebSocketResponse
from matter_server.common.errors import InvalidCommand, NodeCommissionFailed
import pytest

from homeassistant.components.matter.api import ID, TYPE
from homeassistant.core import HomeAssistant
@@ -11,6 +12,8 @@ from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_commission(
hass: HomeAssistant,
hass_ws_client: Callable[[HomeAssistant], Awaitable[ClientWebSocketResponse]],
@@ -51,6 +54,8 @@ async def test_commission(
matter_client.commission_with_code.assert_called_once_with("12345678")


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_commission_on_network(
hass: HomeAssistant,
hass_ws_client: Callable[[HomeAssistant], Awaitable[ClientWebSocketResponse]],
@@ -91,6 +96,8 @@ async def test_commission_on_network(
matter_client.commission_on_network.assert_called_once_with(1234)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_set_thread_dataset(
hass: HomeAssistant,
hass_ws_client: Callable[[HomeAssistant], Awaitable[ClientWebSocketResponse]],
@@ -131,6 +138,8 @@ async def test_set_thread_dataset(
matter_client.set_thread_operational_dataset.assert_called_once_with("test_dataset")


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_set_wifi_credentials(
hass: HomeAssistant,
hass_ws_client: Callable[[HomeAssistant], Awaitable[ClientWebSocketResponse]],

@@ -23,6 +23,8 @@ async def contact_sensor_node_fixture(
)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_contact_sensor(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -53,6 +55,8 @@ async def occupancy_sensor_node_fixture(
)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_occupancy_sensor(
hass: HomeAssistant,
matter_client: MagicMock,

@@ -56,6 +56,8 @@ async def test_matter_attribute_redact(device_diagnostics: dict[str, Any]) -> No
assert redacted_device_diagnostics == device_diagnostics


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_config_entry_diagnostics(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,
@@ -74,6 +76,8 @@ async def test_config_entry_diagnostics(
assert diagnostics == config_entry_diagnostics_redacted


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_device_diagnostics(
hass: HomeAssistant,
hass_client: ClientSessionGenerator,

@@ -18,6 +18,8 @@ from .common import setup_integration_with_node_fixture
from tests.common import MockConfigEntry


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_get_device_id(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -31,6 +33,8 @@ async def test_get_device_id(
assert device_id == "00000000000004D2-0000000000000005-MatterNodeDevice"


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_get_node_from_device_entry(
hass: HomeAssistant,
matter_client: MagicMock,

@@ -81,6 +81,8 @@ async def test_entry_setup_unload(
assert entity_state.state == STATE_UNAVAILABLE


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_home_assistant_stop(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -408,6 +410,8 @@ async def test_update_addon(
assert update_addon.call_count == update_calls


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_issue_registry_invalid_version(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -604,6 +608,8 @@ async def test_remove_entry(
assert "Failed to uninstall the Matter Server add-on" in caplog.text


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_remove_config_entry_device(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -644,6 +650,8 @@ async def test_remove_config_entry_device(
assert not hass.states.get(entity_id)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_remove_config_entry_device_no_node(
hass: HomeAssistant,
matter_client: MagicMock,

@@ -14,6 +14,8 @@ from .common import (
)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
@pytest.mark.parametrize(
("fixture", "entity_id"),
[
@@ -90,6 +92,8 @@ async def test_on_off_light(
matter_client.send_device_command.reset_mock()


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
@pytest.mark.parametrize(
("fixture", "entity_id"),
[
@@ -144,6 +148,8 @@ async def test_dimmable_light(
matter_client.send_device_command.reset_mock()


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
@pytest.mark.parametrize(
("fixture", "entity_id"),
[
@@ -208,6 +214,8 @@ async def test_color_temperature_light(
matter_client.send_device_command.reset_mock()


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
@pytest.mark.parametrize(
("fixture", "entity_id"),
[

@@ -61,6 +61,8 @@ async def temperature_sensor_node_fixture(
)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_sensor_null_value(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -79,6 +81,8 @@ async def test_sensor_null_value(
assert state.state == "unknown"


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_flow_sensor(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -97,6 +101,8 @@ async def test_flow_sensor(
assert state.state == "2.0"


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_humidity_sensor(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -115,6 +121,8 @@ async def test_humidity_sensor(
assert state.state == "40.0"


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_light_sensor(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -133,6 +141,8 @@ async def test_light_sensor(
assert state.state == "2.0"


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_pressure_sensor(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -151,6 +161,8 @@ async def test_pressure_sensor(
assert state.state == "101.0"


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_temperature_sensor(
hass: HomeAssistant,
matter_client: MagicMock,

@@ -24,6 +24,8 @@ async def switch_node_fixture(
)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_turn_on(
hass: HomeAssistant,
matter_client: MagicMock,
@@ -58,6 +60,8 @@ async def test_turn_on(
assert state.state == "on"


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_turn_off(
hass: HomeAssistant,
matter_client: MagicMock,

@@ -40,6 +40,8 @@ async def async_setup_sdm(hass):
await hass.async_block_till_done()


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
@pytest.mark.parametrize("nest_test_config", [TEST_CONFIGFLOW_YAML_ONLY])
async def test_auth(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) -> None:
"""Exercise authentication library creates valid credentials."""
@@ -92,6 +94,8 @@ async def test_auth(hass: HomeAssistant, aioclient_mock: AiohttpClientMocker) ->
assert creds.scopes == SDM_SCOPES


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
@pytest.mark.parametrize("nest_test_config", [TEST_CONFIGFLOW_YAML_ONLY])
async def test_auth_expired_token(
hass: HomeAssistant, aioclient_mock: AiohttpClientMocker

@@ -2,6 +2,7 @@
from unittest.mock import patch

from pyotgw.vars import OTGW, OTGW_ABOUT
import pytest

from homeassistant import setup
from homeassistant.components.opentherm_gw.const import DOMAIN
@@ -28,6 +29,8 @@ MOCK_CONFIG_ENTRY = MockConfigEntry(
)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_device_registry_insert(hass: HomeAssistant) -> None:
"""Test that the device registry is initialized correctly."""
MOCK_CONFIG_ENTRY.add_to_hass(hass)
@@ -46,6 +49,8 @@ async def test_device_registry_insert(hass: HomeAssistant) -> None:
assert gw_dev.sw_version == VERSION_OLD


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_device_registry_update(
hass: HomeAssistant, device_registry: dr.DeviceRegistry
) -> None:

@@ -168,6 +168,8 @@ async def test_no_servers_found(
assert result["errors"]["base"] == "no_servers"


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_single_available_server(
hass: HomeAssistant, mock_plex_calls, current_request_with_host: None
) -> None:
@@ -206,6 +208,8 @@ async def test_single_available_server(
await hass.config_entries.async_unload(result["result"].entry_id)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_multiple_servers_with_selection(
hass: HomeAssistant,
mock_plex_calls,
@@ -261,6 +265,8 @@ async def test_multiple_servers_with_selection(
await hass.config_entries.async_unload(result["result"].entry_id)


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_adding_last_unconfigured_server(
hass: HomeAssistant,
mock_plex_calls,

@@ -148,6 +148,8 @@ async def test_transition_off(hass: HomeAssistant, snooz_fan_entity_id: str) ->
assert ATTR_ASSUMED_STATE not in state.attributes


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_push_events(
hass: HomeAssistant, mock_connected_snooz: SnoozFixture, snooz_fan_entity_id: str
) -> None:
@@ -172,6 +174,8 @@ async def test_push_events(
assert state.attributes[ATTR_ASSUMED_STATE] is True


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_restore_state(
hass: HomeAssistant, entity_registry: er.EntityRegistry
) -> None:

@@ -1,11 +1,15 @@
"""Test Snooz configuration."""
from __future__ import annotations

import pytest

from homeassistant.core import HomeAssistant

from . import SnoozFixture


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_removing_entry_cleans_up_connections(
hass: HomeAssistant, mock_connected_snooz: SnoozFixture
) -> None:
@@ -16,6 +20,8 @@ async def test_removing_entry_cleans_up_connections(
assert not mock_connected_snooz.device.is_connected


# This tests needs to be adjusted to remove lingering tasks
@pytest.mark.parametrize("expected_lingering_tasks", [True])
async def test_reloading_entry_cleans_up_connections(
hass: HomeAssistant, mock_connected_snooz: SnoozFixture
) -> None:

@@ -257,9 +257,21 @@ def garbage_collection() -> None:
gc.collect()


@pytest.fixture(autouse=True)
def expected_lingering_tasks() -> bool:
"""Temporary ability to bypass test failures.

Parametrize to True to bypass the pytest failure.
@pytest.mark.parametrize("expected_lingering_tasks", [True])

This should be removed when all lingering tasks have been cleaned up.
"""
return False


@pytest.fixture(autouse=True)
def verify_cleanup(
event_loop: asyncio.AbstractEventLoop,
event_loop: asyncio.AbstractEventLoop, expected_lingering_tasks: bool
) -> Generator[None, None, None]:
"""Verify that the test has cleaned up resources correctly."""
threads_before = frozenset(threading.enumerate())
@@ -278,7 +290,10 @@ def verify_cleanup(
# before moving on to the next test.
tasks = asyncio.all_tasks(event_loop) - tasks_before
for task in tasks:
_LOGGER.warning("Linger task after test %r", task)
if expected_lingering_tasks:
_LOGGER.warning("Linger task after test %r", task)
else:
pytest.fail(f"Linger task after test {repr(task)}")
task.cancel()
if tasks:
event_loop.run_until_complete(asyncio.wait(tasks))
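Taken together, the two conftest.py hunks above implement the enforcement: an autouse expected_lingering_tasks fixture defaults to False, and verify_cleanup now fails the test instead of only logging a warning when tasks are still running on the event loop after the test, unless the test parametrizes the fixture to True. A self-contained sketch of the same pattern, with names mirroring the diff; it assumes pytest-asyncio's event_loop fixture and simplifies the surrounding cleanup checks:

import asyncio
import logging
from collections.abc import Generator

import pytest

_LOGGER = logging.getLogger(__name__)


@pytest.fixture(autouse=True)
def expected_lingering_tasks() -> bool:
    """Strict by default; a test parametrizes this to True to bypass the failure."""
    return False


@pytest.fixture(autouse=True)
def verify_cleanup(
    event_loop: asyncio.AbstractEventLoop, expected_lingering_tasks: bool
) -> Generator[None, None, None]:
    """Fail the test if it leaves tasks running on the event loop."""
    tasks_before = asyncio.all_tasks(event_loop)
    yield
    tasks = asyncio.all_tasks(event_loop) - tasks_before
    for task in tasks:
        if expected_lingering_tasks:
            # Opted-in tests only log, matching the bypass in the diff.
            _LOGGER.warning("Lingering task after test %r", task)
        else:
            pytest.fail(f"Lingering task after test {task!r}")
        task.cancel()
    if tasks:
        # Let cancelled tasks finish before the next test starts.
        event_loop.run_until_complete(asyncio.wait(tasks))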