This commit is contained in:
Paulus Schoutsen 2023-03-14 00:10:23 -04:00 committed by GitHub
commit d084e70aff
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
26 changed files with 119 additions and 73 deletions

View File

@ -60,7 +60,7 @@ from .const import (
DEFAULT_PROBABILITY_THRESHOLD, DEFAULT_PROBABILITY_THRESHOLD,
) )
from .helpers import Observation from .helpers import Observation
from .repairs import raise_mirrored_entries, raise_no_prob_given_false from .issues import raise_mirrored_entries, raise_no_prob_given_false
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)

View File

@ -1,4 +1,4 @@
"""Helpers for generating repairs.""" """Helpers for generating issues."""
from __future__ import annotations from __future__ import annotations
from homeassistant.core import HomeAssistant from homeassistant.core import HomeAssistant

View File

@ -8,7 +8,7 @@
"iot_class": "local_push", "iot_class": "local_push",
"loggers": ["pydeconz"], "loggers": ["pydeconz"],
"quality_scale": "platinum", "quality_scale": "platinum",
"requirements": ["pydeconz==108"], "requirements": ["pydeconz==110"],
"ssdp": [ "ssdp": [
{ {
"manufacturer": "Royal Philips Electronics", "manufacturer": "Royal Philips Electronics",

View File

@ -94,9 +94,9 @@ class FibaroCover(FibaroDevice, CoverEntity):
"""Return if the cover is closed.""" """Return if the cover is closed."""
if self._is_open_close_only(): if self._is_open_close_only():
state = self.fibaro_device.state state = self.fibaro_device.state
if not state.has_value or state.str_value.lower() == "unknown": if not state.has_value or state.str_value().lower() == "unknown":
return None return None
return state.str_value.lower() == "closed" return state.str_value().lower() == "closed"
if self.current_cover_position is None: if self.current_cover_position is None:
return None return None

View File

@ -342,12 +342,14 @@ def get_next_departure(
origin_stop_time.departure_time origin_stop_time.departure_time
LIMIT :limit LIMIT :limit
""" """
result = schedule.engine.execute( result = schedule.engine.connect().execute(
text(sql_query), text(sql_query),
origin_station_id=start_station_id, {
end_station_id=end_station_id, "origin_station_id": start_station_id,
today=now_date, "end_station_id": end_station_id,
limit=limit, "today": now_date,
"limit": limit,
},
) )
# Create lookup timetable for today and possibly tomorrow, taking into # Create lookup timetable for today and possibly tomorrow, taking into
@ -357,7 +359,8 @@ def get_next_departure(
yesterday_start = today_start = tomorrow_start = None yesterday_start = today_start = tomorrow_start = None
yesterday_last = today_last = "" yesterday_last = today_last = ""
for row in result: for row_cursor in result:
row = row_cursor._asdict()
if row["yesterday"] == 1 and yesterday_date >= row["start_date"]: if row["yesterday"] == 1 and yesterday_date >= row["start_date"]:
extras = {"day": "yesterday", "first": None, "last": False} extras = {"day": "yesterday", "first": None, "last": False}
if yesterday_start is None: if yesterday_start is None:
@ -800,7 +803,10 @@ class GTFSDepartureSensor(SensorEntity):
@staticmethod @staticmethod
def dict_for_table(resource: Any) -> dict: def dict_for_table(resource: Any) -> dict:
"""Return a dictionary for the SQLAlchemy resource given.""" """Return a dictionary for the SQLAlchemy resource given."""
return {col: getattr(resource, col) for col in resource.__table__.columns} _dict = {}
for column in resource.__table__.columns:
_dict[column.name] = str(getattr(resource, column.name))
return _dict
def append_keys(self, resource: dict, prefix: str | None = None) -> None: def append_keys(self, resource: dict, prefix: str | None = None) -> None:
"""Properly format key val pairs to append to attributes.""" """Properly format key val pairs to append to attributes."""

View File

@ -96,7 +96,7 @@ from .handler import ( # noqa: F401
) )
from .http import HassIOView from .http import HassIOView
from .ingress import async_setup_ingress_view from .ingress import async_setup_ingress_view
from .repairs import SupervisorRepairs from .issues import SupervisorIssues
from .websocket_api import async_load_websocket_api from .websocket_api import async_load_websocket_api
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -123,7 +123,7 @@ DATA_SUPERVISOR_INFO = "hassio_supervisor_info"
DATA_ADDONS_CHANGELOGS = "hassio_addons_changelogs" DATA_ADDONS_CHANGELOGS = "hassio_addons_changelogs"
DATA_ADDONS_INFO = "hassio_addons_info" DATA_ADDONS_INFO = "hassio_addons_info"
DATA_ADDONS_STATS = "hassio_addons_stats" DATA_ADDONS_STATS = "hassio_addons_stats"
DATA_SUPERVISOR_REPAIRS = "supervisor_repairs" DATA_SUPERVISOR_ISSUES = "supervisor_issues"
HASSIO_UPDATE_INTERVAL = timedelta(minutes=5) HASSIO_UPDATE_INTERVAL = timedelta(minutes=5)
ADDONS_COORDINATOR = "hassio_addons_coordinator" ADDONS_COORDINATOR = "hassio_addons_coordinator"
@ -581,9 +581,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
hass.config_entries.flow.async_init(DOMAIN, context={"source": "system"}) hass.config_entries.flow.async_init(DOMAIN, context={"source": "system"})
) )
# Start listening for problems with supervisor and making repairs # Start listening for problems with supervisor and making issues
hass.data[DATA_SUPERVISOR_REPAIRS] = repairs = SupervisorRepairs(hass, hassio) hass.data[DATA_SUPERVISOR_ISSUES] = issues = SupervisorIssues(hass, hassio)
await repairs.setup() await issues.setup()
return True return True

View File

@ -70,11 +70,11 @@ UNHEALTHY_REASONS = {
} }
class SupervisorRepairs: class SupervisorIssues:
"""Create repairs from supervisor events.""" """Create issues from supervisor events."""
def __init__(self, hass: HomeAssistant, client: HassIO) -> None: def __init__(self, hass: HomeAssistant, client: HassIO) -> None:
"""Initialize supervisor repairs.""" """Initialize supervisor issues."""
self._hass = hass self._hass = hass
self._client = client self._client = client
self._unsupported_reasons: set[str] = set() self._unsupported_reasons: set[str] = set()
@ -87,7 +87,7 @@ class SupervisorRepairs:
@unhealthy_reasons.setter @unhealthy_reasons.setter
def unhealthy_reasons(self, reasons: set[str]) -> None: def unhealthy_reasons(self, reasons: set[str]) -> None:
"""Set unhealthy reasons. Create or delete repairs as necessary.""" """Set unhealthy reasons. Create or delete issues as necessary."""
for unhealthy in reasons - self.unhealthy_reasons: for unhealthy in reasons - self.unhealthy_reasons:
if unhealthy in UNHEALTHY_REASONS: if unhealthy in UNHEALTHY_REASONS:
translation_key = f"unhealthy_{unhealthy}" translation_key = f"unhealthy_{unhealthy}"
@ -119,7 +119,7 @@ class SupervisorRepairs:
@unsupported_reasons.setter @unsupported_reasons.setter
def unsupported_reasons(self, reasons: set[str]) -> None: def unsupported_reasons(self, reasons: set[str]) -> None:
"""Set unsupported reasons. Create or delete repairs as necessary.""" """Set unsupported reasons. Create or delete issues as necessary."""
for unsupported in reasons - UNSUPPORTED_SKIP_REPAIR - self.unsupported_reasons: for unsupported in reasons - UNSUPPORTED_SKIP_REPAIR - self.unsupported_reasons:
if unsupported in UNSUPPORTED_REASONS: if unsupported in UNSUPPORTED_REASONS:
translation_key = f"unsupported_{unsupported}" translation_key = f"unsupported_{unsupported}"
@ -149,18 +149,18 @@ class SupervisorRepairs:
await self.update() await self.update()
async_dispatcher_connect( async_dispatcher_connect(
self._hass, EVENT_SUPERVISOR_EVENT, self._supervisor_events_to_repairs self._hass, EVENT_SUPERVISOR_EVENT, self._supervisor_events_to_issues
) )
async def update(self) -> None: async def update(self) -> None:
"""Update repairs from Supervisor resolution center.""" """Update issuess from Supervisor resolution center."""
data = await self._client.get_resolution_info() data = await self._client.get_resolution_info()
self.unhealthy_reasons = set(data[ATTR_UNHEALTHY]) self.unhealthy_reasons = set(data[ATTR_UNHEALTHY])
self.unsupported_reasons = set(data[ATTR_UNSUPPORTED]) self.unsupported_reasons = set(data[ATTR_UNSUPPORTED])
@callback @callback
def _supervisor_events_to_repairs(self, event: dict[str, Any]) -> None: def _supervisor_events_to_issues(self, event: dict[str, Any]) -> None:
"""Create repairs from supervisor events.""" """Create issues from supervisor events."""
if ATTR_WS_EVENT not in event: if ATTR_WS_EVENT not in event:
return return

View File

@ -20,10 +20,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
entry_data[CONF_CLIENT_DEVICE_ID] = entry.entry_id entry_data[CONF_CLIENT_DEVICE_ID] = entry.entry_id
hass.config_entries.async_update_entry(entry, data=entry_data) hass.config_entries.async_update_entry(entry, data=entry_data)
client = create_client( device_id = entry.data[CONF_CLIENT_DEVICE_ID]
device_id=entry.data[CONF_CLIENT_DEVICE_ID], device_name = ascii(hass.config.location_name)
device_name=hass.config.location_name,
) client = create_client(device_id=device_id, device_name=device_name)
try: try:
user_id, connect_result = await validate_input(hass, dict(entry.data), client) user_id, connect_result = await validate_input(hass, dict(entry.data), client)

View File

@ -139,7 +139,7 @@ class NibeClimateEntity(CoordinatorEntity[Coordinator], ClimateEntity):
mode = HVACMode.OFF mode = HVACMode.OFF
if _get_value(self._coil_use_room_sensor) == "ON": if _get_value(self._coil_use_room_sensor) == "ON":
if _get_value(self._coil_cooling_with_room_sensor) == "ON": if _get_value(self._coil_cooling_with_room_sensor) != "OFF":
mode = HVACMode.HEAT_COOL mode = HVACMode.HEAT_COOL
else: else:
mode = HVACMode.HEAT mode = HVACMode.HEAT

View File

@ -7,5 +7,5 @@
"iot_class": "cloud_polling", "iot_class": "cloud_polling",
"loggers": ["aiopvpc"], "loggers": ["aiopvpc"],
"quality_scale": "platinum", "quality_scale": "platinum",
"requirements": ["aiopvpc==4.0.1"] "requirements": ["aiopvpc==4.1.0"]
} }

View File

@ -282,8 +282,10 @@ def _significant_states_stmt(
(States.last_changed_ts == States.last_updated_ts) (States.last_changed_ts == States.last_updated_ts)
| States.last_changed_ts.is_(None) | States.last_changed_ts.is_(None)
) )
else:
stmt += lambda q: q.filter( stmt += lambda q: q.filter(
(States.last_changed == States.last_updated) | States.last_changed.is_(None) (States.last_changed == States.last_updated)
| States.last_changed.is_(None)
) )
elif significant_changes_only: elif significant_changes_only:
if schema_version >= 31: if schema_version >= 31:

View File

@ -6,5 +6,5 @@
"integration_type": "system", "integration_type": "system",
"iot_class": "local_push", "iot_class": "local_push",
"quality_scale": "internal", "quality_scale": "internal",
"requirements": ["sqlalchemy==2.0.5.post1", "fnvhash==0.1.0"] "requirements": ["sqlalchemy==2.0.6", "fnvhash==0.1.0"]
} }

View File

@ -1072,7 +1072,7 @@ def _migrate_columns_to_timestamp(
result = session.connection().execute( result = session.connection().execute(
text( text(
"UPDATE events set time_fired_ts=" "UPDATE events set time_fired_ts="
"IF(time_fired is NULL,0," "IF(time_fired is NULL or UNIX_TIMESTAMP(time_fired) is NULL,0,"
"UNIX_TIMESTAMP(time_fired)" "UNIX_TIMESTAMP(time_fired)"
") " ") "
"where time_fired_ts is NULL " "where time_fired_ts is NULL "
@ -1085,7 +1085,7 @@ def _migrate_columns_to_timestamp(
result = session.connection().execute( result = session.connection().execute(
text( text(
"UPDATE states set last_updated_ts=" "UPDATE states set last_updated_ts="
"IF(last_updated is NULL,0," "IF(last_updated is NULL or UNIX_TIMESTAMP(last_updated) is NULL,0,"
"UNIX_TIMESTAMP(last_updated) " "UNIX_TIMESTAMP(last_updated) "
"), " "), "
"last_changed_ts=" "last_changed_ts="
@ -1161,7 +1161,7 @@ def _migrate_statistics_columns_to_timestamp(
result = session.connection().execute( result = session.connection().execute(
text( text(
f"UPDATE {table} set start_ts=" f"UPDATE {table} set start_ts="
"IF(start is NULL,0," "IF(start is NULL or UNIX_TIMESTAMP(start) is NULL,0,"
"UNIX_TIMESTAMP(start) " "UNIX_TIMESTAMP(start) "
"), " "), "
"created_ts=" "created_ts="

View File

@ -159,11 +159,9 @@ class ScreenlogicDataUpdateCoordinator(DataUpdateCoordinator):
"""Fetch data from the Screenlogic gateway.""" """Fetch data from the Screenlogic gateway."""
try: try:
await self._async_update_configured_data() await self._async_update_configured_data()
except ScreenLogicError as error: except (ScreenLogicError, ScreenLogicWarning) as ex:
_LOGGER.warning("Update error - attempting reconnect: %s", error) _LOGGER.warning("Update error - attempting reconnect: %s", ex)
await self._async_reconnect_update_data() await self._async_reconnect_update_data()
except ScreenLogicWarning as warn:
raise UpdateFailed(f"Incomplete update: {warn}") from warn
return None return None

View File

@ -5,5 +5,5 @@
"config_flow": true, "config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/sql", "documentation": "https://www.home-assistant.io/integrations/sql",
"iot_class": "local_polling", "iot_class": "local_polling",
"requirements": ["sqlalchemy==2.0.5.post1"] "requirements": ["sqlalchemy==2.0.6"]
} }

View File

@ -107,7 +107,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b
zha_data.setdefault(platform, []) zha_data.setdefault(platform, [])
if config.get(CONF_ENABLE_QUIRKS, True): if config.get(CONF_ENABLE_QUIRKS, True):
setup_quirks(config) setup_quirks(custom_quirks_path=config.get(CONF_CUSTOM_QUIRKS_PATH))
# temporary code to remove the ZHA storage file from disk. # temporary code to remove the ZHA storage file from disk.
# this will be removed in 2022.10.0 # this will be removed in 2022.10.0

View File

@ -20,10 +20,10 @@
"zigpy_znp" "zigpy_znp"
], ],
"requirements": [ "requirements": [
"bellows==0.34.9", "bellows==0.34.10",
"pyserial==3.5", "pyserial==3.5",
"pyserial-asyncio==0.6", "pyserial-asyncio==0.6",
"zha-quirks==0.0.93", "zha-quirks==0.0.94",
"zigpy-deconz==0.19.2", "zigpy-deconz==0.19.2",
"zigpy==0.53.2", "zigpy==0.53.2",
"zigpy-xbee==0.16.2", "zigpy-xbee==0.16.2",

View File

@ -8,7 +8,7 @@ from .backports.enum import StrEnum
APPLICATION_NAME: Final = "HomeAssistant" APPLICATION_NAME: Final = "HomeAssistant"
MAJOR_VERSION: Final = 2023 MAJOR_VERSION: Final = 2023
MINOR_VERSION: Final = 3 MINOR_VERSION: Final = 3
PATCH_VERSION: Final = "3" PATCH_VERSION: Final = "4"
__short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}"
__version__: Final = f"{__short_version__}.{PATCH_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}"
REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0) REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 10, 0)

View File

@ -39,6 +39,20 @@ SERVER_SOFTWARE = "{0}/{1} aiohttp/{2} Python/{3[0]}.{3[1]}".format(
WARN_CLOSE_MSG = "closes the Home Assistant aiohttp session" WARN_CLOSE_MSG = "closes the Home Assistant aiohttp session"
#
# The default connection limit of 100 meant that you could only have
# 100 concurrent connections.
#
# This was effectively a limit of 100 devices and then
# the supervisor API would fail as soon as it was hit.
#
# We now apply the 100 limit per host, so that we can have 100 connections
# to a single host, but can have up to 4096 connections in total to
# prevent a single host from using all available connections.
#
MAXIMUM_CONNECTIONS = 4096
MAXIMUM_CONNECTIONS_PER_HOST = 100
class HassClientResponse(aiohttp.ClientResponse): class HassClientResponse(aiohttp.ClientResponse):
"""aiohttp.ClientResponse with a json method that uses json_loads by default.""" """aiohttp.ClientResponse with a json method that uses json_loads by default."""
@ -261,7 +275,12 @@ def _async_get_connector(
else: else:
ssl_context = False ssl_context = False
connector = aiohttp.TCPConnector(enable_cleanup_closed=True, ssl=ssl_context) connector = aiohttp.TCPConnector(
enable_cleanup_closed=True,
ssl=ssl_context,
limit=MAXIMUM_CONNECTIONS,
limit_per_host=MAXIMUM_CONNECTIONS_PER_HOST,
)
hass.data[key] = connector hass.data[key] = connector
async def _async_close_connector(event: Event) -> None: async def _async_close_connector(event: Event) -> None:

View File

@ -42,7 +42,7 @@ pyudev==0.23.2
pyyaml==6.0 pyyaml==6.0
requests==2.28.2 requests==2.28.2
scapy==2.5.0 scapy==2.5.0
sqlalchemy==2.0.5.post1 sqlalchemy==2.0.6
typing-extensions>=4.5.0,<5.0 typing-extensions>=4.5.0,<5.0
voluptuous-serialize==2.6.0 voluptuous-serialize==2.6.0
voluptuous==0.13.1 voluptuous==0.13.1

View File

@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
[project] [project]
name = "homeassistant" name = "homeassistant"
version = "2023.3.3" version = "2023.3.4"
license = {text = "Apache-2.0"} license = {text = "Apache-2.0"}
description = "Open-source home automation platform running on Python 3." description = "Open-source home automation platform running on Python 3."
readme = "README.rst" readme = "README.rst"

View File

@ -241,7 +241,7 @@ aiopurpleair==2022.12.1
aiopvapi==2.0.4 aiopvapi==2.0.4
# homeassistant.components.pvpc_hourly_pricing # homeassistant.components.pvpc_hourly_pricing
aiopvpc==4.0.1 aiopvpc==4.1.0
# homeassistant.components.lidarr # homeassistant.components.lidarr
# homeassistant.components.radarr # homeassistant.components.radarr
@ -422,7 +422,7 @@ beautifulsoup4==4.11.1
# beewi_smartclim==0.0.10 # beewi_smartclim==0.0.10
# homeassistant.components.zha # homeassistant.components.zha
bellows==0.34.9 bellows==0.34.10
# homeassistant.components.bmw_connected_drive # homeassistant.components.bmw_connected_drive
bimmer_connected==0.12.1 bimmer_connected==0.12.1
@ -1573,7 +1573,7 @@ pydaikin==2.9.0
pydanfossair==0.1.0 pydanfossair==0.1.0
# homeassistant.components.deconz # homeassistant.components.deconz
pydeconz==108 pydeconz==110
# homeassistant.components.delijn # homeassistant.components.delijn
pydelijn==1.0.0 pydelijn==1.0.0
@ -2398,7 +2398,7 @@ spotipy==2.22.1
# homeassistant.components.recorder # homeassistant.components.recorder
# homeassistant.components.sql # homeassistant.components.sql
sqlalchemy==2.0.5.post1 sqlalchemy==2.0.6
# homeassistant.components.srp_energy # homeassistant.components.srp_energy
srpenergy==1.3.6 srpenergy==1.3.6
@ -2706,7 +2706,7 @@ zeroconf==0.47.3
zeversolar==0.3.1 zeversolar==0.3.1
# homeassistant.components.zha # homeassistant.components.zha
zha-quirks==0.0.93 zha-quirks==0.0.94
# homeassistant.components.zhong_hong # homeassistant.components.zhong_hong
zhong_hong_hvac==1.0.9 zhong_hong_hvac==1.0.9

View File

@ -219,7 +219,7 @@ aiopurpleair==2022.12.1
aiopvapi==2.0.4 aiopvapi==2.0.4
# homeassistant.components.pvpc_hourly_pricing # homeassistant.components.pvpc_hourly_pricing
aiopvpc==4.0.1 aiopvpc==4.1.0
# homeassistant.components.lidarr # homeassistant.components.lidarr
# homeassistant.components.radarr # homeassistant.components.radarr
@ -352,7 +352,7 @@ base36==0.1.1
beautifulsoup4==4.11.1 beautifulsoup4==4.11.1
# homeassistant.components.zha # homeassistant.components.zha
bellows==0.34.9 bellows==0.34.10
# homeassistant.components.bmw_connected_drive # homeassistant.components.bmw_connected_drive
bimmer_connected==0.12.1 bimmer_connected==0.12.1
@ -1134,7 +1134,7 @@ pycoolmasternet-async==0.1.5
pydaikin==2.9.0 pydaikin==2.9.0
# homeassistant.components.deconz # homeassistant.components.deconz
pydeconz==108 pydeconz==110
# homeassistant.components.dexcom # homeassistant.components.dexcom
pydexcom==0.2.3 pydexcom==0.2.3
@ -1698,7 +1698,7 @@ spotipy==2.22.1
# homeassistant.components.recorder # homeassistant.components.recorder
# homeassistant.components.sql # homeassistant.components.sql
sqlalchemy==2.0.5.post1 sqlalchemy==2.0.6
# homeassistant.components.srp_energy # homeassistant.components.srp_energy
srpenergy==1.3.6 srpenergy==1.3.6
@ -1922,7 +1922,7 @@ zeroconf==0.47.3
zeversolar==0.3.1 zeversolar==0.3.1
# homeassistant.components.zha # homeassistant.components.zha
zha-quirks==0.0.93 zha-quirks==0.0.94
# homeassistant.components.zha # homeassistant.components.zha
zigpy-deconz==0.19.2 zigpy-deconz==0.19.2

View File

@ -52,7 +52,7 @@ def hassio_stubs(hassio_env, hass, hass_client, aioclient_mock):
"homeassistant.components.hassio.HassIO.get_ingress_panels", "homeassistant.components.hassio.HassIO.get_ingress_panels",
return_value={"panels": []}, return_value={"panels": []},
), patch( ), patch(
"homeassistant.components.hassio.repairs.SupervisorRepairs.setup" "homeassistant.components.hassio.issues.SupervisorIssues.setup"
), patch( ), patch(
"homeassistant.components.hassio.HassIO.refresh_updates" "homeassistant.components.hassio.HassIO.refresh_updates"
): ):

View File

@ -1,4 +1,4 @@
"""Test repairs from supervisor issues.""" """Test issues from supervisor issues."""
from __future__ import annotations from __future__ import annotations
import os import os
@ -145,12 +145,12 @@ def assert_repair_in_list(issues: list[dict[str, Any]], unhealthy: bool, reason:
} in issues } in issues
async def test_unhealthy_repairs( async def test_unhealthy_issues(
hass: HomeAssistant, hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker, aioclient_mock: AiohttpClientMocker,
hass_ws_client: WebSocketGenerator, hass_ws_client: WebSocketGenerator,
) -> None: ) -> None:
"""Test repairs added for unhealthy systems.""" """Test issues added for unhealthy systems."""
mock_resolution_info(aioclient_mock, unhealthy=["docker", "setup"]) mock_resolution_info(aioclient_mock, unhealthy=["docker", "setup"])
result = await async_setup_component(hass, "hassio", {}) result = await async_setup_component(hass, "hassio", {})
@ -166,12 +166,12 @@ async def test_unhealthy_repairs(
assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="setup") assert_repair_in_list(msg["result"]["issues"], unhealthy=True, reason="setup")
async def test_unsupported_repairs( async def test_unsupported_issues(
hass: HomeAssistant, hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker, aioclient_mock: AiohttpClientMocker,
hass_ws_client: WebSocketGenerator, hass_ws_client: WebSocketGenerator,
) -> None: ) -> None:
"""Test repairs added for unsupported systems.""" """Test issues added for unsupported systems."""
mock_resolution_info(aioclient_mock, unsupported=["content_trust", "os"]) mock_resolution_info(aioclient_mock, unsupported=["content_trust", "os"])
result = await async_setup_component(hass, "hassio", {}) result = await async_setup_component(hass, "hassio", {})
@ -189,12 +189,12 @@ async def test_unsupported_repairs(
assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os") assert_repair_in_list(msg["result"]["issues"], unhealthy=False, reason="os")
async def test_unhealthy_repairs_add_remove( async def test_unhealthy_issues_add_remove(
hass: HomeAssistant, hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker, aioclient_mock: AiohttpClientMocker,
hass_ws_client: WebSocketGenerator, hass_ws_client: WebSocketGenerator,
) -> None: ) -> None:
"""Test unhealthy repairs added and removed from dispatches.""" """Test unhealthy issues added and removed from dispatches."""
mock_resolution_info(aioclient_mock) mock_resolution_info(aioclient_mock)
result = await async_setup_component(hass, "hassio", {}) result = await async_setup_component(hass, "hassio", {})
@ -245,12 +245,12 @@ async def test_unhealthy_repairs_add_remove(
assert msg["result"] == {"issues": []} assert msg["result"] == {"issues": []}
async def test_unsupported_repairs_add_remove( async def test_unsupported_issues_add_remove(
hass: HomeAssistant, hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker, aioclient_mock: AiohttpClientMocker,
hass_ws_client: WebSocketGenerator, hass_ws_client: WebSocketGenerator,
) -> None: ) -> None:
"""Test unsupported repairs added and removed from dispatches.""" """Test unsupported issues added and removed from dispatches."""
mock_resolution_info(aioclient_mock) mock_resolution_info(aioclient_mock)
result = await async_setup_component(hass, "hassio", {}) result = await async_setup_component(hass, "hassio", {})
@ -301,12 +301,12 @@ async def test_unsupported_repairs_add_remove(
assert msg["result"] == {"issues": []} assert msg["result"] == {"issues": []}
async def test_reset_repairs_supervisor_restart( async def test_reset_issues_supervisor_restart(
hass: HomeAssistant, hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker, aioclient_mock: AiohttpClientMocker,
hass_ws_client: WebSocketGenerator, hass_ws_client: WebSocketGenerator,
) -> None: ) -> None:
"""Unsupported/unhealthy repairs reset on supervisor restart.""" """Unsupported/unhealthy issues reset on supervisor restart."""
mock_resolution_info(aioclient_mock, unsupported=["os"], unhealthy=["docker"]) mock_resolution_info(aioclient_mock, unsupported=["os"], unhealthy=["docker"])
result = await async_setup_component(hass, "hassio", {}) result = await async_setup_component(hass, "hassio", {})

View File

@ -209,6 +209,27 @@ def test_significant_states_with_session_entity_minimal_response_no_matches(
) )
def test_significant_states_with_session_single_entity(
hass_recorder: Callable[..., HomeAssistant],
) -> None:
"""Test get_significant_states_with_session with a single entity."""
hass = hass_recorder()
hass.states.set("demo.id", "any", {"attr": True})
hass.states.set("demo.id", "any2", {"attr": True})
wait_recording_done(hass)
now = dt_util.utcnow()
with session_scope(hass=hass) as session:
states = history.get_significant_states_with_session(
hass,
session,
now - timedelta(days=1),
now,
entity_ids=["demo.id"],
minimal_response=False,
)
assert len(states["demo.id"]) == 2
@pytest.mark.parametrize( @pytest.mark.parametrize(
("attributes", "no_attributes", "limit"), ("attributes", "no_attributes", "limit"),
[ [