Split hassio data coordinator

Use two data coordinators for hassio data: one for the Core,
Supervisor, and Operating System updates, and one for the add-on
updates. This allows the add-on updates to be fetched on their own
interval, independently of the Core, Supervisor, and Operating
System updates.
Stefan Agner 2025-06-23 14:21:10 +02:00
parent d5a8fa9c5c
commit ccc7eec253
8 changed files with 192 additions and 75 deletions
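
Taken together, the per-file diffs below amount to the wiring sketched here: two DataUpdateCoordinator subclasses with independent polling intervals, each stored under its own hass.data key. This is a simplified illustration rather than an excerpt of the commit; imports are trimmed, the device-registry and Supervisor-client plumbing is omitted, and the _fetch_* helpers are placeholders for the real Supervisor API calls.

# Simplified sketch of the resulting architecture (not code from the commit).
# The _fetch_* helpers stand in for the real Supervisor API plumbing.
from datetime import timedelta
import logging
from typing import Any

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

HASSIO_UPDATE_INTERVAL = timedelta(minutes=1)        # Core/Supervisor/Host/OS data
HASSIO_ADDON_UPDATE_INTERVAL = timedelta(minutes=2)  # add-on data


async def _fetch_core_supervisor_os_data(hass: HomeAssistant) -> dict[str, Any]:
    """Placeholder for the Core/Supervisor/Host/OS Supervisor API calls."""
    return {}


async def _fetch_addon_data(hass: HomeAssistant) -> dict[str, Any]:
    """Placeholder for the add-on info/stats Supervisor API calls."""
    return {}


class HassioDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    """Retrieve Core, Supervisor, Host, and OS status every minute."""

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,
            name="hassio",
            update_interval=HASSIO_UPDATE_INTERVAL,
        )

    async def _async_update_data(self) -> dict[str, Any]:
        return await _fetch_core_supervisor_os_data(self.hass)


class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    """Retrieve add-on status every two minutes, independently."""

    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,
            name="hassio",
            update_interval=HASSIO_ADDON_UPDATE_INTERVAL,
        )

    async def _async_update_data(self) -> dict[str, Any]:
        return await _fetch_addon_data(self.hass)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up both coordinators; each does its own first refresh."""
    coordinator = HassioDataUpdateCoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()
    hass.data["hassio_coordinator"] = coordinator  # COORDINATOR

    addon_coordinator = HassioAddOnDataUpdateCoordinator(hass, entry)
    await addon_coordinator.async_config_entry_first_refresh()
    hass.data["hassio_addons_coordinator"] = addon_coordinator  # ADDONS_COORDINATOR
    return True

Because the two coordinators poll independently, add-on data is refreshed every two minutes without also re-fetching the Core, Supervisor, Host, and OS data, which now refresh every minute (see the interval changes in const.py below).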

homeassistant/components/hassio/__init__.py

@@ -92,6 +92,7 @@ from .const import (
ATTR_LOCATION,
ATTR_PASSWORD,
ATTR_SLUG,
COORDINATOR,
DATA_COMPONENT,
DATA_CONFIG_STORE,
DATA_CORE_INFO,
@@ -106,6 +107,7 @@ from .const import (
HASSIO_UPDATE_INTERVAL,
)
from .coordinator import (
HassioAddOnDataUpdateCoordinator,
HassioDataUpdateCoordinator,
get_addons_info,
get_addons_stats, # noqa: F401
@@ -555,9 +557,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up a config entry."""
dev_reg = dr.async_get(hass)
coordinator = HassioDataUpdateCoordinator(hass, entry, dev_reg)
await coordinator.async_config_entry_first_refresh()
hass.data[ADDONS_COORDINATOR] = coordinator
hass.data[COORDINATOR] = coordinator
addon_coordinator = HassioAddOnDataUpdateCoordinator(hass, entry, dev_reg)
await addon_coordinator.async_config_entry_first_refresh()
hass.data[ADDONS_COORDINATOR] = addon_coordinator
def deprecated_setup_issue() -> None:
os_info = get_os_info(hass)

homeassistant/components/hassio/binary_sensor.py

@@ -41,15 +41,15 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Binary sensor set up for Hass.io config entry."""
coordinator = hass.data[ADDONS_COORDINATOR]
addons_coordinator = hass.data[ADDONS_COORDINATOR]
async_add_entities(
HassioAddonBinarySensor(
addon=addon,
coordinator=coordinator,
coordinator=addons_coordinator,
entity_description=entity_description,
)
for addon in coordinator.data[DATA_KEY_ADDONS].values()
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in ADDON_ENTITY_DESCRIPTIONS
)

homeassistant/components/hassio/const.py

@@ -71,6 +71,7 @@ EVENT_ISSUE_REMOVED = "issue_removed"
UPDATE_KEY_SUPERVISOR = "supervisor"
COORDINATOR = "hassio_coordinator"
ADDONS_COORDINATOR = "hassio_addons_coordinator"
@@ -87,7 +88,8 @@ DATA_SUPERVISOR_INFO = "hassio_supervisor_info"
DATA_SUPERVISOR_STATS = "hassio_supervisor_stats"
DATA_ADDONS_INFO = "hassio_addons_info"
DATA_ADDONS_STATS = "hassio_addons_stats"
HASSIO_UPDATE_INTERVAL = timedelta(minutes=5)
HASSIO_UPDATE_INTERVAL = timedelta(minutes=1)
HASSIO_ADDON_UPDATE_INTERVAL = timedelta(minutes=2)
ATTR_AUTO_UPDATE = "auto_update"
ATTR_VERSION = "version"

homeassistant/components/hassio/coordinator.py

@@ -49,6 +49,7 @@ from .const import (
DATA_SUPERVISOR_INFO,
DATA_SUPERVISOR_STATS,
DOMAIN,
HASSIO_ADDON_UPDATE_INTERVAL,
HASSIO_UPDATE_INTERVAL,
REQUEST_REFRESH_DELAY,
SUPERVISOR_CONTAINER,
@@ -279,8 +280,8 @@ def async_remove_addons_from_dev_reg(
dev_reg.async_remove_device(dev.id)
class HassioDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to retrieve Hass.io status."""
class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to retrieve Hass.io Add-on status."""
config_entry: ConfigEntry
@@ -293,7 +294,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=HASSIO_UPDATE_INTERVAL,
update_interval=HASSIO_ADDON_UPDATE_INTERVAL,
# We don't want an immediate refresh since we want to avoid
# fetching the container stats right away and avoid hammering
# the Supervisor API on startup
@@ -305,7 +306,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
self.data = {}
self.entry_id = config_entry.entry_id
self.dev_reg = dev_reg
self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None
self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict(
lambda: defaultdict(set)
)
@@ -347,35 +347,12 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
}
for addon in supervisor_info.get("addons", [])
}
if self.is_hass_os:
new_data[DATA_KEY_OS] = get_os_info(self.hass)
new_data[DATA_KEY_CORE] = {
**(get_core_info(self.hass) or {}),
**get_core_stats(self.hass),
}
new_data[DATA_KEY_SUPERVISOR] = {
**supervisor_info,
**get_supervisor_stats(self.hass),
}
new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}
# If this is the initial refresh, register all addons and return the dict
if is_first_update:
async_register_addons_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
)
async_register_core_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_CORE]
)
async_register_supervisor_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_SUPERVISOR]
)
async_register_host_in_dev_reg(self.entry_id, self.dev_reg)
if self.is_hass_os:
async_register_os_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_OS]
)
# Remove add-ons that are no longer installed from device registry
supervisor_addon_devices = {
@@ -388,12 +365,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
async_remove_addons_from_dev_reg(self.dev_reg, stale_addons)
if not self.is_hass_os and (
dev := self.dev_reg.async_get_device(identifiers={(DOMAIN, "OS")})
):
# Remove the OS device if it exists and the installation is not hassos
self.dev_reg.async_remove_device(dev.id)
# If there are new add-ons, we should reload the config entry so we can
# create new devices and entities. We can return an empty dict because
# coordinator will be recreated.
@@ -419,21 +390,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
container_updates = self._container_updates
data = self.hass.data
hassio = self.hassio
updates = {
DATA_INFO: hassio.get_info(),
DATA_CORE_INFO: hassio.get_core_info(),
DATA_SUPERVISOR_INFO: hassio.get_supervisor_info(),
DATA_OS_INFO: hassio.get_os_info(),
}
if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
updates[DATA_CORE_STATS] = hassio.get_core_stats()
if CONTAINER_STATS in container_updates[SUPERVISOR_CONTAINER]:
updates[DATA_SUPERVISOR_STATS] = hassio.get_supervisor_stats()
results = await asyncio.gather(*updates.values())
for key, result in zip(updates, results, strict=False):
data[key] = result
data[DATA_SUPERVISOR_INFO] = await self.hassio.get_supervisor_info()
_addon_data = data[DATA_SUPERVISOR_INFO].get("addons", [])
all_addons: list[str] = []
@@ -531,14 +488,159 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
) -> None:
"""Refresh data."""
if not scheduled and not raise_on_auth_failed:
# Force refreshing updates for non-scheduled updates
# Force reloading add-on updates for non-scheduled
# updates.
#
# If `raise_on_auth_failed` is set, it means this is
# the first refresh and we do not want to delay
# startup or cause a timeout so we only refresh the
# updates if this is not a scheduled refresh and
# we are not doing the first refresh.
try:
await self.supervisor_client.refresh_updates()
await self.supervisor_client.store.reload()
except SupervisorError as err:
_LOGGER.warning("Error on Supervisor API: %s", err)
await super()._async_refresh(
log_failures, raise_on_auth_failed, scheduled, raise_on_entry_error
)
class HassioDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to retrieve Hass.io status."""
config_entry: ConfigEntry
def __init__(
self, hass: HomeAssistant, config_entry: ConfigEntry, dev_reg: dr.DeviceRegistry
) -> None:
"""Initialize coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=HASSIO_UPDATE_INTERVAL,
# We don't want an immediate refresh since we want to avoid
# fetching the container stats right away and avoid hammering
# the Supervisor API on startup
request_refresh_debouncer=Debouncer(
hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
),
)
self.hassio = hass.data[DATA_COMPONENT]
self.data = {}
self.entry_id = config_entry.entry_id
self.dev_reg = dev_reg
self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None
self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict(
lambda: defaultdict(set)
)
self.supervisor_client = get_supervisor_client(hass)
async def _async_update_data(self) -> dict[str, Any]:
"""Update data via library."""
is_first_update = not self.data
try:
await self.force_data_refresh(is_first_update)
except HassioAPIError as err:
raise UpdateFailed(f"Error on Supervisor API: {err}") from err
new_data: dict[str, Any] = {}
supervisor_info = get_supervisor_info(self.hass) or {}
if self.is_hass_os:
new_data[DATA_KEY_OS] = get_os_info(self.hass)
new_data[DATA_KEY_CORE] = {
**(get_core_info(self.hass) or {}),
**get_core_stats(self.hass),
}
new_data[DATA_KEY_SUPERVISOR] = {
**supervisor_info,
**get_supervisor_stats(self.hass),
}
new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}
# If this is the initial refresh, register all main components
if is_first_update:
async_register_core_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_CORE]
)
async_register_supervisor_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_SUPERVISOR]
)
async_register_host_in_dev_reg(self.entry_id, self.dev_reg)
if self.is_hass_os:
async_register_os_in_dev_reg(
self.entry_id, self.dev_reg, new_data[DATA_KEY_OS]
)
if not self.is_hass_os and (
dev := self.dev_reg.async_get_device(identifiers={(DOMAIN, "OS")})
):
# Remove the OS device if it exists and the installation is not hassos
self.dev_reg.async_remove_device(dev.id)
return new_data
async def force_data_refresh(self, first_update: bool) -> None:
"""Force update of the addon info."""
container_updates = self._container_updates
data = self.hass.data
hassio = self.hassio
updates = {
DATA_INFO: hassio.get_info(),
DATA_CORE_INFO: hassio.get_core_info(),
DATA_SUPERVISOR_INFO: hassio.get_supervisor_info(),
DATA_OS_INFO: hassio.get_os_info(),
}
if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
updates[DATA_CORE_STATS] = hassio.get_core_stats()
if CONTAINER_STATS in container_updates[SUPERVISOR_CONTAINER]:
updates[DATA_SUPERVISOR_STATS] = hassio.get_supervisor_stats()
results = await asyncio.gather(*updates.values())
for key, result in zip(updates, results, strict=False):
data[key] = result
@callback
def async_enable_container_updates(
self, slug: str, entity_id: str, types: set[str]
) -> CALLBACK_TYPE:
"""Enable updates for an add-on."""
enabled_updates = self._container_updates[slug]
for key in types:
enabled_updates[key].add(entity_id)
@callback
def _remove() -> None:
for key in types:
enabled_updates[key].remove(entity_id)
return _remove
async def _async_refresh(
self,
log_failures: bool = True,
raise_on_auth_failed: bool = False,
scheduled: bool = False,
raise_on_entry_error: bool = False,
) -> None:
"""Refresh data."""
if not scheduled and not raise_on_auth_failed:
# Force reloading updates of main components for
# non-scheduled updates.
#
# If `raise_on_auth_failed` is set, it means this is
# the first refresh and we do not want to delay
# startup or cause a timeout so we only refresh the
# updates if this is not a scheduled refresh and
# we are not doing the first refresh.
try:
await self.supervisor_client.reload_updates()
except SupervisorError as err:
_LOGGER.warning("Error on Supervisor API: %s", err)

homeassistant/components/hassio/diagnostics.py

@@ -10,8 +10,8 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from .const import ADDONS_COORDINATOR
from .coordinator import HassioDataUpdateCoordinator
from .const import ADDONS_COORDINATOR, COORDINATOR
from .coordinator import HassioAddOnDataUpdateCoordinator, HassioDataUpdateCoordinator
async def async_get_config_entry_diagnostics(
@@ -19,7 +19,8 @@ async def async_get_config_entry_diagnostics(
config_entry: ConfigEntry,
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator: HassioDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
coordinator: HassioDataUpdateCoordinator = hass.data[COORDINATOR]
addons_coordinator: HassioAddOnDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
device_registry = dr.async_get(hass)
entity_registry = er.async_get(hass)
@@ -50,5 +51,6 @@ async def async_get_config_entry_diagnostics(
return {
"coordinator_data": coordinator.data,
"addons_coordinator_data": addons_coordinator.data,
"devices": devices,
}

homeassistant/components/hassio/entity.py

@@ -21,17 +21,17 @@ from .const import (
KEY_TO_UPDATE_TYPES,
SUPERVISOR_CONTAINER,
)
from .coordinator import HassioDataUpdateCoordinator
from .coordinator import HassioAddOnDataUpdateCoordinator, HassioDataUpdateCoordinator
class HassioAddonEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
class HassioAddonEntity(CoordinatorEntity[HassioAddOnDataUpdateCoordinator]):
"""Base entity for a Hass.io add-on."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: HassioDataUpdateCoordinator,
coordinator: HassioAddOnDataUpdateCoordinator,
entity_description: EntityDescription,
addon: dict[str, Any],
) -> None:

homeassistant/components/hassio/sensor.py

@@ -19,6 +19,7 @@ from .const import (
ATTR_MEMORY_PERCENT,
ATTR_VERSION,
ATTR_VERSION_LATEST,
COORDINATOR,
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_HOST,
@@ -114,20 +115,21 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Sensor set up for Hass.io config entry."""
coordinator = hass.data[ADDONS_COORDINATOR]
addons_coordinator = hass.data[ADDONS_COORDINATOR]
entities: list[
HassioOSSensor | HassioAddonSensor | CoreSensor | SupervisorSensor | HostSensor
] = [
HassioAddonSensor(
addon=addon,
coordinator=coordinator,
coordinator=addons_coordinator,
entity_description=entity_description,
)
for addon in coordinator.data[DATA_KEY_ADDONS].values()
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
for entity_description in ADDON_ENTITY_DESCRIPTIONS
]
coordinator = hass.data[COORDINATOR]
entities.extend(
CoreSensor(
coordinator=coordinator,

homeassistant/components/hassio/update.py

@@ -24,6 +24,7 @@ from .const import (
ATTR_AUTO_UPDATE,
ATTR_VERSION,
ATTR_VERSION_LATEST,
COORDINATOR,
DATA_KEY_ADDONS,
DATA_KEY_CORE,
DATA_KEY_OS,
@@ -49,9 +50,9 @@ async def async_setup_entry(
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Supervisor update based on a config entry."""
coordinator = hass.data[ADDONS_COORDINATOR]
coordinator = hass.data[COORDINATOR]
entities = [
entities: list[UpdateEntity] = [
SupervisorSupervisorUpdateEntity(
coordinator=coordinator,
entity_description=ENTITY_DESCRIPTION,
@@ -62,15 +63,6 @@ async def async_setup_entry(
),
]
entities.extend(
SupervisorAddonUpdateEntity(
addon=addon,
coordinator=coordinator,
entity_description=ENTITY_DESCRIPTION,
)
for addon in coordinator.data[DATA_KEY_ADDONS].values()
)
if coordinator.is_hass_os:
entities.append(
SupervisorOSUpdateEntity(
@@ -79,6 +71,16 @@ async def async_setup_entry(
)
)
addons_coordinator = hass.data[ADDONS_COORDINATOR]
entities.extend(
SupervisorAddonUpdateEntity(
addon=addon,
coordinator=addons_coordinator,
entity_description=ENTITY_DESCRIPTION,
)
for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
)
async_add_entities(entities)