Mirror of https://github.com/home-assistant/core.git (synced 2025-11-22 01:06:59 +00:00)

Compare commits: 7 commits, instance-u ... hassio-spl
| Author | SHA1 | Date |
|---|---|---|
|  | fb68146bce |  |
|  | 072c570660 |  |
|  | be62023040 |  |
|  | b9c563538a |  |
|  | 0e78002c4a |  |
|  | e921373833 |  |
|  | ccc7eec253 |  |
```diff
@@ -92,6 +92,7 @@ from .const import (
     ATTR_LOCATION,
     ATTR_PASSWORD,
     ATTR_SLUG,
+    COORDINATOR,
     DATA_COMPONENT,
     DATA_CONFIG_STORE,
     DATA_CORE_INFO,
@@ -106,6 +107,7 @@ from .const import (
     HASSIO_UPDATE_INTERVAL,
 )
 from .coordinator import (
+    HassioAddOnDataUpdateCoordinator,
     HassioDataUpdateCoordinator,
     get_addons_info,
     get_addons_stats,  # noqa: F401
@@ -555,9 +557,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up a config entry."""
     dev_reg = dr.async_get(hass)

     coordinator = HassioDataUpdateCoordinator(hass, entry, dev_reg)
     await coordinator.async_config_entry_first_refresh()
-    hass.data[ADDONS_COORDINATOR] = coordinator
+    hass.data[COORDINATOR] = coordinator
+
+    addon_coordinator = HassioAddOnDataUpdateCoordinator(hass, entry, dev_reg)
+    await addon_coordinator.async_config_entry_first_refresh()
+    hass.data[ADDONS_COORDINATOR] = addon_coordinator

     def deprecated_setup_issue() -> None:
         os_info = get_os_info(hass)
```
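After this hunk, the config entry keeps two coordinators under separate `hass.data` keys. A minimal sketch of looking them up (the helper function is hypothetical, added only for illustration; the key values come from the constants hunk further down):

```python
from homeassistant.core import HomeAssistant

# Key values as introduced in the constants hunk below.
COORDINATOR = "hassio_coordinator"
ADDONS_COORDINATOR = "hassio_addons_coordinator"


def get_hassio_coordinators(hass: HomeAssistant) -> tuple:
    """Return the (core, add-on) coordinators stored by async_setup_entry.

    Hypothetical convenience helper; the platforms touched by this change read
    the two keys directly instead.
    """
    return hass.data[COORDINATOR], hass.data[ADDONS_COORDINATOR]
```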
```diff
@@ -41,15 +41,15 @@ async def async_setup_entry(
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Binary sensor set up for Hass.io config entry."""
-    coordinator = hass.data[ADDONS_COORDINATOR]
+    addons_coordinator = hass.data[ADDONS_COORDINATOR]

     async_add_entities(
         HassioAddonBinarySensor(
             addon=addon,
-            coordinator=coordinator,
+            coordinator=addons_coordinator,
             entity_description=entity_description,
         )
-        for addon in coordinator.data[DATA_KEY_ADDONS].values()
+        for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
         for entity_description in ADDON_ENTITY_DESCRIPTIONS
     )
```
```diff
@@ -71,6 +71,7 @@ EVENT_ISSUE_REMOVED = "issue_removed"

 UPDATE_KEY_SUPERVISOR = "supervisor"

+COORDINATOR = "hassio_coordinator"
 ADDONS_COORDINATOR = "hassio_addons_coordinator"


@@ -85,9 +86,11 @@ DATA_OS_INFO = "hassio_os_info"
 DATA_NETWORK_INFO = "hassio_network_info"
 DATA_SUPERVISOR_INFO = "hassio_supervisor_info"
 DATA_SUPERVISOR_STATS = "hassio_supervisor_stats"
+DATA_ADDONS = "hassio_addons"
 DATA_ADDONS_INFO = "hassio_addons_info"
 DATA_ADDONS_STATS = "hassio_addons_stats"
 HASSIO_UPDATE_INTERVAL = timedelta(minutes=5)
+HASSIO_ADDON_UPDATE_INTERVAL = timedelta(minutes=15)

 ATTR_AUTO_UPDATE = "auto_update"
 ATTR_VERSION = "version"
```
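The new constants also split the polling cadence: the existing 5-minute interval stays with the core/Supervisor/host data, while add-on data moves to a 15-minute interval. A plain-asyncio sketch of that idea, for illustration only (this is not the integration's code):

```python
import asyncio

HASSIO_UPDATE_INTERVAL = 5 * 60         # seconds, mirrors timedelta(minutes=5)
HASSIO_ADDON_UPDATE_INTERVAL = 15 * 60  # seconds, mirrors timedelta(minutes=15)


async def poll(name: str, interval: float) -> None:
    """Refresh `name` every `interval` seconds."""
    while True:
        print(f"refreshing {name}")
        await asyncio.sleep(interval)


async def main() -> None:
    # Two independent refresh loops, one per coordinator.
    await asyncio.gather(
        poll("core/supervisor/host", HASSIO_UPDATE_INTERVAL),
        poll("addons", HASSIO_ADDON_UPDATE_INTERVAL),
    )


if __name__ == "__main__":
    asyncio.run(main())
```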
```diff
@@ -30,6 +30,7 @@ from .const import (
     CONTAINER_INFO,
     CONTAINER_STATS,
     CORE_CONTAINER,
+    DATA_ADDONS,
     DATA_ADDONS_INFO,
     DATA_ADDONS_STATS,
     DATA_COMPONENT,
@@ -49,6 +50,7 @@ from .const import (
     DATA_SUPERVISOR_INFO,
     DATA_SUPERVISOR_STATS,
     DOMAIN,
+    HASSIO_ADDON_UPDATE_INTERVAL,
     HASSIO_UPDATE_INTERVAL,
     REQUEST_REFRESH_DELAY,
     SUPERVISOR_CONTAINER,
@@ -112,6 +114,16 @@ def get_network_info(hass: HomeAssistant) -> dict[str, Any] | None:
     return hass.data.get(DATA_NETWORK_INFO)


+@callback
+@bind_hass
+def get_addons(hass: HomeAssistant) -> dict[str, Any] | None:
+    """Return Addons info.
+
+    Async friendly.
+    """
+    return hass.data.get(DATA_ADDONS)
+
+
 @callback
 @bind_hass
 def get_addons_info(hass: HomeAssistant) -> dict[str, dict[str, Any]] | None:
@@ -279,8 +291,8 @@ def async_remove_addons_from_dev_reg(
         dev_reg.async_remove_device(dev.id)


-class HassioDataUpdateCoordinator(DataUpdateCoordinator):
-    """Class to retrieve Hass.io status."""
+class HassioAddOnDataUpdateCoordinator(DataUpdateCoordinator):
+    """Class to retrieve Hass.io Add-on status."""

     config_entry: ConfigEntry

@@ -293,7 +305,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
             _LOGGER,
             config_entry=config_entry,
             name=DOMAIN,
-            update_interval=HASSIO_UPDATE_INTERVAL,
+            update_interval=HASSIO_ADDON_UPDATE_INTERVAL,
             # We don't want an immediate refresh since we want to avoid
             # fetching the container stats right away and avoid hammering
             # the Supervisor API on startup
@@ -305,7 +317,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
         self.data = {}
         self.entry_id = config_entry.entry_id
         self.dev_reg = dev_reg
-        self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None
         self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict(
             lambda: defaultdict(set)
         )
@@ -321,7 +332,7 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
             raise UpdateFailed(f"Error on Supervisor API: {err}") from err

         new_data: dict[str, Any] = {}
-        supervisor_info = get_supervisor_info(self.hass) or {}
+        addons = get_addons(self.hass) or {}
         addons_info = get_addons_info(self.hass) or {}
         addons_stats = get_addons_stats(self.hass)
         store_data = get_store(self.hass)
@@ -345,37 +356,14 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
                     addon.get(ATTR_REPOSITORY), addon.get(ATTR_REPOSITORY, "")
                 ),
             }
-            for addon in supervisor_info.get("addons", [])
+            for addon in addons.get("addons", [])
         }
-        if self.is_hass_os:
-            new_data[DATA_KEY_OS] = get_os_info(self.hass)
-
-        new_data[DATA_KEY_CORE] = {
-            **(get_core_info(self.hass) or {}),
-            **get_core_stats(self.hass),
-        }
-        new_data[DATA_KEY_SUPERVISOR] = {
-            **supervisor_info,
-            **get_supervisor_stats(self.hass),
-        }
-        new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}

         # If this is the initial refresh, register all addons and return the dict
         if is_first_update:
             async_register_addons_in_dev_reg(
                 self.entry_id, self.dev_reg, new_data[DATA_KEY_ADDONS].values()
             )
-            async_register_core_in_dev_reg(
-                self.entry_id, self.dev_reg, new_data[DATA_KEY_CORE]
-            )
-            async_register_supervisor_in_dev_reg(
-                self.entry_id, self.dev_reg, new_data[DATA_KEY_SUPERVISOR]
-            )
-            async_register_host_in_dev_reg(self.entry_id, self.dev_reg)
-            if self.is_hass_os:
-                async_register_os_in_dev_reg(
-                    self.entry_id, self.dev_reg, new_data[DATA_KEY_OS]
-                )

         # Remove add-ons that are no longer installed from device registry
         supervisor_addon_devices = {
@@ -388,12 +376,6 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
         if stale_addons := supervisor_addon_devices - set(new_data[DATA_KEY_ADDONS]):
             async_remove_addons_from_dev_reg(self.dev_reg, stale_addons)

-        if not self.is_hass_os and (
-            dev := self.dev_reg.async_get_device(identifiers={(DOMAIN, "OS")})
-        ):
-            # Remove the OS device if it exists and the installation is not hassos
-            self.dev_reg.async_remove_device(dev.id)
-
         # If there are new add-ons, we should reload the config entry so we can
         # create new devices and entities. We can return an empty dict because
         # coordinator will be recreated.
@@ -419,23 +401,9 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
         container_updates = self._container_updates

         data = self.hass.data
-        hassio = self.hassio
-        updates = {
-            DATA_INFO: hassio.get_info(),
-            DATA_CORE_INFO: hassio.get_core_info(),
-            DATA_SUPERVISOR_INFO: hassio.get_supervisor_info(),
-            DATA_OS_INFO: hassio.get_os_info(),
-        }
-        if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
-            updates[DATA_CORE_STATS] = hassio.get_core_stats()
-        if CONTAINER_STATS in container_updates[SUPERVISOR_CONTAINER]:
-            updates[DATA_SUPERVISOR_STATS] = hassio.get_supervisor_stats()
-
-        results = await asyncio.gather(*updates.values())
-        for key, result in zip(updates, results, strict=False):
-            data[key] = result
+        data[DATA_ADDONS] = await self.hassio.get_addons()

-        _addon_data = data[DATA_SUPERVISOR_INFO].get("addons", [])
+        _addon_data = data[DATA_ADDONS].get("addons", [])
         all_addons: list[str] = []
         started_addons: list[str] = []
         for addon in _addon_data:
@@ -531,14 +499,159 @@ class HassioDataUpdateCoordinator(DataUpdateCoordinator):
     ) -> None:
         """Refresh data."""
         if not scheduled and not raise_on_auth_failed:
-            # Force refreshing updates for non-scheduled updates
+            # Force reloading add-on updates for non-scheduled
+            # updates.
+            #
             # If `raise_on_auth_failed` is set, it means this is
             # the first refresh and we do not want to delay
             # startup or cause a timeout so we only refresh the
             # updates if this is not a scheduled refresh and
             # we are not doing the first refresh.
             try:
-                await self.supervisor_client.refresh_updates()
+                await self.supervisor_client.store.reload()
+            except SupervisorError as err:
+                _LOGGER.warning("Error on Supervisor API: %s", err)
+
+        await super()._async_refresh(
+            log_failures, raise_on_auth_failed, scheduled, raise_on_entry_error
+        )
+
+
+class HassioDataUpdateCoordinator(DataUpdateCoordinator):
+    """Class to retrieve Hass.io status."""
+
+    config_entry: ConfigEntry
+
+    def __init__(
+        self, hass: HomeAssistant, config_entry: ConfigEntry, dev_reg: dr.DeviceRegistry
+    ) -> None:
+        """Initialize coordinator."""
+        super().__init__(
+            hass,
+            _LOGGER,
+            config_entry=config_entry,
+            name=DOMAIN,
+            update_interval=HASSIO_UPDATE_INTERVAL,
+            # We don't want an immediate refresh since we want to avoid
+            # fetching the container stats right away and avoid hammering
+            # the Supervisor API on startup
+            request_refresh_debouncer=Debouncer(
+                hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=False
+            ),
+        )
+        self.hassio = hass.data[DATA_COMPONENT]
+        self.data = {}
+        self.entry_id = config_entry.entry_id
+        self.dev_reg = dev_reg
+        self.is_hass_os = (get_info(self.hass) or {}).get("hassos") is not None
+        self._container_updates: defaultdict[str, dict[str, set[str]]] = defaultdict(
+            lambda: defaultdict(set)
+        )
+        self.supervisor_client = get_supervisor_client(hass)
+
+    async def _async_update_data(self) -> dict[str, Any]:
+        """Update data via library."""
+        is_first_update = not self.data
+
+        try:
+            await self.force_data_refresh(is_first_update)
+        except HassioAPIError as err:
+            raise UpdateFailed(f"Error on Supervisor API: {err}") from err
+
+        new_data: dict[str, Any] = {}
+        supervisor_info = get_supervisor_info(self.hass) or {}
+
+        if self.is_hass_os:
+            new_data[DATA_KEY_OS] = get_os_info(self.hass)
+
+        new_data[DATA_KEY_CORE] = {
+            **(get_core_info(self.hass) or {}),
+            **get_core_stats(self.hass),
+        }
+        new_data[DATA_KEY_SUPERVISOR] = {
+            **supervisor_info,
+            **get_supervisor_stats(self.hass),
+        }
+        new_data[DATA_KEY_HOST] = get_host_info(self.hass) or {}

+        # If this is the initial refresh, register all main components
+        if is_first_update:
+            async_register_core_in_dev_reg(
+                self.entry_id, self.dev_reg, new_data[DATA_KEY_CORE]
+            )
+            async_register_supervisor_in_dev_reg(
+                self.entry_id, self.dev_reg, new_data[DATA_KEY_SUPERVISOR]
+            )
+            async_register_host_in_dev_reg(self.entry_id, self.dev_reg)
+            if self.is_hass_os:
+                async_register_os_in_dev_reg(
+                    self.entry_id, self.dev_reg, new_data[DATA_KEY_OS]
+                )
+
+        if not self.is_hass_os and (
+            dev := self.dev_reg.async_get_device(identifiers={(DOMAIN, "OS")})
+        ):
+            # Remove the OS device if it exists and the installation is not hassos
+            self.dev_reg.async_remove_device(dev.id)
+
+        return new_data
+
+    async def force_data_refresh(self, first_update: bool) -> None:
+        """Force update of the addon info."""
+        container_updates = self._container_updates
+
+        data = self.hass.data
+        hassio = self.hassio
+        updates = {
+            DATA_INFO: hassio.get_info(),
+            DATA_CORE_INFO: hassio.get_core_info(),
+            DATA_SUPERVISOR_INFO: hassio.get_supervisor_info(),
+            DATA_OS_INFO: hassio.get_os_info(),
+        }
+        if CONTAINER_STATS in container_updates[CORE_CONTAINER]:
+            updates[DATA_CORE_STATS] = hassio.get_core_stats()
+        if CONTAINER_STATS in container_updates[SUPERVISOR_CONTAINER]:
+            updates[DATA_SUPERVISOR_STATS] = hassio.get_supervisor_stats()
+
+        results = await asyncio.gather(*updates.values())
+        for key, result in zip(updates, results, strict=False):
+            data[key] = result
+
+    @callback
+    def async_enable_container_updates(
+        self, slug: str, entity_id: str, types: set[str]
+    ) -> CALLBACK_TYPE:
+        """Enable updates for an add-on."""
+        enabled_updates = self._container_updates[slug]
+        for key in types:
+            enabled_updates[key].add(entity_id)
+
+        @callback
+        def _remove() -> None:
+            for key in types:
+                enabled_updates[key].remove(entity_id)
+
+        return _remove
+
+    async def _async_refresh(
+        self,
+        log_failures: bool = True,
+        raise_on_auth_failed: bool = False,
+        scheduled: bool = False,
+        raise_on_entry_error: bool = False,
+    ) -> None:
+        """Refresh data."""
+        if not scheduled and not raise_on_auth_failed:
+            # Force reloading updates of main components for
+            # non-scheduled updates.
+            #
+            # If `raise_on_auth_failed` is set, it means this is
+            # the first refresh and we do not want to delay
+            # startup or cause a timeout so we only refresh the
+            # updates if this is not a scheduled refresh and
+            # we are not doing the first refresh.
+            try:
+                await self.supervisor_client.reload_updates()
             except SupervisorError as err:
                 _LOGGER.warning("Error on Supervisor API: %s", err)
```
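After the split, `HassioAddOnDataUpdateCoordinator.data` carries only the add-on bucket, while `HassioDataUpdateCoordinator.data` keeps the core, Supervisor, OS, and host buckets. An illustrative sketch of the two shapes, pieced together from the hunks above and the test fixtures below (the values are examples, not captured output):

```python
# Rough shape of HassioAddOnDataUpdateCoordinator.data (DATA_KEY_ADDONS == "addons").
addon_coordinator_data = {
    "addons": {
        "test": {
            "name": "test",
            "slug": "test",
            "state": "started",
            "version": "2.0.0",
            "version_latest": "2.0.1",
            "update_available": True,
            "repository": "core",
            # ...plus the info/stats fields the coordinator merges in
        },
    },
}

# Rough shape of HassioDataUpdateCoordinator.data.
core_coordinator_data = {
    "core": {"version": "1.0.0"},        # get_core_info() merged with get_core_stats()
    "supervisor": {"version": "1.0.0"},  # get_supervisor_info() merged with get_supervisor_stats()
    "os": {"version": "1.0.0"},          # only present when running Home Assistant OS
    "host": {},                          # get_host_info()
}
```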
```diff
@@ -10,8 +10,8 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr, entity_registry as er

-from .const import ADDONS_COORDINATOR
-from .coordinator import HassioDataUpdateCoordinator
+from .const import ADDONS_COORDINATOR, COORDINATOR
+from .coordinator import HassioAddOnDataUpdateCoordinator, HassioDataUpdateCoordinator


 async def async_get_config_entry_diagnostics(
@@ -19,7 +19,8 @@ async def async_get_config_entry_diagnostics(
     config_entry: ConfigEntry,
 ) -> dict[str, Any]:
     """Return diagnostics for a config entry."""
-    coordinator: HassioDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
+    coordinator: HassioDataUpdateCoordinator = hass.data[COORDINATOR]
+    addons_coordinator: HassioAddOnDataUpdateCoordinator = hass.data[ADDONS_COORDINATOR]
     device_registry = dr.async_get(hass)
     entity_registry = er.async_get(hass)

@@ -50,5 +51,6 @@ async def async_get_config_entry_diagnostics(

     return {
         "coordinator_data": coordinator.data,
+        "addons_coordinator_data": addons_coordinator.data,
         "devices": devices,
     }
```
```diff
@@ -21,17 +21,17 @@ from .const import (
     KEY_TO_UPDATE_TYPES,
     SUPERVISOR_CONTAINER,
 )
-from .coordinator import HassioDataUpdateCoordinator
+from .coordinator import HassioAddOnDataUpdateCoordinator, HassioDataUpdateCoordinator


-class HassioAddonEntity(CoordinatorEntity[HassioDataUpdateCoordinator]):
+class HassioAddonEntity(CoordinatorEntity[HassioAddOnDataUpdateCoordinator]):
     """Base entity for a Hass.io add-on."""

     _attr_has_entity_name = True

     def __init__(
         self,
-        coordinator: HassioDataUpdateCoordinator,
+        coordinator: HassioAddOnDataUpdateCoordinator,
         entity_description: EntityDescription,
         addon: dict[str, Any],
     ) -> None:
```
```diff
@@ -226,6 +226,14 @@ class HassIO:
         """
         return self.send_command("/ingress/panels", method="get")

+    @api_data
+    def get_addons(self) -> Coroutine:
+        """Return data installed Add-ons.
+
+        This method returns a coroutine.
+        """
+        return self.send_command("/addons", method="get")
+
     @_api_bool
     async def update_hass_api(
         self, http_config: dict[str, Any], refresh_token: RefreshToken
```
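The new `HassIO.get_addons()` wraps the Supervisor `/addons` endpoint, and the add-on coordinator reads the `"addons"` list out of its result (`data[DATA_ADDONS].get("addons", [])` in the coordinator hunk above). A sketch of the payload it is expected to return, mirroring the fixtures added to the tests below:

```python
# Example /addons response body as mocked in the tests; get_addons() resolves
# to the payload's "data" portion, so callers receive {"addons": [...]}.
example_addons_response = {
    "result": "ok",
    "data": {
        "addons": [
            {
                "name": "test",
                "slug": "test",
                "state": "started",
                "update_available": True,
                "version": "2.0.0",
                "version_latest": "2.0.1",
                "repository": "core",
                "url": "https://github.com/home-assistant/addons/test",
            },
        ],
    },
}

installed_addons = example_addons_response["data"]["addons"]
```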
```diff
@@ -19,6 +19,7 @@ from .const import (
     ATTR_MEMORY_PERCENT,
     ATTR_VERSION,
     ATTR_VERSION_LATEST,
+    COORDINATOR,
     DATA_KEY_ADDONS,
     DATA_KEY_CORE,
     DATA_KEY_HOST,
@@ -114,20 +115,21 @@ async def async_setup_entry(
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Sensor set up for Hass.io config entry."""
-    coordinator = hass.data[ADDONS_COORDINATOR]
+    addons_coordinator = hass.data[ADDONS_COORDINATOR]

     entities: list[
         HassioOSSensor | HassioAddonSensor | CoreSensor | SupervisorSensor | HostSensor
     ] = [
         HassioAddonSensor(
             addon=addon,
-            coordinator=coordinator,
+            coordinator=addons_coordinator,
             entity_description=entity_description,
         )
-        for addon in coordinator.data[DATA_KEY_ADDONS].values()
+        for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
         for entity_description in ADDON_ENTITY_DESCRIPTIONS
     ]

+    coordinator = hass.data[COORDINATOR]
     entities.extend(
         CoreSensor(
             coordinator=coordinator,
```
```diff
@@ -24,6 +24,7 @@ from .const import (
     ATTR_AUTO_UPDATE,
     ATTR_VERSION,
     ATTR_VERSION_LATEST,
+    COORDINATOR,
     DATA_KEY_ADDONS,
     DATA_KEY_CORE,
     DATA_KEY_OS,
@@ -49,9 +50,9 @@ async def async_setup_entry(
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up Supervisor update based on a config entry."""
-    coordinator = hass.data[ADDONS_COORDINATOR]
+    coordinator = hass.data[COORDINATOR]

-    entities = [
+    entities: list[UpdateEntity] = [
         SupervisorSupervisorUpdateEntity(
             coordinator=coordinator,
             entity_description=ENTITY_DESCRIPTION,
@@ -62,15 +63,6 @@ async def async_setup_entry(
         ),
     ]

-    entities.extend(
-        SupervisorAddonUpdateEntity(
-            addon=addon,
-            coordinator=coordinator,
-            entity_description=ENTITY_DESCRIPTION,
-        )
-        for addon in coordinator.data[DATA_KEY_ADDONS].values()
-    )
-
     if coordinator.is_hass_os:
         entities.append(
             SupervisorOSUpdateEntity(
@@ -79,6 +71,16 @@ async def async_setup_entry(
             )
         )

+    addons_coordinator = hass.data[ADDONS_COORDINATOR]
+    entities.extend(
+        SupervisorAddonUpdateEntity(
+            addon=addon,
+            coordinator=addons_coordinator,
+            entity_description=ENTITY_DESCRIPTION,
+        )
+        for addon in addons_coordinator.data[DATA_KEY_ADDONS].values()
+    )
+
     async_add_entities(entities)
```
```diff
@@ -106,6 +106,38 @@ def mock_all(
             },
         },
     )
+    aioclient_mock.get(
+        "http://127.0.0.1/addons",
+        json={
+            "result": "ok",
+            "data": {
+                "addons": [
+                    {
+                        "name": "test",
+                        "slug": "test",
+                        "state": "started",
+                        "update_available": True,
+                        "icon": False,
+                        "version": "2.0.0",
+                        "version_latest": "2.0.1",
+                        "repository": "core",
+                        "url": "https://github.com/home-assistant/addons/test",
+                    },
+                    {
+                        "name": "test2",
+                        "slug": "test2",
+                        "state": "stopped",
+                        "update_available": False,
+                        "icon": False,
+                        "version": "3.1.0",
+                        "version_latest": "3.2.0",
+                        "repository": "core",
+                        "url": "https://github.com",
+                    },
+                ],
+            },
+        },
+    )
     aioclient_mock.get(
         "http://127.0.0.1/core/stats",
         json={
```
```diff
@@ -109,6 +109,38 @@ def mock_all(
             },
         },
     )
+    aioclient_mock.get(
+        "http://127.0.0.1/addons",
+        json={
+            "result": "ok",
+            "data": {
+                "addons": [
+                    {
+                        "name": "test",
+                        "slug": "test",
+                        "state": "started",
+                        "update_available": True,
+                        "icon": False,
+                        "version": "2.0.0",
+                        "version_latest": "2.0.1",
+                        "repository": "core",
+                        "url": "https://github.com/home-assistant/addons/test",
+                    },
+                    {
+                        "name": "test2",
+                        "slug": "test2",
+                        "state": "stopped",
+                        "update_available": False,
+                        "icon": True,
+                        "version": "3.1.0",
+                        "version_latest": "3.1.0",
+                        "repository": "core",
+                        "url": "https://github.com",
+                    },
+                ],
+            },
+        },
+    )
     aioclient_mock.get(
         "http://127.0.0.1/core/stats",
         json={
@@ -177,10 +209,10 @@ async def test_diagnostics(
         hass, hass_client, config_entry
     )

-    assert "addons" in diagnostics["coordinator_data"]
     assert "core" in diagnostics["coordinator_data"]
     assert "supervisor" in diagnostics["coordinator_data"]
     assert "os" in diagnostics["coordinator_data"]
     assert "host" in diagnostics["coordinator_data"]
+    assert "addons" in diagnostics["addons_coordinator_data"]

     assert len(diagnostics["devices"]) == 6
```
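With the second coordinator, the diagnostics payload gains a matching top-level bucket. Roughly (an illustrative shape matching the asserts above, not real output):

```python
diagnostics = {
    "coordinator_data": {"core": {}, "supervisor": {}, "os": {}, "host": {}},
    "addons_coordinator_data": {"addons": {}},
    "devices": [],  # six registered devices in this test setup
}
```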
```diff
@@ -142,6 +142,38 @@ def mock_all(
             },
         },
     )
+    aioclient_mock.get(
+        "http://127.0.0.1/addons",
+        json={
+            "result": "ok",
+            "data": {
+                "addons": [
+                    {
+                        "name": "test",
+                        "slug": "test",
+                        "state": "started",
+                        "update_available": True,
+                        "icon": False,
+                        "version": "2.0.0",
+                        "version_latest": "2.0.1",
+                        "repository": "core",
+                        "url": "https://github.com/home-assistant/addons/test",
+                    },
+                    {
+                        "name": "test2",
+                        "slug": "test2",
+                        "state": "stopped",
+                        "update_available": False,
+                        "icon": True,
+                        "version": "3.1.0",
+                        "version_latest": "3.1.0",
+                        "repository": "core",
+                        "url": "https://github.com",
+                    },
+                ],
+            },
+        },
+    )
     aioclient_mock.get(
         "http://127.0.0.1/core/stats",
         json={
@@ -232,7 +264,7 @@ async def test_setup_api_ping(
     await hass.async_block_till_done()

     assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
     assert get_core_info(hass)["version_latest"] == "1.0.0"
     assert is_hassio(hass)

@@ -279,7 +311,7 @@ async def test_setup_api_push_api_data(
     await hass.async_block_till_done()

     assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
     assert not aioclient_mock.mock_calls[0][2]["ssl"]
     assert aioclient_mock.mock_calls[0][2]["port"] == 9999
     assert "watchdog" not in aioclient_mock.mock_calls[0][2]
@@ -300,7 +332,7 @@ async def test_setup_api_push_api_data_server_host(
     await hass.async_block_till_done()

     assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
     assert not aioclient_mock.mock_calls[0][2]["ssl"]
     assert aioclient_mock.mock_calls[0][2]["port"] == 9999
     assert not aioclient_mock.mock_calls[0][2]["watchdog"]
@@ -321,7 +353,7 @@ async def test_setup_api_push_api_data_default(
     await hass.async_block_till_done()

     assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
     assert not aioclient_mock.mock_calls[0][2]["ssl"]
     assert aioclient_mock.mock_calls[0][2]["port"] == 8123
     refresh_token = aioclient_mock.mock_calls[0][2]["refresh_token"]
@@ -402,7 +434,7 @@ async def test_setup_api_existing_hassio_user(
     await hass.async_block_till_done()

     assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
     assert not aioclient_mock.mock_calls[0][2]["ssl"]
     assert aioclient_mock.mock_calls[0][2]["port"] == 8123
     assert aioclient_mock.mock_calls[0][2]["refresh_token"] == token.token
@@ -421,7 +453,7 @@ async def test_setup_core_push_config(
     await hass.async_block_till_done()

     assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
     assert aioclient_mock.mock_calls[1][2]["timezone"] == "testzone"

     with patch("homeassistant.util.dt.set_default_time_zone"):
@@ -445,7 +477,7 @@ async def test_setup_hassio_no_additional_data(
     await hass.async_block_till_done()

     assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
     assert aioclient_mock.mock_calls[-1][3]["Authorization"] == "Bearer 123456"


@@ -527,14 +559,14 @@ async def test_service_calls(
     )
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 22
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 23
     assert aioclient_mock.mock_calls[-1][2] == "test"

     await hass.services.async_call("hassio", "host_shutdown", {})
     await hass.services.async_call("hassio", "host_reboot", {})
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 24
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 25

     await hass.services.async_call("hassio", "backup_full", {})
     await hass.services.async_call(
@@ -549,7 +581,7 @@ async def test_service_calls(
     )
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 26
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 27
     assert aioclient_mock.mock_calls[-1][2] == {
         "name": "2021-11-13 03:48:00",
         "homeassistant": True,
@@ -574,7 +606,7 @@ async def test_service_calls(
     )
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 28
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 29
     assert aioclient_mock.mock_calls[-1][2] == {
         "addons": ["test"],
         "folders": ["ssl"],
@@ -593,7 +625,7 @@ async def test_service_calls(
     )
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 29
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 30
     assert aioclient_mock.mock_calls[-1][2] == {
         "name": "backup_name",
         "location": "backup_share",
@@ -609,7 +641,7 @@ async def test_service_calls(
     )
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 30
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 31
     assert aioclient_mock.mock_calls[-1][2] == {
         "name": "2021-11-13 03:48:00",
         "location": None,
@@ -628,7 +660,7 @@ async def test_service_calls(
     )
     await hass.async_block_till_done()

-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 32
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 33
     assert aioclient_mock.mock_calls[-1][2] == {
         "name": "2021-11-13 11:48:00",
         "location": None,
@@ -683,7 +715,7 @@ async def test_addon_service_call_with_complex_slug(
     with (
         patch.dict(os.environ, MOCK_ENVIRON),
         patch(
-            "homeassistant.components.hassio.HassIO.get_supervisor_info",
+            "homeassistant.components.hassio.HassIO.get_addons",
            return_value=supervisor_mock_data,
         ),
     ):
@@ -845,11 +877,11 @@ async def test_device_registry_calls(
     ):
         async_fire_time_changed(hass, dt_util.now() + timedelta(hours=1))
         await hass.async_block_till_done(wait_background_tasks=True)
-        assert len(device_registry.devices) == 5
+        assert len(device_registry.devices) == 6

         async_fire_time_changed(hass, dt_util.now() + timedelta(hours=2))
         await hass.async_block_till_done(wait_background_tasks=True)
-        assert len(device_registry.devices) == 5
+        assert len(device_registry.devices) == 6

     supervisor_mock_data = {
         "version": "1.0.0",
@@ -903,7 +935,7 @@ async def test_device_registry_calls(
     ):
         async_fire_time_changed(hass, dt_util.now() + timedelta(hours=3))
         await hass.async_block_till_done()
-        assert len(device_registry.devices) == 5
+        assert len(device_registry.devices) == 6


 @pytest.mark.usefixtures("addon_installed")
@@ -919,13 +951,13 @@ async def test_coordinator_updates(
     await hass.async_block_till_done()

     # Initial refresh, no update refresh call
-    supervisor_client.refresh_updates.assert_not_called()
+    supervisor_client.reload_updates.assert_not_called()

     async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20))
     await hass.async_block_till_done()

     # Scheduled refresh, no update refresh call
-    supervisor_client.refresh_updates.assert_not_called()
+    supervisor_client.reload_updates.assert_not_called()

     await hass.services.async_call(
         "homeassistant",
@@ -940,15 +972,15 @@ async def test_coordinator_updates(
     )

     # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer
-    supervisor_client.refresh_updates.assert_not_called()
+    supervisor_client.reload_updates.assert_not_called()
     async_fire_time_changed(
         hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY)
     )
     await hass.async_block_till_done()
-    supervisor_client.refresh_updates.assert_called_once()
+    supervisor_client.reload_updates.assert_called_once()

-    supervisor_client.refresh_updates.reset_mock()
-    supervisor_client.refresh_updates.side_effect = SupervisorError("Unknown")
+    supervisor_client.reload_updates.reset_mock()
+    supervisor_client.reload_updates.side_effect = SupervisorError("Unknown")
     await hass.services.async_call(
         "homeassistant",
         "update_entity",
@@ -965,7 +997,7 @@ async def test_coordinator_updates(
         hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY)
     )
     await hass.async_block_till_done()
-    supervisor_client.refresh_updates.assert_called_once()
+    supervisor_client.reload_updates.assert_called_once()
     assert "Error on Supervisor API: Unknown" in caplog.text


@@ -983,7 +1015,7 @@ async def test_coordinator_updates_stats_entities_enabled(
     assert await hass.config_entries.async_setup(config_entry.entry_id)
     await hass.async_block_till_done()
     # Initial refresh without stats
-    supervisor_client.refresh_updates.assert_not_called()
+    supervisor_client.reload_updates.assert_not_called()

     # Refresh with stats once we know which ones are needed
     async_fire_time_changed(
@@ -991,12 +1023,12 @@ async def test_coordinator_updates_stats_entities_enabled(
     )
     await hass.async_block_till_done()

-    supervisor_client.refresh_updates.assert_called_once()
+    supervisor_client.reload_updates.assert_called_once()

-    supervisor_client.refresh_updates.reset_mock()
+    supervisor_client.reload_updates.reset_mock()
     async_fire_time_changed(hass, dt_util.now() + timedelta(minutes=20))
     await hass.async_block_till_done()
-    supervisor_client.refresh_updates.assert_not_called()
+    supervisor_client.reload_updates.assert_not_called()

     await hass.services.async_call(
         "homeassistant",
@@ -1009,7 +1041,7 @@ async def test_coordinator_updates_stats_entities_enabled(
         },
         blocking=True,
     )
-    supervisor_client.refresh_updates.assert_not_called()
+    supervisor_client.reload_updates.assert_not_called()

     # There is a REQUEST_REFRESH_DELAYs cooldown on the debouncer
     async_fire_time_changed(
@@ -1017,8 +1049,8 @@ async def test_coordinator_updates_stats_entities_enabled(
     )
     await hass.async_block_till_done()

-    supervisor_client.refresh_updates.reset_mock()
-    supervisor_client.refresh_updates.side_effect = SupervisorError("Unknown")
+    supervisor_client.reload_updates.reset_mock()
+    supervisor_client.reload_updates.side_effect = SupervisorError("Unknown")
     await hass.services.async_call(
         "homeassistant",
         "update_entity",
@@ -1035,7 +1067,7 @@ async def test_coordinator_updates_stats_entities_enabled(
         hass, dt_util.now() + timedelta(seconds=REQUEST_REFRESH_DELAY)
     )
     await hass.async_block_till_done()
-    supervisor_client.refresh_updates.assert_called_once()
+    supervisor_client.reload_updates.assert_called_once()
     assert "Error on Supervisor API: Unknown" in caplog.text


@@ -1074,7 +1106,7 @@ async def test_setup_hardware_integration(
     await hass.async_block_till_done(wait_background_tasks=True)

     assert result
-    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 18
+    assert aioclient_mock.call_count + len(supervisor_client.mock_calls) == 19
     assert len(mock_setup_entry.mock_calls) == 1

```
```diff
@@ -9,8 +9,11 @@ from freezegun.api import FrozenDateTimeFactory
 import pytest

 from homeassistant import config_entries
-from homeassistant.components.hassio import DOMAIN, HASSIO_UPDATE_INTERVAL
-from homeassistant.components.hassio.const import REQUEST_REFRESH_DELAY
+from homeassistant.components.hassio import DOMAIN
+from homeassistant.components.hassio.const import (
+    HASSIO_ADDON_UPDATE_INTERVAL,
+    REQUEST_REFRESH_DELAY,
+)
 from homeassistant.config_entries import ConfigEntryState
 from homeassistant.const import STATE_UNAVAILABLE
 from homeassistant.core import HomeAssistant
@@ -112,6 +115,38 @@ def _install_default_mocks(aioclient_mock: AiohttpClientMocker):
             },
         },
     )
+    aioclient_mock.get(
+        "http://127.0.0.1/addons",
+        json={
+            "result": "ok",
+            "data": {
+                "addons": [
+                    {
+                        "name": "test",
+                        "slug": "test",
+                        "state": "started",
+                        "update_available": True,
+                        "icon": False,
+                        "version": "2.0.0",
+                        "version_latest": "2.0.1",
+                        "repository": "core",
+                        "url": "https://github.com/home-assistant/addons/test",
+                    },
+                    {
+                        "name": "test2",
+                        "slug": "test2",
+                        "state": "stopped",
+                        "update_available": False,
+                        "icon": False,
+                        "version": "3.1.0",
+                        "version_latest": "3.2.0",
+                        "repository": "core",
+                        "url": "https://github.com",
+                    },
+                ],
+            },
+        },
+    )
     aioclient_mock.get(
         "http://127.0.0.1/core/stats",
         json={
@@ -259,7 +294,7 @@ async def test_stats_addon_sensor(
     _install_default_mocks(aioclient_mock)
     addon_stats.side_effect = SupervisorError

-    freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1))
+    freezer.tick(HASSIO_ADDON_UPDATE_INTERVAL + timedelta(seconds=1))
     async_fire_time_changed(hass)
     await hass.async_block_till_done(wait_background_tasks=True)

@@ -269,7 +304,7 @@ async def test_stats_addon_sensor(
     _install_default_mocks(aioclient_mock)
     addon_stats.side_effect = None

-    freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1))
+    freezer.tick(HASSIO_ADDON_UPDATE_INTERVAL + timedelta(seconds=1))
     async_fire_time_changed(hass)
     await hass.async_block_till_done(wait_background_tasks=True)

@@ -285,13 +320,13 @@ async def test_stats_addon_sensor(
     assert entity_registry.async_get(entity_id).disabled_by is None

     # The config entry just reloaded, so we need to wait for the next update
-    freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1))
+    freezer.tick(HASSIO_ADDON_UPDATE_INTERVAL + timedelta(seconds=1))
     async_fire_time_changed(hass)
     await hass.async_block_till_done(wait_background_tasks=True)

     assert hass.states.get(entity_id) is not None

-    freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1))
+    freezer.tick(HASSIO_ADDON_UPDATE_INTERVAL + timedelta(seconds=1))
     async_fire_time_changed(hass)
     await hass.async_block_till_done(wait_background_tasks=True)
     # Verify that the entity have the expected state.
@@ -302,7 +337,7 @@ async def test_stats_addon_sensor(
     _install_default_mocks(aioclient_mock)
     addon_stats.side_effect = SupervisorError

-    freezer.tick(HASSIO_UPDATE_INTERVAL + timedelta(seconds=1))
+    freezer.tick(HASSIO_ADDON_UPDATE_INTERVAL + timedelta(seconds=1))
     async_fire_time_changed(hass)
     await hass.async_block_till_done(wait_background_tasks=True)

```
```diff
@@ -130,6 +130,38 @@ def mock_all(
             },
         },
     )
+    aioclient_mock.get(
+        "http://127.0.0.1/addons",
+        json={
+            "result": "ok",
+            "data": {
+                "addons": [
+                    {
+                        "name": "test",
+                        "slug": "test",
+                        "state": "started",
+                        "update_available": True,
+                        "icon": False,
+                        "version": "2.0.0",
+                        "version_latest": "2.0.1",
+                        "repository": "core",
+                        "url": "https://github.com/home-assistant/addons/test",
+                    },
+                    {
+                        "name": "test2",
+                        "slug": "test2",
+                        "state": "stopped",
+                        "update_available": False,
+                        "icon": True,
+                        "version": "3.1.0",
+                        "version_latest": "3.1.0",
+                        "repository": "core",
+                        "url": "https://github.com",
+                    },
+                ],
+            },
+        },
+    )
     aioclient_mock.get(
         "http://127.0.0.1/core/stats",
         json={
```