Send progress updates during image pull for install/update (#6102)
* Send progress updates during image pull for install/update
* Add extra to tests about job APIs
* Sent out of date progress to sentry and combine done event
* Pulling container image layer
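The core of the change is a per-layer progress model: each "Pulling container image layer" sub-job treats downloading as the first half of its 0-100 progress and extracting as the second half, with the remaining statuses pinned to fixed values. A minimal sketch of that mapping follows; it is illustrative only, not code from this commit, and the function name and signature are assumptions.

def layer_progress(status: str, current: int | None, total: int | None) -> float:
    """Map one docker pull log entry for a layer onto a 0-100 scale.

    Assumes the 50/50 download/extract split described in this commit.
    """
    if status == "Downloading" and current and total:
        return 50 * current / total
    if status in ("Verifying Checksum", "Download complete"):
        return 50.0
    if status == "Extracting" and current and total:
        return 50 + 50 * current / total
    if status == "Pull complete":
        return 100.0
    return 0.0  # "Pulling fs layer" and any untracked status


# Example: roughly 62% done once extraction is a quarter of the way through.
print(layer_progress("Extracting", 109_120_220, 436_480_882))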
@@ -1,7 +1,10 @@
 """Docker constants."""
 
-from enum import StrEnum
+from contextlib import suppress
+from enum import Enum, StrEnum
+from functools import total_ordering
 from pathlib import PurePath
+from typing import Self, cast
 
 from docker.types import Mount
 
@@ -67,6 +70,51 @@ class PropagationMode(StrEnum):
     RSLAVE = "rslave"
 
 
+@total_ordering
+class PullImageLayerStage(Enum):
+    """Job stages for pulling an image layer.
+
+    These are a subset of the statuses in a docker pull image log. They
+    are the standardized ones that are the most useful to us.
+    """
+
+    PULLING_FS_LAYER = 1, "Pulling fs layer"
+    DOWNLOADING = 2, "Downloading"
+    VERIFYING_CHECKSUM = 3, "Verifying Checksum"
+    DOWNLOAD_COMPLETE = 4, "Download complete"
+    EXTRACTING = 5, "Extracting"
+    PULL_COMPLETE = 6, "Pull complete"
+
+    def __init__(self, order: int, status: str) -> None:
+        """Set fields from values."""
+        self.order = order
+        self.status = status
+
+    def __eq__(self, value: object, /) -> bool:
+        """Check equality, allow StrEnum style comparisons on status."""
+        with suppress(AttributeError):
+            return self.status == cast(PullImageLayerStage, value).status
+        return self.status == value
+
+    def __lt__(self, other: object) -> bool:
+        """Order instances."""
+        with suppress(AttributeError):
+            return self.order < cast(PullImageLayerStage, other).order
+        return False
+
+    def __hash__(self) -> int:
+        """Hash instance."""
+        return hash(self.status)
+
+    @classmethod
+    def from_status(cls, status: str) -> Self | None:
+        """Return stage instance from pull log status."""
+        for i in cls:
+            if i.status == status:
+                return i
+        return None
+
+
 ENV_TIME = "TZ"
 ENV_TOKEN = "SUPERVISOR_TOKEN"
 ENV_TOKEN_OLD = "HASSIO_TOKEN"
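The enum above carries both a comparable order and the raw docker status string, so log handlers can look a stage up from a status line and guard against out-of-order events. A short usage sketch under that definition, illustrative and not part of the diff:

stage = PullImageLayerStage.from_status("Verifying Checksum")

# __eq__ falls back to comparing against plain strings, StrEnum-style
assert stage == "Verifying Checksum"

# @total_ordering plus __lt__ compares by the numeric order field
assert PullImageLayerStage.DOWNLOADING < stage < PullImageLayerStage.EXTRACTING

# Statuses outside the tracked subset (e.g. "Waiting") simply return None
assert PullImageLayerStage.from_status("Waiting") is None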
@@ -19,6 +19,7 @@ from docker.models.containers import Container
 from docker.models.images import Image
 import requests
 
+from ..bus import EventListener
 from ..const import (
     ATTR_PASSWORD,
     ATTR_REGISTRY,
@@ -35,17 +36,19 @@ from ..exceptions import (
     DockerAPIError,
     DockerError,
     DockerJobError,
+    DockerLogOutOfOrder,
     DockerNotFound,
     DockerRequestError,
     DockerTrustError,
 )
+from ..jobs import SupervisorJob
 from ..jobs.const import JOB_GROUP_DOCKER_INTERFACE, JobConcurrency
 from ..jobs.decorator import Job
 from ..jobs.job_group import JobGroup
 from ..resolution.const import ContextType, IssueType, SuggestionType
 from ..utils.sentry import async_capture_exception
-from .const import ContainerState, RestartPolicy
-from .manager import CommandReturn
+from .const import ContainerState, PullImageLayerStage, RestartPolicy
+from .manager import CommandReturn, PullLogEntry
 from .monitor import DockerContainerStateEvent
 from .stats import DockerStats
 
@@ -217,6 +220,106 @@ class DockerInterface(JobGroup, ABC):
 
         await self.sys_run_in_executor(self.sys_docker.docker.login, **credentials)
 
+    def _process_pull_image_log(self, job_id: str, reference: PullLogEntry) -> None:
+        """Process events fired from a docker while pulling an image, filtered to a given job id."""
+        if (
+            reference.job_id != job_id
+            or not reference.id
+            or not reference.status
+            or not (stage := PullImageLayerStage.from_status(reference.status))
+        ):
+            return
+
+        # Pulling FS Layer is our marker for a layer that needs to be downloaded and extracted. Otherwise it already exists and we can ignore
+        job: SupervisorJob | None = None
+        if stage == PullImageLayerStage.PULLING_FS_LAYER:
+            job = self.sys_jobs.new_job(
+                name="Pulling container image layer",
+                initial_stage=stage.status,
+                reference=reference.id,
+                parent_id=job_id,
+            )
+            job.done = False
+            return
+
+        # Find our sub job to update details of
+        for j in self.sys_jobs.jobs:
+            if j.parent_id == job_id and j.reference == reference.id:
+                job = j
+                break
+
+        # This likely only occurs if the logs came in out of sync and we got progress before the Pulling FS Layer one
+        if not job:
+            raise DockerLogOutOfOrder(
+                f"Received pull image log with status {reference.status} for image id {reference.id} and parent job {job_id} but could not find a matching job, skipping",
+                _LOGGER.debug,
+            )
+
+        # Hopefully these come in order but if they sometimes get out of sync, avoid accidentally going backwards
+        # If it happens a lot though we may need to reconsider the value of this feature
+        if job.done:
+            raise DockerLogOutOfOrder(
+                f"Received pull image log with status {reference.status} for job {job.uuid} but job was done, skipping",
+                _LOGGER.debug,
+            )
+
+        if job.stage and stage < PullImageLayerStage.from_status(job.stage):
+            raise DockerLogOutOfOrder(
+                f"Received pull image log with status {reference.status} for job {job.uuid} but job was already on stage {job.stage}, skipping",
+                _LOGGER.debug,
+            )
+
+        # For progress calculation we assume downloading and extracting are each 50% of the time and other stages negligible
+        progress = job.progress
+        match stage:
+            case PullImageLayerStage.DOWNLOADING | PullImageLayerStage.EXTRACTING:
+                if (
+                    reference.progress_detail
+                    and reference.progress_detail.current
+                    and reference.progress_detail.total
+                ):
+                    progress = 50 * (
+                        reference.progress_detail.current
+                        / reference.progress_detail.total
+                    )
+                    if stage == PullImageLayerStage.EXTRACTING:
+                        progress += 50
+            case (
+                PullImageLayerStage.VERIFYING_CHECKSUM
+                | PullImageLayerStage.DOWNLOAD_COMPLETE
+            ):
+                progress = 50
+            case PullImageLayerStage.PULL_COMPLETE:
+                progress = 100
+
+        if progress < job.progress:
+            raise DockerLogOutOfOrder(
+                f"Received pull image log with status {reference.status} for job {job.uuid} that implied progress was {progress} but current progress is {job.progress}, skipping",
+                _LOGGER.debug,
+            )
+
+        # Our filters have all passed. Time to update the job
+        # Only downloading and extracting have progress details. Use that to set extra
+        # We'll leave it around on other stages as the total bytes may be useful after that stage
+        if (
+            stage in {PullImageLayerStage.DOWNLOADING, PullImageLayerStage.EXTRACTING}
+            and reference.progress_detail
+        ):
+            job.update(
+                progress=progress,
+                stage=stage.status,
+                extra={
+                    "current": reference.progress_detail.current,
+                    "total": reference.progress_detail.total,
+                },
+            )
+        else:
+            job.update(
+                progress=progress,
+                stage=stage.status,
+                done=stage == PullImageLayerStage.PULL_COMPLETE,
+            )
+
     @Job(
         name="docker_interface_install",
         on_condition=DockerJobError,
@@ -235,6 +338,7 @@ class DockerInterface(JobGroup, ABC):
             raise ValueError("Cannot pull without an image!")
 
         image_arch = str(arch) if arch else self.sys_arch.supervisor
+        listener: EventListener | None = None
 
         _LOGGER.info("Downloading docker image %s with tag %s.", image, version)
         try:
@@ -242,9 +346,24 @@
             # Try login if we have defined credentials
             await self._docker_login(image)
 
+            job_id = self.sys_jobs.current.uuid
+
+            async def process_pull_image_log(reference: PullLogEntry) -> None:
+                try:
+                    self._process_pull_image_log(job_id, reference)
+                except DockerLogOutOfOrder as err:
+                    # Send all these to sentry. Missing a few progress updates
+                    # shouldn't matter to users but matters to us
+                    await async_capture_exception(err)
+
+            listener = self.sys_bus.register_event(
+                BusEvent.DOCKER_IMAGE_PULL_UPDATE, process_pull_image_log
+            )
+
             # Pull new image
             docker_image = await self.sys_run_in_executor(
                 self.sys_docker.pull_image,
+                self.sys_jobs.current.uuid,
                 image,
                 str(version),
                 platform=MAP_ARCH[image_arch],
@@ -297,6 +416,9 @@
                 f"Error happened on Content-Trust check for {image}:{version!s}: {err!s}",
                 _LOGGER.error,
             ) from err
+        finally:
+            if listener:
+                self.sys_bus.remove_listener(listener)
 
         self._meta = docker_image.attrs
@@ -129,6 +129,7 @@ class PullLogEntry:
     exactly matches "error". As that is redundant, skipping for now.
     """
 
+    job_id: str  # Not part of the docker object. Used to link log entries to supervisor jobs
     id: str | None = None
     status: str | None = None
     progress: str | None = None
@@ -136,9 +137,10 @@
     error: str | None = None
 
     @classmethod
-    def from_pull_log_dict(cls, value: dict[str, Any]) -> PullLogEntry:
+    def from_pull_log_dict(cls, job_id: str, value: dict[str, Any]) -> PullLogEntry:
         """Convert pull progress log dictionary into instance."""
         return cls(
+            job_id=job_id,
            id=value.get("id"),
            status=value.get("status"),
            progress=value.get("progress"),
@@ -376,7 +378,11 @@ class DockerAPI(CoreSysAttributes):
         return container
 
     def pull_image(
-        self, repository: str, tag: str = "latest", platform: str | None = None
+        self,
+        job_id: str,
+        repository: str,
+        tag: str = "latest",
+        platform: str | None = None,
     ) -> Image:
         """Pull the specified image and return it.
 
@@ -391,7 +397,7 @@
                 repository, tag=tag, platform=platform, stream=True, decode=True
             )
             for e in pull_log:
-                entry = PullLogEntry.from_pull_log_dict(e)
+                entry = PullLogEntry.from_pull_log_dict(job_id, e)
                 if entry.error:
                     raise entry.exception
                 self.sys_loop.call_soon_threadsafe(
@@ -556,6 +556,10 @@ class DockerNotFound(DockerError):
     """Docker object don't Exists."""
 
 
+class DockerLogOutOfOrder(DockerError):
+    """Raise when log from docker action was out of order."""
+
+
 class DockerNoSpaceOnDevice(DockerError):
     """Raise if a docker pull fails due to available space."""
@@ -20,6 +20,7 @@ from ..exceptions import HassioError, JobNotFound, JobStartException
 from ..homeassistant.const import WSEvent
 from ..utils.common import FileConfiguration
 from ..utils.dt import utcnow
+from ..utils.sentinel import DEFAULT
 from .const import ATTR_IGNORE_CONDITIONS, FILE_CONFIG_JOBS, JobCondition
 from .validate import SCHEMA_JOBS_CONFIG
 
@@ -103,14 +104,13 @@ class SupervisorJob:
     )
     parent_id: str | None = field(factory=_CURRENT_JOB.get, on_setattr=frozen)
     done: bool | None = field(init=False, default=None, on_setattr=_on_change)
-    on_change: Callable[["SupervisorJob", Attribute, Any], None] | None = field(
-        default=None, on_setattr=frozen
-    )
+    on_change: Callable[["SupervisorJob", Attribute, Any], None] | None = None
     internal: bool = field(default=False)
     errors: list[SupervisorJobError] = field(
         init=False, factory=list, on_setattr=_on_change
     )
     release_event: asyncio.Event | None = None
+    extra: dict[str, Any] | None = None
 
     def as_dict(self) -> dict[str, Any]:
         """Return dictionary representation."""
@@ -124,6 +124,7 @@ class SupervisorJob:
             "parent_id": self.parent_id,
             "errors": [err.as_dict() for err in self.errors],
             "created": self.created.isoformat(),
+            "extra": self.extra,
         }
 
     def capture_error(self, err: HassioError | None = None) -> None:
@@ -157,6 +158,30 @@ class SupervisorJob:
             if token:
                 _CURRENT_JOB.reset(token)
 
+    def update(
+        self,
+        progress: float | None = None,
+        stage: str | None = None,
+        extra: dict[str, Any] | None = DEFAULT,  # type: ignore
+        done: bool | None = None,
+    ) -> None:
+        """Update multiple fields with one on change event."""
+        on_change = self.on_change
+        self.on_change = None
+
+        if progress is not None:
+            self.progress = progress
+        if stage is not None:
+            self.stage = stage
+        if extra != DEFAULT:
+            self.extra = extra
+        if done is not None:
+            self.done = done
+
+        self.on_change = on_change
+        # Just triggers the normal on change
+        self.reference = self.reference
+
 
 class JobManager(FileConfiguration, CoreSysAttributes):
     """Job Manager class."""
@@ -224,7 +249,7 @@ class JobManager(FileConfiguration, CoreSysAttributes):
         reference: str | None = None,
         initial_stage: str | None = None,
         internal: bool = False,
-        no_parent: bool = False,
+        parent_id: str | None = DEFAULT,  # type: ignore
     ) -> SupervisorJob:
         """Create a new job."""
         job = SupervisorJob(
@@ -233,7 +258,7 @@ class JobManager(FileConfiguration, CoreSysAttributes):
             stage=initial_stage,
             on_change=None if internal else self._notify_on_job_change,
             internal=internal,
-            **({"parent_id": None} if no_parent else {}),
+            **({} if parent_id == DEFAULT else {"parent_id": parent_id}),  # type: ignore
         )
         self._jobs[job.uuid] = job
         return job
@@ -267,7 +292,7 @@ class JobManager(FileConfiguration, CoreSysAttributes):
         **kwargs,
     ) -> tuple[SupervisorJob, asyncio.Task | asyncio.TimerHandle]:
         """Schedule a job to run later and return job and task or timer handle."""
-        job = self.new_job(no_parent=True)
+        job = self.new_job(parent_id=None)
 
         def _wrap_task() -> asyncio.Task:
             return self.sys_create_task(
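SupervisorJob.update above exists so one pull-log entry produces a single notification: it detaches on_change while several fields are written, restores it, then re-assigns reference to fire the normal change event once. A hypothetical call matching how the docker interface uses it (the field values are made up for illustration):

# job is a SupervisorJob created for "Pulling container image layer"
job.update(
    progress=62.5,
    stage="Extracting",
    extra={"current": 272_800_551, "total": 436_480_882},
)
# done is left untouched here; the final "Pull complete" entry passes done=True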
@@ -112,6 +112,7 @@ async def test_jobs_tree_representation(api_client: TestClient, coresys: CoreSys
             "done": False,
             "child_jobs": [],
             "errors": [],
+            "extra": None,
         },
         {
             "created": ANY,
@@ -122,6 +123,7 @@ async def test_jobs_tree_representation(api_client: TestClient, coresys: CoreSys
             "stage": None,
             "done": False,
             "errors": [],
+            "extra": None,
             "child_jobs": [
                 {
                     "created": ANY,
@@ -133,6 +135,7 @@ async def test_jobs_tree_representation(api_client: TestClient, coresys: CoreSys
                     "done": False,
                     "child_jobs": [],
                     "errors": [],
+                    "extra": None,
                 },
             ],
         },
@@ -154,6 +157,7 @@ async def test_jobs_tree_representation(api_client: TestClient, coresys: CoreSys
             "done": True,
             "child_jobs": [],
             "errors": [],
+            "extra": None,
         },
     ]
     await outer_task
@@ -196,6 +200,7 @@ async def test_job_manual_cleanup(api_client: TestClient, coresys: CoreSys):
         "done": False,
         "child_jobs": [],
         "errors": [],
+        "extra": None,
     }
 
     # Only done jobs can be deleted via API
@@ -282,6 +287,7 @@ async def test_jobs_sorted(api_client: TestClient, coresys: CoreSys):
             "done": True,
             "errors": [],
             "child_jobs": [],
+            "extra": None,
         },
         {
             "created": ANY,
@@ -292,6 +298,7 @@ async def test_jobs_sorted(api_client: TestClient, coresys: CoreSys):
             "stage": None,
             "done": True,
             "errors": [],
+            "extra": None,
             "child_jobs": [
                 {
                     "created": ANY,
@@ -303,6 +310,7 @@ async def test_jobs_sorted(api_client: TestClient, coresys: CoreSys):
                     "done": True,
                     "errors": [],
                     "child_jobs": [],
+                    "extra": None,
                 },
                 {
                     "created": ANY,
@@ -314,6 +322,7 @@ async def test_jobs_sorted(api_client: TestClient, coresys: CoreSys):
                     "done": True,
                     "errors": [],
                     "child_jobs": [],
+                    "extra": None,
                 },
             ],
         },
@@ -359,6 +368,7 @@ async def test_job_with_error(
         "progress": 0,
         "stage": "test",
         "done": True,
+        "extra": None,
         "errors": [
             {
                 "type": "SupervisorError",
@@ -375,6 +385,7 @@ async def test_job_with_error(
         "progress": 0,
         "stage": None,
         "done": True,
+        "extra": None,
         "errors": [
             {
                 "type": "SupervisorError",
@@ -1126,6 +1126,7 @@ def _make_backup_message_for_assert(
                 "parent_id": None,
                 "errors": [],
                 "created": ANY,
+                "extra": None,
             },
         },
     }
@@ -2,7 +2,7 @@
 
 import asyncio
 from typing import Any
-from unittest.mock import MagicMock, Mock, PropertyMock, call, patch
+from unittest.mock import ANY, AsyncMock, MagicMock, Mock, PropertyMock, call, patch
 
 from awesomeversion import AwesomeVersion
 from docker.errors import DockerException, NotFound
@@ -12,7 +12,7 @@ import pytest
 from requests import RequestException
 
 from supervisor.addons.manager import Addon
-from supervisor.const import BusEvent, CpuArch
+from supervisor.const import BusEvent, CoreState, CpuArch
 from supervisor.coresys import CoreSys
 from supervisor.docker.const import ContainerState
 from supervisor.docker.interface import DockerInterface
@@ -25,6 +25,10 @@ from supervisor.exceptions import (
     DockerNotFound,
     DockerRequestError,
 )
+from supervisor.homeassistant.const import WSEvent
+from supervisor.jobs import JobSchedulerOptions, SupervisorJob
+
+from tests.common import load_json_fixture
 
 
 @pytest.fixture(autouse=True)
@@ -346,58 +350,256 @@ async def test_install_fires_progress_events(
     await asyncio.sleep(1)
     assert events == [
         PullLogEntry(
+            job_id=ANY,
             status="Pulling from home-assistant/odroid-n2-homeassistant",
             id="2025.7.2",
         ),
         PullLogEntry(
+            job_id=ANY,
             status="Already exists",
             progress_detail=PullProgressDetail(),
             id="6e771e15690e",
         ),
         PullLogEntry(
+            job_id=ANY,
             status="Pulling fs layer",
             progress_detail=PullProgressDetail(),
             id="1578b14a573c",
         ),
         PullLogEntry(
-            status="Waiting", progress_detail=PullProgressDetail(), id="2488d0e401e1"
+            job_id=ANY,
+            status="Waiting",
+            progress_detail=PullProgressDetail(),
+            id="2488d0e401e1",
         ),
         PullLogEntry(
+            job_id=ANY,
             status="Downloading",
             progress_detail=PullProgressDetail(current=1378, total=1486),
             progress="[==============================================> ] 1.378kB/1.486kB",
             id="1578b14a573c",
         ),
         PullLogEntry(
+            job_id=ANY,
             status="Download complete",
             progress_detail=PullProgressDetail(),
             id="1578b14a573c",
         ),
         PullLogEntry(
+            job_id=ANY,
             status="Extracting",
             progress_detail=PullProgressDetail(current=1486, total=1486),
             progress="[==================================================>] 1.486kB/1.486kB",
             id="1578b14a573c",
         ),
         PullLogEntry(
+            job_id=ANY,
             status="Pull complete",
             progress_detail=PullProgressDetail(),
             id="1578b14a573c",
         ),
         PullLogEntry(
+            job_id=ANY,
             status="Verifying Checksum",
             progress_detail=PullProgressDetail(),
             id="6a1e931d8f88",
         ),
         PullLogEntry(
-            status="Digest: sha256:490080d7da0f385928022927990e04f604615f7b8c622ef3e58253d0f089881d"
+            job_id=ANY,
+            status="Digest: sha256:490080d7da0f385928022927990e04f604615f7b8c622ef3e58253d0f089881d",
         ),
         PullLogEntry(
-            status="Status: Downloaded newer image for ghcr.io/home-assistant/odroid-n2-homeassistant:2025.7.2"
+            job_id=ANY,
+            status="Status: Downloaded newer image for ghcr.io/home-assistant/odroid-n2-homeassistant:2025.7.2",
         ),
     ]
+
+
+async def test_install_sends_progress_to_home_assistant(
+    coresys: CoreSys, test_docker_interface: DockerInterface, ha_ws_client: AsyncMock
+):
+    """Test progress events are sent as job updates to Home Assistant."""
+    coresys.core.set_state(CoreState.RUNNING)
+    coresys.docker.docker.api.pull.return_value = load_json_fixture(
+        "docker_pull_image_log.json"
+    )
+
+    with (
+        patch.object(
+            type(coresys.supervisor), "arch", PropertyMock(return_value="i386")
+        ),
+    ):
+        # Schedule job so we can listen for the end. Then we can assert against the WS mock
+        event = asyncio.Event()
+        job, install_task = coresys.jobs.schedule_job(
+            test_docker_interface.install,
+            JobSchedulerOptions(),
+            AwesomeVersion("1.2.3"),
+            "test",
+        )
+
+        async def listen_for_job_end(reference: SupervisorJob):
+            if reference.uuid != job.uuid:
+                return
+            event.set()
+
+        coresys.bus.register_event(BusEvent.SUPERVISOR_JOB_END, listen_for_job_end)
+        await install_task
+        await event.wait()
+
+        events = [
+            evt.args[0]["data"]["data"]
+            for evt in ha_ws_client.async_send_command.call_args_list
+            if "data" in evt.args[0] and evt.args[0]["data"]["event"] == WSEvent.JOB
+        ]
+        assert events[0]["name"] == "docker_interface_install"
+        assert events[0]["uuid"] == job.uuid
+        assert events[0]["done"] is None
+        assert events[1]["name"] == "docker_interface_install"
+        assert events[1]["uuid"] == job.uuid
+        assert events[1]["done"] is False
+        assert events[-1]["name"] == "docker_interface_install"
+        assert events[-1]["uuid"] == job.uuid
+        assert events[-1]["done"] is True
+
+        def make_sub_log(layer_id: str):
+            return [
+                {
+                    "stage": evt["stage"],
+                    "progress": evt["progress"],
+                    "done": evt["done"],
+                    "extra": evt["extra"],
+                }
+                for evt in events
+                if evt["name"] == "Pulling container image layer"
+                and evt["reference"] == layer_id
+                and evt["parent_id"] == job.uuid
+            ]
+
+        layer_1_log = make_sub_log("1e214cd6d7d0")
+        layer_2_log = make_sub_log("1a38e1d5e18d")
+        assert len(layer_1_log) == 20
+        assert len(layer_2_log) == 19
+        assert len(events) == 42
+        assert layer_1_log == [
+            {"stage": "Pulling fs layer", "progress": 0, "done": False, "extra": None},
+            {
+                "stage": "Downloading",
+                "progress": 0.1,
+                "done": False,
+                "extra": {"current": 539462, "total": 436480882},
+            },
+            {
+                "stage": "Downloading",
+                "progress": 0.6,
+                "done": False,
+                "extra": {"current": 4864838, "total": 436480882},
+            },
+            {
+                "stage": "Downloading",
+                "progress": 0.9,
+                "done": False,
+                "extra": {"current": 7552896, "total": 436480882},
+            },
+            {
+                "stage": "Downloading",
+                "progress": 1.2,
+                "done": False,
+                "extra": {"current": 10252544, "total": 436480882},
+            },
+            {
+                "stage": "Downloading",
+                "progress": 2.9,
+                "done": False,
+                "extra": {"current": 25369792, "total": 436480882},
+            },
+            {
+                "stage": "Downloading",
+                "progress": 11.9,
+                "done": False,
+                "extra": {"current": 103619904, "total": 436480882},
+            },
+            {
+                "stage": "Downloading",
+                "progress": 26.1,
+                "done": False,
+                "extra": {"current": 227726144, "total": 436480882},
+            },
+            {
+                "stage": "Downloading",
+                "progress": 49.6,
+                "done": False,
+                "extra": {"current": 433170048, "total": 436480882},
+            },
+            {
+                "stage": "Verifying Checksum",
+                "progress": 50,
+                "done": False,
+                "extra": {"current": 433170048, "total": 436480882},
+            },
+            {
+                "stage": "Download complete",
+                "progress": 50,
+                "done": False,
+                "extra": {"current": 433170048, "total": 436480882},
+            },
+            {
+                "stage": "Extracting",
+                "progress": 50.1,
+                "done": False,
+                "extra": {"current": 557056, "total": 436480882},
+            },
+            {
+                "stage": "Extracting",
+                "progress": 60.3,
+                "done": False,
+                "extra": {"current": 89686016, "total": 436480882},
+            },
+            {
+                "stage": "Extracting",
+                "progress": 70.0,
+                "done": False,
+                "extra": {"current": 174358528, "total": 436480882},
+            },
+            {
+                "stage": "Extracting",
+                "progress": 80.0,
+                "done": False,
+                "extra": {"current": 261816320, "total": 436480882},
+            },
+            {
+                "stage": "Extracting",
+                "progress": 88.4,
+                "done": False,
+                "extra": {"current": 334790656, "total": 436480882},
+            },
+            {
+                "stage": "Extracting",
+                "progress": 94.0,
+                "done": False,
+                "extra": {"current": 383811584, "total": 436480882},
+            },
+            {
+                "stage": "Extracting",
+                "progress": 99.9,
+                "done": False,
+                "extra": {"current": 435617792, "total": 436480882},
+            },
+            {
+                "stage": "Extracting",
+                "progress": 100.0,
+                "done": False,
+                "extra": {"current": 436480882, "total": 436480882},
+            },
+            {
+                "stage": "Pull complete",
+                "progress": 100.0,
+                "done": True,
+                "extra": {"current": 436480882, "total": 436480882},
+            },
+        ]
 
 
 @pytest.mark.parametrize(
     ("error_log", "exc_type", "exc_msg"),
     [
tests/fixtures/docker_pull_image_log.json (new file, 351 lines, vendored)
@@ -0,0 +1,351 @@
+[
+  {
+    "status": "Pulling from home-assistant/odroid-n2-homeassistant",
+    "id": "2025.7.1"
+  },
+  {
+    "status": "Already exists",
+    "progressDetail": {},
+    "id": "6e771e15690e"
+  },
+  {
+    "status": "Already exists",
+    "progressDetail": {},
+    "id": "58da640818f4"
+  },
+  {
+    "status": "Pulling fs layer",
+    "progressDetail": {},
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Pulling fs layer",
+    "progressDetail": {},
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Waiting",
+    "progressDetail": {},
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Waiting",
+    "progressDetail": {},
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 539462,
+      "total": 436480882
+    },
+    "progress": "[> ] 539.5kB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 262144,
+      "total": 26132657
+    },
+    "progress": "[> ] 262.1kB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 4864838,
+      "total": 436480882
+    },
+    "progress": "[> ] 4.865MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 6580032,
+      "total": 26132657
+    },
+    "progress": "[============> ] 6.58MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 7552896,
+      "total": 436480882
+    },
+    "progress": "[> ] 7.553MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 18688896,
+      "total": 26132657
+    },
+    "progress": "[===================================> ] 18.69MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 10252544,
+      "total": 436480882
+    },
+    "progress": "[=> ] 10.25MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 25815872,
+      "total": 26132657
+    },
+    "progress": "[=================================================> ] 25.82MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Verifying Checksum",
+    "progressDetail": {},
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Download complete",
+    "progressDetail": {},
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 25369792,
+      "total": 436480882
+    },
+    "progress": "[==> ] 25.37MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 103619904,
+      "total": 436480882
+    },
+    "progress": "[===========> ] 103.6MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 227726144,
+      "total": 436480882
+    },
+    "progress": "[==========================> ] 227.7MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 433170048,
+      "total": 436480882
+    },
+    "progress": "[=================================================> ] 433.2MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Verifying Checksum",
+    "progressDetail": {},
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Download complete",
+    "progressDetail": {},
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 557056,
+      "total": 436480882
+    },
+    "progress": "[> ] 557.1kB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 89686016,
+      "total": 436480882
+    },
+    "progress": "[==========> ] 89.69MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 174358528,
+      "total": 436480882
+    },
+    "progress": "[===================> ] 174.4MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 261816320,
+      "total": 436480882
+    },
+    "progress": "[=============================> ] 261.8MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 334790656,
+      "total": 436480882
+    },
+    "progress": "[======================================> ] 334.8MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 383811584,
+      "total": 436480882
+    },
+    "progress": "[===========================================> ] 383.8MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 435617792,
+      "total": 436480882
+    },
+    "progress": "[=================================================> ] 435.6MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 436480882,
+      "total": 436480882
+    },
+    "progress": "[==================================================>] 436.5MB/436.5MB",
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Pull complete",
+    "progressDetail": {},
+    "id": "1e214cd6d7d0"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 262144,
+      "total": 26132657
+    },
+    "progress": "[> ] 262.1kB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 2621440,
+      "total": 26132657
+    },
+    "progress": "[=====> ] 2.621MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 6029312,
+      "total": 26132657
+    },
+    "progress": "[===========> ] 6.029MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 8650752,
+      "total": 26132657
+    },
+    "progress": "[================> ] 8.651MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 11010048,
+      "total": 26132657
+    },
+    "progress": "[=====================> ] 11.01MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 13369344,
+      "total": 26132657
+    },
+    "progress": "[=========================> ] 13.37MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 15990784,
+      "total": 26132657
+    },
+    "progress": "[==============================> ] 15.99MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 18350080,
+      "total": 26132657
+    },
+    "progress": "[===================================> ] 18.35MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 20709376,
+      "total": 26132657
+    },
+    "progress": "[=======================================> ] 20.71MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 23068672,
+      "total": 26132657
+    },
+    "progress": "[============================================> ] 23.07MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 26132657,
+      "total": 26132657
+    },
+    "progress": "[==================================================>] 26.13MB/26.13MB",
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Pull complete",
+    "progressDetail": {},
+    "id": "1a38e1d5e18d"
+  },
+  {
+    "status": "Digest: sha256:7d97da645f232f82a768d0a537e452536719d56d484d419836e53dbe3e4ec736"
+  },
+  {
+    "status": "Status: Downloaded newer image for ghcr.io/home-assistant/odroid-n2-homeassistant:2025.7.1"
+  }
+]
@@ -1,7 +1,7 @@
 """Test Home Assistant core."""
 
 from datetime import datetime, timedelta
-from unittest.mock import MagicMock, Mock, PropertyMock, patch
+from unittest.mock import ANY, MagicMock, Mock, PropertyMock, patch
 
 from awesomeversion import AwesomeVersion
 from docker.errors import APIError, DockerException, ImageNotFound, NotFound
@@ -408,6 +408,7 @@ async def test_core_loads_wrong_image_for_machine(
         "force": True,
     }
     coresys.docker.pull_image.assert_called_once_with(
+        ANY,
         "ghcr.io/home-assistant/qemux86-64-homeassistant",
         "2024.4.0",
         platform="linux/amd64",
@@ -456,6 +457,7 @@ async def test_core_loads_wrong_image_for_architecture(
         "force": True,
     }
     coresys.docker.pull_image.assert_called_once_with(
+        ANY,
         "ghcr.io/home-assistant/qemux86-64-homeassistant",
         "2024.4.0",
         platform="linux/amd64",
@@ -993,6 +993,7 @@ async def test_internal_jobs_no_notify(coresys: CoreSys, ha_ws_client: AsyncMock
                 "parent_id": None,
                 "errors": [],
                 "created": ANY,
+                "extra": None,
             },
         },
     }
@@ -105,6 +105,7 @@ async def test_notify_on_change(coresys: CoreSys, ha_ws_client: AsyncMock):
                 "parent_id": None,
                 "errors": [],
                 "created": ANY,
+                "extra": None,
             },
         },
     }
@@ -127,6 +128,7 @@ async def test_notify_on_change(coresys: CoreSys, ha_ws_client: AsyncMock):
                 "parent_id": None,
                 "errors": [],
                 "created": ANY,
+                "extra": None,
             },
         },
     }
@@ -149,6 +151,7 @@ async def test_notify_on_change(coresys: CoreSys, ha_ws_client: AsyncMock):
                 "parent_id": None,
                 "errors": [],
                 "created": ANY,
+                "extra": None,
             },
         },
     }
@@ -171,6 +174,7 @@ async def test_notify_on_change(coresys: CoreSys, ha_ws_client: AsyncMock):
                 "parent_id": None,
                 "errors": [],
                 "created": ANY,
+                "extra": None,
             },
         },
     }
@@ -199,6 +203,7 @@ async def test_notify_on_change(coresys: CoreSys, ha_ws_client: AsyncMock):
                     }
                 ],
                 "created": ANY,
+                "extra": None,
             },
         },
     }
@@ -226,6 +231,7 @@ async def test_notify_on_change(coresys: CoreSys, ha_ws_client: AsyncMock):
                    }
                ],
                "created": ANY,
+                "extra": None,
            },
        },
    }
@@ -1,7 +1,7 @@
 """Test base plugin functionality."""
 
 import asyncio
-from unittest.mock import MagicMock, Mock, PropertyMock, patch
+from unittest.mock import ANY, MagicMock, Mock, PropertyMock, patch
 
 from awesomeversion import AwesomeVersion
 import pytest
@@ -373,7 +373,7 @@ async def test_load_with_incorrect_image(
         "force": True,
     }
     coresys.docker.pull_image.assert_called_once_with(
-        correct_image, "2024.4.0", platform="linux/amd64"
+        ANY, correct_image, "2024.4.0", platform="linux/amd64"
     )
     assert plugin.image == correct_image
 