Compare commits

..

10 Commits

Author SHA1 Message Date
Mike Degatano
b59ab70655 Add cast back and test for latest version of installed addon 2025-08-28 20:58:37 +00:00
Mike Degatano
46c2ac882e Add availability API for addons 2025-08-28 20:15:42 +00:00
dependabot[bot]
3c39f2f785 Bump sentry-sdk from 2.35.0 to 2.35.1 (#6139)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-28 10:31:03 +02:00
Stefan Agner
30db72df78 Add WebSocket proxy timeout handling (#6138)
Add TimeoutError handling for WebSocket connections to add-ons. Also
log debug information for WebSocket proxy connections.
2025-08-28 10:18:46 +02:00
Stefan Agner
00a78f372b Fix ConnectionResetError during ingress proxying (#6137)
Under certain (timing) conditions ConnectionResetError can be raised
when the client closes the connection while we are still writing to it.
Make sure to handle the appropriate exceptions to avoid flooding the
logs with stack traces.
2025-08-28 10:15:32 +02:00
dependabot[bot]
b69546f2c1 Bump orjson from 3.11.2 to 3.11.3 (#6135)
Bumps [orjson](https://github.com/ijl/orjson) from 3.11.2 to 3.11.3.
- [Release notes](https://github.com/ijl/orjson/releases)
- [Changelog](https://github.com/ijl/orjson/blob/master/CHANGELOG.md)
- [Commits](https://github.com/ijl/orjson/compare/3.11.2...3.11.3)

---
updated-dependencies:
- dependency-name: orjson
  dependency-version: 3.11.3
  dependency-type: direct:production
  update-type: version-update:semver-patch
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-08-27 16:13:52 +02:00
Mike Degatano
78be155b94 Handle download restart in pull progress log (#6131) 2025-08-25 23:20:00 +02:00
Mike Degatano
9900dfc8ca Do not skip messages in pull progress log due to rounding (#6129) 2025-08-25 22:25:38 +02:00
Stefan Agner
3a1ebc9d37 Handle malformed addon map entries gracefully (#6126)
* Handle missing type attribute in add-on map config

Handle missing type attribute in the add-on `map` configuration key.

* Make sure wrong volumes are cleared in any case

Also add warning when string mapping is rejected.

* Add unit tests

* Improve test coverage
2025-08-25 22:24:46 +02:00
Jan Čermák
580c3273dc Fix guarding of timezone setting for older OS 16.2 dev builds (#6127)
As some 16.2 dev versions did not support setting of the timezone yet,
if they were not updated before the Supervisor #6099 was merged, the
system could end up unhealthy as setting of the timezone during setup
fails there. This would prevent such systems from being updated to the
new OS version.

Now that we know an exact OS version with TZ setting support, only
attempt doing it if it's supported.
2025-08-25 19:47:47 +02:00
13 changed files with 646 additions and 88 deletions

View File

@@ -17,13 +17,13 @@ faust-cchardet==2.1.19
gitpython==3.1.45
jinja2==3.1.6
log-rate-limit==1.4.2
orjson==3.11.2
orjson==3.11.3
pulsectl==24.12.0
pyudev==0.24.3
PyYAML==6.0.2
requests==2.32.5
securetar==2025.2.1
sentry-sdk==2.35.0
sentry-sdk==2.35.1
setuptools==80.9.0
voluptuous==0.15.2
dbus-fast==2.44.3

View File

@@ -266,10 +266,23 @@ def _migrate_addon_config(protocol=False):
volumes = []
for entry in config.get(ATTR_MAP, []):
if isinstance(entry, dict):
# Validate that dict entries have required 'type' field
if ATTR_TYPE not in entry:
_LOGGER.warning(
"Add-on config has invalid map entry missing 'type' field: %s. Skipping invalid entry for %s",
entry,
name,
)
continue
volumes.append(entry)
if isinstance(entry, str):
result = RE_VOLUME.match(entry)
if not result:
_LOGGER.warning(
"Add-on config has invalid map entry: %s. Skipping invalid entry for %s",
entry,
name,
)
continue
volumes.append(
{
@@ -278,7 +291,7 @@ def _migrate_addon_config(protocol=False):
}
)
if volumes:
# Always update config to clear potentially malformed ones
config[ATTR_MAP] = volumes
# 2023-10 "config" became "homeassistant" so /config can be used for addon's public config

View File

@@ -735,6 +735,10 @@ class RestAPI(CoreSysAttributes):
"/store/addons/{addon}/documentation",
api_store.addons_addon_documentation,
),
web.get(
"/store/addons/{addon}/availability",
api_store.addons_addon_availability,
),
web.post(
"/store/addons/{addon}/install", api_store.addons_addon_install
),

View File

@@ -199,6 +199,8 @@ class APIIngress(CoreSysAttributes):
url = f"{url}?{request.query_string}"
# Start proxy
try:
_LOGGER.debug("Proxing WebSocket to %s, upstream url: %s", addon.slug, url)
async with self.sys_websession.ws_connect(
url,
headers=source_header,
@@ -214,6 +216,8 @@ class APIIngress(CoreSysAttributes):
],
return_when=asyncio.FIRST_COMPLETED,
)
except TimeoutError:
_LOGGER.warning("WebSocket proxy to %s timed out", addon.slug)
return ws_server
@@ -286,6 +290,7 @@ class APIIngress(CoreSysAttributes):
aiohttp.ClientError,
aiohttp.ClientPayloadError,
ConnectionResetError,
ConnectionError,
) as err:
_LOGGER.error("Stream error with %s: %s", url, err)

View File

@@ -297,6 +297,12 @@ class APIStore(CoreSysAttributes):
_read_static_text_file, addon.path_documentation
)
@api_process
async def addons_addon_availability(self, request: web.Request) -> None:
"""Check add-on availability for current system."""
addon = cast(AddonStore, self._extract_addon(request))
addon.validate_availability()
@api_process
async def repositories_list(self, request: web.Request) -> list[dict[str, Any]]:
"""Return all repositories."""

View File

@@ -1,15 +1,20 @@
"""Docker constants."""
from __future__ import annotations
from contextlib import suppress
from enum import Enum, StrEnum
from functools import total_ordering
from pathlib import PurePath
from typing import Self, cast
import re
from typing import cast
from docker.types import Mount
from ..const import MACHINE_ID
RE_RETRYING_DOWNLOAD_STATUS = re.compile(r"Retrying in \d+ seconds?")
class Capabilities(StrEnum):
"""Linux Capabilities."""
@@ -79,6 +84,7 @@ class PullImageLayerStage(Enum):
"""
PULLING_FS_LAYER = 1, "Pulling fs layer"
RETRYING_DOWNLOAD = 2, "Retrying download"
DOWNLOADING = 2, "Downloading"
VERIFYING_CHECKSUM = 3, "Verifying Checksum"
DOWNLOAD_COMPLETE = 4, "Download complete"
@@ -107,11 +113,16 @@ class PullImageLayerStage(Enum):
return hash(self.status)
@classmethod
def from_status(cls, status: str) -> Self | None:
def from_status(cls, status: str) -> PullImageLayerStage | None:
"""Return stage instance from pull log status."""
for i in cls:
if i.status == status:
return i
# This one includes number of seconds until download so its not constant
if RE_RETRYING_DOWNLOAD_STATUS.match(status):
return cls.RETRYING_DOWNLOAD
return None

View File

@@ -291,8 +291,10 @@ class DockerInterface(JobGroup, ABC):
progress = 50
case PullImageLayerStage.PULL_COMPLETE:
progress = 100
case PullImageLayerStage.RETRYING_DOWNLOAD:
progress = 0
if progress < job.progress:
if stage != PullImageLayerStage.RETRYING_DOWNLOAD and progress < job.progress:
raise DockerLogOutOfOrder(
f"Received pull image log with status {reference.status} for job {job.uuid} that implied progress was {progress} but current progress is {job.progress}, skipping",
_LOGGER.debug,
@@ -300,7 +302,7 @@ class DockerInterface(JobGroup, ABC):
# Our filters have all passed. Time to update the job
# Only downloading and extracting have progress details. Use that to set extra
# We'll leave it around on other stages as the total bytes may be useful after that stage
# We'll leave it around on later stages as the total bytes may be useful after that stage
if (
stage in {PullImageLayerStage.DOWNLOADING, PullImageLayerStage.EXTRACTING}
and reference.progress_detail
@@ -318,6 +320,9 @@ class DockerInterface(JobGroup, ABC):
progress=progress,
stage=stage.status,
done=stage == PullImageLayerStage.PULL_COMPLETE,
extra=None
if stage == PullImageLayerStage.RETRYING_DOWNLOAD
else job.extra,
)
@Job(

View File

@@ -91,7 +91,7 @@ class SystemControl(CoreSysAttributes):
if (
self.coresys.os.available
and self.coresys.os.version is not None
and self.sys_os.version >= AwesomeVersion("16.2.dev0")
and self.sys_os.version >= AwesomeVersion("16.2.dev20250814")
):
_LOGGER.info("Setting host timezone: %s", timezone)
await self.sys_dbus.timedate.set_timezone(timezone)

View File

@@ -7,7 +7,6 @@ from contextvars import Context, ContextVar, Token
from dataclasses import dataclass
from datetime import datetime
import logging
import math
from typing import Any, Self
from uuid import uuid4
@@ -98,7 +97,6 @@ class SupervisorJob:
default=0,
validator=[ge(0), le(100), _invalid_if_done],
on_setattr=_on_change,
converter=lambda val: math.floor(val * 10) / 10,
)
stage: str | None = field(
default=None, validator=[_invalid_if_done], on_setattr=_on_change
@@ -119,7 +117,7 @@ class SupervisorJob:
"name": self.name,
"reference": self.reference,
"uuid": self.uuid,
"progress": self.progress,
"progress": round(self.progress, 1),
"stage": self.stage,
"done": self.done,
"parent_id": self.parent_id,

View File

@@ -140,6 +140,46 @@ def test_valid_map():
vd.SCHEMA_ADDON_CONFIG(config)
def test_malformed_map_entries():
"""Test that malformed map entries are handled gracefully (issue #6124)."""
config = load_json_fixture("basic-addon-config.json")
# Test case 1: Empty dict in map (should be skipped with warning)
config["map"] = [{}]
valid_config = vd.SCHEMA_ADDON_CONFIG(config)
assert valid_config["map"] == []
# Test case 2: Dict missing required 'type' field (should be skipped with warning)
config["map"] = [{"read_only": False, "path": "/custom"}]
valid_config = vd.SCHEMA_ADDON_CONFIG(config)
assert valid_config["map"] == []
# Test case 3: Invalid string format that doesn't match regex
config["map"] = ["invalid_format", "not:a:valid:mapping", "share:invalid_mode"]
valid_config = vd.SCHEMA_ADDON_CONFIG(config)
assert valid_config["map"] == []
# Test case 4: Mix of valid and invalid entries (invalid should be filtered out)
config["map"] = [
"share:rw", # Valid string format
"invalid_string", # Invalid string format
{}, # Invalid empty dict
{"type": "config", "read_only": True}, # Valid dict format
{"read_only": False}, # Invalid - missing type
]
valid_config = vd.SCHEMA_ADDON_CONFIG(config)
# Should only keep the valid entries
assert len(valid_config["map"]) == 2
assert any(entry["type"] == "share" for entry in valid_config["map"])
assert any(entry["type"] == "config" for entry in valid_config["map"])
# Test case 5: The specific case from the UplandJacob repo (malformed YAML format)
# This simulates what YAML "- addon_config: rw" creates
config["map"] = [{"addon_config": "rw"}] # Wrong structure, missing 'type' key
valid_config = vd.SCHEMA_ADDON_CONFIG(config)
assert valid_config["map"] == []
def test_valid_basic_build():
"""Validate basic build config."""
config = load_json_fixture("basic-build-config.json")

View File

@@ -6,6 +6,7 @@ from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch
from aiohttp import ClientResponse
from aiohttp.test_utils import TestClient
from awesomeversion import AwesomeVersion
import pytest
from supervisor.addons.addon import Addon
@@ -17,6 +18,7 @@ from supervisor.docker.addon import DockerAddon
from supervisor.docker.const import ContainerState
from supervisor.docker.interface import DockerInterface
from supervisor.docker.monitor import DockerContainerStateEvent
from supervisor.homeassistant.module import HomeAssistant
from supervisor.store.addon import AddonStore
from supervisor.store.repository import Repository
@@ -305,6 +307,7 @@ async def get_message(resp: ClientResponse, json_expected: bool) -> str:
("post", "/store/addons/bad/install/1", True),
("post", "/store/addons/bad/update", True),
("post", "/store/addons/bad/update/1", True),
("get", "/store/addons/bad/availability", True),
# Legacy paths
("get", "/addons/bad/icon", False),
("get", "/addons/bad/logo", False),
@@ -390,3 +393,131 @@ async def test_api_store_addons_changelog_corrupted(
assert resp.status == 200
result = await resp.text()
assert result == "Text with an invalid UTF-8 char: <20>"
async def test_api_store_addons_addon_availability_success(
api_client: TestClient, store_addon: AddonStore
):
"""Test /store/addons/{addon}/availability REST API - success case."""
resp = await api_client.get(f"/store/addons/{store_addon.slug}/availability")
assert resp.status == 200
async def test_api_store_addons_addon_availability_arch_not_supported(
api_client: TestClient, coresys: CoreSys
):
"""Test /store/addons/{addon}/availability REST API - architecture not supported."""
# Create an addon with unsupported architecture
addon_obj = AddonStore(coresys, "test_arch_addon")
coresys.addons.store[addon_obj.slug] = addon_obj
# Set addon config with unsupported architecture
addon_config = {
"advanced": False,
"arch": ["i386"], # Not supported on current system
"slug": "test_arch",
"description": "Test arch add-on",
"name": "Test Arch Add-on",
"repository": "test",
"stage": "stable",
"version": "1.0.0",
}
coresys.store.data.addons[addon_obj.slug] = addon_config
# Mock the system architecture to be different
with patch.object(CpuArch, "supported", new=PropertyMock(return_value=["amd64"])):
resp = await api_client.get(f"/store/addons/{addon_obj.slug}/availability")
assert resp.status == 400
result = await resp.json()
assert "not supported on this platform" in result["message"]
@pytest.mark.parametrize("supported_machines", [["odroid-n2"], ["!qemux86-64"]])
async def test_api_store_addons_addon_availability_machine_not_supported(
api_client: TestClient, coresys: CoreSys, supported_machines: list[str]
):
"""Test /store/addons/{addon}/availability REST API - machine not supported."""
# Create an addon with unsupported machine type
addon_obj = AddonStore(coresys, "test_machine_addon")
coresys.addons.store[addon_obj.slug] = addon_obj
# Set addon config with unsupported machine
addon_config = {
"advanced": False,
"arch": ["amd64"],
"machine": supported_machines,
"slug": "test_machine",
"description": "Test machine add-on",
"name": "Test Machine Add-on",
"repository": "test",
"stage": "stable",
"version": "1.0.0",
}
coresys.store.data.addons[addon_obj.slug] = addon_config
# Mock the system machine to be different
with patch.object(CoreSys, "machine", new=PropertyMock(return_value="qemux86-64")):
resp = await api_client.get(f"/store/addons/{addon_obj.slug}/availability")
assert resp.status == 400
result = await resp.json()
assert "not supported on this machine" in result["message"]
async def test_api_store_addons_addon_availability_homeassistant_version_too_old(
api_client: TestClient, coresys: CoreSys, test_repository: Repository
):
"""Test /store/addons/{addon}/availability REST API - Home Assistant version too old."""
# Create an addon that requires newer Home Assistant version
addon_obj = AddonStore(coresys, "test_version_addon")
coresys.addons.store[addon_obj.slug] = addon_obj
# Set addon config with minimum Home Assistant version requirement
addon_config = {
"advanced": False,
"arch": ["amd64"],
"homeassistant": "2023.1.1", # Requires newer version than current
"slug": "test_version",
"description": "Test version add-on",
"name": "Test Version Add-on",
"repository": "test",
"stage": "stable",
"version": "1.0.0",
}
coresys.store.data.addons[addon_obj.slug] = addon_config
# Mock the Home Assistant version to be older
with patch.object(
HomeAssistant,
"version",
new=PropertyMock(return_value=AwesomeVersion("2022.1.1")),
):
resp = await api_client.get(f"/store/addons/{addon_obj.slug}/availability")
assert resp.status == 400
result = await resp.json()
assert (
"requires Home Assistant version 2023.1.1 or greater" in result["message"]
)
async def test_api_store_addons_addon_availability_installed_addon(
api_client: TestClient, install_addon_ssh: Addon
):
"""Test /store/addons/{addon}/availability REST API - installed addon checks against latest version."""
resp = await api_client.get("/store/addons/local_ssh/availability")
assert resp.status == 200
install_addon_ssh.data_store["version"] = AwesomeVersion("10.0.0")
install_addon_ssh.data_store["homeassistant"] = AwesomeVersion("2023.1.1")
# Mock the Home Assistant version to be older
with patch.object(
HomeAssistant,
"version",
new=PropertyMock(return_value=AwesomeVersion("2022.1.1")),
):
resp = await api_client.get("/store/addons/local_ssh/availability")
assert resp.status == 400
result = await resp.json()
assert (
"requires Home Assistant version 2023.1.1 or greater" in result["message"]
)

View File

@@ -21,7 +21,6 @@ from supervisor.docker.monitor import DockerContainerStateEvent
from supervisor.exceptions import (
DockerAPIError,
DockerError,
DockerLogOutOfOrder,
DockerNoSpaceOnDevice,
DockerNotFound,
DockerRequestError,
@@ -486,25 +485,25 @@ async def test_install_sends_progress_to_home_assistant(
{"stage": "Pulling fs layer", "progress": 0, "done": False, "extra": None},
{
"stage": "Downloading",
"progress": 0.0,
"progress": 0.1,
"done": False,
"extra": {"current": 539462, "total": 436480882},
},
{
"stage": "Downloading",
"progress": 0.5,
"progress": 0.6,
"done": False,
"extra": {"current": 4864838, "total": 436480882},
},
{
"stage": "Downloading",
"progress": 0.8,
"progress": 0.9,
"done": False,
"extra": {"current": 7552896, "total": 436480882},
},
{
"stage": "Downloading",
"progress": 1.1,
"progress": 1.2,
"done": False,
"extra": {"current": 10252544, "total": 436480882},
},
@@ -516,13 +515,13 @@ async def test_install_sends_progress_to_home_assistant(
},
{
"stage": "Downloading",
"progress": 11.8,
"progress": 11.9,
"done": False,
"extra": {"current": 103619904, "total": 436480882},
},
{
"stage": "Downloading",
"progress": 26.0,
"progress": 26.1,
"done": False,
"extra": {"current": 227726144, "total": 436480882},
},
@@ -534,49 +533,49 @@ async def test_install_sends_progress_to_home_assistant(
},
{
"stage": "Verifying Checksum",
"progress": 50.0,
"progress": 50,
"done": False,
"extra": {"current": 433170048, "total": 436480882},
},
{
"stage": "Download complete",
"progress": 50.0,
"progress": 50,
"done": False,
"extra": {"current": 433170048, "total": 436480882},
},
{
"stage": "Extracting",
"progress": 50.0,
"progress": 50.1,
"done": False,
"extra": {"current": 557056, "total": 436480882},
},
{
"stage": "Extracting",
"progress": 60.2,
"progress": 60.3,
"done": False,
"extra": {"current": 89686016, "total": 436480882},
},
{
"stage": "Extracting",
"progress": 69.9,
"progress": 70.0,
"done": False,
"extra": {"current": 174358528, "total": 436480882},
},
{
"stage": "Extracting",
"progress": 79.9,
"progress": 80.0,
"done": False,
"extra": {"current": 261816320, "total": 436480882},
},
{
"stage": "Extracting",
"progress": 88.3,
"progress": 88.4,
"done": False,
"extra": {"current": 334790656, "total": 436480882},
},
{
"stage": "Extracting",
"progress": 93.9,
"progress": 94.0,
"done": False,
"extra": {"current": 383811584, "total": 436480882},
},
@@ -601,6 +600,136 @@ async def test_install_sends_progress_to_home_assistant(
]
async def test_install_progress_rounding_does_not_cause_misses(
coresys: CoreSys, test_docker_interface: DockerInterface, ha_ws_client: AsyncMock
):
"""Test extremely close progress events do not create rounding issues."""
coresys.core.set_state(CoreState.RUNNING)
coresys.docker.docker.api.pull.return_value = [
{
"status": "Pulling from home-assistant/odroid-n2-homeassistant",
"id": "2025.7.1",
},
{"status": "Pulling fs layer", "progressDetail": {}, "id": "1e214cd6d7d0"},
{
"status": "Downloading",
"progressDetail": {"current": 432700000, "total": 436480882},
"progress": "[=================================================> ] 432.7MB/436.5MB",
"id": "1e214cd6d7d0",
},
{
"status": "Downloading",
"progressDetail": {"current": 432800000, "total": 436480882},
"progress": "[=================================================> ] 432.8MB/436.5MB",
"id": "1e214cd6d7d0",
},
{"status": "Verifying Checksum", "progressDetail": {}, "id": "1e214cd6d7d0"},
{"status": "Download complete", "progressDetail": {}, "id": "1e214cd6d7d0"},
{
"status": "Extracting",
"progressDetail": {"current": 432700000, "total": 436480882},
"progress": "[=================================================> ] 432.7MB/436.5MB",
"id": "1e214cd6d7d0",
},
{
"status": "Extracting",
"progressDetail": {"current": 432800000, "total": 436480882},
"progress": "[=================================================> ] 432.8MB/436.5MB",
"id": "1e214cd6d7d0",
},
{"status": "Pull complete", "progressDetail": {}, "id": "1e214cd6d7d0"},
{
"status": "Digest: sha256:7d97da645f232f82a768d0a537e452536719d56d484d419836e53dbe3e4ec736"
},
{
"status": "Status: Downloaded newer image for ghcr.io/home-assistant/odroid-n2-homeassistant:2025.7.1"
},
]
with (
patch.object(
type(coresys.supervisor), "arch", PropertyMock(return_value="i386")
),
):
# Schedule job so we can listen for the end. Then we can assert against the WS mock
event = asyncio.Event()
job, install_task = coresys.jobs.schedule_job(
test_docker_interface.install,
JobSchedulerOptions(),
AwesomeVersion("1.2.3"),
"test",
)
async def listen_for_job_end(reference: SupervisorJob):
if reference.uuid != job.uuid:
return
event.set()
coresys.bus.register_event(BusEvent.SUPERVISOR_JOB_END, listen_for_job_end)
await install_task
await event.wait()
events = [
evt.args[0]["data"]["data"]
for evt in ha_ws_client.async_send_command.call_args_list
if "data" in evt.args[0]
and evt.args[0]["data"]["event"] == WSEvent.JOB
and evt.args[0]["data"]["data"]["reference"] == "1e214cd6d7d0"
and evt.args[0]["data"]["data"]["stage"] in {"Downloading", "Extracting"}
]
assert events == [
{
"name": "Pulling container image layer",
"stage": "Downloading",
"progress": 49.6,
"done": False,
"extra": {"current": 432700000, "total": 436480882},
"reference": "1e214cd6d7d0",
"parent_id": job.uuid,
"errors": [],
"uuid": ANY,
"created": ANY,
},
{
"name": "Pulling container image layer",
"stage": "Downloading",
"progress": 49.6,
"done": False,
"extra": {"current": 432800000, "total": 436480882},
"reference": "1e214cd6d7d0",
"parent_id": job.uuid,
"errors": [],
"uuid": ANY,
"created": ANY,
},
{
"name": "Pulling container image layer",
"stage": "Extracting",
"progress": 99.6,
"done": False,
"extra": {"current": 432700000, "total": 436480882},
"reference": "1e214cd6d7d0",
"parent_id": job.uuid,
"errors": [],
"uuid": ANY,
"created": ANY,
},
{
"name": "Pulling container image layer",
"stage": "Extracting",
"progress": 99.6,
"done": False,
"extra": {"current": 432800000, "total": 436480882},
"reference": "1e214cd6d7d0",
"parent_id": job.uuid,
"errors": [],
"uuid": ANY,
"created": ANY,
},
]
@pytest.mark.parametrize(
("error_log", "exc_type", "exc_msg"),
[
@@ -647,56 +776,138 @@ async def test_install_raises_on_pull_error(
await test_docker_interface.install(AwesomeVersion("1.2.3"), "test")
async def test_process_pull_image_log_precision_fix(
coresys: CoreSys, test_docker_interface: DockerInterface
async def test_install_progress_handles_download_restart(
coresys: CoreSys, test_docker_interface: DockerInterface, ha_ws_client: AsyncMock
):
"""Test that precision issues don't cause DockerLogOutOfOrder errors."""
job_id = "test_job_123"
layer_id = "abc123"
# First, create the job with a "Pulling fs layer" event
fs_layer_entry = PullLogEntry(
job_id=job_id,
id=layer_id,
status="Pulling fs layer",
)
test_docker_interface._process_pull_image_log(job_id, fs_layer_entry)
# First extracting event with higher progress
entry1 = PullLogEntry(
job_id=job_id,
id=layer_id,
status="Extracting",
progress_detail=PullProgressDetail(current=91300, total=100000),
"""Test install handles docker progress events that include a download restart."""
coresys.core.set_state(CoreState.RUNNING)
coresys.docker.docker.api.pull.return_value = load_json_fixture(
"docker_pull_image_log_restart.json"
)
# Second extracting event with slightly lower progress that would cause precision issue
# This simulates the real-world scenario from the Sentry error
entry2 = PullLogEntry(
job_id=job_id,
id=layer_id,
status="Extracting",
progress_detail=PullProgressDetail(current=91284, total=100000),
with (
patch.object(
type(coresys.supervisor), "arch", PropertyMock(return_value="i386")
),
):
# Schedule job so we can listen for the end. Then we can assert against the WS mock
event = asyncio.Event()
job, install_task = coresys.jobs.schedule_job(
test_docker_interface.install,
JobSchedulerOptions(),
AwesomeVersion("1.2.3"),
"test",
)
# Process first extracting entry
test_docker_interface._process_pull_image_log(job_id, entry1)
async def listen_for_job_end(reference: SupervisorJob):
if reference.uuid != job.uuid:
return
event.set()
# Find the job to verify progress
layer_job = None
for job in coresys.jobs.jobs:
if job.parent_id == job_id and job.reference == layer_id:
layer_job = job
break
coresys.bus.register_event(BusEvent.SUPERVISOR_JOB_END, listen_for_job_end)
await install_task
await event.wait()
assert layer_job is not None, "Layer job should have been created"
# Progress calculation: 50 + (50 * 91300/100000) = 50 + 45.65 = 95.65 -> floors to 95.6
assert layer_job.progress == 95.6
events = [
evt.args[0]["data"]["data"]
for evt in ha_ws_client.async_send_command.call_args_list
if "data" in evt.args[0] and evt.args[0]["data"]["event"] == WSEvent.JOB
]
# Process second entry - this should NOT raise DockerLogOutOfOrder
# Previously this would fail because the calculated progress (95.642...) was less than stored (95.7 if rounded up)
# With floor rounding, both values are consistent: calculated 95.6 <= stored 95.6
try:
test_docker_interface._process_pull_image_log(job_id, entry2)
except DockerLogOutOfOrder:
pytest.fail("DockerLogOutOfOrder should not be raised due to precision fix")
def make_sub_log(layer_id: str):
return [
{
"stage": evt["stage"],
"progress": evt["progress"],
"done": evt["done"],
"extra": evt["extra"],
}
for evt in events
if evt["name"] == "Pulling container image layer"
and evt["reference"] == layer_id
and evt["parent_id"] == job.uuid
]
layer_1_log = make_sub_log("1e214cd6d7d0")
assert len(layer_1_log) == 14
assert layer_1_log == [
{"stage": "Pulling fs layer", "progress": 0, "done": False, "extra": None},
{
"stage": "Downloading",
"progress": 11.9,
"done": False,
"extra": {"current": 103619904, "total": 436480882},
},
{
"stage": "Downloading",
"progress": 26.1,
"done": False,
"extra": {"current": 227726144, "total": 436480882},
},
{
"stage": "Downloading",
"progress": 49.6,
"done": False,
"extra": {"current": 433170048, "total": 436480882},
},
{
"stage": "Retrying download",
"progress": 0,
"done": False,
"extra": None,
},
{
"stage": "Retrying download",
"progress": 0,
"done": False,
"extra": None,
},
{
"stage": "Downloading",
"progress": 11.9,
"done": False,
"extra": {"current": 103619904, "total": 436480882},
},
{
"stage": "Downloading",
"progress": 26.1,
"done": False,
"extra": {"current": 227726144, "total": 436480882},
},
{
"stage": "Downloading",
"progress": 49.6,
"done": False,
"extra": {"current": 433170048, "total": 436480882},
},
{
"stage": "Verifying Checksum",
"progress": 50,
"done": False,
"extra": {"current": 433170048, "total": 436480882},
},
{
"stage": "Download complete",
"progress": 50,
"done": False,
"extra": {"current": 433170048, "total": 436480882},
},
{
"stage": "Extracting",
"progress": 80.0,
"done": False,
"extra": {"current": 261816320, "total": 436480882},
},
{
"stage": "Extracting",
"progress": 100.0,
"done": False,
"extra": {"current": 436480882, "total": 436480882},
},
{
"stage": "Pull complete",
"progress": 100.0,
"done": True,
"extra": {"current": 436480882, "total": 436480882},
},
]

View File

@@ -0,0 +1,134 @@
[
{
"status": "Pulling from home-assistant/odroid-n2-homeassistant",
"id": "2025.7.1"
},
{
"status": "Already exists",
"progressDetail": {},
"id": "6e771e15690e"
},
{
"status": "Already exists",
"progressDetail": {},
"id": "58da640818f4"
},
{
"status": "Pulling fs layer",
"progressDetail": {},
"id": "1e214cd6d7d0"
},
{
"status": "Already exists",
"progressDetail": {},
"id": "1a38e1d5e18d"
},
{
"status": "Waiting",
"progressDetail": {},
"id": "1e214cd6d7d0"
},
{
"status": "Downloading",
"progressDetail": {
"current": 103619904,
"total": 436480882
},
"progress": "[===========> ] 103.6MB/436.5MB",
"id": "1e214cd6d7d0"
},
{
"status": "Downloading",
"progressDetail": {
"current": 227726144,
"total": 436480882
},
"progress": "[==========================> ] 227.7MB/436.5MB",
"id": "1e214cd6d7d0"
},
{
"status": "Downloading",
"progressDetail": {
"current": 433170048,
"total": 436480882
},
"progress": "[=================================================> ] 433.2MB/436.5MB",
"id": "1e214cd6d7d0"
},
{
"status": "Retrying in 2 seconds",
"progressDetail": {},
"id": "1e214cd6d7d0"
},
{
"status": "Retrying in 1 seconds",
"progressDetail": {},
"id": "1e214cd6d7d0"
},
{
"status": "Downloading",
"progressDetail": {
"current": 103619904,
"total": 436480882
},
"progress": "[===========> ] 103.6MB/436.5MB",
"id": "1e214cd6d7d0"
},
{
"status": "Downloading",
"progressDetail": {
"current": 227726144,
"total": 436480882
},
"progress": "[==========================> ] 227.7MB/436.5MB",
"id": "1e214cd6d7d0"
},
{
"status": "Downloading",
"progressDetail": {
"current": 433170048,
"total": 436480882
},
"progress": "[=================================================> ] 433.2MB/436.5MB",
"id": "1e214cd6d7d0"
},
{
"status": "Verifying Checksum",
"progressDetail": {},
"id": "1e214cd6d7d0"
},
{
"status": "Download complete",
"progressDetail": {},
"id": "1e214cd6d7d0"
},
{
"status": "Extracting",
"progressDetail": {
"current": 261816320,
"total": 436480882
},
"progress": "[=============================> ] 261.8MB/436.5MB",
"id": "1e214cd6d7d0"
},
{
"status": "Extracting",
"progressDetail": {
"current": 436480882,
"total": 436480882
},
"progress": "[==================================================>] 436.5MB/436.5MB",
"id": "1e214cd6d7d0"
},
{
"status": "Pull complete",
"progressDetail": {},
"id": "1e214cd6d7d0"
},
{
"status": "Digest: sha256:7d97da645f232f82a768d0a537e452536719d56d484d419836e53dbe3e4ec736"
},
{
"status": "Status: Downloaded newer image for ghcr.io/home-assistant/odroid-n2-homeassistant:2025.7.1"
}
]