Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-09-05 11:16:26 +00:00
Compare commits: 12 commits (handle-lay ... mark-old-c)
Commits in this comparison:
4b5bf61dcf
b3cf3da932
5ce62f324f
f84d514958
3c39f2f785
30db72df78
00a78f372b
b69546f2c1
78be155b94
9900dfc8ca
3a1ebc9d37
580c3273dc
@@ -17,13 +17,13 @@ faust-cchardet==2.1.19
 gitpython==3.1.45
 jinja2==3.1.6
 log-rate-limit==1.4.2
-orjson==3.11.2
+orjson==3.11.3
 pulsectl==24.12.0
 pyudev==0.24.3
 PyYAML==6.0.2
 requests==2.32.5
 securetar==2025.2.1
-sentry-sdk==2.35.0
+sentry-sdk==2.35.1
 setuptools==80.9.0
 voluptuous==0.15.2
 dbus-fast==2.44.3
@@ -1,5 +1,5 @@
 astroid==3.3.11
-coverage==7.10.5
+coverage==7.10.6
 mypy==1.17.1
 pre-commit==4.3.0
 pylint==3.3.8
@@ -8,7 +8,7 @@ pytest-asyncio==0.25.2
 pytest-cov==6.2.1
 pytest-timeout==2.4.0
 pytest==8.4.1
-ruff==0.12.10
+ruff==0.12.11
 time-machine==2.19.0
 types-docker==7.1.0.20250822
 types-pyyaml==6.0.12.20250822
@@ -266,10 +266,23 @@ def _migrate_addon_config(protocol=False):
     volumes = []
     for entry in config.get(ATTR_MAP, []):
         if isinstance(entry, dict):
+            # Validate that dict entries have required 'type' field
+            if ATTR_TYPE not in entry:
+                _LOGGER.warning(
+                    "Add-on config has invalid map entry missing 'type' field: %s. Skipping invalid entry for %s",
+                    entry,
+                    name,
+                )
+                continue
             volumes.append(entry)
         if isinstance(entry, str):
             result = RE_VOLUME.match(entry)
             if not result:
+                _LOGGER.warning(
+                    "Add-on config has invalid map entry: %s. Skipping invalid entry for %s",
+                    entry,
+                    name,
+                )
                 continue
             volumes.append(
                 {
@@ -278,8 +291,8 @@ def _migrate_addon_config(protocol=False):
                 }
             )

-    if volumes:
-        config[ATTR_MAP] = volumes
+    # Always update config to clear potentially malformed ones
+    config[ATTR_MAP] = volumes

     # 2023-10 "config" became "homeassistant" so /config can be used for addon's public config
     if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes):
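The migration above now filters malformed map entries instead of failing on them. A minimal standalone sketch of the same idea (the `RE_VOLUME` pattern and field names here are simplified assumptions, not Supervisor's actual schema):

```python
import logging
import re

_LOGGER = logging.getLogger(__name__)

# Simplified stand-in for Supervisor's RE_VOLUME pattern (assumption)
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")


def migrate_map(entries: list) -> list[dict]:
    """Keep only well-formed map entries, logging and skipping the rest."""
    volumes = []
    for entry in entries:
        if isinstance(entry, dict):
            if "type" not in entry:
                _LOGGER.warning("Skipping map entry without 'type': %s", entry)
                continue
            volumes.append(entry)
        elif isinstance(entry, str):
            match = RE_VOLUME.match(entry)
            if not match:
                _LOGGER.warning("Skipping invalid map entry: %s", entry)
                continue
            volumes.append({"type": match.group(1), "read_only": match.group(2) != "rw"})
    return volumes


print(migrate_map(["share:rw", {"addon_config": "rw"}, {}, {"type": "config"}]))
# -> [{'type': 'share', 'read_only': False}, {'type': 'config'}]
```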
@@ -199,21 +199,25 @@ class APIIngress(CoreSysAttributes):
            url = f"{url}?{request.query_string}"

        # Start proxy
-        async with self.sys_websession.ws_connect(
-            url,
-            headers=source_header,
-            protocols=req_protocols,
-            autoclose=False,
-            autoping=False,
-        ) as ws_client:
-            # Proxy requests
-            await asyncio.wait(
-                [
-                    self.sys_create_task(_websocket_forward(ws_server, ws_client)),
-                    self.sys_create_task(_websocket_forward(ws_client, ws_server)),
-                ],
-                return_when=asyncio.FIRST_COMPLETED,
-            )
+        try:
+            _LOGGER.debug("Proxing WebSocket to %s, upstream url: %s", addon.slug, url)
+            async with self.sys_websession.ws_connect(
+                url,
+                headers=source_header,
+                protocols=req_protocols,
+                autoclose=False,
+                autoping=False,
+            ) as ws_client:
+                # Proxy requests
+                await asyncio.wait(
+                    [
+                        self.sys_create_task(_websocket_forward(ws_server, ws_client)),
+                        self.sys_create_task(_websocket_forward(ws_client, ws_server)),
+                    ],
+                    return_when=asyncio.FIRST_COMPLETED,
+                )
+        except TimeoutError:
+            _LOGGER.warning("WebSocket proxy to %s timed out", addon.slug)

        return ws_server

@@ -286,6 +290,7 @@ class APIIngress(CoreSysAttributes):
        aiohttp.ClientError,
        aiohttp.ClientPayloadError,
        ConnectionResetError,
+        ConnectionError,
    ) as err:
        _LOGGER.error("Stream error with %s: %s", url, err)

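The change wraps the upstream connection and the bidirectional pump in a try/except so a timed-out upstream closes the proxy quietly instead of surfacing an error. A self-contained aiohttp sketch of that pattern, under stated assumptions: `_forward` stands in for Supervisor's `_websocket_forward`, and on Python < 3.11 the caught exception would be `asyncio.TimeoutError` rather than the builtin `TimeoutError`:

```python
import asyncio

import aiohttp
from aiohttp import web


async def _forward(source, target):
    """Copy messages from one websocket to the other (simplified stand-in)."""
    async for msg in source:
        if msg.type == aiohttp.WSMsgType.TEXT:
            await target.send_str(msg.data)
        elif msg.type == aiohttp.WSMsgType.BINARY:
            await target.send_bytes(msg.data)


async def proxy_ws(request: web.Request, upstream_url: str) -> web.WebSocketResponse:
    """Proxy a client websocket to an upstream URL, tolerating upstream timeouts."""
    ws_server = web.WebSocketResponse()
    await ws_server.prepare(request)

    try:
        async with aiohttp.ClientSession() as session:
            async with session.ws_connect(
                upstream_url, autoclose=False, autoping=False
            ) as ws_client:
                # Pump both directions until either side finishes
                await asyncio.wait(
                    [
                        asyncio.create_task(_forward(ws_server, ws_client)),
                        asyncio.create_task(_forward(ws_client, ws_server)),
                    ],
                    return_when=asyncio.FIRST_COMPLETED,
                )
    except TimeoutError:
        # Same idea as the change above: a timed-out upstream is logged, not raised
        pass

    return ws_server
```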
@@ -1,15 +1,20 @@
 """Docker constants."""

+from __future__ import annotations
+
 from contextlib import suppress
 from enum import Enum, StrEnum
 from functools import total_ordering
 from pathlib import PurePath
-from typing import Self, cast
+import re
+from typing import cast

 from docker.types import Mount

 from ..const import MACHINE_ID

+RE_RETRYING_DOWNLOAD_STATUS = re.compile(r"Retrying in \d+ seconds?")
+

 class Capabilities(StrEnum):
     """Linux Capabilities."""

@@ -79,6 +84,7 @@ class PullImageLayerStage(Enum):
     """

     PULLING_FS_LAYER = 1, "Pulling fs layer"
+    RETRYING_DOWNLOAD = 2, "Retrying download"
     DOWNLOADING = 2, "Downloading"
     VERIFYING_CHECKSUM = 3, "Verifying Checksum"
     DOWNLOAD_COMPLETE = 4, "Download complete"

@@ -107,11 +113,16 @@
         return hash(self.status)

     @classmethod
-    def from_status(cls, status: str) -> Self | None:
+    def from_status(cls, status: str) -> PullImageLayerStage | None:
         """Return stage instance from pull log status."""
         for i in cls:
             if i.status == status:
                 return i
+
+        # This one includes number of seconds until download so its not constant
+        if RE_RETRYING_DOWNLOAD_STATUS.match(status):
+            return cls.RETRYING_DOWNLOAD
+
         return None


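The new `RETRYING_DOWNLOAD` stage cannot be resolved by exact status comparison because Docker's retry status embeds a countdown, hence the regex fallback in `from_status`. A quick check of the pattern on its own:

```python
import re

RE_RETRYING_DOWNLOAD_STATUS = re.compile(r"Retrying in \d+ seconds?")

# Only the retry status varies per event; the other pull statuses are constant strings.
for status in ("Downloading", "Retrying in 5 seconds", "Retrying in 1 seconds"):
    print(status, bool(RE_RETRYING_DOWNLOAD_STATUS.match(status)))
# Downloading False
# Retrying in 5 seconds True
# Retrying in 1 seconds True
```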
@@ -291,8 +291,10 @@ class DockerInterface(JobGroup, ABC):
                 progress = 50
             case PullImageLayerStage.PULL_COMPLETE:
                 progress = 100
+            case PullImageLayerStage.RETRYING_DOWNLOAD:
+                progress = 0

-        if progress < job.progress:
+        if stage != PullImageLayerStage.RETRYING_DOWNLOAD and progress < job.progress:
             raise DockerLogOutOfOrder(
                 f"Received pull image log with status {reference.status} for job {job.uuid} that implied progress was {progress} but current progress is {job.progress}, skipping",
                 _LOGGER.debug,

@@ -300,7 +302,7 @@ class DockerInterface(JobGroup, ABC):

         # Our filters have all passed. Time to update the job
         # Only downloading and extracting have progress details. Use that to set extra
-        # We'll leave it around on other stages as the total bytes may be useful after that stage
+        # We'll leave it around on later stages as the total bytes may be useful after that stage
         if (
             stage in {PullImageLayerStage.DOWNLOADING, PullImageLayerStage.EXTRACTING}
             and reference.progress_detail

@@ -318,6 +320,9 @@ class DockerInterface(JobGroup, ABC):
             progress=progress,
             stage=stage.status,
             done=stage == PullImageLayerStage.PULL_COMPLETE,
+            extra=None
+            if stage == PullImageLayerStage.RETRYING_DOWNLOAD
+            else job.extra,
         )

     @Job(
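In effect the out-of-order guard now tolerates the one case where progress legitimately moves backwards: a layer download restarting from zero. A tiny sketch of that rule in isolation (names simplified, not the Supervisor method itself):

```python
def check_progress(stage: str, new_progress: float, current_progress: float) -> float:
    """Reject progress regressions except when a layer download restarts."""
    if stage != "Retrying download" and new_progress < current_progress:
        raise ValueError("pull log out of order")
    return new_progress


print(check_progress("Retrying download", 0, 49.6))  # 0 -- allowed, download restarted
print(check_progress("Downloading", 11.9, 0))        # 11.9 -- normal forward progress
```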
@@ -91,7 +91,7 @@ class SystemControl(CoreSysAttributes):
         if (
             self.coresys.os.available
             and self.coresys.os.version is not None
-            and self.sys_os.version >= AwesomeVersion("16.2.dev0")
+            and self.sys_os.version >= AwesomeVersion("16.2.dev20250814")
         ):
             _LOGGER.info("Setting host timezone: %s", timezone)
             await self.sys_dbus.timedate.set_timezone(timezone)
@@ -7,7 +7,6 @@ from contextvars import Context, ContextVar, Token
 from dataclasses import dataclass
 from datetime import datetime
 import logging
-import math
 from typing import Any, Self
 from uuid import uuid4

@@ -98,7 +97,6 @@ class SupervisorJob:
         default=0,
         validator=[ge(0), le(100), _invalid_if_done],
         on_setattr=_on_change,
-        converter=lambda val: math.floor(val * 10) / 10,
     )
     stage: str | None = field(
         default=None, validator=[_invalid_if_done], on_setattr=_on_change

@@ -119,7 +117,7 @@ class SupervisorJob:
             "name": self.name,
             "reference": self.reference,
             "uuid": self.uuid,
-            "progress": self.progress,
+            "progress": round(self.progress, 1),
             "stage": self.stage,
             "done": self.done,
             "parent_id": self.parent_id,
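The net effect of these two changes: the job keeps the raw progress value and only rounds it when serializing, instead of flooring it on every write. Illustration with an arbitrary value:

```python
import math

progress = 11.87  # hypothetical raw value computed from downloaded / total bytes

floored = math.floor(progress * 10) / 10   # 11.8 (old attrs converter behaviour)
reported = round(progress, 1)              # 11.9 (new behaviour, applied in as_dict())

print(floored, reported)
```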
@@ -20,6 +20,7 @@ class JobCondition(StrEnum):
    """Job condition enum."""

    AUTO_UPDATE = "auto_update"
+    CORE_SUPPORTED = "core_supported"
    FREE_SPACE = "free_space"
    FROZEN = "frozen"
    HAOS = "haos"
@@ -404,6 +404,14 @@ class Job(CoreSysAttributes):
                f"'{method_name}' blocked from execution, unsupported OS version"
            )

+        if (
+            JobCondition.CORE_SUPPORTED in used_conditions
+            and UnsupportedReason.CORE_VERSION in coresys.sys_resolution.unsupported
+        ):
+            raise JobConditionException(
+                f"'{method_name}' blocked from execution, unsupported Core version"
+            )
+
        if (
            JobCondition.HOST_NETWORK in used_conditions
            and not coresys.sys_dbus.network.is_connected
@@ -40,6 +40,7 @@ class UnsupportedReason(StrEnum):
    CGROUP_VERSION = "cgroup_version"
    CONNECTIVITY_CHECK = "connectivity_check"
    CONTENT_TRUST = "content_trust"
+    CORE_VERSION = "core_version"
    DBUS = "dbus"
    DNS_SERVER = "dns_server"
    DOCKER_CONFIGURATION = "docker_configuration"
84 supervisor/resolution/evaluations/core_version.py (Normal file)
@@ -0,0 +1,84 @@
"""Evaluation class for Core version."""

from datetime import datetime, timedelta

from awesomeversion import (
    AwesomeVersion,
    AwesomeVersionException,
    AwesomeVersionStrategy,
)

from ...const import CoreState
from ...coresys import CoreSys
from ...homeassistant.const import LANDINGPAGE
from ..const import UnsupportedReason
from .base import EvaluateBase


def setup(coresys: CoreSys) -> EvaluateBase:
    """Initialize evaluation-setup function."""
    return EvaluateCoreVersion(coresys)


class EvaluateCoreVersion(EvaluateBase):
    """Evaluate the Home Assistant Core version."""

    @property
    def reason(self) -> UnsupportedReason:
        """Return a UnsupportedReason enum."""
        return UnsupportedReason.CORE_VERSION

    @property
    def on_failure(self) -> str:
        """Return a string that is printed when self.evaluate is True."""
        return f"Home Assistant Core version '{self.sys_homeassistant.version}' is more than 2 years old!"

    @property
    def states(self) -> list[CoreState]:
        """Return a list of valid states when this evaluation can run."""
        return [CoreState.RUNNING, CoreState.SETUP]

    async def evaluate(self) -> bool:
        """Run evaluation."""
        if not (current := self.sys_homeassistant.version) or not (
            latest := self.sys_homeassistant.latest_version
        ):
            return False

        # Skip evaluation for landingpage version
        if current == LANDINGPAGE:
            return False

        try:
            # Calculate if the current version was released more than 2 years ago
            # Home Assistant releases happen monthly, so approximately 24 versions per 2 years
            # However, we'll be more precise and check based on actual version numbers
            # Home Assistant follows a versioning scheme like 2024.1, 2024.2, etc.

            # Extract year from current version
            current_year = int(str(current).split(".")[0])

            # Calculate 2 years ago from now
            two_years_ago = datetime.now() - timedelta(days=730)  # 2 years = 730 days
            cutoff_year = two_years_ago.year
            cutoff_month = two_years_ago.month

            # Create a cutoff version based on the date 2 years ago
            cutoff_version = AwesomeVersion(
                f"{cutoff_year}.{cutoff_month}",
                ensure_strategy=AwesomeVersionStrategy.CALVER,
            )

            # Compare current version with the cutoff
            return current < cutoff_version

        except (AwesomeVersionException, ValueError, IndexError):
            # If we can't parse the version format, fall back to conservative approach
            # Consider unsupported if current is significantly behind latest
            try:
                # If latest version is from current year and current is from 2+ years ago
                latest_year = int(str(latest).split(".")[0])
                current_year = int(str(current).split(".")[0])
                return (latest_year - current_year) >= 2
            except (ValueError, IndexError):
                return False
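Worked example of the cutoff computed by `evaluate()`, assuming today is 2025-09-05 (the sync date of this mirror):

```python
from datetime import datetime, timedelta

now = datetime(2025, 9, 5)
two_years_ago = now - timedelta(days=730)
cutoff = f"{two_years_ago.year}.{two_years_ago.month}"
print(cutoff)  # 2023.9 -> any Core version older than 2023.9 would be flagged
```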
@@ -140,6 +140,46 @@ def test_valid_map():
     vd.SCHEMA_ADDON_CONFIG(config)


+def test_malformed_map_entries():
+    """Test that malformed map entries are handled gracefully (issue #6124)."""
+    config = load_json_fixture("basic-addon-config.json")
+
+    # Test case 1: Empty dict in map (should be skipped with warning)
+    config["map"] = [{}]
+    valid_config = vd.SCHEMA_ADDON_CONFIG(config)
+    assert valid_config["map"] == []
+
+    # Test case 2: Dict missing required 'type' field (should be skipped with warning)
+    config["map"] = [{"read_only": False, "path": "/custom"}]
+    valid_config = vd.SCHEMA_ADDON_CONFIG(config)
+    assert valid_config["map"] == []
+
+    # Test case 3: Invalid string format that doesn't match regex
+    config["map"] = ["invalid_format", "not:a:valid:mapping", "share:invalid_mode"]
+    valid_config = vd.SCHEMA_ADDON_CONFIG(config)
+    assert valid_config["map"] == []
+
+    # Test case 4: Mix of valid and invalid entries (invalid should be filtered out)
+    config["map"] = [
+        "share:rw",  # Valid string format
+        "invalid_string",  # Invalid string format
+        {},  # Invalid empty dict
+        {"type": "config", "read_only": True},  # Valid dict format
+        {"read_only": False},  # Invalid - missing type
+    ]
+    valid_config = vd.SCHEMA_ADDON_CONFIG(config)
+    # Should only keep the valid entries
+    assert len(valid_config["map"]) == 2
+    assert any(entry["type"] == "share" for entry in valid_config["map"])
+    assert any(entry["type"] == "config" for entry in valid_config["map"])
+
+    # Test case 5: The specific case from the UplandJacob repo (malformed YAML format)
+    # This simulates what YAML "- addon_config: rw" creates
+    config["map"] = [{"addon_config": "rw"}]  # Wrong structure, missing 'type' key
+    valid_config = vd.SCHEMA_ADDON_CONFIG(config)
+    assert valid_config["map"] == []
+
+
 def test_valid_basic_build():
     """Validate basic build config."""
     config = load_json_fixture("basic-build-config.json")
@@ -21,7 +21,6 @@ from supervisor.docker.monitor import DockerContainerStateEvent
 from supervisor.exceptions import (
     DockerAPIError,
     DockerError,
-    DockerLogOutOfOrder,
     DockerNoSpaceOnDevice,
     DockerNotFound,
     DockerRequestError,
@@ -486,25 +485,25 @@ async def test_install_sends_progress_to_home_assistant(
         {"stage": "Pulling fs layer", "progress": 0, "done": False, "extra": None},
         {
             "stage": "Downloading",
-            "progress": 0.0,
+            "progress": 0.1,
             "done": False,
             "extra": {"current": 539462, "total": 436480882},
         },
         {
             "stage": "Downloading",
-            "progress": 0.5,
+            "progress": 0.6,
             "done": False,
             "extra": {"current": 4864838, "total": 436480882},
         },
         {
             "stage": "Downloading",
-            "progress": 0.8,
+            "progress": 0.9,
             "done": False,
             "extra": {"current": 7552896, "total": 436480882},
         },
         {
             "stage": "Downloading",
-            "progress": 1.1,
+            "progress": 1.2,
             "done": False,
             "extra": {"current": 10252544, "total": 436480882},
         },
@@ -516,13 +515,13 @@
         },
         {
             "stage": "Downloading",
-            "progress": 11.8,
+            "progress": 11.9,
             "done": False,
             "extra": {"current": 103619904, "total": 436480882},
         },
         {
             "stage": "Downloading",
-            "progress": 26.0,
+            "progress": 26.1,
             "done": False,
             "extra": {"current": 227726144, "total": 436480882},
         },
@@ -534,49 +533,49 @@
         },
         {
             "stage": "Verifying Checksum",
-            "progress": 50.0,
+            "progress": 50,
             "done": False,
             "extra": {"current": 433170048, "total": 436480882},
         },
         {
             "stage": "Download complete",
-            "progress": 50.0,
+            "progress": 50,
             "done": False,
             "extra": {"current": 433170048, "total": 436480882},
         },
         {
             "stage": "Extracting",
-            "progress": 50.0,
+            "progress": 50.1,
             "done": False,
             "extra": {"current": 557056, "total": 436480882},
         },
         {
             "stage": "Extracting",
-            "progress": 60.2,
+            "progress": 60.3,
             "done": False,
             "extra": {"current": 89686016, "total": 436480882},
         },
         {
             "stage": "Extracting",
-            "progress": 69.9,
+            "progress": 70.0,
             "done": False,
             "extra": {"current": 174358528, "total": 436480882},
         },
         {
             "stage": "Extracting",
-            "progress": 79.9,
+            "progress": 80.0,
             "done": False,
             "extra": {"current": 261816320, "total": 436480882},
         },
         {
             "stage": "Extracting",
-            "progress": 88.3,
+            "progress": 88.4,
             "done": False,
             "extra": {"current": 334790656, "total": 436480882},
         },
         {
             "stage": "Extracting",
-            "progress": 93.9,
+            "progress": 94.0,
             "done": False,
             "extra": {"current": 383811584, "total": 436480882},
         },
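The expected values above shift by one tenth because serialization now rounds instead of flooring. Using the byte counts of the first Downloading event from this test:

```python
import math

raw = 50 * 539462 / 436480882
print(f"{raw:.4f}")               # 0.0618
print(math.floor(raw * 10) / 10)  # 0.0  (old floor-style converter)
print(round(raw, 1))              # 0.1  (new round at serialization)
```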
@@ -601,6 +600,136 @@
     ]


+async def test_install_progress_rounding_does_not_cause_misses(
+    coresys: CoreSys, test_docker_interface: DockerInterface, ha_ws_client: AsyncMock
+):
+    """Test extremely close progress events do not create rounding issues."""
+    coresys.core.set_state(CoreState.RUNNING)
+    coresys.docker.docker.api.pull.return_value = [
+        {
+            "status": "Pulling from home-assistant/odroid-n2-homeassistant",
+            "id": "2025.7.1",
+        },
+        {"status": "Pulling fs layer", "progressDetail": {}, "id": "1e214cd6d7d0"},
+        {
+            "status": "Downloading",
+            "progressDetail": {"current": 432700000, "total": 436480882},
+            "progress": "[=================================================> ] 432.7MB/436.5MB",
+            "id": "1e214cd6d7d0",
+        },
+        {
+            "status": "Downloading",
+            "progressDetail": {"current": 432800000, "total": 436480882},
+            "progress": "[=================================================> ] 432.8MB/436.5MB",
+            "id": "1e214cd6d7d0",
+        },
+        {"status": "Verifying Checksum", "progressDetail": {}, "id": "1e214cd6d7d0"},
+        {"status": "Download complete", "progressDetail": {}, "id": "1e214cd6d7d0"},
+        {
+            "status": "Extracting",
+            "progressDetail": {"current": 432700000, "total": 436480882},
+            "progress": "[=================================================> ] 432.7MB/436.5MB",
+            "id": "1e214cd6d7d0",
+        },
+        {
+            "status": "Extracting",
+            "progressDetail": {"current": 432800000, "total": 436480882},
+            "progress": "[=================================================> ] 432.8MB/436.5MB",
+            "id": "1e214cd6d7d0",
+        },
+        {"status": "Pull complete", "progressDetail": {}, "id": "1e214cd6d7d0"},
+        {
+            "status": "Digest: sha256:7d97da645f232f82a768d0a537e452536719d56d484d419836e53dbe3e4ec736"
+        },
+        {
+            "status": "Status: Downloaded newer image for ghcr.io/home-assistant/odroid-n2-homeassistant:2025.7.1"
+        },
+    ]
+
+    with (
+        patch.object(
+            type(coresys.supervisor), "arch", PropertyMock(return_value="i386")
+        ),
+    ):
+        # Schedule job so we can listen for the end. Then we can assert against the WS mock
+        event = asyncio.Event()
+        job, install_task = coresys.jobs.schedule_job(
+            test_docker_interface.install,
+            JobSchedulerOptions(),
+            AwesomeVersion("1.2.3"),
+            "test",
+        )
+
+        async def listen_for_job_end(reference: SupervisorJob):
+            if reference.uuid != job.uuid:
+                return
+            event.set()
+
+        coresys.bus.register_event(BusEvent.SUPERVISOR_JOB_END, listen_for_job_end)
+        await install_task
+        await event.wait()
+
+    events = [
+        evt.args[0]["data"]["data"]
+        for evt in ha_ws_client.async_send_command.call_args_list
+        if "data" in evt.args[0]
+        and evt.args[0]["data"]["event"] == WSEvent.JOB
+        and evt.args[0]["data"]["data"]["reference"] == "1e214cd6d7d0"
+        and evt.args[0]["data"]["data"]["stage"] in {"Downloading", "Extracting"}
+    ]
+
+    assert events == [
+        {
+            "name": "Pulling container image layer",
+            "stage": "Downloading",
+            "progress": 49.6,
+            "done": False,
+            "extra": {"current": 432700000, "total": 436480882},
+            "reference": "1e214cd6d7d0",
+            "parent_id": job.uuid,
+            "errors": [],
+            "uuid": ANY,
+            "created": ANY,
+        },
+        {
+            "name": "Pulling container image layer",
+            "stage": "Downloading",
+            "progress": 49.6,
+            "done": False,
+            "extra": {"current": 432800000, "total": 436480882},
+            "reference": "1e214cd6d7d0",
+            "parent_id": job.uuid,
+            "errors": [],
+            "uuid": ANY,
+            "created": ANY,
+        },
+        {
+            "name": "Pulling container image layer",
+            "stage": "Extracting",
+            "progress": 99.6,
+            "done": False,
+            "extra": {"current": 432700000, "total": 436480882},
+            "reference": "1e214cd6d7d0",
+            "parent_id": job.uuid,
+            "errors": [],
+            "uuid": ANY,
+            "created": ANY,
+        },
+        {
+            "name": "Pulling container image layer",
+            "stage": "Extracting",
+            "progress": 99.6,
+            "done": False,
+            "extra": {"current": 432800000, "total": 436480882},
+            "reference": "1e214cd6d7d0",
+            "parent_id": job.uuid,
+            "errors": [],
+            "uuid": ANY,
+            "created": ANY,
+        },
+    ]
+
+
 @pytest.mark.parametrize(
     ("error_log", "exc_type", "exc_msg"),
     [
@@ -647,56 +776,138 @@ async def test_install_raises_on_pull_error(
         await test_docker_interface.install(AwesomeVersion("1.2.3"), "test")


-async def test_process_pull_image_log_precision_fix(
-    coresys: CoreSys, test_docker_interface: DockerInterface
+async def test_install_progress_handles_download_restart(
+    coresys: CoreSys, test_docker_interface: DockerInterface, ha_ws_client: AsyncMock
 ):
-    """Test that precision issues don't cause DockerLogOutOfOrder errors."""
-    job_id = "test_job_123"
-    layer_id = "abc123"
-
-    # First, create the job with a "Pulling fs layer" event
-    fs_layer_entry = PullLogEntry(
-        job_id=job_id,
-        id=layer_id,
-        status="Pulling fs layer",
-    )
-    test_docker_interface._process_pull_image_log(job_id, fs_layer_entry)
-
-    # First extracting event with higher progress
-    entry1 = PullLogEntry(
-        job_id=job_id,
-        id=layer_id,
-        status="Extracting",
-        progress_detail=PullProgressDetail(current=91300, total=100000),
+    """Test install handles docker progress events that include a download restart."""
+    coresys.core.set_state(CoreState.RUNNING)
+    coresys.docker.docker.api.pull.return_value = load_json_fixture(
+        "docker_pull_image_log_restart.json"
     )

-    # Second extracting event with slightly lower progress that would cause precision issue
-    # This simulates the real-world scenario from the Sentry error
-    entry2 = PullLogEntry(
-        job_id=job_id,
-        id=layer_id,
-        status="Extracting",
-        progress_detail=PullProgressDetail(current=91284, total=100000),
-    )
+    with (
+        patch.object(
+            type(coresys.supervisor), "arch", PropertyMock(return_value="i386")
+        ),
+    ):
+        # Schedule job so we can listen for the end. Then we can assert against the WS mock
+        event = asyncio.Event()
+        job, install_task = coresys.jobs.schedule_job(
+            test_docker_interface.install,
+            JobSchedulerOptions(),
+            AwesomeVersion("1.2.3"),
+            "test",
+        )

-    # Process first extracting entry
-    test_docker_interface._process_pull_image_log(job_id, entry1)
+        async def listen_for_job_end(reference: SupervisorJob):
+            if reference.uuid != job.uuid:
+                return
+            event.set()

-    # Find the job to verify progress
-    layer_job = None
-    for job in coresys.jobs.jobs:
-        if job.parent_id == job_id and job.reference == layer_id:
-            layer_job = job
-            break
+        coresys.bus.register_event(BusEvent.SUPERVISOR_JOB_END, listen_for_job_end)
+        await install_task
+        await event.wait()

-    assert layer_job is not None, "Layer job should have been created"
-    # Progress calculation: 50 + (50 * 91300/100000) = 50 + 45.65 = 95.65 -> floors to 95.6
-    assert layer_job.progress == 95.6
+    events = [
+        evt.args[0]["data"]["data"]
+        for evt in ha_ws_client.async_send_command.call_args_list
+        if "data" in evt.args[0] and evt.args[0]["data"]["event"] == WSEvent.JOB
+    ]

-    # Process second entry - this should NOT raise DockerLogOutOfOrder
-    # Previously this would fail because the calculated progress (95.642...) was less than stored (95.7 if rounded up)
-    # With floor rounding, both values are consistent: calculated 95.6 <= stored 95.6
-    try:
-        test_docker_interface._process_pull_image_log(job_id, entry2)
-    except DockerLogOutOfOrder:
-        pytest.fail("DockerLogOutOfOrder should not be raised due to precision fix")
+    def make_sub_log(layer_id: str):
+        return [
+            {
+                "stage": evt["stage"],
+                "progress": evt["progress"],
+                "done": evt["done"],
+                "extra": evt["extra"],
+            }
+            for evt in events
+            if evt["name"] == "Pulling container image layer"
+            and evt["reference"] == layer_id
+            and evt["parent_id"] == job.uuid
+        ]
+
+    layer_1_log = make_sub_log("1e214cd6d7d0")
+    assert len(layer_1_log) == 14
+    assert layer_1_log == [
+        {"stage": "Pulling fs layer", "progress": 0, "done": False, "extra": None},
+        {
+            "stage": "Downloading",
+            "progress": 11.9,
+            "done": False,
+            "extra": {"current": 103619904, "total": 436480882},
+        },
+        {
+            "stage": "Downloading",
+            "progress": 26.1,
+            "done": False,
+            "extra": {"current": 227726144, "total": 436480882},
+        },
+        {
+            "stage": "Downloading",
+            "progress": 49.6,
+            "done": False,
+            "extra": {"current": 433170048, "total": 436480882},
+        },
+        {
+            "stage": "Retrying download",
+            "progress": 0,
+            "done": False,
+            "extra": None,
+        },
+        {
+            "stage": "Retrying download",
+            "progress": 0,
+            "done": False,
+            "extra": None,
+        },
+        {
+            "stage": "Downloading",
+            "progress": 11.9,
+            "done": False,
+            "extra": {"current": 103619904, "total": 436480882},
+        },
+        {
+            "stage": "Downloading",
+            "progress": 26.1,
+            "done": False,
+            "extra": {"current": 227726144, "total": 436480882},
+        },
+        {
+            "stage": "Downloading",
+            "progress": 49.6,
+            "done": False,
+            "extra": {"current": 433170048, "total": 436480882},
+        },
+        {
+            "stage": "Verifying Checksum",
+            "progress": 50,
+            "done": False,
+            "extra": {"current": 433170048, "total": 436480882},
+        },
+        {
+            "stage": "Download complete",
+            "progress": 50,
+            "done": False,
+            "extra": {"current": 433170048, "total": 436480882},
+        },
+        {
+            "stage": "Extracting",
+            "progress": 80.0,
+            "done": False,
+            "extra": {"current": 261816320, "total": 436480882},
+        },
+        {
+            "stage": "Extracting",
+            "progress": 100.0,
+            "done": False,
+            "extra": {"current": 436480882, "total": 436480882},
+        },
+        {
+            "stage": "Pull complete",
+            "progress": 100.0,
+            "done": True,
+            "extra": {"current": 436480882, "total": 436480882},
+        },
+    ]
134 tests/fixtures/docker_pull_image_log_restart.json (vendored, Normal file)
@@ -0,0 +1,134 @@
[
  {
    "status": "Pulling from home-assistant/odroid-n2-homeassistant",
    "id": "2025.7.1"
  },
  {
    "status": "Already exists",
    "progressDetail": {},
    "id": "6e771e15690e"
  },
  {
    "status": "Already exists",
    "progressDetail": {},
    "id": "58da640818f4"
  },
  {
    "status": "Pulling fs layer",
    "progressDetail": {},
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Already exists",
    "progressDetail": {},
    "id": "1a38e1d5e18d"
  },
  {
    "status": "Waiting",
    "progressDetail": {},
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Downloading",
    "progressDetail": {
      "current": 103619904,
      "total": 436480882
    },
    "progress": "[===========> ] 103.6MB/436.5MB",
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Downloading",
    "progressDetail": {
      "current": 227726144,
      "total": 436480882
    },
    "progress": "[==========================> ] 227.7MB/436.5MB",
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Downloading",
    "progressDetail": {
      "current": 433170048,
      "total": 436480882
    },
    "progress": "[=================================================> ] 433.2MB/436.5MB",
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Retrying in 2 seconds",
    "progressDetail": {},
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Retrying in 1 seconds",
    "progressDetail": {},
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Downloading",
    "progressDetail": {
      "current": 103619904,
      "total": 436480882
    },
    "progress": "[===========> ] 103.6MB/436.5MB",
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Downloading",
    "progressDetail": {
      "current": 227726144,
      "total": 436480882
    },
    "progress": "[==========================> ] 227.7MB/436.5MB",
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Downloading",
    "progressDetail": {
      "current": 433170048,
      "total": 436480882
    },
    "progress": "[=================================================> ] 433.2MB/436.5MB",
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Verifying Checksum",
    "progressDetail": {},
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Download complete",
    "progressDetail": {},
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Extracting",
    "progressDetail": {
      "current": 261816320,
      "total": 436480882
    },
    "progress": "[=============================> ] 261.8MB/436.5MB",
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Extracting",
    "progressDetail": {
      "current": 436480882,
      "total": 436480882
    },
    "progress": "[==================================================>] 436.5MB/436.5MB",
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Pull complete",
    "progressDetail": {},
    "id": "1e214cd6d7d0"
  },
  {
    "status": "Digest: sha256:7d97da645f232f82a768d0a537e452536719d56d484d419836e53dbe3e4ec736"
  },
  {
    "status": "Status: Downloaded newer image for ghcr.io/home-assistant/odroid-n2-homeassistant:2025.7.1"
  }
]
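For reference, the progress values asserted in the restart test follow from this fixture's byte counters, assuming Downloading maps onto the 0-50 half of the layer job and Extracting onto 50-100 (an inference from the stages above, rounded to one decimal):

```python
def layer_progress(stage: str, current: int, total: int) -> float:
    """Map a layer's byte counter onto the 0-100 scale used by the pull job."""
    half = 50 * current / total
    return round(half if stage == "Downloading" else 50 + half, 1)


print(layer_progress("Downloading", 103619904, 436480882))  # 11.9
print(layer_progress("Downloading", 433170048, 436480882))  # 49.6
print(layer_progress("Extracting", 261816320, 436480882))   # 80.0
print(layer_progress("Extracting", 436480882, 436480882))   # 100.0
```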
@@ -25,7 +25,7 @@ from supervisor.jobs.decorator import Job, JobCondition
 from supervisor.jobs.job_group import JobGroup
 from supervisor.os.manager import OSManager
 from supervisor.plugins.audio import PluginAudio
-from supervisor.resolution.const import UnhealthyReason
+from supervisor.resolution.const import UnhealthyReason, UnsupportedReason
 from supervisor.supervisor import Supervisor
 from supervisor.utils.dt import utcnow

@@ -1358,3 +1358,31 @@ async def test_group_concurrency_with_group_throttling(coresys: CoreSys):

     assert test.call_count == 2  # Should execute now
     assert test.nested_call_count == 2  # Nested call should also execute
+
+
+async def test_core_supported(coresys: CoreSys, caplog: pytest.LogCaptureFixture):
+    """Test the core_supported decorator."""
+
+    class TestClass:
+        """Test class."""
+
+        def __init__(self, coresys: CoreSys):
+            """Initialize the test class."""
+            self.coresys = coresys
+
+        @Job(
+            name="test_core_supported_execute", conditions=[JobCondition.CORE_SUPPORTED]
+        )
+        async def execute(self):
+            """Execute the class method."""
+            return True
+
+    test = TestClass(coresys)
+    assert await test.execute()
+
+    coresys.resolution.unsupported.append(UnsupportedReason.CORE_VERSION)
+    assert not await test.execute()
+    assert "blocked from execution, unsupported Core version" in caplog.text
+
+    coresys.jobs.ignore_conditions = [JobCondition.CORE_SUPPORTED]
+    assert await test.execute()
146 tests/resolution/evaluation/test_evaluate_core_version.py (Normal file)
@@ -0,0 +1,146 @@
"""Test Core Version evaluation."""

from datetime import datetime
from unittest.mock import PropertyMock, patch

from awesomeversion import AwesomeVersion
import pytest

from supervisor.const import CoreState
from supervisor.coresys import CoreSys
from supervisor.homeassistant.const import LANDINGPAGE
from supervisor.homeassistant.module import HomeAssistant
from supervisor.resolution.evaluations.core_version import EvaluateCoreVersion


@pytest.mark.parametrize(
    "current,expected",
    [
        ("2022.1.0", True),  # More than 2 years old, should be unsupported
        ("2023.12.0", False),  # Less than 2 years old, should be supported
        (f"{datetime.now().year}.1", False),  # Current year, supported
        (f"{datetime.now().year - 1}.12", False),  # 1 year old, supported
        (f"{datetime.now().year - 2}.1", True),  # 2 years old, unsupported
        (f"{datetime.now().year - 3}.1", True),  # 3 years old, unsupported
        ("2021.6.0", True),  # Very old version, unsupported
        ("landingpage", False),  # Landingpage version, should be supported
        (None, False),  # No current version info, check skipped
    ],
)
async def test_core_version_evaluation(
    coresys: CoreSys, current: str | None, expected: bool
):
    """Test evaluation logic on Core versions."""
    evaluation = EvaluateCoreVersion(coresys)
    await coresys.core.set_state(CoreState.RUNNING)

    with (
        patch.object(
            HomeAssistant,
            "version",
            new=PropertyMock(return_value=current and AwesomeVersion(current)),
        ),
        patch.object(
            HomeAssistant,
            "latest_version",
            new=PropertyMock(
                return_value=AwesomeVersion("2024.12.0")
            ),  # Mock latest version
        ),
    ):
        assert evaluation.reason not in coresys.resolution.unsupported
        await evaluation()
        assert (evaluation.reason in coresys.resolution.unsupported) is expected


async def test_core_version_evaluation_no_latest(coresys: CoreSys):
    """Test evaluation when no latest version is available."""
    evaluation = EvaluateCoreVersion(coresys)
    await coresys.core.set_state(CoreState.RUNNING)

    with (
        patch.object(
            HomeAssistant,
            "version",
            new=PropertyMock(return_value=AwesomeVersion("2022.1.0")),
        ),
        patch.object(
            HomeAssistant,
            "latest_version",
            new=PropertyMock(return_value=None),
        ),
    ):
        assert evaluation.reason not in coresys.resolution.unsupported
        await evaluation()
        assert evaluation.reason not in coresys.resolution.unsupported


async def test_core_version_invalid_format(coresys: CoreSys):
    """Test evaluation with invalid version format."""
    evaluation = EvaluateCoreVersion(coresys)
    await coresys.core.set_state(CoreState.RUNNING)

    with (
        patch.object(
            HomeAssistant,
            "version",
            new=PropertyMock(return_value=AwesomeVersion("invalid.version")),
        ),
        patch.object(
            HomeAssistant,
            "latest_version",
            new=PropertyMock(return_value=AwesomeVersion("2024.12.0")),
        ),
    ):
        assert evaluation.reason not in coresys.resolution.unsupported
        await evaluation()
        # Should handle gracefully and not mark as unsupported
        assert evaluation.reason not in coresys.resolution.unsupported


async def test_core_version_landingpage(coresys: CoreSys):
    """Test evaluation with landingpage version."""
    evaluation = EvaluateCoreVersion(coresys)
    await coresys.core.set_state(CoreState.RUNNING)

    with (
        patch.object(
            HomeAssistant,
            "version",
            new=PropertyMock(return_value=LANDINGPAGE),
        ),
        patch.object(
            HomeAssistant,
            "latest_version",
            new=PropertyMock(return_value=AwesomeVersion("2024.12.0")),
        ),
    ):
        assert evaluation.reason not in coresys.resolution.unsupported
        await evaluation()
        # Landingpage should never be marked as unsupported
        assert evaluation.reason not in coresys.resolution.unsupported


async def test_did_run(coresys: CoreSys):
    """Test that the evaluation ran as expected."""
    evaluation = EvaluateCoreVersion(coresys)
    should_run = evaluation.states
    should_not_run = [state for state in CoreState if state not in should_run]
    assert len(should_run) != 0
    assert len(should_not_run) != 0

    with patch(
        "supervisor.resolution.evaluations.core_version.EvaluateCoreVersion.evaluate",
        return_value=None,
    ) as evaluate:
        for state in should_run:
            await coresys.core.set_state(state)
            await evaluation()
            evaluate.assert_called_once()
            evaluate.reset_mock()

        for state in should_not_run:
            await coresys.core.set_state(state)
            await evaluation()
            evaluate.assert_not_called()
            evaluate.reset_mock()