Compare commits

...

11 Commits

Author SHA1 Message Date
copilot-swe-agent[bot]
ef63083c08 Support Docker containerd snapshotter for image extraction progress
Co-authored-by: agners <34061+agners@users.noreply.github.com>
2025-11-11 09:42:10 +00:00
copilot-swe-agent[bot]
8ac60b7c34 Initial plan 2025-11-11 09:30:49 +00:00
Stefan Agner
91a9cb98c3 Avoid adding Content-Type to non-body responses (#6266)
* Avoid adding Content-Type to non-body responses

The current code sets the content-type header for all responses
to the result's content_type property if upstream does not set a
content_type. The default value for content_type is
"application/octet-stream".

For responses that do not have a body (like 204 No Content or
304 Not Modified), setting a content-type header is unnecessary and
potentially misleading. Follow HTTP standards by only adding the
content-type header to responses that actually contain a body.

* Add pytest for ingress proxy

* Preserve Content-Type header for HEAD requests in ingress API
2025-11-10 17:39:10 +01:00
Stefan Agner
8f2b0763b7 Add zstd compression support (#6302)
Add zstd compression support to allow zstd-compressed proxying for
ingress. Zstd is automatically supported by aiohttp if the package
is present.
2025-11-10 17:04:06 +01:00
Stefan Agner
5018d5d04e Bump pytest-asyncio to 1.2.0 (#6301) 2025-11-10 12:00:25 +01:00
Stefan Agner
1ba1ad9fc7 Remove Docker version from unhealthy reasons (#6292)
Any unhealthy reason blocks Home Assistant OS updates. If the Docker
version on a system running Home Assistant OS is outdated, the user
needs to be able to update Home Assistant OS to get a supported Docker
version. Therefore, we should not mark the system as unhealthy due to
an outdated Docker version.
2025-11-10 10:23:12 +01:00
dependabot[bot]
f0ef40eb3e Bump astroid from 4.0.1 to 4.0.2 (#6297)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-10 09:55:16 +01:00
dependabot[bot]
6eed5b02b4 Bump coverage from 7.11.0 to 7.11.3 (#6298) 2025-11-09 23:24:55 -08:00
dependabot[bot]
e59dcf7089 Bump dbus-fast from 2.44.5 to 2.45.1 (#6299) 2025-11-09 23:15:39 -08:00
dependabot[bot]
48da3d8a8d Bump pre-commit from 4.3.0 to 4.4.0 (#6300) 2025-11-09 23:07:49 -08:00
dependabot[bot]
7b82ebe3aa Bump ruff from 0.14.3 to 0.14.4 (#6291)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-07 09:06:06 +01:00
8 changed files with 356 additions and 18 deletions

View File

@@ -3,6 +3,7 @@ aiohttp==3.13.2
 atomicwrites-homeassistant==1.4.1
 attrs==25.4.0
 awesomeversion==25.8.0
+backports.zstd==1.0.0
 blockbuster==1.5.25
 brotli==1.2.0
 ciso8601==2.3.3
@@ -26,5 +27,5 @@ securetar==2025.2.1
 sentry-sdk==2.43.0
 setuptools==80.9.0
 voluptuous==0.15.2
-dbus-fast==2.44.5
+dbus-fast==2.45.1
 zlib-fast==0.2.1
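The zstd commit (8f2b0763b7) notes that aiohttp enables zstd automatically when a suitable package is importable, which is presumably why backports.zstd is pinned above. A minimal availability probe as a sketch, not Supervisor code; it assumes backports.zstd mirrors Python 3.14's compression.zstd module (PEP 784):

# Sketch only: check whether a zstd implementation is importable.
try:
    from compression import zstd  # Python >= 3.14 standard library
except ImportError:
    try:
        from backports import zstd  # the backports.zstd package pinned above
    except ImportError:
        zstd = None

HAS_ZSTD = zstd is not None
print(f"zstd support importable: {HAS_ZSTD}")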

View File

@@ -1,14 +1,14 @@
-astroid==4.0.1
-coverage==7.11.0
+astroid==4.0.2
+coverage==7.11.3
 mypy==1.18.2
-pre-commit==4.3.0
+pre-commit==4.4.0
 pylint==4.0.2
 pytest-aiohttp==1.1.0
-pytest-asyncio==0.25.2
+pytest-asyncio==1.2.0
 pytest-cov==7.0.0
 pytest-timeout==2.4.0
 pytest==8.4.2
-ruff==0.14.3
+ruff==0.14.4
 time-machine==2.19.0
 types-docker==7.1.0.20251009
 types-pyyaml==6.0.12.20250915

View File

@@ -253,18 +253,28 @@ class APIIngress(CoreSysAttributes):
             skip_auto_headers={hdrs.CONTENT_TYPE},
         ) as result:
             headers = _response_header(result)
             # Avoid parsing content_type in simple cases for better performance
             if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE):
                 content_type = (maybe_content_type.partition(";"))[0].strip()
             else:
                 content_type = result.content_type

+            # Empty body responses (304, 204, HEAD, etc.) should not be streamed,
+            # otherwise aiohttp < 3.9.0 may generate an invalid "0\r\n\r\n" chunk
+            # This also avoids setting content_type for empty responses.
+            if must_be_empty_body(request.method, result.status):
+                # If upstream contains content-type, preserve it (e.g. for HEAD requests)
+                if maybe_content_type:
+                    headers[hdrs.CONTENT_TYPE] = content_type
+                return web.Response(
+                    headers=headers,
+                    status=result.status,
+                )
+
             # Simple request
             if (
-                # empty body responses should not be streamed,
-                # otherwise aiohttp < 3.9.0 may generate
-                # an invalid "0\r\n\r\n" chunk instead of an empty response.
-                must_be_empty_body(request.method, result.status)
-                or hdrs.CONTENT_LENGTH in result.headers
+                hdrs.CONTENT_LENGTH in result.headers
                 and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
             ):
                 # Return Response
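For illustration only, a standalone sketch of the rule the hunk above implements: never attach a Content-Type to a bodyless response, but keep the upstream value for HEAD. It uses aiohttp's must_be_empty_body helper (the same one called in the diff) and a hypothetical build_proxy_response function that is not part of the Supervisor code:

from aiohttp import hdrs, web
from aiohttp.helpers import must_be_empty_body  # available in recent aiohttp releases


def build_proxy_response(method: str, status: int, upstream_headers: dict) -> web.Response:
    """Sketch: build a proxied response, omitting Content-Type for bodyless replies."""
    headers: dict[str, str] = {}
    upstream_content_type = upstream_headers.get(hdrs.CONTENT_TYPE)

    if must_be_empty_body(method, status):
        # Preserve upstream Content-Type (relevant for HEAD, per RFC 9110),
        # but never invent one for a response that carries no body.
        if upstream_content_type:
            headers[hdrs.CONTENT_TYPE] = upstream_content_type
        return web.Response(status=status, headers=headers)

    # Responses with a body keep (or get) a Content-Type as usual.
    headers[hdrs.CONTENT_TYPE] = upstream_content_type or "application/octet-stream"
    return web.Response(status=status, headers=headers, body=b"...")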

View File

@@ -309,18 +309,30 @@ class DockerInterface(JobGroup, ABC):
                 stage in {PullImageLayerStage.DOWNLOADING, PullImageLayerStage.EXTRACTING}
                 and reference.progress_detail
             ):
+                # For containerd snapshotter, extracting phase has total=None
+                # In that case, use the download_total from the downloading phase
+                current_extra: dict[str, Any] = job.extra if job.extra else {}
+                if (
+                    stage == PullImageLayerStage.DOWNLOADING
+                    and reference.progress_detail.total
+                ):
+                    # Store download total for use in extraction phase with containerd snapshotter
+                    current_extra["download_total"] = reference.progress_detail.total
+
                 job.update(
                     progress=progress,
                     stage=stage.status,
                     extra={
                         "current": reference.progress_detail.current,
-                        "total": reference.progress_detail.total,
+                        "total": reference.progress_detail.total
+                        or current_extra.get("download_total"),
+                        "download_total": current_extra.get("download_total"),
                     },
                 )
             else:
                 # If we reach DOWNLOAD_COMPLETE without ever having set extra (small layers that skip
                 # the downloading phase), set a minimal extra so aggregate progress calculation can proceed
-                extra = job.extra
+                extra: dict[str, Any] | None = job.extra
                 if stage == PullImageLayerStage.DOWNLOAD_COMPLETE and not job.extra:
                     extra = {"current": 1, "total": 1}
@@ -346,7 +358,11 @@ class DockerInterface(JobGroup, ABC):
             for job in layer_jobs:
                 if not job.extra:
                     return
-                total += job.extra["total"]
+                # Use download_total if available (for containerd snapshotter), otherwise use total
+                layer_total = job.extra.get("download_total") or job.extra.get("total")
+                if layer_total is None:
+                    return
+                total += layer_total
             install_job.extra = {"total": total}
         else:
             total = install_job.extra["total"]
@@ -357,7 +373,11 @@ class DockerInterface(JobGroup, ABC):
         for job in layer_jobs:
             if not job.extra:
                 return
-            progress += job.progress * (job.extra["total"] / total)
+            # Use download_total if available (for containerd snapshotter), otherwise use total
+            layer_total = job.extra.get("download_total") or job.extra.get("total")
+            if layer_total is None:
+                return
+            progress += job.progress * (layer_total / total)

             job_stage = PullImageLayerStage.from_status(cast(str, job.stage))
             if job_stage < PullImageLayerStage.EXTRACTING:
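As a worked illustration of the weighting above (a sketch, not Supervisor code): with the containerd snapshotter the Extracting phase reports elapsed seconds instead of bytes, so each layer falls back to its download size. The download totals come from the fixture later in this compare (5,178,461 and 10,485,760 bytes); the per-layer progress values are assumed:

# Worked example of the aggregate weighting with the download_total fallback.
layers = [
    {"progress": 100.0, "extra": {"total": None, "download_total": 5_178_461}},
    {"progress": 50.0, "extra": {"total": None, "download_total": 10_485_760}},
]

# Each layer's weight is download_total when set, otherwise total.
total = sum(
    layer["extra"].get("download_total") or layer["extra"].get("total")
    for layer in layers
)  # 15_664_221 bytes

progress = sum(
    layer["progress"]
    * ((layer["extra"].get("download_total") or layer["extra"].get("total")) / total)
    for layer in layers
)
print(round(progress, 1))  # 66.5: layer1 (~33% weight) is done, layer2 (~67% weight) is half done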

View File

@@ -13,7 +13,6 @@ from .validate import get_valid_modules
 _LOGGER: logging.Logger = logging.getLogger(__name__)

 UNHEALTHY = [
-    UnsupportedReason.DOCKER_VERSION,
     UnsupportedReason.LXC,
     UnsupportedReason.PRIVILEGED,
 ]

View File

@@ -1,12 +1,28 @@
 """Test ingress API."""

-from unittest.mock import AsyncMock, patch
+from collections.abc import AsyncGenerator
+from unittest.mock import AsyncMock, MagicMock, patch

-from aiohttp.test_utils import TestClient
+import aiohttp
+from aiohttp import hdrs, web
+from aiohttp.test_utils import TestClient, TestServer
+import pytest

+from supervisor.addons.addon import Addon
 from supervisor.coresys import CoreSys


+@pytest.fixture(name="real_websession")
+async def fixture_real_websession(
+    coresys: CoreSys,
+) -> AsyncGenerator[aiohttp.ClientSession]:
+    """Fixture for real aiohttp ClientSession for ingress proxy tests."""
+    session = aiohttp.ClientSession()
+    coresys._websession = session  # pylint: disable=W0212
+    yield session
+    await session.close()
+
+
 async def test_validate_session(api_client: TestClient, coresys: CoreSys):
     """Test validating ingress session."""
     with patch("aiohttp.web_request.BaseRequest.__getitem__", return_value=None):
@@ -86,3 +102,126 @@
     assert (
         coresys.ingress.get_session_data(session).user.display_name == "Some Name"
     )
+
+
+async def test_ingress_proxy_no_content_type_for_empty_body_responses(
+    api_client: TestClient, coresys: CoreSys, real_websession: aiohttp.ClientSession
+):
+    """Test that empty body responses don't get Content-Type header."""
+
+    # Create a mock add-on backend server that returns various status codes
+    async def mock_addon_handler(request: web.Request) -> web.Response:
+        """Mock add-on handler that returns different status codes based on path."""
+        path = request.path
+        if path == "/204":
+            # 204 No Content - should not have Content-Type
+            return web.Response(status=204)
+        elif path == "/304":
+            # 304 Not Modified - should not have Content-Type
+            return web.Response(status=304)
+        elif path == "/100":
+            # 100 Continue - should not have Content-Type
+            return web.Response(status=100)
+        elif path == "/head":
+            # HEAD request - should have Content-Type (same as GET would)
+            return web.Response(body=b"test", content_type="text/html")
+        elif path == "/200":
+            # 200 OK with body - should have Content-Type
+            return web.Response(body=b"test content", content_type="text/plain")
+        elif path == "/200-no-content-type":
+            # 200 OK without explicit Content-Type - should get default
+            return web.Response(body=b"test content")
+        elif path == "/200-json":
+            # 200 OK with JSON - should preserve Content-Type
+            return web.Response(
+                body=b'{"key": "value"}', content_type="application/json"
+            )
+        else:
+            return web.Response(body=b"default", content_type="text/html")
+
+    # Create test server for mock add-on
+    app = web.Application()
+    app.router.add_route("*", "/{tail:.*}", mock_addon_handler)
+    addon_server = TestServer(app)
+    await addon_server.start_server()
+
+    try:
+        # Create ingress session
+        resp = await api_client.post("/ingress/session")
+        result = await resp.json()
+        session = result["data"]["session"]
+
+        # Create a mock add-on
+        mock_addon = MagicMock(spec=Addon)
+        mock_addon.slug = "test_addon"
+        mock_addon.ip_address = addon_server.host
+        mock_addon.ingress_port = addon_server.port
+        mock_addon.ingress_stream = False
+
+        # Generate an ingress token and register the add-on
+        ingress_token = coresys.ingress.create_session()
+
+        with patch.object(coresys.ingress, "get", return_value=mock_addon):
+            # Test 204 No Content - should NOT have Content-Type
+            resp = await api_client.get(
+                f"/ingress/{ingress_token}/204",
+                cookies={"ingress_session": session},
+            )
+            assert resp.status == 204
+            assert hdrs.CONTENT_TYPE not in resp.headers
+
+            # Test 304 Not Modified - should NOT have Content-Type
+            resp = await api_client.get(
+                f"/ingress/{ingress_token}/304",
+                cookies={"ingress_session": session},
+            )
+            assert resp.status == 304
+            assert hdrs.CONTENT_TYPE not in resp.headers
+
+            # Test HEAD request - SHOULD have Content-Type (same as GET)
+            # per RFC 9110: HEAD should return same headers as GET
+            resp = await api_client.head(
+                f"/ingress/{ingress_token}/head",
+                cookies={"ingress_session": session},
+            )
+            assert resp.status == 200
+            assert hdrs.CONTENT_TYPE in resp.headers
+            assert "text/html" in resp.headers[hdrs.CONTENT_TYPE]
+            # Body should be empty for HEAD
+            body = await resp.read()
+            assert body == b""
+
+            # Test 200 OK with body - SHOULD have Content-Type
+            resp = await api_client.get(
+                f"/ingress/{ingress_token}/200",
+                cookies={"ingress_session": session},
+            )
+            assert resp.status == 200
+            assert hdrs.CONTENT_TYPE in resp.headers
+            assert resp.headers[hdrs.CONTENT_TYPE] == "text/plain"
+            body = await resp.read()
+            assert body == b"test content"
+
+            # Test 200 OK without explicit Content-Type - SHOULD get default
+            resp = await api_client.get(
+                f"/ingress/{ingress_token}/200-no-content-type",
+                cookies={"ingress_session": session},
+            )
+            assert resp.status == 200
+            assert hdrs.CONTENT_TYPE in resp.headers
+            # Should get application/octet-stream as default from aiohttp ClientResponse
+            assert "application/octet-stream" in resp.headers[hdrs.CONTENT_TYPE]
+
+            # Test 200 OK with JSON - SHOULD preserve Content-Type
+            resp = await api_client.get(
+                f"/ingress/{ingress_token}/200-json",
+                cookies={"ingress_session": session},
+            )
+            assert resp.status == 200
+            assert hdrs.CONTENT_TYPE in resp.headers
+            assert "application/json" in resp.headers[hdrs.CONTENT_TYPE]
+            body = await resp.read()
+            assert body == b'{"key": "value"}'
+    finally:
+        await addon_server.close()

View File

@@ -675,3 +675,50 @@ async def test_install_progress_handles_layers_skipping_download(
     assert job.done is True
     assert job.progress == 100
     capture_exception.assert_not_called()
+
+
+async def test_install_progress_handles_containerd_snapshotter(
+    coresys: CoreSys,
+    test_docker_interface: DockerInterface,
+    capture_exception: Mock,
+):
+    """Test install handles containerd snapshotter format where extraction has no total bytes.
+
+    With containerd snapshotter, the extraction phase reports time elapsed in seconds
+    rather than bytes extracted. The progress_detail has format:
+    {"current": <seconds>, "units": "s"} with total=None
+
+    This test ensures we handle this gracefully by using the download size for
+    aggregate progress calculation.
+    """
+    coresys.core.set_state(CoreState.RUNNING)
+
+    # Fixture emulates containerd snapshotter pull log format
+    coresys.docker.docker.api.pull.return_value = load_json_fixture(
+        "docker_pull_image_log_containerd.json"
+    )
+
+    with patch.object(
+        type(coresys.supervisor), "arch", PropertyMock(return_value="i386")
+    ):
+        event = asyncio.Event()
+        job, install_task = coresys.jobs.schedule_job(
+            test_docker_interface.install,
+            JobSchedulerOptions(),
+            AwesomeVersion("1.2.3"),
+            "test",
+        )
+
+        async def listen_for_job_end(reference: SupervisorJob):
+            if reference.uuid != job.uuid:
+                return
+            event.set()
+
+        coresys.bus.register_event(BusEvent.SUPERVISOR_JOB_END, listen_for_job_end)
+        await install_task
+        await event.wait()
+
+    # Job should complete successfully without exceptions
+    assert job.done is True
+    assert job.progress == 100
+    capture_exception.assert_not_called()

View File

@@ -0,0 +1,122 @@
+[
+  {
+    "status": "Pulling from home-assistant/test-image",
+    "id": "2025.7.1"
+  },
+  {
+    "status": "Pulling fs layer",
+    "progressDetail": {},
+    "id": "layer1"
+  },
+  {
+    "status": "Pulling fs layer",
+    "progressDetail": {},
+    "id": "layer2"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 1048576,
+      "total": 5178461
+    },
+    "progress": "[===========> ] 1.049MB/5.178MB",
+    "id": "layer1"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 5178461,
+      "total": 5178461
+    },
+    "progress": "[==================================================>] 5.178MB/5.178MB",
+    "id": "layer1"
+  },
+  {
+    "status": "Download complete",
+    "progressDetail": {
+      "hidecounts": true
+    },
+    "id": "layer1"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 1048576,
+      "total": 10485760
+    },
+    "progress": "[=====> ] 1.049MB/10.49MB",
+    "id": "layer2"
+  },
+  {
+    "status": "Downloading",
+    "progressDetail": {
+      "current": 10485760,
+      "total": 10485760
+    },
+    "progress": "[==================================================>] 10.49MB/10.49MB",
+    "id": "layer2"
+  },
+  {
+    "status": "Download complete",
+    "progressDetail": {
+      "hidecounts": true
+    },
+    "id": "layer2"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 1,
+      "units": "s"
+    },
+    "progress": "1 s",
+    "id": "layer1"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 5,
+      "units": "s"
+    },
+    "progress": "5 s",
+    "id": "layer1"
+  },
+  {
+    "status": "Pull complete",
+    "progressDetail": {
+      "hidecounts": true
+    },
+    "id": "layer1"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 1,
+      "units": "s"
+    },
+    "progress": "1 s",
+    "id": "layer2"
+  },
+  {
+    "status": "Extracting",
+    "progressDetail": {
+      "current": 3,
+      "units": "s"
+    },
+    "progress": "3 s",
+    "id": "layer2"
+  },
+  {
+    "status": "Pull complete",
+    "progressDetail": {
+      "hidecounts": true
+    },
+    "id": "layer2"
+  },
+  {
+    "status": "Digest: sha256:abc123"
+  },
+  {
+    "status": "Status: Downloaded newer image for test/image:2025.7.1"
+  }
+]