Mirror of https://github.com/home-assistant/core.git (synced 2025-07-19 11:17:21 +00:00)
Add Hassio HTTP logs/follow to allowed paths (#126606)
* Add logs/follow to admin paths in hassio.http
* Add tests for logs/follow admin paths in hassio.http
* Add tests for logs/follow admin paths in hassio.http
* Add compress and timeout exclusions for hassio http api
* Fix should_compress usage in hassio/ingress
* Add missing follow exceptions for hassio/http
* Add hassio range header forward for logs endpoints
* Fix test syntax hassio/http
parent 95bcb272e0
commit 2453e1284f
homeassistant/components/hassio/http.py

@@ -18,6 +18,7 @@ from aiohttp.hdrs import (
     CONTENT_ENCODING,
     CONTENT_LENGTH,
     CONTENT_TYPE,
+    RANGE,
     TRANSFER_ENCODING,
 )
 from aiohttp.web_exceptions import HTTPBadGateway
@@ -41,6 +42,15 @@ NO_TIMEOUT = re.compile(
     r"|backups/.+/full"
     r"|backups/.+/partial"
     r"|backups/[^/]+/(?:upload|download)"
+    r"|audio/logs/follow"
+    r"|cli/logs/follow"
+    r"|core/logs/follow"
+    r"|dns/logs/follow"
+    r"|host/logs/follow"
+    r"|multicast/logs/follow"
+    r"|observer/logs/follow"
+    r"|supervisor/logs/follow"
+    r"|addons/[^/]+/logs/follow"
     r")$"
 )

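Aside: a minimal sketch of what the NO_TIMEOUT addition buys. It recompiles a small subset of the pattern inline instead of importing the real constant; the lowercase name and the addon slug below are purely illustrative.

import re

# Subset of the NO_TIMEOUT pattern from the hunk above.
no_timeout = re.compile(
    r"^(?:"
    r"|backups/.+/full"
    r"|supervisor/logs/follow"
    r"|addons/[^/]+/logs/follow"
    r")$"
)

# Follow endpoints stream indefinitely, so they must not hit the total timeout
# that non-matching paths fall back to (see the _get_timeout hunk further down).
assert no_timeout.match("supervisor/logs/follow")
assert no_timeout.match("addons/core_ssh/logs/follow")
assert not no_timeout.match("supervisor/logs")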
@@ -59,14 +69,23 @@ PATHS_ADMIN = re.compile(
     r"|backups/[a-f0-9]{8}(/info|/download|/restore/full|/restore/partial)?"
     r"|backups/new/upload"
     r"|audio/logs"
+    r"|audio/logs/follow"
     r"|cli/logs"
+    r"|cli/logs/follow"
     r"|core/logs"
+    r"|core/logs/follow"
     r"|dns/logs"
+    r"|dns/logs/follow"
     r"|host/logs"
+    r"|host/logs/follow"
     r"|multicast/logs"
+    r"|multicast/logs/follow"
     r"|observer/logs"
+    r"|observer/logs/follow"
     r"|supervisor/logs"
+    r"|supervisor/logs/follow"
     r"|addons/[^/]+/(changelog|documentation|logs)"
+    r"|addons/[^/]+/logs/follow"
     r")$"
 )

@@ -83,8 +102,47 @@ NO_STORE = re.compile(
     r"|app/entrypoint.js"
     r")$"
 )
+
+# Follow logs should not be compressed, to be able to get streamed by frontend
+NO_COMPRESS = re.compile(
+    r"^(?:"
+    r"|audio/logs/follow"
+    r"|cli/logs/follow"
+    r"|core/logs/follow"
+    r"|dns/logs/follow"
+    r"|host/logs/follow"
+    r"|multicast/logs/follow"
+    r"|observer/logs/follow"
+    r"|supervisor/logs/follow"
+    r"|addons/[^/]+/logs/follow"
+    r")$"
+)
+
+PATHS_LOGS = re.compile(
+    r"^(?:"
+    r"|audio/logs"
+    r"|audio/logs/follow"
+    r"|cli/logs"
+    r"|cli/logs/follow"
+    r"|core/logs"
+    r"|core/logs/follow"
+    r"|dns/logs"
+    r"|dns/logs/follow"
+    r"|host/logs"
+    r"|host/logs/follow"
+    r"|multicast/logs"
+    r"|multicast/logs/follow"
+    r"|observer/logs"
+    r"|observer/logs/follow"
+    r"|supervisor/logs"
+    r"|supervisor/logs/follow"
+    r"|addons/[^/]+/logs"
+    r"|addons/[^/]+/logs/follow"
+    r")$"
+)
 # fmt: on


 RESPONSE_HEADERS_FILTER = {
     TRANSFER_ENCODING,
     CONTENT_LENGTH,
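Aside: a quick, self-contained way to sanity-check the two new patterns. This recompiles reduced versions of NO_COMPRESS and PATHS_LOGS inline rather than importing them; the lowercase names are illustrative only.

import re

no_compress = re.compile(r"^(?:|supervisor/logs/follow|addons/[^/]+/logs/follow)$")
paths_logs = re.compile(r"^(?:|supervisor/logs|supervisor/logs/follow)$")

# Streaming follow endpoints are excluded from compression...
assert no_compress.match("supervisor/logs/follow")
assert not no_compress.match("supervisor/logs")

# ...while both plain and follow log paths count as log endpoints, which is
# what the proxy uses below to decide whether to forward a Range header.
assert paths_logs.match("supervisor/logs")
assert paths_logs.match("supervisor/logs/follow")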
@@ -161,6 +219,10 @@ class HassIOView(HomeAssistantView):
             assert isinstance(request._stored_content_type, str)  # noqa: SLF001
             headers[CONTENT_TYPE] = request._stored_content_type  # noqa: SLF001

+        # forward range headers for logs
+        if PATHS_LOGS.match(path) and request.headers.get(RANGE):
+            headers[RANGE] = request.headers[RANGE]
+
         try:
             client = await self._websession.request(
                 method=request.method,
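Aside: with this hunk in place, a client calling the Core proxy can pass a Range header through to the Supervisor log endpoints. A hypothetical aiohttp client sketch; the URL, the TOKEN placeholder, and the exact Range value are assumptions (the tests at the bottom of this commit use the same ":-100:50" value).

import asyncio

import aiohttp

TOKEN = "..."  # assumption: a valid long-lived access token for the Home Assistant API


async def tail_host_logs() -> str:
    """Fetch a slice of the host logs through the hassio proxy."""
    headers = {"Authorization": f"Bearer {TOKEN}", "Range": ":-100:50"}
    async with aiohttp.ClientSession() as session:
        async with session.get(
            "http://homeassistant.local:8123/api/hassio/host/logs", headers=headers
        ) as resp:
            resp.raise_for_status()
            return await resp.text()


print(asyncio.run(tail_host_logs()))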
@@ -177,7 +239,7 @@ class HassIOView(HomeAssistantView):
             )
             response.content_type = client.content_type

-            if should_compress(response.content_type):
+            if should_compress(response.content_type, path):
                 response.enable_compression()
             await response.prepare(request)
             # In testing iter_chunked, iter_any, and iter_chunks:
@@ -217,8 +279,10 @@ def _get_timeout(path: str) -> ClientTimeout:
     return ClientTimeout(connect=10, total=300)


-def should_compress(content_type: str) -> bool:
+def should_compress(content_type: str, path: str | None = None) -> bool:
     """Return if we should compress a response."""
+    if path is not None and NO_COMPRESS.match(path):
+        return False
     if content_type.startswith("image/"):
         return "svg" in content_type
     if content_type.startswith("application/"):
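Aside: the observable effect of the new signature, sketched as assertions. Only behavior visible in this hunk is claimed; the import path assumes the function lives in the hassio HTTP module named in the commit message.

from homeassistant.components.hassio.http import should_compress

# Streamed follow logs are never compressed, regardless of content type.
assert should_compress("text/plain", "supervisor/logs/follow") is False

# Callers that do not pass a path keep the old behavior: for images,
# only SVG content is compressed.
assert should_compress("image/svg+xml") is True
assert should_compress("image/png") is False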
tests/components/hassio/test_http.py

@@ -82,7 +82,9 @@ async def test_forward_request_onboarded_user_unallowed_methods(
         # Unauthenticated path
         ("supervisor/info", HTTPStatus.UNAUTHORIZED),
         ("supervisor/logs", HTTPStatus.UNAUTHORIZED),
+        ("supervisor/logs/follow", HTTPStatus.UNAUTHORIZED),
         ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED),
+        ("addons/bl_b392/logs/follow", HTTPStatus.UNAUTHORIZED),
     ],
 )
 async def test_forward_request_onboarded_user_unallowed_paths(
@@ -152,7 +154,9 @@ async def test_forward_request_onboarded_noauth_unallowed_methods(
         # Unauthenticated path
         ("supervisor/info", HTTPStatus.UNAUTHORIZED),
         ("supervisor/logs", HTTPStatus.UNAUTHORIZED),
+        ("supervisor/logs/follow", HTTPStatus.UNAUTHORIZED),
         ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED),
+        ("addons/bl_b392/logs/follow", HTTPStatus.UNAUTHORIZED),
     ],
 )
 async def test_forward_request_onboarded_noauth_unallowed_paths(
@@ -265,7 +269,9 @@ async def test_forward_request_not_onboarded_unallowed_methods(
         # Unauthenticated path
         ("supervisor/info", HTTPStatus.UNAUTHORIZED),
         ("supervisor/logs", HTTPStatus.UNAUTHORIZED),
+        ("supervisor/logs/follow", HTTPStatus.UNAUTHORIZED),
         ("addons/bl_b392/logs", HTTPStatus.UNAUTHORIZED),
+        ("addons/bl_b392/logs/follow", HTTPStatus.UNAUTHORIZED),
     ],
 )
 async def test_forward_request_not_onboarded_unallowed_paths(
@@ -292,7 +298,9 @@ async def test_forward_request_not_onboarded_unallowed_paths(
         ("addons/bl_b392/icon", False),
         ("backups/1234abcd/info", True),
         ("supervisor/logs", True),
+        ("supervisor/logs/follow", True),
         ("addons/bl_b392/logs", True),
+        ("addons/bl_b392/logs/follow", True),
         ("addons/bl_b392/changelog", True),
         ("addons/bl_b392/documentation", True),
     ],
@@ -494,3 +502,57 @@ async def test_entrypoint_cache_control(
     assert resp1.headers["Cache-Control"] == "no-store, max-age=0"

     assert "Cache-Control" not in resp2.headers
+
+
+async def test_no_follow_logs_compress(
+    hassio_client: TestClient, aioclient_mock: AiohttpClientMocker
+) -> None:
+    """Test that we do not compress follow logs."""
+    aioclient_mock.get("http://127.0.0.1/supervisor/logs/follow")
+    aioclient_mock.get("http://127.0.0.1/supervisor/logs")
+
+    resp1 = await hassio_client.get("/api/hassio/supervisor/logs/follow")
+    resp2 = await hassio_client.get("/api/hassio/supervisor/logs")
+
+    # Check we got right response
+    assert resp1.status == HTTPStatus.OK
+    assert resp1.headers.get("Content-Encoding") is None
+
+    assert resp2.status == HTTPStatus.OK
+    assert resp2.headers.get("Content-Encoding") == "deflate"
+
+
+async def test_forward_range_header_for_logs(
+    hassio_client: TestClient, aioclient_mock: AiohttpClientMocker
+) -> None:
+    """Test that we forward the Range header for logs."""
+    aioclient_mock.get("http://127.0.0.1/host/logs")
+    aioclient_mock.get("http://127.0.0.1/addons/123abc_esphome/logs")
+    aioclient_mock.get("http://127.0.0.1/backups/1234abcd/download")
+
+    test_range = ":-100:50"
+
+    host_resp = await hassio_client.get(
+        "/api/hassio/host/logs", headers={"Range": test_range}
+    )
+    addon_resp = await hassio_client.get(
+        "/api/hassio/addons/123abc_esphome/logs", headers={"Range": test_range}
+    )
+    backup_resp = await hassio_client.get(
+        "/api/hassio/backups/1234abcd/download", headers={"Range": test_range}
+    )
+
+    assert host_resp.status == HTTPStatus.OK
+    assert addon_resp.status == HTTPStatus.OK
+    assert backup_resp.status == HTTPStatus.OK
+
+    assert len(aioclient_mock.mock_calls) == 3
+
+    req_headers1 = aioclient_mock.mock_calls[0][-1]
+    assert req_headers1.get("Range") == test_range
+
+    req_headers2 = aioclient_mock.mock_calls[1][-1]
+    assert req_headers2.get("Range") == test_range
+
+    req_headers3 = aioclient_mock.mock_calls[2][-1]
+    assert req_headers3.get("Range") is None
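Aside: the [-1] indexing in the last test leans on how the AiohttpClientMocker helper records calls. Assuming it stores (method, url, data, headers) tuples, the same check can be written by unpacking; this fragment reuses the test's locals and only runs inside that test.

# Assumed record shape: (method, url, data, headers).
method, url, data, headers = aioclient_mock.mock_calls[0]
assert headers.get("Range") == test_range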