Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-11-16 22:40:47 +00:00)

Compare commits: copilot/su...fix-websoc (2 commits)

Commits: 3e307c5c8b, 1cd499b4a5
.github/workflows/builder.yml (vendored, 33 lines changed)

@@ -107,7 +107,7 @@ jobs:
       # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2025.10.0
+        uses: home-assistant/wheels@2025.09.1
         with:
           abi: cp313
           tag: musllinux_1_2
@@ -132,7 +132,7 @@ jobs:
 
       - name: Install Cosign
         if: needs.init.outputs.publish == 'true'
-        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
+        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
         with:
           cosign-release: "v2.5.3"
 
@@ -170,6 +170,8 @@ jobs:
             --target /data \
             --cosign \
             --generic ${{ needs.init.outputs.version }}
+        env:
+          CAS_API_KEY: ${{ secrets.CAS_TOKEN }}
 
   version:
     name: Update version
@@ -291,6 +293,33 @@ jobs:
             exit 1
           fi
 
+      - name: Check the Supervisor code sign
+        if: needs.init.outputs.publish == 'true'
+        run: |
+          echo "Enable Content-Trust"
+          test=$(docker exec hassio_cli ha security options --content-trust=true --no-progress --raw-json | jq -r '.result')
+          if [ "$test" != "ok" ]; then
+            exit 1
+          fi
+
+          echo "Run supervisor health check"
+          test=$(docker exec hassio_cli ha resolution healthcheck --no-progress --raw-json | jq -r '.result')
+          if [ "$test" != "ok" ]; then
+            exit 1
+          fi
+
+          echo "Check supervisor unhealthy"
+          test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unhealthy[]')
+          if [ "$test" != "" ]; then
+            exit 1
+          fi
+
+          echo "Check supervisor supported"
+          test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unsupported[]')
+          if [[ "$test" =~ source_mods ]]; then
+            exit 1
+          fi
+
       - name: Create full backup
         id: backup
         run: |
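Note: the restored "Check the Supervisor code sign" step above gates publishing entirely on the `ha` CLI's `--raw-json` output. A rough Python equivalent of the same gate (an illustrative sketch only, assuming a running `hassio_cli` container and the same JSON payload shape the workflow parses with jq):

```python
# Illustrative re-implementation of the publish gate from builder.yml.
# Assumes `docker exec hassio_cli ha ... --raw-json` returns payloads like
# {"result": "ok", "data": {...}} exactly as the workflow step expects.
import json
import subprocess
import sys


def ha(*args: str) -> dict:
    """Run an `ha` command inside the hassio_cli container and parse its JSON output."""
    out = subprocess.run(
        ["docker", "exec", "hassio_cli", "ha", *args, "--no-progress", "--raw-json"],
        capture_output=True, text=True, check=True,
    ).stdout
    return json.loads(out)


if ha("security", "options", "--content-trust=true")["result"] != "ok":
    sys.exit(1)
if ha("resolution", "healthcheck")["result"] != "ok":
    sys.exit(1)

info = ha("resolution", "info")["data"]
if info.get("unhealthy"):                       # any unhealthy reason fails the gate
    sys.exit(1)
if "source_mods" in info.get("unsupported", []):  # modified sources fail the gate
    sys.exit(1)
```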
.github/workflows/ci.yaml (vendored, 6 lines changed)

@@ -346,7 +346,7 @@ jobs:
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Install Cosign
-        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
+        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
         with:
           cosign-release: "v2.5.3"
       - name: Restore Python virtual environment
@@ -386,7 +386,7 @@ jobs:
             -o console_output_style=count \
             tests
       - name: Upload coverage artifact
-        uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v5.0.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: coverage
           path: .coverage
@@ -417,7 +417,7 @@ jobs:
             echo "Failed to restore Python virtual environment from cache"
             exit 1
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: coverage
           path: coverage/
.github/workflows/sentry.yaml (vendored, 2 lines changed)

@@ -12,7 +12,7 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Sentry Release
-        uses: getsentry/action-release@128c5058bbbe93c8e02147fe0a9c713f166259a6 # v3.4.0
+        uses: getsentry/action-release@4f502acc1df792390abe36f2dcb03612ef144818 # v3.3.0
         env:
           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
           SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
.github/workflows/stale.yml (vendored, 1 line changed)

@@ -16,7 +16,6 @@ jobs:
           days-before-close: 7
           stale-issue-label: "stale"
           exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,pinned,rfc,security"
-          only-issue-types: "bug"
           stale-issue-message: >
             There hasn't been any activity on this issue recently. Due to the
             high number of incoming GitHub notifications, we have to clean some
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.14.3
+    rev: v0.11.10
    hooks:
      - id: ruff
        args:
@@ -1,15 +1,14 @@
 aiodns==3.5.0
-aiohttp==3.13.2
+aiohttp==3.13.0
 atomicwrites-homeassistant==1.4.1
 attrs==25.4.0
 awesomeversion==25.8.0
-backports.zstd==1.0.0
 blockbuster==1.5.25
-brotli==1.2.0
+brotli==1.1.0
 ciso8601==2.3.3
-colorlog==6.10.1
+colorlog==6.9.0
 cpe==1.3.1
-cryptography==46.0.3
+cryptography==46.0.2
 debugpy==1.8.17
 deepmerge==2.0
 dirhash==0.5.0
@@ -18,14 +17,14 @@ faust-cchardet==2.1.19
 gitpython==3.1.45
 jinja2==3.1.6
 log-rate-limit==1.4.2
-orjson==3.11.4
+orjson==3.11.3
 pulsectl==24.12.0
-pyudev==0.24.4
+pyudev==0.24.3
 PyYAML==6.0.3
 requests==2.32.5
 securetar==2025.2.1
-sentry-sdk==2.43.0
+sentry-sdk==2.40.0
 setuptools==80.9.0
 voluptuous==0.15.2
-dbus-fast==2.45.1
+dbus-fast==2.44.5
 zlib-fast==0.2.1
@@ -1,16 +1,16 @@
-astroid==4.0.2
+astroid==3.3.11
-coverage==7.11.3
+coverage==7.10.7
 mypy==1.18.2
-pre-commit==4.4.0
+pre-commit==4.3.0
-pylint==4.0.2
+pylint==3.3.9
 pytest-aiohttp==1.1.0
-pytest-asyncio==1.2.0
+pytest-asyncio==0.25.2
 pytest-cov==7.0.0
 pytest-timeout==2.4.0
 pytest==8.4.2
-ruff==0.14.4
+ruff==0.14.0
 time-machine==2.19.0
-types-docker==7.1.0.20251009
+types-docker==7.1.0.20250916
 types-pyyaml==6.0.12.20250915
 types-requests==2.32.4.20250913
 urllib3==2.5.0
@@ -1513,6 +1513,13 @@ class Addon(AddonModel):
         _LOGGER.info("Finished restore for add-on %s", self.slug)
         return wait_for_start
 
+    def check_trust(self) -> Awaitable[None]:
+        """Calculate Addon docker content trust.
+
+        Return Coroutine.
+        """
+        return self.instance.check_trust()
+
     @Job(
         name="addon_restart_after_problem",
         throttle_period=WATCHDOG_THROTTLE_PERIOD,
@@ -1555,15 +1562,7 @@ class Addon(AddonModel):
                 )
                 break
 
-            # Exponential backoff to spread retries over the throttle window
-            delay = WATCHDOG_RETRY_SECONDS * (1 << max(attempts - 1, 0))
-            _LOGGER.debug(
-                "Watchdog will retry addon %s in %s seconds (attempt %s)",
-                self.name,
-                delay,
-                attempts + 1,
-            )
-            await asyncio.sleep(delay)
+            await asyncio.sleep(WATCHDOG_RETRY_SECONDS)
 
     async def container_state_changed(self, event: DockerContainerStateEvent) -> None:
         """Set addon state from container state."""
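Note: the two sides of the watchdog hunk differ only in retry pacing: one side sleeps a fixed `WATCHDOG_RETRY_SECONDS` between attempts, the other spreads retries exponentially across the throttle window. A small sketch of the exponential schedule (the constant value here is an example, not the repository's):

```python
# Exponential watchdog backoff, as in the removed lines above.
WATCHDOG_RETRY_SECONDS = 10  # example value only


def backoff_delay(attempts: int) -> int:
    """Delay before the next retry, doubling with each prior attempt."""
    return WATCHDOG_RETRY_SECONDS * (1 << max(attempts - 1, 0))


# attempts 1..5 -> 10, 20, 40, 80, 160 seconds
print([backoff_delay(n) for n in range(1, 6)])
```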
@@ -103,6 +103,7 @@ from .configuration import FolderMapping
 from .const import (
     ATTR_BACKUP,
     ATTR_BREAKING_VERSIONS,
+    ATTR_CODENOTARY,
     ATTR_PATH,
     ATTR_READ_ONLY,
     AddonBackupMode,
@@ -631,8 +632,13 @@ class AddonModel(JobGroup, ABC):
 
     @property
     def signed(self) -> bool:
-        """Currently no signing support."""
-        return False
+        """Return True if the image is signed."""
+        return ATTR_CODENOTARY in self.data
+
+    @property
+    def codenotary(self) -> str | None:
+        """Return Signer email address for CAS."""
+        return self.data.get(ATTR_CODENOTARY)
 
     @property
     def breaking_versions(self) -> list[AwesomeVersion]:
@@ -207,12 +207,6 @@ def _warn_addon_config(config: dict[str, Any]):
             name,
         )
 
-    if ATTR_CODENOTARY in config:
-        _LOGGER.warning(
-            "Add-on '%s' uses deprecated 'codenotary' field in config. This field is no longer used and will be ignored. Please report this to the maintainer.",
-            name,
-        )
-
     return config
 
 
@@ -423,6 +417,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
         vol.Optional(ATTR_BACKUP, default=AddonBackupMode.HOT): vol.Coerce(
             AddonBackupMode
         ),
+        vol.Optional(ATTR_CODENOTARY): vol.Email(),
         vol.Optional(ATTR_OPTIONS, default={}): dict,
         vol.Optional(ATTR_SCHEMA, default={}): vol.Any(
             vol.Schema({str: SCHEMA_ELEMENT}),
@@ -253,28 +253,18 @@ class APIIngress(CoreSysAttributes):
             skip_auto_headers={hdrs.CONTENT_TYPE},
         ) as result:
             headers = _response_header(result)
-
             # Avoid parsing content_type in simple cases for better performance
             if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE):
                 content_type = (maybe_content_type.partition(";"))[0].strip()
             else:
                 content_type = result.content_type
-
-            # Empty body responses (304, 204, HEAD, etc.) should not be streamed,
-            # otherwise aiohttp < 3.9.0 may generate an invalid "0\r\n\r\n" chunk
-            # This also avoids setting content_type for empty responses.
-            if must_be_empty_body(request.method, result.status):
-                # If upstream contains content-type, preserve it (e.g. for HEAD requests)
-                if maybe_content_type:
-                    headers[hdrs.CONTENT_TYPE] = content_type
-                return web.Response(
-                    headers=headers,
-                    status=result.status,
-                )
-
             # Simple request
             if (
-                hdrs.CONTENT_LENGTH in result.headers
+                # empty body responses should not be streamed,
+                # otherwise aiohttp < 3.9.0 may generate
+                # an invalid "0\r\n\r\n" chunk instead of an empty response.
+                must_be_empty_body(request.method, result.status)
+                or hdrs.CONTENT_LENGTH in result.headers
                 and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
             ):
                 # Return Response
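Note: both sides of the ingress hunk lean on aiohttp's `must_be_empty_body()` to decide when a proxied response may not carry a payload (HEAD, 204, 304) and therefore must not be streamed. A minimal sketch of that decision, assuming the helper keeps its current home in aiohttp's internal `aiohttp.helpers` module:

```python
# Sketch of the empty-body decision used by the ingress proxy above.
from aiohttp import web
from aiohttp.helpers import must_be_empty_body  # internal helper; path may vary by version


def relay(method: str, status: int, headers: dict[str, str]) -> web.Response | None:
    """Return a plain, non-streaming Response for responses that must have no body."""
    if must_be_empty_body(method, status):
        return web.Response(headers=headers, status=status)
    return None  # caller falls through to the streaming path


assert relay("GET", 304, {}) is not None   # 304 never carries a body
assert relay("HEAD", 200, {}) is not None  # HEAD responses have no body
assert relay("GET", 200, {}) is None       # normal responses are streamed
```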
@@ -222,6 +222,11 @@ class APIProxy(CoreSysAttributes):
             raise HTTPBadGateway()
         _LOGGER.info("Home Assistant WebSocket API request initialize")
 
+        # Check if transport is still valid before WebSocket upgrade
+        if request.transport is None:
+            _LOGGER.warning("WebSocket connection lost before upgrade")
+            raise web.HTTPBadRequest(reason="Connection closed")
+
         # init server
         server = web.WebSocketResponse(heartbeat=30)
         await server.prepare(request)
@@ -1,20 +1,24 @@
 """Init file for Supervisor Security RESTful API."""
 
+import asyncio
+import logging
 from typing import Any
 
 from aiohttp import web
+import attr
 import voluptuous as vol
 
-from supervisor.exceptions import APIGone
-
-from ..const import ATTR_FORCE_SECURITY, ATTR_PWNED
+from ..const import ATTR_CONTENT_TRUST, ATTR_FORCE_SECURITY, ATTR_PWNED
 from ..coresys import CoreSysAttributes
 from .utils import api_process, api_validate
 
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
 # pylint: disable=no-value-for-parameter
 SCHEMA_OPTIONS = vol.Schema(
     {
         vol.Optional(ATTR_PWNED): vol.Boolean(),
+        vol.Optional(ATTR_CONTENT_TRUST): vol.Boolean(),
         vol.Optional(ATTR_FORCE_SECURITY): vol.Boolean(),
     }
 )
@@ -27,6 +31,7 @@ class APISecurity(CoreSysAttributes):
     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return Security information."""
         return {
+            ATTR_CONTENT_TRUST: self.sys_security.content_trust,
             ATTR_PWNED: self.sys_security.pwned,
             ATTR_FORCE_SECURITY: self.sys_security.force,
         }
@@ -38,6 +43,8 @@ class APISecurity(CoreSysAttributes):
 
         if ATTR_PWNED in body:
             self.sys_security.pwned = body[ATTR_PWNED]
+        if ATTR_CONTENT_TRUST in body:
+            self.sys_security.content_trust = body[ATTR_CONTENT_TRUST]
         if ATTR_FORCE_SECURITY in body:
             self.sys_security.force = body[ATTR_FORCE_SECURITY]
 
@@ -47,9 +54,6 @@ class APISecurity(CoreSysAttributes):
 
     @api_process
     async def integrity_check(self, request: web.Request) -> dict[str, Any]:
-        """Run backend integrity check.
-
-        CodeNotary integrity checking has been removed. This endpoint now returns
-        an error indicating the feature is gone.
-        """
-        raise APIGone("Integrity check feature has been removed.")
+        """Run backend integrity check."""
+        result = await asyncio.shield(self.sys_security.integrity_check())
+        return attr.asdict(result)
@@ -16,12 +16,14 @@ from ..const import (
     ATTR_BLK_READ,
     ATTR_BLK_WRITE,
     ATTR_CHANNEL,
+    ATTR_CONTENT_TRUST,
     ATTR_COUNTRY,
     ATTR_CPU_PERCENT,
     ATTR_DEBUG,
     ATTR_DEBUG_BLOCK,
     ATTR_DETECT_BLOCKING_IO,
     ATTR_DIAGNOSTICS,
+    ATTR_FORCE_SECURITY,
     ATTR_HEALTHY,
     ATTR_ICON,
     ATTR_IP_ADDRESS,
@@ -67,6 +69,8 @@ SCHEMA_OPTIONS = vol.Schema(
         vol.Optional(ATTR_DEBUG): vol.Boolean(),
         vol.Optional(ATTR_DEBUG_BLOCK): vol.Boolean(),
         vol.Optional(ATTR_DIAGNOSTICS): vol.Boolean(),
+        vol.Optional(ATTR_CONTENT_TRUST): vol.Boolean(),
+        vol.Optional(ATTR_FORCE_SECURITY): vol.Boolean(),
         vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
         vol.Optional(ATTR_DETECT_BLOCKING_IO): vol.Coerce(DetectBlockingIO),
         vol.Optional(ATTR_COUNTRY): str,
@@ -151,7 +151,7 @@ def api_return_error(
     if check_exception_chain(error, DockerAPIError):
         message = format_message(message)
     if not message:
-        message = "Unknown error, see Supervisor logs (check with 'ha supervisor logs')"
+        message = "Unknown error, see supervisor"
 
     match error_type:
         case const.CONTENT_TYPE_TEXT:
@@ -105,6 +105,7 @@ async def initialize_coresys() -> CoreSys:
 
     if coresys.dev:
         coresys.updater.channel = UpdateChannel.DEV
+        coresys.security.content_trust = False
 
     # Convert datetime
     logging.Formatter.converter = lambda *args: coresys.now().timetuple()
@@ -846,6 +846,16 @@ class DockerAddon(DockerInterface):
         ):
             self.sys_resolution.dismiss_issue(self.addon.device_access_missing_issue)
 
+    async def _validate_trust(self, image_id: str) -> None:
+        """Validate trust of content."""
+        if not self.addon.signed:
+            return
+
+        checksum = image_id.partition(":")[2]
+        return await self.sys_security.verify_content(
+            cast(str, self.addon.codenotary), checksum
+        )
+
     @Job(
         name="docker_addon_hardware_events",
         conditions=[JobCondition.OS_AGENT],
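Note: the add-on trust check above derives the checksum it sends to CAS from the Docker image ID string, keeping only the digest after the algorithm prefix. A tiny illustration of the `partition(":")[2]` step (the digest value here is made up):

```python
# Image IDs look like "sha256:<digest>"; only the digest is verified.
image_id = "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
checksum = image_id.partition(":")[2]
assert checksum == image_id.split(":", 1)[1]
print(checksum[:12])  # short digest prefix
```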
@@ -5,7 +5,7 @@ from ipaddress import IPv4Address
 import logging
 import re
 
-from awesomeversion import AwesomeVersion
+from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
 from docker.types import Mount
 
 from ..const import LABEL_MACHINE
@@ -244,3 +244,13 @@ class DockerHomeAssistant(DockerInterface):
             self.image,
             self.sys_homeassistant.version,
         )
+
+    async def _validate_trust(self, image_id: str) -> None:
+        """Validate trust of content."""
+        try:
+            if self.version in {None, LANDINGPAGE} or self.version < _VERIFY_TRUST:
+                return
+        except AwesomeVersionCompareException:
+            return
+
+        await super()._validate_trust(image_id)
@@ -31,12 +31,15 @@ from ..const import (
 )
 from ..coresys import CoreSys
 from ..exceptions import (
+    CodeNotaryError,
+    CodeNotaryUntrusted,
     DockerAPIError,
     DockerError,
     DockerJobError,
     DockerLogOutOfOrder,
     DockerNotFound,
     DockerRequestError,
+    DockerTrustError,
 )
 from ..jobs import SupervisorJob
 from ..jobs.const import JOB_GROUP_DOCKER_INTERFACE, JobConcurrency
@@ -217,7 +220,7 @@ class DockerInterface(JobGroup, ABC):
 
         await self.sys_run_in_executor(self.sys_docker.docker.login, **credentials)
 
-    def _process_pull_image_log(  # noqa: C901
+    def _process_pull_image_log(
         self, install_job_id: str, reference: PullLogEntry
     ) -> None:
         """Process events fired from a docker while pulling an image, filtered to a given job id."""
@@ -303,44 +306,26 @@ class DockerInterface(JobGroup, ABC):
         # Our filters have all passed. Time to update the job
         # Only downloading and extracting have progress details. Use that to set extra
         # We'll leave it around on later stages as the total bytes may be useful after that stage
-        # Enforce range to prevent float drift error
-        progress = max(0, min(progress, 100))
         if (
             stage in {PullImageLayerStage.DOWNLOADING, PullImageLayerStage.EXTRACTING}
             and reference.progress_detail
         ):
-            # For containerd snapshotter, extracting phase has total=None
-            # In that case, use the download_total from the downloading phase
-            current_extra: dict[str, Any] = job.extra if job.extra else {}
-            if (
-                stage == PullImageLayerStage.DOWNLOADING
-                and reference.progress_detail.total
-            ):
-                # Store download total for use in extraction phase with containerd snapshotter
-                current_extra["download_total"] = reference.progress_detail.total
-
             job.update(
                 progress=progress,
                 stage=stage.status,
                 extra={
                     "current": reference.progress_detail.current,
-                    "total": reference.progress_detail.total
-                    or current_extra.get("download_total"),
-                    "download_total": current_extra.get("download_total"),
+                    "total": reference.progress_detail.total,
                 },
             )
         else:
-            # If we reach DOWNLOAD_COMPLETE without ever having set extra (small layers that skip
-            # the downloading phase), set a minimal extra so aggregate progress calculation can proceed
-            extra: dict[str, Any] | None = job.extra
-            if stage == PullImageLayerStage.DOWNLOAD_COMPLETE and not job.extra:
-                extra = {"current": 1, "total": 1}
-
             job.update(
                 progress=progress,
                 stage=stage.status,
                 done=stage == PullImageLayerStage.PULL_COMPLETE,
-                extra=None if stage == PullImageLayerStage.RETRYING_DOWNLOAD else extra,
+                extra=None
+                if stage == PullImageLayerStage.RETRYING_DOWNLOAD
+                else job.extra,
             )
 
         # Once we have received a progress update for every child job, start to set status of the main one
@@ -358,11 +343,7 @@ class DockerInterface(JobGroup, ABC):
             for job in layer_jobs:
                 if not job.extra:
                     return
-                # Use download_total if available (for containerd snapshotter), otherwise use total
-                layer_total = job.extra.get("download_total") or job.extra.get("total")
-                if layer_total is None:
-                    return
-                total += layer_total
+                total += job.extra["total"]
             install_job.extra = {"total": total}
         else:
             total = install_job.extra["total"]
@@ -373,11 +354,7 @@ class DockerInterface(JobGroup, ABC):
         for job in layer_jobs:
             if not job.extra:
                 return
-            # Use download_total if available (for containerd snapshotter), otherwise use total
-            layer_total = job.extra.get("download_total") or job.extra.get("total")
-            if layer_total is None:
-                return
-            progress += job.progress * (layer_total / total)
+            progress += job.progress * (job.extra["total"] / total)
             job_stage = PullImageLayerStage.from_status(cast(str, job.stage))
 
             if job_stage < PullImageLayerStage.EXTRACTING:
@@ -394,7 +371,7 @@ class DockerInterface(JobGroup, ABC):
 
         # To reduce noise, limit updates to when result has changed by an entire percent or when stage changed
         if stage != install_job.stage or progress >= install_job.progress + 1:
-            install_job.update(stage=stage.status, progress=max(0, min(progress, 100)))
+            install_job.update(stage=stage.status, progress=progress)
 
     @Job(
         name="docker_interface_install",
@@ -446,6 +423,18 @@ class DockerInterface(JobGroup, ABC):
                 platform=MAP_ARCH[image_arch],
             )
 
+            # Validate content
+            try:
+                await self._validate_trust(cast(str, docker_image.id))
+            except CodeNotaryError:
+                with suppress(docker.errors.DockerException):
+                    await self.sys_run_in_executor(
+                        self.sys_docker.images.remove,
+                        image=f"{image}:{version!s}",
+                        force=True,
+                    )
+                raise
+
             # Tag latest
             if latest:
                 _LOGGER.info(
@@ -471,6 +460,16 @@ class DockerInterface(JobGroup, ABC):
             raise DockerError(
                 f"Unknown error with {image}:{version!s} -> {err!s}", _LOGGER.error
             ) from err
+        except CodeNotaryUntrusted as err:
+            raise DockerTrustError(
+                f"Pulled image {image}:{version!s} failed on content-trust verification!",
+                _LOGGER.critical,
+            ) from err
+        except CodeNotaryError as err:
+            raise DockerTrustError(
+                f"Error happened on Content-Trust check for {image}:{version!s}: {err!s}",
+                _LOGGER.error,
+            ) from err
         finally:
             if listener:
                 self.sys_bus.remove_listener(listener)
@@ -808,3 +807,24 @@ class DockerInterface(JobGroup, ABC):
         return self.sys_run_in_executor(
             self.sys_docker.container_run_inside, self.name, command
         )
+
+    async def _validate_trust(self, image_id: str) -> None:
+        """Validate trust of content."""
+        checksum = image_id.partition(":")[2]
+        return await self.sys_security.verify_own_content(checksum)
+
+    @Job(
+        name="docker_interface_check_trust",
+        on_condition=DockerJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
+    )
+    async def check_trust(self) -> None:
+        """Check trust of exists Docker image."""
+        try:
+            image = await self.sys_run_in_executor(
+                self.sys_docker.images.get, f"{self.image}:{self.version!s}"
+            )
+        except (docker.errors.DockerException, requests.RequestException):
+            return
+
+        await self._validate_trust(cast(str, image.id))
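Note: the two progress hunks above reduce the aggregate pull progress to a byte-weighted average over the per-layer jobs, bailing out until every layer has reported a total. A simplified, self-contained sketch of that calculation (plain dicts stand in for SupervisorJob objects):

```python
# Weighted aggregate of per-layer pull progress, as in the hunks above.
def aggregate_progress(layer_jobs: list[dict]) -> float | None:
    """Return overall progress, or None until every layer reported its byte total."""
    if any(not job.get("extra") for job in layer_jobs):
        return None  # not every layer has reported yet
    total = sum(job["extra"]["total"] for job in layer_jobs)
    if not total:
        return None
    # Each layer contributes its own progress weighted by its share of the bytes.
    return sum(job["progress"] * (job["extra"]["total"] / total) for job in layer_jobs)


layers = [
    {"progress": 100.0, "extra": {"total": 8_000_000}},
    {"progress": 25.0, "extra": {"total": 2_000_000}},
]
print(aggregate_progress(layers))  # 85.0
```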
@@ -423,12 +423,6 @@ class APINotFound(APIError):
     status = 404
 
 
-class APIGone(APIError):
-    """API is no longer available."""
-
-    status = 410
-
-
 class APIAddonNotInstalled(APIError):
     """Not installed addon requested at addons API."""
 
@@ -583,6 +577,21 @@ class PwnedConnectivityError(PwnedError):
     """Connectivity errors while checking pwned passwords."""
 
 
+# util/codenotary
+
+
+class CodeNotaryError(HassioError):
+    """Error general with CodeNotary."""
+
+
+class CodeNotaryUntrusted(CodeNotaryError):
+    """Error on untrusted content."""
+
+
+class CodeNotaryBackendError(CodeNotaryError):
+    """CodeNotary backend error happening."""
+
+
 # util/whoami
 
 
@@ -9,12 +9,7 @@ from typing import Any
 from supervisor.resolution.const import UnhealthyReason
 
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import (
-    DBusError,
-    DBusNotConnectedError,
-    DBusObjectError,
-    HardwareNotFound,
-)
+from ..exceptions import DBusError, DBusObjectError, HardwareNotFound
 from .const import UdevSubsystem
 from .data import Device
 
@@ -212,8 +207,6 @@ class HwDisk(CoreSysAttributes):
         try:
             block_device = self.sys_dbus.udisks2.get_block_device_by_path(device_path)
             drive = self.sys_dbus.udisks2.get_drive(block_device.drive)
-        except DBusNotConnectedError:
-            return None
         except DBusObjectError:
             _LOGGER.warning(
                 "Unable to find UDisks2 drive for device at %s", device_path.as_posix()
@@ -428,6 +428,13 @@ class HomeAssistantCore(JobGroup):
         """
         return self.instance.logs()
 
+    def check_trust(self) -> Awaitable[None]:
+        """Calculate HomeAssistant docker content trust.
+
+        Return Coroutine.
+        """
+        return self.instance.check_trust()
+
     async def stats(self) -> DockerStats:
         """Return stats of Home Assistant."""
         try:
@@ -98,9 +98,7 @@ class SupervisorJobError:
     """Representation of an error occurring during a supervisor job."""
 
     type_: type[HassioError] = HassioError
-    message: str = (
-        "Unknown error, see Supervisor logs (check with 'ha supervisor logs')"
-    )
+    message: str = "Unknown error, see supervisor logs"
     stage: str | None = None
 
     def as_dict(self) -> dict[str, str | None]:
@@ -329,17 +327,6 @@ class JobManager(FileConfiguration, CoreSysAttributes):
             if not curr_parent.child_job_syncs:
                 continue
 
-            # HACK: If parent trigger the same child job, we just skip this second
-            # sync. Maybe it would be better to have this reflected in the job stage
-            # and reset progress to 0 instead? There is no support for such stage
-            # information on Core update entities today though.
-            if curr_parent.done is True or curr_parent.progress >= 100:
-                _LOGGER.debug(
-                    "Skipping parent job sync for done parent job %s",
-                    curr_parent.name,
-                )
-                continue
-
             # Break after first match at each parent as it doesn't make sense
             # to match twice. But it could match multiple parents
             for sync in curr_parent.child_job_syncs:
@@ -76,6 +76,13 @@ class PluginBase(ABC, FileConfiguration, CoreSysAttributes):
         """Return True if a task is in progress."""
         return self.instance.in_progress
 
+    def check_trust(self) -> Awaitable[None]:
+        """Calculate plugin docker content trust.
+
+        Return Coroutine.
+        """
+        return self.instance.check_trust()
+
     def logs(self) -> Awaitable[bytes]:
         """Get docker plugin logs.
 
supervisor/resolution/checks/supervisor_trust.py (new file, 59 lines)

@@ -0,0 +1,59 @@
+"""Helpers to check supervisor trust."""
+
+import logging
+
+from ...const import CoreState
+from ...coresys import CoreSys
+from ...exceptions import CodeNotaryError, CodeNotaryUntrusted
+from ..const import ContextType, IssueType, UnhealthyReason
+from .base import CheckBase
+
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+
+def setup(coresys: CoreSys) -> CheckBase:
+    """Check setup function."""
+    return CheckSupervisorTrust(coresys)
+
+
+class CheckSupervisorTrust(CheckBase):
+    """CheckSystemTrust class for check."""
+
+    async def run_check(self) -> None:
+        """Run check if not affected by issue."""
+        if not self.sys_security.content_trust:
+            _LOGGER.warning(
+                "Skipping %s, content_trust is globally disabled", self.slug
+            )
+            return
+
+        try:
+            await self.sys_supervisor.check_trust()
+        except CodeNotaryUntrusted:
+            self.sys_resolution.add_unhealthy_reason(UnhealthyReason.UNTRUSTED)
+            self.sys_resolution.create_issue(IssueType.TRUST, ContextType.SUPERVISOR)
+        except CodeNotaryError:
+            pass
+
+    async def approve_check(self, reference: str | None = None) -> bool:
+        """Approve check if it is affected by issue."""
+        try:
+            await self.sys_supervisor.check_trust()
+        except CodeNotaryError:
+            return True
+        return False
+
+    @property
+    def issue(self) -> IssueType:
+        """Return a IssueType enum."""
+        return IssueType.TRUST
+
+    @property
+    def context(self) -> ContextType:
+        """Return a ContextType enum."""
+        return ContextType.SUPERVISOR
+
+    @property
+    def states(self) -> list[CoreState]:
+        """Return a list of valid states when this check can run."""
+        return [CoreState.RUNNING, CoreState.STARTUP]
@@ -39,6 +39,7 @@ class UnsupportedReason(StrEnum):
     APPARMOR = "apparmor"
     CGROUP_VERSION = "cgroup_version"
     CONNECTIVITY_CHECK = "connectivity_check"
+    CONTENT_TRUST = "content_trust"
     DBUS = "dbus"
     DNS_SERVER = "dns_server"
     DOCKER_CONFIGURATION = "docker_configuration"
@@ -53,6 +54,7 @@ class UnsupportedReason(StrEnum):
     PRIVILEGED = "privileged"
     RESTART_POLICY = "restart_policy"
     SOFTWARE = "software"
+    SOURCE_MODS = "source_mods"
     SUPERVISOR_VERSION = "supervisor_version"
     SYSTEMD = "systemd"
     SYSTEMD_JOURNAL = "systemd_journal"
@@ -101,6 +103,7 @@ class IssueType(StrEnum):
     PWNED = "pwned"
     REBOOT_REQUIRED = "reboot_required"
     SECURITY = "security"
+    TRUST = "trust"
     UPDATE_FAILED = "update_failed"
     UPDATE_ROLLBACK = "update_rollback"
 
@@ -112,6 +115,7 @@ class SuggestionType(StrEnum):
     CLEAR_FULL_BACKUP = "clear_full_backup"
     CREATE_FULL_BACKUP = "create_full_backup"
     DISABLE_BOOT = "disable_boot"
+    EXECUTE_INTEGRITY = "execute_integrity"
     EXECUTE_REBOOT = "execute_reboot"
     EXECUTE_REBUILD = "execute_rebuild"
     EXECUTE_RELOAD = "execute_reload"
@@ -13,6 +13,7 @@ from .validate import get_valid_modules
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
 UNHEALTHY = [
+    UnsupportedReason.DOCKER_VERSION,
     UnsupportedReason.LXC,
     UnsupportedReason.PRIVILEGED,
 ]
supervisor/resolution/evaluations/content_trust.py (new file, 34 lines)

@@ -0,0 +1,34 @@
+"""Evaluation class for Content Trust."""
+
+from ...const import CoreState
+from ...coresys import CoreSys
+from ..const import UnsupportedReason
+from .base import EvaluateBase
+
+
+def setup(coresys: CoreSys) -> EvaluateBase:
+    """Initialize evaluation-setup function."""
+    return EvaluateContentTrust(coresys)
+
+
+class EvaluateContentTrust(EvaluateBase):
+    """Evaluate system content trust level."""
+
+    @property
+    def reason(self) -> UnsupportedReason:
+        """Return a UnsupportedReason enum."""
+        return UnsupportedReason.CONTENT_TRUST
+
+    @property
+    def on_failure(self) -> str:
+        """Return a string that is printed when self.evaluate is True."""
+        return "System run with disabled trusted content security."
+
+    @property
+    def states(self) -> list[CoreState]:
+        """Return a list of valid states when this evaluation can run."""
+        return [CoreState.INITIALIZE, CoreState.SETUP, CoreState.RUNNING]
+
+    async def evaluate(self) -> bool:
+        """Run evaluation."""
+        return not self.sys_security.content_trust
@@ -8,7 +8,7 @@ from ..const import UnsupportedReason
 from .base import EvaluateBase
 
 EXPECTED_LOGGING = "journald"
-EXPECTED_STORAGE = ("overlay2", "overlayfs")
+EXPECTED_STORAGE = "overlay2"
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -41,18 +41,14 @@ class EvaluateDockerConfiguration(EvaluateBase):
         storage_driver = self.sys_docker.info.storage
         logging_driver = self.sys_docker.info.logging
 
-        is_unsupported = False
-
-        if storage_driver not in EXPECTED_STORAGE:
-            is_unsupported = True
+        if storage_driver != EXPECTED_STORAGE:
             _LOGGER.warning(
                 "Docker storage driver %s is not supported!", storage_driver
             )
 
         if logging_driver != EXPECTED_LOGGING:
-            is_unsupported = True
             _LOGGER.warning(
                 "Docker logging driver %s is not supported!", logging_driver
             )
 
-        return is_unsupported
+        return storage_driver != EXPECTED_STORAGE or logging_driver != EXPECTED_LOGGING
supervisor/resolution/evaluations/source_mods.py (new file, 72 lines)

@@ -0,0 +1,72 @@
+"""Evaluation class for Content Trust."""
+
+import errno
+import logging
+from pathlib import Path
+
+from ...const import CoreState
+from ...coresys import CoreSys
+from ...exceptions import CodeNotaryError, CodeNotaryUntrusted
+from ...utils.codenotary import calc_checksum_path_sourcecode
+from ..const import ContextType, IssueType, UnhealthyReason, UnsupportedReason
+from .base import EvaluateBase
+
+_SUPERVISOR_SOURCE = Path("/usr/src/supervisor/supervisor")
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+
+def setup(coresys: CoreSys) -> EvaluateBase:
+    """Initialize evaluation-setup function."""
+    return EvaluateSourceMods(coresys)
+
+
+class EvaluateSourceMods(EvaluateBase):
+    """Evaluate supervisor source modifications."""
+
+    @property
+    def reason(self) -> UnsupportedReason:
+        """Return a UnsupportedReason enum."""
+        return UnsupportedReason.SOURCE_MODS
+
+    @property
+    def on_failure(self) -> str:
+        """Return a string that is printed when self.evaluate is True."""
+        return "System detect unauthorized source code modifications."
+
+    @property
+    def states(self) -> list[CoreState]:
+        """Return a list of valid states when this evaluation can run."""
+        return [CoreState.RUNNING]
+
+    async def evaluate(self) -> bool:
+        """Run evaluation."""
+        if not self.sys_security.content_trust:
+            _LOGGER.warning("Disabled content-trust, skipping evaluation")
+            return False
+
+        # Calculate sume of the sourcecode
+        try:
+            checksum = await self.sys_run_in_executor(
+                calc_checksum_path_sourcecode, _SUPERVISOR_SOURCE
+            )
+        except OSError as err:
+            if err.errno == errno.EBADMSG:
+                self.sys_resolution.add_unhealthy_reason(
+                    UnhealthyReason.OSERROR_BAD_MESSAGE
+                )
+
+            self.sys_resolution.create_issue(
+                IssueType.CORRUPT_FILESYSTEM, ContextType.SYSTEM
+            )
+            _LOGGER.error("Can't calculate checksum of source code: %s", err)
+            return False
+
+        # Validate checksum
+        try:
+            await self.sys_security.verify_own_content(checksum)
+        except CodeNotaryUntrusted:
+            return True
+        except CodeNotaryError:
+            pass
+
+        return False
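Note: `calc_checksum_path_sourcecode()` is imported from `supervisor.utils.codenotary` above, but its body is not part of this diff. A hypothetical stand-in that hashes every `*.py` file under a directory in a stable order might look like this (illustrative only, not the repository's implementation):

```python
# Hypothetical checksum over a source tree; function name and behaviour are
# assumptions for illustration, not the actual supervisor helper.
import hashlib
from pathlib import Path


def checksum_sourcecode(folder: Path) -> str:
    """Return a SHA-256 over the sorted contents of all Python files in folder."""
    digest = hashlib.sha256()
    for path in sorted(folder.rglob("*.py")):
        digest.update(path.relative_to(folder).as_posix().encode())  # stable file order
        digest.update(path.read_bytes())
    return digest.hexdigest()
```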
supervisor/resolution/fixups/system_execute_integrity.py (new file, 67 lines)

@@ -0,0 +1,67 @@
+"""Helpers to check and fix issues with free space."""
+
+from datetime import timedelta
+import logging
+
+from ...coresys import CoreSys
+from ...exceptions import ResolutionFixupError, ResolutionFixupJobError
+from ...jobs.const import JobCondition, JobThrottle
+from ...jobs.decorator import Job
+from ...security.const import ContentTrustResult
+from ..const import ContextType, IssueType, SuggestionType
+from .base import FixupBase
+
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+
+def setup(coresys: CoreSys) -> FixupBase:
+    """Check setup function."""
+    return FixupSystemExecuteIntegrity(coresys)
+
+
+class FixupSystemExecuteIntegrity(FixupBase):
+    """Storage class for fixup."""
+
+    @Job(
+        name="fixup_system_execute_integrity_process",
+        conditions=[JobCondition.INTERNET_SYSTEM],
+        on_condition=ResolutionFixupJobError,
+        throttle_period=timedelta(hours=8),
+        throttle=JobThrottle.THROTTLE,
+    )
+    async def process_fixup(self, reference: str | None = None) -> None:
+        """Initialize the fixup class."""
+        result = await self.sys_security.integrity_check()
+
+        if ContentTrustResult.FAILED in (result.core, result.supervisor):
+            raise ResolutionFixupError()
+
+        for plugin in result.plugins:
+            if plugin != ContentTrustResult.FAILED:
+                continue
+            raise ResolutionFixupError()
+
+        for addon in result.addons:
+            if addon != ContentTrustResult.FAILED:
+                continue
+            raise ResolutionFixupError()
+
+    @property
+    def suggestion(self) -> SuggestionType:
+        """Return a SuggestionType enum."""
+        return SuggestionType.EXECUTE_INTEGRITY
+
+    @property
+    def context(self) -> ContextType:
+        """Return a ContextType enum."""
+        return ContextType.SYSTEM
+
+    @property
+    def issues(self) -> list[IssueType]:
+        """Return a IssueType enum list."""
+        return [IssueType.TRUST]
+
+    @property
+    def auto(self) -> bool:
+        """Return if a fixup can be apply as auto fix."""
+        return True
supervisor/security/const.py (new file, 24 lines)

@@ -0,0 +1,24 @@
+"""Security constants."""
+
+from enum import StrEnum
+
+import attr
+
+
+class ContentTrustResult(StrEnum):
+    """Content trust result enum."""
+
+    PASS = "pass"
+    ERROR = "error"
+    FAILED = "failed"
+    UNTESTED = "untested"
+
+
+@attr.s
+class IntegrityResult:
+    """Result of a full integrity check."""
+
+    supervisor: ContentTrustResult = attr.ib(default=ContentTrustResult.UNTESTED)
+    core: ContentTrustResult = attr.ib(default=ContentTrustResult.UNTESTED)
+    plugins: dict[str, ContentTrustResult] = attr.ib(default={})
+    addons: dict[str, ContentTrustResult] = attr.ib(default={})
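Note: the restored integrity-check API serializes this result with `attr.asdict()`, so clients receive a plain mapping of the four fields above. A self-contained sketch of the shape (the mapping fields use `factory=dict` here purely to keep the example standalone):

```python
# Minimal, standalone re-declaration just to show the attr.asdict() payload shape.
from enum import StrEnum

import attr


class ContentTrustResult(StrEnum):
    PASS = "pass"
    ERROR = "error"
    FAILED = "failed"
    UNTESTED = "untested"


@attr.s
class IntegrityResult:
    supervisor: ContentTrustResult = attr.ib(default=ContentTrustResult.UNTESTED)
    core: ContentTrustResult = attr.ib(default=ContentTrustResult.UNTESTED)
    plugins: dict = attr.ib(factory=dict)
    addons: dict = attr.ib(factory=dict)


payload = attr.asdict(IntegrityResult(supervisor=ContentTrustResult.PASS))
assert payload["supervisor"] == "pass"    # StrEnum members compare equal to their value
assert payload["core"] == "untested"
assert payload["plugins"] == {} and payload["addons"] == {}
```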
@@ -4,12 +4,27 @@ from __future__ import annotations
|
|||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from ..const import ATTR_FORCE_SECURITY, ATTR_PWNED, FILE_HASSIO_SECURITY
|
from ..const import (
|
||||||
|
ATTR_CONTENT_TRUST,
|
||||||
|
ATTR_FORCE_SECURITY,
|
||||||
|
ATTR_PWNED,
|
||||||
|
FILE_HASSIO_SECURITY,
|
||||||
|
)
|
||||||
from ..coresys import CoreSys, CoreSysAttributes
|
from ..coresys import CoreSys, CoreSysAttributes
|
||||||
from ..exceptions import PwnedError
|
from ..exceptions import (
|
||||||
|
CodeNotaryError,
|
||||||
|
CodeNotaryUntrusted,
|
||||||
|
PwnedError,
|
||||||
|
SecurityJobError,
|
||||||
|
)
|
||||||
|
from ..jobs.const import JobConcurrency
|
||||||
|
from ..jobs.decorator import Job, JobCondition
|
||||||
|
from ..resolution.const import ContextType, IssueType, SuggestionType
|
||||||
|
from ..utils.codenotary import cas_validate
|
||||||
from ..utils.common import FileConfiguration
|
from ..utils.common import FileConfiguration
|
||||||
from ..utils.pwned import check_pwned_password
|
from ..utils.pwned import check_pwned_password
|
||||||
from ..validate import SCHEMA_SECURITY_CONFIG
|
 from ..validate import SCHEMA_SECURITY_CONFIG
+from .const import ContentTrustResult, IntegrityResult

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -22,6 +37,16 @@ class Security(FileConfiguration, CoreSysAttributes):
         super().__init__(FILE_HASSIO_SECURITY, SCHEMA_SECURITY_CONFIG)
         self.coresys = coresys

+    @property
+    def content_trust(self) -> bool:
+        """Return if content trust is enabled/disabled."""
+        return self._data[ATTR_CONTENT_TRUST]
+
+    @content_trust.setter
+    def content_trust(self, value: bool) -> None:
+        """Set content trust is enabled/disabled."""
+        self._data[ATTR_CONTENT_TRUST] = value
+
     @property
     def force(self) -> bool:
         """Return if force security is enabled/disabled."""
@@ -42,6 +67,30 @@ class Security(FileConfiguration, CoreSysAttributes):
         """Set pwned is enabled/disabled."""
         self._data[ATTR_PWNED] = value

+    async def verify_content(self, signer: str, checksum: str) -> None:
+        """Verify content on CAS."""
+        if not self.content_trust:
+            _LOGGER.warning("Disabled content-trust, skip validation")
+            return
+
+        try:
+            await cas_validate(signer, checksum)
+        except CodeNotaryUntrusted:
+            raise
+        except CodeNotaryError:
+            if self.force:
+                raise
+            self.sys_resolution.create_issue(
+                IssueType.TRUST,
+                ContextType.SYSTEM,
+                suggestions=[SuggestionType.EXECUTE_INTEGRITY],
+            )
+            return
+
+    async def verify_own_content(self, checksum: str) -> None:
+        """Verify content from HA org."""
+        return await self.verify_content("notary@home-assistant.io", checksum)
+
     async def verify_secret(self, pwned_hash: str) -> None:
         """Verify pwned state of a secret."""
         if not self.pwned:
@@ -54,3 +103,73 @@ class Security(FileConfiguration, CoreSysAttributes):
             if self.force:
                 raise
             return
+
+    @Job(
+        name="security_manager_integrity_check",
+        conditions=[JobCondition.INTERNET_SYSTEM],
+        on_condition=SecurityJobError,
+        concurrency=JobConcurrency.REJECT,
+    )
+    async def integrity_check(self) -> IntegrityResult:
+        """Run a full system integrity check of the platform.
+
+        We only allow to install trusted content.
+        This is a out of the band manual check.
+        """
+        result: IntegrityResult = IntegrityResult()
+        if not self.content_trust:
+            _LOGGER.warning(
+                "Skipping integrity check, content_trust is globally disabled"
+            )
+            return result
+
+        # Supervisor
+        try:
+            await self.sys_supervisor.check_trust()
+            result.supervisor = ContentTrustResult.PASS
+        except CodeNotaryUntrusted:
+            result.supervisor = ContentTrustResult.ERROR
+            self.sys_resolution.create_issue(IssueType.TRUST, ContextType.SUPERVISOR)
+        except CodeNotaryError:
+            result.supervisor = ContentTrustResult.FAILED
+
+        # Core
+        try:
+            await self.sys_homeassistant.core.check_trust()
+            result.core = ContentTrustResult.PASS
+        except CodeNotaryUntrusted:
+            result.core = ContentTrustResult.ERROR
+            self.sys_resolution.create_issue(IssueType.TRUST, ContextType.CORE)
+        except CodeNotaryError:
+            result.core = ContentTrustResult.FAILED
+
+        # Plugins
+        for plugin in self.sys_plugins.all_plugins:
+            try:
+                await plugin.check_trust()
+                result.plugins[plugin.slug] = ContentTrustResult.PASS
+            except CodeNotaryUntrusted:
+                result.plugins[plugin.slug] = ContentTrustResult.ERROR
+                self.sys_resolution.create_issue(
+                    IssueType.TRUST, ContextType.PLUGIN, reference=plugin.slug
+                )
+            except CodeNotaryError:
+                result.plugins[plugin.slug] = ContentTrustResult.FAILED
+
+        # Add-ons
+        for addon in self.sys_addons.installed:
+            if not addon.signed:
+                result.addons[addon.slug] = ContentTrustResult.UNTESTED
+                continue
+            try:
+                await addon.check_trust()
+                result.addons[addon.slug] = ContentTrustResult.PASS
+            except CodeNotaryUntrusted:
+                result.addons[addon.slug] = ContentTrustResult.ERROR
+                self.sys_resolution.create_issue(
+                    IssueType.TRUST, ContextType.ADDON, reference=addon.slug
+                )
+            except CodeNotaryError:
+                result.addons[addon.slug] = ContentTrustResult.FAILED
+
+        return result
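For orientation only (not part of the diff): the integrity_check() job added above returns an IntegrityResult carrying per-component ContentTrustResult values. A minimal, hypothetical consumer sketch, assuming only the coresys wiring visible in the code above:

# Sketch: consume the IntegrityResult returned by Security.integrity_check()
async def report_integrity(coresys) -> None:
    result = await coresys.security.integrity_check()
    print("supervisor:", result.supervisor)
    print("core:", result.core)
    for slug, state in result.plugins.items():
        print(f"plugin {slug}: {state}")
    for slug, state in result.addons.items():
        print(f"addon {slug}: {state}")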
@@ -25,6 +25,8 @@ from .coresys import CoreSys, CoreSysAttributes
 from .docker.stats import DockerStats
 from .docker.supervisor import DockerSupervisor
 from .exceptions import (
+    CodeNotaryError,
+    CodeNotaryUntrusted,
     DockerError,
     HostAppArmorError,
     SupervisorAppArmorError,
@@ -35,6 +37,7 @@ from .exceptions import (
 from .jobs.const import JobCondition, JobThrottle
 from .jobs.decorator import Job
 from .resolution.const import ContextType, IssueType, UnhealthyReason
+from .utils.codenotary import calc_checksum
 from .utils.sentry import async_capture_exception

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -147,6 +150,20 @@ class Supervisor(CoreSysAttributes):
                 _LOGGER.error,
             ) from err

+        # Validate
+        try:
+            await self.sys_security.verify_own_content(calc_checksum(data))
+        except CodeNotaryUntrusted as err:
+            raise SupervisorAppArmorError(
+                "Content-Trust is broken for the AppArmor profile fetch!",
+                _LOGGER.critical,
+            ) from err
+        except CodeNotaryError as err:
+            raise SupervisorAppArmorError(
+                f"CodeNotary error while processing AppArmor fetch: {err!s}",
+                _LOGGER.error,
+            ) from err
+
         # Load
         temp_dir: TemporaryDirectory | None = None

@@ -256,6 +273,13 @@ class Supervisor(CoreSysAttributes):
         """
         return self.instance.logs()

+    def check_trust(self) -> Awaitable[None]:
+        """Calculate Supervisor docker content trust.
+
+        Return Coroutine.
+        """
+        return self.instance.check_trust()
+
     async def stats(self) -> DockerStats:
         """Return stats of Supervisor."""
         try:
@@ -31,8 +31,14 @@ from .const import (
     UpdateChannel,
 )
 from .coresys import CoreSys, CoreSysAttributes
-from .exceptions import UpdaterError, UpdaterJobError
+from .exceptions import (
+    CodeNotaryError,
+    CodeNotaryUntrusted,
+    UpdaterError,
+    UpdaterJobError,
+)
 from .jobs.decorator import Job, JobCondition
+from .utils.codenotary import calc_checksum
 from .utils.common import FileConfiguration
 from .validate import SCHEMA_UPDATER_CONFIG

@@ -283,6 +289,19 @@ class Updater(FileConfiguration, CoreSysAttributes):
             self.sys_bus.remove_listener(self._connectivity_listener)
             self._connectivity_listener = None

+        # Validate
+        try:
+            await self.sys_security.verify_own_content(calc_checksum(data))
+        except CodeNotaryUntrusted as err:
+            raise UpdaterError(
+                "Content-Trust is broken for the version file fetch!", _LOGGER.critical
+            ) from err
+        except CodeNotaryError as err:
+            raise UpdaterError(
+                f"CodeNotary error while processing version fetch: {err!s}",
+                _LOGGER.error,
+            ) from err
+
         # Parse data
         try:
             data = json.loads(data)
109
supervisor/utils/codenotary.py
Normal file
@@ -0,0 +1,109 @@
+"""Small wrapper for CodeNotary."""
+
+from __future__ import annotations
+
+import asyncio
+import hashlib
+import json
+import logging
+from pathlib import Path
+import shlex
+from typing import Final
+
+from dirhash import dirhash
+
+from ..exceptions import CodeNotaryBackendError, CodeNotaryError, CodeNotaryUntrusted
+from . import clean_env
+
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+_CAS_CMD: str = (
+    "cas authenticate --signerID {signer} --silent --output json --hash {sum}"
+)
+_CACHE: set[tuple[str, str]] = set()
+
+
+_ATTR_ERROR: Final = "error"
+_ATTR_STATUS: Final = "status"
+_FALLBACK_ERROR: Final = "Unknown CodeNotary backend issue"
+
+
+def calc_checksum(data: str | bytes) -> str:
+    """Generate checksum for CodeNotary."""
+    if isinstance(data, str):
+        return hashlib.sha256(data.encode()).hexdigest()
+    return hashlib.sha256(data).hexdigest()
+
+
+def calc_checksum_path_sourcecode(folder: Path) -> str:
+    """Calculate checksum for a path source code.
+
+    Need catch OSError.
+    """
+    return dirhash(folder.as_posix(), "sha256", match=["*.py"])
+
+
+# pylint: disable=unreachable
+async def cas_validate(
+    signer: str,
+    checksum: str,
+) -> None:
+    """Validate data against CodeNotary."""
+    return
+    if (checksum, signer) in _CACHE:
+        return
+
+    # Generate command for request
+    command = shlex.split(_CAS_CMD.format(signer=signer, sum=checksum))
+
+    # Request notary authorization
+    _LOGGER.debug("Send cas command: %s", command)
+    try:
+        proc = await asyncio.create_subprocess_exec(
+            *command,
+            stdin=asyncio.subprocess.DEVNULL,
+            stdout=asyncio.subprocess.PIPE,
+            stderr=asyncio.subprocess.PIPE,
+            env=clean_env(),
+        )
+
+        async with asyncio.timeout(15):
+            data, error = await proc.communicate()
+    except TimeoutError:
+        raise CodeNotaryBackendError(
+            "Timeout while processing CodeNotary", _LOGGER.warning
+        ) from None
+    except OSError as err:
+        raise CodeNotaryError(
+            f"CodeNotary fatal error: {err!s}", _LOGGER.critical
+        ) from err
+
+    # Check if Notarized
+    if proc.returncode != 0 and not data:
+        if error:
+            try:
+                error = error.decode("utf-8")
+            except UnicodeDecodeError as err:
+                raise CodeNotaryBackendError(_FALLBACK_ERROR, _LOGGER.warning) from err
+            if "not notarized" in error:
+                raise CodeNotaryUntrusted()
+        else:
+            error = _FALLBACK_ERROR
+        raise CodeNotaryBackendError(error, _LOGGER.warning)
+
+    # Parse data
+    try:
+        data_json = json.loads(data)
+        _LOGGER.debug("CodeNotary response with: %s", data_json)
+    except (json.JSONDecodeError, UnicodeDecodeError) as err:
+        raise CodeNotaryError(
+            f"Can't parse CodeNotary output: {data!s} - {err!s}", _LOGGER.error
+        ) from err
+
+    if _ATTR_ERROR in data_json:
+        raise CodeNotaryBackendError(data_json[_ATTR_ERROR], _LOGGER.warning)
+
+    if data_json[_ATTR_STATUS] == 0:
+        _CACHE.add((checksum, signer))
+    else:
+        raise CodeNotaryUntrusted()
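A short usage sketch for this new helper module (illustration only, not part of the diff; the signer ID is the one used by verify_own_content above):

# Sketch: checksum a payload and validate it against CodeNotary
from supervisor.utils.codenotary import calc_checksum, cas_validate

async def validate_payload(payload: bytes) -> None:
    # sha256 hex digest of the payload
    checksum = calc_checksum(payload)
    # Raises CodeNotaryUntrusted / CodeNotaryBackendError on failure;
    # successful (checksum, signer) pairs are cached in-process.
    await cas_validate("notary@home-assistant.io", checksum)

Note that, as committed above, cas_validate() returns right after its docstring (hence the pylint "unreachable" disable), so the subprocess path below it is effectively dead code in this revision.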
@@ -12,6 +12,7 @@ from .const import (
     ATTR_AUTO_UPDATE,
     ATTR_CHANNEL,
     ATTR_CLI,
+    ATTR_CONTENT_TRUST,
     ATTR_COUNTRY,
     ATTR_DEBUG,
     ATTR_DEBUG_BLOCK,
@@ -228,6 +229,7 @@ SCHEMA_INGRESS_CONFIG = vol.Schema(
 # pylint: disable=no-value-for-parameter
 SCHEMA_SECURITY_CONFIG = vol.Schema(
     {
+        vol.Optional(ATTR_CONTENT_TRUST, default=True): vol.Boolean(),
         vol.Optional(ATTR_PWNED, default=True): vol.Boolean(),
         vol.Optional(ATTR_FORCE_SECURITY, default=False): vol.Boolean(),
     },
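The added ATTR_CONTENT_TRUST option defaults to enabled. A minimal illustration of the resulting validation behaviour, using literal keys in place of the ATTR_* constants (the string values are assumed, not taken from the diff):

import voluptuous as vol

# Mirrors the schema above for illustration purposes only
SCHEMA = vol.Schema(
    {
        vol.Optional("content_trust", default=True): vol.Boolean(),
        vol.Optional("pwned", default=True): vol.Boolean(),
        vol.Optional("force_security", default=False): vol.Boolean(),
    }
)

# An empty options payload is filled with the defaults
assert SCHEMA({}) == {"content_trust": True, "pwned": True, "force_security": False}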
@@ -1,28 +1,12 @@
 """Test ingress API."""

-from collections.abc import AsyncGenerator
-from unittest.mock import AsyncMock, MagicMock, patch
+from unittest.mock import AsyncMock, patch

-import aiohttp
-from aiohttp import hdrs, web
-from aiohttp.test_utils import TestClient, TestServer
-import pytest
+from aiohttp.test_utils import TestClient

-from supervisor.addons.addon import Addon
 from supervisor.coresys import CoreSys


-@pytest.fixture(name="real_websession")
-async def fixture_real_websession(
-    coresys: CoreSys,
-) -> AsyncGenerator[aiohttp.ClientSession]:
-    """Fixture for real aiohttp ClientSession for ingress proxy tests."""
-    session = aiohttp.ClientSession()
-    coresys._websession = session  # pylint: disable=W0212
-    yield session
-    await session.close()
-
-
 async def test_validate_session(api_client: TestClient, coresys: CoreSys):
     """Test validating ingress session."""
     with patch("aiohttp.web_request.BaseRequest.__getitem__", return_value=None):
@@ -102,126 +86,3 @@ async def test_validate_session_with_user_id(
         assert (
             coresys.ingress.get_session_data(session).user.display_name == "Some Name"
         )
-
-
-async def test_ingress_proxy_no_content_type_for_empty_body_responses(
-    api_client: TestClient, coresys: CoreSys, real_websession: aiohttp.ClientSession
-):
-    """Test that empty body responses don't get Content-Type header."""
-
-    # Create a mock add-on backend server that returns various status codes
-    async def mock_addon_handler(request: web.Request) -> web.Response:
-        """Mock add-on handler that returns different status codes based on path."""
-        path = request.path
-
-        if path == "/204":
-            # 204 No Content - should not have Content-Type
-            return web.Response(status=204)
-        elif path == "/304":
-            # 304 Not Modified - should not have Content-Type
-            return web.Response(status=304)
-        elif path == "/100":
-            # 100 Continue - should not have Content-Type
-            return web.Response(status=100)
-        elif path == "/head":
-            # HEAD request - should have Content-Type (same as GET would)
-            return web.Response(body=b"test", content_type="text/html")
-        elif path == "/200":
-            # 200 OK with body - should have Content-Type
-            return web.Response(body=b"test content", content_type="text/plain")
-        elif path == "/200-no-content-type":
-            # 200 OK without explicit Content-Type - should get default
-            return web.Response(body=b"test content")
-        elif path == "/200-json":
-            # 200 OK with JSON - should preserve Content-Type
-            return web.Response(
-                body=b'{"key": "value"}', content_type="application/json"
-            )
-        else:
-            return web.Response(body=b"default", content_type="text/html")
-
-    # Create test server for mock add-on
-    app = web.Application()
-    app.router.add_route("*", "/{tail:.*}", mock_addon_handler)
-    addon_server = TestServer(app)
-    await addon_server.start_server()
-
-    try:
-        # Create ingress session
-        resp = await api_client.post("/ingress/session")
-        result = await resp.json()
-        session = result["data"]["session"]
-
-        # Create a mock add-on
-        mock_addon = MagicMock(spec=Addon)
-        mock_addon.slug = "test_addon"
-        mock_addon.ip_address = addon_server.host
-        mock_addon.ingress_port = addon_server.port
-        mock_addon.ingress_stream = False
-
-        # Generate an ingress token and register the add-on
-        ingress_token = coresys.ingress.create_session()
-        with patch.object(coresys.ingress, "get", return_value=mock_addon):
-            # Test 204 No Content - should NOT have Content-Type
-            resp = await api_client.get(
-                f"/ingress/{ingress_token}/204",
-                cookies={"ingress_session": session},
-            )
-            assert resp.status == 204
-            assert hdrs.CONTENT_TYPE not in resp.headers
-
-            # Test 304 Not Modified - should NOT have Content-Type
-            resp = await api_client.get(
-                f"/ingress/{ingress_token}/304",
-                cookies={"ingress_session": session},
-            )
-            assert resp.status == 304
-            assert hdrs.CONTENT_TYPE not in resp.headers
-
-            # Test HEAD request - SHOULD have Content-Type (same as GET)
-            # per RFC 9110: HEAD should return same headers as GET
-            resp = await api_client.head(
-                f"/ingress/{ingress_token}/head",
-                cookies={"ingress_session": session},
-            )
-            assert resp.status == 200
-            assert hdrs.CONTENT_TYPE in resp.headers
-            assert "text/html" in resp.headers[hdrs.CONTENT_TYPE]
-            # Body should be empty for HEAD
-            body = await resp.read()
-            assert body == b""
-
-            # Test 200 OK with body - SHOULD have Content-Type
-            resp = await api_client.get(
-                f"/ingress/{ingress_token}/200",
-                cookies={"ingress_session": session},
-            )
-            assert resp.status == 200
-            assert hdrs.CONTENT_TYPE in resp.headers
-            assert resp.headers[hdrs.CONTENT_TYPE] == "text/plain"
-            body = await resp.read()
-            assert body == b"test content"
-
-            # Test 200 OK without explicit Content-Type - SHOULD get default
-            resp = await api_client.get(
-                f"/ingress/{ingress_token}/200-no-content-type",
-                cookies={"ingress_session": session},
-            )
-            assert resp.status == 200
-            assert hdrs.CONTENT_TYPE in resp.headers
-            # Should get application/octet-stream as default from aiohttp ClientResponse
-            assert "application/octet-stream" in resp.headers[hdrs.CONTENT_TYPE]
-
-            # Test 200 OK with JSON - SHOULD preserve Content-Type
-            resp = await api_client.get(
-                f"/ingress/{ingress_token}/200-json",
-                cookies={"ingress_session": session},
-            )
-            assert resp.status == 200
-            assert hdrs.CONTENT_TYPE in resp.headers
-            assert "application/json" in resp.headers[hdrs.CONTENT_TYPE]
-            body = await resp.read()
-            assert body == b'{"key": "value"}'
-
-    finally:
-        await addon_server.close()
@@ -9,7 +9,7 @@ import logging
 from typing import Any, cast
 from unittest.mock import AsyncMock, patch

-from aiohttp import ClientWebSocketResponse, WSCloseCode
+from aiohttp import ClientWebSocketResponse, WSCloseCode, web
 from aiohttp.http_websocket import WSMessage, WSMsgType
 from aiohttp.test_utils import TestClient
 import pytest
@@ -223,6 +223,32 @@ async def test_proxy_auth_abort_log(
     )


+async def test_websocket_transport_none(
+    coresys,
+    caplog: pytest.LogCaptureFixture,
+):
+    """Test WebSocket connection with transport None is handled gracefully."""
+    # Get the API proxy instance from coresys
+    api_proxy = APIProxy.__new__(APIProxy)
+    api_proxy.coresys = coresys
+
+    # Create a mock request with transport set to None to simulate connection loss
+    mock_request = AsyncMock(spec=web.Request)
+    mock_request.transport = None
+
+    caplog.clear()
+    with caplog.at_level(logging.WARNING):
+        # This should raise HTTPBadRequest, not AssertionError
+        with pytest.raises(web.HTTPBadRequest) as exc_info:
+            await api_proxy.websocket(mock_request)
+
+    # Verify the error reason
+    assert exc_info.value.reason == "Connection closed"
+
+    # Verify the warning was logged
+    assert "WebSocket connection lost before upgrade" in caplog.text
+
+
 @pytest.mark.parametrize("path", ["", "mock_path"])
 async def test_api_proxy_get_request(
     api_client: TestClient,
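The new test above expects the proxy to detect a dropped client before the WebSocket upgrade. A minimal sketch of that kind of guard in a plain aiohttp handler (an assumption based on the test, not the actual Supervisor implementation):

import logging

from aiohttp import web

_LOGGER = logging.getLogger(__name__)

async def websocket(request: web.Request) -> web.WebSocketResponse:
    # Fail fast when the client connection is already gone, instead of
    # letting ws.prepare() trip an AssertionError inside aiohttp.
    if request.transport is None:
        _LOGGER.warning("WebSocket connection lost before upgrade")
        raise web.HTTPBadRequest(reason="Connection closed")

    ws = web.WebSocketResponse()
    await ws.prepare(request)
    return ws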
@@ -17,6 +17,16 @@ async def test_api_security_options_force_security(api_client, coresys: CoreSys)
     assert coresys.security.force


+@pytest.mark.asyncio
+async def test_api_security_options_content_trust(api_client, coresys: CoreSys):
+    """Test security options content trust."""
+    assert coresys.security.content_trust
+
+    await api_client.post("/security/options", json={"content_trust": False})
+
+    assert not coresys.security.content_trust
+
+
 @pytest.mark.asyncio
 async def test_api_security_options_pwned(api_client, coresys: CoreSys):
     """Test security options pwned."""
@@ -31,8 +41,11 @@ async def test_api_security_options_pwned(api_client, coresys: CoreSys):
 async def test_api_integrity_check(
     api_client, coresys: CoreSys, supervisor_internet: AsyncMock
 ):
-    """Test security integrity check - now deprecated."""
-    resp = await api_client.post("/security/integrity")
-    # CodeNotary integrity check has been removed, should return 410 Gone
-    assert resp.status == 410
+    """Test security integrity check."""
+    coresys.security.content_trust = False
+
+    resp = await api_client.post("/security/integrity")
+    result = await resp.json()
+
+    assert result["data"]["core"] == "untested"
+    assert result["data"]["supervisor"] == "untested"
@@ -31,6 +31,15 @@ from supervisor.jobs import JobSchedulerOptions, SupervisorJob
 from tests.common import load_json_fixture


+@pytest.fixture(autouse=True)
+def mock_verify_content(coresys: CoreSys):
+    """Mock verify_content utility during tests."""
+    with patch.object(
+        coresys.security, "verify_content", return_value=None
+    ) as verify_content:
+        yield verify_content
+
+
 @pytest.mark.parametrize(
     "cpu_arch, platform",
     [
@@ -569,156 +578,3 @@ async def test_install_progress_handles_download_restart(
         await event.wait()

     capture_exception.assert_not_called()
-
-
-async def test_install_progress_handles_layers_skipping_download(
-    coresys: CoreSys,
-    test_docker_interface: DockerInterface,
-    capture_exception: Mock,
-):
-    """Test install handles small layers that skip downloading phase and go directly to download complete.
-
-    Reproduces the real-world scenario from Supervisor issue #6286:
-    - Small layer (02a6e69d8d00) completes Download complete at 10:14:08 without ever Downloading
-    - Normal layer (3f4a84073184) starts Downloading at 10:14:09 with progress updates
-    """
-    coresys.core.set_state(CoreState.RUNNING)
-
-    # Reproduce EXACT sequence from SupervisorNoUpdateProgressLogs.txt:
-    # Small layer (02a6e69d8d00) completes BEFORE normal layer (3f4a84073184) starts downloading
-    coresys.docker.docker.api.pull.return_value = [
-        {"status": "Pulling from test/image", "id": "latest"},
-        # Small layer that skips downloading (02a6e69d8d00 in logs, 96 bytes)
-        {"status": "Pulling fs layer", "progressDetail": {}, "id": "02a6e69d8d00"},
-        {"status": "Pulling fs layer", "progressDetail": {}, "id": "3f4a84073184"},
-        {"status": "Waiting", "progressDetail": {}, "id": "02a6e69d8d00"},
-        {"status": "Waiting", "progressDetail": {}, "id": "3f4a84073184"},
-        # Goes straight to Download complete (10:14:08 in logs) - THIS IS THE KEY MOMENT
-        {"status": "Download complete", "progressDetail": {}, "id": "02a6e69d8d00"},
-        # Normal layer that downloads (3f4a84073184 in logs, 25MB)
-        # Downloading starts (10:14:09 in logs) - progress updates should happen NOW!
-        {
-            "status": "Downloading",
-            "progressDetail": {"current": 260937, "total": 25371463},
-            "progress": "[> ] 260.9kB/25.37MB",
-            "id": "3f4a84073184",
-        },
-        {
-            "status": "Downloading",
-            "progressDetail": {"current": 5505024, "total": 25371463},
-            "progress": "[==========> ] 5.505MB/25.37MB",
-            "id": "3f4a84073184",
-        },
-        {
-            "status": "Downloading",
-            "progressDetail": {"current": 11272192, "total": 25371463},
-            "progress": "[======================> ] 11.27MB/25.37MB",
-            "id": "3f4a84073184",
-        },
-        {"status": "Download complete", "progressDetail": {}, "id": "3f4a84073184"},
-        {
-            "status": "Extracting",
-            "progressDetail": {"current": 25371463, "total": 25371463},
-            "progress": "[==================================================>] 25.37MB/25.37MB",
-            "id": "3f4a84073184",
-        },
-        {"status": "Pull complete", "progressDetail": {}, "id": "3f4a84073184"},
-        # Small layer finally extracts (10:14:58 in logs)
-        {
-            "status": "Extracting",
-            "progressDetail": {"current": 96, "total": 96},
-            "progress": "[==================================================>] 96B/96B",
-            "id": "02a6e69d8d00",
-        },
-        {"status": "Pull complete", "progressDetail": {}, "id": "02a6e69d8d00"},
-        {"status": "Digest: sha256:test"},
-        {"status": "Status: Downloaded newer image for test/image:latest"},
-    ]
-
-    # Capture immutable snapshots of install job progress using job.as_dict()
-    # This solves the mutable object problem - we snapshot state at call time
-    install_job_snapshots = []
-    original_on_job_change = coresys.jobs._on_job_change  # pylint: disable=W0212
-
-    def capture_and_forward(job_obj, attribute, value):
-        # Capture immutable snapshot if this is the install job with progress
-        if job_obj.name == "docker_interface_install" and job_obj.progress > 0:
-            install_job_snapshots.append(job_obj.as_dict())
-        # Forward to original to maintain functionality
-        return original_on_job_change(job_obj, attribute, value)
-
-    with patch.object(coresys.jobs, "_on_job_change", side_effect=capture_and_forward):
-        event = asyncio.Event()
-        job, install_task = coresys.jobs.schedule_job(
-            test_docker_interface.install,
-            JobSchedulerOptions(),
-            AwesomeVersion("1.2.3"),
-            "test",
-        )
-
-        async def listen_for_job_end(reference: SupervisorJob):
-            if reference.uuid != job.uuid:
-                return
-            event.set()
-
-        coresys.bus.register_event(BusEvent.SUPERVISOR_JOB_END, listen_for_job_end)
-        await install_task
-        await event.wait()
-
-    # First update from layer download should have rather low progress ((260937/25445459) / 2 ~ 0.5%)
-    assert install_job_snapshots[0]["progress"] < 1
-
-    # Total 8 events should lead to a progress update on the install job
-    assert len(install_job_snapshots) == 8
-
-    # Job should complete successfully
-    assert job.done is True
-    assert job.progress == 100
-    capture_exception.assert_not_called()
-
-
-async def test_install_progress_handles_containerd_snapshotter(
-    coresys: CoreSys,
-    test_docker_interface: DockerInterface,
-    capture_exception: Mock,
-):
-    """Test install handles containerd snapshotter format where extraction has no total bytes.
-
-    With containerd snapshotter, the extraction phase reports time elapsed in seconds
-    rather than bytes extracted. The progress_detail has format:
-    {"current": <seconds>, "units": "s"} with total=None
-
-    This test ensures we handle this gracefully by using the download size for
-    aggregate progress calculation.
-    """
-    coresys.core.set_state(CoreState.RUNNING)
-
-    # Fixture emulates containerd snapshotter pull log format
-    coresys.docker.docker.api.pull.return_value = load_json_fixture(
-        "docker_pull_image_log_containerd.json"
-    )
-
-    with patch.object(
-        type(coresys.supervisor), "arch", PropertyMock(return_value="i386")
-    ):
-        event = asyncio.Event()
-        job, install_task = coresys.jobs.schedule_job(
-            test_docker_interface.install,
-            JobSchedulerOptions(),
-            AwesomeVersion("1.2.3"),
-            "test",
-        )
-
-        async def listen_for_job_end(reference: SupervisorJob):
-            if reference.uuid != job.uuid:
-                return
-            event.set()
-
-        coresys.bus.register_event(BusEvent.SUPERVISOR_JOB_END, listen_for_job_end)
-        await install_task
-        await event.wait()
-
-    # Job should complete successfully without exceptions
-    assert job.done is True
-    assert job.progress == 100
-    capture_exception.assert_not_called()
122
tests/fixtures/docker_pull_image_log_containerd.json
vendored
@@ -1,122 +0,0 @@
-[
-  {
-    "status": "Pulling from home-assistant/test-image",
-    "id": "2025.7.1"
-  },
-  {
-    "status": "Pulling fs layer",
-    "progressDetail": {},
-    "id": "layer1"
-  },
-  {
-    "status": "Pulling fs layer",
-    "progressDetail": {},
-    "id": "layer2"
-  },
-  {
-    "status": "Downloading",
-    "progressDetail": {
-      "current": 1048576,
-      "total": 5178461
-    },
-    "progress": "[===========> ] 1.049MB/5.178MB",
-    "id": "layer1"
-  },
-  {
-    "status": "Downloading",
-    "progressDetail": {
-      "current": 5178461,
-      "total": 5178461
-    },
-    "progress": "[==================================================>] 5.178MB/5.178MB",
-    "id": "layer1"
-  },
-  {
-    "status": "Download complete",
-    "progressDetail": {
-      "hidecounts": true
-    },
-    "id": "layer1"
-  },
-  {
-    "status": "Downloading",
-    "progressDetail": {
-      "current": 1048576,
-      "total": 10485760
-    },
-    "progress": "[=====> ] 1.049MB/10.49MB",
-    "id": "layer2"
-  },
-  {
-    "status": "Downloading",
-    "progressDetail": {
-      "current": 10485760,
-      "total": 10485760
-    },
-    "progress": "[==================================================>] 10.49MB/10.49MB",
-    "id": "layer2"
-  },
-  {
-    "status": "Download complete",
-    "progressDetail": {
-      "hidecounts": true
-    },
-    "id": "layer2"
-  },
-  {
-    "status": "Extracting",
-    "progressDetail": {
-      "current": 1,
-      "units": "s"
-    },
-    "progress": "1 s",
-    "id": "layer1"
-  },
-  {
-    "status": "Extracting",
-    "progressDetail": {
-      "current": 5,
-      "units": "s"
-    },
-    "progress": "5 s",
-    "id": "layer1"
-  },
-  {
-    "status": "Pull complete",
-    "progressDetail": {
-      "hidecounts": true
-    },
-    "id": "layer1"
-  },
-  {
-    "status": "Extracting",
-    "progressDetail": {
-      "current": 1,
-      "units": "s"
-    },
-    "progress": "1 s",
-    "id": "layer2"
-  },
-  {
-    "status": "Extracting",
-    "progressDetail": {
-      "current": 3,
-      "units": "s"
-    },
-    "progress": "3 s",
-    "id": "layer2"
-  },
-  {
-    "status": "Pull complete",
-    "progressDetail": {
-      "hidecounts": true
-    },
-    "id": "layer2"
-  },
-  {
-    "status": "Digest: sha256:abc123"
-  },
-  {
-    "status": "Status: Downloaded newer image for test/image:2025.7.1"
-  }
-]
@@ -376,14 +376,3 @@ async def test_try_get_nvme_life_time_missing_percent_used(
         coresys.config.path_supervisor
     )
     assert lifetime is None
-
-
-async def test_try_get_nvme_life_time_dbus_not_connected(coresys: CoreSys):
-    """Test getting lifetime info from an NVMe when DBUS is not connected."""
-    # Set the dbus for udisks2 bus to be None, to make it forcibly disconnected.
-    coresys.dbus.udisks2.dbus = None
-
-    lifetime = await coresys.hardware.disk.get_disk_life_time(
-        coresys.config.path_supervisor
-    )
-    assert lifetime is None
@@ -7,8 +7,8 @@ import pytest

 from supervisor.coresys import CoreSys
 from supervisor.dbus.const import DeviceType
-from supervisor.host.configuration import Interface, VlanConfig, WifiConfig
-from supervisor.host.const import AuthMethod, InterfaceType, WifiMode
+from supervisor.host.configuration import Interface, VlanConfig
+from supervisor.host.const import InterfaceType

 from tests.dbus_service_mocks.base import DBusServiceMock
 from tests.dbus_service_mocks.network_connection_settings import (
@@ -291,237 +291,3 @@ async def test_equals_dbus_interface_eth0_10_real(

     # Test should pass with matching VLAN config
     assert test_vlan_interface.equals_dbus_interface(network_interface) is True
-
-
-def test_map_nm_wifi_non_wireless_interface():
-    """Test _map_nm_wifi returns None for non-wireless interface."""
-    # Mock non-wireless interface
-    mock_interface = Mock()
-    mock_interface.type = DeviceType.ETHERNET
-    mock_interface.settings = Mock()
-
-    result = Interface._map_nm_wifi(mock_interface)
-    assert result is None
-
-
-def test_map_nm_wifi_no_settings():
-    """Test _map_nm_wifi returns None when interface has no settings."""
-    # Mock wireless interface without settings
-    mock_interface = Mock()
-    mock_interface.type = DeviceType.WIRELESS
-    mock_interface.settings = None
-
-    result = Interface._map_nm_wifi(mock_interface)
-    assert result is None
-
-
-def test_map_nm_wifi_open_authentication():
-    """Test _map_nm_wifi with open authentication (no security)."""
-    # Mock wireless interface with open authentication
-    mock_interface = Mock()
-    mock_interface.type = DeviceType.WIRELESS
-    mock_interface.settings = Mock()
-    mock_interface.settings.wireless_security = None
-    mock_interface.settings.wireless = Mock()
-    mock_interface.settings.wireless.ssid = "TestSSID"
-    mock_interface.settings.wireless.mode = "infrastructure"
-    mock_interface.wireless = None
-    mock_interface.interface_name = "wlan0"
-
-    result = Interface._map_nm_wifi(mock_interface)
-
-    assert result is not None
-    assert isinstance(result, WifiConfig)
-    assert result.mode == WifiMode.INFRASTRUCTURE
-    assert result.ssid == "TestSSID"
-    assert result.auth == AuthMethod.OPEN
-    assert result.psk is None
-    assert result.signal is None
-
-
-def test_map_nm_wifi_wep_authentication():
-    """Test _map_nm_wifi with WEP authentication."""
-    # Mock wireless interface with WEP authentication
-    mock_interface = Mock()
-    mock_interface.type = DeviceType.WIRELESS
-    mock_interface.settings = Mock()
-    mock_interface.settings.wireless_security = Mock()
-    mock_interface.settings.wireless_security.key_mgmt = "none"
-    mock_interface.settings.wireless_security.psk = None
-    mock_interface.settings.wireless = Mock()
-    mock_interface.settings.wireless.ssid = "WEPNetwork"
-    mock_interface.settings.wireless.mode = "infrastructure"
-    mock_interface.wireless = None
-    mock_interface.interface_name = "wlan0"
-
-    result = Interface._map_nm_wifi(mock_interface)
-
-    assert result is not None
-    assert isinstance(result, WifiConfig)
-    assert result.auth == AuthMethod.WEP
-    assert result.ssid == "WEPNetwork"
-    assert result.psk is None
-
-
-def test_map_nm_wifi_wpa_psk_authentication():
-    """Test _map_nm_wifi with WPA-PSK authentication."""
-    # Mock wireless interface with WPA-PSK authentication
-    mock_interface = Mock()
-    mock_interface.type = DeviceType.WIRELESS
-    mock_interface.settings = Mock()
-    mock_interface.settings.wireless_security = Mock()
-    mock_interface.settings.wireless_security.key_mgmt = "wpa-psk"
-    mock_interface.settings.wireless_security.psk = "SecretPassword123"
-    mock_interface.settings.wireless = Mock()
-    mock_interface.settings.wireless.ssid = "SecureNetwork"
-    mock_interface.settings.wireless.mode = "infrastructure"
-    mock_interface.wireless = None
-    mock_interface.interface_name = "wlan0"
-
-    result = Interface._map_nm_wifi(mock_interface)
-
-    assert result is not None
-    assert isinstance(result, WifiConfig)
-    assert result.auth == AuthMethod.WPA_PSK
-    assert result.ssid == "SecureNetwork"
-    assert result.psk == "SecretPassword123"
-
-
-def test_map_nm_wifi_unsupported_authentication():
-    """Test _map_nm_wifi returns None for unsupported authentication method."""
-    # Mock wireless interface with unsupported authentication
-    mock_interface = Mock()
-    mock_interface.type = DeviceType.WIRELESS
-    mock_interface.settings = Mock()
-    mock_interface.settings.wireless_security = Mock()
-    mock_interface.settings.wireless_security.key_mgmt = "wpa-eap"  # Unsupported
-    mock_interface.settings.wireless = Mock()
-    mock_interface.settings.wireless.ssid = "EnterpriseNetwork"
-    mock_interface.interface_name = "wlan0"
-
-    result = Interface._map_nm_wifi(mock_interface)
-
-    assert result is None
-
-
-def test_map_nm_wifi_different_modes():
-    """Test _map_nm_wifi with different wifi modes."""
-    modes_to_test = [
-        ("infrastructure", WifiMode.INFRASTRUCTURE),
-        ("mesh", WifiMode.MESH),
-        ("adhoc", WifiMode.ADHOC),
-        ("ap", WifiMode.AP),
-    ]
-
-    for mode_value, expected_mode in modes_to_test:
-        mock_interface = Mock()
-        mock_interface.type = DeviceType.WIRELESS
-        mock_interface.settings = Mock()
-        mock_interface.settings.wireless_security = None
-        mock_interface.settings.wireless = Mock()
-        mock_interface.settings.wireless.ssid = "TestSSID"
-        mock_interface.settings.wireless.mode = mode_value
-        mock_interface.wireless = None
-        mock_interface.interface_name = "wlan0"
-
-        result = Interface._map_nm_wifi(mock_interface)
-
-        assert result is not None
-        assert result.mode == expected_mode
-
-
-def test_map_nm_wifi_with_signal():
-    """Test _map_nm_wifi with wireless signal strength."""
-    # Mock wireless interface with active connection and signal
-    mock_interface = Mock()
-    mock_interface.type = DeviceType.WIRELESS
-    mock_interface.settings = Mock()
-    mock_interface.settings.wireless_security = None
-    mock_interface.settings.wireless = Mock()
-    mock_interface.settings.wireless.ssid = "TestSSID"
-    mock_interface.settings.wireless.mode = "infrastructure"
-    mock_interface.wireless = Mock()
-    mock_interface.wireless.active = Mock()
-    mock_interface.wireless.active.strength = 75
-    mock_interface.interface_name = "wlan0"
-
-    result = Interface._map_nm_wifi(mock_interface)
-
-    assert result is not None
-    assert result.signal == 75
-
-
-def test_map_nm_wifi_without_signal():
-    """Test _map_nm_wifi without wireless signal (no active connection)."""
-    # Mock wireless interface without active connection
-    mock_interface = Mock()
-    mock_interface.type = DeviceType.WIRELESS
-    mock_interface.settings = Mock()
-    mock_interface.settings.wireless_security = None
-    mock_interface.settings.wireless = Mock()
-    mock_interface.settings.wireless.ssid = "TestSSID"
-    mock_interface.settings.wireless.mode = "infrastructure"
-    mock_interface.wireless = None
-    mock_interface.interface_name = "wlan0"
-
-    result = Interface._map_nm_wifi(mock_interface)
-
-    assert result is not None
-    assert result.signal is None
-
-
-def test_map_nm_wifi_wireless_no_active_ap():
-    """Test _map_nm_wifi with wireless object but no active access point."""
-    # Mock wireless interface with wireless object but no active AP
-    mock_interface = Mock()
-    mock_interface.type = DeviceType.WIRELESS
-    mock_interface.settings = Mock()
-    mock_interface.settings.wireless_security = None
-    mock_interface.settings.wireless = Mock()
-    mock_interface.settings.wireless.ssid = "TestSSID"
-    mock_interface.settings.wireless.mode = "infrastructure"
-    mock_interface.wireless = Mock()
-    mock_interface.wireless.active = None
-    mock_interface.interface_name = "wlan0"
-
-    result = Interface._map_nm_wifi(mock_interface)
-
-    assert result is not None
-    assert result.signal is None
-
-
-def test_map_nm_wifi_no_wireless_settings():
-    """Test _map_nm_wifi when wireless settings are missing."""
-    # Mock wireless interface without wireless settings
-    mock_interface = Mock()
-    mock_interface.type = DeviceType.WIRELESS
-    mock_interface.settings = Mock()
-    mock_interface.settings.wireless_security = None
-    mock_interface.settings.wireless = None
-    mock_interface.wireless = None
-    mock_interface.interface_name = "wlan0"
-
-    result = Interface._map_nm_wifi(mock_interface)
-
-    assert result is not None
-    assert result.ssid == ""
-    assert result.mode == WifiMode.INFRASTRUCTURE  # Default mode
-
-
-def test_map_nm_wifi_no_wireless_mode():
-    """Test _map_nm_wifi when wireless mode is not specified."""
-    # Mock wireless interface without mode specified
-    mock_interface = Mock()
-    mock_interface.type = DeviceType.WIRELESS
-    mock_interface.settings = Mock()
-    mock_interface.settings.wireless_security = None
-    mock_interface.settings.wireless = Mock()
-    mock_interface.settings.wireless.ssid = "TestSSID"
-    mock_interface.settings.wireless.mode = None
-    mock_interface.wireless = None
-    mock_interface.interface_name = "wlan0"
-
-    result = Interface._map_nm_wifi(mock_interface)
-
-    assert result is not None
-    assert result.mode == WifiMode.INFRASTRUCTURE  # Default mode
@@ -198,7 +198,7 @@ async def test_notify_on_change(coresys: CoreSys, ha_ws_client: AsyncMock):
                 "errors": [
                     {
                         "type": "HassioError",
-                        "message": "Unknown error, see Supervisor logs (check with 'ha supervisor logs')",
+                        "message": "Unknown error, see supervisor logs",
                         "stage": "test",
                     }
                 ],
@@ -226,7 +226,7 @@ async def test_notify_on_change(coresys: CoreSys, ha_ws_client: AsyncMock):
                 "errors": [
                     {
                         "type": "HassioError",
-                        "message": "Unknown error, see Supervisor logs (check with 'ha supervisor logs')",
+                        "message": "Unknown error, see supervisor logs",
                         "stage": "test",
                     }
                 ],
@@ -181,6 +181,7 @@ async def test_reload_updater_triggers_supervisor_update(
     """Test an updater reload triggers a supervisor update if there is one."""
     coresys.hardware.disk.get_disk_free_space = lambda x: 5000
     await coresys.core.set_state(CoreState.RUNNING)
+    coresys.security.content_trust = False

     with (
         patch.object(
@@ -17,6 +17,7 @@ from supervisor.exceptions import (
     AudioJobError,
     CliError,
     CliJobError,
+    CodeNotaryUntrusted,
     CoreDNSError,
     CoreDNSJobError,
     DockerError,
@@ -336,12 +337,14 @@ async def test_repair_failed(
         patch.object(
             DockerInterface, "arch", new=PropertyMock(return_value=CpuArch.AMD64)
         ),
-        patch.object(DockerInterface, "install", side_effect=DockerError),
+        patch(
+            "supervisor.security.module.cas_validate", side_effect=CodeNotaryUntrusted
+        ),
     ):
         await plugin.repair()

     capture_exception.assert_called_once()
-    assert check_exception_chain(capture_exception.call_args[0][0], DockerError)
+    assert check_exception_chain(capture_exception.call_args[0][0], CodeNotaryUntrusted)


 @pytest.mark.parametrize(
@@ -51,6 +51,7 @@ async def test_if_check_make_issue(coresys: CoreSys):
     """Test check for setup."""
     free_space = Issue(IssueType.FREE_SPACE, ContextType.SYSTEM)
    await coresys.core.set_state(CoreState.RUNNING)
+    coresys.security.content_trust = False

     with patch("shutil.disk_usage", return_value=(1, 1, 1)):
         await coresys.resolution.check.check_system()
@@ -62,6 +63,7 @@ async def test_if_check_cleanup_issue(coresys: CoreSys):
     """Test check for setup."""
     free_space = Issue(IssueType.FREE_SPACE, ContextType.SYSTEM)
     await coresys.core.set_state(CoreState.RUNNING)
+    coresys.security.content_trust = False

     with patch("shutil.disk_usage", return_value=(1, 1, 1)):
         await coresys.resolution.check.check_system()
96
tests/resolution/check/test_check_supervisor_trust.py
Normal file
@@ -0,0 +1,96 @@
+"""Test Check Supervisor trust."""
+
+# pylint: disable=import-error,protected-access
+from unittest.mock import AsyncMock, patch
+
+from supervisor.const import CoreState
+from supervisor.coresys import CoreSys
+from supervisor.exceptions import CodeNotaryError, CodeNotaryUntrusted
+from supervisor.resolution.checks.supervisor_trust import CheckSupervisorTrust
+from supervisor.resolution.const import IssueType, UnhealthyReason
+
+
+async def test_base(coresys: CoreSys):
+    """Test check basics."""
+    supervisor_trust = CheckSupervisorTrust(coresys)
+    assert supervisor_trust.slug == "supervisor_trust"
+    assert supervisor_trust.enabled
+
+
+async def test_check(coresys: CoreSys):
+    """Test check."""
+    supervisor_trust = CheckSupervisorTrust(coresys)
+    await coresys.core.set_state(CoreState.RUNNING)
+
+    assert len(coresys.resolution.issues) == 0
+
+    coresys.supervisor.check_trust = AsyncMock(side_effect=CodeNotaryError)
+    await supervisor_trust.run_check()
+    assert coresys.supervisor.check_trust.called
+
+    coresys.supervisor.check_trust = AsyncMock(return_value=None)
+    await supervisor_trust.run_check()
+    assert coresys.supervisor.check_trust.called
+
+    assert len(coresys.resolution.issues) == 0
+
+    coresys.supervisor.check_trust = AsyncMock(side_effect=CodeNotaryUntrusted)
+    await supervisor_trust.run_check()
+    assert coresys.supervisor.check_trust.called
+
+    assert len(coresys.resolution.issues) == 1
+    assert coresys.resolution.issues[-1].type == IssueType.TRUST
+
+    assert UnhealthyReason.UNTRUSTED in coresys.resolution.unhealthy
+
+
+async def test_approve(coresys: CoreSys):
+    """Test check."""
+    supervisor_trust = CheckSupervisorTrust(coresys)
+    await coresys.core.set_state(CoreState.RUNNING)
+
+    coresys.supervisor.check_trust = AsyncMock(side_effect=CodeNotaryUntrusted)
+    assert await supervisor_trust.approve_check()
+
+    coresys.supervisor.check_trust = AsyncMock(return_value=None)
+    assert not await supervisor_trust.approve_check()
+
+
+async def test_with_global_disable(coresys: CoreSys, caplog):
+    """Test when pwned is globally disabled."""
+    coresys.security.content_trust = False
+    supervisor_trust = CheckSupervisorTrust(coresys)
+    await coresys.core.set_state(CoreState.RUNNING)
+
+    assert len(coresys.resolution.issues) == 0
+    coresys.security.verify_own_content = AsyncMock(side_effect=CodeNotaryUntrusted)
+    await supervisor_trust.run_check()
+    assert not coresys.security.verify_own_content.called
+    assert (
+        "Skipping supervisor_trust, content_trust is globally disabled" in caplog.text
+    )
+
+
+async def test_did_run(coresys: CoreSys):
+    """Test that the check ran as expected."""
+    supervisor_trust = CheckSupervisorTrust(coresys)
+    should_run = supervisor_trust.states
+    should_not_run = [state for state in CoreState if state not in should_run]
+    assert len(should_run) != 0
+    assert len(should_not_run) != 0
+
+    with patch(
+        "supervisor.resolution.checks.supervisor_trust.CheckSupervisorTrust.run_check",
+        return_value=None,
+    ) as check:
+        for state in should_run:
+            await coresys.core.set_state(state)
+            await supervisor_trust()
+            check.assert_called_once()
+            check.reset_mock()
+
+        for state in should_not_run:
+            await coresys.core.set_state(state)
+            await supervisor_trust()
+            check.assert_not_called()
+            check.reset_mock()
46
tests/resolution/evaluation/test_evaluate_content_trust.py
Normal file
@@ -0,0 +1,46 @@
+"""Test evaluation base."""
+
+# pylint: disable=import-error,protected-access
+from unittest.mock import patch
+
+from supervisor.const import CoreState
+from supervisor.coresys import CoreSys
+from supervisor.resolution.evaluations.content_trust import EvaluateContentTrust
+
+
+async def test_evaluation(coresys: CoreSys):
+    """Test evaluation."""
+    job_conditions = EvaluateContentTrust(coresys)
+    await coresys.core.set_state(CoreState.SETUP)
+
+    await job_conditions()
+    assert job_conditions.reason not in coresys.resolution.unsupported
+
+    coresys.security.content_trust = False
+    await job_conditions()
+    assert job_conditions.reason in coresys.resolution.unsupported
+
+
+async def test_did_run(coresys: CoreSys):
+    """Test that the evaluation ran as expected."""
+    job_conditions = EvaluateContentTrust(coresys)
+    should_run = job_conditions.states
+    should_not_run = [state for state in CoreState if state not in should_run]
+    assert len(should_run) != 0
+    assert len(should_not_run) != 0
+
+    with patch(
+        "supervisor.resolution.evaluations.content_trust.EvaluateContentTrust.evaluate",
+        return_value=None,
+    ) as evaluate:
+        for state in should_run:
+            await coresys.core.set_state(state)
+            await job_conditions()
+            evaluate.assert_called_once()
+            evaluate.reset_mock()
+
+        for state in should_not_run:
+            await coresys.core.set_state(state)
+            await job_conditions()
+            evaluate.assert_not_called()
+            evaluate.reset_mock()
@@ -25,18 +25,13 @@ async def test_evaluation(coresys: CoreSys):
     assert docker_configuration.reason in coresys.resolution.unsupported
     coresys.resolution.unsupported.clear()
 
-    coresys.docker.info.storage = EXPECTED_STORAGE[0]
+    coresys.docker.info.storage = EXPECTED_STORAGE
     coresys.docker.info.logging = "unsupported"
     await docker_configuration()
     assert docker_configuration.reason in coresys.resolution.unsupported
     coresys.resolution.unsupported.clear()
 
-    coresys.docker.info.storage = "overlay2"
-    coresys.docker.info.logging = EXPECTED_LOGGING
-    await docker_configuration()
-    assert docker_configuration.reason not in coresys.resolution.unsupported
-
-    coresys.docker.info.storage = "overlayfs"
+    coresys.docker.info.storage = EXPECTED_STORAGE
     coresys.docker.info.logging = EXPECTED_LOGGING
     await docker_configuration()
     assert docker_configuration.reason not in coresys.resolution.unsupported
89  tests/resolution/evaluation/test_evaluate_source_mods.py  Normal file
@@ -0,0 +1,89 @@
"""Test evaluation base."""

# pylint: disable=import-error,protected-access
import errno
import os
from pathlib import Path
from unittest.mock import AsyncMock, patch

from supervisor.const import CoreState
from supervisor.coresys import CoreSys
from supervisor.exceptions import CodeNotaryError, CodeNotaryUntrusted
from supervisor.resolution.const import ContextType, IssueType
from supervisor.resolution.data import Issue
from supervisor.resolution.evaluations.source_mods import EvaluateSourceMods


async def test_evaluation(coresys: CoreSys):
    """Test evaluation."""
    with patch(
        "supervisor.resolution.evaluations.source_mods._SUPERVISOR_SOURCE",
        Path(f"{os.getcwd()}/supervisor"),
    ):
        sourcemods = EvaluateSourceMods(coresys)
        await coresys.core.set_state(CoreState.RUNNING)

        assert sourcemods.reason not in coresys.resolution.unsupported
        coresys.security.verify_own_content = AsyncMock(side_effect=CodeNotaryUntrusted)
        await sourcemods()
        assert sourcemods.reason in coresys.resolution.unsupported

        coresys.security.verify_own_content = AsyncMock(side_effect=CodeNotaryError)
        await sourcemods()
        assert sourcemods.reason not in coresys.resolution.unsupported

        coresys.security.verify_own_content = AsyncMock()
        await sourcemods()
        assert sourcemods.reason not in coresys.resolution.unsupported


async def test_did_run(coresys: CoreSys):
    """Test that the evaluation ran as expected."""
    sourcemods = EvaluateSourceMods(coresys)
    should_run = sourcemods.states
    should_not_run = [state for state in CoreState if state not in should_run]
    assert len(should_run) != 0
    assert len(should_not_run) != 0

    with patch(
        "supervisor.resolution.evaluations.source_mods.EvaluateSourceMods.evaluate",
        return_value=None,
    ) as evaluate:
        for state in should_run:
            await coresys.core.set_state(state)
            await sourcemods()
            evaluate.assert_called_once()
            evaluate.reset_mock()

        for state in should_not_run:
            await coresys.core.set_state(state)
            await sourcemods()
            evaluate.assert_not_called()
            evaluate.reset_mock()


async def test_evaluation_error(coresys: CoreSys):
    """Test error reading file during evaluation."""
    sourcemods = EvaluateSourceMods(coresys)
    await coresys.core.set_state(CoreState.RUNNING)
    corrupt_fs = Issue(IssueType.CORRUPT_FILESYSTEM, ContextType.SYSTEM)

    assert sourcemods.reason not in coresys.resolution.unsupported
    assert corrupt_fs not in coresys.resolution.issues

    with patch(
        "supervisor.utils.codenotary.dirhash",
        side_effect=(err := OSError()),
    ):
        err.errno = errno.EBUSY
        await sourcemods()
        assert sourcemods.reason not in coresys.resolution.unsupported
        assert corrupt_fs in coresys.resolution.issues
        assert coresys.core.healthy is True

        coresys.resolution.dismiss_issue(corrupt_fs)
        err.errno = errno.EBADMSG
        await sourcemods()
        assert sourcemods.reason not in coresys.resolution.unsupported
        assert corrupt_fs in coresys.resolution.issues
        assert coresys.core.healthy is False
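
test_evaluation_error above pins down how an OSError raised while hashing the source tree is handled: any such error produces a CORRUPT_FILESYSTEM issue, but only EBADMSG additionally marks the core unhealthy. A hedged sketch of that mapping (function name and return shape are illustrative, inferred from the assertions rather than taken from the supervisor source):

# Hedged sketch of the errno handling exercised by test_evaluation_error.
import errno


def classify_dirhash_oserror(err: OSError) -> tuple[bool, bool]:
    """Return (add_corrupt_fs_issue, still_healthy) for a failed source hash."""
    add_issue = True  # any OSError while hashing raises a corrupt-filesystem issue
    healthy = err.errno != errno.EBADMSG  # EBADMSG additionally flags unhealthy
    return add_issue, healthy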
69  tests/resolution/fixup/test_system_execute_integrity.py  Normal file
@@ -0,0 +1,69 @@
"""Test evaluation base."""

# pylint: disable=import-error,protected-access
from datetime import timedelta
from unittest.mock import AsyncMock

import time_machine

from supervisor.coresys import CoreSys
from supervisor.resolution.const import ContextType, IssueType, SuggestionType
from supervisor.resolution.data import Issue, Suggestion
from supervisor.resolution.fixups.system_execute_integrity import (
    FixupSystemExecuteIntegrity,
)
from supervisor.security.const import ContentTrustResult, IntegrityResult
from supervisor.utils.dt import utcnow


async def test_fixup(coresys: CoreSys, supervisor_internet: AsyncMock):
    """Test fixup."""
    system_execute_integrity = FixupSystemExecuteIntegrity(coresys)

    assert system_execute_integrity.auto

    coresys.resolution.add_suggestion(
        Suggestion(SuggestionType.EXECUTE_INTEGRITY, ContextType.SYSTEM)
    )
    coresys.resolution.add_issue(Issue(IssueType.TRUST, ContextType.SYSTEM))

    coresys.security.integrity_check = AsyncMock(
        return_value=IntegrityResult(
            ContentTrustResult.PASS,
            ContentTrustResult.PASS,
            {"audio": ContentTrustResult.PASS},
        )
    )

    await system_execute_integrity()

    assert coresys.security.integrity_check.called
    assert len(coresys.resolution.suggestions) == 0
    assert len(coresys.resolution.issues) == 0


async def test_fixup_error(coresys: CoreSys, supervisor_internet: AsyncMock):
    """Test fixup."""
    system_execute_integrity = FixupSystemExecuteIntegrity(coresys)

    assert system_execute_integrity.auto

    coresys.resolution.add_suggestion(
        Suggestion(SuggestionType.EXECUTE_INTEGRITY, ContextType.SYSTEM)
    )
    coresys.resolution.add_issue(Issue(IssueType.TRUST, ContextType.SYSTEM))

    coresys.security.integrity_check = AsyncMock(
        return_value=IntegrityResult(
            ContentTrustResult.FAILED,
            ContentTrustResult.PASS,
            {"audio": ContentTrustResult.PASS},
        )
    )

    with time_machine.travel(utcnow() + timedelta(hours=24)):
        await system_execute_integrity()

    assert coresys.security.integrity_check.called
    assert len(coresys.resolution.suggestions) == 1
    assert len(coresys.resolution.issues) == 1
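
Taken together, these two tests imply the fixup re-runs security.integrity_check and only clears the EXECUTE_INTEGRITY suggestion and TRUST issue when every result is PASS; if any component failed, both stay in place. A hedged sketch of that decision, inferred from the assertions rather than the FixupSystemExecuteIntegrity source:

# Hedged sketch of the pass/fail decision these tests imply.
from supervisor.security.const import ContentTrustResult


def integrity_passed(result) -> bool:
    """True when supervisor, core and all add-ons passed the integrity check."""
    checks = [result.supervisor, result.core, *result.addons.values()]
    return all(check == ContentTrustResult.PASS for check in checks)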
@@ -1,15 +1,21 @@
 """Test evaluations."""
 
-from unittest.mock import Mock
+from unittest.mock import Mock, patch
 
 from supervisor.const import CoreState
 from supervisor.coresys import CoreSys
+from supervisor.utils import check_exception_chain
 
 
 async def test_evaluate_system_error(coresys: CoreSys, capture_exception: Mock):
     """Test error while evaluating system."""
     await coresys.core.set_state(CoreState.RUNNING)
 
-    await coresys.resolution.evaluate.evaluate_system()
+    with patch(
+        "supervisor.resolution.evaluations.source_mods.calc_checksum_path_sourcecode",
+        side_effect=RuntimeError,
+    ):
+        await coresys.resolution.evaluate.evaluate_system()
 
-    capture_exception.assert_not_called()
+    capture_exception.assert_called_once()
+    assert check_exception_chain(capture_exception.call_args[0][0], RuntimeError)
127  tests/security/test_module.py  Normal file
@@ -0,0 +1,127 @@
"""Testing handling with Security."""

from unittest.mock import AsyncMock, patch

import pytest

from supervisor.coresys import CoreSys
from supervisor.exceptions import CodeNotaryError, CodeNotaryUntrusted
from supervisor.security.const import ContentTrustResult


async def test_content_trust(coresys: CoreSys):
    """Test Content-Trust."""

    with patch("supervisor.security.module.cas_validate", AsyncMock()) as cas_validate:
        await coresys.security.verify_content("test@mail.com", "ffffffffffffff")
        assert cas_validate.called
        cas_validate.assert_called_once_with("test@mail.com", "ffffffffffffff")

    with patch(
        "supervisor.security.module.cas_validate", AsyncMock()
    ) as cas_validate:
        await coresys.security.verify_own_content("ffffffffffffff")
        assert cas_validate.called
        cas_validate.assert_called_once_with(
            "notary@home-assistant.io", "ffffffffffffff"
        )


async def test_disabled_content_trust(coresys: CoreSys):
    """Test Content-Trust."""
    coresys.security.content_trust = False

    with patch("supervisor.security.module.cas_validate", AsyncMock()) as cas_validate:
        await coresys.security.verify_content("test@mail.com", "ffffffffffffff")
        assert not cas_validate.called

    with patch("supervisor.security.module.cas_validate", AsyncMock()) as cas_validate:
        await coresys.security.verify_own_content("ffffffffffffff")
        assert not cas_validate.called


async def test_force_content_trust(coresys: CoreSys):
    """Force Content-Trust tests."""

    with patch(
        "supervisor.security.module.cas_validate",
        AsyncMock(side_effect=CodeNotaryError),
    ) as cas_validate:
        await coresys.security.verify_content("test@mail.com", "ffffffffffffff")
        assert cas_validate.called
        cas_validate.assert_called_once_with("test@mail.com", "ffffffffffffff")

    coresys.security.force = True

    with (
        patch(
            "supervisor.security.module.cas_validate",
            AsyncMock(side_effect=CodeNotaryError),
        ) as cas_validate,
        pytest.raises(CodeNotaryError),
    ):
        await coresys.security.verify_content("test@mail.com", "ffffffffffffff")


async def test_integrity_check_disabled(coresys: CoreSys):
    """Test integrity check with disabled content trust."""
    coresys.security.content_trust = False

    result = await coresys.security.integrity_check.__wrapped__(coresys.security)

    assert result.core == ContentTrustResult.UNTESTED
    assert result.supervisor == ContentTrustResult.UNTESTED


async def test_integrity_check(coresys: CoreSys, install_addon_ssh):
    """Test integrity check with content trust."""
    coresys.homeassistant.core.check_trust = AsyncMock()
    coresys.supervisor.check_trust = AsyncMock()
    install_addon_ssh.check_trust = AsyncMock()
    install_addon_ssh.data["codenotary"] = "test@example.com"

    result = await coresys.security.integrity_check.__wrapped__(coresys.security)

    assert result.core == ContentTrustResult.PASS
    assert result.supervisor == ContentTrustResult.PASS
    assert result.addons[install_addon_ssh.slug] == ContentTrustResult.PASS


async def test_integrity_check_error(coresys: CoreSys, install_addon_ssh):
    """Test integrity check with content trust issues."""
    coresys.homeassistant.core.check_trust = AsyncMock(side_effect=CodeNotaryUntrusted)
    coresys.supervisor.check_trust = AsyncMock(side_effect=CodeNotaryUntrusted)
    install_addon_ssh.check_trust = AsyncMock(side_effect=CodeNotaryUntrusted)
    install_addon_ssh.data["codenotary"] = "test@example.com"

    result = await coresys.security.integrity_check.__wrapped__(coresys.security)

    assert result.core == ContentTrustResult.ERROR
    assert result.supervisor == ContentTrustResult.ERROR
    assert result.addons[install_addon_ssh.slug] == ContentTrustResult.ERROR


async def test_integrity_check_failed(coresys: CoreSys, install_addon_ssh):
    """Test integrity check with content trust failed."""
    coresys.homeassistant.core.check_trust = AsyncMock(side_effect=CodeNotaryError)
    coresys.supervisor.check_trust = AsyncMock(side_effect=CodeNotaryError)
    install_addon_ssh.check_trust = AsyncMock(side_effect=CodeNotaryError)
    install_addon_ssh.data["codenotary"] = "test@example.com"

    result = await coresys.security.integrity_check.__wrapped__(coresys.security)

    assert result.core == ContentTrustResult.FAILED
    assert result.supervisor == ContentTrustResult.FAILED
    assert result.addons[install_addon_ssh.slug] == ContentTrustResult.FAILED


async def test_integrity_check_addon(coresys: CoreSys, install_addon_ssh):
    """Test integrity check with content trust but no signed add-ons."""
    coresys.homeassistant.core.check_trust = AsyncMock()
    coresys.supervisor.check_trust = AsyncMock()

    result = await coresys.security.integrity_check.__wrapped__(coresys.security)

    assert result.core == ContentTrustResult.PASS
    assert result.supervisor == ContentTrustResult.PASS
    assert result.addons[install_addon_ssh.slug] == ContentTrustResult.UNTESTED
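
The first three tests pin down verify_content's gating: it never calls cas_validate when content trust is globally disabled, and it swallows CodeNotaryError unless force mode is enabled. A hedged sketch of that control flow, inferred from the tests rather than copied from supervisor.security.module (the helper name is illustrative):

# Hedged sketch of the gating behaviour the tests above pin down.
from supervisor.exceptions import CodeNotaryError
from supervisor.utils.codenotary import cas_validate


async def sketch_verify_content(security, signer: str, checksum: str) -> None:
    """Validate content unless trust checks are disabled."""
    if not security.content_trust:
        return  # globally disabled -> cas_validate is never called
    try:
        await cas_validate(signer, checksum)
    except CodeNotaryError:
        # Backend/lookup errors are swallowed unless force mode is enabled.
        if security.force:
            raise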
@@ -86,9 +86,10 @@ async def test_os_update_path(
     """Test OS upgrade path across major versions."""
     coresys.os._board = "rpi4"  # pylint: disable=protected-access
     coresys.os._version = AwesomeVersion(version)  # pylint: disable=protected-access
-    await coresys.updater.fetch_data()
+    with patch.object(type(coresys.security), "verify_own_content"):
+        await coresys.updater.fetch_data()
 
     assert coresys.updater.version_hassos == AwesomeVersion(expected)
 
 
 @pytest.mark.usefixtures("no_job_throttle")
@@ -104,6 +105,7 @@ async def test_delayed_fetch_for_connectivity(
         load_binary_fixture("version_stable.json")
     )
     coresys.websession.head = AsyncMock()
+    coresys.security.verify_own_content = AsyncMock()
 
     # Network connectivity change causes a series of async tasks to eventually do a version fetch
     # Rather than use some kind of sleep loop, set up listener for start of fetch data job
128  tests/utils/test_codenotary.py  Normal file
@@ -0,0 +1,128 @@
"""Test CodeNotary."""

from __future__ import annotations

from dataclasses import dataclass
from unittest.mock import AsyncMock, Mock, patch

import pytest

from supervisor.exceptions import (
    CodeNotaryBackendError,
    CodeNotaryError,
    CodeNotaryUntrusted,
)
from supervisor.utils.codenotary import calc_checksum, cas_validate

pytest.skip("code notary has been disabled due to issues", allow_module_level=True)


@dataclass
class SubprocessResponse:
    """Class for specifying subprocess exec response."""

    returncode: int = 0
    data: str = ""
    error: str | None = None
    exception: Exception | None = None


@pytest.fixture(name="subprocess_exec")
def fixture_subprocess_exec(request):
    """Mock subprocess exec with specific return."""
    response = request.param
    if response.exception:
        communicate_return = AsyncMock(side_effect=response.exception)
    else:
        communicate_return = AsyncMock(return_value=(response.data, response.error))

    exec_return = Mock(returncode=response.returncode, communicate=communicate_return)

    with patch(
        "supervisor.utils.codenotary.asyncio.create_subprocess_exec",
        return_value=exec_return,
    ) as subprocess_exec:
        yield subprocess_exec


def test_checksum_calc():
    """Calc checksum as test."""
    assert calc_checksum("test") == calc_checksum(b"test")
    assert (
        calc_checksum("test")
        == "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"
    )


async def test_valid_checksum():
    """Test a valid authorization."""
    await cas_validate(
        "notary@home-assistant.io",
        "4434a33ff9c695e870bc5bbe04230ea3361ecf4c129eb06133dd1373975a43f0",
    )


async def test_invalid_checksum():
    """Test an invalid authorization."""
    with pytest.raises(CodeNotaryUntrusted):
        await cas_validate(
            "notary@home-assistant.io",
            "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
        )


@pytest.mark.parametrize(
    "subprocess_exec",
    [SubprocessResponse(returncode=1, error=b"x is not notarized")],
)
async def test_not_notarized_error(subprocess_exec):
    """Test received a not notarized error response from command."""
    with pytest.raises(CodeNotaryUntrusted):
        await cas_validate(
            "notary@home-assistant.io",
            "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
        )


@pytest.mark.parametrize(
    "subprocess_exec",
    [
        SubprocessResponse(returncode=1, error=b"test"),
        SubprocessResponse(returncode=0, data='{"error":"asn1: structure error"}'),
        SubprocessResponse(returncode=1, error="test".encode("utf-16")),
    ],
    indirect=True,
)
async def test_cas_backend_error(subprocess_exec):
    """Test backend error executing cas command."""
    with pytest.raises(CodeNotaryBackendError):
        await cas_validate(
            "notary@home-assistant.io",
            "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
        )


@pytest.mark.parametrize(
    "subprocess_exec",
    [SubprocessResponse(returncode=0, data='{"status":1}')],
    indirect=True,
)
async def test_cas_notarized_untrusted(subprocess_exec):
    """Test cas found notarized but untrusted content."""
    with pytest.raises(CodeNotaryUntrusted):
        await cas_validate(
            "notary@home-assistant.io",
            "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
        )


@pytest.mark.parametrize(
    "subprocess_exec", [SubprocessResponse(exception=OSError())], indirect=True
)
async def test_cas_exec_os_error(subprocess_exec):
    """Test os error attempting to execute cas command."""
    with pytest.raises(CodeNotaryError):
        await cas_validate(
            "notary@home-assistant.io",
            "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
        )
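
The subprocess_exec fixture above is driven through pytest's indirect parametrization: each SubprocessResponse in the parametrize list becomes request.param inside the fixture, which then patches create_subprocess_exec accordingly. A minimal standalone illustration of the same pattern, independent of the supervisor code base:

# Generic example of an indirect-parametrized fixture (illustrative names).
import pytest


@pytest.fixture(name="fake_exit_code")
def fixture_fake_exit_code(request):
    """Expose the parametrized value to the test."""
    return request.param


@pytest.mark.parametrize("fake_exit_code", [0, 1], indirect=True)
def test_exit_code_passthrough(fake_exit_code):
    # Each parametrize entry reaches the test via the fixture, not directly.
    assert fake_exit_code in (0, 1)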