mirror of https://github.com/home-assistant/supervisor.git
synced 2025-10-25 11:39:33 +00:00

Compare commits: remove-cod ... remove-dep (7 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 66a3766b5a | |
| | 53a8044aff | |
| | c71553f37d | |
| | c1eb97d8ab | |
| | 190b734332 | |
| | 7031a58083 | |
| | 3c0e62f6ba | |
@@ -8,7 +8,7 @@ pytest-asyncio==0.25.2
 pytest-cov==7.0.0
 pytest-timeout==2.4.0
 pytest==8.4.2
-ruff==0.13.3
+ruff==0.14.0
 time-machine==2.19.0
 types-docker==7.1.0.20250916
 types-pyyaml==6.0.12.20250915
@@ -226,6 +226,7 @@ class Addon(AddonModel):
         )

         await self._check_ingress_port()
+
         default_image = self._image(self.data)
         try:
             await self.instance.attach(version=self.version)
@@ -774,7 +775,6 @@ class Addon(AddonModel):
             raise AddonsError("Missing from store, cannot install!")

         await self.sys_addons.data.install(self.addon_store)
-        await self.load()

         def setup_data():
             if not self.path_data.is_dir():
@@ -797,6 +797,9 @@ class Addon(AddonModel):
             await self.sys_addons.data.uninstall(self)
             raise AddonsError() from err

+        # Finish initialization and set up listeners
+        await self.load()
+
         # Add to addon manager
         self.sys_addons.local[self.slug] = self

@@ -1510,6 +1513,13 @@ class Addon(AddonModel):
         _LOGGER.info("Finished restore for add-on %s", self.slug)
         return wait_for_start

+    def check_trust(self) -> Awaitable[None]:
+        """Calculate Addon docker content trust.
+
+        Return Coroutine.
+        """
+        return self.instance.check_trust()
+
     @Job(
         name="addon_restart_after_problem",
         throttle_period=WATCHDOG_THROTTLE_PERIOD,
@@ -9,8 +9,6 @@ from typing import Self, Union

 from attr import evolve

-from supervisor.jobs.const import JobConcurrency
-
 from ..const import AddonBoot, AddonStartup, AddonState
 from ..coresys import CoreSys, CoreSysAttributes
 from ..exceptions import (
@@ -21,6 +19,8 @@ from ..exceptions import (
     DockerError,
     HassioError,
 )
+from ..jobs import ChildJobSyncFilter
+from ..jobs.const import JobConcurrency
 from ..jobs.decorator import Job, JobCondition
 from ..resolution.const import ContextType, IssueType, SuggestionType
 from ..store.addon import AddonStore
@@ -182,6 +182,9 @@ class AddonManager(CoreSysAttributes):
         conditions=ADDON_UPDATE_CONDITIONS,
         on_condition=AddonsJobError,
         concurrency=JobConcurrency.QUEUE,
+        child_job_syncs=[
+            ChildJobSyncFilter("docker_interface_install", progress_allocation=1.0)
+        ],
     )
     async def install(
         self, slug: str, *, validation_complete: asyncio.Event | None = None
@@ -229,6 +232,13 @@ class AddonManager(CoreSysAttributes):
         name="addon_manager_update",
         conditions=ADDON_UPDATE_CONDITIONS,
         on_condition=AddonsJobError,
+        # We assume for now the docker image pull is 100% of this task for progress
+        # allocation. But from a user perspective that isn't true. Other steps
+        # that take time which is not accounted for in progress include:
+        # partial backup, image cleanup, apparmor update, and addon restart
+        child_job_syncs=[
+            ChildJobSyncFilter("docker_interface_install", progress_allocation=1.0)
+        ],
     )
     async def update(
         self,
@@ -271,7 +281,10 @@ class AddonManager(CoreSysAttributes):
             addons=[addon.slug],
         )

-        return await addon.update()
+        task = await addon.update()
+
+        _LOGGER.info("Add-on '%s' successfully updated", slug)
+        return task

     @Job(
         name="addon_manager_rebuild",
@@ -72,6 +72,7 @@ from ..const import (
     ATTR_TYPE,
     ATTR_UART,
     ATTR_UDEV,
+    ATTR_ULIMITS,
     ATTR_URL,
     ATTR_USB,
     ATTR_VERSION,
@@ -462,6 +463,11 @@ class AddonModel(JobGroup, ABC):
         """Return True if the add-on have his own udev."""
         return self.data[ATTR_UDEV]

+    @property
+    def ulimits(self) -> dict[str, Any]:
+        """Return ulimits configuration."""
+        return self.data[ATTR_ULIMITS]
+
     @property
     def with_kernel_modules(self) -> bool:
         """Return True if the add-on access to kernel modules."""
@@ -88,6 +88,7 @@ from ..const import (
     ATTR_TYPE,
     ATTR_UART,
     ATTR_UDEV,
+    ATTR_ULIMITS,
     ATTR_URL,
     ATTR_USB,
     ATTR_USER,
@@ -206,12 +207,6 @@ def _warn_addon_config(config: dict[str, Any]):
             name,
         )

-    if ATTR_CODENOTARY in config:
-        _LOGGER.warning(
-            "Add-on '%s' uses deprecated 'codenotary' field in config. This field is no longer used and will be ignored. Please report this to the maintainer.",
-            name,
-        )
-
     return config


@@ -429,6 +424,20 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
             False,
         ),
         vol.Optional(ATTR_IMAGE): docker_image,
+        vol.Optional(ATTR_ULIMITS, default=dict): vol.Any(
+            {str: vol.Coerce(int)},  # Simple format: {name: limit}
+            {
+                str: vol.Any(
+                    vol.Coerce(int),  # Simple format for individual entries
+                    vol.Schema(
+                        {  # Detailed format for individual entries
+                            vol.Required("soft"): vol.Coerce(int),
+                            vol.Required("hard"): vol.Coerce(int),
+                        }
+                    ),
+                )
+            },
+        ),
         vol.Optional(ATTR_TIMEOUT, default=10): vol.All(
             vol.Coerce(int), vol.Range(min=10, max=300)
         ),
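For illustration only (not part of the diff): a config fragment that passes the new ulimits schema can mix the simple and detailed forms, mirroring the validation tests added further below. A hypothetical snippet, using the same `vd` import the tests use:

```python
# Hypothetical add-on config fragment, validated by _SCHEMA_ADDON_CONFIG above.
config["ulimits"] = {
    "nofile": 65535,                                    # simple form: soft == hard
    "memlock": {"soft": 67108864, "hard": 134217728},   # detailed form: explicit soft/hard
}
valid_config = vd.SCHEMA_ADDON_CONFIG(config)
```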
@@ -1,14 +1,19 @@
 """Init file for Supervisor Security RESTful API."""

+import asyncio
+import logging
 from typing import Any

 from aiohttp import web
+import attr
 import voluptuous as vol

 from ..const import ATTR_CONTENT_TRUST, ATTR_FORCE_SECURITY, ATTR_PWNED
 from ..coresys import CoreSysAttributes
 from .utils import api_process, api_validate

+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
 # pylint: disable=no-value-for-parameter
 SCHEMA_OPTIONS = vol.Schema(
     {
@@ -49,9 +54,6 @@ class APISecurity(CoreSysAttributes):

     @api_process
     async def integrity_check(self, request: web.Request) -> dict[str, Any]:
-        """Run backend integrity check.
-
-        CodeNotary integrity checking has been removed. This endpoint now returns
-        an error indicating the feature is currently non-functional.
-        """
-        return {"error": "No integrity checking available"}
+        """Run backend integrity check."""
+        result = await asyncio.shield(self.sys_security.integrity_check())
+        return attr.asdict(result)
@@ -108,8 +108,7 @@ class APISupervisor(CoreSysAttributes):
             ATTR_AUTO_UPDATE: self.sys_updater.auto_update,
             ATTR_DETECT_BLOCKING_IO: BlockBusterManager.is_enabled(),
             ATTR_COUNTRY: self.sys_config.country,
-            # Depricated
-            ATTR_WAIT_BOOT: self.sys_config.wait_boot,
+            # Deprecated
             ATTR_ADDONS: [
                 {
                     ATTR_NAME: addon.name,
@@ -123,10 +122,6 @@ class APISupervisor(CoreSysAttributes):
                 }
                 for addon in self.sys_addons.local.values()
             ],
-            ATTR_ADDONS_REPOSITORIES: [
-                {ATTR_NAME: store.name, ATTR_SLUG: store.slug}
-                for store in self.sys_store.all
-            ],
         }

     @api_process
@@ -182,20 +177,10 @@ class APISupervisor(CoreSysAttributes):
             self.sys_config.detect_blocking_io = False
             BlockBusterManager.deactivate()

-        # Deprecated
-        if ATTR_WAIT_BOOT in body:
-            self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]
-
         # Save changes before processing addons in case of errors
         await self.sys_updater.save_data()
         await self.sys_config.save_data()

-        # Remove: 2022.9
-        if ATTR_ADDONS_REPOSITORIES in body:
-            await asyncio.shield(
-                self.sys_store.update_repositories(set(body[ATTR_ADDONS_REPOSITORIES]))
-            )
-
         await self.sys_resolution.evaluate.evaluate_system()

     @api_process
@@ -348,6 +348,7 @@ ATTR_TRANSLATIONS = "translations"
 ATTR_TYPE = "type"
 ATTR_UART = "uart"
 ATTR_UDEV = "udev"
+ATTR_ULIMITS = "ulimits"
 ATTR_UNHEALTHY = "unhealthy"
 ATTR_UNSAVED = "unsaved"
 ATTR_UNSUPPORTED = "unsupported"
@@ -318,7 +318,18 @@ class DockerAddon(DockerInterface):
             mem = 128 * 1024 * 1024
             limits.append(docker.types.Ulimit(name="memlock", soft=mem, hard=mem))

-        # Return None if no capabilities is present
+        # Add configurable ulimits from add-on config
+        for name, config in self.addon.ulimits.items():
+            if isinstance(config, int):
+                # Simple format: both soft and hard limits are the same
+                limits.append(docker.types.Ulimit(name=name, soft=config, hard=config))
+            elif isinstance(config, dict):
+                # Detailed format: both soft and hard limits are mandatory
+                soft = config["soft"]
+                hard = config["hard"]
+                limits.append(docker.types.Ulimit(name=name, soft=soft, hard=hard))
+
+        # Return None if no ulimits are present
         if limits:
             return limits
         return None
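A minimal standalone sketch of what the loop above produces, assuming the docker SDK is available outside the Supervisor; it shows both config forms ending up as docker.types.Ulimit objects that the SDK accepts:

```python
import docker.types

# Hypothetical ulimits config as it would come from an add-on
ulimits_config = {"nofile": 65535, "memlock": {"soft": 67108864, "hard": 134217728}}

limits = []
for name, config in ulimits_config.items():
    if isinstance(config, int):
        # simple form: the same value is used for soft and hard
        limits.append(docker.types.Ulimit(name=name, soft=config, hard=config))
    elif isinstance(config, dict):
        # detailed form: explicit soft and hard values
        limits.append(docker.types.Ulimit(name=name, soft=config["soft"], hard=config["hard"]))

# limits can then be passed to a container run call, e.g. client.containers.run(..., ulimits=limits)
```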
@@ -835,6 +846,16 @@ class DockerAddon(DockerInterface):
         ):
             self.sys_resolution.dismiss_issue(self.addon.device_access_missing_issue)

+    async def _validate_trust(self, image_id: str) -> None:
+        """Validate trust of content."""
+        if not self.addon.signed:
+            return
+
+        checksum = image_id.partition(":")[2]
+        return await self.sys_security.verify_content(
+            cast(str, self.addon.codenotary), checksum
+        )
+
     @Job(
         name="docker_addon_hardware_events",
         conditions=[JobCondition.OS_AGENT],
@@ -5,7 +5,7 @@ from ipaddress import IPv4Address
 import logging
 import re

-from awesomeversion import AwesomeVersion
+from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
 from docker.types import Mount

 from ..const import LABEL_MACHINE
@@ -244,3 +244,13 @@ class DockerHomeAssistant(DockerInterface):
             self.image,
             self.sys_homeassistant.version,
         )
+
+    async def _validate_trust(self, image_id: str) -> None:
+        """Validate trust of content."""
+        try:
+            if self.version in {None, LANDINGPAGE} or self.version < _VERIFY_TRUST:
+                return
+        except AwesomeVersionCompareException:
+            return
+
+        await super()._validate_trust(image_id)
@@ -31,12 +31,15 @@ from ..const import (
 )
 from ..coresys import CoreSys
 from ..exceptions import (
+    CodeNotaryError,
+    CodeNotaryUntrusted,
     DockerAPIError,
     DockerError,
     DockerJobError,
     DockerLogOutOfOrder,
     DockerNotFound,
     DockerRequestError,
+    DockerTrustError,
 )
 from ..jobs import SupervisorJob
 from ..jobs.const import JOB_GROUP_DOCKER_INTERFACE, JobConcurrency
@@ -217,10 +220,12 @@ class DockerInterface(JobGroup, ABC):

         await self.sys_run_in_executor(self.sys_docker.docker.login, **credentials)

-    def _process_pull_image_log(self, job_id: str, reference: PullLogEntry) -> None:
+    def _process_pull_image_log(
+        self, install_job_id: str, reference: PullLogEntry
+    ) -> None:
         """Process events fired from a docker while pulling an image, filtered to a given job id."""
         if (
-            reference.job_id != job_id
+            reference.job_id != install_job_id
             or not reference.id
             or not reference.status
             or not (stage := PullImageLayerStage.from_status(reference.status))
@@ -234,21 +239,22 @@ class DockerInterface(JobGroup, ABC):
                 name="Pulling container image layer",
                 initial_stage=stage.status,
                 reference=reference.id,
-                parent_id=job_id,
+                parent_id=install_job_id,
+                internal=True,
             )
             job.done = False
             return

         # Find our sub job to update details of
         for j in self.sys_jobs.jobs:
-            if j.parent_id == job_id and j.reference == reference.id:
+            if j.parent_id == install_job_id and j.reference == reference.id:
                 job = j
                 break

         # This likely only occurs if the logs came in out of sync and we got progress before the Pulling FS Layer one
         if not job:
             raise DockerLogOutOfOrder(
-                f"Received pull image log with status {reference.status} for image id {reference.id} and parent job {job_id} but could not find a matching job, skipping",
+                f"Received pull image log with status {reference.status} for image id {reference.id} and parent job {install_job_id} but could not find a matching job, skipping",
                 _LOGGER.debug,
             )
@@ -322,10 +328,56 @@ class DockerInterface(JobGroup, ABC):
             else job.extra,
         )

+        # Once we have received a progress update for every child job, start to set status of the main one
+        install_job = self.sys_jobs.get_job(install_job_id)
+        layer_jobs = [
+            job
+            for job in self.sys_jobs.jobs
+            if job.parent_id == install_job.uuid
+            and job.name == "Pulling container image layer"
+        ]
+
+        # First set the total bytes to be downloaded/extracted on the main job
+        if not install_job.extra:
+            total = 0
+            for job in layer_jobs:
+                if not job.extra:
+                    return
+                total += job.extra["total"]
+            install_job.extra = {"total": total}
+        else:
+            total = install_job.extra["total"]
+
+        # Then determine total progress based on progress of each sub-job, factoring in size of each compared to total
+        progress = 0.0
+        stage = PullImageLayerStage.PULL_COMPLETE
+        for job in layer_jobs:
+            if not job.extra:
+                return
+            progress += job.progress * (job.extra["total"] / total)
+            job_stage = PullImageLayerStage.from_status(cast(str, job.stage))
+
+            if job_stage < PullImageLayerStage.EXTRACTING:
+                stage = PullImageLayerStage.DOWNLOADING
+            elif (
+                stage == PullImageLayerStage.PULL_COMPLETE
+                and job_stage < PullImageLayerStage.PULL_COMPLETE
+            ):
+                stage = PullImageLayerStage.EXTRACTING
+
+        # Ensure progress is 100 at this point to prevent float drift
+        if stage == PullImageLayerStage.PULL_COMPLETE:
+            progress = 100
+
+        # To reduce noise, limit updates to when result has changed by an entire percent or when stage changed
+        if stage != install_job.stage or progress >= install_job.progress + 1:
+            install_job.update(stage=stage.status, progress=progress)
+
     @Job(
         name="docker_interface_install",
         on_condition=DockerJobError,
         concurrency=JobConcurrency.GROUP_REJECT,
+        internal=True,
     )
     async def install(
         self,
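A worked illustration of the size-weighted aggregation above (made-up numbers, not from the diff): each layer contributes its own progress scaled by its share of the total bytes, so a 60 MB layer at 50% and a 40 MB layer at 100% yield an overall 70%:

```python
# Hypothetical layer jobs as (total_bytes, progress_percent)
layers = [(60_000_000, 50.0), (40_000_000, 100.0)]

total = sum(size for size, _ in layers)
progress = sum(pct * (size / total) for size, pct in layers)
assert progress == 70.0  # value reported on the parent install job
```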
@@ -348,11 +400,11 @@ class DockerInterface(JobGroup, ABC):
         # Try login if we have defined credentials
         await self._docker_login(image)

-        job_id = self.sys_jobs.current.uuid
+        curr_job_id = self.sys_jobs.current.uuid

         async def process_pull_image_log(reference: PullLogEntry) -> None:
             try:
-                self._process_pull_image_log(job_id, reference)
+                self._process_pull_image_log(curr_job_id, reference)
             except DockerLogOutOfOrder as err:
                 # Send all these to sentry. Missing a few progress updates
                 # shouldn't matter to users but matters to us
@@ -371,7 +423,17 @@ class DockerInterface(JobGroup, ABC):
                 platform=MAP_ARCH[image_arch],
             )

-            # CodeNotary content trust validation has been removed
+            # Validate content
+            try:
+                await self._validate_trust(cast(str, docker_image.id))
+            except CodeNotaryError:
+                with suppress(docker.errors.DockerException):
+                    await self.sys_run_in_executor(
+                        self.sys_docker.images.remove,
+                        image=f"{image}:{version!s}",
+                        force=True,
+                    )
+                raise

             # Tag latest
             if latest:
@@ -398,6 +460,16 @@ class DockerInterface(JobGroup, ABC):
             raise DockerError(
                 f"Unknown error with {image}:{version!s} -> {err!s}", _LOGGER.error
             ) from err
+        except CodeNotaryUntrusted as err:
+            raise DockerTrustError(
+                f"Pulled image {image}:{version!s} failed on content-trust verification!",
+                _LOGGER.critical,
+            ) from err
+        except CodeNotaryError as err:
+            raise DockerTrustError(
+                f"Error happened on Content-Trust check for {image}:{version!s}: {err!s}",
+                _LOGGER.error,
+            ) from err
         finally:
             if listener:
                 self.sys_bus.remove_listener(listener)
@@ -606,7 +678,10 @@ class DockerInterface(JobGroup, ABC):
         concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def update(
-        self, version: AwesomeVersion, image: str | None = None, latest: bool = False
+        self,
+        version: AwesomeVersion,
+        image: str | None = None,
+        latest: bool = False,
     ) -> None:
         """Update a Docker image."""
         image = image or self.image
@@ -732,3 +807,24 @@ class DockerInterface(JobGroup, ABC):
         return self.sys_run_in_executor(
             self.sys_docker.container_run_inside, self.name, command
         )
+
+    async def _validate_trust(self, image_id: str) -> None:
+        """Validate trust of content."""
+        checksum = image_id.partition(":")[2]
+        return await self.sys_security.verify_own_content(checksum)
+
+    @Job(
+        name="docker_interface_check_trust",
+        on_condition=DockerJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
+    )
+    async def check_trust(self) -> None:
+        """Check trust of exists Docker image."""
+        try:
+            image = await self.sys_run_in_executor(
+                self.sys_docker.images.get, f"{self.image}:{self.version!s}"
+            )
+        except (docker.errors.DockerException, requests.RequestException):
+            return
+
+        await self._validate_trust(cast(str, image.id))
@@ -577,6 +577,21 @@ class PwnedConnectivityError(PwnedError):
     """Connectivity errors while checking pwned passwords."""


+# util/codenotary
+
+
+class CodeNotaryError(HassioError):
+    """Error general with CodeNotary."""
+
+
+class CodeNotaryUntrusted(CodeNotaryError):
+    """Error on untrusted content."""
+
+
+class CodeNotaryBackendError(CodeNotaryError):
+    """CodeNotary backend error happening."""
+
+
 # util/whoami


@@ -28,6 +28,7 @@ from ..exceptions import (
     HomeAssistantUpdateError,
     JobException,
 )
+from ..jobs import ChildJobSyncFilter
 from ..jobs.const import JOB_GROUP_HOME_ASSISTANT_CORE, JobConcurrency, JobThrottle
 from ..jobs.decorator import Job, JobCondition
 from ..jobs.job_group import JobGroup
@@ -224,6 +225,13 @@ class HomeAssistantCore(JobGroup):
         ],
         on_condition=HomeAssistantJobError,
         concurrency=JobConcurrency.GROUP_REJECT,
+        # We assume for now the docker image pull is 100% of this task. But from
+        # a user perspective that isn't true. Other steps that take time which
+        # is not accounted for in progress include: partial backup, image
+        # cleanup, and Home Assistant restart
+        child_job_syncs=[
+            ChildJobSyncFilter("docker_interface_install", progress_allocation=1.0)
+        ],
     )
     async def update(
         self,
@@ -420,6 +428,13 @@ class HomeAssistantCore(JobGroup):
         """
         return self.instance.logs()

+    def check_trust(self) -> Awaitable[None]:
+        """Calculate HomeAssistant docker content trust.
+
+        Return Coroutine.
+        """
+        return self.instance.check_trust()
+
     async def stats(self) -> DockerStats:
         """Return stats of Home Assistant."""
         try:
@@ -282,8 +282,10 @@ class JobManager(FileConfiguration, CoreSysAttributes):
                 # reporting shouldn't raise and break the active job
                 continue

-            progress = sync.starting_progress + (
-                sync.progress_allocation * job_data["progress"]
+            progress = min(
+                100,
+                sync.starting_progress
+                + (sync.progress_allocation * job_data["progress"]),
             )
             # Using max would always trigger on change even if progress was unchanged
             # pylint: disable-next=R1731
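As a worked example of the clamped formula above (hypothetical numbers): with starting_progress=30, progress_allocation=0.7 and a child job reporting 50%, the parent job is set to min(100, 30 + 0.7 * 50) = 65; once the child reaches 100%, the parent lands exactly on min(100, 30 + 70) = 100 instead of drifting past it.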
@@ -76,6 +76,13 @@ class PluginBase(ABC, FileConfiguration, CoreSysAttributes):
         """Return True if a task is in progress."""
         return self.instance.in_progress

+    def check_trust(self) -> Awaitable[None]:
+        """Calculate plugin docker content trust.
+
+        Return Coroutine.
+        """
+        return self.instance.check_trust()
+
     def logs(self) -> Awaitable[bytes]:
         """Get docker plugin logs.

supervisor/resolution/checks/supervisor_trust.py (new file, 59 lines)
@@ -0,0 +1,59 @@
"""Helpers to check supervisor trust."""

import logging

from ...const import CoreState
from ...coresys import CoreSys
from ...exceptions import CodeNotaryError, CodeNotaryUntrusted
from ..const import ContextType, IssueType, UnhealthyReason
from .base import CheckBase

_LOGGER: logging.Logger = logging.getLogger(__name__)


def setup(coresys: CoreSys) -> CheckBase:
    """Check setup function."""
    return CheckSupervisorTrust(coresys)


class CheckSupervisorTrust(CheckBase):
    """CheckSystemTrust class for check."""

    async def run_check(self) -> None:
        """Run check if not affected by issue."""
        if not self.sys_security.content_trust:
            _LOGGER.warning(
                "Skipping %s, content_trust is globally disabled", self.slug
            )
            return

        try:
            await self.sys_supervisor.check_trust()
        except CodeNotaryUntrusted:
            self.sys_resolution.add_unhealthy_reason(UnhealthyReason.UNTRUSTED)
            self.sys_resolution.create_issue(IssueType.TRUST, ContextType.SUPERVISOR)
        except CodeNotaryError:
            pass

    async def approve_check(self, reference: str | None = None) -> bool:
        """Approve check if it is affected by issue."""
        try:
            await self.sys_supervisor.check_trust()
        except CodeNotaryError:
            return True
        return False

    @property
    def issue(self) -> IssueType:
        """Return a IssueType enum."""
        return IssueType.TRUST

    @property
    def context(self) -> ContextType:
        """Return a ContextType enum."""
        return ContextType.SUPERVISOR

    @property
    def states(self) -> list[CoreState]:
        """Return a list of valid states when this check can run."""
        return [CoreState.RUNNING, CoreState.STARTUP]
@@ -1,11 +1,14 @@
 """Evaluation class for Content Trust."""

+import errno
 import logging
 from pathlib import Path

 from ...const import CoreState
 from ...coresys import CoreSys
-from ..const import UnsupportedReason
+from ...exceptions import CodeNotaryError, CodeNotaryUntrusted
+from ...utils.codenotary import calc_checksum_path_sourcecode
+from ..const import ContextType, IssueType, UnhealthyReason, UnsupportedReason
 from .base import EvaluateBase

 _SUPERVISOR_SOURCE = Path("/usr/src/supervisor/supervisor")
@@ -41,4 +44,29 @@ class EvaluateSourceMods(EvaluateBase):
             _LOGGER.warning("Disabled content-trust, skipping evaluation")
             return False

+        # Calculate sume of the sourcecode
+        try:
+            checksum = await self.sys_run_in_executor(
+                calc_checksum_path_sourcecode, _SUPERVISOR_SOURCE
+            )
+        except OSError as err:
+            if err.errno == errno.EBADMSG:
+                self.sys_resolution.add_unhealthy_reason(
+                    UnhealthyReason.OSERROR_BAD_MESSAGE
+                )
+
+            self.sys_resolution.create_issue(
+                IssueType.CORRUPT_FILESYSTEM, ContextType.SYSTEM
+            )
+            _LOGGER.error("Can't calculate checksum of source code: %s", err)
+            return False
+
+        # Validate checksum
+        try:
+            await self.sys_security.verify_own_content(checksum)
+        except CodeNotaryUntrusted:
+            return True
+        except CodeNotaryError:
+            pass
+
         return False
supervisor/resolution/fixups/system_execute_integrity.py (new file, 67 lines)
@@ -0,0 +1,67 @@
"""Helpers to check and fix issues with free space."""

from datetime import timedelta
import logging

from ...coresys import CoreSys
from ...exceptions import ResolutionFixupError, ResolutionFixupJobError
from ...jobs.const import JobCondition, JobThrottle
from ...jobs.decorator import Job
from ...security.const import ContentTrustResult
from ..const import ContextType, IssueType, SuggestionType
from .base import FixupBase

_LOGGER: logging.Logger = logging.getLogger(__name__)


def setup(coresys: CoreSys) -> FixupBase:
    """Check setup function."""
    return FixupSystemExecuteIntegrity(coresys)


class FixupSystemExecuteIntegrity(FixupBase):
    """Storage class for fixup."""

    @Job(
        name="fixup_system_execute_integrity_process",
        conditions=[JobCondition.INTERNET_SYSTEM],
        on_condition=ResolutionFixupJobError,
        throttle_period=timedelta(hours=8),
        throttle=JobThrottle.THROTTLE,
    )
    async def process_fixup(self, reference: str | None = None) -> None:
        """Initialize the fixup class."""
        result = await self.sys_security.integrity_check()

        if ContentTrustResult.FAILED in (result.core, result.supervisor):
            raise ResolutionFixupError()

        for plugin in result.plugins:
            if plugin != ContentTrustResult.FAILED:
                continue
            raise ResolutionFixupError()

        for addon in result.addons:
            if addon != ContentTrustResult.FAILED:
                continue
            raise ResolutionFixupError()

    @property
    def suggestion(self) -> SuggestionType:
        """Return a SuggestionType enum."""
        return SuggestionType.EXECUTE_INTEGRITY

    @property
    def context(self) -> ContextType:
        """Return a ContextType enum."""
        return ContextType.SYSTEM

    @property
    def issues(self) -> list[IssueType]:
        """Return a IssueType enum list."""
        return [IssueType.TRUST]

    @property
    def auto(self) -> bool:
        """Return if a fixup can be apply as auto fix."""
        return True
@@ -11,10 +11,20 @@ from ..const import (
     FILE_HASSIO_SECURITY,
 )
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import PwnedError
+from ..exceptions import (
+    CodeNotaryError,
+    CodeNotaryUntrusted,
+    PwnedError,
+    SecurityJobError,
+)
+from ..jobs.const import JobConcurrency
+from ..jobs.decorator import Job, JobCondition
+from ..resolution.const import ContextType, IssueType, SuggestionType
+from ..utils.codenotary import cas_validate
 from ..utils.common import FileConfiguration
 from ..utils.pwned import check_pwned_password
 from ..validate import SCHEMA_SECURITY_CONFIG
+from .const import ContentTrustResult, IntegrityResult

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -57,6 +67,30 @@ class Security(FileConfiguration, CoreSysAttributes):
         """Set pwned is enabled/disabled."""
         self._data[ATTR_PWNED] = value

+    async def verify_content(self, signer: str, checksum: str) -> None:
+        """Verify content on CAS."""
+        if not self.content_trust:
+            _LOGGER.warning("Disabled content-trust, skip validation")
+            return
+
+        try:
+            await cas_validate(signer, checksum)
+        except CodeNotaryUntrusted:
+            raise
+        except CodeNotaryError:
+            if self.force:
+                raise
+            self.sys_resolution.create_issue(
+                IssueType.TRUST,
+                ContextType.SYSTEM,
+                suggestions=[SuggestionType.EXECUTE_INTEGRITY],
+            )
+            return
+
+    async def verify_own_content(self, checksum: str) -> None:
+        """Verify content from HA org."""
+        return await self.verify_content("notary@home-assistant.io", checksum)
+
     async def verify_secret(self, pwned_hash: str) -> None:
         """Verify pwned state of a secret."""
         if not self.pwned:
@@ -69,3 +103,73 @@ class Security(FileConfiguration, CoreSysAttributes):
             if self.force:
                 raise
             return
+
+    @Job(
+        name="security_manager_integrity_check",
+        conditions=[JobCondition.INTERNET_SYSTEM],
+        on_condition=SecurityJobError,
+        concurrency=JobConcurrency.REJECT,
+    )
+    async def integrity_check(self) -> IntegrityResult:
+        """Run a full system integrity check of the platform.
+
+        We only allow to install trusted content.
+        This is a out of the band manual check.
+        """
+        result: IntegrityResult = IntegrityResult()
+        if not self.content_trust:
+            _LOGGER.warning(
+                "Skipping integrity check, content_trust is globally disabled"
+            )
+            return result
+
+        # Supervisor
+        try:
+            await self.sys_supervisor.check_trust()
+            result.supervisor = ContentTrustResult.PASS
+        except CodeNotaryUntrusted:
+            result.supervisor = ContentTrustResult.ERROR
+            self.sys_resolution.create_issue(IssueType.TRUST, ContextType.SUPERVISOR)
+        except CodeNotaryError:
+            result.supervisor = ContentTrustResult.FAILED
+
+        # Core
+        try:
+            await self.sys_homeassistant.core.check_trust()
+            result.core = ContentTrustResult.PASS
+        except CodeNotaryUntrusted:
+            result.core = ContentTrustResult.ERROR
+            self.sys_resolution.create_issue(IssueType.TRUST, ContextType.CORE)
+        except CodeNotaryError:
+            result.core = ContentTrustResult.FAILED
+
+        # Plugins
+        for plugin in self.sys_plugins.all_plugins:
+            try:
+                await plugin.check_trust()
+                result.plugins[plugin.slug] = ContentTrustResult.PASS
+            except CodeNotaryUntrusted:
+                result.plugins[plugin.slug] = ContentTrustResult.ERROR
+                self.sys_resolution.create_issue(
+                    IssueType.TRUST, ContextType.PLUGIN, reference=plugin.slug
+                )
+            except CodeNotaryError:
+                result.plugins[plugin.slug] = ContentTrustResult.FAILED
+
+        # Add-ons
+        for addon in self.sys_addons.installed:
+            if not addon.signed:
+                result.addons[addon.slug] = ContentTrustResult.UNTESTED
+                continue
+            try:
+                await addon.check_trust()
+                result.addons[addon.slug] = ContentTrustResult.PASS
+            except CodeNotaryUntrusted:
+                result.addons[addon.slug] = ContentTrustResult.ERROR
+                self.sys_resolution.create_issue(
+                    IssueType.TRUST, ContextType.ADDON, reference=addon.slug
+                )
+            except CodeNotaryError:
+                result.addons[addon.slug] = ContentTrustResult.FAILED
+
+        return result
@@ -13,6 +13,8 @@ import aiohttp
 from aiohttp.client_exceptions import ClientError
 from awesomeversion import AwesomeVersion, AwesomeVersionException

+from supervisor.jobs import ChildJobSyncFilter
+
 from .const import (
     ATTR_SUPERVISOR_INTERNET,
     SUPERVISOR_VERSION,
@@ -23,6 +25,8 @@ from .coresys import CoreSys, CoreSysAttributes
 from .docker.stats import DockerStats
 from .docker.supervisor import DockerSupervisor
 from .exceptions import (
+    CodeNotaryError,
+    CodeNotaryUntrusted,
     DockerError,
     HostAppArmorError,
     SupervisorAppArmorError,
@@ -33,6 +37,7 @@ from .exceptions import (
 from .jobs.const import JobCondition, JobThrottle
 from .jobs.decorator import Job
 from .resolution.const import ContextType, IssueType, UnhealthyReason
+from .utils.codenotary import calc_checksum
 from .utils.sentry import async_capture_exception

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -145,6 +150,20 @@ class Supervisor(CoreSysAttributes):
                 _LOGGER.error,
             ) from err

+        # Validate
+        try:
+            await self.sys_security.verify_own_content(calc_checksum(data))
+        except CodeNotaryUntrusted as err:
+            raise SupervisorAppArmorError(
+                "Content-Trust is broken for the AppArmor profile fetch!",
+                _LOGGER.critical,
+            ) from err
+        except CodeNotaryError as err:
+            raise SupervisorAppArmorError(
+                f"CodeNotary error while processing AppArmor fetch: {err!s}",
+                _LOGGER.error,
+            ) from err
+
         # Load
         temp_dir: TemporaryDirectory | None = None

@@ -178,6 +197,15 @@ class Supervisor(CoreSysAttributes):
         if temp_dir:
             await self.sys_run_in_executor(temp_dir.cleanup)

+    @Job(
+        name="supervisor_update",
+        # We assume for now the docker image pull is 100% of this task. But from
+        # a user perspective that isn't true. Other steps that take time which
+        # is not accounted for in progress include: app armor update and restart
+        child_job_syncs=[
+            ChildJobSyncFilter("docker_interface_install", progress_allocation=1.0)
+        ],
+    )
     async def update(self, version: AwesomeVersion | None = None) -> None:
         """Update Supervisor version."""
         version = version or self.latest_version or self.version
@@ -204,6 +232,7 @@ class Supervisor(CoreSysAttributes):

         # Update container
         _LOGGER.info("Update Supervisor to version %s", version)
+
         try:
             await self.instance.install(version, image=image)
             await self.instance.update_start_tag(image, version)
@@ -244,6 +273,13 @@ class Supervisor(CoreSysAttributes):
         """
         return self.instance.logs()

+    def check_trust(self) -> Awaitable[None]:
+        """Calculate Supervisor docker content trust.
+
+        Return Coroutine.
+        """
+        return self.instance.check_trust()
+
     async def stats(self) -> DockerStats:
         """Return stats of Supervisor."""
         try:
@@ -31,8 +31,14 @@ from .const import (
     UpdateChannel,
 )
 from .coresys import CoreSys, CoreSysAttributes
-from .exceptions import UpdaterError, UpdaterJobError
+from .exceptions import (
+    CodeNotaryError,
+    CodeNotaryUntrusted,
+    UpdaterError,
+    UpdaterJobError,
+)
 from .jobs.decorator import Job, JobCondition
+from .utils.codenotary import calc_checksum
 from .utils.common import FileConfiguration
 from .validate import SCHEMA_UPDATER_CONFIG

@@ -283,6 +289,19 @@ class Updater(FileConfiguration, CoreSysAttributes):
             self.sys_bus.remove_listener(self._connectivity_listener)
             self._connectivity_listener = None

+        # Validate
+        try:
+            await self.sys_security.verify_own_content(calc_checksum(data))
+        except CodeNotaryUntrusted as err:
+            raise UpdaterError(
+                "Content-Trust is broken for the version file fetch!", _LOGGER.critical
+            ) from err
+        except CodeNotaryError as err:
+            raise UpdaterError(
+                f"CodeNotary error while processing version fetch: {err!s}",
+                _LOGGER.error,
+            ) from err
+
         # Parse data
         try:
             data = json.loads(data)
supervisor/utils/codenotary.py (new file, 109 lines)
@@ -0,0 +1,109 @@
"""Small wrapper for CodeNotary."""

from __future__ import annotations

import asyncio
import hashlib
import json
import logging
from pathlib import Path
import shlex
from typing import Final

from dirhash import dirhash

from ..exceptions import CodeNotaryBackendError, CodeNotaryError, CodeNotaryUntrusted
from . import clean_env

_LOGGER: logging.Logger = logging.getLogger(__name__)

_CAS_CMD: str = (
    "cas authenticate --signerID {signer} --silent --output json --hash {sum}"
)
_CACHE: set[tuple[str, str]] = set()


_ATTR_ERROR: Final = "error"
_ATTR_STATUS: Final = "status"
_FALLBACK_ERROR: Final = "Unknown CodeNotary backend issue"


def calc_checksum(data: str | bytes) -> str:
    """Generate checksum for CodeNotary."""
    if isinstance(data, str):
        return hashlib.sha256(data.encode()).hexdigest()
    return hashlib.sha256(data).hexdigest()


def calc_checksum_path_sourcecode(folder: Path) -> str:
    """Calculate checksum for a path source code.

    Need catch OSError.
    """
    return dirhash(folder.as_posix(), "sha256", match=["*.py"])


# pylint: disable=unreachable
async def cas_validate(
    signer: str,
    checksum: str,
) -> None:
    """Validate data against CodeNotary."""
    return
    if (checksum, signer) in _CACHE:
        return

    # Generate command for request
    command = shlex.split(_CAS_CMD.format(signer=signer, sum=checksum))

    # Request notary authorization
    _LOGGER.debug("Send cas command: %s", command)
    try:
        proc = await asyncio.create_subprocess_exec(
            *command,
            stdin=asyncio.subprocess.DEVNULL,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
            env=clean_env(),
        )

        async with asyncio.timeout(15):
            data, error = await proc.communicate()
    except TimeoutError:
        raise CodeNotaryBackendError(
            "Timeout while processing CodeNotary", _LOGGER.warning
        ) from None
    except OSError as err:
        raise CodeNotaryError(
            f"CodeNotary fatal error: {err!s}", _LOGGER.critical
        ) from err

    # Check if Notarized
    if proc.returncode != 0 and not data:
        if error:
            try:
                error = error.decode("utf-8")
            except UnicodeDecodeError as err:
                raise CodeNotaryBackendError(_FALLBACK_ERROR, _LOGGER.warning) from err
            if "not notarized" in error:
                raise CodeNotaryUntrusted()
        else:
            error = _FALLBACK_ERROR
        raise CodeNotaryBackendError(error, _LOGGER.warning)

    # Parse data
    try:
        data_json = json.loads(data)
        _LOGGER.debug("CodeNotary response with: %s", data_json)
    except (json.JSONDecodeError, UnicodeDecodeError) as err:
        raise CodeNotaryError(
            f"Can't parse CodeNotary output: {data!s} - {err!s}", _LOGGER.error
        ) from err

    if _ATTR_ERROR in data_json:
        raise CodeNotaryBackendError(data_json[_ATTR_ERROR], _LOGGER.warning)

    if data_json[_ATTR_STATUS] == 0:
        _CACHE.add((checksum, signer))
    else:
        raise CodeNotaryUntrusted()
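For orientation, a sketch of how these helpers are exercised by the callers added elsewhere in this compare (supervisor.py and updater.py); the surrounding object with its sys_security attribute is an assumption here, not shown in this file:

```python
from supervisor.utils.codenotary import calc_checksum

data = b"fetched version file or AppArmor profile"  # hypothetical payload
checksum = calc_checksum(data)  # sha256 hex digest of the payload

# Inside the Supervisor the callers then run:
#     await self.sys_security.verify_own_content(checksum)
# which calls cas_validate("notary@home-assistant.io", checksum) and raises
# CodeNotaryUntrusted / CodeNotaryError when verification fails.
```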
@@ -419,3 +419,71 @@ def test_valid_schema():
     config["schema"] = {"field": "invalid"}
     with pytest.raises(vol.Invalid):
         assert vd.SCHEMA_ADDON_CONFIG(config)
+
+
+def test_ulimits_simple_format():
+    """Test ulimits simple format validation."""
+    config = load_json_fixture("basic-addon-config.json")
+
+    config["ulimits"] = {"nofile": 65535, "nproc": 32768, "memlock": 134217728}
+
+    valid_config = vd.SCHEMA_ADDON_CONFIG(config)
+    assert valid_config["ulimits"]["nofile"] == 65535
+    assert valid_config["ulimits"]["nproc"] == 32768
+    assert valid_config["ulimits"]["memlock"] == 134217728
+
+
+def test_ulimits_detailed_format():
+    """Test ulimits detailed format validation."""
+    config = load_json_fixture("basic-addon-config.json")
+
+    config["ulimits"] = {
+        "nofile": {"soft": 20000, "hard": 40000},
+        "nproc": 32768,  # Mixed format should work
+        "memlock": {"soft": 67108864, "hard": 134217728},
+    }
+
+    valid_config = vd.SCHEMA_ADDON_CONFIG(config)
+    assert valid_config["ulimits"]["nofile"]["soft"] == 20000
+    assert valid_config["ulimits"]["nofile"]["hard"] == 40000
+    assert valid_config["ulimits"]["nproc"] == 32768
+    assert valid_config["ulimits"]["memlock"]["soft"] == 67108864
+    assert valid_config["ulimits"]["memlock"]["hard"] == 134217728
+
+
+def test_ulimits_empty_dict():
+    """Test ulimits with empty dict (default)."""
+    config = load_json_fixture("basic-addon-config.json")

+    valid_config = vd.SCHEMA_ADDON_CONFIG(config)
+    assert valid_config["ulimits"] == {}
+
+
+def test_ulimits_invalid_values():
+    """Test ulimits with invalid values."""
+    config = load_json_fixture("basic-addon-config.json")
+
+    # Invalid string values
+    config["ulimits"] = {"nofile": "invalid"}
+    with pytest.raises(vol.Invalid):
+        vd.SCHEMA_ADDON_CONFIG(config)
+
+    # Invalid detailed format
+    config["ulimits"] = {"nofile": {"invalid_key": 1000}}
+    with pytest.raises(vol.Invalid):
+        vd.SCHEMA_ADDON_CONFIG(config)
+
+    # Missing hard value in detailed format
+    config["ulimits"] = {"nofile": {"soft": 1000}}
+    with pytest.raises(vol.Invalid):
+        vd.SCHEMA_ADDON_CONFIG(config)
+
+    # Missing soft value in detailed format
+    config["ulimits"] = {"nofile": {"hard": 1000}}
+    with pytest.raises(vol.Invalid):
+        vd.SCHEMA_ADDON_CONFIG(config)
+
+    # Empty dict in detailed format
+    config["ulimits"] = {"nofile": {}}
+    with pytest.raises(vol.Invalid):
+        vd.SCHEMA_ADDON_CONFIG(config)
@@ -2,16 +2,19 @@

 import asyncio
 from pathlib import Path
-from unittest.mock import MagicMock, PropertyMock, patch
+from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch

 from aiohttp.test_utils import TestClient
 from awesomeversion import AwesomeVersion
 import pytest

 from supervisor.backups.manager import BackupManager
+from supervisor.const import CoreState
 from supervisor.coresys import CoreSys
+from supervisor.docker.homeassistant import DockerHomeAssistant
 from supervisor.docker.interface import DockerInterface
-from supervisor.homeassistant.api import APIState
+from supervisor.homeassistant.api import APIState, HomeAssistantAPI
+from supervisor.homeassistant.const import WSEvent
 from supervisor.homeassistant.core import HomeAssistantCore
 from supervisor.homeassistant.module import HomeAssistant

@@ -271,3 +274,96 @@ async def test_background_home_assistant_update_fails_fast(
     assert resp.status == 400
     body = await resp.json()
     assert body["message"] == "Version 2025.8.3 is already installed"
+
+
+@pytest.mark.usefixtures("tmp_supervisor_data")
+async def test_api_progress_updates_home_assistant_update(
+    api_client: TestClient, coresys: CoreSys, ha_ws_client: AsyncMock
+):
+    """Test progress updates sent to Home Assistant for updates."""
+    coresys.hardware.disk.get_disk_free_space = lambda x: 5000
+    coresys.core.set_state(CoreState.RUNNING)
+    coresys.docker.docker.api.pull.return_value = load_json_fixture(
+        "docker_pull_image_log.json"
+    )
+    coresys.homeassistant.version = AwesomeVersion("2025.8.0")
+
+    with (
+        patch.object(
+            DockerHomeAssistant,
+            "version",
+            new=PropertyMock(return_value=AwesomeVersion("2025.8.0")),
+        ),
+        patch.object(
+            HomeAssistantAPI, "get_config", return_value={"components": ["frontend"]}
+        ),
+    ):
+        resp = await api_client.post("/core/update", json={"version": "2025.8.3"})
+
+    assert resp.status == 200
+
+    events = [
+        {
+            "stage": evt.args[0]["data"]["data"]["stage"],
+            "progress": evt.args[0]["data"]["data"]["progress"],
+            "done": evt.args[0]["data"]["data"]["done"],
+        }
+        for evt in ha_ws_client.async_send_command.call_args_list
+        if "data" in evt.args[0]
+        and evt.args[0]["data"]["event"] == WSEvent.JOB
+        and evt.args[0]["data"]["data"]["name"] == "home_assistant_core_update"
+    ]
+    assert events[:5] == [
+        {
+            "stage": None,
+            "progress": 0,
+            "done": None,
+        },
+        {
+            "stage": None,
+            "progress": 0,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 0.1,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 1.2,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 2.8,
+            "done": False,
+        },
+    ]
+    assert events[-5:] == [
+        {
+            "stage": None,
+            "progress": 97.2,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 98.4,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 99.4,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 100,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 100,
+            "done": True,
+        },
+    ]
@@ -41,9 +41,11 @@ async def test_api_security_options_pwned(api_client, coresys: CoreSys):
 async def test_api_integrity_check(
     api_client, coresys: CoreSys, supervisor_internet: AsyncMock
 ):
-    """Test security integrity check - now deprecated."""
+    """Test security integrity check."""
+    coresys.security.content_trust = False
+
     resp = await api_client.post("/security/integrity")
     result = await resp.json()

-    # CodeNotary integrity check has been removed
-    assert "error" in result["data"]
+    assert result["data"]["core"] == "untested"
+    assert result["data"]["supervisor"] == "untested"
@@ -13,12 +13,13 @@ from supervisor.addons.addon import Addon
 from supervisor.arch import CpuArch
 from supervisor.backups.manager import BackupManager
 from supervisor.config import CoreConfig
-from supervisor.const import AddonState
+from supervisor.const import AddonState, CoreState
 from supervisor.coresys import CoreSys
 from supervisor.docker.addon import DockerAddon
 from supervisor.docker.const import ContainerState
 from supervisor.docker.interface import DockerInterface
 from supervisor.docker.monitor import DockerContainerStateEvent
+from supervisor.homeassistant.const import WSEvent
 from supervisor.homeassistant.module import HomeAssistant
 from supervisor.store.addon import AddonStore
 from supervisor.store.repository import Repository
@@ -709,3 +710,101 @@ async def test_api_store_addons_addon_availability_installed_addon(
     assert (
         "requires Home Assistant version 2023.1.1 or greater" in result["message"]
     )
+
+
+@pytest.mark.parametrize(
+    ("action", "job_name", "addon_slug"),
+    [
+        ("install", "addon_manager_install", "local_ssh"),
+        ("update", "addon_manager_update", "local_example"),
+    ],
+)
+@pytest.mark.usefixtures("tmp_supervisor_data")
+async def test_api_progress_updates_addon_install_update(
+    api_client: TestClient,
+    coresys: CoreSys,
+    ha_ws_client: AsyncMock,
+    install_addon_example: Addon,
+    action: str,
+    job_name: str,
+    addon_slug: str,
+):
+    """Test progress updates sent to Home Assistant for installs/updates."""
+    coresys.hardware.disk.get_disk_free_space = lambda x: 5000
+    coresys.core.set_state(CoreState.RUNNING)
+    coresys.docker.docker.api.pull.return_value = load_json_fixture(
+        "docker_pull_image_log.json"
+    )
+    coresys.arch._supported_arch = ["amd64"]  # pylint: disable=protected-access
+    install_addon_example.data_store["version"] = AwesomeVersion("2.0.0")
+
+    with (
+        patch.object(Addon, "load"),
+        patch.object(Addon, "need_build", new=PropertyMock(return_value=False)),
+        patch.object(Addon, "latest_need_build", new=PropertyMock(return_value=False)),
+    ):
+        resp = await api_client.post(f"/store/addons/{addon_slug}/{action}")
+
+    assert resp.status == 200
+
+    events = [
+        {
+            "stage": evt.args[0]["data"]["data"]["stage"],
+            "progress": evt.args[0]["data"]["data"]["progress"],
+            "done": evt.args[0]["data"]["data"]["done"],
+        }
+        for evt in ha_ws_client.async_send_command.call_args_list
+        if "data" in evt.args[0]
+        and evt.args[0]["data"]["event"] == WSEvent.JOB
+        and evt.args[0]["data"]["data"]["name"] == job_name
+        and evt.args[0]["data"]["data"]["reference"] == addon_slug
+    ]
+    assert events[:4] == [
+        {
+            "stage": None,
+            "progress": 0,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 0.1,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 1.2,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 2.8,
+            "done": False,
+        },
+    ]
+    assert events[-5:] == [
+        {
+            "stage": None,
+            "progress": 97.2,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 98.4,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 99.4,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 100,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 100,
+            "done": True,
+        },
+    ]
@@ -2,17 +2,23 @@

 # pylint: disable=protected-access
 import time
-from unittest.mock import AsyncMock, MagicMock, patch
+from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch

 from aiohttp.test_utils import TestClient
+from awesomeversion import AwesomeVersion
 from blockbuster import BlockingError
 import pytest

+from supervisor.const import CoreState
+from supervisor.core import Core
 from supervisor.coresys import CoreSys
-from supervisor.exceptions import HassioError, HostNotSupportedError, StoreGitError
-from supervisor.store.repository import Repository
+from supervisor.exceptions import HassioError, HostNotSupportedError
+from supervisor.homeassistant.const import WSEvent
+from supervisor.supervisor import Supervisor
+from supervisor.updater import Updater

 from tests.api import common_test_api_advanced_logs
+from tests.common import load_json_fixture
 from tests.dbus_service_mocks.base import DBusServiceMock
 from tests.dbus_service_mocks.os_agent import OSAgent as OSAgentService

@@ -28,81 +34,6 @@ async def test_api_supervisor_options_debug(api_client: TestClient, coresys: Cor
     assert coresys.config.debug


-async def test_api_supervisor_options_add_repository(
-    api_client: TestClient, coresys: CoreSys, supervisor_internet: AsyncMock
-):
-    """Test add a repository via POST /supervisor/options REST API."""
-    assert REPO_URL not in coresys.store.repository_urls
-
-    with (
-        patch("supervisor.store.repository.RepositoryGit.load", return_value=None),
-        patch("supervisor.store.repository.RepositoryGit.validate", return_value=True),
-    ):
-        response = await api_client.post(
-            "/supervisor/options", json={"addons_repositories": [REPO_URL]}
-        )
-
-    assert response.status == 200
-    assert REPO_URL in coresys.store.repository_urls
-
-
-async def test_api_supervisor_options_remove_repository(
-    api_client: TestClient, coresys: CoreSys, test_repository: Repository
-):
-    """Test remove a repository via POST /supervisor/options REST API."""
-    assert test_repository.source in coresys.store.repository_urls
-    assert test_repository.slug in coresys.store.repositories
-
-    response = await api_client.post(
-        "/supervisor/options", json={"addons_repositories": []}
-    )
-
-    assert response.status == 200
-    assert test_repository.source not in coresys.store.repository_urls
-    assert test_repository.slug not in coresys.store.repositories
-
-
-@pytest.mark.parametrize("git_error", [None, StoreGitError()])
-async def test_api_supervisor_options_repositories_skipped_on_error(
-    api_client: TestClient, coresys: CoreSys, git_error: StoreGitError
-):
-    """Test repositories skipped on error via POST /supervisor/options REST API."""
-    with (
-        patch("supervisor.store.repository.RepositoryGit.load", side_effect=git_error),
-        patch("supervisor.store.repository.RepositoryGit.validate", return_value=False),
-        patch("supervisor.store.repository.RepositoryCustom.remove"),
-    ):
-        response = await api_client.post(
-            "/supervisor/options", json={"addons_repositories": [REPO_URL]}
-        )
-
-    assert response.status == 400
-    assert len(coresys.resolution.suggestions) == 0
-    assert REPO_URL not in coresys.store.repository_urls
-
-
-async def test_api_supervisor_options_repo_error_with_config_change(
-    api_client: TestClient, coresys: CoreSys
-):
-    """Test config change with add repository error via POST /supervisor/options REST API."""
-    assert not coresys.config.debug
-
-    with patch(
-        "supervisor.store.repository.RepositoryGit.load", side_effect=StoreGitError()
-    ):
-        response = await api_client.post(
-            "/supervisor/options",
-            json={"debug": True, "addons_repositories": [REPO_URL]},
-        )
-
-    assert response.status == 400
-    assert REPO_URL not in coresys.store.repository_urls
-
-    assert coresys.config.debug
-    coresys.updater.save_data.assert_called_once()
-    coresys.config.save_data.assert_called_once()
-
-
 async def test_api_supervisor_options_auto_update(
     api_client: TestClient, coresys: CoreSys
 ):
@@ -316,3 +247,97 @@ async def test_api_supervisor_options_blocking_io(

     # This should not raise blocking error anymore
     time.sleep(0)
+
+
+@pytest.mark.usefixtures("tmp_supervisor_data")
+async def test_api_progress_updates_supervisor_update(
+    api_client: TestClient, coresys: CoreSys, ha_ws_client: AsyncMock
+):
+    """Test progress updates sent to Home Assistant for updates."""
+    coresys.hardware.disk.get_disk_free_space = lambda x: 5000
+    coresys.core.set_state(CoreState.RUNNING)
+    coresys.docker.docker.api.pull.return_value = load_json_fixture(
+        "docker_pull_image_log.json"
+    )
+
+    with (
+        patch.object(
+            Supervisor,
+            "version",
+            new=PropertyMock(return_value=AwesomeVersion("2025.08.0")),
+        ),
+        patch.object(
+            Updater,
+            "version_supervisor",
+            new=PropertyMock(return_value=AwesomeVersion("2025.08.3")),
+        ),
+        patch.object(
+            Updater, "image_supervisor", new=PropertyMock(return_value="supervisor")
+        ),
+        patch.object(Supervisor, "update_apparmor"),
+        patch.object(Core, "stop"),
+    ):
+        resp = await api_client.post("/supervisor/update")
+
+    assert resp.status == 200
+
+    events = [
+        {
+            "stage": evt.args[0]["data"]["data"]["stage"],
+            "progress": evt.args[0]["data"]["data"]["progress"],
+            "done": evt.args[0]["data"]["data"]["done"],
+        }
+        for evt in ha_ws_client.async_send_command.call_args_list
+        if "data" in evt.args[0]
+        and evt.args[0]["data"]["event"] == WSEvent.JOB
+        and evt.args[0]["data"]["data"]["name"] == "supervisor_update"
+    ]
+    assert events[:4] == [
+        {
+            "stage": None,
+            "progress": 0,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 0.1,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 1.2,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 2.8,
+            "done": False,
+        },
+    ]
+    assert events[-5:] == [
+        {
+            "stage": None,
+            "progress": 97.2,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 98.4,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 99.4,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 100,
+            "done": False,
+        },
+        {
+            "stage": None,
+            "progress": 100,
+            "done": True,
+        },
+    ]
@@ -503,3 +503,93 @@ async def test_addon_new_device_no_haos(
     await install_addon_ssh.stop()
     assert coresys.resolution.issues == []
     assert coresys.resolution.suggestions == []
+
+
+async def test_ulimits_integration(
+    coresys: CoreSys,
+    install_addon_ssh: Addon,
+):
+    """Test ulimits integration with Docker addon."""
+    docker_addon = DockerAddon(coresys, install_addon_ssh)
+
+    # Test default case (no ulimits, no realtime)
+    assert docker_addon.ulimits is None
+
+    # Test with realtime enabled (should have built-in ulimits)
+    install_addon_ssh.data["realtime"] = True
+    ulimits = docker_addon.ulimits
+    assert ulimits is not None
+    assert len(ulimits) == 2
+    # Check for rtprio limit
+    rtprio_limit = next((u for u in ulimits if u.name == "rtprio"), None)
+    assert rtprio_limit is not None
+    assert rtprio_limit.soft == 90
+    assert rtprio_limit.hard == 99
+    # Check for memlock limit
+    memlock_limit = next((u for u in ulimits if u.name == "memlock"), None)
+    assert memlock_limit is not None
+    assert memlock_limit.soft == 128 * 1024 * 1024
+    assert memlock_limit.hard == 128 * 1024 * 1024
+
+    # Test with configurable ulimits (simple format)
+    install_addon_ssh.data["realtime"] = False
+    install_addon_ssh.data["ulimits"] = {"nofile": 65535, "nproc": 32768}
+    ulimits = docker_addon.ulimits
+    assert ulimits is not None
+    assert len(ulimits) == 2
+
+    nofile_limit = next((u for u in ulimits if u.name == "nofile"), None)
+    assert nofile_limit is not None
+    assert nofile_limit.soft == 65535
+    assert nofile_limit.hard == 65535
+
+    nproc_limit = next((u for u in ulimits if u.name == "nproc"), None)
+    assert nproc_limit is not None
+    assert nproc_limit.soft == 32768
+    assert nproc_limit.hard == 32768
+
+    # Test with configurable ulimits (detailed format)
+    install_addon_ssh.data["ulimits"] = {
+        "nofile": {"soft": 20000, "hard": 40000},
+        "memlock": {"soft": 67108864, "hard": 134217728},
+    }
+    ulimits = docker_addon.ulimits
+    assert ulimits is not None
+    assert len(ulimits) == 2
+
+    nofile_limit = next((u for u in ulimits if u.name == "nofile"), None)
+    assert nofile_limit is not None
+    assert nofile_limit.soft == 20000
+    assert nofile_limit.hard == 40000
+
+    memlock_limit = next((u for u in ulimits if u.name == "memlock"), None)
+    assert memlock_limit is not None
+    assert memlock_limit.soft == 67108864
+    assert memlock_limit.hard == 134217728
+
+    # Test mixed format and realtime (realtime + custom ulimits)
+    install_addon_ssh.data["realtime"] = True
+    install_addon_ssh.data["ulimits"] = {
+        "nofile": 65535,
+        "core": {"soft": 0, "hard": 0},  # Disable core dumps
+    }
+    ulimits = docker_addon.ulimits
+    assert ulimits is not None
+    assert (
+        len(ulimits) == 4
+    )  # rtprio, memlock (from realtime) + nofile, core (from config)
+
+    # Check realtime limits still present
+    rtprio_limit = next((u for u in ulimits if u.name == "rtprio"), None)
+    assert rtprio_limit is not None
+
+    # Check custom limits added
+    nofile_limit = next((u for u in ulimits if u.name == "nofile"), None)
+    assert nofile_limit is not None
+    assert nofile_limit.soft == 65535
+    assert nofile_limit.hard == 65535
+
+    core_limit = next((u for u in ulimits if u.name == "core"), None)
+    assert core_limit is not None
+    assert core_limit.soft == 0
+    assert core_limit.hard == 0
@@ -26,12 +26,20 @@ from supervisor.exceptions import (
     DockerNotFound,
     DockerRequestError,
 )
-from supervisor.homeassistant.const import WSEvent
 from supervisor.jobs import JobSchedulerOptions, SupervisorJob

 from tests.common import load_json_fixture


+@pytest.fixture(autouse=True)
+def mock_verify_content(coresys: CoreSys):
+    """Mock verify_content utility during tests."""
+    with patch.object(
+        coresys.security, "verify_content", return_value=None
+    ) as verify_content:
+        yield verify_content
+
+
 @pytest.mark.parametrize(
     "cpu_arch, platform",
     [
@@ -408,196 +416,17 @@ async def test_install_fires_progress_events(
     ]


-async def test_install_sends_progress_to_home_assistant(
-    coresys: CoreSys, test_docker_interface: DockerInterface, ha_ws_client: AsyncMock
-):
-    """Test progress events are sent as job updates to Home Assistant."""
-    coresys.core.set_state(CoreState.RUNNING)
-    coresys.docker.docker.api.pull.return_value = load_json_fixture(
-        "docker_pull_image_log.json"
-    )
-
-    with (
-        patch.object(
-            type(coresys.supervisor), "arch", PropertyMock(return_value="i386")
-        ),
-    ):
-        # Schedule job so we can listen for the end. Then we can assert against the WS mock
-        event = asyncio.Event()
-        job, install_task = coresys.jobs.schedule_job(
-            test_docker_interface.install,
-            JobSchedulerOptions(),
-            AwesomeVersion("1.2.3"),
-            "test",
-        )
-
-        async def listen_for_job_end(reference: SupervisorJob):
-            if reference.uuid != job.uuid:
-                return
-            event.set()
-
-        coresys.bus.register_event(BusEvent.SUPERVISOR_JOB_END, listen_for_job_end)
-        await install_task
-        await event.wait()
-
-    events = [
-        evt.args[0]["data"]["data"]
-        for evt in ha_ws_client.async_send_command.call_args_list
-        if "data" in evt.args[0] and evt.args[0]["data"]["event"] == WSEvent.JOB
-    ]
-    assert events[0]["name"] == "docker_interface_install"
-    assert events[0]["uuid"] == job.uuid
-    assert events[0]["done"] is None
-    assert events[1]["name"] == "docker_interface_install"
-    assert events[1]["uuid"] == job.uuid
-    assert events[1]["done"] is False
-    assert events[-1]["name"] == "docker_interface_install"
-    assert events[-1]["uuid"] == job.uuid
-    assert events[-1]["done"] is True
-
-    def make_sub_log(layer_id: str):
-        return [
-            {
-                "stage": evt["stage"],
-                "progress": evt["progress"],
-                "done": evt["done"],
-                "extra": evt["extra"],
-            }
-            for evt in events
-            if evt["name"] == "Pulling container image layer"
-            and evt["reference"] == layer_id
-            and evt["parent_id"] == job.uuid
-        ]
-
-    layer_1_log = make_sub_log("1e214cd6d7d0")
-    layer_2_log = make_sub_log("1a38e1d5e18d")
-    assert len(layer_1_log) == 20
-    assert len(layer_2_log) == 19
-    assert len(events) == 42
-    assert layer_1_log == [
-        {"stage": "Pulling fs layer", "progress": 0, "done": False, "extra": None},
-        {
-            "stage": "Downloading",
-            "progress": 0.1,
-            "done": False,
-            "extra": {"current": 539462, "total": 436480882},
-        },
-        {
-            "stage": "Downloading",
-            "progress": 0.6,
-            "done": False,
-            "extra": {"current": 4864838, "total": 436480882},
-        },
-        {
-            "stage": "Downloading",
-            "progress": 0.9,
-            "done": False,
-            "extra": {"current": 7552896, "total": 436480882},
-        },
-        {
-            "stage": "Downloading",
-            "progress": 1.2,
-            "done": False,
-            "extra": {"current": 10252544, "total": 436480882},
-        },
-        {
-            "stage": "Downloading",
-            "progress": 2.9,
-            "done": False,
-            "extra": {"current": 25369792, "total": 436480882},
-        },
-        {
-            "stage": "Downloading",
-            "progress": 11.9,
-            "done": False,
-            "extra": {"current": 103619904, "total": 436480882},
-        },
-        {
-            "stage": "Downloading",
-            "progress": 26.1,
-            "done": False,
-            "extra": {"current": 227726144, "total": 436480882},
-        },
-        {
-            "stage": "Downloading",
-            "progress": 49.6,
-            "done": False,
-            "extra": {"current": 433170048, "total": 436480882},
-        },
-        {
-            "stage": "Verifying Checksum",
-            "progress": 50,
-            "done": False,
-            "extra": {"current": 433170048, "total": 436480882},
-        },
-        {
-            "stage": "Download complete",
-            "progress": 50,
-            "done": False,
-            "extra": {"current": 433170048, "total": 436480882},
-        },
-        {
-            "stage": "Extracting",
-            "progress": 50.1,
-            "done": False,
-            "extra": {"current": 557056, "total": 436480882},
-        },
-        {
-            "stage": "Extracting",
-            "progress": 60.3,
-            "done": False,
-            "extra": {"current": 89686016, "total": 436480882},
-        },
-        {
-            "stage": "Extracting",
-            "progress": 70.0,
-            "done": False,
-            "extra": {"current": 174358528, "total": 436480882},
-        },
-        {
-            "stage": "Extracting",
-            "progress": 80.0,
-            "done": False,
-            "extra": {"current": 261816320, "total": 436480882},
-        },
-        {
-            "stage": "Extracting",
-            "progress": 88.4,
-            "done": False,
-            "extra": {"current": 334790656, "total": 436480882},
-        },
-        {
-            "stage": "Extracting",
-            "progress": 94.0,
-            "done": False,
-            "extra": {"current": 383811584, "total": 436480882},
-        },
-        {
-            "stage": "Extracting",
-            "progress": 99.9,
-            "done": False,
-            "extra": {"current": 435617792, "total": 436480882},
-        },
-        {
-            "stage": "Extracting",
-            "progress": 100.0,
-            "done": False,
-            "extra": {"current": 436480882, "total": 436480882},
-        },
-        {
-            "stage": "Pull complete",
-            "progress": 100.0,
-            "done": True,
-            "extra": {"current": 436480882, "total": 436480882},
-        },
-    ]
-
-
 async def test_install_progress_rounding_does_not_cause_misses(
-    coresys: CoreSys, test_docker_interface: DockerInterface, ha_ws_client: AsyncMock
+    coresys: CoreSys,
+    test_docker_interface: DockerInterface,
+    ha_ws_client: AsyncMock,
+    capture_exception: Mock,
 ):
     """Test extremely close progress events do not create rounding issues."""
     coresys.core.set_state(CoreState.RUNNING)
+    # Current numbers chosen to create a rounding issue with original code
+    # Where a progress update came in with a value between the actual previous
+    # value and what it was rounded to. It should not raise an out of order exception
     coresys.docker.docker.api.pull.return_value = [
         {
             "status": "Pulling from home-assistant/odroid-n2-homeassistant",
@@ -662,65 +491,7 @@ async def test_install_progress_rounding_does_not_cause_misses(
         await install_task
         await event.wait()

-    events = [
-        evt.args[0]["data"]["data"]
-        for evt in ha_ws_client.async_send_command.call_args_list
-        if "data" in evt.args[0]
-        and evt.args[0]["data"]["event"] == WSEvent.JOB
-        and evt.args[0]["data"]["data"]["reference"] == "1e214cd6d7d0"
-        and evt.args[0]["data"]["data"]["stage"] in {"Downloading", "Extracting"}
-    ]
-
-    assert events == [
-        {
-            "name": "Pulling container image layer",
-            "stage": "Downloading",
-            "progress": 49.6,
-            "done": False,
-            "extra": {"current": 432700000, "total": 436480882},
-            "reference": "1e214cd6d7d0",
-            "parent_id": job.uuid,
-            "errors": [],
-            "uuid": ANY,
-            "created": ANY,
-        },
-        {
-            "name": "Pulling container image layer",
-            "stage": "Downloading",
-            "progress": 49.6,
-            "done": False,
-            "extra": {"current": 432800000, "total": 436480882},
-            "reference": "1e214cd6d7d0",
-            "parent_id": job.uuid,
-            "errors": [],
-            "uuid": ANY,
-            "created": ANY,
-        },
-        {
-            "name": "Pulling container image layer",
-            "stage": "Extracting",
-            "progress": 99.6,
-            "done": False,
-            "extra": {"current": 432700000, "total": 436480882},
-            "reference": "1e214cd6d7d0",
-            "parent_id": job.uuid,
-            "errors": [],
-            "uuid": ANY,
-            "created": ANY,
-        },
-        {
-            "name": "Pulling container image layer",
-            "stage": "Extracting",
-            "progress": 99.6,
-            "done": False,
-            "extra": {"current": 432800000, "total": 436480882},
-            "reference": "1e214cd6d7d0",
-            "parent_id": job.uuid,
-            "errors": [],
-            "uuid": ANY,
-            "created": ANY,
-        },
-    ]
+    capture_exception.assert_not_called()


 @pytest.mark.parametrize(
@@ -770,10 +541,15 @@ async def test_install_raises_on_pull_error(


 async def test_install_progress_handles_download_restart(
-    coresys: CoreSys, test_docker_interface: DockerInterface, ha_ws_client: AsyncMock
+    coresys: CoreSys,
+    test_docker_interface: DockerInterface,
+    ha_ws_client: AsyncMock,
+    capture_exception: Mock,
 ):
     """Test install handles docker progress events that include a download restart."""
     coresys.core.set_state(CoreState.RUNNING)
+    # Fixture emulates a download restart as it docker logs it
+    # A log out of order exception should not be raised
     coresys.docker.docker.api.pull.return_value = load_json_fixture(
         "docker_pull_image_log_restart.json"
     )
@@ -801,106 +577,4 @@ async def test_install_progress_handles_download_restart(
         await install_task
         await event.wait()

-    events = [
-        evt.args[0]["data"]["data"]
-        for evt in ha_ws_client.async_send_command.call_args_list
-        if "data" in evt.args[0] and evt.args[0]["data"]["event"] == WSEvent.JOB
-    ]
-
-    def make_sub_log(layer_id: str):
-        return [
-            {
-                "stage": evt["stage"],
-                "progress": evt["progress"],
-                "done": evt["done"],
-                "extra": evt["extra"],
-            }
-            for evt in events
-            if evt["name"] == "Pulling container image layer"
-            and evt["reference"] == layer_id
-            and evt["parent_id"] == job.uuid
-        ]
-
-    layer_1_log = make_sub_log("1e214cd6d7d0")
-    assert len(layer_1_log) == 14
-    assert layer_1_log == [
-        {"stage": "Pulling fs layer", "progress": 0, "done": False, "extra": None},
-        {
-            "stage": "Downloading",
-            "progress": 11.9,
-            "done": False,
-            "extra": {"current": 103619904, "total": 436480882},
-        },
-        {
-            "stage": "Downloading",
-            "progress": 26.1,
-            "done": False,
-            "extra": {"current": 227726144, "total": 436480882},
-        },
-        {
-            "stage": "Downloading",
-            "progress": 49.6,
-            "done": False,
-            "extra": {"current": 433170048, "total": 436480882},
-        },
-        {
-            "stage": "Retrying download",
-            "progress": 0,
-            "done": False,
-            "extra": None,
-        },
-        {
-            "stage": "Retrying download",
-            "progress": 0,
-            "done": False,
-            "extra": None,
-        },
-        {
-            "stage": "Downloading",
-            "progress": 11.9,
-            "done": False,
-            "extra": {"current": 103619904, "total": 436480882},
-        },
-        {
-            "stage": "Downloading",
-            "progress": 26.1,
-            "done": False,
-            "extra": {"current": 227726144, "total": 436480882},
-        },
-        {
-            "stage": "Downloading",
-            "progress": 49.6,
-            "done": False,
-            "extra": {"current": 433170048, "total": 436480882},
-        },
-        {
-            "stage": "Verifying Checksum",
-            "progress": 50,
-            "done": False,
-            "extra": {"current": 433170048, "total": 436480882},
-        },
-        {
-            "stage": "Download complete",
-            "progress": 50,
-            "done": False,
-            "extra": {"current": 433170048, "total": 436480882},
-        },
-        {
-            "stage": "Extracting",
-            "progress": 80.0,
-            "done": False,
-            "extra": {"current": 261816320, "total": 436480882},
-        },
-        {
-            "stage": "Extracting",
-            "progress": 100.0,
-            "done": False,
-            "extra": {"current": 436480882, "total": 436480882},
-        },
-        {
-            "stage": "Pull complete",
-            "progress": 100.0,
-            "done": True,
-            "extra": {"current": 436480882, "total": 436480882},
-        },
-    ]
+    capture_exception.assert_not_called()
@@ -17,6 +17,7 @@ from supervisor.exceptions import (
     AudioJobError,
     CliError,
     CliJobError,
+    CodeNotaryUntrusted,
    CoreDNSError,
    CoreDNSJobError,
    DockerError,
@@ -336,12 +337,14 @@ async def test_repair_failed(
         patch.object(
             DockerInterface, "arch", new=PropertyMock(return_value=CpuArch.AMD64)
         ),
-        patch.object(DockerInterface, "install", side_effect=DockerError),
+        patch(
+            "supervisor.security.module.cas_validate", side_effect=CodeNotaryUntrusted
+        ),
     ):
         await plugin.repair()

     capture_exception.assert_called_once()
-    assert check_exception_chain(capture_exception.call_args[0][0], DockerError)
+    assert check_exception_chain(capture_exception.call_args[0][0], CodeNotaryUntrusted)


 @pytest.mark.parametrize(
tests/resolution/check/test_check_supervisor_trust.py (new file, 96 lines)
@@ -0,0 +1,96 @@
+"""Test Check Supervisor trust."""
+
+# pylint: disable=import-error,protected-access
+from unittest.mock import AsyncMock, patch
+
+from supervisor.const import CoreState
+from supervisor.coresys import CoreSys
+from supervisor.exceptions import CodeNotaryError, CodeNotaryUntrusted
+from supervisor.resolution.checks.supervisor_trust import CheckSupervisorTrust
+from supervisor.resolution.const import IssueType, UnhealthyReason
+
+
+async def test_base(coresys: CoreSys):
+    """Test check basics."""
+    supervisor_trust = CheckSupervisorTrust(coresys)
+    assert supervisor_trust.slug == "supervisor_trust"
+    assert supervisor_trust.enabled
+
+
+async def test_check(coresys: CoreSys):
+    """Test check."""
+    supervisor_trust = CheckSupervisorTrust(coresys)
+    await coresys.core.set_state(CoreState.RUNNING)
+
+    assert len(coresys.resolution.issues) == 0
+
+    coresys.supervisor.check_trust = AsyncMock(side_effect=CodeNotaryError)
+    await supervisor_trust.run_check()
+    assert coresys.supervisor.check_trust.called
+
+    coresys.supervisor.check_trust = AsyncMock(return_value=None)
+    await supervisor_trust.run_check()
+    assert coresys.supervisor.check_trust.called
+
+    assert len(coresys.resolution.issues) == 0
+
+    coresys.supervisor.check_trust = AsyncMock(side_effect=CodeNotaryUntrusted)
+    await supervisor_trust.run_check()
+    assert coresys.supervisor.check_trust.called
+
+    assert len(coresys.resolution.issues) == 1
+    assert coresys.resolution.issues[-1].type == IssueType.TRUST
+
+    assert UnhealthyReason.UNTRUSTED in coresys.resolution.unhealthy
+
+
+async def test_approve(coresys: CoreSys):
+    """Test check."""
+    supervisor_trust = CheckSupervisorTrust(coresys)
+    await coresys.core.set_state(CoreState.RUNNING)
+
+    coresys.supervisor.check_trust = AsyncMock(side_effect=CodeNotaryUntrusted)
+    assert await supervisor_trust.approve_check()
+
+    coresys.supervisor.check_trust = AsyncMock(return_value=None)
+    assert not await supervisor_trust.approve_check()
+
+
+async def test_with_global_disable(coresys: CoreSys, caplog):
+    """Test when pwned is globally disabled."""
+    coresys.security.content_trust = False
+    supervisor_trust = CheckSupervisorTrust(coresys)
+    await coresys.core.set_state(CoreState.RUNNING)
+
+    assert len(coresys.resolution.issues) == 0
+    coresys.security.verify_own_content = AsyncMock(side_effect=CodeNotaryUntrusted)
+    await supervisor_trust.run_check()
+    assert not coresys.security.verify_own_content.called
+    assert (
+        "Skipping supervisor_trust, content_trust is globally disabled" in caplog.text
+    )
+
+
+async def test_did_run(coresys: CoreSys):
+    """Test that the check ran as expected."""
+    supervisor_trust = CheckSupervisorTrust(coresys)
+    should_run = supervisor_trust.states
+    should_not_run = [state for state in CoreState if state not in should_run]
+    assert len(should_run) != 0
+    assert len(should_not_run) != 0
+
+    with patch(
+        "supervisor.resolution.checks.supervisor_trust.CheckSupervisorTrust.run_check",
+        return_value=None,
+    ) as check:
+        for state in should_run:
+            await coresys.core.set_state(state)
+            await supervisor_trust()
+            check.assert_called_once()
+            check.reset_mock()
+
+        for state in should_not_run:
+            await coresys.core.set_state(state)
+            await supervisor_trust()
+            check.assert_not_called()
+            check.reset_mock()
@@ -1,20 +1,38 @@
 """Test evaluation base."""

 # pylint: disable=import-error,protected-access
-from unittest.mock import patch
+import errno
+import os
+from pathlib import Path
+from unittest.mock import AsyncMock, patch

 from supervisor.const import CoreState
 from supervisor.coresys import CoreSys
+from supervisor.exceptions import CodeNotaryError, CodeNotaryUntrusted
+from supervisor.resolution.const import ContextType, IssueType
+from supervisor.resolution.data import Issue
 from supervisor.resolution.evaluations.source_mods import EvaluateSourceMods


 async def test_evaluation(coresys: CoreSys):
-    """Test evaluation - CodeNotary removed."""
+    """Test evaluation."""
+    with patch(
+        "supervisor.resolution.evaluations.source_mods._SUPERVISOR_SOURCE",
+        Path(f"{os.getcwd()}/supervisor"),
+    ):
         sourcemods = EvaluateSourceMods(coresys)
         await coresys.core.set_state(CoreState.RUNNING)

-    # CodeNotary checking removed, evaluation always returns False now
         assert sourcemods.reason not in coresys.resolution.unsupported
+        coresys.security.verify_own_content = AsyncMock(side_effect=CodeNotaryUntrusted)
+        await sourcemods()
+        assert sourcemods.reason in coresys.resolution.unsupported
+
+        coresys.security.verify_own_content = AsyncMock(side_effect=CodeNotaryError)
+        await sourcemods()
+        assert sourcemods.reason not in coresys.resolution.unsupported
+
+        coresys.security.verify_own_content = AsyncMock()
         await sourcemods()
         assert sourcemods.reason not in coresys.resolution.unsupported

@@ -45,11 +63,27 @@ async def test_did_run(coresys: CoreSys):


 async def test_evaluation_error(coresys: CoreSys):
-    """Test error reading file during evaluation - CodeNotary removed."""
+    """Test error reading file during evaluation."""
     sourcemods = EvaluateSourceMods(coresys)
     await coresys.core.set_state(CoreState.RUNNING)
+    corrupt_fs = Issue(IssueType.CORRUPT_FILESYSTEM, ContextType.SYSTEM)

-    # CodeNotary checking removed, evaluation always returns False now
     assert sourcemods.reason not in coresys.resolution.unsupported
+    assert corrupt_fs not in coresys.resolution.issues
+
+    with patch(
+        "supervisor.utils.codenotary.dirhash",
+        side_effect=(err := OSError()),
+    ):
+        err.errno = errno.EBUSY
         await sourcemods()
         assert sourcemods.reason not in coresys.resolution.unsupported
+        assert corrupt_fs in coresys.resolution.issues
+        assert coresys.core.healthy is True
+
+        coresys.resolution.dismiss_issue(corrupt_fs)
+        err.errno = errno.EBADMSG
+        await sourcemods()
+        assert sourcemods.reason not in coresys.resolution.unsupported
+        assert corrupt_fs in coresys.resolution.issues
+        assert coresys.core.healthy is False
tests/resolution/fixup/test_system_execute_integrity.py (new file, 69 lines)
@@ -0,0 +1,69 @@
+"""Test evaluation base."""
+
+# pylint: disable=import-error,protected-access
+from datetime import timedelta
+from unittest.mock import AsyncMock
+
+import time_machine
+
+from supervisor.coresys import CoreSys
+from supervisor.resolution.const import ContextType, IssueType, SuggestionType
+from supervisor.resolution.data import Issue, Suggestion
+from supervisor.resolution.fixups.system_execute_integrity import (
+    FixupSystemExecuteIntegrity,
+)
+from supervisor.security.const import ContentTrustResult, IntegrityResult
+from supervisor.utils.dt import utcnow
+
+
+async def test_fixup(coresys: CoreSys, supervisor_internet: AsyncMock):
+    """Test fixup."""
+    system_execute_integrity = FixupSystemExecuteIntegrity(coresys)
+
+    assert system_execute_integrity.auto
+
+    coresys.resolution.add_suggestion(
+        Suggestion(SuggestionType.EXECUTE_INTEGRITY, ContextType.SYSTEM)
+    )
+    coresys.resolution.add_issue(Issue(IssueType.TRUST, ContextType.SYSTEM))
+
+    coresys.security.integrity_check = AsyncMock(
+        return_value=IntegrityResult(
+            ContentTrustResult.PASS,
+            ContentTrustResult.PASS,
+            {"audio": ContentTrustResult.PASS},
+        )
+    )
+
+    await system_execute_integrity()
+
+    assert coresys.security.integrity_check.called
+    assert len(coresys.resolution.suggestions) == 0
+    assert len(coresys.resolution.issues) == 0
+
+
+async def test_fixup_error(coresys: CoreSys, supervisor_internet: AsyncMock):
+    """Test fixup."""
+    system_execute_integrity = FixupSystemExecuteIntegrity(coresys)
+
+    assert system_execute_integrity.auto
+
+    coresys.resolution.add_suggestion(
+        Suggestion(SuggestionType.EXECUTE_INTEGRITY, ContextType.SYSTEM)
+    )
+    coresys.resolution.add_issue(Issue(IssueType.TRUST, ContextType.SYSTEM))
+
+    coresys.security.integrity_check = AsyncMock(
+        return_value=IntegrityResult(
+            ContentTrustResult.FAILED,
+            ContentTrustResult.PASS,
+            {"audio": ContentTrustResult.PASS},
+        )
+    )
+
+    with time_machine.travel(utcnow() + timedelta(hours=24)):
+        await system_execute_integrity()
+
+    assert coresys.security.integrity_check.called
+    assert len(coresys.resolution.suggestions) == 1
+    assert len(coresys.resolution.issues) == 1
@@ -1,15 +1,21 @@
 """Test evaluations."""

-from unittest.mock import Mock
+from unittest.mock import Mock, patch

 from supervisor.const import CoreState
 from supervisor.coresys import CoreSys
+from supervisor.utils import check_exception_chain


 async def test_evaluate_system_error(coresys: CoreSys, capture_exception: Mock):
     """Test error while evaluating system."""
     await coresys.core.set_state(CoreState.RUNNING)

-    await coresys.resolution.evaluate.evaluate_system()
+    with patch(
+        "supervisor.resolution.evaluations.source_mods.calc_checksum_path_sourcecode",
+        side_effect=RuntimeError,
+    ):
+        await coresys.resolution.evaluate.evaluate_system()

-    capture_exception.assert_not_called()
+    capture_exception.assert_called_once()
+    assert check_exception_chain(capture_exception.call_args[0][0], RuntimeError)
127
tests/security/test_module.py
Normal file
127
tests/security/test_module.py
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
"""Testing handling with Security."""
|
||||||
|
|
||||||
|
from unittest.mock import AsyncMock, patch
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from supervisor.coresys import CoreSys
|
||||||
|
from supervisor.exceptions import CodeNotaryError, CodeNotaryUntrusted
|
||||||
|
from supervisor.security.const import ContentTrustResult
|
||||||
|
|
||||||
|
|
||||||
|
async def test_content_trust(coresys: CoreSys):
|
||||||
|
"""Test Content-Trust."""
|
||||||
|
|
||||||
|
with patch("supervisor.security.module.cas_validate", AsyncMock()) as cas_validate:
|
||||||
|
await coresys.security.verify_content("test@mail.com", "ffffffffffffff")
|
||||||
|
assert cas_validate.called
|
||||||
|
cas_validate.assert_called_once_with("test@mail.com", "ffffffffffffff")
|
||||||
|
|
||||||
|
with patch(
|
||||||
|
"supervisor.security.module.cas_validate", AsyncMock()
|
||||||
|
) as cas_validate:
|
||||||
|
await coresys.security.verify_own_content("ffffffffffffff")
|
||||||
|
assert cas_validate.called
|
||||||
|
cas_validate.assert_called_once_with(
|
||||||
|
"notary@home-assistant.io", "ffffffffffffff"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def test_disabled_content_trust(coresys: CoreSys):
|
||||||
|
"""Test Content-Trust."""
|
||||||
|
coresys.security.content_trust = False
|
||||||
|
|
||||||
|
with patch("supervisor.security.module.cas_validate", AsyncMock()) as cas_validate:
|
||||||
|
await coresys.security.verify_content("test@mail.com", "ffffffffffffff")
|
||||||
|
assert not cas_validate.called
|
||||||
|
|
||||||
|
with patch("supervisor.security.module.cas_validate", AsyncMock()) as cas_validate:
|
||||||
|
await coresys.security.verify_own_content("ffffffffffffff")
|
||||||
|
assert not cas_validate.called
|
||||||
|
|
||||||
|
|
||||||
|
async def test_force_content_trust(coresys: CoreSys):
|
||||||
|
"""Force Content-Trust tests."""
|
||||||
|
|
    with patch(
        "supervisor.security.module.cas_validate",
        AsyncMock(side_effect=CodeNotaryError),
    ) as cas_validate:
        await coresys.security.verify_content("test@mail.com", "ffffffffffffff")
        assert cas_validate.called
        cas_validate.assert_called_once_with("test@mail.com", "ffffffffffffff")

    coresys.security.force = True

    with (
        patch(
            "supervisor.security.module.cas_validate",
            AsyncMock(side_effect=CodeNotaryError),
        ) as cas_validate,
        pytest.raises(CodeNotaryError),
    ):
        await coresys.security.verify_content("test@mail.com", "ffffffffffffff")


async def test_integrity_check_disabled(coresys: CoreSys):
    """Test integrity check with disabled content trust."""
    coresys.security.content_trust = False

    result = await coresys.security.integrity_check.__wrapped__(coresys.security)

    assert result.core == ContentTrustResult.UNTESTED
    assert result.supervisor == ContentTrustResult.UNTESTED


async def test_integrity_check(coresys: CoreSys, install_addon_ssh):
    """Test integrity check with content trust."""
    coresys.homeassistant.core.check_trust = AsyncMock()
    coresys.supervisor.check_trust = AsyncMock()
    install_addon_ssh.check_trust = AsyncMock()
    install_addon_ssh.data["codenotary"] = "test@example.com"

    result = await coresys.security.integrity_check.__wrapped__(coresys.security)

    assert result.core == ContentTrustResult.PASS
    assert result.supervisor == ContentTrustResult.PASS
    assert result.addons[install_addon_ssh.slug] == ContentTrustResult.PASS


async def test_integrity_check_error(coresys: CoreSys, install_addon_ssh):
    """Test integrity check with content trust issues."""
    coresys.homeassistant.core.check_trust = AsyncMock(side_effect=CodeNotaryUntrusted)
    coresys.supervisor.check_trust = AsyncMock(side_effect=CodeNotaryUntrusted)
    install_addon_ssh.check_trust = AsyncMock(side_effect=CodeNotaryUntrusted)
    install_addon_ssh.data["codenotary"] = "test@example.com"

    result = await coresys.security.integrity_check.__wrapped__(coresys.security)

    assert result.core == ContentTrustResult.ERROR
    assert result.supervisor == ContentTrustResult.ERROR
    assert result.addons[install_addon_ssh.slug] == ContentTrustResult.ERROR


async def test_integrity_check_failed(coresys: CoreSys, install_addon_ssh):
    """Test integrity check with content trust failed."""
    coresys.homeassistant.core.check_trust = AsyncMock(side_effect=CodeNotaryError)
    coresys.supervisor.check_trust = AsyncMock(side_effect=CodeNotaryError)
    install_addon_ssh.check_trust = AsyncMock(side_effect=CodeNotaryError)
    install_addon_ssh.data["codenotary"] = "test@example.com"

    result = await coresys.security.integrity_check.__wrapped__(coresys.security)

    assert result.core == ContentTrustResult.FAILED
    assert result.supervisor == ContentTrustResult.FAILED
    assert result.addons[install_addon_ssh.slug] == ContentTrustResult.FAILED


async def test_integrity_check_addon(coresys: CoreSys, install_addon_ssh):
    """Test integrity check with content trust but no signed add-ons."""
    coresys.homeassistant.core.check_trust = AsyncMock()
    coresys.supervisor.check_trust = AsyncMock()

    result = await coresys.security.integrity_check.__wrapped__(coresys.security)

    assert result.core == ContentTrustResult.PASS
    assert result.supervisor == ContentTrustResult.PASS
    assert result.addons[install_addon_ssh.slug] == ContentTrustResult.UNTESTED
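
These integrity-check tests call integrity_check.__wrapped__(coresys.security) instead of the decorated method. Assuming Supervisor's Job decorator wraps the method with functools.wraps (which is what makes the explicit self argument necessary), __wrapped__ hands back the original undecorated coroutine, so the job's conditions and throttling are bypassed. A minimal, generic sketch of that mechanism in plain Python, not Supervisor code:

from functools import wraps


def job(method):
    """Stand-in decorator in the style of a job wrapper (hypothetical)."""

    @wraps(method)  # exposes the original function as wrapper.__wrapped__
    async def wrapper(self, *args, **kwargs):
        # job conditions / throttling would run here before the real method
        return await method(self, *args, **kwargs)

    return wrapper


class Security:
    @job
    async def integrity_check(self):
        return "checked"


# Same call shape as the tests: grab the raw coroutine and pass self explicitly.
# await Security().integrity_check.__wrapped__(Security())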

@@ -86,7 +86,7 @@ async def test_os_update_path(
    """Test OS upgrade path across major versions."""
    coresys.os._board = "rpi4"  # pylint: disable=protected-access
    coresys.os._version = AwesomeVersion(version)  # pylint: disable=protected-access
    # CodeNotary verification removed
    with patch.object(type(coresys.security), "verify_own_content"):
        await coresys.updater.fetch_data()

    assert coresys.updater.version_hassos == AwesomeVersion(expected)

@@ -105,6 +105,7 @@ async def test_delayed_fetch_for_connectivity(
        load_binary_fixture("version_stable.json")
    )
    coresys.websession.head = AsyncMock()
    coresys.security.verify_own_content = AsyncMock()

    # Network connectivity change causes a series of async tasks to eventually do a version fetch
    # Rather than use some kind of sleep loop, set up listener for start of fetch data job
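
The comment above describes the test's approach: wait for a job-start event instead of polling in a sleep loop. A minimal sketch of that pattern with plain asyncio, where register_listener is a hypothetical stand-in for whatever event-registration hook the test suite uses, not the actual Supervisor API:

import asyncio


async def wait_for_job_start(register_listener, timeout: float = 5.0) -> None:
    """Block until the fetch-data job signals it has started, without sleep loops."""
    started = asyncio.Event()
    register_listener(lambda *_: started.set())  # callback fired when the job starts
    await asyncio.wait_for(started.wait(), timeout)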

128 tests/utils/test_codenotary.py Normal file
@@ -0,0 +1,128 @@
"""Test CodeNotary."""

from __future__ import annotations

from dataclasses import dataclass
from unittest.mock import AsyncMock, Mock, patch

import pytest

from supervisor.exceptions import (
    CodeNotaryBackendError,
    CodeNotaryError,
    CodeNotaryUntrusted,
)
from supervisor.utils.codenotary import calc_checksum, cas_validate

pytest.skip("code notary has been disabled due to issues", allow_module_level=True)


@dataclass
class SubprocessResponse:
    """Class for specifying subprocess exec response."""

    returncode: int = 0
    data: str = ""
    error: str | None = None
    exception: Exception | None = None


@pytest.fixture(name="subprocess_exec")
def fixture_subprocess_exec(request):
    """Mock subprocess exec with specific return."""
    response = request.param
    if response.exception:
        communicate_return = AsyncMock(side_effect=response.exception)
    else:
        communicate_return = AsyncMock(return_value=(response.data, response.error))

    exec_return = Mock(returncode=response.returncode, communicate=communicate_return)

    with patch(
        "supervisor.utils.codenotary.asyncio.create_subprocess_exec",
        return_value=exec_return,
    ) as subprocess_exec:
        yield subprocess_exec
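
# Illustrative sketch (editor's addition, not part of this file): the tests below pass
# SubprocessResponse objects through @pytest.mark.parametrize with indirect=True, so
# pytest hands each value to the fixture above as request.param instead of passing it
# straight to the test function. A generic example of that mechanism, with hypothetical
# names (relies on the module's existing "import pytest"):


@pytest.fixture(name="my_param")
def fixture_my_param(request):
    return request.param  # receives the parametrized value


@pytest.mark.parametrize("my_param", ["value"], indirect=True)
def test_indirect_example(my_param):
    assert my_param == "value"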


def test_checksum_calc():
    """Calc checksum as test."""
    assert calc_checksum("test") == calc_checksum(b"test")
    assert (
        calc_checksum("test")
        == "9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"
    )
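
# Illustrative sketch (editor's addition, not part of this file): the expected digest
# above equals hashlib.sha256(b"test").hexdigest(), which suggests calc_checksum is a
# plain SHA-256 hex digest over UTF-8-encoded input. That is an assumption about its
# implementation, not something this diff confirms.
import hashlib


def calc_checksum_sketch(data: str | bytes) -> str:
    """Hypothetical equivalent of calc_checksum under the SHA-256 assumption."""
    if isinstance(data, str):
        data = data.encode()
    return hashlib.sha256(data).hexdigest()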


async def test_valid_checksum():
    """Test a valid authorization."""
    await cas_validate(
        "notary@home-assistant.io",
        "4434a33ff9c695e870bc5bbe04230ea3361ecf4c129eb06133dd1373975a43f0",
    )


async def test_invalid_checksum():
    """Test an invalid authorization."""
    with pytest.raises(CodeNotaryUntrusted):
        await cas_validate(
            "notary@home-assistant.io",
            "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
        )


@pytest.mark.parametrize(
    "subprocess_exec",
    [SubprocessResponse(returncode=1, error=b"x is not notarized")],
    indirect=True,
)
async def test_not_notarized_error(subprocess_exec):
    """Test received a not notarized error response from command."""
    with pytest.raises(CodeNotaryUntrusted):
        await cas_validate(
            "notary@home-assistant.io",
            "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
        )


@pytest.mark.parametrize(
    "subprocess_exec",
    [
        SubprocessResponse(returncode=1, error=b"test"),
        SubprocessResponse(returncode=0, data='{"error":"asn1: structure error"}'),
        SubprocessResponse(returncode=1, error="test".encode("utf-16")),
    ],
    indirect=True,
)
async def test_cas_backend_error(subprocess_exec):
    """Test backend error executing cas command."""
    with pytest.raises(CodeNotaryBackendError):
        await cas_validate(
            "notary@home-assistant.io",
            "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
        )


@pytest.mark.parametrize(
    "subprocess_exec",
    [SubprocessResponse(returncode=0, data='{"status":1}')],
    indirect=True,
)
async def test_cas_notarized_untrusted(subprocess_exec):
    """Test cas found notarized but untrusted content."""
    with pytest.raises(CodeNotaryUntrusted):
        await cas_validate(
            "notary@home-assistant.io",
            "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
        )


@pytest.mark.parametrize(
    "subprocess_exec", [SubprocessResponse(exception=OSError())], indirect=True
)
async def test_cas_exec_os_error(subprocess_exec):
    """Test OS error attempting to execute cas command."""
    with pytest.raises(CodeNotaryError):
        await cas_validate(
            "notary@home-assistant.io",
            "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff",
        )