mirror of https://github.com/home-assistant/supervisor.git
synced 2025-10-04 01:09:31 +00:00

Compare commits
8 commits: 2025.09.3 ... remove-dep
SHA1:
7031a58083
3c0e62f6ba
78a2e15ebb
f3e1e0f423
5779b567f1
3c5f4920a0
64f94a159c
ab3b147876
.github/workflows/builder.yml (vendored, 2 lines changed)
@@ -150,7 +150,7 @@ jobs:
- name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true'
uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -8,7 +8,7 @@ brotli==1.1.0
ciso8601==2.3.3
colorlog==6.9.0
cpe==1.3.1
cryptography==46.0.1
cryptography==46.0.2
debugpy==1.8.17
deepmerge==2.0
dirhash==0.5.0
@@ -72,7 +72,6 @@ from ..exceptions import (
AddonsJobError,
ConfigurationFileError,
DockerError,
HomeAssistantAPIError,
HostAppArmorError,
)
from ..hardware.data import Device

@@ -842,8 +841,7 @@ class Addon(AddonModel):
# Cleanup Ingress panel from sidebar
if self.ingress_panel:
self.ingress_panel = False
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(self)
await self.sys_ingress.update_hass_panel(self)

# Cleanup Ingress dynamic port assignment
need_ingress_token_cleanup = False
@@ -20,7 +20,6 @@ from ..exceptions import (
CoreDNSError,
DockerError,
HassioError,
HomeAssistantAPIError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType

@@ -351,8 +350,7 @@ class AddonManager(CoreSysAttributes):
# Update ingress
if had_ingress != addon.ingress_panel:
await self.sys_ingress.reload()
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
await self.sys_ingress.update_hass_panel(addon)

return wait_for_start
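Note: both call sites above drop their suppress(HomeAssistantAPIError) guard because update_hass_panel now catches and logs HomeAssistantAPIError itself (see the Ingress hunk further down). A minimal sketch of the before/after pattern at a call site:

from contextlib import suppress

# Before: every caller guarded against HomeAssistantAPIError on its own.
with suppress(HomeAssistantAPIError):
    await self.sys_ingress.update_hass_panel(addon)

# After: update_hass_panel logs the failure internally, so the call
# site collapses to a single awaited line.
await self.sys_ingress.update_hass_panel(addon)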
@@ -77,10 +77,10 @@ class APIProxy(CoreSysAttributes):
yield resp
return

except HomeAssistantAuthError:
_LOGGER.error("Authenticate error on API for request %s", path)
except HomeAssistantAPIError:
_LOGGER.error("Error on API for request %s", path)
except HomeAssistantAuthError as err:
_LOGGER.error("Authenticate error on API for request %s: %s", path, err)
except HomeAssistantAPIError as err:
_LOGGER.error("Error on API for request %s: %s", path, err)
except aiohttp.ClientError as err:
_LOGGER.error("Client error on API %s request %s", path, err)
except TimeoutError:
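Note: the recurring logging change in this branch is to bind the exception with "as err" and append it to the message, so the cause now carried by HomeAssistantAPIError (see the make_request hunk below) reaches the log. The shape of the pattern:

try:
    ...  # call into the Home Assistant Core API
except HomeAssistantAPIError as err:
    # %s on the exception renders the message it was constructed with
    _LOGGER.error("Error on API for request %s: %s", path, err)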
@@ -108,8 +108,7 @@ class APISupervisor(CoreSysAttributes):
ATTR_AUTO_UPDATE: self.sys_updater.auto_update,
ATTR_DETECT_BLOCKING_IO: BlockBusterManager.is_enabled(),
ATTR_COUNTRY: self.sys_config.country,
# Depricated
ATTR_WAIT_BOOT: self.sys_config.wait_boot,
# Deprecated
ATTR_ADDONS: [
{
ATTR_NAME: addon.name,

@@ -123,10 +122,6 @@ class APISupervisor(CoreSysAttributes):
}
for addon in self.sys_addons.local.values()
],
ATTR_ADDONS_REPOSITORIES: [
{ATTR_NAME: store.name, ATTR_SLUG: store.slug}
for store in self.sys_store.all
],
}

@api_process

@@ -182,20 +177,10 @@ class APISupervisor(CoreSysAttributes):
self.sys_config.detect_blocking_io = False
BlockBusterManager.deactivate()

# Deprecated
if ATTR_WAIT_BOOT in body:
self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]

# Save changes before processing addons in case of errors
await self.sys_updater.save_data()
await self.sys_config.save_data()

# Remove: 2022.9
if ATTR_ADDONS_REPOSITORIES in body:
await asyncio.shield(
self.sys_store.update_repositories(set(body[ATTR_ADDONS_REPOSITORIES]))
)

await self.sys_resolution.evaluate.evaluate_system()

@api_process
@@ -132,8 +132,8 @@ class Auth(FileConfiguration, CoreSysAttributes):
_LOGGER.warning("Unauthorized login for '%s'", username)
await self._dismatch_cache(username, password)
return False
except HomeAssistantAPIError:
_LOGGER.error("Can't request auth on Home Assistant!")
except HomeAssistantAPIError as err:
_LOGGER.error("Can't request auth on Home Assistant: %s", err)
finally:
self._running.pop(username, None)

@@ -152,8 +152,8 @@ class Auth(FileConfiguration, CoreSysAttributes):
return

_LOGGER.warning("The user '%s' is not registered", username)
except HomeAssistantAPIError:
_LOGGER.error("Can't request password reset on Home Assistant!")
except HomeAssistantAPIError as err:
_LOGGER.error("Can't request password reset on Home Assistant: %s", err)

raise AuthPasswordResetError()
@@ -2,7 +2,6 @@

from __future__ import annotations

from contextlib import suppress
import logging
from typing import TYPE_CHECKING, Any
from uuid import uuid4

@@ -119,7 +118,7 @@ class Discovery(CoreSysAttributes, FileConfiguration):
data = attr.asdict(message)
data.pop(ATTR_CONFIG)

with suppress(HomeAssistantAPIError):
try:
async with self.sys_homeassistant.api.make_request(
command,
f"api/hassio_push/discovery/{message.uuid}",

@@ -128,5 +127,5 @@ class Discovery(CoreSysAttributes, FileConfiguration):
):
_LOGGER.info("Discovery %s message send", message.uuid)
return

_LOGGER.warning("Discovery %s message fail", message.uuid)
except HomeAssistantAPIError as err:
_LOGGER.error("Discovery %s message failed: %s", message.uuid, err)
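Note: suppress(HomeAssistantAPIError) becomes an explicit try/except so a failed discovery push is logged with its cause instead of vanishing silently. A minimal sketch of the two idioms, assuming a send() coroutine that may raise HomeAssistantAPIError:

from contextlib import suppress

# suppress(): the exception is swallowed and leaves no trace
with suppress(HomeAssistantAPIError):
    await send()

# try/except: same control flow on failure, but the cause is logged
try:
    await send()
except HomeAssistantAPIError as err:
    _LOGGER.error("Request failed: %s", err)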
@@ -326,11 +326,19 @@ class DockerAPI(CoreSysAttributes):
if name:
cidfile_path = self.coresys.config.path_cid_files / f"{name}.cid"

# Remove the file if it exists e.g. as a leftover from unclean shutdown
if cidfile_path.is_file():
with suppress(OSError):
# Remove the file/directory if it exists e.g. as a leftover from unclean shutdown
# Note: Can be a directory if Docker auto-started container with restart policy
# before Supervisor could write the CID file
with suppress(OSError):
if cidfile_path.is_dir():
cidfile_path.rmdir()
elif cidfile_path.is_file():
cidfile_path.unlink(missing_ok=True)

# Create empty CID file before adding it to volumes to prevent Docker
# from creating it as a directory if container auto-starts
cidfile_path.touch()

extern_cidfile_path = (
self.coresys.config.path_extern_cid_files / f"{name}.cid"
)
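Note: the cleanup now handles a leftover directory as well as a file. If Docker auto-starts a container (restart policy) before Supervisor has written the CID file, Docker creates the bind-mount source as a directory. A standalone sketch of the resulting logic (reset_cidfile is a hypothetical helper name, not from the changeset):

from contextlib import suppress
from pathlib import Path

def reset_cidfile(cidfile_path: Path) -> None:
    """Remove any leftover CID path, then pre-create it as a file."""
    with suppress(OSError):
        if cidfile_path.is_dir():
            cidfile_path.rmdir()  # directory left by a Docker auto-start
        elif cidfile_path.is_file():
            cidfile_path.unlink(missing_ok=True)
    # Pre-creating an empty file means a later bind mount sees a file,
    # never a directory
    cidfile_path.touch()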
@@ -2,7 +2,7 @@

import asyncio
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager, suppress
from contextlib import asynccontextmanager
from dataclasses import dataclass
from datetime import UTC, datetime, timedelta
import logging

@@ -15,9 +15,7 @@ from multidict import MultiMapping

from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import HomeAssistantAPIError, HomeAssistantAuthError
from ..jobs.const import JobConcurrency
from ..jobs.decorator import Job
from ..utils import check_port, version_is_new_enough
from ..utils import version_is_new_enough
from .const import LANDINGPAGE

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -43,14 +41,19 @@ class HomeAssistantAPI(CoreSysAttributes):
# We don't persist access tokens. Instead we fetch new ones when needed
self.access_token: str | None = None
self._access_token_expires: datetime | None = None
self._token_lock: asyncio.Lock = asyncio.Lock()

@Job(
name="home_assistant_api_ensure_access_token",
internal=True,
concurrency=JobConcurrency.QUEUE,
)
async def ensure_access_token(self) -> None:
"""Ensure there is an access token."""
"""Ensure there is a valid access token.

Raises:
HomeAssistantAuthError: When we cannot get a valid token
aiohttp.ClientError: On network or connection errors
TimeoutError: On request timeouts

"""
# Fast path check without lock (avoid unnecessary locking
# for the majority of calls).
if (
self.access_token
and self._access_token_expires

@@ -58,7 +61,15 @@ class HomeAssistantAPI(CoreSysAttributes):
):
return

with suppress(asyncio.TimeoutError, aiohttp.ClientError):
async with self._token_lock:
# Double-check after acquiring lock (avoid race condition)
if (
self.access_token
and self._access_token_expires
and self._access_token_expires > datetime.now(tz=UTC)
):
return

async with self.sys_websession.post(
f"{self.sys_homeassistant.api_url}/auth/token",
timeout=aiohttp.ClientTimeout(total=30),
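Note: token refresh moves from a queued Job (the removed @Job decorator and JobConcurrency import) to a plain asyncio.Lock with a double-checked fast path: expiry is tested once without the lock, covering the common case cheaply, and again after acquiring it so a task that queued behind a refresh does not refresh a second time. A skeleton of the pattern, with _refresh() standing in for the real POST to /auth/token:

import asyncio
from datetime import UTC, datetime

class TokenCache:
    """Minimal sketch of double-checked token refresh, not the real class."""

    def __init__(self) -> None:
        self.token: str | None = None
        self.expires: datetime | None = None
        self._lock = asyncio.Lock()

    def _valid(self) -> bool:
        return bool(self.token and self.expires and self.expires > datetime.now(tz=UTC))

    async def ensure(self) -> None:
        if self._valid():  # fast path: no locking for most calls
            return
        async with self._lock:
            if self._valid():  # double-check: refreshed while we waited
                return
            await self._refresh()

    async def _refresh(self) -> None:
        ...  # fetch a new token and set self.token / self.expires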
@@ -92,7 +103,36 @@ class HomeAssistantAPI(CoreSysAttributes):
params: MultiMapping[str] | None = None,
headers: dict[str, str] | None = None,
) -> AsyncIterator[aiohttp.ClientResponse]:
"""Async context manager to make a request with right auth."""
"""Async context manager to make authenticated requests to Home Assistant API.

This context manager handles authentication token management automatically,
including token refresh on 401 responses. It yields the HTTP response
for the caller to handle.

Error Handling:
- HTTP error status codes (4xx, 5xx) are preserved in the response
- Authentication is handled transparently with one retry on 401
- Network/connection failures raise HomeAssistantAPIError
- No logging is performed - callers should handle logging as needed

Args:
method: HTTP method (get, post, etc.)
path: API path relative to Home Assistant base URL
json: JSON data to send in request body
content_type: Override content-type header
data: Raw data to send in request body
timeout: Request timeout in seconds
params: URL query parameters
headers: Additional HTTP headers

Yields:
aiohttp.ClientResponse: The HTTP response object

Raises:
HomeAssistantAPIError: When request cannot be completed due to
network errors, timeouts, or connection failures

"""
url = f"{self.sys_homeassistant.api_url}/{path}"
headers = headers or {}

@@ -101,10 +141,9 @@ class HomeAssistantAPI(CoreSysAttributes):
headers[hdrs.CONTENT_TYPE] = content_type

for _ in (1, 2):
await self.ensure_access_token()
headers[hdrs.AUTHORIZATION] = f"Bearer {self.access_token}"

try:
await self.ensure_access_token()
headers[hdrs.AUTHORIZATION] = f"Bearer {self.access_token}"
async with getattr(self.sys_websession, method)(
url,
data=data,

@@ -120,23 +159,19 @@ class HomeAssistantAPI(CoreSysAttributes):
continue
yield resp
return
except TimeoutError:
_LOGGER.error("Timeout on call %s.", url)
break
except TimeoutError as err:
_LOGGER.debug("Timeout on call %s.", url)
raise HomeAssistantAPIError(str(err)) from err
except aiohttp.ClientError as err:
_LOGGER.error("Error on call %s: %s", url, err)
break

raise HomeAssistantAPIError()
_LOGGER.debug("Error on call %s: %s", url, err)
raise HomeAssistantAPIError(str(err)) from err

async def _get_json(self, path: str) -> dict[str, Any]:
"""Return Home Assistant get API."""
async with self.make_request("get", path) as resp:
if resp.status in (200, 201):
return await resp.json()
else:
_LOGGER.debug("Home Assistant API return: %d", resp.status)
raise HomeAssistantAPIError()
raise HomeAssistantAPIError(f"Home Assistant Core API return {resp.status}")
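Note: make_request now converts transport failures (timeouts, aiohttp.ClientError) into HomeAssistantAPIError carrying the original message, and logs at debug rather than error, leaving logging policy to callers as the new docstring states. A caller sketch in the spirit of _get_json (fetch_states is a hypothetical name; api/states is a standard Core endpoint):

async def fetch_states(self) -> Any:
    # HTTP error statuses stay on resp; transport errors raise
    async with self.make_request("get", "api/states") as resp:
        if resp.status in (200, 201):
            return await resp.json()
        raise HomeAssistantAPIError(f"Home Assistant Core API return {resp.status}")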
async def get_config(self) -> dict[str, Any]:
"""Return Home Assistant config."""

@@ -155,15 +190,8 @@ class HomeAssistantAPI(CoreSysAttributes):
):
return None

# Check if port is up
if not await check_port(
self.sys_homeassistant.ip_address,
self.sys_homeassistant.api_port,
):
return None

# Check if API is up
with suppress(HomeAssistantAPIError):
try:
# get_core_state is available since 2023.8.0 and preferred
# since it is significantly faster than get_config because
# it does not require serializing the entire config

@@ -181,6 +209,8 @@ class HomeAssistantAPI(CoreSysAttributes):
migrating = recorder_state.get("migration_in_progress", False)
live_migration = recorder_state.get("migration_is_live", False)
return APIState(state, migrating and not live_migration)
except HomeAssistantAPIError as err:
_LOGGER.debug("Can't connect to Home Assistant API: %s", err)

return None
@@ -3,6 +3,7 @@

from __future__ import annotations

import asyncio
from contextlib import suppress
import logging
from typing import Any, TypeVar, cast

@@ -202,7 +203,8 @@ class HomeAssistantWebSocket(CoreSysAttributes):
if self._client is not None and self._client.connected:
return self._client

await self.sys_homeassistant.api.ensure_access_token()
with suppress(asyncio.TimeoutError, aiohttp.ClientError):
await self.sys_homeassistant.api.ensure_access_token()
client = await WSClient.connect_with_auth(
self.sys_websession,
self.sys_loop,
@@ -15,6 +15,7 @@ from .const import (
IngressSessionDataDict,
)
from .coresys import CoreSys, CoreSysAttributes
from .exceptions import HomeAssistantAPIError
from .utils import check_port
from .utils.common import FileConfiguration
from .utils.dt import utc_from_timestamp, utcnow

@@ -191,12 +192,17 @@ class Ingress(FileConfiguration, CoreSysAttributes):

# Update UI
method = "post" if addon.ingress_panel else "delete"
async with self.sys_homeassistant.api.make_request(
method, f"api/hassio_push/panel/{addon.slug}"
) as resp:
if resp.status in (200, 201):
_LOGGER.info("Update Ingress as panel for %s", addon.slug)
else:
_LOGGER.warning(
"Fails Ingress panel for %s with %i", addon.slug, resp.status
)
try:
async with self.sys_homeassistant.api.make_request(
method, f"api/hassio_push/panel/{addon.slug}"
) as resp:
if resp.status in (200, 201):
_LOGGER.info("Update Ingress as panel for %s", addon.slug)
else:
_LOGGER.warning(
"Failed to update the Ingress panel for %s with %i",
addon.slug,
resp.status,
)
except HomeAssistantAPIError as err:
_LOGGER.error("Panel update request failed for %s: %s", addon.slug, err)
@@ -1,5 +1,7 @@

"""Supervisor job manager."""

from __future__ import annotations

import asyncio
from collections.abc import Callable, Coroutine, Generator
from contextlib import contextmanager, suppress

@@ -10,6 +12,7 @@ import logging
from typing import Any, Self
from uuid import uuid4

from attr.validators import gt, lt
from attrs import Attribute, define, field
from attrs.setters import convert as attr_convert, frozen, validate as attr_validate
from attrs.validators import ge, le
@@ -47,13 +50,13 @@ def _remove_current_job(context: Context) -> Context:
return context

def _invalid_if_done(instance: "SupervisorJob", *_) -> None:
def _invalid_if_done(instance: SupervisorJob, *_) -> None:
"""Validate that job is not done."""
if instance.done:
raise ValueError("Cannot update a job that is done")

def _on_change(instance: "SupervisorJob", attribute: Attribute, value: Any) -> Any:
def _on_change(instance: SupervisorJob, attribute: Attribute, value: Any) -> Any:
"""Forward a change to a field on to the listener if defined."""
value = attr_convert(instance, attribute, value)
value = attr_validate(instance, attribute, value)

@@ -62,12 +65,34 @@ def _on_change(instance: SupervisorJob, attribute: Attribute, value: Any) -> Any:
return value

def _invalid_if_started(instance: "SupervisorJob", *_) -> None:
def _invalid_if_started(instance: SupervisorJob, *_) -> None:
"""Validate that job has not been started."""
if instance.done is not None:
raise ValueError("Field cannot be updated once job has started")

@define(frozen=True)
class ChildJobSyncFilter:
"""Filter to identify a child job to sync progress from."""

name: str
reference: str | None | type[DEFAULT] = DEFAULT
progress_allocation: float = field(default=1.0, validator=[gt(0.0), le(1.0)])

def matches(self, job: SupervisorJob) -> bool:
"""Return true if job matches filter."""
return job.name == self.name and self.reference in (DEFAULT, job.reference)

@define(frozen=True)
class ParentJobSync:
"""Parent job sync details."""

uuid: str
starting_progress: float = field(validator=[ge(0.0), lt(100.0)])
progress_allocation: float = field(validator=[gt(0.0), le(1.0)])

@define
class SupervisorJobError:
"""Representation of an error occurring during a supervisor job."""
@@ -103,13 +128,15 @@ class SupervisorJob:
)
parent_id: str | None = field(factory=_CURRENT_JOB.get, on_setattr=frozen)
done: bool | None = field(init=False, default=None, on_setattr=_on_change)
on_change: Callable[["SupervisorJob", Attribute, Any], None] | None = None
on_change: Callable[[SupervisorJob, Attribute, Any], None] | None = None
internal: bool = field(default=False)
errors: list[SupervisorJobError] = field(
init=False, factory=list, on_setattr=_on_change
)
release_event: asyncio.Event | None = None
extra: dict[str, Any] | None = None
child_job_syncs: list[ChildJobSyncFilter] | None = None
parent_job_syncs: list[ParentJobSync] = field(init=False, factory=list)

def as_dict(self) -> dict[str, Any]:
"""Return dictionary representation."""
@@ -152,8 +179,14 @@ class SupervisorJob:
try:
token = _CURRENT_JOB.set(self.uuid)
yield self
# Cannot have an else without an except so we do nothing and re-raise
except: # noqa: TRY203
raise
else:
self.update(progress=100, done=True)
finally:
self.done = True
if not self.done:
self.done = True
if token:
_CURRENT_JOB.reset(token)

@@ -174,12 +207,14 @@ class SupervisorJob:
self.stage = stage
if extra != DEFAULT:
self.extra = extra

# Done has special event. use that to trigger on change if included
# If not then just use any other field to trigger
self.on_change = on_change
if done is not None:
self.done = done

self.on_change = on_change
# Just triggers the normal on change
self.reference = self.reference
else:
self.reference = self.reference
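Note: on a clean exit the context manager now reports completion through update(progress=100, done=True), a single change notification carrying both fields, while the finally block only marks done for the error path. The bare except/raise exists solely because Python forbids an else clause without an except. The shape of the construct:

try:
    yield self
except:  # re-raised untouched; only present so `else` is legal
    raise
else:
    self.update(progress=100, done=True)  # clean completion
finally:
    if not self.done:
        self.done = True  # error path still ends the job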
class JobManager(FileConfiguration, CoreSysAttributes):

@@ -225,16 +260,35 @@ class JobManager(FileConfiguration, CoreSysAttributes):
"""Return true if there is an active job for the current asyncio task."""
return _CURRENT_JOB.get() is not None

def _notify_on_job_change(
def _on_job_change(
self, job: SupervisorJob, attribute: Attribute, value: Any
) -> None:
"""Notify Home Assistant of a change to a job and bus on job start/end."""
"""Take on change actions such as notify home assistant and sync progress."""
# Job object will be before the change. Combine the change with current data
if attribute.name == "errors":
value = [err.as_dict() for err in value]
job_data = job.as_dict() | {attribute.name: value}

self.sys_homeassistant.websocket.supervisor_event(
WSEvent.JOB, job.as_dict() | {attribute.name: value}
)
# Notify Home Assistant of change if its not internal
if not job.internal:
self.sys_homeassistant.websocket.supervisor_event(WSEvent.JOB, job_data)

# If we have any parent job syncs, sync progress to them
for sync in job.parent_job_syncs:
try:
parent_job = self.get_job(sync.uuid)
except JobNotFound:
# Shouldn't happen but failure to find a parent for progress
# reporting shouldn't raise and break the active job
continue

progress = sync.starting_progress + (
sync.progress_allocation * job_data["progress"]
)
# Using max would always trigger on change even if progress was unchanged
# pylint: disable-next=R1731
if parent_job.progress < progress: # noqa: PLR1730
parent_job.progress = progress

if attribute.name == "done":
if value is False:
@@ -249,16 +303,41 @@ class JobManager(FileConfiguration, CoreSysAttributes):
initial_stage: str | None = None,
internal: bool = False,
parent_id: str | None = DEFAULT, # type: ignore
child_job_syncs: list[ChildJobSyncFilter] | None = None,
) -> SupervisorJob:
"""Create a new job."""
job = SupervisorJob(
name,
reference=reference,
stage=initial_stage,
on_change=None if internal else self._notify_on_job_change,
on_change=self._on_job_change,
internal=internal,
child_job_syncs=child_job_syncs,
**({} if parent_id == DEFAULT else {"parent_id": parent_id}), # type: ignore
)

# Shouldn't happen but inability to find a parent for progress reporting
# shouldn't raise and break the active job
with suppress(JobNotFound):
curr_parent = job
while curr_parent.parent_id:
curr_parent = self.get_job(curr_parent.parent_id)
if not curr_parent.child_job_syncs:
continue

# Break after first match at each parent as it doesn't make sense
# to match twice. But it could match multiple parents
for sync in curr_parent.child_job_syncs:
if sync.matches(job):
job.parent_job_syncs.append(
ParentJobSync(
curr_parent.uuid,
starting_progress=curr_parent.progress,
progress_allocation=sync.progress_allocation,
)
)
break

self._jobs[job.uuid] = job
return job
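Note: a child's progress maps onto its parent as the parent's progress when the child started plus the filter's allocation share of the child's progress, mirroring _on_job_change above. Worked through with two children each owning half of the parent's bar (the same numbers the new test at the bottom of this diff asserts):

def synced_progress(starting: float, allocation: float, child: float) -> float:
    # parent = progress when the child started + share * child progress
    return starting + allocation * child

assert synced_progress(0.0, 0.5, 50.0) == 25.0     # first child half done
assert synced_progress(50.0, 0.5, 50.0) == 75.0    # second child half done
assert synced_progress(50.0, 0.5, 100.0) == 100.0  # second child finished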
@@ -24,7 +24,7 @@ from ..resolution.const import (
UnsupportedReason,
)
from ..utils.sentry import async_capture_exception
from . import SupervisorJob
from . import ChildJobSyncFilter, SupervisorJob
from .const import JobConcurrency, JobCondition, JobThrottle
from .job_group import JobGroup
@@ -48,6 +48,7 @@ class Job(CoreSysAttributes):
| None = None,
throttle_max_calls: int | None = None,
internal: bool = False,
child_job_syncs: list[ChildJobSyncFilter] | None = None,
): # pylint: disable=too-many-positional-arguments
"""Initialize the Job decorator.

@@ -61,6 +62,7 @@ class Job(CoreSysAttributes):
throttle_period (timedelta | Callable | None): Throttle period as a timedelta or a callable returning a timedelta (for throttled jobs).
throttle_max_calls (int | None): Maximum number of calls allowed within the throttle period (for rate-limited jobs).
internal (bool): Whether the job is internal (not exposed through the Supervisor API). Defaults to False.
child_job_syncs (list[ChildJobSyncFilter] | None): Use if the job's progress should be kept in sync with progress of one or more of its child jobs.

Raises:
RuntimeError: If job name is not unique, or required throttle parameters are missing for the selected throttle policy.

@@ -80,6 +82,7 @@ class Job(CoreSysAttributes):
self._last_call: dict[str | None, datetime] = {}
self._rate_limited_calls: dict[str | None, list[datetime]] | None = None
self._internal = internal
self._child_job_syncs = child_job_syncs

self.concurrency = concurrency
self.throttle = throttle

@@ -258,6 +261,7 @@ class Job(CoreSysAttributes):
job = _job__use_existing
job.name = self.name
job.internal = self._internal
job.child_job_syncs = self._child_job_syncs
if job_group:
job.reference = job_group.job_reference
else:

@@ -265,6 +269,7 @@ class Job(CoreSysAttributes):
self.name,
job_group.job_reference if job_group else None,
internal=self._internal,
child_job_syncs=self._child_job_syncs,
)

try:
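Note: a usage sketch of the new decorator parameter, modeled on the test added at the end of this diff; all names here are illustrative, not from the changeset:

class ExampleTasks(CoreSysAttributes):
    @Job(
        name="example_parent",
        child_job_syncs=[
            # each named child drives half of the parent's progress bar
            ChildJobSyncFilter("example_child_a", progress_allocation=0.5),
            ChildJobSyncFilter("example_child_b", progress_allocation=0.5),
        ],
    )
    async def run(self):
        await self.child_a()
        await self.child_b()

    @Job(name="example_child_a", internal=True)
    async def child_a(self):
        self.sys_jobs.current.progress = 100

    @Job(name="example_child_b", internal=True)
    async def child_b(self):
        self.sys_jobs.current.progress = 100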
@@ -52,5 +52,5 @@ class ResolutionNotify(CoreSysAttributes):
_LOGGER.debug("Successfully created persistent_notification")
else:
_LOGGER.error("Can't create persistant notification")
except HomeAssistantAPIError:
_LOGGER.error("Can't create persistant notification")
except HomeAssistantAPIError as err:
_LOGGER.error("Can't create persistant notification: %s", err)
@@ -117,7 +117,7 @@ async def journal_logs_reader(
continue

# strip \n for simple fields before decoding
entries[field_name] = data[:-1].decode("utf-8")
entries[field_name] = data[:-1].decode("utf-8", errors="replace")

def _parse_boot_json(boot_json_bytes: bytes) -> tuple[int, str]:
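Note: with errors="replace", undecodable bytes become U+FFFD instead of raising UnicodeDecodeError and aborting the whole journal stream; the new tests at the end of this diff pin down exactly this behavior:

# 0xff can never appear in valid UTF-8
assert b"Hello, \xff world!".decode("utf-8", errors="replace") == "Hello, \ufffd world!"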
@@ -152,7 +152,7 @@ async def test_jobs_tree_representation(api_client: TestClient, coresys: CoreSys):
"name": "test_jobs_tree_alt",
"reference": None,
"uuid": ANY,
"progress": 0,
"progress": 100,
"stage": "end",
"done": True,
"child_jobs": [],

@@ -282,7 +282,7 @@ async def test_jobs_sorted(api_client: TestClient, coresys: CoreSys):
"name": "test_jobs_sorted_2",
"reference": None,
"uuid": ANY,
"progress": 0,
"progress": 100,
"stage": None,
"done": True,
"errors": [],

@@ -294,7 +294,7 @@ async def test_jobs_sorted(api_client: TestClient, coresys: CoreSys):
"name": "test_jobs_sorted_1",
"reference": None,
"uuid": ANY,
"progress": 0,
"progress": 100,
"stage": None,
"done": True,
"errors": [],

@@ -305,7 +305,7 @@ async def test_jobs_sorted(api_client: TestClient, coresys: CoreSys):
"name": "test_jobs_sorted_inner_1",
"reference": None,
"uuid": ANY,
"progress": 0,
"progress": 100,
"stage": None,
"done": True,
"errors": [],

@@ -317,7 +317,7 @@ async def test_jobs_sorted(api_client: TestClient, coresys: CoreSys):
"name": "test_jobs_sorted_inner_2",
"reference": None,
"uuid": ANY,
"progress": 0,
"progress": 100,
"stage": None,
"done": True,
"errors": [],
@@ -9,8 +9,7 @@ from blockbuster import BlockingError
import pytest

from supervisor.coresys import CoreSys
from supervisor.exceptions import HassioError, HostNotSupportedError, StoreGitError
from supervisor.store.repository import Repository
from supervisor.exceptions import HassioError, HostNotSupportedError

from tests.api import common_test_api_advanced_logs
from tests.dbus_service_mocks.base import DBusServiceMock
@@ -28,81 +27,6 @@ async def test_api_supervisor_options_debug(api_client: TestClient, coresys: CoreSys):
assert coresys.config.debug

async def test_api_supervisor_options_add_repository(
api_client: TestClient, coresys: CoreSys, supervisor_internet: AsyncMock
):
"""Test add a repository via POST /supervisor/options REST API."""
assert REPO_URL not in coresys.store.repository_urls

with (
patch("supervisor.store.repository.RepositoryGit.load", return_value=None),
patch("supervisor.store.repository.RepositoryGit.validate", return_value=True),
):
response = await api_client.post(
"/supervisor/options", json={"addons_repositories": [REPO_URL]}
)

assert response.status == 200
assert REPO_URL in coresys.store.repository_urls

async def test_api_supervisor_options_remove_repository(
api_client: TestClient, coresys: CoreSys, test_repository: Repository
):
"""Test remove a repository via POST /supervisor/options REST API."""
assert test_repository.source in coresys.store.repository_urls
assert test_repository.slug in coresys.store.repositories

response = await api_client.post(
"/supervisor/options", json={"addons_repositories": []}
)

assert response.status == 200
assert test_repository.source not in coresys.store.repository_urls
assert test_repository.slug not in coresys.store.repositories

@pytest.mark.parametrize("git_error", [None, StoreGitError()])
async def test_api_supervisor_options_repositories_skipped_on_error(
api_client: TestClient, coresys: CoreSys, git_error: StoreGitError
):
"""Test repositories skipped on error via POST /supervisor/options REST API."""
with (
patch("supervisor.store.repository.RepositoryGit.load", side_effect=git_error),
patch("supervisor.store.repository.RepositoryGit.validate", return_value=False),
patch("supervisor.store.repository.RepositoryCustom.remove"),
):
response = await api_client.post(
"/supervisor/options", json={"addons_repositories": [REPO_URL]}
)

assert response.status == 400
assert len(coresys.resolution.suggestions) == 0
assert REPO_URL not in coresys.store.repository_urls

async def test_api_supervisor_options_repo_error_with_config_change(
api_client: TestClient, coresys: CoreSys
):
"""Test config change with add repository error via POST /supervisor/options REST API."""
assert not coresys.config.debug

with patch(
"supervisor.store.repository.RepositoryGit.load", side_effect=StoreGitError()
):
response = await api_client.post(
"/supervisor/options",
json={"debug": True, "addons_repositories": [REPO_URL]},
)

assert response.status == 400
assert REPO_URL not in coresys.store.repository_urls

assert coresys.config.debug
coresys.updater.save_data.assert_called_once()
coresys.config.save_data.assert_called_once()

async def test_api_supervisor_options_auto_update(
api_client: TestClient, coresys: CoreSys
):
@@ -1110,6 +1110,7 @@ def _make_backup_message_for_assert(
reference: str,
stage: str | None,
done: bool = False,
progress: float = 0.0,
):
"""Make a backup message to use for assert test."""
return {
@@ -1120,7 +1121,7 @@
"name": f"backup_manager_{action}",
"reference": reference,
"uuid": ANY,
"progress": 0,
"progress": progress,
"stage": stage,
"done": done,
"parent_id": None,

@@ -1132,13 +1133,12 @@
}

@pytest.mark.usefixtures("tmp_supervisor_data", "path_extern")
async def test_backup_progress(
coresys: CoreSys,
install_addon_ssh: Addon,
container: MagicMock,
ha_ws_client: AsyncMock,
tmp_supervisor_data,
path_extern,
):
"""Test progress is tracked during backups."""
container.status = "running"

@@ -1182,7 +1182,10 @@
reference=full_backup.slug, stage="await_addon_restarts"
),
_make_backup_message_for_assert(
reference=full_backup.slug, stage="await_addon_restarts", done=True
reference=full_backup.slug,
stage="await_addon_restarts",
done=True,
progress=100,
),
]

@@ -1227,18 +1230,17 @@
reference=partial_backup.slug,
stage="finishing_file",
done=True,
progress=100,
),
]

@pytest.mark.usefixtures("supervisor_internet", "tmp_supervisor_data", "path_extern")
async def test_restore_progress(
coresys: CoreSys,
supervisor_internet,
install_addon_ssh: Addon,
container: MagicMock,
ha_ws_client: AsyncMock,
tmp_supervisor_data,
path_extern,
):
"""Test progress is tracked during backups."""
container.status = "running"

@@ -1320,6 +1322,7 @@
reference=full_backup.slug,
stage="await_home_assistant_restart",
done=True,
progress=100,
),
]

@@ -1358,6 +1361,7 @@
reference=folders_backup.slug,
stage="folders",
done=True,
progress=100,
),
]

@@ -1404,17 +1408,17 @@
reference=addon_backup.slug,
stage="addons",
done=True,
progress=100,
),
]

@pytest.mark.usefixtures("tmp_supervisor_data", "path_extern")
async def test_freeze_thaw(
coresys: CoreSys,
install_addon_ssh: Addon,
container: MagicMock,
ha_ws_client: AsyncMock,
tmp_supervisor_data,
path_extern,
):
"""Test manual freeze and thaw for external snapshots."""
container.status = "running"

@@ -1460,7 +1464,11 @@
action="thaw_all", reference=None, stage=None
),
_make_backup_message_for_assert(
action="freeze_all", reference=None, stage="addons", done=True
action="freeze_all",
reference=None,
stage="addons",
done=True,
progress=100,
),
]

@@ -1488,7 +1496,11 @@
action="thaw_all", reference=None, stage="addons"
),
_make_backup_message_for_assert(
action="thaw_all", reference=None, stage="addons", done=True
action="thaw_all",
reference=None,
stage="addons",
done=True,
progress=100,
),
]
@@ -318,7 +318,10 @@ def test_not_journald_addon(

async def test_addon_run_docker_error(
coresys: CoreSys, addonsdata_system: dict[str, Data], path_extern
coresys: CoreSys,
addonsdata_system: dict[str, Data],
path_extern,
tmp_supervisor_data: Path,
):
"""Test docker error when addon is run."""
await coresys.dbus.timedate.connect(coresys.dbus.bus)
@@ -1,6 +1,7 @@

"""Test Docker interface."""

import asyncio
from pathlib import Path
from typing import Any
from unittest.mock import ANY, AsyncMock, MagicMock, Mock, PropertyMock, call, patch
@@ -281,6 +282,7 @@ async def test_run_missing_image(
container: MagicMock,
capture_exception: Mock,
path_extern,
tmp_supervisor_data: Path,
):
"""Test run captures the exception when image is missing."""
coresys.docker.containers.create.side_effect = [NotFound("missing"), MagicMock()]
@@ -293,6 +293,8 @@ async def test_cidfile_cleanup_handles_oserror(
# Mock the containers.get method and cidfile cleanup to raise OSError
with (
patch.object(docker.containers, "get", return_value=mock_container),
patch("pathlib.Path.is_dir", return_value=False),
patch("pathlib.Path.is_file", return_value=True),
patch(
"pathlib.Path.unlink", side_effect=OSError("File not found")
) as mock_unlink,

@@ -306,3 +308,46 @@ async def test_cidfile_cleanup_handles_oserror(

# Verify cidfile cleanup was attempted
mock_unlink.assert_called_once_with(missing_ok=True)

async def test_run_container_with_leftover_cidfile_directory(
coresys: CoreSys, docker: DockerAPI, path_extern, tmp_supervisor_data
):
"""Test container creation removes leftover cidfile directory before creating new one.

This can happen when Docker auto-starts a container with restart policy
before Supervisor could write the CID file, causing Docker to create
the bind mount source as a directory.
"""
# Mock container
mock_container = MagicMock()
mock_container.id = "test_container_id_new"

container_name = "test_container"
cidfile_path = coresys.config.path_cid_files / f"{container_name}.cid"

# Create a leftover directory (simulating Docker's behavior)
cidfile_path.mkdir()
assert cidfile_path.is_dir()

# Mock container creation
with patch.object(
docker.containers, "create", return_value=mock_container
) as create_mock:
# Execute run with a container name
loop = asyncio.get_event_loop()
result = await loop.run_in_executor(
None,
lambda kwrgs: docker.run(**kwrgs),
{"image": "test_image", "tag": "latest", "name": container_name},
)

# Verify container was created
create_mock.assert_called_once()

# Verify new cidfile was written as a file (not directory)
assert cidfile_path.exists()
assert cidfile_path.is_file()
assert cidfile_path.read_text() == mock_container.id

assert result == mock_container
@@ -19,7 +19,7 @@ from supervisor.exceptions import (
)
from supervisor.host.const import HostFeature
from supervisor.host.manager import HostManager
from supervisor.jobs import JobSchedulerOptions, SupervisorJob
from supervisor.jobs import ChildJobSyncFilter, JobSchedulerOptions, SupervisorJob
from supervisor.jobs.const import JobConcurrency, JobThrottle
from supervisor.jobs.decorator import Job, JobCondition
from supervisor.jobs.job_group import JobGroup
@@ -1003,7 +1003,7 @@ async def test_internal_jobs_no_notify(coresys: CoreSys, ha_ws_client: AsyncMock):
"name": "test_internal_jobs_no_notify_default",
"reference": None,
"uuid": ANY,
"progress": 0,
"progress": 100,
"stage": None,
"done": True,
"parent_id": None,
@@ -1415,3 +1415,87 @@ async def test_core_supported(coresys: CoreSys, caplog: pytest.LogCaptureFixture):

coresys.jobs.ignore_conditions = [JobCondition.HOME_ASSISTANT_CORE_SUPPORTED]
assert await test.execute()

async def test_progress_syncing(coresys: CoreSys):
"""Test progress syncing from child jobs to parent."""
group_child_event = asyncio.Event()
child_event = asyncio.Event()
execute_event = asyncio.Event()
main_event = asyncio.Event()

class TestClassGroup(JobGroup):
"""Test class group."""

def __init__(self, coresys: CoreSys) -> None:
super().__init__(coresys, "test_class_group", "test")

@Job(name="test_progress_syncing_group_child", internal=True)
async def test_progress_syncing_group_child(self):
"""Test progress syncing group child."""
coresys.jobs.current.progress = 50
main_event.set()
await group_child_event.wait()
coresys.jobs.current.progress = 100

class TestClass:
"""Test class."""

def __init__(self, coresys: CoreSys):
"""Initialize the test class."""
self.coresys = coresys
self.test_group = TestClassGroup(coresys)

@Job(
name="test_progress_syncing_execute",
child_job_syncs=[
ChildJobSyncFilter(
"test_progress_syncing_child_execute", progress_allocation=0.5
),
ChildJobSyncFilter(
"test_progress_syncing_group_child",
reference="test",
progress_allocation=0.5,
),
],
)
async def test_progress_syncing_execute(self):
"""Test progress syncing execute."""
await self.test_progress_syncing_child_execute()
await self.test_group.test_progress_syncing_group_child()
main_event.set()
await execute_event.wait()

@Job(name="test_progress_syncing_child_execute", internal=True)
async def test_progress_syncing_child_execute(self):
"""Test progress syncing child execute."""
coresys.jobs.current.progress = 50
main_event.set()
await child_event.wait()
coresys.jobs.current.progress = 100

test = TestClass(coresys)
job, task = coresys.jobs.schedule_job(
test.test_progress_syncing_execute, JobSchedulerOptions()
)

# First child should've set parent job to 25% progress
await main_event.wait()
assert job.progress == 25

# Now we run to middle of second job which should put us at 75%
main_event.clear()
child_event.set()
await main_event.wait()
assert job.progress == 75

# Finally let it run to the end and see progress is 100%
main_event.clear()
group_child_event.set()
await main_event.wait()
assert job.progress == 100

# Release and check it is done
execute_event.set()
await task
assert job.done
@@ -219,7 +219,7 @@ async def test_notify_on_change(coresys: CoreSys, ha_ws_client: AsyncMock):
"name": TEST_JOB,
"reference": "test",
"uuid": ANY,
"progress": 50,
"progress": 100,
"stage": "test",
"done": True,
"parent_id": None,
@@ -275,3 +275,25 @@ async def test_parsing_boots_none():
boots.append((index, boot_id))

assert boots == []

async def test_parsing_non_utf8_message():
"""Test that non-UTF-8 bytes in message are replaced with replacement character."""
journal_logs, stream = _journal_logs_mock()
# Include invalid UTF-8 sequence (0xff is not valid UTF-8)
stream.feed_data(b"MESSAGE=Hello, \xff world!\n\n")
_, line = await anext(journal_logs_reader(journal_logs))
assert line == "Hello, \ufffd world!"

async def test_parsing_non_utf8_in_binary_message():
"""Test that non-UTF-8 bytes in binary format message are replaced."""
journal_logs, stream = _journal_logs_mock()
# Binary format with invalid UTF-8 sequence
stream.feed_data(
b"ID=1\n"
b"MESSAGE\n\x0f\x00\x00\x00\x00\x00\x00\x00Hello, \xff world!\n"
b"AFTER=after\n\n"
)
_, line = await anext(journal_logs_reader(journal_logs))
assert line == "Hello, \ufffd world!"