Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-10-30 14:09:47 +00:00)

Compare commits: fix-websoc ... reject-cor

23 Commits
| SHA1 |
|---|
| 5d9d33c9fa |
| e4959b4f10 |
| 78353220de |
| 131cc3b6d1 |
| b92f5976a3 |
| 370c961c9e |
| b903e1196f |
| 9f8e8ab15a |
| 56bffc839b |
| 952a553c3b |
| 717f1c85f5 |
| ffd498a515 |
| 35f0645cb9 |
| 15c6547382 |
| adefa242e5 |
| 583a8a82fb |
| 322df15e73 |
| 51490c8e41 |
| 3c21a8b8ef |
| ddb8588d77 |
| 81e46b20b8 |
| 5041a1ed5c |
| 337731a55a |
.github/workflows/builder.yml (vendored, 13 changes)

@@ -107,7 +107,7 @@ jobs:
       # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2025.09.1
+        uses: home-assistant/wheels@2025.10.0
         with:
           abi: cp313
           tag: musllinux_1_2

@@ -132,7 +132,7 @@ jobs:

       - name: Install Cosign
         if: needs.init.outputs.publish == 'true'
-        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
+        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
         with:
           cosign-release: "v2.5.3"

@@ -320,6 +320,15 @@ jobs:
             exit 1
           fi

+      - name: Wait for Home Assistant Core to start
+        run: |
+          echo "Waiting for Home Assistant Core to start"
+          timeout 10m ha supervisor logs -f -n 10000 -b 0 | grep -q "Detect a running Home Assistant instance"
+          if [ "$?" != "0" ]; then
+            echo "Home Assistant Core did not start within 10 minutes"
+            exit 1
+          fi
+
       - name: Create full backup
         id: backup
         run: |
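For readers unfamiliar with the `timeout ... | grep -q` idiom in the new workflow step, the same "wait for a marker line with a deadline" pattern looks roughly like the Python sketch below. This is illustrative only; `fake_log_stream` is a made-up stand-in for the streamed Supervisor log, not part of the workflow.

```python
import time
from collections.abc import Iterable


def wait_for_marker(lines: Iterable[str], marker: str, timeout_s: float = 600.0) -> bool:
    """Return True once `marker` appears in the stream, False once the deadline passes."""
    deadline = time.monotonic() + timeout_s
    for line in lines:
        if marker in line:
            return True
        if time.monotonic() > deadline:
            break
    return False


def fake_log_stream() -> Iterable[str]:
    # Hypothetical stand-in for streaming `ha supervisor logs -f`.
    yield "Starting Home Assistant Core"
    yield "Detect a running Home Assistant instance"


if not wait_for_marker(fake_log_stream(), "Detect a running Home Assistant instance"):
    raise SystemExit("Home Assistant Core did not start within 10 minutes")
```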
.github/workflows/ci.yaml (vendored, 2 changes)

@@ -346,7 +346,7 @@ jobs:
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Install Cosign
-        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
+        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
         with:
           cosign-release: "v2.5.3"
       - name: Restore Python virtual environment
.github/workflows/stale.yml (vendored, 1 change)

@@ -16,6 +16,7 @@ jobs:
           days-before-close: 7
           stale-issue-label: "stale"
           exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,pinned,rfc,security"
+          only-issue-types: "bug"
           stale-issue-message: >
             There hasn't been any activity on this issue recently. Due to the
             high number of incoming GitHub notifications, we have to clean some
@@ -1,14 +1,14 @@
 aiodns==3.5.0
-aiohttp==3.13.0
+aiohttp==3.13.1
 atomicwrites-homeassistant==1.4.1
 attrs==25.4.0
 awesomeversion==25.8.0
 blockbuster==1.5.25
 brotli==1.1.0
 ciso8601==2.3.3
-colorlog==6.9.0
+colorlog==6.10.1
 cpe==1.3.1
-cryptography==46.0.2
+cryptography==46.0.3
 debugpy==1.8.17
 deepmerge==2.0
 dirhash==0.5.0

@@ -19,11 +19,11 @@ jinja2==3.1.6
 log-rate-limit==1.4.2
 orjson==3.11.3
 pulsectl==24.12.0
-pyudev==0.24.3
+pyudev==0.24.4
 PyYAML==6.0.3
 requests==2.32.5
 securetar==2025.2.1
-sentry-sdk==2.40.0
+sentry-sdk==2.42.1
 setuptools==80.9.0
 voluptuous==0.15.2
 dbus-fast==2.44.5
@@ -1,16 +1,16 @@
-astroid==3.3.11
-coverage==7.10.7
+astroid==4.0.1
+coverage==7.11.0
 mypy==1.18.2
 pre-commit==4.3.0
-pylint==3.3.9
+pylint==4.0.2
 pytest-aiohttp==1.1.0
 pytest-asyncio==0.25.2
 pytest-cov==7.0.0
 pytest-timeout==2.4.0
 pytest==8.4.2
-ruff==0.14.0
+ruff==0.14.2
 time-machine==2.19.0
-types-docker==7.1.0.20250916
+types-docker==7.1.0.20251009
 types-pyyaml==6.0.12.20250915
 types-requests==2.32.4.20250913
 urllib3==2.5.0
@@ -222,11 +222,6 @@ class APIProxy(CoreSysAttributes):
             raise HTTPBadGateway()
         _LOGGER.info("Home Assistant WebSocket API request initialize")

-        # Check if transport is still valid before WebSocket upgrade
-        if request.transport is None:
-            _LOGGER.warning("WebSocket connection lost before upgrade")
-            raise web.HTTPBadRequest(reason="Connection closed")
-
         # init server
         server = web.WebSocketResponse(heartbeat=30)
         await server.prepare(request)
@@ -306,6 +306,8 @@ class DockerInterface(JobGroup, ABC):
         # Our filters have all passed. Time to update the job
         # Only downloading and extracting have progress details. Use that to set extra
         # We'll leave it around on later stages as the total bytes may be useful after that stage
+        # Enforce range to prevent float drift error
+        progress = max(0, min(progress, 100))
         if (
             stage in {PullImageLayerStage.DOWNLOADING, PullImageLayerStage.EXTRACTING}
             and reference.progress_detail

@@ -371,7 +373,7 @@ class DockerInterface(JobGroup, ABC):

         # To reduce noise, limit updates to when result has changed by an entire percent or when stage changed
         if stage != install_job.stage or progress >= install_job.progress + 1:
-            install_job.update(stage=stage.status, progress=progress)
+            install_job.update(stage=stage.status, progress=max(0, min(progress, 100)))

     @Job(
         name="docker_interface_install",
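The comments added in these hunks ("enforce range to prevent float drift", "limit updates to when result has changed by an entire percent") describe a clamp-and-throttle pattern that can be sketched in isolation. The snippet below is illustrative only; `InstallJob` and `report` are hypothetical stand-ins, not Supervisor's real `JobGroup` API.

```python
# Clamp a progress figure to [0, 100] so accumulated float error can never push it
# out of range, and only publish an update when the stage changes or the value has
# moved by at least a whole percent.
from dataclasses import dataclass


@dataclass
class InstallJob:
    stage: str = ""
    progress: float = 0.0

    def update(self, *, stage: str, progress: float) -> None:
        self.stage = stage
        self.progress = progress


def report(job: InstallJob, stage: str, progress: float) -> None:
    # Enforce range to prevent float drift errors
    progress = max(0.0, min(progress, 100.0))
    # Reduce noise: drop sub-percent movement within the same stage
    if stage != job.stage or progress >= job.progress + 1:
        job.update(stage=stage, progress=progress)


job = InstallJob()
report(job, "downloading", 0.4)      # first update for the stage is kept
report(job, "downloading", 0.9)      # dropped: same stage, less than 1% movement
report(job, "extracting", 100.0001)  # stage change; value clamped back to 100.0
assert job.progress == 100.0
```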
@@ -9,7 +9,12 @@ from typing import Any
 from supervisor.resolution.const import UnhealthyReason

 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import DBusError, DBusObjectError, HardwareNotFound
+from ..exceptions import (
+    DBusError,
+    DBusNotConnectedError,
+    DBusObjectError,
+    HardwareNotFound,
+)
 from .const import UdevSubsystem
 from .data import Device

@@ -207,6 +212,8 @@ class HwDisk(CoreSysAttributes):
         try:
             block_device = self.sys_dbus.udisks2.get_block_device_by_path(device_path)
             drive = self.sys_dbus.udisks2.get_drive(block_device.drive)
+        except DBusNotConnectedError:
+            return None
         except DBusObjectError:
             _LOGGER.warning(
                 "Unable to find UDisks2 drive for device at %s", device_path.as_posix()
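The new `DBusNotConnectedError` branch means a missing system bus yields "unknown" rather than an exception, while a missing UDisks2 object is still logged as a warning. A self-contained sketch of that fallback, with hypothetical stand-ins for Supervisor's D-Bus layer:

```python
class DBusNotConnectedError(Exception):
    """Raised when no D-Bus connection is available."""


class DBusObjectError(Exception):
    """Raised when the requested D-Bus object does not exist."""


class Udisks2Client:
    # Hypothetical stand-in for the UDisks2 D-Bus wrapper.
    connected = False

    def get_drive(self, device_path: str) -> str:
        if not self.connected:
            raise DBusNotConnectedError()
        raise DBusObjectError(device_path)


def disk_life_time(client: Udisks2Client, device_path: str) -> float | None:
    try:
        client.get_drive(device_path)
    except DBusNotConnectedError:
        # No bus connection available at all: the value is simply unknown.
        return None
    except DBusObjectError:
        print(f"Unable to find UDisks2 drive for device at {device_path}")
        return None
    return 0.0  # placeholder for the real wear/lifetime lookup


assert disk_life_time(Udisks2Client(), "/dev/nvme0n1") is None
```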
@@ -371,6 +371,12 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes):
                 _LOGGER.error,
             ) from err

+        if not resp:
+            raise HomeAssistantBackupError(
+                "Preparing backup of Home Assistant Core failed. No response from HA Core.",
+                _LOGGER.error,
+            )
+
         if resp and not resp.get(ATTR_SUCCESS):
             raise HomeAssistantBackupError(
                 f"Preparing backup of Home Assistant Core failed due to: {resp.get(ATTR_ERROR, {}).get(ATTR_MESSAGE, '')}. Check HA Core logs.",
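The added block distinguishes "Core never answered" from "Core answered with an error". A compact sketch of that validation, with an illustrative `BackupError` standing in for `HomeAssistantBackupError`:

```python
class BackupError(Exception):
    """Illustrative stand-in; the real exception also takes a logger callable."""


def check_backup_response(resp: dict | None) -> None:
    # A missing reply and an explicit failure are reported as distinct errors.
    if not resp:
        raise BackupError("No response from HA Core.")
    if not resp.get("success"):
        message = resp.get("error", {}).get("message", "")
        raise BackupError(f"Backup preparation failed: {message}. Check HA Core logs.")


check_backup_response({"success": True})  # passes silently
try:
    check_backup_response(None)
except BackupError as err:
    print(err)
```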
@@ -225,6 +225,10 @@ class HomeAssistantWebSocket(CoreSysAttributes):
         # since it makes a new socket connection and we already have one.
         if not connected and not await self.sys_homeassistant.api.check_api_state():
             # No core access, don't try.
+            _LOGGER.debug(
+                "Home Assistant API is not accessible. Not sending WS message: %s",
+                message,
+            )
             return False

         if not self._client:
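The added debug log documents why a message is dropped when Core is unreachable. Sketched as a standalone coroutine, with a hypothetical `api_reachable()` in place of `check_api_state()`:

```python
import asyncio
import logging

_LOGGER = logging.getLogger(__name__)


async def api_reachable() -> bool:
    # Hypothetical stand-in; pretend Core is down for this sketch.
    return False


async def send_message(message: dict, connected: bool) -> bool:
    # If there is no live WS connection and the REST API is also unreachable,
    # log at debug level and report the message as not sent.
    if not connected and not await api_reachable():
        _LOGGER.debug("Home Assistant API is not accessible. Not sending WS message: %s", message)
        return False
    # ... a real implementation would (re)use the existing WebSocket client here ...
    return True


assert asyncio.run(send_message({"type": "supervisor/event"}, connected=False)) is False
```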
@@ -8,7 +8,7 @@ from ..const import UnsupportedReason
 from .base import EvaluateBase

 EXPECTED_LOGGING = "journald"
-EXPECTED_STORAGE = "overlay2"
+EXPECTED_STORAGE = ("overlay2", "overlayfs")

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -41,14 +41,18 @@ class EvaluateDockerConfiguration(EvaluateBase):
         storage_driver = self.sys_docker.info.storage
         logging_driver = self.sys_docker.info.logging

-        if storage_driver != EXPECTED_STORAGE:
+        is_unsupported = False
+
+        if storage_driver not in EXPECTED_STORAGE:
+            is_unsupported = True
             _LOGGER.warning(
                 "Docker storage driver %s is not supported!", storage_driver
             )

         if logging_driver != EXPECTED_LOGGING:
+            is_unsupported = True
             _LOGGER.warning(
                 "Docker logging driver %s is not supported!", logging_driver
             )

-        return storage_driver != EXPECTED_STORAGE or logging_driver != EXPECTED_LOGGING
+        return is_unsupported
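With `EXPECTED_STORAGE` now a tuple, the storage check becomes a membership test so both overlay2 and overlayfs count as supported, and a single flag collects both results. A standalone sketch of the same logic (the real evaluation reads the drivers from `self.sys_docker.info`):

```python
import logging

_LOGGER = logging.getLogger(__name__)

EXPECTED_LOGGING = "journald"
EXPECTED_STORAGE = ("overlay2", "overlayfs")


def docker_configuration_unsupported(storage_driver: str, logging_driver: str) -> bool:
    """Return True if either driver falls outside the supported set."""
    is_unsupported = False

    if storage_driver not in EXPECTED_STORAGE:
        is_unsupported = True
        _LOGGER.warning("Docker storage driver %s is not supported!", storage_driver)

    if logging_driver != EXPECTED_LOGGING:
        is_unsupported = True
        _LOGGER.warning("Docker logging driver %s is not supported!", logging_driver)

    return is_unsupported


assert docker_configuration_unsupported("overlayfs", "journald") is False
assert docker_configuration_unsupported("btrfs", "journald") is True
```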
@@ -9,7 +9,7 @@ import logging
 from typing import Any, cast
 from unittest.mock import AsyncMock, patch

-from aiohttp import ClientWebSocketResponse, WSCloseCode, web
+from aiohttp import ClientWebSocketResponse, WSCloseCode
 from aiohttp.http_websocket import WSMessage, WSMsgType
 from aiohttp.test_utils import TestClient
 import pytest

@@ -223,32 +223,6 @@ async def test_proxy_auth_abort_log(
     )


-async def test_websocket_transport_none(
-    coresys,
-    caplog: pytest.LogCaptureFixture,
-):
-    """Test WebSocket connection with transport None is handled gracefully."""
-    # Get the API proxy instance from coresys
-    api_proxy = APIProxy.__new__(APIProxy)
-    api_proxy.coresys = coresys
-
-    # Create a mock request with transport set to None to simulate connection loss
-    mock_request = AsyncMock(spec=web.Request)
-    mock_request.transport = None
-
-    caplog.clear()
-    with caplog.at_level(logging.WARNING):
-        # This should raise HTTPBadRequest, not AssertionError
-        with pytest.raises(web.HTTPBadRequest) as exc_info:
-            await api_proxy.websocket(mock_request)
-
-        # Verify the error reason
-        assert exc_info.value.reason == "Connection closed"
-
-        # Verify the warning was logged
-        assert "WebSocket connection lost before upgrade" in caplog.text
-
-
 @pytest.mark.parametrize("path", ["", "mock_path"])
 async def test_api_proxy_get_request(
     api_client: TestClient,
@@ -376,3 +376,14 @@ async def test_try_get_nvme_life_time_missing_percent_used(
         coresys.config.path_supervisor
     )
     assert lifetime is None
+
+
+async def test_try_get_nvme_life_time_dbus_not_connected(coresys: CoreSys):
+    """Test getting lifetime info from an NVMe when DBUS is not connected."""
+    # Set the dbus for udisks2 bus to be None, to make it forcibly disconnected.
+    coresys.dbus.udisks2.dbus = None
+
+    lifetime = await coresys.hardware.disk.get_disk_life_time(
+        coresys.config.path_supervisor
+    )
+    assert lifetime is None
@@ -25,13 +25,18 @@ async def test_evaluation(coresys: CoreSys):
     assert docker_configuration.reason in coresys.resolution.unsupported
     coresys.resolution.unsupported.clear()

-    coresys.docker.info.storage = EXPECTED_STORAGE
+    coresys.docker.info.storage = EXPECTED_STORAGE[0]
     coresys.docker.info.logging = "unsupported"
     await docker_configuration()
     assert docker_configuration.reason in coresys.resolution.unsupported
     coresys.resolution.unsupported.clear()

-    coresys.docker.info.storage = EXPECTED_STORAGE
+    coresys.docker.info.storage = "overlay2"
     coresys.docker.info.logging = EXPECTED_LOGGING
     await docker_configuration()
     assert docker_configuration.reason not in coresys.resolution.unsupported
+
+    coresys.docker.info.storage = "overlayfs"
+    coresys.docker.info.logging = EXPECTED_LOGGING
+    await docker_configuration()
+    assert docker_configuration.reason not in coresys.resolution.unsupported