mirror of https://github.com/home-assistant/supervisor.git
synced 2025-08-12 18:49:20 +00:00

Compare commits: 1 commit, 2025.03.4...fix-error-
Commit 0786e06eb9
10  .github/workflows/builder.yml (vendored)

@@ -106,7 +106,7 @@ jobs:
 
       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2025.03.0
+        uses: home-assistant/wheels@2024.11.0
         with:
          abi: cp313
          tag: musllinux_1_2
@@ -125,7 +125,7 @@ jobs:
 
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        if: needs.init.outputs.publish == 'true'
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
 
@@ -149,7 +149,7 @@ jobs:
 
       - name: Login to GitHub Container Registry
        if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v3.4.0
+        uses: docker/login-action@v3.3.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -160,7 +160,7 @@ jobs:
        run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
 
       - name: Build supervisor
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@2025.02.0
        with:
          args: |
            $BUILD_ARGS \
@@ -207,7 +207,7 @@ jobs:
 
       - name: Build the Supervisor
        if: needs.init.outputs.publish != 'true'
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@2025.02.0
        with:
          args: |
            --test \
46  .github/workflows/ci.yaml (vendored)

@@ -28,12 +28,12 @@ jobs:
        uses: actions/checkout@v4.2.2
      - name: Set up Python
        id: python
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: venv
          key: |
@@ -47,7 +47,7 @@ jobs:
          pip install -r requirements.txt -r requirements_tests.txt
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          lookup-only: true
@@ -69,13 +69,13 @@ jobs:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: venv
          key: |
@@ -87,7 +87,7 @@ jobs:
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          key: |
@@ -112,13 +112,13 @@ jobs:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: venv
          key: |
@@ -130,7 +130,7 @@ jobs:
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          key: |
@@ -170,13 +170,13 @@ jobs:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: venv
          key: |
@@ -188,7 +188,7 @@ jobs:
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          key: |
@@ -214,13 +214,13 @@ jobs:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: venv
          key: |
@@ -232,7 +232,7 @@ jobs:
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          key: |
@@ -258,13 +258,13 @@ jobs:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: venv
          key: |
@@ -294,7 +294,7 @@ jobs:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
@@ -304,7 +304,7 @@ jobs:
          cosign-release: "v2.4.0"
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: venv
          key: |
@@ -339,7 +339,7 @@ jobs:
            -o console_output_style=count \
            tests
      - name: Upload coverage artifact
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.6.1
        with:
          name: coverage-${{ matrix.python-version }}
          path: .coverage
@@ -353,13 +353,13 @@ jobs:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.5.0
+        uses: actions/setup-python@v5.4.0
        id: python
        with:
          python-version: ${{ needs.prepare.outputs.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.2.2
        with:
          path: venv
          key: |
@@ -370,7 +370,7 @@ jobs:
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.9
      - name: Combine coverage results
        run: |
          . venv/bin/activate
2  .github/workflows/sentry.yaml (vendored)

@@ -12,7 +12,7 @@ jobs:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.2.2
      - name: Sentry Release
-        uses: getsentry/action-release@v3.1.1
+        uses: getsentry/action-release@v1.10.4
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
3  .github/workflows/update_frontend.yml (vendored)

@@ -64,9 +64,6 @@ jobs:
          fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_version }}.tar.gz
          extract: true
          out-file-path: supervisor/api/panel/
-      - name: Remove release assets archive
-        run: |
-          rm -f supervisor/api/panel/home_assistant_frontend_supervisor-*.tar.gz
      - name: Create PR
        uses: peter-evans/create-pull-request@v7
        with:
pyproject.toml

@@ -1,5 +1,5 @@
 [build-system]
-requires = ["setuptools~=78.1.0", "wheel~=0.45.0"]
+requires = ["setuptools~=75.8.0", "wheel~=0.45.0"]
 build-backend = "setuptools.build_meta"
 
 [project]
requirements.txt

@@ -1,31 +1,29 @@
 aiodns==3.2.0
-aiohttp==3.11.14
+aiohttp==3.11.13
 atomicwrites-homeassistant==1.4.1
-attrs==25.3.0
+attrs==25.1.0
 awesomeversion==24.6.0
-blockbuster==1.5.24
 brotli==1.1.0
 ciso8601==2.3.2
 colorlog==6.9.0
 cpe==1.3.1
-cryptography==44.0.2
-debugpy==1.8.13
+cryptography==44.0.1
+debugpy==1.8.12
 deepmerge==2.0
 dirhash==0.5.0
 docker==7.1.0
 faust-cchardet==2.1.19
 gitpython==3.1.44
-jinja2==3.1.6
-log-rate-limit==1.4.2
-orjson==3.10.16
+jinja2==3.1.5
+orjson==3.10.12
 pulsectl==24.12.0
 pyudev==0.24.3
 PyYAML==6.0.2
 requests==2.32.3
 securetar==2025.2.1
-sentry-sdk==2.24.1
-setuptools==78.1.0
+sentry-sdk==2.22.0
+setuptools==75.8.2
 voluptuous==0.15.2
-dbus-fast==2.43.0
+dbus-fast==2.34.0
 typing_extensions==4.12.2
 zlib-fast==0.2.1
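The pins above all move in lockstep with the release being compared. To confirm which versions actually resolved in a local environment against these pins, the standard library can report installed versions; a small sketch (package names taken from the list above):

    from importlib import metadata

    # Print the installed version of a few of the pinned packages.
    for pkg in ("aiohttp", "attrs", "cryptography", "sentry-sdk"):
        print(pkg, metadata.version(pkg))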
requirements_tests.txt

@@ -1,13 +1,13 @@
-astroid==3.3.9
-coverage==7.7.1
-pre-commit==4.2.0
-pylint==3.3.6
+astroid==3.3.8
+coverage==7.6.12
+pre-commit==4.1.0
+pylint==3.3.4
 pytest-aiohttp==1.1.0
 pytest-asyncio==0.25.2
 pytest-cov==6.0.0
 pytest-timeout==2.3.1
-pytest==8.3.5
-ruff==0.11.2
+pytest==8.3.4
+ruff==0.9.8
 time-machine==2.16.0
 typing_extensions==4.12.2
 urllib3==2.3.0
supervisor/__main__.py

@@ -11,12 +11,10 @@ import zlib_fast
 # Enable fast zlib before importing supervisor
 zlib_fast.enable()
 
-# pylint: disable=wrong-import-position
-from supervisor import bootstrap  # noqa: E402
-from supervisor.utils.blockbuster import activate_blockbuster  # noqa: E402
-from supervisor.utils.logging import activate_log_queue_handler  # noqa: E402
-
-# pylint: enable=wrong-import-position
+from supervisor import bootstrap  # pylint: disable=wrong-import-position # noqa: E402
+from supervisor.utils.logging import (  # pylint: disable=wrong-import-position # noqa: E402
+    activate_log_queue_handler,
+)
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -54,8 +52,6 @@ if __name__ == "__main__":
     _LOGGER.info("Initializing Supervisor setup")
     coresys = loop.run_until_complete(bootstrap.initialize_coresys())
     loop.set_debug(coresys.config.debug)
-    if coresys.config.detect_blocking_io:
-        activate_blockbuster()
     loop.run_until_complete(coresys.core.connect())
 
     loop.run_until_complete(bootstrap.supervisor_debugger(coresys))
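The removed lines wire optional blocking-I/O detection (Supervisor's blockbuster helper) into startup. asyncio ships a related built-in check: in debug mode the loop warns about callbacks that hold it longer than slow_callback_duration. A minimal, self-contained sketch of that stdlib mechanism (not Supervisor code):

    import asyncio
    import time

    async def main() -> None:
        loop = asyncio.get_running_loop()
        loop.slow_callback_duration = 0.05  # warn when a step blocks >50 ms
        time.sleep(0.2)  # deliberately blocking; debug mode logs a warning

    asyncio.run(main(), debug=True)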
supervisor/addons/addon.py

@@ -18,7 +18,7 @@ from tempfile import TemporaryDirectory
 from typing import Any, Final
 
 import aiohttp
-from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
+from awesomeversion import AwesomeVersionCompareException
 from deepmerge import Merger
 from securetar import AddFileError, atomic_contents_add, secure_path
 import voluptuous as vol
@@ -140,7 +140,9 @@ class Addon(AddonModel):
         super().__init__(coresys, slug)
         self.instance: DockerAddon = DockerAddon(coresys, self)
         self._state: AddonState = AddonState.UNKNOWN
-        self._manual_stop: bool = False
+        self._manual_stop: bool = (
+            self.sys_hardware.helper.last_boot != self.sys_config.last_boot
+        )
         self._listeners: list[EventListener] = []
         self._startup_event = asyncio.Event()
         self._startup_task: asyncio.Task | None = None
@@ -214,10 +216,6 @@ class Addon(AddonModel):
 
     async def load(self) -> None:
         """Async initialize of object."""
-        self._manual_stop = (
-            await self.sys_hardware.helper.last_boot() != self.sys_config.last_boot
-        )
-
         if self.is_detached:
             await super().refresh_path_cache()
 
@@ -285,28 +283,28 @@ class Addon(AddonModel):
     @property
     def with_icon(self) -> bool:
         """Return True if an icon exists."""
-        if self.is_detached or not self.addon_store:
+        if self.is_detached:
             return super().with_icon
         return self.addon_store.with_icon
 
     @property
     def with_logo(self) -> bool:
         """Return True if a logo exists."""
-        if self.is_detached or not self.addon_store:
+        if self.is_detached:
             return super().with_logo
         return self.addon_store.with_logo
 
     @property
     def with_changelog(self) -> bool:
         """Return True if a changelog exists."""
-        if self.is_detached or not self.addon_store:
+        if self.is_detached:
             return super().with_changelog
         return self.addon_store.with_changelog
 
     @property
     def with_documentation(self) -> bool:
         """Return True if a documentation exists."""
-        if self.is_detached or not self.addon_store:
+        if self.is_detached:
             return super().with_documentation
         return self.addon_store.with_documentation
 
@@ -316,7 +314,7 @@ class Addon(AddonModel):
         return self._available(self.data_store)
 
     @property
-    def version(self) -> AwesomeVersion:
+    def version(self) -> str | None:
         """Return installed version."""
         return self.persist[ATTR_VERSION]
 
@@ -464,7 +462,7 @@ class Addon(AddonModel):
         return None
 
     @property
-    def latest_version(self) -> AwesomeVersion:
+    def latest_version(self) -> str:
         """Return version of add-on."""
         return self.data_store[ATTR_VERSION]
 
@@ -518,8 +516,9 @@ class Addon(AddonModel):
     def webui(self) -> str | None:
         """Return URL to webui or None."""
         url = super().webui
-        if not url or not (webui := RE_WEBUI.match(url)):
+        if not url:
             return None
+        webui = RE_WEBUI.match(url)
 
         # extract arguments
         t_port = webui.group("t_port")
@@ -674,9 +673,10 @@ class Addon(AddonModel):
 
     async def watchdog_application(self) -> bool:
         """Return True if application is running."""
-        url = self.watchdog_url
-        if not url or not (application := RE_WATCHDOG.match(url)):
+        url = super().watchdog
+        if not url:
             return True
+        application = RE_WATCHDOG.match(url)
 
         # extract arguments
         t_port = int(application.group("t_port"))
@@ -685,10 +685,8 @@ class Addon(AddonModel):
         s_suffix = application.group("s_suffix") or ""
 
         # search host port for this docker port
-        if self.host_network and self.ports:
-            port = self.ports.get(f"{t_port}/tcp")
-            if port is None:
-                port = t_port
+        if self.host_network:
+            port = self.ports.get(f"{t_port}/tcp", t_port)
         else:
             port = t_port
 
@@ -722,7 +720,7 @@ class Addon(AddonModel):
 
         try:
             options = self.schema.validate(self.options)
-            await self.sys_run_in_executor(write_json_file, self.path_options, options)
+            write_json_file(self.path_options, options)
         except vol.Invalid as ex:
             _LOGGER.error(
                 "Add-on %s has invalid options: %s",
@@ -753,12 +751,9 @@ class Addon(AddonModel):
         for listener in self._listeners:
             self.sys_bus.remove_listener(listener)
 
-        def remove_data_dir():
-            if self.path_data.is_dir():
-                _LOGGER.info("Removing add-on data folder %s", self.path_data)
-                remove_data(self.path_data)
-
-        await self.sys_run_in_executor(remove_data_dir)
+        if self.path_data.is_dir():
+            _LOGGER.info("Removing add-on data folder %s", self.path_data)
+            await remove_data(self.path_data)
 
     async def _check_ingress_port(self):
         """Assign a ingress port if dynamic port selection is used."""
@@ -777,20 +772,14 @@ class Addon(AddonModel):
     )
     async def install(self) -> None:
         """Install and setup this addon."""
-        if not self.addon_store:
-            raise AddonsError("Missing from store, cannot install!")
-
         await self.sys_addons.data.install(self.addon_store)
         await self.load()
 
-        def setup_data():
-            if not self.path_data.is_dir():
-                _LOGGER.info(
-                    "Creating Home Assistant add-on data folder %s", self.path_data
-                )
-                self.path_data.mkdir()
-
-        await self.sys_run_in_executor(setup_data)
+        if not self.path_data.is_dir():
+            _LOGGER.info(
+                "Creating Home Assistant add-on data folder %s", self.path_data
+            )
+            self.path_data.mkdir()
 
         # Setup/Fix AppArmor profile
         await self.install_apparmor()
@@ -829,17 +818,14 @@ class Addon(AddonModel):
 
         await self.unload()
 
-        def cleanup_config_and_audio():
-            # Remove config if present and requested
-            if self.addon_config_used and remove_config:
-                remove_data(self.path_config)
+        # Remove config if present and requested
+        if self.addon_config_used and remove_config:
+            await remove_data(self.path_config)
 
-            # Cleanup audio settings
-            if self.path_pulse.exists():
-                with suppress(OSError):
-                    self.path_pulse.unlink()
-
-        await self.sys_run_in_executor(cleanup_config_and_audio)
+        # Cleanup audio settings
+        if self.path_pulse.exists():
+            with suppress(OSError):
+                self.path_pulse.unlink()
 
         # Cleanup AppArmor profile
         with suppress(HostAppArmorError):
@@ -883,9 +869,6 @@ class Addon(AddonModel):
         Returns a Task that completes when addon has state 'started' (see start)
         if it was running. Else nothing is returned.
         """
-        if not self.addon_store:
-            raise AddonsError("Missing from store, cannot update!")
-
         old_image = self.image
         # Cache data to prevent races with other updates to global
         store = self.addon_store.clone()
@@ -942,9 +925,7 @@ class Addon(AddonModel):
         except DockerError as err:
             raise AddonsError() from err
 
-        if self.addon_store:
-            await self.sys_addons.data.update(self.addon_store)
-
+        await self.sys_addons.data.update(self.addon_store)
         await self._check_ingress_port()
         _LOGGER.info("Add-on '%s' successfully rebuilt", self.slug)
 
@@ -957,25 +938,22 @@ class Addon(AddonModel):
         )
         return out
 
-    async def write_pulse(self) -> None:
+    def write_pulse(self) -> None:
         """Write asound config to file and return True on success."""
         pulse_config = self.sys_plugins.audio.pulse_client(
             input_profile=self.audio_input, output_profile=self.audio_output
         )
 
-        def write_pulse_config():
-            # Cleanup wrong maps
-            if self.path_pulse.is_dir():
-                shutil.rmtree(self.path_pulse, ignore_errors=True)
-            self.path_pulse.write_text(pulse_config, encoding="utf-8")
+        # Cleanup wrong maps
+        if self.path_pulse.is_dir():
+            shutil.rmtree(self.path_pulse, ignore_errors=True)
+
         # Write pulse config
         try:
-            await self.sys_run_in_executor(write_pulse_config)
+            self.path_pulse.write_text(pulse_config, encoding="utf-8")
         except OSError as err:
             if err.errno == errno.EBADMSG:
-                self.sys_resolution.add_unhealthy_reason(
-                    UnhealthyReason.OSERROR_BAD_MESSAGE
-                )
+                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
             _LOGGER.error(
                 "Add-on %s can't write pulse/client.config: %s", self.slug, err
             )
@@ -987,7 +965,7 @@ class Addon(AddonModel):
     async def install_apparmor(self) -> None:
         """Install or Update AppArmor profile for Add-on."""
         exists_local = self.sys_host.apparmor.exists(self.slug)
-        exists_addon = await self.sys_run_in_executor(self.path_apparmor.exists)
+        exists_addon = self.path_apparmor.exists()
 
         # Nothing to do
         if not exists_local and not exists_addon:
@@ -1092,7 +1070,7 @@ class Addon(AddonModel):
 
         # Sound
         if self.with_audio:
-            await self.write_pulse()
+            self.write_pulse()
 
         def _check_addon_config_dir():
             if self.path_config.is_dir():
@@ -1334,7 +1312,7 @@ class Addon(AddonModel):
                 arcname="config",
             )
 
-        wait_for_start: asyncio.Task | None = None
+        wait_for_start: Awaitable[None] | None = None
 
         data = {
             ATTR_USER: self.persist,
@@ -1380,7 +1358,7 @@ class Addon(AddonModel):
         Returns a Task that completes when addon has state 'started' (see start)
         if addon is started after restore. Else nothing is returned.
         """
-        wait_for_start: asyncio.Task | None = None
+        wait_for_start: Awaitable[None] | None = None
 
         # Extract backup
         def _extract_tarfile() -> tuple[TemporaryDirectory, dict[str, Any]]:
@@ -1463,12 +1441,6 @@ class Addon(AddonModel):
         # Restore data and config
         def _restore_data():
             """Restore data and config."""
-            _LOGGER.info("Restoring data and config for addon %s", self.slug)
-            if self.path_data.is_dir():
-                remove_data(self.path_data)
-            if self.path_config.is_dir():
-                remove_data(self.path_config)
-
             temp_data = Path(tmp.name, "data")
             if temp_data.is_dir():
                 shutil.copytree(temp_data, self.path_data, symlinks=True)
@@ -1481,6 +1453,12 @@ class Addon(AddonModel):
             elif self.addon_config_used:
                 self.path_config.mkdir()
 
+        _LOGGER.info("Restoring data and config for addon %s", self.slug)
+        if self.path_data.is_dir():
+            await remove_data(self.path_data)
+        if self.path_config.is_dir():
+            await remove_data(self.path_config)
+
         try:
             await self.sys_run_in_executor(_restore_data)
         except shutil.Error as err:
@@ -1490,7 +1468,7 @@ class Addon(AddonModel):
 
         # Restore AppArmor
         profile_file = Path(tmp.name, "apparmor.txt")
-        if await self.sys_run_in_executor(profile_file.exists):
+        if profile_file.exists():
             try:
                 await self.sys_host.apparmor.load_profile(
                     self.slug, profile_file
@@ -1511,7 +1489,7 @@ class Addon(AddonModel):
             if data[ATTR_STATE] == AddonState.STARTED:
                 wait_for_start = await self.start()
         finally:
-            await self.sys_run_in_executor(tmp.cleanup)
+            tmp.cleanup()
         _LOGGER.info("Finished restore for add-on %s", self.slug)
         return wait_for_start
 
@@ -1604,6 +1582,6 @@ class Addon(AddonModel):
 
     def refresh_path_cache(self) -> Awaitable[None]:
         """Refresh cache of existing paths."""
-        if self.is_detached or not self.addon_store:
+        if self.is_detached:
             return super().refresh_path_cache()
         return self.addon_store.refresh_path_cache()
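Most hunks in this file undo one refactor: blocking filesystem work (removing data folders, creating directories, writing the pulse config, stat-ing AppArmor profiles) had been wrapped in small local functions and handed to sys_run_in_executor so the event loop never blocks. A minimal stdlib sketch of that pattern — the names are illustrative, not Supervisor's:

    import asyncio
    import shutil
    from pathlib import Path

    async def remove_tree(path: Path) -> None:
        """Delete a directory tree without blocking the event loop."""

        def _remove() -> None:  # runs on the default thread-pool executor
            if path.is_dir():
                shutil.rmtree(path, ignore_errors=True)

        await asyncio.get_running_loop().run_in_executor(None, _remove)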
supervisor/addons/build.py

@@ -4,7 +4,7 @@ from __future__ import annotations
 
 from functools import cached_property
 from pathlib import Path
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING
 
 from awesomeversion import AwesomeVersion
 
@@ -23,7 +23,7 @@ from ..utils.common import FileConfiguration, find_one_filetype
 from .validate import SCHEMA_BUILD_CONFIG
 
 if TYPE_CHECKING:
-    from .manager import AnyAddon
+    from . import AnyAddon
 
 
 class AddonBuild(FileConfiguration, CoreSysAttributes):
@@ -63,7 +63,7 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
     @cached_property
     def arch(self) -> str:
         """Return arch of the add-on."""
-        return self.sys_arch.match([self.addon.arch])
+        return self.sys_arch.match(self.addon.arch)
 
     @property
     def base_image(self) -> str:
@@ -81,6 +81,13 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         )
         return self._data[ATTR_BUILD_FROM][self.arch]
 
+    @property
+    def dockerfile(self) -> Path:
+        """Return Dockerfile path."""
+        if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
+            return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
+        return self.addon.path_location.joinpath("Dockerfile")
+
     @property
     def squash(self) -> bool:
         """Return True or False if squash is active."""
@@ -96,40 +103,25 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         """Return additional Docker labels."""
         return self._data[ATTR_LABELS]
 
-    def get_dockerfile(self) -> Path:
-        """Return Dockerfile path.
-
-        Must be run in executor.
-        """
-        if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
-            return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
-        return self.addon.path_location.joinpath("Dockerfile")
-
-    async def is_valid(self) -> bool:
+    @property
+    def is_valid(self) -> bool:
         """Return true if the build env is valid."""
-
-        def build_is_valid() -> bool:
+        try:
             return all(
                 [
                     self.addon.path_location.is_dir(),
-                    self.get_dockerfile().is_file(),
+                    self.dockerfile.is_file(),
                 ]
             )
-
-        try:
-            return await self.sys_run_in_executor(build_is_valid)
         except HassioArchNotFound:
             return False
 
     def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
-        """Create a dict with Docker build arguments.
-
-        Must be run in executor.
-        """
-        args: dict[str, Any] = {
+        """Create a dict with Docker build arguments."""
+        args = {
            "path": str(self.addon.path_location),
            "tag": f"{image or self.addon.image}:{version!s}",
-            "dockerfile": str(self.get_dockerfile()),
+            "dockerfile": str(self.dockerfile),
            "pull": True,
            "forcerm": not self.sys_dev,
            "squash": self.squash,
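Here the same refactor shows up as API shape: the dockerfile and is_valid properties become a get_dockerfile() method and an async is_valid(), because both stat the filesystem. A hedged sketch of why an I/O-touching property turns into an async method (illustrative class, not AddonBuild):

    import asyncio
    from pathlib import Path

    class Build:
        def __init__(self, location: Path, arch: str) -> None:
            self.location = location
            self.arch = arch

        def get_dockerfile(self) -> Path:
            """Blocking: .exists() hits the disk, so call this in an executor."""
            arch_file = self.location / f"Dockerfile.{self.arch}"
            return arch_file if arch_file.exists() else self.location / "Dockerfile"

        async def is_valid(self) -> bool:
            def _check() -> bool:
                return self.location.is_dir() and self.get_dockerfile().is_file()

            return await asyncio.get_running_loop().run_in_executor(None, _check)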
supervisor/addons/manager.py

@@ -194,7 +194,6 @@ class AddonManager(CoreSysAttributes):
 
         _LOGGER.info("Add-on '%s' successfully installed", slug)
 
-    @Job(name="addon_manager_uninstall")
     async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
         """Remove an add-on."""
         if slug not in self.local:
@@ -314,7 +313,7 @@ class AddonManager(CoreSysAttributes):
         if slug not in self.local:
             _LOGGER.debug("Add-on %s is not local available for restore", slug)
             addon = Addon(self.coresys, slug)
-            had_ingress: bool | None = False
+            had_ingress = False
         else:
             _LOGGER.debug("Add-on %s is local available for restore", slug)
             addon = self.local[slug]
supervisor/addons/model.py

@@ -294,7 +294,7 @@ class AddonModel(JobGroup, ABC):
         return self.data.get(ATTR_WEBUI)
 
     @property
-    def watchdog_url(self) -> str | None:
+    def watchdog(self) -> str | None:
         """Return URL to for watchdog or None."""
         return self.data.get(ATTR_WATCHDOG)
 
@@ -606,7 +606,7 @@ class AddonModel(JobGroup, ABC):
         return AddonOptions(self.coresys, raw_schema, self.name, self.slug)
 
     @property
-    def schema_ui(self) -> list[dict[Any, Any]] | None:
+    def schema_ui(self) -> list[dict[any, any]] | None:
         """Create a UI schema for add-on options."""
         raw_schema = self.data[ATTR_SCHEMA]
 
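Note the annotation reverted in schema_ui: dict[Any, Any] uses typing.Any, while dict[any, any] subscripts the builtin any() function. Both are accepted at runtime (PEP 585 subscripts are not validated), but only the first means "any type" to a type checker:

    from typing import Any

    good: dict[Any, Any] = {}  # "any key type to any value type"
    bad: dict[any, any] = {}   # runs fine, but mypy/pylint flag the builtin any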
supervisor/addons/options.py

@@ -137,7 +137,7 @@ class AddonOptions(CoreSysAttributes):
             ) from None
 
         # prepare range
-        range_args: dict[str, Any] = {}
+        range_args = {}
         for group_name in _SCHEMA_LENGTH_PARTS:
             group_value = match.group(group_name)
             if group_value:
@@ -390,14 +390,14 @@ class UiOptions(CoreSysAttributes):
         multiple: bool = False,
     ) -> None:
         """UI nested dict items."""
-        ui_node: dict[str, Any] = {
+        ui_node = {
             "name": key,
             "type": "schema",
             "optional": True,
             "multiple": multiple,
         }
 
-        nested_schema: list[dict[str, Any]] = []
+        nested_schema = []
         for c_key, c_value in option_dict.items():
             # Nested?
             if isinstance(c_value, list):
@@ -413,7 +413,7 @@ def _create_device_filter(str_filter: str) -> dict[str, Any]:
     """Generate device Filter."""
     raw_filter = dict(value.split("=") for value in str_filter.split(";"))
 
-    clean_filter: dict[str, Any] = {}
+    clean_filter = {}
     for key, value in raw_filter.items():
         if key == "subsystem":
             clean_filter[key] = UdevSubsystem(value)
supervisor/addons/utils.py

@@ -2,9 +2,9 @@
 
 from __future__ import annotations
 
+import asyncio
 import logging
 from pathlib import Path
-import subprocess
 from typing import TYPE_CHECKING
 
 from ..const import ROLE_ADMIN, ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE
@@ -86,20 +86,18 @@ def rating_security(addon: AddonModel) -> int:
     return max(min(8, rating), 1)
 
 
-def remove_data(folder: Path) -> None:
-    """Remove folder and reset privileged.
-
-    Must be run in executor.
-    """
+async def remove_data(folder: Path) -> None:
+    """Remove folder and reset privileged."""
     try:
-        subprocess.run(
-            ["rm", "-rf", str(folder)], stdout=subprocess.DEVNULL, text=True, check=True
+        proc = await asyncio.create_subprocess_exec(
+            "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
         )
+
+        _, error_msg = await proc.communicate()
     except OSError as err:
         error_msg = str(err)
-    except subprocess.CalledProcessError as procerr:
-        error_msg = procerr.stderr.strip()
-    else:
-        return
+
+    if proc.returncode == 0:
+        return
 
     _LOGGER.error("Can't remove Add-on Data: %s", error_msg)
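remove_data is reverted from a synchronous helper (subprocess.run, meant to be called from an executor thread) back to a coroutine driving asyncio's subprocess API. The two styles side by side, stdlib only — error handling reduced to a sketch:

    import asyncio
    import subprocess

    def remove_sync(folder: str) -> None:
        """Blocking variant: safe when run inside a thread-pool executor."""
        subprocess.run(
            ["rm", "-rf", folder], stdout=subprocess.DEVNULL, text=True, check=True
        )

    async def remove_async(folder: str) -> None:
        """Non-blocking variant: the event loop keeps running while rm works."""
        proc = await asyncio.create_subprocess_exec(
            "rm", "-rf", folder, stdout=asyncio.subprocess.DEVNULL
        )
        await proc.communicate()
        if proc.returncode != 0:
            raise RuntimeError(f"rm exited with code {proc.returncode}")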
supervisor/api/__init__.py

@@ -1,12 +1,11 @@
 """Init file for Supervisor RESTful API."""
 
-from dataclasses import dataclass
 from functools import partial
 import logging
 from pathlib import Path
 from typing import Any
 
-from aiohttp import hdrs, web
+from aiohttp import web
 
 from ..const import AddonState
 from ..coresys import CoreSys, CoreSysAttributes
@@ -48,14 +47,6 @@ MAX_CLIENT_SIZE: int = 1024**2 * 16
 MAX_LINE_SIZE: int = 24570
 
 
-@dataclass(slots=True, frozen=True)
-class StaticResourceConfig:
-    """Configuration for a static resource."""
-
-    prefix: str
-    path: Path
-
-
 class RestAPI(CoreSysAttributes):
     """Handle RESTful API for Supervisor."""
 
@@ -82,12 +73,12 @@ class RestAPI(CoreSysAttributes):
         self._site: web.TCPSite | None = None
 
         # share single host API handler for reuse in logging endpoints
-        self._api_host: APIHost = APIHost()
-        self._api_host.coresys = coresys
+        self._api_host: APIHost | None = None
 
     async def load(self) -> None:
         """Register REST API Calls."""
-        static_resource_configs: list[StaticResourceConfig] = []
+        self._api_host = APIHost()
+        self._api_host.coresys = self.coresys
 
         self._register_addons()
         self._register_audio()
@@ -107,7 +98,7 @@ class RestAPI(CoreSysAttributes):
         self._register_network()
         self._register_observer()
         self._register_os()
-        static_resource_configs.extend(self._register_panel())
+        self._register_panel()
         self._register_proxy()
         self._register_resolution()
         self._register_root()
@@ -116,17 +107,6 @@ class RestAPI(CoreSysAttributes):
         self._register_store()
         self._register_supervisor()
 
-        if static_resource_configs:
-
-            def process_configs() -> list[web.StaticResource]:
-                return [
-                    web.StaticResource(config.prefix, config.path)
-                    for config in static_resource_configs
-                ]
-
-            for resource in await self.sys_run_in_executor(process_configs):
-                self.webapp.router.register_resource(resource)
-
         await self.start()
 
     def _register_advanced_logs(self, path: str, syslog_identifier: str):
@@ -237,8 +217,6 @@ class RestAPI(CoreSysAttributes):
             [
                 web.get("/os/info", api_os.info),
                 web.post("/os/update", api_os.update),
-                web.get("/os/config/swap", api_os.config_swap_info),
-                web.post("/os/config/swap", api_os.config_swap_options),
                 web.post("/os/config/sync", api_os.config_sync),
                 web.post("/os/datadisk/move", api_os.migrate_data),
                 web.get("/os/datadisk/list", api_os.list_data),
@@ -526,7 +504,7 @@ class RestAPI(CoreSysAttributes):
 
         self.webapp.add_routes(
             [
-                web.get("/addons", api_addons.list_addons),
+                web.get("/addons", api_addons.list),
                 web.post("/addons/{addon}/uninstall", api_addons.uninstall),
                 web.post("/addons/{addon}/start", api_addons.start),
                 web.post("/addons/{addon}/stop", api_addons.stop),
@@ -594,9 +572,7 @@ class RestAPI(CoreSysAttributes):
                 web.post("/ingress/session", api_ingress.create_session),
                 web.post("/ingress/validate_session", api_ingress.validate_session),
                 web.get("/ingress/panels", api_ingress.panels),
-                web.route(
-                    hdrs.METH_ANY, "/ingress/{token}/{path:.*}", api_ingress.handler
-                ),
+                web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
             ]
         )
 
@@ -607,7 +583,7 @@ class RestAPI(CoreSysAttributes):
 
         self.webapp.add_routes(
             [
-                web.get("/backups", api_backups.list_backups),
+                web.get("/backups", api_backups.list),
                 web.get("/backups/info", api_backups.info),
                 web.post("/backups/options", api_backups.options),
                 web.post("/backups/reload", api_backups.reload),
@@ -634,7 +610,7 @@ class RestAPI(CoreSysAttributes):
 
         self.webapp.add_routes(
             [
-                web.get("/services", api_services.list_services),
+                web.get("/services", api_services.list),
                 web.get("/services/{service}", api_services.get_service),
                 web.post("/services/{service}", api_services.set_service),
                 web.delete("/services/{service}", api_services.del_service),
@@ -648,7 +624,7 @@ class RestAPI(CoreSysAttributes):
 
         self.webapp.add_routes(
             [
-                web.get("/discovery", api_discovery.list_discovery),
+                web.get("/discovery", api_discovery.list),
                 web.get("/discovery/{uuid}", api_discovery.get_discovery),
                 web.delete("/discovery/{uuid}", api_discovery.del_discovery),
                 web.post("/discovery", api_discovery.set_discovery),
@@ -774,9 +750,10 @@ class RestAPI(CoreSysAttributes):
             ]
         )
 
-    def _register_panel(self) -> list[StaticResourceConfig]:
+    def _register_panel(self) -> None:
         """Register panel for Home Assistant."""
-        return [StaticResourceConfig("/app", Path(__file__).parent.joinpath("panel"))]
+        panel_dir = Path(__file__).parent.joinpath("panel")
+        self.webapp.add_routes([web.static("/app", panel_dir)])
 
     def _register_docker(self) -> None:
         """Register docker configuration functions."""
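_register_panel is reverted from returning StaticResourceConfig entries (so the route objects could be built off-loop) to registering the static route directly. The direct aiohttp form, as a minimal standalone sketch (the panel directory is assumed to exist):

    from pathlib import Path

    from aiohttp import web

    app = web.Application()
    panel_dir = Path(__file__).parent / "panel"
    app.add_routes([web.static("/app", panel_dir)])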
supervisor/api/addons.py

@@ -3,13 +3,14 @@
 import asyncio
 from collections.abc import Awaitable
 import logging
-from typing import Any, TypedDict
+from typing import Any
 
 from aiohttp import web
 import voluptuous as vol
 from voluptuous.humanize import humanize_error
 
 from ..addons.addon import Addon
+from ..addons.manager import AnyAddon
 from ..addons.utils import rating_security
 from ..const import (
     ATTR_ADDONS,
@@ -62,6 +63,7 @@ from ..const import (
     ATTR_MEMORY_LIMIT,
     ATTR_MEMORY_PERCENT,
     ATTR_MEMORY_USAGE,
+    ATTR_MESSAGE,
     ATTR_NAME,
     ATTR_NETWORK,
     ATTR_NETWORK_DESCRIPTION,
@@ -70,6 +72,7 @@ from ..const import (
     ATTR_OPTIONS,
     ATTR_PRIVILEGED,
     ATTR_PROTECTED,
+    ATTR_PWNED,
     ATTR_RATING,
     ATTR_REPOSITORY,
     ATTR_SCHEMA,
@@ -87,6 +90,7 @@ from ..const import (
     ATTR_UPDATE_AVAILABLE,
     ATTR_URL,
     ATTR_USB,
+    ATTR_VALID,
     ATTR_VERSION,
     ATTR_VERSION_LATEST,
     ATTR_VIDEO,
@@ -142,20 +146,12 @@ SCHEMA_UNINSTALL = vol.Schema(
 # pylint: enable=no-value-for-parameter
 
 
-class OptionsValidateResponse(TypedDict):
-    """Response object for options validate."""
-
-    message: str
-    valid: bool
-    pwned: bool | None
-
-
 class APIAddons(CoreSysAttributes):
     """Handle RESTful API for add-on functions."""
 
     def get_addon_for_request(self, request: web.Request) -> Addon:
         """Return addon, throw an exception if it doesn't exist."""
-        addon_slug: str = request.match_info["addon"]
+        addon_slug: str = request.match_info.get("addon")
 
         # Lookup itself
         if addon_slug == "self":
@@ -173,7 +169,7 @@ class APIAddons(CoreSysAttributes):
         return addon
 
     @api_process
-    async def list_addons(self, request: web.Request) -> dict[str, Any]:
+    async def list(self, request: web.Request) -> dict[str, Any]:
         """Return all add-ons or repositories."""
         data_addons = [
             {
@@ -208,7 +204,7 @@ class APIAddons(CoreSysAttributes):
 
     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return add-on information."""
-        addon: Addon = self.get_addon_for_request(request)
+        addon: AnyAddon = self.get_addon_for_request(request)
 
         data = {
             ATTR_NAME: addon.name,
@@ -343,10 +339,10 @@ class APIAddons(CoreSysAttributes):
         await addon.save_persist()
 
     @api_process
-    async def options_validate(self, request: web.Request) -> OptionsValidateResponse:
+    async def options_validate(self, request: web.Request) -> None:
         """Validate user options for add-on."""
         addon = self.get_addon_for_request(request)
-        data = OptionsValidateResponse(message="", valid=True, pwned=False)
+        data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}
 
         options = await request.json(loads=json_loads) or addon.options
 
@@ -355,8 +351,8 @@ class APIAddons(CoreSysAttributes):
         try:
             options_schema.validate(options)
         except vol.Invalid as ex:
-            data["message"] = humanize_error(options, ex)
-            data["valid"] = False
+            data[ATTR_MESSAGE] = humanize_error(options, ex)
+            data[ATTR_VALID] = False
 
         if not self.sys_security.pwned:
             return data
@@ -367,24 +363,24 @@ class APIAddons(CoreSysAttributes):
                     await self.sys_security.verify_secret(secret)
                     continue
                 except PwnedSecret:
-                    data["pwned"] = True
+                    data[ATTR_PWNED] = True
                 except PwnedError:
-                    data["pwned"] = None
+                    data[ATTR_PWNED] = None
                 break
 
-        if self.sys_security.force and data["pwned"] in (None, True):
-            data["valid"] = False
-            if data["pwned"] is None:
-                data["message"] = "Error happening on pwned secrets check!"
+        if self.sys_security.force and data[ATTR_PWNED] in (None, True):
+            data[ATTR_VALID] = False
+            if data[ATTR_PWNED] is None:
+                data[ATTR_MESSAGE] = "Error happening on pwned secrets check!"
             else:
-                data["message"] = "Add-on uses pwned secrets!"
+                data[ATTR_MESSAGE] = "Add-on uses pwned secrets!"
 
         return data
 
     @api_process
     async def options_config(self, request: web.Request) -> None:
         """Validate user options for add-on."""
-        slug: str = request.match_info["addon"]
+        slug: str = request.match_info.get("addon")
         if slug != "self":
             raise APIForbidden("This can be only read by the Add-on itself!")
         addon = self.get_addon_for_request(request)
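The OptionsValidateResponse TypedDict removed above gives the validate endpoint's payload a checked shape instead of a free-form dict keyed by ATTR_* constants. A self-contained sketch of the idea (Python 3.10+ for the | syntax):

    from typing import TypedDict

    class OptionsValidateResponse(TypedDict):
        """Response payload for an options-validate endpoint."""

        message: str
        valid: bool
        pwned: bool | None

    def empty_response() -> OptionsValidateResponse:
        # A type checker now rejects typos such as data["mesage"] = ...
        return OptionsValidateResponse(message="", valid=True, pwned=False)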
supervisor/api/audio.py

@@ -124,7 +124,7 @@ class APIAudio(CoreSysAttributes):
     @api_process
     async def set_volume(self, request: web.Request) -> None:
         """Set audio volume on stream."""
-        source: StreamType = StreamType(request.match_info["source"])
+        source: StreamType = StreamType(request.match_info.get("source"))
         application: bool = request.path.endswith("application")
         body = await api_validate(SCHEMA_VOLUME, request)
 
@@ -137,7 +137,7 @@ class APIAudio(CoreSysAttributes):
     @api_process
     async def set_mute(self, request: web.Request) -> None:
         """Mute audio volume on stream."""
-        source: StreamType = StreamType(request.match_info["source"])
+        source: StreamType = StreamType(request.match_info.get("source"))
         application: bool = request.path.endswith("application")
         body = await api_validate(SCHEMA_MUTE, request)
 
@@ -150,7 +150,7 @@ class APIAudio(CoreSysAttributes):
     @api_process
     async def set_default(self, request: web.Request) -> None:
         """Set audio default stream."""
-        source: StreamType = StreamType(request.match_info["source"])
+        source: StreamType = StreamType(request.match_info.get("source"))
         body = await api_validate(SCHEMA_DEFAULT, request)
 
         await asyncio.shield(self.sys_host.sound.set_default(source, body[ATTR_NAME]))
supervisor/api/auth.py

@@ -1,7 +1,6 @@
 """Init file for Supervisor auth/SSO RESTful API."""
 
 import asyncio
-from collections.abc import Awaitable
 import logging
 from typing import Any
 
@@ -43,7 +42,7 @@ REALM_HEADER: dict[str, str] = {
 class APIAuth(CoreSysAttributes):
     """Handle RESTful API for auth functions."""
 
-    def _process_basic(self, request: web.Request, addon: Addon) -> Awaitable[bool]:
+    def _process_basic(self, request: web.Request, addon: Addon) -> bool:
         """Process login request with basic auth.
 
         Return a coroutine.
@@ -53,7 +52,7 @@ class APIAuth(CoreSysAttributes):
 
     def _process_dict(
         self, request: web.Request, addon: Addon, data: dict[str, str]
-    ) -> Awaitable[bool]:
+    ) -> bool:
         """Process login with dict data.
 
         Return a coroutine.
supervisor/api/backups.py

@@ -10,9 +10,9 @@ import logging
 from pathlib import Path
 import re
 from tempfile import TemporaryDirectory
-from typing import Any, cast
+from typing import Any
 
-from aiohttp import BodyPartReader, web
+from aiohttp import web
 from aiohttp.hdrs import CONTENT_DISPOSITION
 import voluptuous as vol
 from voluptuous.humanize import humanize_error
@@ -52,9 +52,8 @@ from ..const import (
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError, APIForbidden, APINotFound
-from ..jobs import JobSchedulerOptions, SupervisorJob
+from ..jobs import JobSchedulerOptions
 from ..mounts.const import MountUsage
 from ..mounts.mount import Mount
-from ..resolution.const import UnhealthyReason
 from .const import (
     ATTR_ADDITIONAL_LOCATIONS,
@@ -188,7 +187,7 @@ class APIBackups(CoreSysAttributes):
         ]
 
     @api_process
-    async def list_backups(self, request):
+    async def list(self, request):
         """Return backup list."""
         data_backups = self._list_backups()
 
@@ -296,11 +295,8 @@ class APIBackups(CoreSysAttributes):
     ) -> tuple[asyncio.Task, str]:
         """Start backup task in background and return task and job ID."""
         event = asyncio.Event()
-        job, backup_task = cast(
-            tuple[SupervisorJob, asyncio.Task],
-            self.sys_jobs.schedule_job(
-                backup_method, JobSchedulerOptions(), *args, **kwargs
-            ),
+        job, backup_task = self.sys_jobs.schedule_job(
+            backup_method, JobSchedulerOptions(), *args, **kwargs
         )
 
         async def release_on_freeze(new_state: CoreState):
@@ -315,7 +311,10 @@ class APIBackups(CoreSysAttributes):
         try:
             event_task = self.sys_create_task(event.wait())
             _, pending = await asyncio.wait(
-                (backup_task, event_task),
+                (
+                    backup_task,
+                    event_task,
+                ),
                 return_when=asyncio.FIRST_COMPLETED,
             )
             # It seems backup returned early (error or something), make sure to cancel
@@ -476,7 +475,7 @@ class APIBackups(CoreSysAttributes):
         _LOGGER.info("Downloading backup %s", backup.slug)
         filename = backup.all_locations[location][ATTR_PATH]
         # If the file is missing, return 404 and trigger reload of location
-        if not await self.sys_run_in_executor(filename.is_file):
+        if not filename.is_file():
             self.sys_create_task(self.sys_backups.reload(location))
             return web.Response(status=404)
 
@@ -498,10 +497,8 @@ class APIBackups(CoreSysAttributes):
         locations: list[LOCATION_TYPE] | None = None
         tmp_path = self.sys_config.path_tmp
         if ATTR_LOCATION in request.query:
-            location_names: list[str] = request.query.getall(ATTR_LOCATION, [])
-            self._validate_cloud_backup_location(
-                request, cast(list[str | None], location_names)
-            )
+            location_names: list[str] = request.query.getall(ATTR_LOCATION)
+            self._validate_cloud_backup_location(request, location_names)
             # Convert empty string to None if necessary
             locations = [
                 self._location_to_mount(location)
@@ -512,7 +509,7 @@ class APIBackups(CoreSysAttributes):
             location = locations.pop(0)
 
         if location and location != LOCATION_CLOUD_BACKUP:
-            tmp_path = cast(Mount, location).local_where or tmp_path
+            tmp_path = location.local_where
 
         filename: str | None = None
         if ATTR_FILENAME in request.query:
@@ -534,8 +531,6 @@ class APIBackups(CoreSysAttributes):
 
         def close_backup_file() -> None:
             if backup_file_stream:
-                # Make sure it got closed, in case of exception. It is safe to
-                # close the file stream twice.
                 backup_file_stream.close()
             if temp_dir:
                 temp_dir.cleanup()
@@ -543,15 +538,9 @@ class APIBackups(CoreSysAttributes):
         try:
             reader = await request.multipart()
             contents = await reader.next()
-            if not isinstance(contents, BodyPartReader):
-                raise APIError("Improperly formatted upload, could not read backup")
-
             tar_file = await self.sys_run_in_executor(open_backup_file)
             while chunk := await contents.read_chunk(size=2**16):
-                await self.sys_run_in_executor(
-                    cast(IOBase, backup_file_stream).write, chunk
-                )
-            await self.sys_run_in_executor(cast(IOBase, backup_file_stream).close)
+                await self.sys_run_in_executor(backup_file_stream.write, chunk)
 
             backup = await asyncio.shield(
                 self.sys_backups.import_backup(
@@ -566,9 +555,7 @@ class APIBackups(CoreSysAttributes):
                 LOCATION_CLOUD_BACKUP,
                 None,
             }:
-                self.sys_resolution.add_unhealthy_reason(
-                    UnhealthyReason.OSERROR_BAD_MESSAGE
-                )
+                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
             _LOGGER.error("Can't write new backup file: %s", err)
             return False
 
@@ -576,7 +563,8 @@ class APIBackups(CoreSysAttributes):
             return False
 
         finally:
-            await self.sys_run_in_executor(close_backup_file)
+            if temp_dir or backup:
+                await self.sys_run_in_executor(close_backup_file)
 
         if backup:
             return {ATTR_SLUG: backup.slug}
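The upload path streams a multipart backup to disk chunk by chunk, pushing every blocking write through the executor. The core loop reduced to a sketch — read_chunk follows aiohttp's BodyPartReader API, the file handling is illustrative:

    import asyncio

    CHUNK_SIZE = 2**16  # 64 KiB, matching the read_chunk size above

    async def save_stream(reader, path: str) -> None:
        """Copy an async chunked stream to a file without blocking the loop."""
        loop = asyncio.get_running_loop()
        stream = await loop.run_in_executor(None, open, path, "wb")
        try:
            while chunk := await reader.read_chunk(size=CHUNK_SIZE):
                await loop.run_in_executor(None, stream.write, chunk)
        finally:
            await loop.run_in_executor(None, stream.close)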
supervisor/api/const.py

@@ -80,11 +80,3 @@ class BootSlot(StrEnum):
 
     A = "A"
     B = "B"
-
-
-class DetectBlockingIO(StrEnum):
-    """Enable/Disable detection for blocking I/O in event loop."""
-
-    OFF = "off"
-    ON = "on"
-    ON_AT_STARTUP = "on_at_startup"
supervisor/api/discovery.py

@@ -1,9 +1,7 @@
 """Init file for Supervisor network RESTful API."""
 
 import logging
-from typing import Any, cast
 
 from aiohttp import web
 import voluptuous as vol
 
 from ..addons.addon import Addon
@@ -18,7 +16,6 @@ from ..const import (
     AddonState,
 )
 from ..coresys import CoreSysAttributes
-from ..discovery import Message
 from ..exceptions import APIForbidden, APINotFound
 from .utils import api_process, api_validate, require_home_assistant
 
@@ -35,16 +32,16 @@ SCHEMA_DISCOVERY = vol.Schema(
 class APIDiscovery(CoreSysAttributes):
     """Handle RESTful API for discovery functions."""
 
-    def _extract_message(self, request: web.Request) -> Message:
+    def _extract_message(self, request):
         """Extract discovery message from URL."""
-        message = self.sys_discovery.get(request.match_info["uuid"])
+        message = self.sys_discovery.get(request.match_info.get("uuid"))
         if not message:
             raise APINotFound("Discovery message not found")
         return message
 
     @api_process
     @require_home_assistant
-    async def list_discovery(self, request: web.Request) -> dict[str, Any]:
+    async def list(self, request):
         """Show registered and available services."""
         # Get available discovery
         discovery = [
@@ -55,16 +52,12 @@ class APIDiscovery(CoreSysAttributes):
                 ATTR_CONFIG: message.config,
             }
             for message in self.sys_discovery.list_messages
-            if (
-                discovered := cast(
-                    Addon, self.sys_addons.get(message.addon, local_only=True)
-                )
-            )
-            and discovered.state == AddonState.STARTED
+            if (addon := self.sys_addons.get(message.addon, local_only=True))
+            and addon.state == AddonState.STARTED
         ]
 
         # Get available services/add-ons
-        services: dict[str, list[str]] = {}
+        services = {}
         for addon in self.sys_addons.all:
             for name in addon.discovery:
                 services.setdefault(name, []).append(addon.slug)
@@ -72,7 +65,7 @@ class APIDiscovery(CoreSysAttributes):
         return {ATTR_DISCOVERY: discovery, ATTR_SERVICES: services}
 
     @api_process
-    async def set_discovery(self, request: web.Request) -> dict[str, str]:
+    async def set_discovery(self, request):
         """Write data into a discovery pipeline."""
         body = await api_validate(SCHEMA_DISCOVERY, request)
         addon: Addon = request[REQUEST_FROM]
@@ -96,7 +89,7 @@ class APIDiscovery(CoreSysAttributes):
 
     @api_process
     @require_home_assistant
-    async def get_discovery(self, request: web.Request) -> dict[str, Any]:
+    async def get_discovery(self, request):
         """Read data into a discovery message."""
         message = self._extract_message(request)
 
@@ -108,7 +101,7 @@ class APIDiscovery(CoreSysAttributes):
         }
 
     @api_process
-    async def del_discovery(self, request: web.Request) -> None:
+    async def del_discovery(self, request):
         """Delete data into a discovery message."""
         message = self._extract_message(request)
         addon = request[REQUEST_FROM]
@@ -118,3 +111,4 @@ class APIDiscovery(CoreSysAttributes):
             raise APIForbidden("Can't remove discovery message")
 
         await self.sys_discovery.remove(message)
+        return True
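The list_discovery comprehension leans on the walrus operator: the filter binds the add-on lookup once and reuses the bound name, instead of looking the add-on up twice. A standalone illustration with plain dicts:

    registry = {"a": "started", "b": "stopped"}

    def lookup(slug: str) -> str | None:
        return registry.get(slug)

    # (state := lookup(slug)) binds the result so the filter can test it
    # without a second lookup call.
    started = [
        slug
        for slug in ("a", "b", "c")
        if (state := lookup(slug)) and state == "started"
    ]
    assert started == ["a"]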
supervisor/api/hardware.py

@@ -68,10 +68,7 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
         ATTR_NAME: fs_block.id_label,
         ATTR_SYSTEM: fs_block.hint_system,
         ATTR_MOUNT_POINTS: [
-            str(mount_point)
-            for mount_point in (
-                fs_block.filesystem.mount_points if fs_block.filesystem else []
-            )
+            str(mount_point) for mount_point in fs_block.filesystem.mount_points
         ],
     }
 
supervisor/api/host.py

@@ -3,7 +3,6 @@
 import asyncio
 from contextlib import suppress
 import logging
-from typing import Any
 
 from aiohttp import ClientConnectionResetError, web
 from aiohttp.hdrs import ACCEPT, RANGE
@@ -196,18 +195,20 @@ class APIHost(CoreSysAttributes):
     ) -> web.StreamResponse:
         """Return systemd-journald logs."""
         log_formatter = LogFormatter.PLAIN
-        params: dict[str, Any] = {}
+        params = {}
         if identifier:
             params[PARAM_SYSLOG_IDENTIFIER] = identifier
         elif IDENTIFIER in request.match_info:
-            params[PARAM_SYSLOG_IDENTIFIER] = request.match_info[IDENTIFIER]
+            params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
         else:
             params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
             # host logs should be always verbose, no matter what Accept header is used
             log_formatter = LogFormatter.VERBOSE
 
         if BOOTID in request.match_info:
-            params[PARAM_BOOT_ID] = await self._get_boot_id(request.match_info[BOOTID])
+            params[PARAM_BOOT_ID] = await self._get_boot_id(
+                request.match_info.get(BOOTID)
+            )
         if follow:
             params[PARAM_FOLLOW] = ""
 
@@ -240,7 +241,7 @@ class APIHost(CoreSysAttributes):
             # entries=cursor[[:num_skip]:num_entries]
             range_header = f"entries=:-{lines - 1}:{'' if follow else lines}"
         elif RANGE in request.headers:
-            range_header = request.headers[RANGE]
+            range_header = request.headers.get(RANGE)
         else:
             range_header = (
                 f"entries=:-{DEFAULT_LINES - 1}:{'' if follow else DEFAULT_LINES}"
@@ -254,22 +255,16 @@ class APIHost(CoreSysAttributes):
             response.content_type = CONTENT_TYPE_TEXT
             headers_returned = False
             async for cursor, line in journal_logs_reader(resp, log_formatter):
-                try:
-                    if not headers_returned:
-                        if cursor:
-                            response.headers["X-First-Cursor"] = cursor
-                        response.headers["X-Accel-Buffering"] = "no"
-                        await response.prepare(request)
-                        headers_returned = True
+                if not headers_returned:
+                    if cursor:
+                        response.headers["X-First-Cursor"] = cursor
+                    response.headers["X-Accel-Buffering"] = "no"
+                    await response.prepare(request)
+                    headers_returned = True
+                # When client closes the connection while reading busy logs, we
+                # sometimes get this exception. It should be safe to ignore it.
+                with suppress(ClientConnectionResetError):
                     await response.write(line.encode("utf-8") + b"\n")
-                except ClientConnectionResetError as err:
-                    # When client closes the connection while reading busy logs, we
-                    # sometimes get this exception. It should be safe to ignore it.
-                    _LOGGER.debug(
-                        "ClientConnectionResetError raised when returning journal logs: %s",
-                        err,
-                    )
-                    break
             except ConnectionResetError as ex:
                 raise APIError(
                     "Connection reset when trying to fetch data from systemd-journald."
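The log-streaming hunk trades a try/except ClientConnectionResetError around the whole loop body for a narrower contextlib.suppress around just the write. For "ignore this exception here" the two shapes are equivalent, shown with a stdlib exception:

    from contextlib import suppress

    def risky() -> None:
        raise ConnectionResetError

    try:  # explicit handler form
        risky()
    except ConnectionResetError:
        pass

    with suppress(ConnectionResetError):  # compact equivalent
        risky()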
@@ -83,7 +83,7 @@ class APIIngress(CoreSysAttributes):

def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception it it doesn't exist."""
token = request.match_info["token"]
token = request.match_info.get("token")

# Find correct add-on
addon = self.sys_ingress.get(token)
@@ -132,7 +132,7 @@ class APIIngress(CoreSysAttributes):

@api_process
@require_home_assistant
async def validate_session(self, request: web.Request) -> None:
async def validate_session(self, request: web.Request) -> dict[str, Any]:
"""Validate session and extending how long it's valid for."""
data = await api_validate(VALIDATE_SESSION_DATA, request)

@@ -147,14 +147,14 @@ class APIIngress(CoreSysAttributes):
"""Route data to Supervisor ingress service."""

# Check Ingress Session
session = request.cookies.get(COOKIE_INGRESS, "")
session = request.cookies.get(COOKIE_INGRESS)
if not self.sys_ingress.validate_session(session):
_LOGGER.warning("No valid ingress session %s", session)
raise HTTPUnauthorized()

# Process requests
addon = self._extract_addon(request)
path = request.match_info["path"]
path = request.match_info.get("path")
session_data = self.sys_ingress.get_session_data(session)
try:
# Websocket
@@ -183,7 +183,7 @@ class APIIngress(CoreSysAttributes):
for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
]
else:
req_protocols = []
req_protocols = ()

ws_server = web.WebSocketResponse(
protocols=req_protocols, autoclose=False, autoping=False
@@ -340,10 +340,9 @@ def _init_header(
headers[name] = value

# Update X-Forwarded-For
if request.transport:
forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"
forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"

return headers

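The _init_header change above appends the directly connected peer to the X-Forwarded-For chain. A standalone sketch of that append, assuming a plain string header value; note forward_for may be None on the first hop, which the f-string above renders literally:

# Sketch of the X-Forwarded-For update above, outside aiohttp; the
# append_forwarded_for helper is illustrative only.
from ipaddress import ip_address

def append_forwarded_for(existing: str | None, peer: str) -> str:
    # The directly connected peer is appended to whatever chain an
    # upstream proxy already set.
    connected_ip = ip_address(peer)
    return f"{existing}, {connected_ip!s}"

print(append_forwarded_for("203.0.113.5", "172.30.32.1"))  # 203.0.113.5, 172.30.32.1
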
@@ -26,7 +26,7 @@ class APIJobs(CoreSysAttributes):
def _extract_job(self, request: web.Request) -> SupervisorJob:
"""Extract job from request or raise."""
try:
return self.sys_jobs.get_job(request.match_info["uuid"])
return self.sys_jobs.get_job(request.match_info.get("uuid"))
except JobNotFound:
raise APINotFound("Job does not exist") from None

@@ -71,10 +71,7 @@ class APIJobs(CoreSysAttributes):

if current_job.uuid in jobs_by_parent:
queue.extend(
[
(child_jobs, job)
for job in jobs_by_parent.get(current_job.uuid, [])
]
[(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)]
)

return job_list

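The queue.extend above is one step of a breadth-first walk over a parent-to-children job index. A standalone sketch under assumed plain types, with job objects reduced to strings; the `, []` default mirrors the removed side of the hunk:

# Toy version of the queue-based traversal above; not Supervisor types.
from collections import deque

def walk_children(root: str, jobs_by_parent: dict[str, list[str]]) -> list[str]:
    ordered: list[str] = []
    queue: deque[str] = deque([root])
    while queue:
        current = queue.popleft()
        ordered.append(current)
        # Same shape as the queue.extend(...) in the hunk above.
        queue.extend(jobs_by_parent.get(current, []))
    return ordered

print(walk_children("a", {"a": ["b", "c"], "b": ["d"]}))  # ['a', 'b', 'c', 'd']
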
@@ -1,12 +1,11 @@
"""Handle security part of this API."""

from collections.abc import Callable
import logging
import re
from typing import Final
from urllib.parse import unquote

from aiohttp.web import Request, Response, middleware
from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
from awesomeversion import AwesomeVersion

@@ -24,7 +23,7 @@ from ...const import (
)
from ...coresys import CoreSys, CoreSysAttributes
from ...utils import version_is_new_enough
from ..utils import api_return_error, extract_supervisor_token
from ..utils import api_return_error, excract_supervisor_token

_LOGGER: logging.Logger = logging.getLogger(__name__)
_CORE_VERSION: Final = AwesomeVersion("2023.3.4")
@@ -180,7 +179,9 @@ class SecurityMiddleware(CoreSysAttributes):
return unquoted

@middleware
async def block_bad_requests(self, request: Request, handler: Callable) -> Response:
async def block_bad_requests(
self, request: Request, handler: RequestHandler
) -> Response:
"""Process request and tblock commonly known exploit attempts."""
if FILTERS.search(self._recursive_unquote(request.path)):
_LOGGER.warning(
@@ -198,7 +199,9 @@ class SecurityMiddleware(CoreSysAttributes):
return await handler(request)

@middleware
async def system_validation(self, request: Request, handler: Callable) -> Response:
async def system_validation(
self, request: Request, handler: RequestHandler
) -> Response:
"""Check if core is ready to response."""
if self.sys_core.state not in (
CoreState.STARTUP,
@@ -212,10 +215,12 @@ class SecurityMiddleware(CoreSysAttributes):
return await handler(request)

@middleware
async def token_validation(self, request: Request, handler: Callable) -> Response:
async def token_validation(
self, request: Request, handler: RequestHandler
) -> Response:
"""Check security access of this layer."""
request_from: CoreSysAttributes | None = None
supervisor_token = extract_supervisor_token(request)
request_from = None
supervisor_token = excract_supervisor_token(request)

# Blacklist
if BLACKLIST.match(request.path):
@@ -283,7 +288,7 @@ class SecurityMiddleware(CoreSysAttributes):
raise HTTPForbidden()

@middleware
async def core_proxy(self, request: Request, handler: Callable) -> Response:
async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
"""Validate user from Core API proxy."""
if (
request[REQUEST_FROM] != self.sys_homeassistant

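All four signature changes above follow aiohttp's middleware contract: an async callable taking the request and the next handler, returning a response or raising. A minimal sketch of that contract, with an illustrative block rule rather than Supervisor's FILTERS/BLACKLIST:

# Minimal aiohttp middleware matching the decorator pattern above;
# the "/forbidden" rule is illustrative only.
from aiohttp import web

@web.middleware
async def example_guard(request: web.Request, handler) -> web.StreamResponse:
    if request.path.startswith("/forbidden"):
        raise web.HTTPForbidden()
    # Fall through to the wrapped handler, exactly like the middlewares above.
    return await handler(request)

app = web.Application(middlewares=[example_guard])
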
@@ -1,6 +1,6 @@
"""Inits file for supervisor mounts REST API."""

from typing import Any, cast
from typing import Any

from aiohttp import web
import voluptuous as vol
@@ -10,7 +10,7 @@ from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound
from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
from ..mounts.mount import Mount
from ..mounts.validate import SCHEMA_MOUNT_CONFIG, MountData
from ..mounts.validate import SCHEMA_MOUNT_CONFIG
from .const import ATTR_MOUNTS, ATTR_USER_PATH
from .utils import api_process, api_validate

@@ -26,7 +26,7 @@ class APIMounts(CoreSysAttributes):

def _extract_mount(self, request: web.Request) -> Mount:
"""Extract mount from request or raise."""
name = request.match_info["mount"]
name = request.match_info.get("mount")
if name not in self.sys_mounts:
raise APINotFound(f"No mount exists with name {name}")
return self.sys_mounts.get(name)
@@ -71,10 +71,10 @@ class APIMounts(CoreSysAttributes):
@api_process
async def create_mount(self, request: web.Request) -> None:
"""Create a new mount in supervisor."""
body = cast(MountData, await api_validate(SCHEMA_MOUNT_CONFIG, request))
body = await api_validate(SCHEMA_MOUNT_CONFIG, request)

if body["name"] in self.sys_mounts:
raise APIError(f"A mount already exists with name {body['name']}")
if body[ATTR_NAME] in self.sys_mounts:
raise APIError(f"A mount already exists with name {body[ATTR_NAME]}")

mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount)
@@ -97,10 +97,7 @@ class APIMounts(CoreSysAttributes):
{vol.Optional(ATTR_NAME, default=current.name): current.name},
extra=vol.ALLOW_EXTRA,
)
body = cast(
MountData,
await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request),
)
body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)

mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount)

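The update path above composes two voluptuous schemas with vol.All so the mount name is both defaulted and pinned to the existing mount while the full config is still validated. A sketch under a simplified, illustrative mount schema:

# Sketch of the vol.All composition used in the mount update path above;
# the field set here is illustrative, not SCHEMA_MOUNT_CONFIG.
import voluptuous as vol

SCHEMA_MOUNT = vol.Schema(
    {vol.Required("name"): str, vol.Required("usage"): str},
    extra=vol.ALLOW_EXTRA,
)

current_name = "backup_share"
name_schema = vol.Schema(
    # A literal value as the validator means "must equal"; combined with
    # the default this pins the name to the existing mount.
    {vol.Optional("name", default=current_name): current_name},
    extra=vol.ALLOW_EXTRA,
)

body = vol.All(name_schema, SCHEMA_MOUNT)({"usage": "backup"})
print(body)  # name defaulted to "backup_share", usage validated
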
@@ -132,12 +132,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
ATTR_CONNECTED: interface.connected,
ATTR_PRIMARY: interface.primary,
ATTR_MAC: interface.mac,
ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting)
if interface.ipv4 and interface.ipv4setting
else None,
ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting)
if interface.ipv6 and interface.ipv6setting
else None,
ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting),
ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting),
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
}
@@ -194,14 +190,14 @@ class APINetwork(CoreSysAttributes):
@api_process
async def interface_info(self, request: web.Request) -> dict[str, Any]:
"""Return network information for a interface."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE])
interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))

return interface_struct(interface)

@api_process
async def interface_update(self, request: web.Request) -> None:
"""Update the configuration of an interface."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE])
interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))

# Validate data
body = await api_validate(SCHEMA_UPDATE, request)
@@ -247,7 +243,7 @@ class APINetwork(CoreSysAttributes):
@api_process
async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]:
"""Scan and return a list of available networks."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE])
interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))

# Only wlan is supported
if interface.type != InterfaceType.WIRELESS:
@@ -260,10 +256,8 @@ class APINetwork(CoreSysAttributes):
@api_process
async def create_vlan(self, request: web.Request) -> None:
"""Create a new vlan."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE])
vlan = int(request.match_info.get(ATTR_VLAN, -1))
if vlan < 0:
raise APIError(f"Invalid vlan specified: {vlan}")
interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
vlan = int(request.match_info.get(ATTR_VLAN))

# Only ethernet is supported
if interface.type != InterfaceType.ETHERNET:

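The removed lines above validate the vlan path parameter with a -1 sentinel before use; the restored .get(ATTR_VLAN) side would pass None to int() when the parameter is missing and raise a TypeError instead. A standalone sketch of the sentinel version, with ValueError standing in for APIError:

# Sketch of the sentinel-default validation from the removed lines above.
def parse_vlan(match_info: dict[str, str]) -> int:
    # Missing parameter falls back to -1, which the range check rejects
    # with a controlled error instead of a TypeError from int(None).
    vlan = int(match_info.get("vlan", -1))
    if vlan < 0:
        raise ValueError(f"Invalid vlan specified: {vlan}")
    return vlan

print(parse_vlan({"vlan": "10"}))  # 10
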
@@ -3,7 +3,6 @@
import asyncio
from collections.abc import Awaitable
import logging
import re
from typing import Any

from aiohttp import web
@@ -22,14 +21,12 @@ from ..const import (
ATTR_SERIAL,
ATTR_SIZE,
ATTR_STATE,
ATTR_SWAP_SIZE,
ATTR_SWAPPINESS,
ATTR_UPDATE_AVAILABLE,
ATTR_VERSION,
ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APINotFound, BoardInvalidError
from ..exceptions import BoardInvalidError
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..validate import version_tag
from .const import (
@@ -68,15 +65,6 @@ SCHEMA_GREEN_OPTIONS = vol.Schema(
vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
}
)

RE_SWAP_SIZE = re.compile(r"^\d+([KMG](i?B)?|B)?$", re.IGNORECASE)

SCHEMA_SWAP_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_SWAP_SIZE): vol.Match(RE_SWAP_SIZE),
vol.Optional(ATTR_SWAPPINESS): vol.All(int, vol.Range(min=0, max=200)),
}
)
# pylint: enable=no-value-for-parameter


@@ -224,45 +212,3 @@ class APIOS(CoreSysAttributes):
)

return {}

@api_process
async def config_swap_info(self, request: web.Request) -> dict[str, Any]:
"""Get swap settings."""
if not self.coresys.os.available or self.coresys.os.version < "15.0":
raise APINotFound(
"Home Assistant OS 15.0 or newer required for swap settings"
)

return {
ATTR_SWAP_SIZE: self.sys_dbus.agent.swap.swap_size,
ATTR_SWAPPINESS: self.sys_dbus.agent.swap.swappiness,
}

@api_process
async def config_swap_options(self, request: web.Request) -> None:
"""Update swap settings."""
if not self.coresys.os.available or self.coresys.os.version < "15.0":
raise APINotFound(
"Home Assistant OS 15.0 or newer required for swap settings"
)

body = await api_validate(SCHEMA_SWAP_OPTIONS, request)

reboot_required = False

if ATTR_SWAP_SIZE in body:
old_size = self.sys_dbus.agent.swap.swap_size
await self.sys_dbus.agent.swap.set_swap_size(body[ATTR_SWAP_SIZE])
reboot_required = reboot_required or old_size != body[ATTR_SWAP_SIZE]

if ATTR_SWAPPINESS in body:
old_swappiness = self.sys_dbus.agent.swap.swappiness
await self.sys_dbus.agent.swap.set_swappiness(body[ATTR_SWAPPINESS])
reboot_required = reboot_required or old_swappiness != body[ATTR_SWAPPINESS]

if reboot_required:
self.sys_resolution.create_issue(
IssueType.REBOOT_REQUIRED,
ContextType.SYSTEM,
suggestions=[SuggestionType.EXECUTE_REBOOT],
)

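For reference, the kinds of values the RE_SWAP_SIZE pattern above accepts and rejects (case-insensitive digits plus an optional K/M/G, KiB-style, or B suffix):

# Quick demonstration of the RE_SWAP_SIZE pattern defined above.
import re

RE_SWAP_SIZE = re.compile(r"^\d+([KMG](i?B)?|B)?$", re.IGNORECASE)

for value in ("4096", "512M", "1GiB", "2kb", "1.5G", "GiB"):
    print(value, bool(RE_SWAP_SIZE.match(value)))
# "4096", "512M", "1GiB" and "2kb" match; "1.5G" (decimal) and
# "GiB" (no leading digits) do not.
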
@@ -1,20 +1,18 @@
"""Utils for Home Assistant Proxy."""

import asyncio
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager
import logging

import aiohttp
from aiohttp import WSMessageTypeError, web
from aiohttp import web
from aiohttp.client_exceptions import ClientConnectorError
from aiohttp.client_ws import ClientWebSocketResponse
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE
from aiohttp.http import WSMessage
from aiohttp.http_websocket import WSMsgType
from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized

from supervisor.utils.logging import AddonLoggerAdapter

from ..coresys import CoreSysAttributes
from ..exceptions import APIError, HomeAssistantAPIError, HomeAssistantAuthError
from ..utils.json import json_dumps
@@ -42,7 +40,7 @@ class APIProxy(CoreSysAttributes):
bearer = request.headers[AUTHORIZATION]
supervisor_token = bearer.split(" ")[-1]
else:
supervisor_token = request.headers.get(HEADER_HA_ACCESS, "")
supervisor_token = request.headers.get(HEADER_HA_ACCESS)

addon = self.sys_addons.from_token(supervisor_token)
if not addon:
@@ -56,9 +54,7 @@ class APIProxy(CoreSysAttributes):
raise HTTPUnauthorized()

@asynccontextmanager
async def _api_client(
self, request: web.Request, path: str, timeout: int | None = 300
) -> AsyncIterator[aiohttp.ClientResponse]:
async def _api_client(self, request: web.Request, path: str, timeout: int = 300):
"""Return a client request with proxy origin for Home Assistant."""
try:
async with self.sys_homeassistant.api.make_request(
@@ -97,7 +93,7 @@ class APIProxy(CoreSysAttributes):
_LOGGER.info("Home Assistant EventStream start")
async with self._api_client(request, "stream", timeout=None) as client:
response = web.StreamResponse()
response.content_type = request.headers.get(CONTENT_TYPE, "")
response.content_type = request.headers.get(CONTENT_TYPE)
try:
response.headers["X-Accel-Buffering"] = "no"
await response.prepare(request)
@@ -117,7 +113,7 @@ class APIProxy(CoreSysAttributes):
raise HTTPBadGateway()

# Normal request
path = request.match_info["path"]
path = request.match_info.get("path", "")
async with self._api_client(request, path) as client:
data = await client.read()
return web.Response(
@@ -180,39 +176,25 @@ class APIProxy(CoreSysAttributes):

async def _proxy_message(
self,
source: web.WebSocketResponse | ClientWebSocketResponse,
read_task: asyncio.Task,
target: web.WebSocketResponse | ClientWebSocketResponse,
logger: AddonLoggerAdapter,
) -> None:
"""Proxy a message from client to server or vice versa."""
while not source.closed and not target.closed:
msg = await source.receive()
match msg.type:
case WSMsgType.TEXT:
await target.send_str(msg.data)
case WSMsgType.BINARY:
await target.send_bytes(msg.data)
case WSMsgType.CLOSE | WSMsgType.CLOSED:
logger.debug(
"Received WebSocket message type %r from %s.",
msg.type,
"add-on" if type(source) is web.WebSocketResponse else "Core",
)
await target.close()
case WSMsgType.CLOSING:
pass
case WSMsgType.ERROR:
logger.warning(
"Error WebSocket message received while proxying: %r", msg.data
)
await target.close(code=source.close_code)
case _:
logger.warning(
"Cannot proxy WebSocket message of unsupported type: %r",
msg.type,
)
await source.close()
await target.close()
if read_task.exception():
raise read_task.exception()

msg: WSMessage = read_task.result()
if msg.type == WSMsgType.TEXT:
return await target.send_str(msg.data)
if msg.type == WSMsgType.BINARY:
return await target.send_bytes(msg.data)
if msg.type == WSMsgType.CLOSE:
_LOGGER.debug("Received close message from WebSocket.")
return await target.close()

raise TypeError(
f"Cannot proxy websocket message of unsupported type: {msg.type}"
)

async def websocket(self, request: web.Request):
"""Initialize a WebSocket API connection."""
@@ -232,8 +214,8 @@ class APIProxy(CoreSysAttributes):
dumps=json_dumps,
)

# Check API access, wait up to 10s just like _async_handle_auth_phase in Core
response = await server.receive_json(timeout=10)
# Check API access
response = await server.receive_json()
supervisor_token = response.get("api_password") or response.get(
"access_token"
)
@@ -254,14 +236,6 @@ class APIProxy(CoreSysAttributes):
{"type": "auth_ok", "ha_version": self.sys_homeassistant.version},
dumps=json_dumps,
)
except TimeoutError:
_LOGGER.error("Timeout during authentication for WebSocket API")
return server
except WSMessageTypeError as err:
_LOGGER.error(
"Unexpected message during authentication for WebSocket API: %s", err
)
return server
except (RuntimeError, ValueError) as err:
_LOGGER.error("Can't initialize handshake: %s", err)
return server
@@ -272,32 +246,48 @@ class APIProxy(CoreSysAttributes):
except APIError:
return server

logger = AddonLoggerAdapter(_LOGGER, {"addon_name": addon_name})
logger.info("Home Assistant WebSocket API proxy running")
_LOGGER.info("Home Assistant WebSocket API request running")
try:
client_read: asyncio.Task | None = None
server_read: asyncio.Task | None = None
while not server.closed and not client.closed:
if not client_read:
client_read = self.sys_create_task(client.receive())
if not server_read:
server_read = self.sys_create_task(server.receive())

client_task = self.sys_create_task(self._proxy_message(client, server, logger))
server_task = self.sys_create_task(self._proxy_message(server, client, logger))
# wait until data need to be processed
await asyncio.wait(
[client_read, server_read], return_when=asyncio.FIRST_COMPLETED
)

# Typically, this will return with an empty pending set. However, if one of
# the directions has an exception, make sure to close both connections and
# wait for the other proxy task to exit gracefully. Using this over try-except
# handling makes it easier to wait for the other direction to complete.
_, pending = await asyncio.wait(
(client_task, server_task), return_when=asyncio.FIRST_EXCEPTION
)
# server
if server_read.done() and not client.closed:
await self._proxy_message(server_read, client)
server_read = None

if not client.closed:
await client.close()
if not server.closed:
await server.close()
# client
if client_read.done() and not server.closed:
await self._proxy_message(client_read, server)
client_read = None

if pending:
_, pending = await asyncio.wait(
pending, timeout=10, return_when=asyncio.ALL_COMPLETED
)
for task in pending:
task.cancel()
logger.critical("WebSocket proxy task: %s did not end gracefully", task)
except asyncio.CancelledError:
pass

logger.info("Home Assistant WebSocket API closed")
except (RuntimeError, ConnectionError, TypeError) as err:
_LOGGER.info("Home Assistant WebSocket API error: %s", err)

finally:
if client_read and not client_read.done():
client_read.cancel()
if server_read and not server_read.done():
server_read.cancel()

# close connections
if not client.closed:
await client.close()
if not server.closed:
await server.close()

_LOGGER.info("Home Assistant WebSocket API for %s closed", addon_name)
return server

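The rewritten proxy above runs one _proxy_message task per direction and, as its inline comment explains, uses asyncio.wait with FIRST_EXCEPTION so a failure in either direction unblocks the cleanup path while a clean close leaves nothing pending. A distilled sketch of that shutdown logic, with toy pump tasks in place of the proxy coroutines:

# Distilled sketch of the two-direction shutdown above; pump() stands in
# for the _proxy_message tasks.
import asyncio

async def pump(name: str, fail: bool = False) -> None:
    await asyncio.sleep(0.01 if fail else 0.05)
    if fail:
        raise RuntimeError(f"{name} broke")

async def main() -> None:
    client_task = asyncio.create_task(pump("client->server"))
    server_task = asyncio.create_task(pump("server->client", fail=True))
    # FIRST_EXCEPTION returns as soon as one direction raises; on a clean
    # shutdown of both directions, pending comes back empty.
    done, pending = await asyncio.wait(
        {client_task, server_task}, return_when=asyncio.FIRST_EXCEPTION
    )
    for task in done:
        if task.exception():
            print(f"direction failed: {task.exception()}")
    if pending:
        # Give the surviving direction bounded time to finish, then cancel.
        _, still_pending = await asyncio.wait(pending, timeout=10)
        for task in still_pending:
            task.cancel()

asyncio.run(main())
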
@@ -33,21 +33,23 @@ class APIResoulution(CoreSysAttributes):
def _extract_issue(self, request: web.Request) -> Issue:
"""Extract issue from request or raise."""
try:
return self.sys_resolution.get_issue(request.match_info["issue"])
return self.sys_resolution.get_issue(request.match_info.get("issue"))
except ResolutionNotFound:
raise APINotFound("The supplied UUID is not a valid issue") from None

def _extract_suggestion(self, request: web.Request) -> Suggestion:
"""Extract suggestion from request or raise."""
try:
return self.sys_resolution.get_suggestion(request.match_info["suggestion"])
return self.sys_resolution.get_suggestion(
request.match_info.get("suggestion")
)
except ResolutionNotFound:
raise APINotFound("The supplied UUID is not a valid suggestion") from None

def _extract_check(self, request: web.Request) -> CheckBase:
"""Extract check from request or raise."""
try:
return self.sys_resolution.check.get(request.match_info["check"])
return self.sys_resolution.check.get(request.match_info.get("check"))
except ResolutionNotFound:
raise APINotFound("The supplied check slug is not available") from None

@@ -25,7 +25,7 @@ class APIServices(CoreSysAttributes):
return service

@api_process
async def list_services(self, request):
async def list(self, request):
"""Show register services."""
services = []
for service in self.sys_services.list_services:

@@ -3,12 +3,11 @@
import asyncio
from collections.abc import Awaitable
from pathlib import Path
from typing import Any, cast
from typing import Any

from aiohttp import web
import voluptuous as vol

from ..addons.addon import Addon
from ..addons.manager import AnyAddon
from ..addons.utils import rating_security
from ..api.const import ATTR_SIGNED
@@ -70,21 +69,12 @@ SCHEMA_ADD_REPOSITORY = vol.Schema(
)


def _read_static_text_file(path: Path) -> Any:
"""Read in a static text file asset for API output.
def _read_static_file(path: Path, binary: bool = False) -> Any:
"""Read in a static file asset for API output.

Must be run in executor.
"""
with path.open("r", errors="replace") as asset:
return asset.read()


def _read_static_binary_file(path: Path) -> Any:
"""Read in a static binary file asset for API output.

Must be run in executor.
"""
with path.open("rb") as asset:
with path.open("rb" if binary else "r") as asset:
return asset.read()


@@ -93,7 +83,7 @@ class APIStore(CoreSysAttributes):

def _extract_addon(self, request: web.Request, installed=False) -> AnyAddon:
"""Return add-on, throw an exception it it doesn't exist."""
addon_slug: str = request.match_info["addon"]
addon_slug: str = request.match_info.get("addon")

if not (addon := self.sys_addons.get(addon_slug)):
raise APINotFound(f"Addon {addon_slug} does not exist")
@@ -102,7 +92,6 @@ class APIStore(CoreSysAttributes):
raise APIError(f"Addon {addon_slug} is not installed")

if not installed and addon.is_installed:
addon = cast(Addon, addon)
if not addon.addon_store:
raise APINotFound(f"Addon {addon_slug} does not exist in the store")
return addon.addon_store
@@ -111,7 +100,7 @@ class APIStore(CoreSysAttributes):

def _extract_repository(self, request: web.Request) -> Repository:
"""Return repository, throw an exception it it doesn't exist."""
repository_slug: str = request.match_info["repository"]
repository_slug: str = request.match_info.get("repository")

if repository_slug not in self.sys_store.repositories:
raise APINotFound(
@@ -126,7 +115,7 @@ class APIStore(CoreSysAttributes):
"""Generate addon information."""

installed = (
cast(Addon, self.sys_addons.get(addon.slug, local_only=True))
self.sys_addons.get(addon.slug, local_only=True)
if addon.is_installed
else None
)
@@ -146,10 +135,12 @@ class APIStore(CoreSysAttributes):
ATTR_REPOSITORY: addon.repository,
ATTR_SLUG: addon.slug,
ATTR_STAGE: addon.stage,
ATTR_UPDATE_AVAILABLE: installed.need_update if installed else False,
ATTR_UPDATE_AVAILABLE: installed.need_update
if addon.is_installed
else False,
ATTR_URL: addon.url,
ATTR_VERSION_LATEST: addon.latest_version,
ATTR_VERSION: installed.version if installed else None,
ATTR_VERSION: installed.version if addon.is_installed else None,
}
if extended:
data.update(
@@ -246,7 +237,7 @@ class APIStore(CoreSysAttributes):
# Used by legacy routing for addons/{addon}/info, can be refactored out when that is removed (1/2023)
async def addons_addon_info_wrapped(self, request: web.Request) -> dict[str, Any]:
"""Return add-on information directly (not api)."""
addon = cast(AddonStore, self._extract_addon(request))
addon: AddonStore = self._extract_addon(request)
return await self._generate_addon_information(addon, True)

@api_process_raw(CONTENT_TYPE_PNG)
@@ -256,7 +247,7 @@ class APIStore(CoreSysAttributes):
if not addon.with_icon:
raise APIError(f"No icon found for add-on {addon.slug}!")

return await self.sys_run_in_executor(_read_static_binary_file, addon.path_icon)
return await self.sys_run_in_executor(_read_static_file, addon.path_icon, True)

@api_process_raw(CONTENT_TYPE_PNG)
async def addons_addon_logo(self, request: web.Request) -> bytes:
@@ -265,7 +256,7 @@ class APIStore(CoreSysAttributes):
if not addon.with_logo:
raise APIError(f"No logo found for add-on {addon.slug}!")

return await self.sys_run_in_executor(_read_static_binary_file, addon.path_logo)
return await self.sys_run_in_executor(_read_static_file, addon.path_logo, True)

@api_process_raw(CONTENT_TYPE_TEXT)
async def addons_addon_changelog(self, request: web.Request) -> str:
@@ -279,9 +270,7 @@ class APIStore(CoreSysAttributes):
if not addon.with_changelog:
return f"No changelog found for add-on {addon.slug}!"

return await self.sys_run_in_executor(
_read_static_text_file, addon.path_changelog
)
return await self.sys_run_in_executor(_read_static_file, addon.path_changelog)

@api_process_raw(CONTENT_TYPE_TEXT)
async def addons_addon_documentation(self, request: web.Request) -> str:
@@ -296,7 +285,7 @@ class APIStore(CoreSysAttributes):
return f"No documentation found for add-on {addon.slug}!"

return await self.sys_run_in_executor(
_read_static_text_file, addon.path_documentation
_read_static_file, addon.path_documentation
)

@api_process

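Both static-file helpers above are documented as "Must be run in executor" because open/read block the event loop. A sketch of that offload, using a plain run_in_executor in place of sys_run_in_executor; the helper name and path are illustrative:

# Sketch of the executor offload used for the static-file helpers above.
import asyncio
from pathlib import Path

def read_text_asset(path: Path) -> str:
    # Blocking file I/O: must not run on the event loop directly.
    with path.open("r", errors="replace") as asset:
        return asset.read()

async def main() -> None:
    loop = asyncio.get_running_loop()
    # Equivalent shape to self.sys_run_in_executor(_read_static_text_file, path)
    content = await loop.run_in_executor(None, read_text_asset, Path("/etc/hostname"))
    print(content.strip())

asyncio.run(main())
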
@@ -20,7 +20,6 @@ from ..const import (
ATTR_CPU_PERCENT,
ATTR_DEBUG,
ATTR_DEBUG_BLOCK,
ATTR_DETECT_BLOCKING_IO,
ATTR_DIAGNOSTICS,
ATTR_FORCE_SECURITY,
ATTR_HEALTHY,
@@ -48,15 +47,10 @@ from ..const import (
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..store.validate import repositories
from ..utils.blockbuster import (
activate_blockbuster,
blockbuster_enabled,
deactivate_blockbuster,
)
from ..utils.sentry import close_sentry, init_sentry
from ..utils.validate import validate_timezone
from ..validate import version_tag, wait_boot
from .const import CONTENT_TYPE_TEXT, DetectBlockingIO
from .const import CONTENT_TYPE_TEXT
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -66,7 +60,7 @@ SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_CHANNEL): vol.Coerce(UpdateChannel),
vol.Optional(ATTR_ADDONS_REPOSITORIES): repositories,
vol.Optional(ATTR_TIMEZONE): str,
vol.Optional(ATTR_TIMEZONE): validate_timezone,
vol.Optional(ATTR_WAIT_BOOT): wait_boot,
vol.Optional(ATTR_LOGGING): vol.Coerce(LogLevel),
vol.Optional(ATTR_DEBUG): vol.Boolean(),
@@ -75,7 +69,6 @@ SCHEMA_OPTIONS = vol.Schema(
vol.Optional(ATTR_CONTENT_TRUST): vol.Boolean(),
vol.Optional(ATTR_FORCE_SECURITY): vol.Boolean(),
vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
vol.Optional(ATTR_DETECT_BLOCKING_IO): vol.Coerce(DetectBlockingIO),
}
)

@@ -108,7 +101,6 @@ class APISupervisor(CoreSysAttributes):
ATTR_DEBUG_BLOCK: self.sys_config.debug_block,
ATTR_DIAGNOSTICS: self.sys_config.diagnostics,
ATTR_AUTO_UPDATE: self.sys_updater.auto_update,
ATTR_DETECT_BLOCKING_IO: blockbuster_enabled(),
# Depricated
ATTR_WAIT_BOOT: self.sys_config.wait_boot,
ATTR_ADDONS: [
@@ -135,18 +127,12 @@ class APISupervisor(CoreSysAttributes):
"""Set Supervisor options."""
body = await api_validate(SCHEMA_OPTIONS, request)

# Timezone must be first as validation is incomplete
# If a timezone is present we do that validation after in the executor
if (
ATTR_TIMEZONE in body
and (timezone := body[ATTR_TIMEZONE]) != self.sys_config.timezone
):
await self.sys_run_in_executor(validate_timezone, timezone)
await self.sys_config.set_timezone(timezone)

if ATTR_CHANNEL in body:
self.sys_updater.channel = body[ATTR_CHANNEL]

if ATTR_TIMEZONE in body:
self.sys_config.timezone = body[ATTR_TIMEZONE]

if ATTR_DEBUG in body:
self.sys_config.debug = body[ATTR_DEBUG]

@@ -168,17 +154,6 @@ class APISupervisor(CoreSysAttributes):
if ATTR_AUTO_UPDATE in body:
self.sys_updater.auto_update = body[ATTR_AUTO_UPDATE]

if detect_blocking_io := body.get(ATTR_DETECT_BLOCKING_IO):
if detect_blocking_io == DetectBlockingIO.ON_AT_STARTUP:
self.sys_config.detect_blocking_io = True
detect_blocking_io = DetectBlockingIO.ON

if detect_blocking_io == DetectBlockingIO.ON:
activate_blockbuster()
elif detect_blocking_io == DetectBlockingIO.OFF:
self.sys_config.detect_blocking_io = False
deactivate_blockbuster()

# Deprecated
if ATTR_WAIT_BOOT in body:
self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]
@@ -230,12 +205,19 @@ class APISupervisor(CoreSysAttributes):
await asyncio.shield(self.sys_supervisor.update(version))

@api_process
async def reload(self, request: web.Request) -> None:
def reload(self, request: web.Request) -> Awaitable[None]:
"""Reload add-ons, configuration, etc."""
await asyncio.gather(
asyncio.shield(self.sys_updater.reload()),
asyncio.shield(self.sys_homeassistant.secrets.reload()),
asyncio.shield(self.sys_resolution.evaluate.evaluate_system()),
return asyncio.shield(
asyncio.wait(
[
self.sys_create_task(coro)
for coro in [
self.sys_updater.reload(),
self.sys_homeassistant.secrets.reload(),
self.sys_resolution.evaluate.evaluate_system(),
]
]
)
)

@api_process

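The removed reload() above awaits three shielded coroutines via asyncio.gather, while the restored version shields a single asyncio.wait over created tasks. A minimal sketch of the gather-of-shields shape, with placeholder steps standing in for the updater, secrets, and resolution reloads:

# Sketch of the gather-of-shields shape from the removed reload() above.
import asyncio

async def step(name: str) -> str:
    await asyncio.sleep(0.01)
    return name

async def reload_all() -> None:
    # Each coroutine is shielded so an outer cancellation does not abort
    # an individual reload mid-flight; gather still propagates errors.
    await asyncio.gather(
        asyncio.shield(step("updater")),
        asyncio.shield(step("secrets")),
        asyncio.shield(step("evaluations")),
    )

asyncio.run(reload_all())
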
@@ -21,15 +21,15 @@ from ..const import (
RESULT_ERROR,
RESULT_OK,
)
from ..coresys import CoreSys, CoreSysAttributes
from ..coresys import CoreSys
from ..exceptions import APIError, BackupFileNotFoundError, DockerAPIError, HassioError
from ..utils import check_exception_chain, get_message_from_exception_chain
from ..utils.json import json_dumps, json_loads as json_loads_util
from ..utils.log_format import format_message
from ..utils.log_format import async_format_message
from . import const


def extract_supervisor_token(request: web.Request) -> str | None:
def excract_supervisor_token(request: web.Request) -> str | None:
"""Extract Supervisor token from request."""
if supervisor_token := request.headers.get(HEADER_TOKEN):
return supervisor_token
@@ -58,9 +58,7 @@ def json_loads(data: Any) -> dict[str, Any]:
def api_process(method):
"""Wrap function with true/false calls to rest api."""

async def wrap_api(
api: CoreSysAttributes, *args, **kwargs
) -> web.Response | web.StreamResponse:
async def wrap_api(api, *args, **kwargs):
"""Return API information."""
try:
answer = await method(api, *args, **kwargs)
@@ -87,7 +85,7 @@ def api_process(method):
def require_home_assistant(method):
"""Ensure that the request comes from Home Assistant."""

async def wrap_api(api: CoreSysAttributes, *args, **kwargs) -> Any:
async def wrap_api(api, *args, **kwargs):
"""Return API information."""
coresys: CoreSys = api.coresys
request: Request = args[0]
@@ -104,9 +102,7 @@ def api_process_raw(content, *, error_type=None):
def wrap_method(method):
"""Wrap function with raw output to rest api."""

async def wrap_api(
api: CoreSysAttributes, *args, **kwargs
) -> web.Response | web.StreamResponse:
async def wrap_api(api, *args, **kwargs):
"""Return api information."""
try:
msg_data = await method(api, *args, **kwargs)
@@ -143,7 +139,7 @@ def api_return_error(
if error and not message:
message = get_message_from_exception_chain(error)
if check_exception_chain(error, DockerAPIError):
message = format_message(message)
message = async_format_message(message)
if not message:
message = "Unknown error, see supervisor"

@@ -169,7 +165,7 @@ def api_return_error(
)


def api_return_ok(data: dict[str, Any] | list[Any] | None = None) -> web.Response:
def api_return_ok(data: dict[str, Any] | None = None) -> web.Response:
"""Return an API ok answer."""
return web.json_response(
{JSON_RESULT: RESULT_OK, JSON_DATA: data or {}},
@@ -178,9 +174,7 @@ def api_return_ok(data: dict[str, Any] | list[Any] | None = None) -> web.Respons


async def api_validate(
schema: vol.Schema | vol.All,
request: web.Request,
origin: list[str] | None = None,
schema: vol.Schema, request: web.Request, origin: list[str] | None = None
) -> dict[str, Any]:
"""Validate request data with schema."""
data: dict[str, Any] = await request.json(loads=json_loads)

@@ -50,7 +50,7 @@ class CpuArch(CoreSysAttributes):
async def load(self) -> None:
"""Load data and initialize default arch."""
try:
arch_data = await self.sys_run_in_executor(read_json_file, ARCH_JSON)
arch_data = read_json_file(ARCH_JSON)
except ConfigurationFileError:
_LOGGER.warning("Can't read arch json file from %s", ARCH_JSON)
return

@@ -68,9 +68,7 @@ class Auth(FileConfiguration, CoreSysAttributes):
self._data.pop(username_h, None)
await self.save_data()

async def check_login(
self, addon: Addon, username: str | None, password: str | None
) -> bool:
async def check_login(self, addon: Addon, username: str, password: str) -> bool:
"""Check username login."""
if password is None:
raise AuthError("None as password is not supported!", _LOGGER.error)

@@ -392,7 +392,7 @@ class Backup(JobGroup):
return
except tarfile.ReadError as ex:
raise BackupInvalidError(
f"Invalid password for backup {self.slug}", _LOGGER.error
f"Invalid password for backup {backup.slug}", _LOGGER.error
) from ex

try:
@@ -542,7 +542,7 @@ class Backup(JobGroup):
raise err
finally:
if self._tmp:
await self.sys_run_in_executor(self._tmp.cleanup)
self._tmp.cleanup()

async def _create_cleanup(self, outer_tarfile: TarFile) -> None:
"""Cleanup after backup creation.
@@ -601,9 +601,7 @@ class Backup(JobGroup):
ATTR_SLUG: addon.slug,
ATTR_NAME: addon.name,
ATTR_VERSION: addon.version,
# Bug - addon_file.size used to give us this information
# It always returns 0 in current securetar. Skipping until fixed
ATTR_SIZE: 0,
ATTR_SIZE: addon_file.size,
}
)

@@ -642,7 +640,7 @@ class Backup(JobGroup):
)

# If exists inside backup
if not await self.sys_run_in_executor(addon_file.path.exists):
if not addon_file.path.exists():
raise BackupError(f"Can't find backup {addon_slug}", _LOGGER.error)

# Perform a restore
@@ -848,9 +846,7 @@ class Backup(JobGroup):
await self.sys_homeassistant.backup(homeassistant_file, exclude_database)

# Store size
self.homeassistant[ATTR_SIZE] = await self.sys_run_in_executor(
getattr, homeassistant_file, "size"
)
self.homeassistant[ATTR_SIZE] = homeassistant_file.size

@Job(name="backup_restore_homeassistant", cleanup=False)
async def restore_homeassistant(self) -> Awaitable[None]:

@@ -3,7 +3,7 @@
from __future__ import annotations

import asyncio
from collections.abc import Awaitable
from collections.abc import Awaitable, Iterable
import errno
import logging
from pathlib import Path
@@ -179,26 +179,18 @@ class BackupManager(FileConfiguration, JobGroup):
)
self.sys_jobs.current.stage = stage

async def _list_backup_files(self, path: Path) -> list[Path]:
def _list_backup_files(self, path: Path) -> Iterable[Path]:
"""Return iterable of backup files, suppress and log OSError for network mounts."""

def find_backups() -> list[Path]:
# is_dir does a stat syscall which raises if the mount is down
# Returning an iterator causes I/O while iterating, coerce into list here
if path.is_dir():
return list(path.glob("*.tar"))
return []

try:
return await self.sys_run_in_executor(find_backups)
# is_dir does a stat syscall which raises if the mount is down
if path.is_dir():
return path.glob("*.tar")
except OSError as err:
if err.errno == errno.EBADMSG and path in {
self.sys_config.path_backup,
self.sys_config.path_core_backup,
}:
self.sys_resolution.add_unhealthy_reason(
UnhealthyReason.OSERROR_BAD_MESSAGE
)
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
_LOGGER.error("Could not list backups from %s: %s", path.as_posix(), err)

return []
@@ -286,7 +278,9 @@ class BackupManager(FileConfiguration, JobGroup):
tasks = [
self.sys_create_task(_load_backup(_location, tar_file))
for _location, path in locations.items()
for tar_file in await self._list_backup_files(path)
for tar_file in await self.sys_run_in_executor(
self._list_backup_files, path
)
]

_LOGGER.info("Found %d backup files", len(tasks))
@@ -352,9 +346,7 @@ class BackupManager(FileConfiguration, JobGroup):
None,
LOCATION_CLOUD_BACKUP,
}:
self.sys_resolution.add_unhealthy_reason(
UnhealthyReason.OSERROR_BAD_MESSAGE
)
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
raise BackupError(msg, _LOGGER.error) from err

# If backup has been removed from all locations, remove it from cache
@@ -368,15 +360,13 @@ class BackupManager(FileConfiguration, JobGroup):
):
"""Copy a backup file to additional locations."""

all_new_locations: dict[str | None, Path] = {}

def copy_to_additional_locations() -> dict[str | None, Path]:
"""Copy backup file to additional locations."""
nonlocal all_new_locations
all_locations: dict[str | None, Path] = {}
for location in locations:
try:
if location == LOCATION_CLOUD_BACKUP:
all_new_locations[LOCATION_CLOUD_BACKUP] = Path(
all_locations[LOCATION_CLOUD_BACKUP] = Path(
copy(backup.tarfile, self.sys_config.path_core_backup)
)
elif location:
@@ -386,11 +376,11 @@ class BackupManager(FileConfiguration, JobGroup):
f"{location_mount.name} is down, cannot copy to it",
_LOGGER.error,
)
all_new_locations[location_mount.name] = Path(
all_locations[location_mount.name] = Path(
copy(backup.tarfile, location_mount.local_where)
)
else:
all_new_locations[None] = Path(
all_locations[None] = Path(
copy(backup.tarfile, self.sys_config.path_backup)
)
except OSError as err:
@@ -403,24 +393,26 @@ class BackupManager(FileConfiguration, JobGroup):
raise BackupDataDiskBadMessageError(msg, _LOGGER.error) from err
raise BackupError(msg, _LOGGER.error) from err

return all_locations

try:
await self.sys_run_in_executor(copy_to_additional_locations)
all_new_locations = await self.sys_run_in_executor(
copy_to_additional_locations
)
except BackupDataDiskBadMessageError:
self.sys_resolution.add_unhealthy_reason(
UnhealthyReason.OSERROR_BAD_MESSAGE
)
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
raise
finally:
backup.all_locations.update(
{
loc: {
ATTR_PATH: path,
ATTR_PROTECTED: backup.protected,
ATTR_SIZE_BYTES: backup.size_bytes,
}
for loc, path in all_new_locations.items()

backup.all_locations.update(
{
loc: {
ATTR_PATH: path,
ATTR_PROTECTED: backup.protected,
ATTR_SIZE_BYTES: backup.size_bytes,
}
)
for loc, path in all_new_locations.items()
}
)

@Job(name="backup_manager_import_backup")
async def import_backup(
@@ -449,9 +441,7 @@ class BackupManager(FileConfiguration, JobGroup):
await self.sys_run_in_executor(backup.tarfile.rename, tar_file)
except OSError as err:
if err.errno == errno.EBADMSG and location in {LOCATION_CLOUD_BACKUP, None}:
self.sys_resolution.add_unhealthy_reason(
UnhealthyReason.OSERROR_BAD_MESSAGE
)
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
_LOGGER.error("Can't move backup file to storage: %s", err)
return None

@@ -506,7 +496,7 @@ class BackupManager(FileConfiguration, JobGroup):
addon_start_tasks: list[Awaitable[None]] | None = None

try:
await self.sys_core.set_state(CoreState.FREEZE)
self.sys_core.state = CoreState.FREEZE

async with backup.create():
# HomeAssistant Folder is for v1
@@ -559,7 +549,7 @@ class BackupManager(FileConfiguration, JobGroup):

return backup
finally:
await self.sys_core.set_state(CoreState.RUNNING)
self.sys_core.state = CoreState.RUNNING

@Job(
name="backup_manager_full_backup",
@@ -818,7 +808,7 @@ class BackupManager(FileConfiguration, JobGroup):
)

_LOGGER.info("Full-Restore %s start", backup.slug)
await self.sys_core.set_state(CoreState.FREEZE)
self.sys_core.state = CoreState.FREEZE

try:
# Stop Home-Assistant / Add-ons
@@ -833,7 +823,7 @@ class BackupManager(FileConfiguration, JobGroup):
location=location,
)
finally:
await self.sys_core.set_state(CoreState.RUNNING)
self.sys_core.state = CoreState.RUNNING

if success:
_LOGGER.info("Full-Restore %s done", backup.slug)
@@ -888,7 +878,7 @@ class BackupManager(FileConfiguration, JobGroup):
)

_LOGGER.info("Partial-Restore %s start", backup.slug)
await self.sys_core.set_state(CoreState.FREEZE)
self.sys_core.state = CoreState.FREEZE

try:
success = await self._do_restore(
@@ -900,7 +890,7 @@ class BackupManager(FileConfiguration, JobGroup):
location=location,
)
finally:
await self.sys_core.set_state(CoreState.RUNNING)
self.sys_core.state = CoreState.RUNNING

if success:
_LOGGER.info("Partial-Restore %s done", backup.slug)
@@ -914,7 +904,7 @@ class BackupManager(FileConfiguration, JobGroup):
)
async def freeze_all(self, timeout: float = DEFAULT_FREEZE_TIMEOUT) -> None:
"""Freeze system to prepare for an external backup such as an image snapshot."""
await self.sys_core.set_state(CoreState.FREEZE)
self.sys_core.state = CoreState.FREEZE

# Determine running addons
installed = self.sys_addons.installed.copy()
@@ -967,7 +957,7 @@ class BackupManager(FileConfiguration, JobGroup):
if task
]
finally:
await self.sys_core.set_state(CoreState.RUNNING)
self.sys_core.state = CoreState.RUNNING
self._thaw_event.clear()
self._thaw_task = None


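The find_backups closure above exists because, as its comments note, is_dir() performs a stat syscall (which raises when a network mount is down) and glob() returns a lazy iterator that would do I/O while iterated on the event loop. A standalone sketch of the same pattern:

# Sketch of the executor-backed backup listing above: stat and glob both
# block, and list() coercion keeps the I/O inside the executor.
import asyncio
from pathlib import Path

async def list_backup_files(path: Path) -> list[Path]:
    def find_backups() -> list[Path]:
        # is_dir() stats; glob() is lazy, so list() forces all directory
        # I/O to happen here, in the executor thread.
        if path.is_dir():
            return list(path.glob("*.tar"))
        return []

    try:
        return await asyncio.get_running_loop().run_in_executor(None, find_backups)
    except OSError as err:
        print(f"Could not list backups from {path}: {err}")
        return []

print(asyncio.run(list_backup_files(Path("/tmp"))))
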
@@ -8,6 +8,7 @@ import signal
import warnings

from colorlog import ColoredFormatter
from sentry_sdk import capture_exception

from .addons.manager import AddonManager
from .api import RestAPI
@@ -45,7 +46,7 @@ from .services import ServiceManager
from .store import StoreManager
from .supervisor import Supervisor
from .updater import Updater
from .utils.sentry import capture_exception, init_sentry
from .utils.sentry import init_sentry

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -55,11 +56,11 @@ async def initialize_coresys() -> CoreSys:
coresys = await CoreSys().load_config()

# Initialize core objects
coresys.docker = await DockerAPI(coresys).post_init()
coresys.docker = await DockerAPI(coresys).load_config()
coresys.resolution = await ResolutionManager(coresys).load_config()
await coresys.resolution.load_modules()
coresys.jobs = await JobManager(coresys).load_config()
coresys.core = await Core(coresys).post_init()
coresys.core = Core(coresys)
coresys.plugins = await PluginManager(coresys).load_config()
coresys.arch = CpuArch(coresys)
coresys.auth = await Auth(coresys).load_config()
@@ -69,8 +70,8 @@ async def initialize_coresys() -> CoreSys:
coresys.homeassistant = await HomeAssistant(coresys).load_config()
coresys.addons = await AddonManager(coresys).load_config()
coresys.backups = await BackupManager(coresys).load_config()
coresys.host = await HostManager(coresys).post_init()
coresys.hardware = await HardwareManager(coresys).post_init()
coresys.host = HostManager(coresys)
coresys.hardware = HardwareManager(coresys)
coresys.ingress = await Ingress(coresys).load_config()
coresys.tasks = Tasks(coresys)
coresys.services = await ServiceManager(coresys).load_config()

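Several objects above (DockerAPI, Core, HostManager, HardwareManager) gain an awaitable post_init() so loop-dependent or blocking setup runs after construction rather than in __init__. A sketch of that factory-style pattern with a toy class, not a Supervisor type:

# Sketch of the awaitable post-init pattern used above.
import asyncio
from typing import Self

class Manager:
    def __init__(self) -> None:
        self.ready = False

    async def post_init(self) -> Self:
        # Anything that blocks or needs the running loop happens here,
        # keeping __init__ side-effect free.
        await asyncio.sleep(0)
        self.ready = True
        return self

async def main() -> None:
    manager = await Manager().post_init()
    print(manager.ready)  # True

asyncio.run(main())
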
@@ -1,7 +1,6 @@
"""Bootstrap Supervisor."""

import asyncio
from datetime import UTC, datetime, tzinfo
from datetime import UTC, datetime
import logging
import os
from pathlib import Path, PurePath
@@ -12,7 +11,6 @@ from .const import (
ATTR_ADDONS_CUSTOM_LIST,
ATTR_DEBUG,
ATTR_DEBUG_BLOCK,
ATTR_DETECT_BLOCKING_IO,
ATTR_DIAGNOSTICS,
ATTR_IMAGE,
ATTR_LAST_BOOT,
@@ -26,7 +24,7 @@ from .const import (
LogLevel,
)
from .utils.common import FileConfiguration
from .utils.dt import get_time_zone, parse_datetime
from .utils.dt import parse_datetime
from .validate import SCHEMA_SUPERVISOR_CONFIG

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -68,7 +66,6 @@ class CoreConfig(FileConfiguration):
def __init__(self):
"""Initialize config object."""
super().__init__(FILE_HASSIO_CONFIG, SCHEMA_SUPERVISOR_CONFIG)
self._timezone_tzinfo: tzinfo | None = None

@property
def timezone(self) -> str | None:
@@ -79,19 +76,12 @@ class CoreConfig(FileConfiguration):
self._data.pop(ATTR_TIMEZONE, None)
return None

@property
def timezone_tzinfo(self) -> tzinfo | None:
"""Return system timezone as tzinfo object."""
return self._timezone_tzinfo

async def set_timezone(self, value: str) -> None:
@timezone.setter
def timezone(self, value: str) -> None:
"""Set system timezone."""
if value == _UTC:
return
self._data[ATTR_TIMEZONE] = value
self._timezone_tzinfo = await asyncio.get_running_loop().run_in_executor(
None, get_time_zone, value
)

@property
def version(self) -> AwesomeVersion:
@@ -143,16 +133,6 @@ class CoreConfig(FileConfiguration):
"""Set debug wait mode."""
self._data[ATTR_DEBUG_BLOCK] = value

@property
def detect_blocking_io(self) -> bool:
"""Return True if blocking I/O in event loop detection enabled at startup."""
return self._data[ATTR_DETECT_BLOCKING_IO]

@detect_blocking_io.setter
def detect_blocking_io(self, value: bool) -> None:
"""Enable/Disable blocking I/O in event loop detection at startup."""
self._data[ATTR_DETECT_BLOCKING_IO] = value

@property
def diagnostics(self) -> bool | None:
"""Return bool if diagnostics is set otherwise None."""
@@ -410,15 +390,3 @@ class CoreConfig(FileConfiguration):
def extern_to_local_path(self, path: PurePath) -> Path:
"""Translate a path relative to extern supervisor data to its path in the container."""
return self.path_supervisor / path.relative_to(self.path_extern_supervisor)

async def read_data(self) -> None:
"""Read configuration file."""
timezone = self.timezone
await super().read_data()

if not self.timezone:
self._timezone_tzinfo = None
elif timezone != self.timezone:
self._timezone_tzinfo = await asyncio.get_running_loop().run_in_executor(
None, get_time_zone, self.timezone
)

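set_timezone() and read_data() above resolve the tzinfo object in an executor and cache it, since the first zoneinfo lookup reads tzdata from disk. A sketch using zoneinfo directly in place of the Supervisor's utils.dt helper:

# Sketch of the cached-tzinfo approach above; get_time_zone here wraps
# zoneinfo, while the Supervisor has its own helper in utils.dt.
import asyncio
from datetime import tzinfo
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError

def get_time_zone(name: str) -> tzinfo | None:
    try:
        return ZoneInfo(name)
    except ZoneInfoNotFoundError:
        return None

async def resolve_timezone(name: str) -> tzinfo | None:
    # The first lookup reads tzdata from disk, so it is pushed to the
    # executor; the caller caches the result (self._timezone_tzinfo above).
    return await asyncio.get_running_loop().run_in_executor(None, get_time_zone, name)

print(asyncio.run(resolve_timezone("Europe/Amsterdam")))
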
@@ -152,7 +152,6 @@ ATTR_DEFAULT = "default"
ATTR_DEPLOYMENT = "deployment"
ATTR_DESCRIPTON = "description"
ATTR_DETACHED = "detached"
ATTR_DETECT_BLOCKING_IO = "detect_blocking_io"
ATTR_DEVICES = "devices"
ATTR_DEVICETREE = "devicetree"
ATTR_DIAGNOSTICS = "diagnostics"
@@ -314,8 +313,6 @@ ATTR_SUPERVISOR_INTERNET = "supervisor_internet"
ATTR_SUPERVISOR_VERSION = "supervisor_version"
ATTR_SUPPORTED = "supported"
ATTR_SUPPORTED_ARCH = "supported_arch"
ATTR_SWAP_SIZE = "swap_size"
ATTR_SWAPPINESS = "swappiness"
ATTR_SYSTEM = "system"
ATTR_SYSTEM_MANAGED = "system_managed"
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY = "system_managed_config_entry"

@@ -5,7 +5,6 @@ from collections.abc import Awaitable
from contextlib import suppress
from datetime import timedelta
import logging
from typing import Self

from .const import (
ATTR_STARTUP,
@@ -40,6 +39,7 @@ class Core(CoreSysAttributes):
"""Initialize Supervisor object."""
self.coresys: CoreSys = coresys
self._state: CoreState = CoreState.INITIALIZE
self._write_run_state(self._state)
self.exit_code: int = 0

@property
@@ -57,38 +57,32 @@ class Core(CoreSysAttributes):
"""Return true if the installation is healthy."""
return len(self.sys_resolution.unhealthy) == 0

async def _write_run_state(self):
def _write_run_state(self, new_state: CoreState):
"""Write run state for s6 service supervisor."""
try:
await self.sys_run_in_executor(
RUN_SUPERVISOR_STATE.write_text, str(self._state), encoding="utf-8"
)
RUN_SUPERVISOR_STATE.write_text(str(new_state), encoding="utf-8")
except OSError as err:
_LOGGER.warning(
"Can't update the Supervisor state to %s: %s", self._state, err
"Can't update the Supervisor state to %s: %s", new_state, err
)

async def post_init(self) -> Self:
"""Post init actions that must be done in event loop."""
await self._write_run_state()
return self

async def set_state(self, new_state: CoreState) -> None:
@state.setter
def state(self, new_state: CoreState) -> None:
"""Set core into new state."""
if self._state == new_state:
return

self._write_run_state(new_state)
self._state = new_state
await self._write_run_state()

# Don't attempt to notify anyone on CLOSE as we're about to stop the event loop
if self._state != CoreState.CLOSE:
self.sys_bus.fire_event(BusEvent.SUPERVISOR_STATE_CHANGE, self._state)
if new_state != CoreState.CLOSE:
self.sys_bus.fire_event(BusEvent.SUPERVISOR_STATE_CHANGE, new_state)

# These will be received by HA after startup has completed which won't make sense
if self._state not in STARTING_STATES:
if new_state not in STARTING_STATES:
self.sys_homeassistant.websocket.supervisor_update_event(
"info", {"state": self._state}
"info", {"state": new_state}
)

async def connect(self):
@@ -114,7 +108,7 @@ class Core(CoreSysAttributes):
self.sys_resolution.create_issue(
IssueType.UPDATE_ROLLBACK, ContextType.SUPERVISOR
)
self.sys_resolution.add_unhealthy_reason(UnhealthyReason.SUPERVISOR)
self.sys_resolution.unhealthy = UnhealthyReason.SUPERVISOR

# Fix wrong version in config / avoid boot loop on OS
self.sys_config.version = self.sys_supervisor.version
@@ -122,7 +116,7 @@ class Core(CoreSysAttributes):

async def setup(self):
"""Start setting up supervisor orchestration."""
await self.set_state(CoreState.SETUP)
self.state = CoreState.SETUP

# Check internet on startup
await self.sys_supervisor.check_connectivity()
@@ -177,7 +171,7 @@ class Core(CoreSysAttributes):
_LOGGER.critical(
"Fatal error happening on load Task %s: %s", setup_task, err
)
self.sys_resolution.add_unhealthy_reason(UnhealthyReason.SETUP)
self.sys_resolution.unhealthy = UnhealthyReason.SETUP
await async_capture_exception(err)

# Set OS Agent diagnostics if needed
@@ -202,7 +196,7 @@ class Core(CoreSysAttributes):

async def start(self):
"""Start Supervisor orchestration."""
await self.set_state(CoreState.STARTUP)
self.state = CoreState.STARTUP

# Check if system is healthy
if not self.supported:
@@ -229,7 +223,7 @@ class Core(CoreSysAttributes):

try:
# HomeAssistant is already running, only Supervisor restarted
if await self.sys_hardware.helper.last_boot() == self.sys_config.last_boot:
if self.sys_hardware.helper.last_boot == self.sys_config.last_boot:
_LOGGER.info("Detected Supervisor restart")
return

@@ -288,7 +282,7 @@ class Core(CoreSysAttributes):
self.sys_create_task(self.sys_updater.reload())
self.sys_create_task(self.sys_resolution.healthcheck())

await self.set_state(CoreState.RUNNING)
self.state = CoreState.RUNNING
self.sys_homeassistant.websocket.supervisor_update_event(
"supervisor", {ATTR_STARTUP: "complete"}
)
@@ -303,7 +297,7 @@ class Core(CoreSysAttributes):
return

# don't process scheduler anymore
await self.set_state(CoreState.STOPPING)
self.state = CoreState.STOPPING

# Stage 1
try:
@@ -338,7 +332,7 @@ class Core(CoreSysAttributes):
except TimeoutError:
_LOGGER.warning("Stage 2: Force Shutdown!")

await self.set_state(CoreState.CLOSE)
self.state = CoreState.CLOSE
_LOGGER.info("Supervisor is down - %d", self.exit_code)
self.sys_loop.stop()

@@ -346,7 +340,7 @@ class Core(CoreSysAttributes):
"""Shutdown all running containers in correct order."""
# don't process scheduler anymore
if self.state == CoreState.RUNNING:
await self.set_state(CoreState.SHUTDOWN)
self.state = CoreState.SHUTDOWN

# Shutdown Application Add-ons, using Home Assistant API
await self.sys_addons.shutdown(AddonStartup.APPLICATION)
@@ -368,7 +362,7 @@ class Core(CoreSysAttributes):

async def _update_last_boot(self):
"""Update last boot time."""
self.sys_config.last_boot = await self.sys_hardware.helper.last_boot()
self.sys_config.last_boot = self.sys_hardware.helper.last_boot
await self.sys_config.save_data()

async def _retrieve_whoami(self, with_ssl: bool) -> WhoamiData | None:
@@ -399,7 +393,7 @@ class Core(CoreSysAttributes):
_LOGGER.warning("Can't adjust Time/Date settings: %s", err)
return

await self.sys_config.set_timezone(self.sys_config.timezone or data.timezone)
|
||||
self.sys_config.timezone = self.sys_config.timezone or data.timezone
|
||||
|
||||
# Calculate if system time is out of sync
|
||||
delta = data.dt_utc - utcnow()
|
||||
|
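A minimal, self-contained sketch of the executor pattern the core.py hunks above move away from: a blocking file write is handed to a worker thread so the event loop stays responsive. The names here (STATE_FILE, write_state) are illustrative, not Supervisor API.

import asyncio
from pathlib import Path

STATE_FILE = Path("/tmp/run_state")  # hypothetical path, not the Supervisor's

async def write_state(state: str) -> None:
    # Path.write_text blocks on disk I/O, so hand it to a worker thread
    await asyncio.get_running_loop().run_in_executor(None, STATE_FILE.write_text, state)

asyncio.run(write_state("RUNNING"))
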
@@ -5,7 +5,7 @@ from __future__ import annotations
import asyncio
from collections.abc import Callable, Coroutine
from contextvars import Context, copy_context
from datetime import UTC, datetime, tzinfo
from datetime import datetime
from functools import partial
import logging
import os
@@ -22,6 +22,7 @@ from .const import (
    MACHINE_ID,
    SERVER_SOFTWARE,
)
from .utils.dt import UTC, get_time_zone

if TYPE_CHECKING:
    from .addons.manager import AddonManager
@@ -142,19 +143,13 @@ class CoreSys:
        """Return system timezone."""
        if self.config.timezone:
            return self.config.timezone
        # pylint bug with python 3.12.4 (https://github.com/pylint-dev/pylint/issues/9811)
        # pylint: disable=no-member
        if self.host.info.timezone:
            return self.host.info.timezone
        # pylint: enable=no-member
        return "UTC"

    @property
    def timezone_tzinfo(self) -> tzinfo:
        """Return system timezone as tzinfo object."""
        if self.config.timezone_tzinfo:
            return self.config.timezone_tzinfo
        if self.host.info.timezone_tzinfo:
            return self.host.info.timezone_tzinfo
        return UTC

    @property
    def loop(self) -> asyncio.BaseEventLoop:
        """Return loop object."""
@@ -560,7 +555,7 @@ class CoreSys:

    def now(self) -> datetime:
        """Return now in local timezone."""
        return datetime.now(self.timezone_tzinfo)
        return datetime.now(get_time_zone(self.timezone) or UTC)

    def add_set_task_context_callback(
        self, callback: Callable[[Context], Context]
@@ -647,11 +642,6 @@ class CoreSysAttributes:
        """Return running machine type of the Supervisor system."""
        return self.coresys.machine

    @property
    def sys_machine_id(self) -> str | None:
        """Return machine id."""
        return self.coresys.machine_id

    @property
    def sys_dev(self) -> bool:
        """Return True if we run dev mode."""
@@ -807,7 +797,7 @@ class CoreSysAttributes:
        return self.coresys.now()

    def sys_run_in_executor(
        self, funct: Callable[..., T], *args, **kwargs
        self, funct: Callable[..., T], *args: tuple[Any], **kwargs: dict[str, Any]
    ) -> Coroutine[Any, Any, T]:
        """Add a job to the executor pool."""
        return self.coresys.run_in_executor(funct, *args, **kwargs)
@@ -820,8 +810,8 @@ class CoreSysAttributes:
        self,
        delay: float,
        funct: Callable[..., Coroutine[Any, Any, T]],
        *args,
        **kwargs,
        *args: tuple[Any],
        **kwargs: dict[str, Any],
    ) -> asyncio.TimerHandle:
        """Start a task after a delay."""
        return self.coresys.call_later(delay, funct, *args, **kwargs)
@@ -830,8 +820,8 @@ class CoreSysAttributes:
        self,
        when: datetime,
        funct: Callable[..., Coroutine[Any, Any, T]],
        *args,
        **kwargs,
        *args: tuple[Any],
        **kwargs: dict[str, Any],
    ) -> asyncio.TimerHandle:
        """Start a task at the specified datetime."""
        return self.coresys.call_at(when, funct, *args, **kwargs)
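For context on the timezone_tzinfo property removed above, a toy stand-in showing the same fallback precedence (configured zone, then host zone, then UTC); Clock is illustrative, not the real CoreSys.

from datetime import UTC, datetime, tzinfo
from zoneinfo import ZoneInfo

class Clock:
    """Stand-in for the CoreSys properties above; not the real class."""

    def __init__(self, config_tz: tzinfo | None, host_tz: tzinfo | None) -> None:
        self.config_tz = config_tz
        self.host_tz = host_tz

    @property
    def timezone_tzinfo(self) -> tzinfo:
        # Same precedence as the removed property: config, then host, then UTC
        return self.config_tz or self.host_tz or UTC

    def now(self) -> datetime:
        return datetime.now(self.timezone_tzinfo)

print(Clock(None, ZoneInfo("Europe/Berlin")).now())
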
@@ -22,7 +22,6 @@ from .apparmor import AppArmor
from .boards import BoardManager
from .cgroup import CGroup
from .datadisk import DataDisk
from .swap import Swap
from .system import System

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -44,7 +43,6 @@ class OSAgent(DBusInterfaceProxy):
        self._board: BoardManager = BoardManager()
        self._cgroup: CGroup = CGroup()
        self._datadisk: DataDisk = DataDisk()
        self._swap: Swap = Swap()
        self._system: System = System()

    @property
@@ -57,11 +55,6 @@ class OSAgent(DBusInterfaceProxy):
        """Return AppArmor DBUS object."""
        return self._apparmor

    @property
    def swap(self) -> Swap:
        """Return Swap DBUS object."""
        return self._swap

    @property
    def system(self) -> System:
        """Return System DBUS object."""
@@ -96,14 +89,7 @@ class OSAgent(DBusInterfaceProxy):
    @property
    def all(self) -> list[DBusInterface]:
        """Return all managed dbus interfaces."""
        return [
            self.apparmor,
            self.board,
            self.cgroup,
            self.datadisk,
            self.swap,
            self.system,
        ]
        return [self.apparmor, self.board, self.cgroup, self.datadisk, self.system]

    async def connect(self, bus: MessageBus) -> None:
        """Connect to system's D-Bus."""

@@ -1,40 +0,0 @@
"""Swap object for OS Agent."""

from collections.abc import Awaitable

from ..const import (
    DBUS_ATTR_SWAP_SIZE,
    DBUS_ATTR_SWAPPINESS,
    DBUS_IFACE_HAOS_CONFIG_SWAP,
    DBUS_NAME_HAOS,
    DBUS_OBJECT_HAOS_CONFIG_SWAP,
)
from ..interface import DBusInterfaceProxy, dbus_property


class Swap(DBusInterfaceProxy):
    """Swap object for OS Agent."""

    bus_name: str = DBUS_NAME_HAOS
    object_path: str = DBUS_OBJECT_HAOS_CONFIG_SWAP
    properties_interface: str = DBUS_IFACE_HAOS_CONFIG_SWAP

    @property
    @dbus_property
    def swap_size(self) -> str:
        """Get swap size."""
        return self.properties[DBUS_ATTR_SWAP_SIZE]

    def set_swap_size(self, size: str) -> Awaitable[None]:
        """Set swap size."""
        return self.dbus.Config.Swap.set_swap_size(size)

    @property
    @dbus_property
    def swappiness(self) -> int:
        """Get swappiness."""
        return self.properties[DBUS_ATTR_SWAPPINESS]

    def set_swappiness(self, swappiness: int) -> Awaitable[None]:
        """Set swappiness."""
        return self.dbus.Config.Swap.set_swappiness(swappiness)
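The deleted Swap class follows the proxy shape where cached D-Bus property values live in a properties dict and are exposed as attributes. A runnable toy model of that shape only; the real @dbus_property decorator does considerably more.

class ToyProxy:
    """Illustrative stand-in for a DBusInterfaceProxy subclass."""

    def __init__(self) -> None:
        # In the real proxy this dict is filled from D-Bus property updates
        self.properties = {"SwapSize": "1G", "Swappiness": 60}

    @property
    def swap_size(self) -> str:
        return self.properties["SwapSize"]

    @property
    def swappiness(self) -> int:
        return self.properties["Swappiness"]

proxy = ToyProxy()
print(proxy.swap_size, proxy.swappiness)
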
@@ -25,7 +25,6 @@ DBUS_IFACE_HAOS = "io.hass.os"
DBUS_IFACE_HAOS_APPARMOR = "io.hass.os.AppArmor"
DBUS_IFACE_HAOS_BOARDS = "io.hass.os.Boards"
DBUS_IFACE_HAOS_CGROUP = "io.hass.os.CGroup"
DBUS_IFACE_HAOS_CONFIG_SWAP = "io.hass.os.Config.Swap"
DBUS_IFACE_HAOS_DATADISK = "io.hass.os.DataDisk"
DBUS_IFACE_HAOS_SYSTEM = "io.hass.os.System"
DBUS_IFACE_HOSTNAME = "org.freedesktop.hostname1"
@@ -54,7 +53,6 @@ DBUS_OBJECT_HAOS = "/io/hass/os"
DBUS_OBJECT_HAOS_APPARMOR = "/io/hass/os/AppArmor"
DBUS_OBJECT_HAOS_BOARDS = "/io/hass/os/Boards"
DBUS_OBJECT_HAOS_CGROUP = "/io/hass/os/CGroup"
DBUS_OBJECT_HAOS_CONFIG_SWAP = "/io/hass/os/Config/Swap"
DBUS_OBJECT_HAOS_DATADISK = "/io/hass/os/DataDisk"
DBUS_OBJECT_HAOS_SYSTEM = "/io/hass/os/System"
DBUS_OBJECT_HOSTNAME = "/org/freedesktop/hostname1"
@@ -171,8 +169,6 @@ DBUS_ATTR_STATIC_OPERATING_SYSTEM_CPE_NAME = "OperatingSystemCPEName"
DBUS_ATTR_STRENGTH = "Strength"
DBUS_ATTR_SUPPORTED_FILESYSTEMS = "SupportedFilesystems"
DBUS_ATTR_SYMLINKS = "Symlinks"
DBUS_ATTR_SWAP_SIZE = "SwapSize"
DBUS_ATTR_SWAPPINESS = "Swappiness"
DBUS_ATTR_TABLE = "Table"
DBUS_ATTR_TIME_DETECTED = "TimeDetected"
DBUS_ATTR_TIMEUSEC = "TimeUSec"

@@ -1,14 +1,12 @@
"""Interface to systemd-timedate over D-Bus."""

import asyncio
from datetime import datetime, tzinfo
from datetime import datetime
import logging
from typing import Any

from dbus_fast.aio.message_bus import MessageBus

from ..exceptions import DBusError, DBusInterfaceError, DBusServiceUnkownError
from ..utils.dt import get_time_zone, utc_from_timestamp
from ..utils.dt import utc_from_timestamp
from .const import (
    DBUS_ATTR_NTP,
    DBUS_ATTR_NTPSYNCHRONIZED,
@@ -35,11 +33,6 @@ class TimeDate(DBusInterfaceProxy):
    object_path: str = DBUS_OBJECT_TIMEDATE
    properties_interface: str = DBUS_IFACE_TIMEDATE

    def __init__(self):
        """Initialize object."""
        super().__init__()
        self._timezone_tzinfo: tzinfo | None = None

    @property
    @dbus_property
    def timezone(self) -> str:
@@ -64,11 +57,6 @@ class TimeDate(DBusInterfaceProxy):
        """Return the system UTC time."""
        return utc_from_timestamp(self.properties[DBUS_ATTR_TIMEUSEC] / 1000000)

    @property
    def timezone_tzinfo(self) -> tzinfo | None:
        """Return timezone as tzinfo object."""
        return self._timezone_tzinfo

    async def connect(self, bus: MessageBus):
        """Connect to D-Bus."""
        _LOGGER.info("Load dbus interface %s", self.name)
@@ -81,19 +69,6 @@ class TimeDate(DBusInterfaceProxy):
                "No timedate support on the host. Time/Date functions have been disabled."
            )

    @dbus_connected
    async def update(self, changed: dict[str, Any] | None = None) -> None:
        """Update properties via D-Bus."""
        timezone = self.timezone
        await super().update(changed)

        if not self.timezone:
            self._timezone_tzinfo = None
        elif timezone != self.timezone:
            self._timezone_tzinfo = await asyncio.get_running_loop().run_in_executor(
                None, get_time_zone, self.timezone
            )

    @dbus_connected
    async def set_time(self, utc: datetime) -> None:
        """Set time & date on host as UTC."""
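A small runnable sketch of the idea behind the removed update() override above: ZoneInfo construction can read tzdata from disk, so the lookup is resolved in the executor; get_time_zone mirrors the helper used in these hunks.

import asyncio
from datetime import tzinfo
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError

def get_time_zone(name: str) -> tzinfo | None:
    # Mirrors the helper above: swallow unknown-zone errors
    try:
        return ZoneInfo(name)
    except ZoneInfoNotFoundError:
        return None

async def resolve(name: str) -> tzinfo | None:
    # Keep the potentially blocking tzdata read off the event loop
    return await asyncio.get_running_loop().run_in_executor(None, get_time_zone, name)

print(asyncio.run(resolve("Europe/Berlin")))
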
@@ -5,7 +5,7 @@ from __future__ import annotations
from contextlib import suppress
import logging
from typing import TYPE_CHECKING, Any
from uuid import uuid4
from uuid import UUID, uuid4

import attr

@@ -31,7 +31,7 @@ class Message:
    addon: str = attr.ib()
    service: str = attr.ib()
    config: dict[str, Any] = attr.ib(eq=False)
    uuid: str = attr.ib(factory=lambda: uuid4().hex, eq=False)
    uuid: UUID = attr.ib(factory=lambda: uuid4().hex, eq=False)


class Discovery(CoreSysAttributes, FileConfiguration):

@@ -665,19 +665,17 @@ class DockerAddon(DockerInterface):
    async def _build(self, version: AwesomeVersion, image: str | None = None) -> None:
        """Build a Docker container."""
        build_env = await AddonBuild(self.coresys, self.addon).load_config()
        if not await build_env.is_valid():
        if not build_env.is_valid:
            _LOGGER.error("Invalid build environment, can't build this add-on!")
            raise DockerError()

        _LOGGER.info("Starting build for %s:%s", self.image, version)

        def build_image():
            return self.sys_docker.images.build(
                use_config_proxy=False, **build_env.get_docker_args(version, image)
            )

        try:
            image, log = await self.sys_run_in_executor(build_image)
            image, log = await self.sys_run_in_executor(
                self.sys_docker.images.build,
                use_config_proxy=False,
                **build_env.get_docker_args(version, image),
            )

            _LOGGER.debug("Build %s:%s done: %s", self.image, version, log)
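The closure in the hunk above exists because the raw loop.run_in_executor() only forwards positional arguments; functools.partial is the other common way to carry keyword arguments into a worker thread. A minimal sketch with an illustrative build function standing in for the docker-py call:

import asyncio
from functools import partial

def build(*, tag: str, rm: bool = True) -> str:
    # Stand-in for a blocking docker build call
    return f"built {tag} (rm={rm})"

async def main() -> None:
    loop = asyncio.get_running_loop()
    # partial() (or a local closure, as above) carries the keyword arguments
    print(await loop.run_in_executor(None, partial(build, tag="addon:1.0")))

asyncio.run(main())
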
@@ -5,7 +5,7 @@ import logging
import docker
from docker.types import Mount

from ..const import DOCKER_CPU_RUNTIME_ALLOCATION
from ..const import DOCKER_CPU_RUNTIME_ALLOCATION, MACHINE_ID
from ..coresys import CoreSysAttributes
from ..exceptions import DockerJobError
from ..hardware.const import PolicyGroup
@@ -57,7 +57,7 @@ class DockerAudio(DockerInterface, CoreSysAttributes):
        ]

        # Machine ID
        if self.sys_machine_id:
        if MACHINE_ID.exists():
            mounts.append(MOUNT_MACHINE_ID)

        return mounts

@@ -8,7 +8,7 @@ import re
from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
from docker.types import Mount

from ..const import LABEL_MACHINE
from ..const import LABEL_MACHINE, MACHINE_ID
from ..exceptions import DockerJobError
from ..hardware.const import PolicyGroup
from ..homeassistant.const import LANDINGPAGE
@@ -154,7 +154,7 @@ class DockerHomeAssistant(DockerInterface):
        )

        # Machine ID
        if self.sys_machine_id:
        if MACHINE_ID.exists():
            mounts.append(MOUNT_MACHINE_ID)

        return mounts

@@ -53,7 +53,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
IMAGE_WITH_HOST = re.compile(r"^((?:[a-z0-9]+(?:-[a-z0-9]+)*\.)+[a-z]{2,})\/.+")
DOCKER_HUB = "hub.docker.com"

MAP_ARCH: dict[CpuArch | str, str] = {
MAP_ARCH = {
    CpuArch.ARMV7: "linux/arm/v7",
    CpuArch.ARMHF: "linux/arm/v6",
    CpuArch.AARCH64: "linux/arm64",

@@ -1,8 +1,6 @@
"""Manager for Supervisor Docker."""

import asyncio
from contextlib import suppress
from functools import partial
from ipaddress import IPv4Address
import logging
import os
@@ -107,42 +105,19 @@ class DockerAPI:

    def __init__(self, coresys: CoreSys):
        """Initialize Docker base wrapper."""
        self._docker: DockerClient | None = None
        self._network: DockerNetwork | None = None
        self._info: DockerInfo | None = None
        self.docker: DockerClient = DockerClient(
            base_url=f"unix:/{str(SOCKET_DOCKER)}", version="auto", timeout=900
        )
        self.network: DockerNetwork = DockerNetwork(self.docker)
        self._info: DockerInfo = DockerInfo.new(self.docker.info())
        self.config: DockerConfig = DockerConfig()
        self._monitor: DockerMonitor = DockerMonitor(coresys)

    async def post_init(self) -> Self:
        """Post init actions that must be done in event loop."""
        self._docker = await asyncio.get_running_loop().run_in_executor(
            None,
            partial(
                DockerClient,
                base_url=f"unix:/{str(SOCKET_DOCKER)}",
                version="auto",
                timeout=900,
            ),
        )
        self._network = DockerNetwork(self._docker)
        self._info = DockerInfo.new(self.docker.info())
    async def load_config(self) -> Self:
        """Load config in executor."""
        await self.config.read_data()
        return self

    @property
    def docker(self) -> DockerClient:
        """Get docker API client."""
        if not self._docker:
            raise RuntimeError("Docker API Client not initialized!")
        return self._docker

    @property
    def network(self) -> DockerNetwork:
        """Get Docker network."""
        if not self._network:
            raise RuntimeError("Docker Network not initialized!")
        return self._network

    @property
    def images(self) -> ImageCollection:
        """Return API images."""
@@ -161,8 +136,6 @@ class DockerAPI:
    @property
    def info(self) -> DockerInfo:
        """Return local docker info."""
        if not self._info:
            raise RuntimeError("Docker Info not initialized!")
        return self._info

    @property
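A compact sketch of the two-step construction pattern the DockerAPI hunk above removes: a cheap synchronous __init__ plus an async post_init() that performs the blocking client setup in the executor. SlowClient and Wrapper are stand-ins, not the real classes.

import asyncio
from typing import Self

class SlowClient:
    """Stand-in for DockerClient; pretend __init__ blocks on a socket."""

    def __init__(self) -> None:
        self.info = {"version": "test"}

class Wrapper:
    def __init__(self) -> None:
        # Cheap, synchronous constructor; the blocking part is deferred
        self._client: SlowClient | None = None

    async def post_init(self) -> Self:
        self._client = await asyncio.get_running_loop().run_in_executor(None, SlowClient)
        return self

    @property
    def client(self) -> SlowClient:
        if not self._client:
            raise RuntimeError("Client not initialized!")
        return self._client

print(asyncio.run(Wrapper().post_init()).client.info)
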
@@ -25,7 +25,6 @@ class HwHelper(CoreSysAttributes):
    def __init__(self, coresys: CoreSys):
        """Init hardware object."""
        self.coresys = coresys
        self._last_boot: datetime | None = None

    @property
    def support_audio(self) -> bool:
@@ -42,15 +41,11 @@ class HwHelper(CoreSysAttributes):
        """Return True if the device have USB ports."""
        return bool(self.sys_hardware.filter_devices(subsystem=UdevSubsystem.USB))

    async def last_boot(self) -> datetime | None:
    @property
    def last_boot(self) -> datetime | None:
        """Return last boot time."""
        if self._last_boot:
            return self._last_boot

        try:
            stats: str = await self.sys_run_in_executor(
                _PROC_STAT.read_text, encoding="utf-8"
            )
            stats: str = _PROC_STAT.read_text(encoding="utf-8")
        except OSError as err:
            _LOGGER.error("Can't read stat data: %s", err)
            return None
@@ -61,8 +56,7 @@ class HwHelper(CoreSysAttributes):
            _LOGGER.error("Can't found last boot time!")
            return None

        self._last_boot = datetime.fromtimestamp(int(found.group(1)), UTC)
        return self._last_boot
        return datetime.fromtimestamp(int(found.group(1)), UTC)

    def hide_virtual_device(self, udev_device: pyudev.Device) -> bool:
        """Small helper to hide not needed Devices."""

@@ -2,7 +2,6 @@

import logging
from pathlib import Path
from typing import Self

import pyudev

@@ -52,25 +51,17 @@ class HardwareManager(CoreSysAttributes):
        """Initialize Hardware Monitor object."""
        self.coresys: CoreSys = coresys
        self._devices: dict[str, Device] = {}
        self._udev: pyudev.Context | None = None
        self._udev = pyudev.Context()

        self._monitor: HwMonitor | None = None
        self._montior: HwMonitor = HwMonitor(coresys)
        self._helper: HwHelper = HwHelper(coresys)
        self._policy: HwPolicy = HwPolicy(coresys)
        self._disk: HwDisk = HwDisk(coresys)

    async def post_init(self) -> Self:
        """Complete initialization of obect within event loop."""
        self._udev = await self.sys_run_in_executor(pyudev.Context)
        self._monitor: HwMonitor = HwMonitor(self.coresys, self._udev)
        return self

    @property
    def monitor(self) -> HwMonitor:
        """Return Hardware Monitor instance."""
        if not self._monitor:
            raise RuntimeError("Hardware monitor not initialized!")
        return self._monitor
        return self._montior

    @property
    def helper(self) -> HwHelper:

@@ -20,10 +20,10 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
class HwMonitor(CoreSysAttributes):
    """Hardware monitor for supervisor."""

    def __init__(self, coresys: CoreSys, context: pyudev.Context):
    def __init__(self, coresys: CoreSys):
        """Initialize Hardware Monitor object."""
        self.coresys: CoreSys = coresys
        self.context = context
        self.context = pyudev.Context()
        self.monitor: pyudev.Monitor | None = None
        self.observer: pyudev.MonitorObserver | None = None

@@ -40,7 +40,7 @@ class HwMonitor(CoreSysAttributes):
                ),
            )
        except OSError:
            self.sys_resolution.add_unhealthy_reason(UnhealthyReason.PRIVILEGED)
            self.sys_resolution.unhealthy = UnhealthyReason.PRIVILEGED
            _LOGGER.critical("Not privileged to run udev monitor!")
        else:
            self.observer.start()

@@ -1,8 +1,7 @@
"""Home Assistant control object."""

import asyncio
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager, suppress
from contextlib import AbstractAsyncContextManager, asynccontextmanager, suppress
from dataclasses import dataclass
from datetime import UTC, datetime, timedelta
import logging
@@ -11,7 +10,6 @@ from typing import Any
import aiohttp
from aiohttp import hdrs
from awesomeversion import AwesomeVersion
from multidict import MultiMapping

from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import HomeAssistantAPIError, HomeAssistantAuthError
@@ -86,10 +84,10 @@ class HomeAssistantAPI(CoreSysAttributes):
        json: dict[str, Any] | None = None,
        content_type: str | None = None,
        data: Any = None,
        timeout: int | None = 30,
        params: MultiMapping[str] | None = None,
        timeout: int = 30,
        params: dict[str, str] | None = None,
        headers: dict[str, str] | None = None,
    ) -> AsyncIterator[aiohttp.ClientResponse]:
    ) -> AbstractAsyncContextManager[aiohttp.ClientResponse]:
        """Async context manager to make a request with right auth."""
        url = f"{self.sys_homeassistant.api_url}/{path}"
        headers = headers or {}
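On the annotation change above: a function decorated with @asynccontextmanager is written as an async generator (hence AsyncIterator in its own signature), while callers consume the result with async with. A self-contained sketch, with all aiohttp and auth details omitted:

import asyncio
from collections.abc import AsyncIterator
from contextlib import asynccontextmanager

@asynccontextmanager
async def make_request(path: str) -> AsyncIterator[str]:
    # setup (e.g. refreshing an auth token) would run here
    yield f"response for {path}"
    # teardown (e.g. releasing the connection) would run here

async def main() -> None:
    async with make_request("api/states") as resp:
        print(resp)

asyncio.run(main())
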
@@ -342,7 +342,7 @@ class HomeAssistantCore(JobGroup):
        await self.sys_homeassistant.save_data()

        # Write audio settings
        await self.sys_homeassistant.write_pulse()
        self.sys_homeassistant.write_pulse()

        try:
            await self.instance.run(restore_job_id=self.sys_backups.current_restore)

@@ -67,7 +67,6 @@ HOMEASSISTANT_BACKUP_EXCLUDE = [
    "*.corrupt.*",
    "*.log.*",
    "*.log",
    ".storage/*.corrupt.*",
    "OZW_Log.txt",
    "backups/*.tar",
    "tmp_backups/*.tar",
@@ -314,25 +313,22 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes):
            BusEvent.HARDWARE_REMOVE_DEVICE, self._hardware_events
        )

    async def write_pulse(self):
    def write_pulse(self):
        """Write asound config to file and return True on success."""
        pulse_config = self.sys_plugins.audio.pulse_client(
            input_profile=self.audio_input, output_profile=self.audio_output
        )

        def write_pulse_config():
            # Cleanup wrong maps
            if self.path_pulse.is_dir():
                shutil.rmtree(self.path_pulse, ignore_errors=True)
            self.path_pulse.write_text(pulse_config, encoding="utf-8")
        # Cleanup wrong maps
        if self.path_pulse.is_dir():
            shutil.rmtree(self.path_pulse, ignore_errors=True)

        # Write pulse config
        try:
            await self.sys_run_in_executor(write_pulse_config)
            self.path_pulse.write_text(pulse_config, encoding="utf-8")
        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
            _LOGGER.error("Home Assistant can't write pulse/client.config: %s", err)
        else:
            _LOGGER.info("Update pulse/client.config: %s", self.path_pulse)

@@ -49,21 +49,18 @@ class HomeAssistantSecrets(CoreSysAttributes):
    )
    async def _read_secrets(self):
        """Read secrets.yaml into memory."""
        if not self.path_secrets.exists():
            _LOGGER.debug("Home Assistant secrets.yaml does not exist")
            return

        def read_secrets_yaml() -> dict | None:
            if not self.path_secrets.exists():
                _LOGGER.debug("Home Assistant secrets.yaml does not exist")
                return None
        # Read secrets
        try:
            secrets = await self.sys_run_in_executor(read_yaml_file, self.path_secrets)
        except YamlFileError as err:
            _LOGGER.warning("Can't read Home Assistant secrets: %s", err)
            return

            # Read secrets
            try:
                return read_yaml_file(self.path_secrets)
            except YamlFileError as err:
                _LOGGER.warning("Can't read Home Assistant secrets: %s", err)
                return None

        secrets = await self.sys_run_in_executor(read_secrets_yaml)
        if secrets is None or not isinstance(secrets, dict):
        if not isinstance(secrets, dict):
            return

        # Process secrets

@@ -54,16 +54,10 @@ class AppArmorControl(CoreSysAttributes):

    async def load(self) -> None:
        """Load available profiles."""

        def find_profiles() -> set[str]:
            profiles: set[str] = set()
            for content in self.sys_config.path_apparmor.iterdir():
                if not content.is_file():
                    continue
                profiles.add(content.name)
            return profiles

        self._profiles = await self.sys_run_in_executor(find_profiles)
        for content in self.sys_config.path_apparmor.iterdir():
            if not content.is_file():
                continue
            self._profiles.add(content.name)

        _LOGGER.info("Loading AppArmor Profiles: %s", self._profiles)

@@ -90,9 +84,7 @@ class AppArmorControl(CoreSysAttributes):
            await self.sys_run_in_executor(shutil.copyfile, profile_file, dest_profile)
        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
            raise HostAppArmorError(
                f"Can't copy {profile_file}: {err}", _LOGGER.error
            ) from err
@@ -117,9 +109,7 @@ class AppArmorControl(CoreSysAttributes):
            await self.sys_run_in_executor(profile_file.unlink)
        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
            raise HostAppArmorError(
                f"Can't remove profile: {err}", _LOGGER.error
            ) from err
@@ -135,9 +125,7 @@ class AppArmorControl(CoreSysAttributes):
            shutil.copy(profile_file, backup_file)
        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
            raise HostAppArmorError(
                f"Can't backup profile {profile_name}: {err}", _LOGGER.error
            ) from err

@@ -234,7 +234,7 @@ class Interface:

        # WifiMode
        mode = WifiMode.INFRASTRUCTURE
        if inet.settings.wireless and inet.settings.wireless.mode:
        if inet.settings.wireless.mode:
            mode = WifiMode(inet.settings.wireless.mode)

        # Signal
@@ -1,7 +1,7 @@
"""Info control for host."""

import asyncio
from datetime import datetime, tzinfo
from datetime import datetime
import logging

from ..coresys import CoreSysAttributes
@@ -72,11 +72,6 @@ class InfoCenter(CoreSysAttributes):
        """Return host timezone."""
        return self.sys_dbus.timedate.timezone

    @property
    def timezone_tzinfo(self) -> tzinfo | None:
        """Return host timezone as tzinfo object."""
        return self.sys_dbus.timedate.timezone_tzinfo

    @property
    def dt_utc(self) -> datetime | None:
        """Return host UTC time."""

@@ -8,7 +8,6 @@ import json
import logging
import os
from pathlib import Path
from typing import Self

from aiohttp import ClientError, ClientSession, ClientTimeout
from aiohttp.client_exceptions import UnixClientConnectorError
@@ -52,19 +51,13 @@ class LogsControl(CoreSysAttributes):
        self._profiles: set[str] = set()
        self._boot_ids: list[str] = []
        self._default_identifiers: list[str] = []
        self._available: bool = False

    async def post_init(self) -> Self:
        """Post init actions that must occur in event loop."""
        self._available = bool(
            os.environ.get("SUPERVISOR_SYSTEMD_JOURNAL_GATEWAYD_URL")
        ) or await self.sys_run_in_executor(SYSTEMD_JOURNAL_GATEWAYD_SOCKET.is_socket)
        return self

    @property
    def available(self) -> bool:
        """Check if systemd-journal-gatwayd is available."""
        return self._available
        if os.environ.get("SUPERVISOR_SYSTEMD_JOURNAL_GATEWAYD_URL"):
            return True
        return SYSTEMD_JOURNAL_GATEWAYD_SOCKET.is_socket()

    @property
    def boot_ids(self) -> list[str]:
@@ -79,9 +72,7 @@ class LogsControl(CoreSysAttributes):
    async def load(self) -> None:
        """Load log control."""
        try:
            self._default_identifiers = await self.sys_run_in_executor(
                read_json_file, SYSLOG_IDENTIFIERS_JSON
            )
            self._default_identifiers = read_json_file(SYSLOG_IDENTIFIERS_JSON)
        except ConfigurationFileError:
            _LOGGER.warning(
                "Can't read syslog identifiers json file from %s",

@@ -3,7 +3,6 @@

from contextlib import suppress
from functools import lru_cache
import logging
from typing import Self

from awesomeversion import AwesomeVersion

@@ -39,11 +38,6 @@ class HostManager(CoreSysAttributes):
        self._sound: SoundControl = SoundControl(coresys)
        self._logs: LogsControl = LogsControl(coresys)

    async def post_init(self) -> Self:
        """Post init actions that must occur in event loop."""
        await self._logs.post_init()
        return self

    @property
    def apparmor(self) -> AppArmorControl:
        """Return host AppArmor handler."""

@@ -73,15 +73,10 @@ class SupervisorJobError:

    type_: type[HassioError] = HassioError
    message: str = "Unknown error, see supervisor logs"
    stage: str | None = None

    def as_dict(self) -> dict[str, str]:
        """Return dictionary representation."""
        return {
            "type": self.type_.__name__,
            "message": self.message,
            "stage": self.stage,
        }
        return {"type": self.type_.__name__, "message": self.message}


@define(order=True)
@@ -131,9 +126,9 @@ class SupervisorJob:
    def capture_error(self, err: HassioError | None = None) -> None:
        """Capture an error or record that an unknown error has occurred."""
        if err:
            new_error = SupervisorJobError(type(err), str(err), self.stage)
            new_error = SupervisorJobError(type(err), str(err))
        else:
            new_error = SupervisorJobError(stage=self.stage)
            new_error = SupervisorJobError()
        self.errors += [new_error]

    @contextmanager

@@ -35,7 +35,7 @@ class Job(CoreSysAttributes):
        name: str,
        conditions: list[JobCondition] | None = None,
        cleanup: bool = True,
        on_condition: type[JobException] | None = None,
        on_condition: JobException | None = None,
        limit: JobExecutionLimit | None = None,
        throttle_period: timedelta
        | Callable[[CoreSys, datetime, list[datetime] | None], timedelta]

@@ -292,12 +292,9 @@ class MountManager(FileConfiguration, CoreSysAttributes):
                where.as_posix(),
            )
            path = self.sys_config.path_emergency / mount.name
            if not path.exists():
                path.mkdir(mode=0o444)

            def emergency_mkdir():
                if not path.exists():
                    path.mkdir(mode=0o444)

            await self.sys_run_in_executor(emergency_mkdir)
            path = self.sys_config.local_to_extern_path(path)

        self._bound_mounts[mount.name] = bound_mount = BoundMount(

@@ -237,9 +237,7 @@ class OSManager(CoreSysAttributes):

        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
            raise HassOSUpdateError(
                f"Can't write OTA file: {err!s}", _LOGGER.error
            ) from err
@@ -88,15 +88,11 @@ class PluginAudio(PluginBase):
        # Initialize Client Template
        try:
            self.client_template = jinja2.Template(
                await self.sys_run_in_executor(
                    PULSE_CLIENT_TMPL.read_text, encoding="utf-8"
                )
                PULSE_CLIENT_TMPL.read_text(encoding="utf-8")
            )
        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE

            _LOGGER.error("Can't read pulse-client.tmpl: %s", err)

@@ -104,19 +100,13 @@ class PluginAudio(PluginBase):

        # Setup default asound config
        asound = self.sys_config.path_audio.joinpath("asound")

        def setup_default_asound():
            if not asound.exists():
        if not asound.exists():
            try:
                shutil.copy(ASOUND_TMPL, asound)

        try:
            await self.sys_run_in_executor(setup_default_asound)
        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
            _LOGGER.error("Can't create default asound: %s", err)
            except OSError as err:
                if err.errno == errno.EBADMSG:
                    self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
                _LOGGER.error("Can't create default asound: %s", err)

@@ -133,7 +123,7 @@ class PluginAudio(PluginBase):
    async def restart(self) -> None:
        """Restart Audio plugin."""
        _LOGGER.info("Restarting Audio plugin")
        await self._write_config()
        self._write_config()
        try:
            await self.instance.restart()
        except DockerError as err:
@@ -142,7 +132,7 @@ class PluginAudio(PluginBase):
    async def start(self) -> None:
        """Run Audio plugin."""
        _LOGGER.info("Starting Audio plugin")
        await self._write_config()
        self._write_config()
        try:
            await self.instance.run()
        except DockerError as err:
@@ -187,11 +177,10 @@ class PluginAudio(PluginBase):
            default_sink=output_profile,
        )

    async def _write_config(self):
    def _write_config(self):
        """Write pulse audio config."""
        try:
            await self.sys_run_in_executor(
                write_json_file,
            write_json_file(
                self.pulse_audio_config,
                {
                    "debug": self.sys_config.logging == LogLevel.DEBUG,

@@ -152,31 +152,26 @@ class PluginDns(PluginBase):
        # Initialize CoreDNS Template
        try:
            self.resolv_template = jinja2.Template(
                await self.sys_run_in_executor(RESOLV_TMPL.read_text, encoding="utf-8")
                RESOLV_TMPL.read_text(encoding="utf-8")
            )
        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
            _LOGGER.error("Can't read resolve.tmpl: %s", err)

        try:
            self.hosts_template = jinja2.Template(
                await self.sys_run_in_executor(HOSTS_TMPL.read_text, encoding="utf-8")
                HOSTS_TMPL.read_text(encoding="utf-8")
            )
        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
            _LOGGER.error("Can't read hosts.tmpl: %s", err)

        await self._init_hosts()
        await super().load()

        # Update supervisor
        await self._write_resolv(HOST_RESOLV)
        self._write_resolv(HOST_RESOLV)
        await self.sys_supervisor.check_connectivity()

    async def install(self) -> None:
@@ -200,7 +195,7 @@ class PluginDns(PluginBase):

    async def restart(self) -> None:
        """Restart CoreDNS plugin."""
        await self._write_config()
        self._write_config()
        _LOGGER.info("Restarting CoreDNS plugin")
        try:
            await self.instance.restart()
@@ -209,7 +204,7 @@ class PluginDns(PluginBase):

    async def start(self) -> None:
        """Run CoreDNS."""
        await self._write_config()
        self._write_config()

        # Start Instance
        _LOGGER.info("Starting CoreDNS plugin")
@@ -278,7 +273,7 @@ class PluginDns(PluginBase):
        else:
            self._loop = False

    async def _write_config(self) -> None:
    def _write_config(self) -> None:
        """Write CoreDNS config."""
        debug: bool = self.sys_config.logging == LogLevel.DEBUG
        dns_servers: list[str] = []
@@ -302,8 +297,7 @@ class PluginDns(PluginBase):

        # Write config to plugin
        try:
            await self.sys_run_in_executor(
                write_json_file,
            write_json_file(
                self.coredns_config,
                {
                    "servers": dns_servers,
@@ -347,9 +341,7 @@ class PluginDns(PluginBase):
            )
        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
            raise CoreDNSError(f"Can't update hosts: {err}", _LOGGER.error) from err

    async def add_host(
@@ -420,7 +412,7 @@ class PluginDns(PluginBase):
            _LOGGER.error("Repair of CoreDNS failed")
            await async_capture_exception(err)

    async def _write_resolv(self, resolv_conf: Path) -> None:
    def _write_resolv(self, resolv_conf: Path) -> None:
        """Update/Write resolv.conf file."""
        if not self.resolv_template:
            _LOGGER.warning(
@@ -435,12 +427,10 @@ class PluginDns(PluginBase):

        # Write config back to resolv
        try:
            await self.sys_run_in_executor(resolv_conf.write_text, data)
            resolv_conf.write_text(data)
        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
            _LOGGER.warning("Can't write/update %s: %s", resolv_conf, err)
            return

@@ -30,7 +30,9 @@ class CheckCoreSecurity(CheckBase):
        # Security issue < 2021.1.5 & Custom components
        try:
            if self.sys_homeassistant.version < AwesomeVersion("2021.1.5"):
                if await self.sys_run_in_executor(self._custom_components_exists):
                if Path(
                    self.sys_config.path_homeassistant, "custom_components"
                ).exists():
                    self.sys_resolution.create_issue(
                        IssueType.SECURITY,
                        ContextType.CORE,
@@ -47,14 +49,9 @@ class CheckCoreSecurity(CheckBase):
            return False
        except AwesomeVersionException:
            return True
        return await self.sys_run_in_executor(self._custom_components_exists)

    def _custom_components_exists(self) -> bool:
        """Return true if custom components folder exists.

        Must be run in executor.
        """
        return Path(self.sys_config.path_homeassistant, "custom_components").exists()
        if not Path(self.sys_config.path_homeassistant, "custom_components").exists():
            return False
        return True

    @property
    def issue(self) -> IssueType:

@@ -30,7 +30,7 @@ class CheckSupervisorTrust(CheckBase):
        try:
            await self.sys_supervisor.check_trust()
        except CodeNotaryUntrusted:
            self.sys_resolution.add_unhealthy_reason(UnhealthyReason.UNTRUSTED)
            self.sys_resolution.unhealthy = UnhealthyReason.UNTRUSTED
            self.sys_resolution.create_issue(IssueType.TRUST, ContextType.SUPERVISOR)
        except CodeNotaryError:
            pass

@@ -67,6 +67,6 @@ class ResolutionEvaluation(CoreSysAttributes):
                await async_capture_exception(err)

        if any(reason in self.sys_resolution.unsupported for reason in UNHEALTHY):
            self.sys_resolution.add_unhealthy_reason(UnhealthyReason.DOCKER)
            self.sys_resolution.unhealthy = UnhealthyReason.DOCKER

        _LOGGER.info("System evaluation complete")

@@ -36,9 +36,6 @@ class EvaluateAppArmor(EvaluateBase):
    async def evaluate(self) -> None:
        """Run evaluation."""
        try:
            apparmor = await self.sys_run_in_executor(
                _APPARMOR_KERNEL.read_text, encoding="utf-8"
            )
            return _APPARMOR_KERNEL.read_text(encoding="utf-8").strip().upper() != "Y"
        except OSError:
            return True
        return apparmor.strip().upper() != "Y"

@@ -23,7 +23,7 @@ class EvaluateBase(ABC, CoreSysAttributes):
            return
        if await self.evaluate():
            if self.reason not in self.sys_resolution.unsupported:
                self.sys_resolution.add_unsupported_reason(self.reason)
                self.sys_resolution.unsupported = self.reason
                _LOGGER.warning(
                    "%s (more-info: https://www.home-assistant.io/more-info/unsupported/%s)",
                    self.on_failure,

@@ -101,6 +101,6 @@ class EvaluateContainer(EvaluateBase):
                    "Found image in unhealthy image list '%s' on the host",
                    image_name,
                )
                self.sys_resolution.add_unhealthy_reason(UnhealthyReason.DOCKER)
                self.sys_resolution.unhealthy = UnhealthyReason.DOCKER

        return len(self._images) != 0

@@ -34,13 +34,7 @@ class EvaluateLxc(EvaluateBase):

    async def evaluate(self):
        """Run evaluation."""

        def check_lxc():
            with suppress(OSError):
                if "container=lxc" in Path("/proc/1/environ").read_text(
                    encoding="utf-8"
                ):
                    return True
            return Path("/dev/lxd/sock").exists()

        return await self.sys_run_in_executor(check_lxc)
        with suppress(OSError):
            if "container=lxc" in Path("/proc/1/environ").read_text(encoding="utf-8"):
                return True
        return Path("/dev/lxd/sock").exists()

@@ -51,9 +51,7 @@ class EvaluateSourceMods(EvaluateBase):
            )
        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE

            self.sys_resolution.create_issue(
                IssueType.CORRUPT_FILESYSTEM, ContextType.SYSTEM

@@ -39,7 +39,7 @@ class FixupAddonExecuteRepair(FixupBase):
            )
            return

        _LOGGER.info("Installing image for addon %s", reference)
        _LOGGER.info("Installing image for addon %s")
        self.attempts += 1
        await addon.instance.install(addon.version)

@@ -87,12 +87,28 @@ class ResolutionManager(FileConfiguration, CoreSysAttributes):
        """Return a list of issues."""
        return self._issues

    @issues.setter
    def issues(self, issue: Issue) -> None:
        """Add issues."""
        if issue in self._issues:
            return
        _LOGGER.info(
            "Create new issue %s - %s / %s", issue.type, issue.context, issue.reference
        )
        self._issues.append(issue)

        # Event on issue creation
        self.sys_homeassistant.websocket.supervisor_event(
            WSEvent.ISSUE_CHANGED, self._make_issue_message(issue)
        )

    @property
    def suggestions(self) -> list[Suggestion]:
        """Return a list of suggestions that can handled."""
        return self._suggestions

    def add_suggestion(self, suggestion: Suggestion) -> None:
    @suggestions.setter
    def suggestions(self, suggestion: Suggestion) -> None:
        """Add suggestion."""
        if suggestion in self._suggestions:
            return
@@ -116,7 +132,8 @@ class ResolutionManager(FileConfiguration, CoreSysAttributes):
        """Return a list of unsupported reasons."""
        return self._unsupported

    def add_unsupported_reason(self, reason: UnsupportedReason) -> None:
    @unsupported.setter
    def unsupported(self, reason: UnsupportedReason) -> None:
        """Add a reason for unsupported."""
        if reason not in self._unsupported:
            self._unsupported.append(reason)
@@ -127,11 +144,12 @@ class ResolutionManager(FileConfiguration, CoreSysAttributes):

    @property
    def unhealthy(self) -> list[UnhealthyReason]:
        """Return a list of unhealthy reasons."""
        """Return a list of unsupported reasons."""
        return self._unhealthy

    def add_unhealthy_reason(self, reason: UnhealthyReason) -> None:
        """Add a reason for unhealthy."""
    @unhealthy.setter
    def unhealthy(self, reason: UnhealthyReason) -> None:
        """Add a reason for unsupported."""
        if reason not in self._unhealthy:
            self._unhealthy.append(reason)
        self.sys_homeassistant.websocket.supervisor_event(
@@ -180,21 +198,11 @@ class ResolutionManager(FileConfiguration, CoreSysAttributes):
        """Add an issue and suggestions."""
        if suggestions:
            for suggestion in suggestions:
                self.add_suggestion(
                    Suggestion(suggestion, issue.context, issue.reference)
                self.suggestions = Suggestion(
                    suggestion, issue.context, issue.reference
                )

        if issue in self._issues:
            return
        _LOGGER.info(
            "Create new issue %s - %s / %s", issue.type, issue.context, issue.reference
        )
        self._issues.append(issue)

        # Event on issue creation
        self.sys_homeassistant.websocket.supervisor_event(
            WSEvent.ISSUE_CHANGED, self._make_issue_message(issue)
        )
        self.issues = issue

    async def load(self):
        """Load the resoulution manager."""
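The ResolutionManager hunks above trade explicit add_* methods for property setters with append semantics. A runnable toy model of the explicit style (Resolution here is illustrative only); the design point is that a setter pretends to assign a list while actually taking a single item, which explicit methods avoid.

class Resolution:
    """Toy model of the manager above, not the real class."""

    def __init__(self) -> None:
        self._unhealthy: list[str] = []

    @property
    def unhealthy(self) -> list[str]:
        return self._unhealthy

    def add_unhealthy_reason(self, reason: str) -> None:
        # Explicit method: the append-with-dedupe behavior is visible at the call site
        if reason not in self._unhealthy:
            self._unhealthy.append(reason)

res = Resolution()
res.add_unhealthy_reason("docker")
print(res.unhealthy)
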
@@ -75,6 +75,8 @@ class StoreManager(CoreSysAttributes, FileConfiguration):

    async def load(self) -> None:
        """Start up add-on management."""
        await self.data.update()

        # Init custom repositories and load add-ons
        await self.update_repositories(
            self._data[ATTR_REPOSITORIES], add_with_errors=True
@@ -183,7 +185,7 @@ class StoreManager(CoreSysAttributes, FileConfiguration):
                    raise err

            else:
                if not await self.sys_run_in_executor(repository.validate):
                if not repository.validate():
                    if add_with_errors:
                        _LOGGER.error("%s is not a valid add-on repository", url)
                        self.sys_resolution.create_issue(

@@ -179,9 +179,7 @@ class StoreData(CoreSysAttributes):
        except OSError as err:
            suggestion = None
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
            elif path.stem != StoreType.LOCAL:
                suggestion = [SuggestionType.EXECUTE_RESET]
            self.sys_resolution.create_issue(

@@ -49,7 +49,7 @@ class GitRepo(CoreSysAttributes):

    async def load(self) -> None:
        """Init Git add-on repository."""
        if not await self.sys_run_in_executor((self.path / ".git").is_dir):
        if not (self.path / ".git").is_dir():
            await self.clone()
            return

@@ -97,9 +97,7 @@ class GitRepo(CoreSysAttributes):
            }

            try:
                _LOGGER.info(
                    "Cloning add-on %s repository from %s", self.path, self.url
                )
                _LOGGER.info("Cloning add-on %s repository", self.url)
                self.repo = await self.sys_run_in_executor(
                    ft.partial(
                        git.Repo.clone_from, self.url, str(self.path), **git_args
@@ -130,14 +128,7 @@ class GitRepo(CoreSysAttributes):
            return

        async with self.lock:
            _LOGGER.info("Update add-on %s repository from %s", self.path, self.url)

            try:
                git_cmd = git.Git()
                await self.sys_run_in_executor(git_cmd.ls_remote, "--heads", self.url)
            except git.CommandError as err:
                _LOGGER.warning("Wasn't able to update %s repo: %s.", self.url, err)
                raise StoreGitError() from err
            _LOGGER.info("Update add-on %s repository", self.url)

            try:
                branch = self.repo.active_branch.name

@@ -174,9 +174,7 @@ class Supervisor(CoreSysAttributes):

        except OSError as err:
            if err.errno == errno.EBADMSG:
                self.sys_resolution.add_unhealthy_reason(
                    UnhealthyReason.OSERROR_BAD_MESSAGE
                )
                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
            raise SupervisorAppArmorError(
                f"Can't write temporary profile: {err!s}", _LOGGER.error
            ) from err

@@ -1,35 +0,0 @@
"""Activate and deactivate blockbuster for finding blocking I/O."""

from functools import cache
import logging

from blockbuster import BlockBuster

_LOGGER: logging.Logger = logging.getLogger(__name__)


@cache
def _get_blockbuster() -> BlockBuster:
    """Get blockbuster instance."""
    return BlockBuster()


def blockbuster_enabled() -> bool:
    """Return true if blockbuster detection is enabled."""
    blockbuster = _get_blockbuster()
    # We activate all or none so just check the first one
    for _, fn in blockbuster.functions.items():
        return fn.activated
    return False


def activate_blockbuster() -> None:
    """Activate blockbuster detection."""
    _LOGGER.info("Activating BlockBuster blocking I/O detection")
    _get_blockbuster().activate()


def deactivate_blockbuster() -> None:
    """Deactivate blockbuster detection."""
    _LOGGER.info("Deactivating BlockBuster blocking I/O detection")
    _get_blockbuster().deactivate()
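For context on the module deleted above, a hedged sketch of how BlockBuster is typically driven: activate() patches common blocking calls so they raise when run inside the event loop. This assumes the third-party blockbuster package is installed; the exact exception type is not shown in this diff, so a broad except is used.

import asyncio
from pathlib import Path

from blockbuster import BlockBuster  # third-party package used above

async def main() -> None:
    bb = BlockBuster()
    bb.activate()
    try:
        Path("/etc/hostname").read_text()  # blocking read inside the loop
    except Exception as err:  # blockbuster raises when it detects the call
        print(f"detected: {err}")
    finally:
        bb.deactivate()

asyncio.run(main())
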
@@ -18,7 +18,6 @@ from dbus_fast.aio.message_bus import MessageBus
|
||||
from dbus_fast.aio.proxy_object import ProxyInterface, ProxyObject
|
||||
from dbus_fast.errors import DBusError as DBusFastDBusError
|
||||
from dbus_fast.introspection import Node
|
||||
from log_rate_limit import RateLimit, StreamRateLimitFilter
|
||||
|
||||
from ..exceptions import (
|
||||
DBusError,
|
||||
@@ -39,7 +38,6 @@ from ..exceptions import (
|
||||
from .sentry import async_capture_exception
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
_LOGGER.addFilter(StreamRateLimitFilter(period_sec=30, allow_next_n=2))
|
||||
|
||||
DBUS_INTERFACE_OBJECT_MANAGER: str = "org.freedesktop.DBus.ObjectManager"
|
||||
DBUS_INTERFACE_PROPERTIES: str = "org.freedesktop.DBus.Properties"
|
||||
@@ -153,17 +151,11 @@ class DBus:
|
||||
# The systemd D-Bus activate service has a timeout of 25s, which will raise. We should
|
||||
# not end up here unless the D-Bus broker is majorly overwhelmed.
|
||||
_LOGGER.critical(
|
||||
"Timeout connecting to %s - %s",
|
||||
self.bus_name,
|
||||
self.object_path,
|
||||
extra=RateLimit(stream_id=f"dbus_timeout_{self.bus_name}"),
|
||||
"Timeout connecting to %s - %s", self.bus_name, self.object_path
|
||||
)
|
||||
except EOFError:
|
||||
_LOGGER.warning(
|
||||
"Busy system at %s - %s",
|
||||
self.bus_name,
|
||||
self.object_path,
|
||||
extra=RateLimit(stream_id=f"dbus_eof_{self.bus_name}"),
|
||||
"Busy system at %s - %s", self.bus_name, self.object_path
|
||||
)
|
||||
|
||||
await asyncio.sleep(3)
|
||||
|
@@ -69,10 +69,7 @@ def utc_from_timestamp(timestamp: float) -> datetime:
|
||||
|
||||
|
||||
def get_time_zone(time_zone_str: str) -> tzinfo | None:
|
||||
"""Get time zone from string. Return None if unable to determine.
|
||||
|
||||
Must be run in executor.
|
||||
"""
|
||||
"""Get time zone from string. Return None if unable to determine."""
|
||||
try:
|
||||
return zoneinfo.ZoneInfo(time_zone_str)
|
||||
except zoneinfo.ZoneInfoNotFoundError:
|
||||
|
@@ -48,10 +48,7 @@ json_loads = orjson.loads # pylint: disable=no-member
|
||||
|
||||
|
||||
def write_json_file(jsonfile: Path, data: Any) -> None:
|
||||
"""Write a JSON file.
|
||||
|
||||
Must be run in executor.
|
||||
"""
|
||||
"""Write a JSON file."""
|
||||
try:
|
||||
with atomic_write(jsonfile, overwrite=True) as fp:
|
||||
fp.write(
|
||||
@@ -70,10 +67,7 @@ def write_json_file(jsonfile: Path, data: Any) -> None:
|
||||
|
||||
|
||||
def read_json_file(jsonfile: Path) -> Any:
|
||||
"""Read a JSON file and return a dict.
|
||||
|
||||
Must be run in executor.
|
||||
"""
|
||||
"""Read a JSON file and return a dict."""
|
||||
try:
|
||||
return json_loads(jsonfile.read_bytes())
|
||||
except (OSError, ValueError, TypeError, UnicodeDecodeError) as err:
|
||||
|
@@ -1,8 +1,11 @@
|
||||
"""Custom log messages."""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
|
||||
from .sentry import async_capture_exception
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
RE_BIND_FAILED = re.compile(
|
||||
@@ -10,12 +13,17 @@ RE_BIND_FAILED = re.compile(
)


def format_message(message: str) -> str:
    """Return a formatted message if it's known."""
    match = RE_BIND_FAILED.match(message)
    if match:
        return (
            f"Port '{match.group(1)}' is already in use by something else on the host."
        )
def async_format_message(message: str) -> str:
    """Return a formatted message if it's known.

    Must be called from event loop.
    """
    try:
        match = RE_BIND_FAILED.match(message)
        if match:
            return f"Port '{match.group(1)}' is already in use by something else on the host."
    except TypeError as err:
        _LOGGER.error("The type of message is not a string - %s", err)
        asyncio.get_running_loop().create_task(async_capture_exception(err))

    return message
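Note: for a sense of what this rewriter does, RE_BIND_FAILED matches Docker's "port is already allocated" error and extracts the host port. The exact pattern is truncated in this diff, so the regex below is illustrative only:

import re

# Illustrative pattern; the real RE_BIND_FAILED is defined above this hunk.
RE_BIND_FAILED = re.compile(r".*Bind for .*:(\d+) failed: port is already allocated.*")

msg = "driver failed: Bind for 0.0.0.0:8123 failed: port is already allocated"
match = RE_BIND_FAILED.match(msg)
if match:
    print(f"Port '{match.group(1)}' is already in use by something else on the host.")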
@@ -8,14 +8,6 @@ import queue
from typing import Any


class AddonLoggerAdapter(logging.LoggerAdapter):
    """Logging Adapter which prepends log entries with add-on name."""

    def process(self, msg, kwargs):
        """Process the logging message by prepending the add-on name."""
        return f"[{self.extra['addon_name']}] {msg}", kwargs


class SupervisorQueueHandler(logging.handlers.QueueHandler):
    """Process the log in another thread."""
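Note: the adapter in this hunk is standard `logging.LoggerAdapter` usage; the `extra` dict passed at construction is what `process()` reads. A minimal usage sketch, assuming the class above and an invented add-on name:

import logging

logging.basicConfig(level=logging.INFO)
base = logging.getLogger("supervisor.addons")
adapter = AddonLoggerAdapter(base, {"addon_name": "example"})  # class from the hunk above
adapter.info("container restarted")
# emits: [example] container restarted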
@@ -5,9 +5,9 @@ from functools import partial
import logging
from typing import Any

from aiohttp.web_exceptions import HTTPBadGateway, HTTPServiceUnavailable
import sentry_sdk
from sentry_sdk.integrations.aiohttp import AioHttpIntegration
from sentry_sdk.integrations.asyncio import AsyncioIntegration
from sentry_sdk.integrations.atexit import AtexitIntegration
from sentry_sdk.integrations.dedupe import DedupeIntegration
from sentry_sdk.integrations.excepthook import ExcepthookIntegration
@@ -27,24 +27,14 @@ def init_sentry(coresys: CoreSys) -> None:
    """Initialize sentry client."""
    if not sentry_sdk.is_initialized():
        _LOGGER.info("Initializing Supervisor Sentry")
        # Don't use AsyncioIntegration(). We commonly handle task exceptions
        # outside of tasks. This would cause exceptions we gracefully handle to
        # be captured by sentry.
        sentry_sdk.init(
            dsn="https://9c6ea70f49234442b4746e447b24747e@o427061.ingest.sentry.io/5370612",
            before_send=partial(filter_data, coresys),
            auto_enabling_integrations=False,
            default_integrations=False,
            integrations=[
                AioHttpIntegration(
                    failed_request_status_codes=frozenset(range(500, 600))
                    - set(
                        {
                            HTTPBadGateway.status_code,
                            HTTPServiceUnavailable.status_code,
                        }
                    )
                ),
                AioHttpIntegration(),
                AsyncioIntegration(),
                ExcepthookIntegration(),
                DedupeIntegration(),
                AtexitIntegration(),
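Note: the `failed_request_status_codes` expression above is plain set arithmetic: report every 5xx response except 502 and 503, which Supervisor itself emits for expected upstream failures. A worked check, using aiohttp's documented exception classes:

from aiohttp.web_exceptions import HTTPBadGateway, HTTPServiceUnavailable

codes = frozenset(range(500, 600)) - {
    HTTPBadGateway.status_code,          # 502
    HTTPServiceUnavailable.status_code,  # 503
}
assert 500 in codes and 502 not in codes and 503 not in codes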
@@ -61,7 +61,7 @@ def journal_verbose_formatter(entries: dict[str, str]) -> str:

async def journal_logs_reader(
    journal_logs: ClientResponse, log_formatter: LogFormatter = LogFormatter.PLAIN
) -> AsyncGenerator[tuple[str | None, str]]:
) -> AsyncGenerator[str | None, str]:
    """Read logs from systemd journal line by line, formatted using the given formatter.

    Returns a generator of (cursor, formatted_entry) tuples.
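Note on the annotation change: `AsyncGenerator` is parameterized as (yield type, send type), so `AsyncGenerator[str | None, str]` declares a generator that yields `str | None` and accepts `str` via `asend()`, which does not match the docstring. The one-argument form `AsyncGenerator[tuple[str | None, str]]` relies on the default send type (assuming Python 3.13+) and yields (cursor, entry) tuples. A minimal typed sketch of the tuple-yielding shape:

import asyncio
from collections.abc import AsyncGenerator

async def cursor_lines() -> AsyncGenerator[tuple[str | None, str]]:
    """Yield (cursor, formatted_entry) pairs, mirroring the docstring above."""
    yield None, "entry before any cursor is known"
    yield "s=abc123;i=1", "next entry"

async def main() -> None:
    async for cursor, line in cursor_lines():
        print(cursor, line)

asyncio.run(main())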
@@ -18,10 +18,7 @@ def schema_or(schema):


def validate_timezone(timezone):
    """Validate voluptuous timezone.

    Must be run in executor.
    """
    """Validate voluptuous timezone."""
    if get_time_zone(timezone) is not None:
        return timezone
    raise vol.Invalid(
@@ -15,7 +15,6 @@ from .const import (
    ATTR_CONTENT_TRUST,
    ATTR_DEBUG,
    ATTR_DEBUG_BLOCK,
    ATTR_DETECT_BLOCKING_IO,
    ATTR_DIAGNOSTICS,
    ATTR_DISPLAYNAME,
    ATTR_DNS,
@@ -163,7 +162,6 @@ SCHEMA_SUPERVISOR_CONFIG = vol.Schema(
        vol.Optional(ATTR_DEBUG, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEBUG_BLOCK, default=False): vol.Boolean(),
        vol.Optional(ATTR_DIAGNOSTICS, default=None): vol.Maybe(vol.Boolean()),
        vol.Optional(ATTR_DETECT_BLOCKING_IO, default=False): vol.Boolean(),
    },
    extra=vol.REMOVE_EXTRA,
)
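Note: for readers unfamiliar with voluptuous, `vol.Optional(..., default=...)` fills in missing keys and `extra=vol.REMOVE_EXTRA` silently drops unknown ones, which is why dropping a key from this schema also drops it from stored config. A self-contained sketch in the same style (key names shortened for illustration):

import voluptuous as vol

SCHEMA = vol.Schema(
    {
        vol.Optional("debug", default=False): vol.Boolean(),
        vol.Optional("diagnostics", default=None): vol.Maybe(vol.Boolean()),
    },
    extra=vol.REMOVE_EXTRA,
)

# Defaults are applied, unknown keys are removed
assert SCHEMA({}) == {"debug": False, "diagnostics": None}
assert SCHEMA({"debug": True, "junk": "x"}) == {"debug": True, "diagnostics": None}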
@@ -20,7 +20,6 @@ from supervisor.docker.addon import DockerAddon
from supervisor.docker.const import ContainerState
from supervisor.docker.monitor import DockerContainerStateEvent
from supervisor.exceptions import AddonsError, AddonsJobError, AudioUpdateError
from supervisor.hardware.helper import HwHelper
from supervisor.ingress import Ingress
from supervisor.store.repository import Repository
from supervisor.utils.dt import utcnow
@@ -251,7 +250,11 @@ async def test_watchdog_during_attach(

    with (
        patch.object(Addon, "restart") as restart,
        patch.object(HwHelper, "last_boot", return_value=utcnow()),
        patch.object(
            type(coresys.hardware.helper),
            "last_boot",
            new=PropertyMock(return_value=utcnow()),
        ),
        patch.object(DockerAddon, "attach"),
        patch.object(
            DockerAddon,
@@ -259,9 +262,7 @@ async def test_watchdog_during_attach(
            return_value=ContainerState.STOPPED,
        ),
    ):
        coresys.config.last_boot = (
            await coresys.hardware.helper.last_boot() + boot_timedelta
        )
        coresys.config.last_boot = coresys.hardware.helper.last_boot + boot_timedelta
        addon = Addon(coresys, store.slug)
        coresys.addons.local[addon.slug] = addon
        addon.watchdog = True
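Note: the two patch styles above differ because of what `last_boot` is on each side of the change. An async method can be patched with a plain `return_value`, while a property must be replaced on the type with `PropertyMock`. A minimal sketch of the distinction (class and values invented for illustration):

from unittest.mock import PropertyMock, patch

class Helper:
    @property
    def last_boot(self) -> int:
        return 0

helper = Helper()
# Properties live on the class, so patch type(helper), not the instance:
with patch.object(type(helper), "last_boot", new=PropertyMock(return_value=42)):
    assert helper.last_boot == 42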
@@ -738,7 +739,7 @@ async def test_local_example_ingress_port_set(
    assert install_addon_example.ingress_port != 0


async def test_addon_pulse_error(
def test_addon_pulse_error(
    coresys: CoreSys,
    install_addon_example: Addon,
    caplog: pytest.LogCaptureFixture,
@@ -749,14 +750,14 @@ async def test_addon_pulse_error(
        "supervisor.addons.addon.Path.write_text", side_effect=(err := OSError())
    ):
        err.errno = errno.EBUSY
        await install_addon_example.write_pulse()
        install_addon_example.write_pulse()

        assert "can't write pulse/client.config" in caplog.text
        assert coresys.core.healthy is True

        caplog.clear()
        err.errno = errno.EBADMSG
        await install_addon_example.write_pulse()
        install_addon_example.write_pulse()

        assert "can't write pulse/client.config" in caplog.text
        assert coresys.core.healthy is False
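Note: the walrus in `side_effect=(err := OSError())` keeps a handle on the exception instance so the test can flip its `errno` between calls, exercising the transient (EBUSY) and fatal (EBADMSG) branches with one mock. A compact, self-contained sketch of the same trick:

import errno
from unittest.mock import Mock

writer = Mock(side_effect=(err := OSError()))

err.errno = errno.EBUSY      # first call raises EBUSY (treated as transient)
try:
    writer()
except OSError as caught:
    assert caught.errno == errno.EBUSY

err.errno = errno.EBADMSG    # same exception object, now signals corruption
try:
    writer()
except OSError as caught:
    assert caught.errno == errno.EBADMSG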
@@ -20,9 +20,7 @@ async def test_platform_set(coresys: CoreSys, install_addon_ssh: Addon):
            type(coresys.arch), "default", new=PropertyMock(return_value="amd64")
        ),
    ):
        args = await coresys.run_in_executor(
            build.get_docker_args, AwesomeVersion("latest")
        )
        args = build.get_docker_args(AwesomeVersion("latest"))

        assert args["platform"] == "linux/amd64"
@@ -38,14 +36,10 @@ async def test_dockerfile_evaluation(coresys: CoreSys, install_addon_ssh: Addon):
            type(coresys.arch), "default", new=PropertyMock(return_value="amd64")
        ),
    ):
        args = await coresys.run_in_executor(
            build.get_docker_args, AwesomeVersion("latest")
        )
        args = build.get_docker_args(AwesomeVersion("latest"))

        assert args["dockerfile"].endswith("fixtures/addons/local/ssh/Dockerfile")
        assert str(await coresys.run_in_executor(build.get_dockerfile)).endswith(
            "fixtures/addons/local/ssh/Dockerfile"
        )
        assert str(build.dockerfile).endswith("fixtures/addons/local/ssh/Dockerfile")
        assert build.arch == "amd64"
@@ -60,12 +54,10 @@ async def test_dockerfile_evaluation_arch(coresys: CoreSys, install_addon_ssh: Addon):
            type(coresys.arch), "default", new=PropertyMock(return_value="aarch64")
        ),
    ):
        args = await coresys.run_in_executor(
            build.get_docker_args, AwesomeVersion("latest")
        )
        args = build.get_docker_args(AwesomeVersion("latest"))

        assert args["dockerfile"].endswith("fixtures/addons/local/ssh/Dockerfile.aarch64")
        assert str(await coresys.run_in_executor(build.get_dockerfile)).endswith(
        assert str(build.dockerfile).endswith(
            "fixtures/addons/local/ssh/Dockerfile.aarch64"
        )
        assert build.arch == "aarch64"
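Note: the recurring rewrite in these tests moves blocking work (Dockerfile lookups behind `build.dockerfile` and `build.get_docker_args`) off the event loop via `coresys.run_in_executor`. The underlying pattern is plain asyncio; a standalone sketch in which `blocking_probe` is an invented stand-in:

import asyncio
import time

def blocking_probe() -> str:
    time.sleep(0.1)  # stands in for filesystem checks such as locating a Dockerfile
    return "linux/amd64"

async def main() -> None:
    loop = asyncio.get_running_loop()
    platform = await loop.run_in_executor(None, blocking_probe)
    assert platform == "linux/amd64"

asyncio.run(main())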
@@ -82,7 +74,7 @@ async def test_build_valid(coresys: CoreSys, install_addon_ssh: Addon):
            type(coresys.arch), "default", new=PropertyMock(return_value="aarch64")
        ),
    ):
        assert await build.is_valid()
        assert build.is_valid


async def test_build_invalid(coresys: CoreSys, install_addon_ssh: Addon):
@@ -96,4 +88,4 @@ async def test_build_invalid(coresys: CoreSys, install_addon_ssh: Addon):
            type(coresys.arch), "default", new=PropertyMock(return_value="amd64")
        ),
    ):
        assert not await build.is_valid()
        assert not build.is_valid
@@ -409,7 +409,7 @@ async def test_repository_file_error(
        in caplog.text
    )

    await coresys.run_in_executor(write_json_file, repo_file, {"invalid": "bad"})
    write_json_file(repo_file, {"invalid": "bad"})
    await coresys.store.data.update()
    assert f"Repository parse error {repo_dir.as_posix()}" in caplog.text
@@ -58,7 +58,7 @@ async def api_token_validation(aiohttp_client, coresys: CoreSys) -> TestClient:
@pytest.mark.asyncio
async def test_api_security_system_initialize(api_system: TestClient, coresys: CoreSys):
    """Test security."""
    await coresys.core.set_state(CoreState.INITIALIZE)
    coresys.core.state = CoreState.INITIALIZE

    resp = await api_system.get("/supervisor/ping")
    result = await resp.json()
@@ -69,7 +69,7 @@ async def test_api_security_system_initialize(api_system: TestClient, coresys: CoreSys):
@pytest.mark.asyncio
async def test_api_security_system_setup(api_system: TestClient, coresys: CoreSys):
    """Test security."""
    await coresys.core.set_state(CoreState.SETUP)
    coresys.core.state = CoreState.SETUP

    resp = await api_system.get("/supervisor/ping")
    result = await resp.json()
@@ -80,7 +80,7 @@ async def test_api_security_system_setup(api_system: TestClient, coresys: CoreSys):
@pytest.mark.asyncio
async def test_api_security_system_running(api_system: TestClient, coresys: CoreSys):
    """Test security."""
    await coresys.core.set_state(CoreState.RUNNING)
    coresys.core.state = CoreState.RUNNING

    resp = await api_system.get("/supervisor/ping")
    assert resp.status == 200
@@ -89,7 +89,7 @@ async def test_api_security_system_running(api_system: TestClient, coresys: CoreSys):
@pytest.mark.asyncio
async def test_api_security_system_startup(api_system: TestClient, coresys: CoreSys):
    """Test security."""
    await coresys.core.set_state(CoreState.STARTUP)
    coresys.core.state = CoreState.STARTUP

    resp = await api_system.get("/supervisor/ping")
    assert resp.status == 200
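Note: the repeated rewrite in these security tests swaps a bare property assignment for an awaited `set_state()` call, which lets the state transition run async side effects (notifying listeners, persisting state) instead of being a silent attribute write. A minimal sketch of that design choice, with an invented `Core` class:

import asyncio
from enum import Enum

class CoreState(Enum):
    INITIALIZE = "initialize"
    RUNNING = "running"

class Core:
    def __init__(self) -> None:
        self.state: CoreState | None = None

    async def set_state(self, new_state: CoreState) -> None:
        self.state = new_state
        await asyncio.sleep(0)  # placeholder for async listeners / persistence

async def main() -> None:
    core = Core()
    await core.set_state(CoreState.RUNNING)
    assert core.state is CoreState.RUNNING

asyncio.run(main())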
Some files were not shown because too many files have changed in this diff.