Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-04-20 19:27:16 +00:00
Compare commits
No commits in common. "main" and "2025.01.2" have entirely different histories.
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 11 changed lines)
@@ -26,7 +26,7 @@ body:
attributes:
label: What type of installation are you running?
description: >
If you don't know, can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
If you don't know, can be found in [Settings -> System -> Repairs -> System Information](https://my.home-assistant.io/redirect/system_health/).
It is listed as the `Installation Type` value.
options:
- Home Assistant OS
@@ -72,9 +72,9 @@ body:
validations:
required: true
attributes:
label: System information
label: System Health information
description: >
The System information can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
System Health information can be found in the top right menu in [Settings -> System -> Repairs](https://my.home-assistant.io/redirect/repairs/).
Click the copy button at the bottom of the pop-up and paste it here.

[](https://my.home-assistant.io/redirect/system_health/)
@@ -83,9 +83,8 @@ body:
label: Supervisor diagnostics
placeholder: "drag-and-drop the diagnostics data file here (do not copy-and-paste the content)"
description: >-
Supervisor diagnostics can be found in [Settings -> Devices & services](https://my.home-assistant.io/redirect/integrations/).
Find the card that says `Home Assistant Supervisor`, open it, and select the three dot menu of the Supervisor integration entry
and select 'Download diagnostics'.
Supervisor diagnostics can be found in [Settings -> Integrations](https://my.home-assistant.io/redirect/integrations/).
Find the card that says `Home Assistant Supervisor`, open its menu and select 'Download diagnostics'.

**Please drag-and-drop the downloaded file into the textbox below. Do not copy and paste its contents.**
- type: textarea
.github/workflows/builder.yml (vendored, 14 changed lines)
@@ -106,9 +106,9 @@ jobs:

- name: Build wheels
if: needs.init.outputs.requirements == 'true'
uses: home-assistant/wheels@2025.03.0
uses: home-assistant/wheels@2024.11.0
with:
abi: cp313
abi: cp312
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
@@ -125,13 +125,13 @@ jobs:

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.4.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

- name: Install Cosign
if: needs.init.outputs.publish == 'true'
uses: sigstore/cosign-installer@v3.8.1
uses: sigstore/cosign-installer@v3.7.0
with:
cosign-release: "v2.4.0"

@@ -149,7 +149,7 @@ jobs:

- name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v3.4.0
uses: docker/login-action@v3.3.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -160,7 +160,7 @@ jobs:
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV

- name: Build supervisor
uses: home-assistant/builder@2025.03.0
uses: home-assistant/builder@2024.08.2
with:
args: |
$BUILD_ARGS \
@@ -207,7 +207,7 @@ jobs:

- name: Build the Supervisor
if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2025.03.0
uses: home-assistant/builder@2024.08.2
with:
args: |
--test \
.github/workflows/ci.yaml (vendored, 50 changed lines)
@@ -28,12 +28,12 @@ jobs:
uses: actions/checkout@v4.2.2
- name: Set up Python
id: python
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.4.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: venv
key: |
@@ -47,7 +47,7 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
@@ -69,13 +69,13 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.4.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: venv
key: |
@@ -87,7 +87,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -112,13 +112,13 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.4.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: venv
key: |
@@ -130,7 +130,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -170,13 +170,13 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.4.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: venv
key: |
@@ -188,7 +188,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -214,13 +214,13 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.4.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: venv
key: |
@@ -232,7 +232,7 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
@@ -258,13 +258,13 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.4.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: venv
key: |
@@ -294,17 +294,17 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.4.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Install Cosign
uses: sigstore/cosign-installer@v3.8.1
uses: sigstore/cosign-installer@v3.7.0
with:
cosign-release: "v2.4.0"
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: venv
key: |
@@ -339,7 +339,7 @@ jobs:
-o console_output_style=count \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@v4.6.2
uses: actions/upload-artifact@v4.6.0
with:
name: coverage-${{ matrix.python-version }}
path: .coverage
@@ -353,13 +353,13 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.5.0
uses: actions/setup-python@v5.4.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.2.3
uses: actions/cache@v4.2.0
with:
path: venv
key: |
@@ -370,7 +370,7 @@ jobs:
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.2.1
uses: actions/download-artifact@v4.1.8
- name: Combine coverage results
run: |
. venv/bin/activate
@@ -378,4 +378,4 @@ jobs:
coverage report
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v5.4.2
uses: codecov/codecov-action@v5.3.1
.github/workflows/sentry.yaml (vendored, 2 changed lines)
@@ -12,7 +12,7 @@ jobs:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Sentry Release
uses: getsentry/action-release@v3.1.1
uses: getsentry/action-release@v1.9.0
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
.github/workflows/update_frontend.yml (vendored, 32 changed lines)
@@ -10,8 +10,7 @@ jobs:
runs-on: ubuntu-latest
outputs:
skip: ${{ steps.check_version.outputs.skip || steps.check_existing_pr.outputs.skip }}
current_version: ${{ steps.check_version.outputs.current_version }}
latest_version: ${{ steps.latest_frontend_version.outputs.latest_tag }}
latest_tag: ${{ steps.latest_frontend_version.outputs.latest_tag }}
steps:
- name: Checkout code
uses: actions/checkout@v4
@@ -24,11 +23,11 @@ jobs:
- name: Check if version is up to date
id: check_version
run: |
current_version="$(cat .ha-frontend-version)"
latest_version="${{ steps.latest_frontend_version.outputs.latest_tag }}"
echo "current_version=${current_version}" >> $GITHUB_OUTPUT
echo "LATEST_VERSION=${latest_version}" >> $GITHUB_ENV
if [[ ! "$current_version" < "$latest_version" ]]; then
SUPERVISOR_VERSION=$(cat .ha-frontend-version)
LATEST_VERSION=${{ steps.latest_frontend_version.outputs.latest_tag }}
echo "SUPERVISOR_VERSION=$SUPERVISOR_VERSION" >> $GITHUB_ENV
echo "LATEST_VERSION=$LATEST_VERSION" >> $GITHUB_ENV
if [[ ! "$SUPERVISOR_VERSION" < "$LATEST_VERSION" ]]; then
echo "Frontend version is up to date"
echo "skip=true" >> $GITHUB_OUTPUT
fi
@@ -38,7 +37,7 @@ jobs:
env:
GH_TOKEN: ${{ github.token }}
run: |
PR=$(gh pr list --state open --base main --json title --search "Update frontend to version $LATEST_VERSION")
PR=$(gh pr list --state open --base main --json title --search "Autoupdate frontend to version $LATEST_VERSION")
if [[ "$PR" != "[]" ]]; then
echo "Skipping - There is already a PR open for version $LATEST_VERSION"
echo "skip=true" >> $GITHUB_OUTPUT
@@ -55,28 +54,21 @@ jobs:
rm -rf supervisor/api/panel/*
- name: Update version file
run: |
echo "${{ needs.check-version.outputs.latest_version }}" > .ha-frontend-version
echo "${{ needs.check-version.outputs.latest_tag }}" > .ha-frontend-version
- name: Download release assets
uses: robinraju/release-downloader@v1
with:
repository: 'home-assistant/frontend'
tag: ${{ needs.check-version.outputs.latest_version }}
fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_version }}.tar.gz
tag: ${{ needs.check-version.outputs.latest_tag }}
fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_tag }}.tar.gz
extract: true
out-file-path: supervisor/api/panel/
- name: Remove release assets archive
run: |
rm -f supervisor/api/panel/home_assistant_frontend_supervisor-*.tar.gz
- name: Create PR
uses: peter-evans/create-pull-request@v7
with:
commit-message: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
commit-message: "Autoupdate frontend to version ${{ needs.check-version.outputs.latest_tag }}"
branch: autoupdate-frontend
base: main
draft: true
sign-commits: true
title: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
body: >
Update frontend from ${{ needs.check-version.outputs.current_version }} to
[${{ needs.check-version.outputs.latest_version }}](https://github.com/home-assistant/frontend/releases/tag/${{ needs.check-version.outputs.latest_version }})

title: "Autoupdate frontend to version ${{ needs.check-version.outputs.latest_tag }}"
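A note on the version check in this workflow: bash's `[[ "$a" < "$b" ]]` compares strings lexicographically, which happens to order the date-based frontend tags correctly while the patch component stays single-digit. A minimal Python sketch of that assumption (the tag values are taken from the .ha-frontend-version diff below; the parse helper is illustrative, not part of the workflow):

    # Sketch: why the workflow's lexicographic check usually works for
    # date-based tags, and where it breaks down.
    current = "20241127.8"   # value on the 2025.01.2 side
    latest = "20250401.0"    # value on the main side

    # The workflow skips unless current < latest (string comparison).
    assert current < latest  # fine: the YYYYMMDD prefix dominates

    # Caveat: string order fails once the patch number gains a digit.
    assert "20241127.10" < "20241127.8"  # lexicographic, numerically wrong

    def parse(tag: str) -> tuple[int, int]:
        """Split 'YYYYMMDD.patch' into comparable integers (illustrative)."""
        date, patch = tag.split(".")
        return int(date), int(patch)

    assert parse("20241127.10") > parse("20241127.8")  # numeric order restored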
.ha-frontend-version
@@ -1 +1 @@
20250401.0
20241127.8
Dockerfile (12 changed lines)
@@ -9,8 +9,7 @@ ENV \

ARG \
COSIGN_VERSION \
BUILD_ARCH \
QEMU_CPU
BUILD_ARCH

# Install base
WORKDIR /usr/src
@@ -29,23 +28,22 @@ RUN \
\
&& curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
&& chmod a+x /usr/bin/cosign \
&& pip3 install uv==0.6.1
&& pip3 install uv==0.2.21

# Install requirements
COPY requirements.txt .
RUN \
if [ "${BUILD_ARCH}" = "i386" ]; then \
setarch="linux32"; \
linux32 uv pip install --no-build -r requirements.txt; \
else \
setarch=""; \
uv pip install --no-build -r requirements.txt; \
fi \
&& ${setarch} uv pip install --compile-bytecode --no-cache --no-build -r requirements.txt \
&& rm -f requirements.txt

# Install Home Assistant Supervisor
COPY . supervisor
RUN \
uv pip install --no-cache -e ./supervisor \
pip3 install -e ./supervisor \
&& python3 -m compileall ./supervisor/supervisor
pyproject.toml
@@ -1,5 +1,5 @@
[build-system]
requires = ["setuptools~=78.1.0", "wheel~=0.46.1"]
requires = ["setuptools~=75.8.0", "wheel~=0.45.0"]
build-backend = "setuptools.build_meta"

[project]
requirements.txt
@@ -1,30 +1,29 @@
aiodns==3.2.0
aiohttp==3.11.16
aiohttp==3.11.11
atomicwrites-homeassistant==1.4.1
attrs==25.3.0
attrs==25.1.0
awesomeversion==24.6.0
blockbuster==1.5.24
brotli==1.1.0
ciso8601==2.3.2
colorlog==6.9.0
cpe==1.3.1
cryptography==44.0.2
debugpy==1.8.14
cryptography==44.0.0
debugpy==1.8.12
deepmerge==2.0
dirhash==0.5.0
docker==7.1.0
faust-cchardet==2.1.19
gitpython==3.1.44
jinja2==3.1.6
log-rate-limit==1.4.2
orjson==3.10.16
jinja2==3.1.5
orjson==3.10.12
pulsectl==24.12.0
pyudev==0.24.3
PyYAML==6.0.2
requests==2.32.3
securetar==2025.2.1
sentry-sdk==2.26.1
setuptools==78.1.0
securetar==2025.1.4
sentry-sdk==2.20.0
setuptools==75.8.0
voluptuous==0.15.2
dbus-fast==2.44.1
zlib-fast==0.2.1
dbus-fast==2.30.2
typing_extensions==4.12.2
zlib-fast==0.2.0
requirements_tests.txt
@@ -1,12 +1,13 @@
astroid==3.3.9
coverage==7.8.0
pre-commit==4.2.0
pylint==3.3.6
astroid==3.3.8
coverage==7.6.10
pre-commit==4.1.0
pylint==3.3.4
pytest-aiohttp==1.1.0
pytest-asyncio==0.25.2
pytest-cov==6.1.1
pytest-cov==6.0.0
pytest-timeout==2.3.1
pytest==8.3.5
ruff==0.11.6
pytest==8.3.4
ruff==0.9.4
time-machine==2.16.0
urllib3==2.4.0
typing_extensions==4.12.2
urllib3==2.3.0
supervisor/__main__.py
@@ -11,12 +11,10 @@ import zlib_fast
# Enable fast zlib before importing supervisor
zlib_fast.enable()

# pylint: disable=wrong-import-position
from supervisor import bootstrap  # noqa: E402
from supervisor.utils.blockbuster import activate_blockbuster  # noqa: E402
from supervisor.utils.logging import activate_log_queue_handler  # noqa: E402

# pylint: enable=wrong-import-position
from supervisor import bootstrap  # pylint: disable=wrong-import-position # noqa: E402
from supervisor.utils.logging import (  # pylint: disable=wrong-import-position # noqa: E402
activate_log_queue_handler,
)

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -54,11 +52,10 @@ if __name__ == "__main__":
_LOGGER.info("Initializing Supervisor setup")
coresys = loop.run_until_complete(bootstrap.initialize_coresys())
loop.set_debug(coresys.config.debug)
if coresys.config.detect_blocking_io:
activate_blockbuster()
loop.run_until_complete(coresys.core.connect())

loop.run_until_complete(bootstrap.supervisor_debugger(coresys))
bootstrap.supervisor_debugger(coresys)
bootstrap.migrate_system_env(coresys)

# Signal health startup for container
run_os_startup_check_cleanup()
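For context on activate_blockbuster() above: the blockbuster package (pinned in requirements.txt in this diff) flags blocking calls made on the event loop thread. The following is a rough, self-contained sketch of the idea only, not the library's actual implementation:

    # Sketch: detect blocking calls on the event loop by guarding a
    # known-blocking function. Illustrative only.
    import asyncio
    import time

    _original_sleep = time.sleep

    def _guarded_sleep(seconds: float) -> None:
        """Refuse to block while the calling thread runs an event loop."""
        try:
            asyncio.get_running_loop()
        except RuntimeError:
            _original_sleep(seconds)  # worker thread: blocking is fine
        else:
            raise RuntimeError("blocking time.sleep() called inside the event loop")

    time.sleep = _guarded_sleep

    async def main() -> None:
        time.sleep(0.1)  # would raise: this call would stall the loop

    # asyncio.run(main()) now surfaces the blocking call instead of stalling.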
supervisor/addons/addon.py
@@ -18,9 +18,9 @@ from tempfile import TemporaryDirectory
from typing import Any, Final

import aiohttp
from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
from awesomeversion import AwesomeVersionCompareException
from deepmerge import Merger
from securetar import AddFileError, atomic_contents_add, secure_path
from securetar import atomic_contents_add, secure_path
import voluptuous as vol
from voluptuous.humanize import humanize_error

@@ -88,7 +88,7 @@ from ..store.addon import AddonStore
from ..utils import check_port
from ..utils.apparmor import adjust_profile
from ..utils.json import read_json_file, write_json_file
from ..utils.sentry import async_capture_exception
from ..utils.sentry import capture_exception
from .const import (
WATCHDOG_MAX_ATTEMPTS,
WATCHDOG_RETRY_SECONDS,
@@ -140,7 +140,9 @@ class Addon(AddonModel):
super().__init__(coresys, slug)
self.instance: DockerAddon = DockerAddon(coresys, self)
self._state: AddonState = AddonState.UNKNOWN
self._manual_stop: bool = False
self._manual_stop: bool = (
self.sys_hardware.helper.last_boot != self.sys_config.last_boot
)
self._listeners: list[EventListener] = []
self._startup_event = asyncio.Event()
self._startup_task: asyncio.Task | None = None
@@ -214,10 +216,6 @@ class Addon(AddonModel):

async def load(self) -> None:
"""Async initialize of object."""
self._manual_stop = (
await self.sys_hardware.helper.last_boot() != self.sys_config.last_boot
)

if self.is_detached:
await super().refresh_path_cache()

@@ -245,7 +243,7 @@ class Addon(AddonModel):
await self.instance.install(self.version, default_image, arch=self.arch)

self.persist[ATTR_IMAGE] = default_image
await self.save_persist()
self.save_persist()

@property
def ip_address(self) -> IPv4Address:
@@ -285,28 +283,28 @@ class Addon(AddonModel):
@property
def with_icon(self) -> bool:
"""Return True if an icon exists."""
if self.is_detached or not self.addon_store:
if self.is_detached:
return super().with_icon
return self.addon_store.with_icon

@property
def with_logo(self) -> bool:
"""Return True if a logo exists."""
if self.is_detached or not self.addon_store:
if self.is_detached:
return super().with_logo
return self.addon_store.with_logo

@property
def with_changelog(self) -> bool:
"""Return True if a changelog exists."""
if self.is_detached or not self.addon_store:
if self.is_detached:
return super().with_changelog
return self.addon_store.with_changelog

@property
def with_documentation(self) -> bool:
"""Return True if a documentation exists."""
if self.is_detached or not self.addon_store:
if self.is_detached:
return super().with_documentation
return self.addon_store.with_documentation

@@ -316,7 +314,7 @@ class Addon(AddonModel):
return self._available(self.data_store)

@property
def version(self) -> AwesomeVersion:
def version(self) -> str | None:
"""Return installed version."""
return self.persist[ATTR_VERSION]

@@ -464,7 +462,7 @@ class Addon(AddonModel):
return None

@property
def latest_version(self) -> AwesomeVersion:
def latest_version(self) -> str:
"""Return version of add-on."""
return self.data_store[ATTR_VERSION]

@@ -518,8 +516,9 @@ class Addon(AddonModel):
def webui(self) -> str | None:
"""Return URL to webui or None."""
url = super().webui
if not url or not (webui := RE_WEBUI.match(url)):
if not url:
return None
webui = RE_WEBUI.match(url)

# extract arguments
t_port = webui.group("t_port")
@@ -668,15 +667,16 @@ class Addon(AddonModel):
"""Is add-on loaded."""
return bool(self._listeners)

async def save_persist(self) -> None:
def save_persist(self) -> None:
"""Save data of add-on."""
await self.sys_addons.data.save_data()
self.sys_addons.data.save_data()

async def watchdog_application(self) -> bool:
"""Return True if application is running."""
url = self.watchdog_url
if not url or not (application := RE_WATCHDOG.match(url)):
url = super().watchdog
if not url:
return True
application = RE_WATCHDOG.match(url)

# extract arguments
t_port = int(application.group("t_port"))
@@ -685,10 +685,8 @@ class Addon(AddonModel):
s_suffix = application.group("s_suffix") or ""

# search host port for this docker port
if self.host_network and self.ports:
port = self.ports.get(f"{t_port}/tcp")
if port is None:
port = t_port
if self.host_network:
port = self.ports.get(f"{t_port}/tcp", t_port)
else:
port = t_port

@@ -722,7 +720,7 @@ class Addon(AddonModel):

try:
options = self.schema.validate(self.options)
await self.sys_run_in_executor(write_json_file, self.path_options, options)
write_json_file(self.path_options, options)
except vol.Invalid as ex:
_LOGGER.error(
"Add-on %s has invalid options: %s",
@@ -753,12 +751,9 @@ class Addon(AddonModel):
for listener in self._listeners:
self.sys_bus.remove_listener(listener)

def remove_data_dir():
if self.path_data.is_dir():
_LOGGER.info("Removing add-on data folder %s", self.path_data)
remove_data(self.path_data)

await self.sys_run_in_executor(remove_data_dir)
if self.path_data.is_dir():
_LOGGER.info("Removing add-on data folder %s", self.path_data)
await remove_data(self.path_data)

async def _check_ingress_port(self):
"""Assign a ingress port if dynamic port selection is used."""
@@ -777,20 +772,14 @@ class Addon(AddonModel):
)
async def install(self) -> None:
"""Install and setup this addon."""
if not self.addon_store:
raise AddonsError("Missing from store, cannot install!")

await self.sys_addons.data.install(self.addon_store)
self.sys_addons.data.install(self.addon_store)
await self.load()

def setup_data():
if not self.path_data.is_dir():
_LOGGER.info(
"Creating Home Assistant add-on data folder %s", self.path_data
)
self.path_data.mkdir()

await self.sys_run_in_executor(setup_data)
if not self.path_data.is_dir():
_LOGGER.info(
"Creating Home Assistant add-on data folder %s", self.path_data
)
self.path_data.mkdir()

# Setup/Fix AppArmor profile
await self.install_apparmor()
@@ -801,7 +790,7 @@ class Addon(AddonModel):
self.latest_version, self.addon_store.image, arch=self.arch
)
except DockerError as err:
await self.sys_addons.data.uninstall(self)
self.sys_addons.data.uninstall(self)
raise AddonsError() from err

# Add to addon manager
@@ -829,17 +818,14 @@ class Addon(AddonModel):

await self.unload()

def cleanup_config_and_audio():
# Remove config if present and requested
if self.addon_config_used and remove_config:
remove_data(self.path_config)
# Remove config if present and requested
if self.addon_config_used and remove_config:
await remove_data(self.path_config)

# Cleanup audio settings
if self.path_pulse.exists():
with suppress(OSError):
self.path_pulse.unlink()

await self.sys_run_in_executor(cleanup_config_and_audio)
# Cleanup audio settings
if self.path_pulse.exists():
with suppress(OSError):
self.path_pulse.unlink()

# Cleanup AppArmor profile
with suppress(HostAppArmorError):
@@ -853,23 +839,23 @@ class Addon(AddonModel):

# Cleanup Ingress dynamic port assignment
if self.with_ingress:
await self.sys_ingress.del_dynamic_port(self.slug)
self.sys_create_task(self.sys_ingress.reload())
self.sys_ingress.del_dynamic_port(self.slug)

# Cleanup discovery data
for message in self.sys_discovery.list_messages:
if message.addon != self.slug:
continue
await self.sys_discovery.remove(message)
self.sys_discovery.remove(message)

# Cleanup services data
for service in self.sys_services.list_services:
if self.slug not in service.active:
continue
await service.del_service_data(self)
service.del_service_data(self)

# Remove from addon manager
await self.sys_addons.data.uninstall(self)
self.sys_addons.data.uninstall(self)
self.sys_addons.local.pop(self.slug)

@Job(
@@ -883,9 +869,6 @@ class Addon(AddonModel):
Returns a Task that completes when addon has state 'started' (see start)
if it was running. Else nothing is returned.
"""
if not self.addon_store:
raise AddonsError("Missing from store, cannot update!")

old_image = self.image
# Cache data to prevent races with other updates to global
store = self.addon_store.clone()
@@ -901,7 +884,7 @@ class Addon(AddonModel):

try:
_LOGGER.info("Add-on '%s' successfully updated", self.slug)
await self.sys_addons.data.update(store)
self.sys_addons.data.update(store)
await self._check_ingress_port()

# Cleanup
@@ -942,9 +925,7 @@ class Addon(AddonModel):
except DockerError as err:
raise AddonsError() from err

if self.addon_store:
await self.sys_addons.data.update(self.addon_store)

self.sys_addons.data.update(self.addon_store)
await self._check_ingress_port()
_LOGGER.info("Add-on '%s' successfully rebuilt", self.slug)

@@ -957,25 +938,22 @@ class Addon(AddonModel):
)
return out

async def write_pulse(self) -> None:
def write_pulse(self) -> None:
"""Write asound config to file and return True on success."""
pulse_config = self.sys_plugins.audio.pulse_client(
input_profile=self.audio_input, output_profile=self.audio_output
)

def write_pulse_config():
# Cleanup wrong maps
if self.path_pulse.is_dir():
shutil.rmtree(self.path_pulse, ignore_errors=True)
self.path_pulse.write_text(pulse_config, encoding="utf-8")
# Cleanup wrong maps
if self.path_pulse.is_dir():
shutil.rmtree(self.path_pulse, ignore_errors=True)

# Write pulse config
try:
await self.sys_run_in_executor(write_pulse_config)
self.path_pulse.write_text(pulse_config, encoding="utf-8")
except OSError as err:
if err.errno == errno.EBADMSG:
self.sys_resolution.add_unhealthy_reason(
UnhealthyReason.OSERROR_BAD_MESSAGE
)
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
_LOGGER.error(
"Add-on %s can't write pulse/client.config: %s", self.slug, err
)
@@ -987,7 +965,7 @@ class Addon(AddonModel):
async def install_apparmor(self) -> None:
"""Install or Update AppArmor profile for Add-on."""
exists_local = self.sys_host.apparmor.exists(self.slug)
exists_addon = await self.sys_run_in_executor(self.path_apparmor.exists)
exists_addon = self.path_apparmor.exists()

# Nothing to do
if not exists_local and not exists_addon:
@@ -999,21 +977,11 @@ class Addon(AddonModel):
return

# Need install/update
tmp_folder: TemporaryDirectory | None = None
with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_folder:
profile_file = Path(tmp_folder, "apparmor.txt")

def install_update_profile() -> Path:
nonlocal tmp_folder
tmp_folder = TemporaryDirectory(dir=self.sys_config.path_tmp)
profile_file = Path(tmp_folder.name, "apparmor.txt")
adjust_profile(self.slug, self.path_apparmor, profile_file)
return profile_file

try:
profile_file = await self.sys_run_in_executor(install_update_profile)
await self.sys_host.apparmor.load_profile(self.slug, profile_file)
finally:
if tmp_folder:
await self.sys_run_in_executor(tmp_folder.cleanup)

async def uninstall_apparmor(self) -> None:
"""Remove AppArmor profile for Add-on."""
@@ -1085,14 +1053,14 @@ class Addon(AddonModel):

# Access Token
self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
await self.save_persist()
self.save_persist()

# Options
await self.write_options()

# Sound
if self.with_audio:
await self.write_pulse()
self.write_pulse()

def _check_addon_config_dir():
if self.path_config.is_dir():
@@ -1270,45 +1238,46 @@ class Addon(AddonModel):
Returns a Task that completes when addon has state 'started' (see start)
for cold backup. Else nothing is returned.
"""
wait_for_start: Awaitable[None] | None = None

def _addon_backup(
store_image: bool,
metadata: dict[str, Any],
apparmor_profile: str | None,
addon_config_used: bool,
):
"""Start the backup process."""
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
temp_path = Path(temp)
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
temp_path = Path(temp)

# store local image
if store_image:
try:
self.instance.export_image(temp_path.joinpath("image.tar"))
except DockerError as err:
raise AddonsError() from err

# Store local configs/state
# store local image
if self.need_build:
try:
write_json_file(temp_path.joinpath("addon.json"), metadata)
except ConfigurationFileError as err:
await self.instance.export_image(temp_path.joinpath("image.tar"))
except DockerError as err:
raise AddonsError() from err

data = {
ATTR_USER: self.persist,
ATTR_SYSTEM: self.data,
ATTR_VERSION: self.version,
ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
}

# Store local configs/state
try:
write_json_file(temp_path.joinpath("addon.json"), data)
except ConfigurationFileError as err:
raise AddonsError(
f"Can't save meta for {self.slug}", _LOGGER.error
) from err

# Store AppArmor Profile
if self.sys_host.apparmor.exists(self.slug):
profile = temp_path.joinpath("apparmor.txt")
try:
await self.sys_host.apparmor.backup_profile(self.slug, profile)
except HostAppArmorError as err:
raise AddonsError(
f"Can't save meta for {self.slug}", _LOGGER.error
"Can't backup AppArmor profile", _LOGGER.error
) from err

# Store AppArmor Profile
if apparmor_profile:
profile_backup_file = temp_path.joinpath("apparmor.txt")
try:
self.sys_host.apparmor.backup_profile(
apparmor_profile, profile_backup_file
)
except HostAppArmorError as err:
raise AddonsError(
"Can't backup AppArmor profile", _LOGGER.error
) from err

# Write tarfile
# write into tarfile
def _write_tarfile():
"""Write tar inside loop."""
with tar_file as backup:
# Backup metadata
backup.add(temp, arcname=".")
@@ -1324,7 +1293,7 @@ class Addon(AddonModel):
)

# Backup config
if addon_config_used:
if self.addon_config_used:
atomic_contents_add(
backup,
self.path_config,
@@ -1334,39 +1303,19 @@ class Addon(AddonModel):
arcname="config",
)

wait_for_start: asyncio.Task | None = None

data = {
ATTR_USER: self.persist,
ATTR_SYSTEM: self.data,
ATTR_VERSION: self.version,
ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
}
apparmor_profile = (
self.slug if self.sys_host.apparmor.exists(self.slug) else None
)

was_running = await self.begin_backup()
try:
_LOGGER.info("Building backup for add-on %s", self.slug)
await self.sys_run_in_executor(
partial(
_addon_backup,
store_image=self.need_build,
metadata=data,
apparmor_profile=apparmor_profile,
addon_config_used=self.addon_config_used,
)
)
_LOGGER.info("Finish backup for addon %s", self.slug)
except (tarfile.TarError, OSError, AddFileError) as err:
raise AddonsError(
f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
) from err
finally:
if was_running:
wait_for_start = await self.end_backup()
is_running = await self.begin_backup()
try:
_LOGGER.info("Building backup for add-on %s", self.slug)
await self.sys_run_in_executor(_write_tarfile)
except (tarfile.TarError, OSError) as err:
raise AddonsError(
f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
) from err
finally:
if is_running:
wait_for_start = await self.end_backup()

_LOGGER.info("Finish backup for addon %s", self.slug)
return wait_for_start

@Job(
@@ -1380,37 +1329,31 @@ class Addon(AddonModel):
Returns a Task that completes when addon has state 'started' (see start)
if addon is started after restore. Else nothing is returned.
"""
wait_for_start: asyncio.Task | None = None

# Extract backup
def _extract_tarfile() -> tuple[TemporaryDirectory, dict[str, Any]]:
"""Extract tar backup."""
tmp = TemporaryDirectory(dir=self.sys_config.path_tmp)
try:
wait_for_start: Awaitable[None] | None = None
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
# extract backup
def _extract_tarfile():
"""Extract tar backup."""
with tar_file as backup:
backup.extractall(
path=tmp.name,
path=Path(temp),
members=secure_path(backup),
filter="fully_trusted",
)

data = read_json_file(Path(tmp.name, "addon.json"))
except:
tmp.cleanup()
raise
try:
await self.sys_run_in_executor(_extract_tarfile)
except tarfile.TarError as err:
raise AddonsError(
f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
) from err

return tmp, data
# Read backup data
try:
data = read_json_file(Path(temp, "addon.json"))
except ConfigurationFileError as err:
raise AddonsError() from err

try:
tmp, data = await self.sys_run_in_executor(_extract_tarfile)
except tarfile.TarError as err:
raise AddonsError(
f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
) from err
except ConfigurationFileError as err:
raise AddonsError() from err

try:
# Validate
try:
data = SCHEMA_ADDON_BACKUP(data)
@@ -1430,7 +1373,7 @@ class Addon(AddonModel):
# Restore local add-on information
_LOGGER.info("Restore config for addon %s", self.slug)
restore_image = self._image(data[ATTR_SYSTEM])
await self.sys_addons.data.restore(
self.sys_addons.data.restore(
self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image
)

@@ -1444,7 +1387,7 @@ class Addon(AddonModel):
if not await self.instance.exists():
_LOGGER.info("Restore/Install of image for addon %s", self.slug)

image_file = Path(tmp.name, "image.tar")
image_file = Path(temp, "image.tar")
if image_file.is_file():
with suppress(DockerError):
await self.instance.import_image(image_file)
@@ -1463,24 +1406,24 @@ class Addon(AddonModel):
# Restore data and config
def _restore_data():
"""Restore data and config."""
_LOGGER.info("Restoring data and config for addon %s", self.slug)
if self.path_data.is_dir():
remove_data(self.path_data)
if self.path_config.is_dir():
remove_data(self.path_config)

temp_data = Path(tmp.name, "data")
temp_data = Path(temp, "data")
if temp_data.is_dir():
shutil.copytree(temp_data, self.path_data, symlinks=True)
else:
self.path_data.mkdir()

temp_config = Path(tmp.name, "config")
temp_config = Path(temp, "config")
if temp_config.is_dir():
shutil.copytree(temp_config, self.path_config, symlinks=True)
elif self.addon_config_used:
self.path_config.mkdir()

_LOGGER.info("Restoring data and config for addon %s", self.slug)
if self.path_data.is_dir():
await remove_data(self.path_data)
if self.path_config.is_dir():
await remove_data(self.path_config)

try:
await self.sys_run_in_executor(_restore_data)
except shutil.Error as err:
@@ -1489,16 +1432,15 @@ class Addon(AddonModel):
) from err

# Restore AppArmor
profile_file = Path(tmp.name, "apparmor.txt")
if await self.sys_run_in_executor(profile_file.exists):
profile_file = Path(temp, "apparmor.txt")
if profile_file.exists():
try:
await self.sys_host.apparmor.load_profile(
self.slug, profile_file
)
except HostAppArmorError as err:
_LOGGER.error(
"Can't restore AppArmor profile for add-on %s",
self.slug,
"Can't restore AppArmor profile for add-on %s", self.slug
)
raise AddonsError() from err

@@ -1510,8 +1452,7 @@ class Addon(AddonModel):
# Run add-on
if data[ATTR_STATE] == AddonState.STARTED:
wait_for_start = await self.start()
finally:
await self.sys_run_in_executor(tmp.cleanup)

_LOGGER.info("Finished restore for add-on %s", self.slug)
return wait_for_start

@@ -1552,7 +1493,7 @@ class Addon(AddonModel):
except AddonsError as err:
attempts = attempts + 1
_LOGGER.error("Watchdog restart of addon %s failed!", self.name)
await async_capture_exception(err)
capture_exception(err)
else:
break

@@ -1604,6 +1545,6 @@ class Addon(AddonModel):

def refresh_path_cache(self) -> Awaitable[None]:
"""Refresh cache of existing paths."""
if self.is_detached or not self.addon_store:
if self.is_detached:
return super().refresh_path_cache()
return self.addon_store.refresh_path_cache()
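Most of the main-branch changes in this file follow one pattern: blocking filesystem work is wrapped in a local closure and handed to sys_run_in_executor, and the data-layer calls (save_persist, install, update, restore) become awaited coroutines. A minimal self-contained sketch of that pattern, assuming sys_run_in_executor is a thin wrapper over loop.run_in_executor:

    # Sketch of the closure-plus-executor pattern used throughout addon.py.
    import asyncio
    from pathlib import Path

    async def write_options(path: Path, payload: str) -> None:
        """Write a config file without blocking the event loop."""

        def _write() -> None:
            # All blocking I/O stays inside the closure, off the loop thread.
            path.parent.mkdir(parents=True, exist_ok=True)
            path.write_text(payload, encoding="utf-8")

        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, _write)

    # asyncio.run(write_options(Path("/tmp/example/options.json"), "{}"))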
supervisor/addons/build.py
@@ -4,7 +4,7 @@ from __future__ import annotations

from functools import cached_property
from pathlib import Path
from typing import TYPE_CHECKING, Any
from typing import TYPE_CHECKING

from awesomeversion import AwesomeVersion

@@ -23,7 +23,7 @@ from ..utils.common import FileConfiguration, find_one_filetype
from .validate import SCHEMA_BUILD_CONFIG

if TYPE_CHECKING:
from .manager import AnyAddon
from . import AnyAddon


class AddonBuild(FileConfiguration, CoreSysAttributes):
@@ -34,36 +34,23 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
self.coresys: CoreSys = coresys
self.addon = addon

# Search for build file later in executor
super().__init__(None, SCHEMA_BUILD_CONFIG)

def _get_build_file(self) -> Path:
"""Get build file.

Must be run in executor.
"""
try:
return find_one_filetype(
build_file = find_one_filetype(
self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION
)
except ConfigurationFileError:
return self.addon.path_location / "build.json"
build_file = self.addon.path_location / "build.json"

async def read_data(self) -> None:
"""Load data from file."""
if not self._file:
self._file = await self.sys_run_in_executor(self._get_build_file)
super().__init__(build_file, SCHEMA_BUILD_CONFIG)

await super().read_data()

async def save_data(self):
def save_data(self):
"""Ignore save function."""
raise RuntimeError()

@cached_property
def arch(self) -> str:
"""Return arch of the add-on."""
return self.sys_arch.match([self.addon.arch])
return self.sys_arch.match(self.addon.arch)

@property
def base_image(self) -> str:
@@ -81,6 +68,13 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
)
return self._data[ATTR_BUILD_FROM][self.arch]

@property
def dockerfile(self) -> Path:
"""Return Dockerfile path."""
if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
return self.addon.path_location.joinpath("Dockerfile")

@property
def squash(self) -> bool:
"""Return True or False if squash is active."""
@@ -96,40 +90,25 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
"""Return additional Docker labels."""
return self._data[ATTR_LABELS]

def get_dockerfile(self) -> Path:
"""Return Dockerfile path.

Must be run in executor.
"""
if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
return self.addon.path_location.joinpath("Dockerfile")

async def is_valid(self) -> bool:
@property
def is_valid(self) -> bool:
"""Return true if the build env is valid."""

def build_is_valid() -> bool:
try:
return all(
[
self.addon.path_location.is_dir(),
self.get_dockerfile().is_file(),
self.dockerfile.is_file(),
]
)

try:
return await self.sys_run_in_executor(build_is_valid)
except HassioArchNotFound:
return False

def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
"""Create a dict with Docker build arguments.

Must be run in executor.
"""
args: dict[str, Any] = {
"""Create a dict with Docker build arguments."""
args = {
"path": str(self.addon.path_location),
"tag": f"{image or self.addon.image}:{version!s}",
"dockerfile": str(self.get_dockerfile()),
"dockerfile": str(self.dockerfile),
"pull": True,
"forcerm": not self.sys_dev,
"squash": self.squash,
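The dockerfile property (main renames it to a get_dockerfile() method) resolves an architecture-specific Dockerfile with a generic fallback. A standalone sketch of that lookup, lifted directly from the logic in the hunk above:

    # Sketch: per-arch Dockerfile resolution with a generic fallback.
    from pathlib import Path

    def get_dockerfile(location: Path, arch: str) -> Path:
        """Prefer Dockerfile.<arch>, fall back to the generic Dockerfile."""
        arch_specific = location / f"Dockerfile.{arch}"
        if arch_specific.exists():
            return arch_specific
        return location / "Dockerfile"

    # Example (hypothetical path): get_dockerfile(Path("/addons/local/example"), "aarch64")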
supervisor/addons/data.py
@@ -38,7 +38,7 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
"""Return local add-on data."""
return self._data[ATTR_SYSTEM]

async def install(self, addon: AddonStore) -> None:
def install(self, addon: AddonStore) -> None:
"""Set addon as installed."""
self.system[addon.slug] = deepcopy(addon.data)
self.user[addon.slug] = {
@@ -46,28 +46,26 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
ATTR_VERSION: addon.version,
ATTR_IMAGE: addon.image,
}
await self.save_data()
self.save_data()

async def uninstall(self, addon: Addon) -> None:
def uninstall(self, addon: Addon) -> None:
"""Set add-on as uninstalled."""
self.system.pop(addon.slug, None)
self.user.pop(addon.slug, None)
await self.save_data()
self.save_data()

async def update(self, addon: AddonStore) -> None:
def update(self, addon: AddonStore) -> None:
"""Update version of add-on."""
self.system[addon.slug] = deepcopy(addon.data)
self.user[addon.slug].update(
{ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
)
await self.save_data()
self.save_data()

async def restore(
self, slug: str, user: Config, system: Config, image: str
) -> None:
def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
"""Restore data to add-on."""
self.user[slug] = deepcopy(user)
self.system[slug] = deepcopy(system)

self.user[slug][ATTR_IMAGE] = image
await self.save_data()
self.save_data()
supervisor/addons/manager.py
@@ -5,7 +5,7 @@ from collections.abc import Awaitable
from contextlib import suppress
import logging
import tarfile
from typing import Self, Union
from typing import Union

from attr import evolve

@@ -23,7 +23,7 @@ from ..exceptions import (
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils.sentry import async_capture_exception
from ..utils.sentry import capture_exception
from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS
from .data import AddonsData
@@ -74,11 +74,6 @@ class AddonManager(CoreSysAttributes):
return addon
return None

async def load_config(self) -> Self:
"""Load config in executor."""
await self.data.read_data()
return self

async def load(self) -> None:
"""Start up add-on management."""
# Refresh cache for all store addons
@@ -170,7 +165,7 @@ class AddonManager(CoreSysAttributes):
await addon.stop()
except Exception as err:  # pylint: disable=broad-except
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
await async_capture_exception(err)
capture_exception(err)

@Job(
name="addon_manager_install",
@@ -194,7 +189,6 @@ class AddonManager(CoreSysAttributes):

_LOGGER.info("Add-on '%s' successfully installed", slug)

@Job(name="addon_manager_uninstall")
async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
"""Remove an add-on."""
if slug not in self.local:
@@ -314,7 +308,7 @@ class AddonManager(CoreSysAttributes):
if slug not in self.local:
_LOGGER.debug("Add-on %s is not local available for restore", slug)
addon = Addon(self.coresys, slug)
had_ingress: bool | None = False
had_ingress = False
else:
_LOGGER.debug("Add-on %s is local available for restore", slug)
addon = self.local[slug]
@@ -389,7 +383,7 @@ class AddonManager(CoreSysAttributes):
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
await async_capture_exception(err)
capture_exception(err)
else:
add_host_coros.append(
self.sys_plugins.dns.add_host(
supervisor/addons/model.py
@@ -210,6 +210,18 @@ class AddonModel(JobGroup, ABC):
"""Return description of add-on."""
return self.data[ATTR_DESCRIPTON]

@property
def long_description(self) -> str | None:
"""Return README.md as long_description."""
readme = Path(self.path_location, "README.md")

# If readme not exists
if not readme.exists():
return None

# Return data
return readme.read_text(encoding="utf-8")

@property
def repository(self) -> str:
"""Return repository of add-on."""
@@ -294,7 +306,7 @@ class AddonModel(JobGroup, ABC):
return self.data.get(ATTR_WEBUI)

@property
def watchdog_url(self) -> str | None:
def watchdog(self) -> str | None:
"""Return URL to for watchdog or None."""
return self.data.get(ATTR_WATCHDOG)

@@ -606,7 +618,7 @@ class AddonModel(JobGroup, ABC):
return AddonOptions(self.coresys, raw_schema, self.name, self.slug)

@property
def schema_ui(self) -> list[dict[Any, Any]] | None:
def schema_ui(self) -> list[dict[any, any]] | None:
"""Create a UI schema for add-on options."""
raw_schema = self.data[ATTR_SCHEMA]

@@ -634,21 +646,6 @@ class AddonModel(JobGroup, ABC):
"""Return breaking versions of addon."""
return self.data[ATTR_BREAKING_VERSIONS]

async def long_description(self) -> str | None:
"""Return README.md as long_description."""

def read_readme() -> str | None:
readme = Path(self.path_location, "README.md")

# If readme not exists
if not readme.exists():
return None

# Return data
return readme.read_text(encoding="utf-8")

return await self.sys_run_in_executor(read_readme)

def refresh_path_cache(self) -> Awaitable[None]:
"""Refresh cache of existing paths."""
supervisor/addons/options.py
@@ -137,7 +137,7 @@ class AddonOptions(CoreSysAttributes):
) from None

# prepare range
range_args: dict[str, Any] = {}
range_args = {}
for group_name in _SCHEMA_LENGTH_PARTS:
group_value = match.group(group_name)
if group_value:
@@ -390,14 +390,14 @@ class UiOptions(CoreSysAttributes):
multiple: bool = False,
) -> None:
"""UI nested dict items."""
ui_node: dict[str, Any] = {
ui_node = {
"name": key,
"type": "schema",
"optional": True,
"multiple": multiple,
}

nested_schema: list[dict[str, Any]] = []
nested_schema = []
for c_key, c_value in option_dict.items():
# Nested?
if isinstance(c_value, list):
@@ -413,7 +413,7 @@ def _create_device_filter(str_filter: str) -> dict[str, Any]:
"""Generate device Filter."""
raw_filter = dict(value.split("=") for value in str_filter.split(";"))

clean_filter: dict[str, Any] = {}
clean_filter = {}
for key, value in raw_filter.items():
if key == "subsystem":
clean_filter[key] = UdevSubsystem(value)
supervisor/addons/utils.py
@@ -2,9 +2,9 @@

from __future__ import annotations

import asyncio
import logging
from pathlib import Path
import subprocess
from typing import TYPE_CHECKING

from ..const import ROLE_ADMIN, ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE
@@ -86,20 +86,18 @@ def rating_security(addon: AddonModel) -> int:
return max(min(8, rating), 1)


def remove_data(folder: Path) -> None:
"""Remove folder and reset privileged.

Must be run in executor.
"""
async def remove_data(folder: Path) -> None:
"""Remove folder and reset privileged."""
try:
subprocess.run(
["rm", "-rf", str(folder)], stdout=subprocess.DEVNULL, text=True, check=True
proc = await asyncio.create_subprocess_exec(
"rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
)

_, error_msg = await proc.communicate()
except OSError as err:
error_msg = str(err)
except subprocess.CalledProcessError as procerr:
error_msg = procerr.stderr.strip()
else:
return
if proc.returncode == 0:
return

_LOGGER.error("Can't remove Add-on Data: %s", error_msg)
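On the remove_data() change above: main swaps the async rm subprocess for a blocking subprocess.run() with check=True, leaving callers to run it in an executor (see the addon.py hunks). One caveat: procerr.stderr is only populated when stderr is captured, so a faithful standalone version needs stderr=subprocess.PIPE; that argument is an assumption here, as the line shown in this diff does not include it:

    # Sketch of the main-branch remove_data() flow: blocking subprocess.run
    # with check=True, intended to be called via run_in_executor.
    import subprocess
    from pathlib import Path

    def remove_tree(folder: Path) -> None:
        """Blocking recursive delete; call from async code via an executor."""
        try:
            subprocess.run(
                ["rm", "-rf", str(folder)],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.PIPE,  # assumed: needed for procerr.stderr below
                text=True,
                check=True,  # nonzero exit raises CalledProcessError
            )
        except OSError as err:
            print(f"Can't remove add-on data: {err}")
        except subprocess.CalledProcessError as procerr:
            print(f"Can't remove add-on data: {procerr.stderr.strip()}")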
@ -1,17 +1,16 @@
"""Init file for Supervisor RESTful API."""

from dataclasses import dataclass
from functools import partial
import logging
from pathlib import Path
from typing import Any

from aiohttp import hdrs, web
from aiohttp import web

from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
from ..utils.sentry import async_capture_exception
from ..utils.sentry import capture_exception
from .addons import APIAddons
from .audio import APIAudio
from .auth import APIAuth
@ -48,14 +47,6 @@ MAX_CLIENT_SIZE: int = 1024**2 * 16
MAX_LINE_SIZE: int = 24570


@dataclass(slots=True, frozen=True)
class StaticResourceConfig:
"""Configuration for a static resource."""

prefix: str
path: Path


class RestAPI(CoreSysAttributes):
"""Handle RESTful API for Supervisor."""
@ -82,12 +73,12 @@ class RestAPI(CoreSysAttributes):
self._site: web.TCPSite | None = None

# share single host API handler for reuse in logging endpoints
self._api_host: APIHost = APIHost()
self._api_host.coresys = coresys
self._api_host: APIHost | None = None

async def load(self) -> None:
"""Register REST API Calls."""
static_resource_configs: list[StaticResourceConfig] = []
self._api_host = APIHost()
self._api_host.coresys = self.coresys

self._register_addons()
self._register_audio()
@ -107,7 +98,7 @@ class RestAPI(CoreSysAttributes):
self._register_network()
self._register_observer()
self._register_os()
static_resource_configs.extend(self._register_panel())
self._register_panel()
self._register_proxy()
self._register_resolution()
self._register_root()
@ -116,17 +107,6 @@ class RestAPI(CoreSysAttributes):
self._register_store()
self._register_supervisor()

if static_resource_configs:

def process_configs() -> list[web.StaticResource]:
return [
web.StaticResource(config.prefix, config.path)
for config in static_resource_configs
]

for resource in await self.sys_run_in_executor(process_configs):
self.webapp.router.register_resource(resource)

await self.start()

def _register_advanced_logs(self, path: str, syslog_identifier: str):
@ -237,8 +217,6 @@ class RestAPI(CoreSysAttributes):
[
web.get("/os/info", api_os.info),
web.post("/os/update", api_os.update),
web.get("/os/config/swap", api_os.config_swap_info),
web.post("/os/config/swap", api_os.config_swap_options),
web.post("/os/config/sync", api_os.config_sync),
web.post("/os/datadisk/move", api_os.migrate_data),
web.get("/os/datadisk/list", api_os.list_data),
@ -434,7 +412,7 @@ class RestAPI(CoreSysAttributes):
if not isinstance(err, HostNotSupportedError):
# No need to capture HostNotSupportedError to Sentry, the cause
# is known and reported to the user using the resolution center.
await async_capture_exception(err)
capture_exception(err)
kwargs.pop("follow", None) # Follow is not supported for Docker logs
return await api_supervisor.logs(*args, **kwargs)
@ -526,7 +504,7 @@ class RestAPI(CoreSysAttributes):

self.webapp.add_routes(
[
web.get("/addons", api_addons.list_addons),
web.get("/addons", api_addons.list),
web.post("/addons/{addon}/uninstall", api_addons.uninstall),
web.post("/addons/{addon}/start", api_addons.start),
web.post("/addons/{addon}/stop", api_addons.stop),
@ -594,9 +572,7 @@ class RestAPI(CoreSysAttributes):
web.post("/ingress/session", api_ingress.create_session),
web.post("/ingress/validate_session", api_ingress.validate_session),
web.get("/ingress/panels", api_ingress.panels),
web.route(
hdrs.METH_ANY, "/ingress/{token}/{path:.*}", api_ingress.handler
),
web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
]
)
@ -607,7 +583,7 @@ class RestAPI(CoreSysAttributes):

self.webapp.add_routes(
[
web.get("/backups", api_backups.list_backups),
web.get("/backups", api_backups.list),
web.get("/backups/info", api_backups.info),
web.post("/backups/options", api_backups.options),
web.post("/backups/reload", api_backups.reload),
@ -634,7 +610,7 @@ class RestAPI(CoreSysAttributes):

self.webapp.add_routes(
[
web.get("/services", api_services.list_services),
web.get("/services", api_services.list),
web.get("/services/{service}", api_services.get_service),
web.post("/services/{service}", api_services.set_service),
web.delete("/services/{service}", api_services.del_service),
@ -648,7 +624,7 @@ class RestAPI(CoreSysAttributes):

self.webapp.add_routes(
[
web.get("/discovery", api_discovery.list_discovery),
web.get("/discovery", api_discovery.list),
web.get("/discovery/{uuid}", api_discovery.get_discovery),
web.delete("/discovery/{uuid}", api_discovery.del_discovery),
web.post("/discovery", api_discovery.set_discovery),
@ -774,9 +750,10 @@ class RestAPI(CoreSysAttributes):
]
)

def _register_panel(self) -> list[StaticResourceConfig]:
def _register_panel(self) -> None:
"""Register panel for Home Assistant."""
return [StaticResourceConfig("/app", Path(__file__).parent.joinpath("panel"))]
panel_dir = Path(__file__).parent.joinpath("panel")
self.webapp.add_routes([web.static("/app", panel_dir)])

def _register_docker(self) -> None:
"""Register docker configuration functions."""
@ -3,13 +3,14 @@
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any, TypedDict
from typing import Any

from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error

from ..addons.addon import Addon
from ..addons.manager import AnyAddon
from ..addons.utils import rating_security
from ..const import (
ATTR_ADDONS,
@ -62,6 +63,7 @@ from ..const import (
ATTR_MEMORY_LIMIT,
ATTR_MEMORY_PERCENT,
ATTR_MEMORY_USAGE,
ATTR_MESSAGE,
ATTR_NAME,
ATTR_NETWORK,
ATTR_NETWORK_DESCRIPTION,
@ -70,6 +72,7 @@ from ..const import (
ATTR_OPTIONS,
ATTR_PRIVILEGED,
ATTR_PROTECTED,
ATTR_PWNED,
ATTR_RATING,
ATTR_REPOSITORY,
ATTR_SCHEMA,
@ -87,6 +90,7 @@ from ..const import (
ATTR_UPDATE_AVAILABLE,
ATTR_URL,
ATTR_USB,
ATTR_VALID,
ATTR_VERSION,
ATTR_VERSION_LATEST,
ATTR_VIDEO,
@ -142,20 +146,12 @@ SCHEMA_UNINSTALL = vol.Schema(
# pylint: enable=no-value-for-parameter


class OptionsValidateResponse(TypedDict):
"""Response object for options validate."""

message: str
valid: bool
pwned: bool | None


class APIAddons(CoreSysAttributes):
"""Handle RESTful API for add-on functions."""

def get_addon_for_request(self, request: web.Request) -> Addon:
"""Return addon, throw an exception if it doesn't exist."""
addon_slug: str = request.match_info["addon"]
addon_slug: str = request.match_info.get("addon")

# Lookup itself
if addon_slug == "self":
@ -173,7 +169,7 @@ class APIAddons(CoreSysAttributes):
return addon

@api_process
async def list_addons(self, request: web.Request) -> dict[str, Any]:
async def list(self, request: web.Request) -> dict[str, Any]:
"""Return all add-ons or repositories."""
data_addons = [
{
@ -208,7 +204,7 @@ class APIAddons(CoreSysAttributes):

async def info(self, request: web.Request) -> dict[str, Any]:
"""Return add-on information."""
addon: Addon = self.get_addon_for_request(request)
addon: AnyAddon = self.get_addon_for_request(request)

data = {
ATTR_NAME: addon.name,
@ -216,7 +212,7 @@ class APIAddons(CoreSysAttributes):
ATTR_HOSTNAME: addon.hostname,
ATTR_DNS: addon.dns,
ATTR_DESCRIPTON: addon.description,
ATTR_LONG_DESCRIPTION: await addon.long_description(),
ATTR_LONG_DESCRIPTION: addon.long_description,
ATTR_ADVANCED: addon.advanced,
ATTR_STAGE: addon.stage,
ATTR_REPOSITORY: addon.repository,
@ -326,7 +322,7 @@ class APIAddons(CoreSysAttributes):
if ATTR_WATCHDOG in body:
addon.watchdog = body[ATTR_WATCHDOG]

await addon.save_persist()
addon.save_persist()

@api_process
async def sys_options(self, request: web.Request) -> None:
@ -340,13 +336,13 @@ class APIAddons(CoreSysAttributes):
if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]

await addon.save_persist()
addon.save_persist()

@api_process
async def options_validate(self, request: web.Request) -> OptionsValidateResponse:
async def options_validate(self, request: web.Request) -> None:
"""Validate user options for add-on."""
addon = self.get_addon_for_request(request)
data = OptionsValidateResponse(message="", valid=True, pwned=False)
data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}

options = await request.json(loads=json_loads) or addon.options
@ -355,8 +351,8 @@ class APIAddons(CoreSysAttributes):
try:
options_schema.validate(options)
except vol.Invalid as ex:
data["message"] = humanize_error(options, ex)
data["valid"] = False
data[ATTR_MESSAGE] = humanize_error(options, ex)
data[ATTR_VALID] = False

if not self.sys_security.pwned:
return data
@ -367,24 +363,24 @@ class APIAddons(CoreSysAttributes):
await self.sys_security.verify_secret(secret)
continue
except PwnedSecret:
data["pwned"] = True
data[ATTR_PWNED] = True
except PwnedError:
data["pwned"] = None
data[ATTR_PWNED] = None
break

if self.sys_security.force and data["pwned"] in (None, True):
data["valid"] = False
if data["pwned"] is None:
data["message"] = "Error happening on pwned secrets check!"
if self.sys_security.force and data[ATTR_PWNED] in (None, True):
data[ATTR_VALID] = False
if data[ATTR_PWNED] is None:
data[ATTR_MESSAGE] = "Error happening on pwned secrets check!"
else:
data["message"] = "Add-on uses pwned secrets!"
data[ATTR_MESSAGE] = "Add-on uses pwned secrets!"

return data

@api_process
async def options_config(self, request: web.Request) -> None:
"""Validate user options for add-on."""
slug: str = request.match_info["addon"]
slug: str = request.match_info.get("addon")
if slug != "self":
raise APIForbidden("This can be only read by the Add-on itself!")
addon = self.get_addon_for_request(request)
@ -406,7 +402,7 @@ class APIAddons(CoreSysAttributes):
_LOGGER.warning("Changing protected flag for %s!", addon.slug)
addon.protected = body[ATTR_PROTECTED]

await addon.save_persist()
addon.save_persist()

@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
@ -124,7 +124,7 @@ class APIAudio(CoreSysAttributes):
@api_process
async def set_volume(self, request: web.Request) -> None:
"""Set audio volume on stream."""
source: StreamType = StreamType(request.match_info["source"])
source: StreamType = StreamType(request.match_info.get("source"))
application: bool = request.path.endswith("application")
body = await api_validate(SCHEMA_VOLUME, request)
@ -137,7 +137,7 @@ class APIAudio(CoreSysAttributes):
@api_process
async def set_mute(self, request: web.Request) -> None:
"""Mute audio volume on stream."""
source: StreamType = StreamType(request.match_info["source"])
source: StreamType = StreamType(request.match_info.get("source"))
application: bool = request.path.endswith("application")
body = await api_validate(SCHEMA_MUTE, request)
@ -150,7 +150,7 @@ class APIAudio(CoreSysAttributes):
@api_process
async def set_default(self, request: web.Request) -> None:
"""Set audio default stream."""
source: StreamType = StreamType(request.match_info["source"])
source: StreamType = StreamType(request.match_info.get("source"))
body = await api_validate(SCHEMA_DEFAULT, request)

await asyncio.shield(self.sys_host.sound.set_default(source, body[ATTR_NAME]))
@ -1,7 +1,6 @@
"""Init file for Supervisor auth/SSO RESTful API."""

import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
@ -43,7 +42,7 @@ REALM_HEADER: dict[str, str] = {
class APIAuth(CoreSysAttributes):
"""Handle RESTful API for auth functions."""

def _process_basic(self, request: web.Request, addon: Addon) -> Awaitable[bool]:
def _process_basic(self, request: web.Request, addon: Addon) -> bool:
"""Process login request with basic auth.

Return a coroutine.
@ -53,7 +52,7 @@ class APIAuth(CoreSysAttributes):

def _process_dict(
self, request: web.Request, addon: Addon, data: dict[str, str]
) -> Awaitable[bool]:
) -> bool:
"""Process login with dict data.

Return a coroutine.
@ -100,7 +99,7 @@ class APIAuth(CoreSysAttributes):
@api_process
async def cache(self, request: web.Request) -> None:
"""Process cache reset request."""
await self.sys_auth.reset_data()
self.sys_auth.reset_data()

@api_process
async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
@ -5,14 +5,13 @@ from __future__ import annotations
import asyncio
from collections.abc import Callable
import errno
from io import IOBase
import logging
from pathlib import Path
import re
from tempfile import TemporaryDirectory
from typing import Any, cast
from typing import Any

from aiohttp import BodyPartReader, web
from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol
from voluptuous.humanize import humanize_error
@ -36,10 +35,10 @@ from ..const import (
ATTR_LOCATION,
ATTR_NAME,
ATTR_PASSWORD,
ATTR_PATH,
ATTR_PROTECTED,
ATTR_REPOSITORIES,
ATTR_SIZE,
ATTR_SIZE_BYTES,
ATTR_SLUG,
ATTR_SUPERVISOR_VERSION,
ATTR_TIMEOUT,
@ -51,15 +50,15 @@ from ..const import (
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden, APINotFound
from ..jobs import JobSchedulerOptions, SupervisorJob
from ..jobs import JobSchedulerOptions
from ..mounts.const import MountUsage
from ..mounts.mount import Mount
from ..resolution.const import UnhealthyReason
from .const import (
ATTR_ADDITIONAL_LOCATIONS,
ATTR_BACKGROUND,
ATTR_LOCATION_ATTRIBUTES,
ATTR_LOCATIONS,
ATTR_SIZE_BYTES,
CONTENT_TYPE_TAR,
)
from .utils import api_process, api_validate
@ -155,8 +154,8 @@ class APIBackups(CoreSysAttributes):
"""Make location attributes dictionary."""
return {
loc if loc else LOCATION_LOCAL: {
ATTR_PROTECTED: backup.all_locations[loc].protected,
ATTR_SIZE_BYTES: backup.all_locations[loc].size_bytes,
ATTR_PROTECTED: backup.all_locations[loc][ATTR_PROTECTED],
ATTR_SIZE_BYTES: backup.location_size(loc),
}
for loc in backup.locations
}
@ -187,7 +186,7 @@ class APIBackups(CoreSysAttributes):
]

@api_process
async def list_backups(self, request):
async def list(self, request):
"""Return backup list."""
data_backups = self._list_backups()
@ -213,7 +212,7 @@ class APIBackups(CoreSysAttributes):
if ATTR_DAYS_UNTIL_STALE in body:
self.sys_backups.days_until_stale = body[ATTR_DAYS_UNTIL_STALE]

await self.sys_backups.save_data()
self.sys_backups.save_data()

@api_process
async def reload(self, _):
@ -261,7 +260,7 @@ class APIBackups(CoreSysAttributes):
def _location_to_mount(self, location: str | None) -> LOCATION_TYPE:
"""Convert a single location to a mount if possible."""
if not location or location == LOCATION_CLOUD_BACKUP:
return cast(LOCATION_TYPE, location)
return location

mount = self.sys_mounts.get(location)
if mount.usage != MountUsage.BACKUP:
@ -295,11 +294,8 @@ class APIBackups(CoreSysAttributes):
) -> tuple[asyncio.Task, str]:
"""Start backup task in background and return task and job ID."""
event = asyncio.Event()
job, backup_task = cast(
tuple[SupervisorJob, asyncio.Task],
self.sys_jobs.schedule_job(
backup_method, JobSchedulerOptions(), *args, **kwargs
),
job, backup_task = self.sys_jobs.schedule_job(
backup_method, JobSchedulerOptions(), *args, **kwargs
)

async def release_on_freeze(new_state: CoreState):
@ -314,7 +310,10 @@ class APIBackups(CoreSysAttributes):
try:
event_task = self.sys_create_task(event.wait())
_, pending = await asyncio.wait(
(backup_task, event_task),
(
backup_task,
event_task,
),
return_when=asyncio.FIRST_COMPLETED,
)
# It seems backup returned early (error or something), make sure to cancel
@ -458,7 +457,7 @@ class APIBackups(CoreSysAttributes):
else:
self._validate_cloud_backup_location(request, backup.location)

await self.sys_backups.remove(backup, locations=locations)
self.sys_backups.remove(backup, locations=locations)

@api_process
async def download(self, request: web.Request):
@ -473,12 +472,7 @@ class APIBackups(CoreSysAttributes):
raise APIError(f"Backup {backup.slug} is not in location {location}")

_LOGGER.info("Downloading backup %s", backup.slug)
filename = backup.all_locations[location].path
# If the file is missing, return 404 and trigger reload of location
if not await self.sys_run_in_executor(filename.is_file):
self.sys_create_task(self.sys_backups.reload(location))
return web.Response(status=404)

filename = backup.all_locations[location][ATTR_PATH]
response = web.FileResponse(filename)
response.content_type = CONTENT_TYPE_TAR
@ -497,10 +491,8 @@ class APIBackups(CoreSysAttributes):
locations: list[LOCATION_TYPE] | None = None
tmp_path = self.sys_config.path_tmp
if ATTR_LOCATION in request.query:
location_names: list[str] = request.query.getall(ATTR_LOCATION, [])
self._validate_cloud_backup_location(
request, cast(list[str | None], location_names)
)
location_names: list[str] = request.query.getall(ATTR_LOCATION)
self._validate_cloud_backup_location(request, location_names)
# Convert empty string to None if necessary
locations = [
self._location_to_mount(location)
@ -511,7 +503,7 @@ class APIBackups(CoreSysAttributes):
location = locations.pop(0)

if location and location != LOCATION_CLOUD_BACKUP:
tmp_path = cast(Mount, location).local_where
tmp_path = location.local_where

filename: str | None = None
if ATTR_FILENAME in request.query:
@ -521,36 +513,29 @@ class APIBackups(CoreSysAttributes):
except vol.Invalid as ex:
raise APIError(humanize_error(filename, ex)) from None

temp_dir: TemporaryDirectory | None = None
backup_file_stream: IOBase | None = None

def open_backup_file() -> Path:
nonlocal temp_dir, backup_file_stream
temp_dir = TemporaryDirectory(dir=tmp_path.as_posix())
tar_file = Path(temp_dir.name, "backup.tar")
backup_file_stream = tar_file.open("wb")
return tar_file

def close_backup_file() -> None:
if backup_file_stream:
# Make sure it got closed, in case of exception. It is safe to
# close the file stream twice.
backup_file_stream.close()
if temp_dir:
temp_dir.cleanup()

try:
with TemporaryDirectory(dir=tmp_path.as_posix()) as temp_dir:
tar_file = Path(temp_dir, "backup.tar")
reader = await request.multipart()
contents = await reader.next()
if not isinstance(contents, BodyPartReader):
raise APIError("Improperly formatted upload, could not read backup")
try:
with tar_file.open("wb") as backup:
while True:
chunk = await contents.read_chunk()
if not chunk:
break
backup.write(chunk)

tar_file = await self.sys_run_in_executor(open_backup_file)
while chunk := await contents.read_chunk(size=2**16):
await self.sys_run_in_executor(
cast(IOBase, backup_file_stream).write, chunk
)
await self.sys_run_in_executor(cast(IOBase, backup_file_stream).close)
except OSError as err:
if err.errno == errno.EBADMSG and location in {
LOCATION_CLOUD_BACKUP,
None,
}:
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
_LOGGER.error("Can't write new backup file: %s", err)
return False

except asyncio.CancelledError:
return False

backup = await asyncio.shield(
self.sys_backups.import_backup(
@ -560,22 +545,6 @@ class APIBackups(CoreSysAttributes):
additional_locations=locations,
)
)
except OSError as err:
if err.errno == errno.EBADMSG and location in {
LOCATION_CLOUD_BACKUP,
None,
}:
self.sys_resolution.add_unhealthy_reason(
UnhealthyReason.OSERROR_BAD_MESSAGE
)
_LOGGER.error("Can't write new backup file: %s", err)
return False

except asyncio.CancelledError:
return False

finally:
await self.sys_run_in_executor(close_backup_file)

if backup:
return {ATTR_SLUG: backup.slug}
@ -60,6 +60,7 @@ ATTR_REVISION = "revision"
ATTR_SAFE_MODE = "safe_mode"
ATTR_SEAT = "seat"
ATTR_SIGNED = "signed"
ATTR_SIZE_BYTES = "size_bytes"
ATTR_STARTUP_TIME = "startup_time"
ATTR_STATUS = "status"
ATTR_SUBSYSTEM = "subsystem"
@ -80,11 +81,3 @@ class BootSlot(StrEnum):

A = "A"
B = "B"


class DetectBlockingIO(StrEnum):
"""Enable/Disable detection for blocking I/O in event loop."""

OFF = "off"
ON = "on"
ON_AT_STARTUP = "on_at_startup"
@ -1,9 +1,7 @@
"""Init file for Supervisor network RESTful API."""

import logging
from typing import Any, cast

from aiohttp import web
import voluptuous as vol

from ..addons.addon import Addon
@ -18,7 +16,6 @@ from ..const import (
AddonState,
)
from ..coresys import CoreSysAttributes
from ..discovery import Message
from ..exceptions import APIForbidden, APINotFound
from .utils import api_process, api_validate, require_home_assistant
@ -35,16 +32,16 @@ SCHEMA_DISCOVERY = vol.Schema(
class APIDiscovery(CoreSysAttributes):
"""Handle RESTful API for discovery functions."""

def _extract_message(self, request: web.Request) -> Message:
def _extract_message(self, request):
"""Extract discovery message from URL."""
message = self.sys_discovery.get(request.match_info["uuid"])
message = self.sys_discovery.get(request.match_info.get("uuid"))
if not message:
raise APINotFound("Discovery message not found")
return message

@api_process
@require_home_assistant
async def list_discovery(self, request: web.Request) -> dict[str, Any]:
async def list(self, request):
"""Show registered and available services."""
# Get available discovery
discovery = [
@ -55,16 +52,12 @@ class APIDiscovery(CoreSysAttributes):
ATTR_CONFIG: message.config,
}
for message in self.sys_discovery.list_messages
if (
discovered := cast(
Addon, self.sys_addons.get(message.addon, local_only=True)
)
)
and discovered.state == AddonState.STARTED
if (addon := self.sys_addons.get(message.addon, local_only=True))
and addon.state == AddonState.STARTED
]

# Get available services/add-ons
services: dict[str, list[str]] = {}
services = {}
for addon in self.sys_addons.all:
for name in addon.discovery:
services.setdefault(name, []).append(addon.slug)
@ -72,7 +65,7 @@ class APIDiscovery(CoreSysAttributes):
return {ATTR_DISCOVERY: discovery, ATTR_SERVICES: services}

@api_process
async def set_discovery(self, request: web.Request) -> dict[str, str]:
async def set_discovery(self, request):
"""Write data into a discovery pipeline."""
body = await api_validate(SCHEMA_DISCOVERY, request)
addon: Addon = request[REQUEST_FROM]
@ -90,13 +83,13 @@ class APIDiscovery(CoreSysAttributes):
)

# Process discovery message
message = await self.sys_discovery.send(addon, **body)
message = self.sys_discovery.send(addon, **body)

return {ATTR_UUID: message.uuid}

@api_process
@require_home_assistant
async def get_discovery(self, request: web.Request) -> dict[str, Any]:
async def get_discovery(self, request):
"""Read data into a discovery message."""
message = self._extract_message(request)
@ -108,7 +101,7 @@ class APIDiscovery(CoreSysAttributes):
}

@api_process
async def del_discovery(self, request: web.Request) -> None:
async def del_discovery(self, request):
"""Delete data into a discovery message."""
message = self._extract_message(request)
addon = request[REQUEST_FROM]
@ -117,4 +110,5 @@ class APIDiscovery(CoreSysAttributes):
if message.addon != addon.slug:
raise APIForbidden("Can't remove discovery message")

await self.sys_discovery.remove(message)
self.sys_discovery.remove(message)
return True
@ -78,7 +78,7 @@ class APICoreDNS(CoreSysAttributes):
if restart_required:
self.sys_create_task(self.sys_plugins.dns.restart())

await self.sys_plugins.dns.save_data()
self.sys_plugins.dns.save_data()

@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
@ -53,7 +53,7 @@ class APIDocker(CoreSysAttributes):
for hostname, registry in body.items():
self.sys_docker.config.registries[hostname] = registry

await self.sys_docker.config.save_data()
self.sys_docker.config.save_data()

@api_process
async def remove_registry(self, request: web.Request):
@ -63,7 +63,7 @@ class APIDocker(CoreSysAttributes):
raise APINotFound(f"Hostname {hostname} does not exist in registries")

del self.sys_docker.config.registries[hostname]
await self.sys_docker.config.save_data()
self.sys_docker.config.save_data()

@api_process
async def info(self, request: web.Request):
@ -68,10 +68,7 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
ATTR_NAME: fs_block.id_label,
ATTR_SYSTEM: fs_block.hint_system,
ATTR_MOUNT_POINTS: [
str(mount_point)
for mount_point in (
fs_block.filesystem.mount_points if fs_block.filesystem else []
)
str(mount_point) for mount_point in fs_block.filesystem.mount_points
],
}
@ -118,7 +118,7 @@ class APIHomeAssistant(CoreSysAttributes):
body = await api_validate(SCHEMA_OPTIONS, request)

if ATTR_IMAGE in body:
self.sys_homeassistant.set_image(body[ATTR_IMAGE])
self.sys_homeassistant.image = body[ATTR_IMAGE]
self.sys_homeassistant.override_image = (
self.sys_homeassistant.image != self.sys_homeassistant.default_image
)
@ -149,7 +149,7 @@ class APIHomeAssistant(CoreSysAttributes):
ATTR_BACKUPS_EXCLUDE_DATABASE
]

await self.sys_homeassistant.save_data()
self.sys_homeassistant.save_data()

@api_process
async def stats(self, request: web.Request) -> dict[Any, str]:
@ -3,7 +3,6 @@
import asyncio
from contextlib import suppress
import logging
from typing import Any

from aiohttp import ClientConnectionResetError, web
from aiohttp.hdrs import ACCEPT, RANGE
@ -99,10 +98,10 @@ class APIHost(CoreSysAttributes):
ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
ATTR_CPE: self.sys_host.info.cpe,
ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_DISK_FREE: await self.sys_host.info.free_space(),
ATTR_DISK_TOTAL: await self.sys_host.info.total_space(),
ATTR_DISK_USED: await self.sys_host.info.used_space(),
ATTR_DISK_LIFE_TIME: await self.sys_host.info.disk_life_time(),
ATTR_DISK_FREE: self.sys_host.info.free_space,
ATTR_DISK_TOTAL: self.sys_host.info.total_space,
ATTR_DISK_USED: self.sys_host.info.used_space,
ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
ATTR_FEATURES: self.sys_host.features,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname,
@ -196,18 +195,20 @@ class APIHost(CoreSysAttributes):
) -> web.StreamResponse:
"""Return systemd-journald logs."""
log_formatter = LogFormatter.PLAIN
params: dict[str, Any] = {}
params = {}
if identifier:
params[PARAM_SYSLOG_IDENTIFIER] = identifier
elif IDENTIFIER in request.match_info:
params[PARAM_SYSLOG_IDENTIFIER] = request.match_info[IDENTIFIER]
params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
else:
params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
# host logs should be always verbose, no matter what Accept header is used
log_formatter = LogFormatter.VERBOSE

if BOOTID in request.match_info:
params[PARAM_BOOT_ID] = await self._get_boot_id(request.match_info[BOOTID])
params[PARAM_BOOT_ID] = await self._get_boot_id(
request.match_info.get(BOOTID)
)
if follow:
params[PARAM_FOLLOW] = ""
@ -240,7 +241,7 @@ class APIHost(CoreSysAttributes):
# entries=cursor[[:num_skip]:num_entries]
range_header = f"entries=:-{lines - 1}:{'' if follow else lines}"
elif RANGE in request.headers:
range_header = request.headers[RANGE]
range_header = request.headers.get(RANGE)
else:
range_header = (
f"entries=:-{DEFAULT_LINES - 1}:{'' if follow else DEFAULT_LINES}"
@ -254,22 +255,16 @@ class APIHost(CoreSysAttributes):
response.content_type = CONTENT_TYPE_TEXT
headers_returned = False
async for cursor, line in journal_logs_reader(resp, log_formatter):
try:
if not headers_returned:
if cursor:
response.headers["X-First-Cursor"] = cursor
response.headers["X-Accel-Buffering"] = "no"
await response.prepare(request)
headers_returned = True
if not headers_returned:
if cursor:
response.headers["X-First-Cursor"] = cursor
response.headers["X-Accel-Buffering"] = "no"
await response.prepare(request)
headers_returned = True
# When client closes the connection while reading busy logs, we
# sometimes get this exception. It should be safe to ignore it.
with suppress(ClientConnectionResetError):
await response.write(line.encode("utf-8") + b"\n")
except ClientConnectionResetError as err:
# When client closes the connection while reading busy logs, we
# sometimes get this exception. It should be safe to ignore it.
_LOGGER.debug(
"ClientConnectionResetError raised when returning journal logs: %s",
err,
)
break
except ConnectionResetError as ex:
raise APIError(
"Connection reset when trying to fetch data from systemd-journald."
@ -83,7 +83,7 @@ class APIIngress(CoreSysAttributes):

def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception it it doesn't exist."""
token = request.match_info["token"]
token = request.match_info.get("token")

# Find correct add-on
addon = self.sys_ingress.get(token)
@ -132,7 +132,7 @@ class APIIngress(CoreSysAttributes):

@api_process
@require_home_assistant
async def validate_session(self, request: web.Request) -> None:
async def validate_session(self, request: web.Request) -> dict[str, Any]:
"""Validate session and extending how long it's valid for."""
data = await api_validate(VALIDATE_SESSION_DATA, request)
@ -147,14 +147,14 @@ class APIIngress(CoreSysAttributes):
"""Route data to Supervisor ingress service."""

# Check Ingress Session
session = request.cookies.get(COOKIE_INGRESS, "")
session = request.cookies.get(COOKIE_INGRESS)
if not self.sys_ingress.validate_session(session):
_LOGGER.warning("No valid ingress session %s", session)
raise HTTPUnauthorized()

# Process requests
addon = self._extract_addon(request)
path = request.match_info.get("path", "")
path = request.match_info.get("path")
session_data = self.sys_ingress.get_session_data(session)
try:
# Websocket
@ -183,7 +183,7 @@ class APIIngress(CoreSysAttributes):
for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
]
else:
req_protocols = []
req_protocols = ()

ws_server = web.WebSocketResponse(
protocols=req_protocols, autoclose=False, autoping=False
@ -279,7 +279,7 @@ class APIIngress(CoreSysAttributes):
try:
response.headers["X-Accel-Buffering"] = "no"
await response.prepare(request)
async for data, _ in result.content.iter_chunks():
async for data in result.content.iter_chunked(4096):
await response.write(data)

except (
@ -340,10 +340,9 @@ def _init_header(
headers[name] = value

# Update X-Forwarded-For
if request.transport:
forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"
forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"

return headers
@ -26,7 +26,7 @@ class APIJobs(CoreSysAttributes):
def _extract_job(self, request: web.Request) -> SupervisorJob:
"""Extract job from request or raise."""
try:
return self.sys_jobs.get_job(request.match_info["uuid"])
return self.sys_jobs.get_job(request.match_info.get("uuid"))
except JobNotFound:
raise APINotFound("Job does not exist") from None
@ -71,10 +71,7 @@ class APIJobs(CoreSysAttributes):

if current_job.uuid in jobs_by_parent:
queue.extend(
[
(child_jobs, job)
for job in jobs_by_parent.get(current_job.uuid, [])
]
[(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)]
)

return job_list
@ -95,14 +92,14 @@ class APIJobs(CoreSysAttributes):
if ATTR_IGNORE_CONDITIONS in body:
self.sys_jobs.ignore_conditions = body[ATTR_IGNORE_CONDITIONS]

await self.sys_jobs.save_data()
self.sys_jobs.save_data()

await self.sys_resolution.evaluate.evaluate_system()

@api_process
async def reset(self, request: web.Request) -> None:
"""Reset options for JobManager."""
await self.sys_jobs.reset_data()
self.sys_jobs.reset_data()

@api_process
async def job_info(self, request: web.Request) -> dict[str, Any]:
@ -1,12 +1,11 @@
"""Handle security part of this API."""

from collections.abc import Callable
import logging
import re
from typing import Final
from urllib.parse import unquote

from aiohttp.web import Request, Response, middleware
from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
from awesomeversion import AwesomeVersion
@ -24,7 +23,7 @@ from ...const import (
)
from ...coresys import CoreSys, CoreSysAttributes
from ...utils import version_is_new_enough
from ..utils import api_return_error, extract_supervisor_token
from ..utils import api_return_error, excract_supervisor_token

_LOGGER: logging.Logger = logging.getLogger(__name__)
_CORE_VERSION: Final = AwesomeVersion("2023.3.4")
@ -180,7 +179,9 @@ class SecurityMiddleware(CoreSysAttributes):
return unquoted

@middleware
async def block_bad_requests(self, request: Request, handler: Callable) -> Response:
async def block_bad_requests(
self, request: Request, handler: RequestHandler
) -> Response:
"""Process request and tblock commonly known exploit attempts."""
if FILTERS.search(self._recursive_unquote(request.path)):
_LOGGER.warning(
@ -198,7 +199,9 @@ class SecurityMiddleware(CoreSysAttributes):
return await handler(request)

@middleware
async def system_validation(self, request: Request, handler: Callable) -> Response:
async def system_validation(
self, request: Request, handler: RequestHandler
) -> Response:
"""Check if core is ready to response."""
if self.sys_core.state not in (
CoreState.STARTUP,
@ -212,10 +215,12 @@ class SecurityMiddleware(CoreSysAttributes):
return await handler(request)

@middleware
async def token_validation(self, request: Request, handler: Callable) -> Response:
async def token_validation(
self, request: Request, handler: RequestHandler
) -> Response:
"""Check security access of this layer."""
request_from: CoreSysAttributes | None = None
supervisor_token = extract_supervisor_token(request)
request_from = None
supervisor_token = excract_supervisor_token(request)

# Blacklist
if BLACKLIST.match(request.path):
@ -283,7 +288,7 @@ class SecurityMiddleware(CoreSysAttributes):
raise HTTPForbidden()

@middleware
async def core_proxy(self, request: Request, handler: Callable) -> Response:
async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
"""Validate user from Core API proxy."""
if (
request[REQUEST_FROM] != self.sys_homeassistant
@ -1,6 +1,6 @@
"""Inits file for supervisor mounts REST API."""

from typing import Any, cast
from typing import Any

from aiohttp import web
import voluptuous as vol
@ -10,7 +10,7 @@ from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound
from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
from ..mounts.mount import Mount
from ..mounts.validate import SCHEMA_MOUNT_CONFIG, MountData
from ..mounts.validate import SCHEMA_MOUNT_CONFIG
from .const import ATTR_MOUNTS, ATTR_USER_PATH
from .utils import api_process, api_validate
@ -26,7 +26,7 @@ class APIMounts(CoreSysAttributes):

def _extract_mount(self, request: web.Request) -> Mount:
"""Extract mount from request or raise."""
name = request.match_info["mount"]
name = request.match_info.get("mount")
if name not in self.sys_mounts:
raise APINotFound(f"No mount exists with name {name}")
return self.sys_mounts.get(name)
@ -66,15 +66,15 @@ class APIMounts(CoreSysAttributes):
else:
self.sys_mounts.default_backup_mount = mount

await self.sys_mounts.save_data()
self.sys_mounts.save_data()

@api_process
async def create_mount(self, request: web.Request) -> None:
"""Create a new mount in supervisor."""
body = cast(MountData, await api_validate(SCHEMA_MOUNT_CONFIG, request))
body = await api_validate(SCHEMA_MOUNT_CONFIG, request)

if body["name"] in self.sys_mounts:
raise APIError(f"A mount already exists with name {body['name']}")
if body[ATTR_NAME] in self.sys_mounts:
raise APIError(f"A mount already exists with name {body[ATTR_NAME]}")

mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount)
@ -87,7 +87,7 @@ class APIMounts(CoreSysAttributes):
if not self.sys_mounts.default_backup_mount:
self.sys_mounts.default_backup_mount = mount

await self.sys_mounts.save_data()
self.sys_mounts.save_data()

@api_process
async def update_mount(self, request: web.Request) -> None:
@ -97,10 +97,7 @@ class APIMounts(CoreSysAttributes):
{vol.Optional(ATTR_NAME, default=current.name): current.name},
extra=vol.ALLOW_EXTRA,
)
body = cast(
MountData,
await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request),
)
body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)

mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount)
@ -113,7 +110,7 @@ class APIMounts(CoreSysAttributes):
elif self.sys_mounts.default_backup_mount == mount:
self.sys_mounts.default_backup_mount = None

await self.sys_mounts.save_data()
self.sys_mounts.save_data()

@api_process
async def delete_mount(self, request: web.Request) -> None:
@ -125,7 +122,7 @@ class APIMounts(CoreSysAttributes):
if mount.usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload())

await self.sys_mounts.save_data()
self.sys_mounts.save_data()

@api_process
async def reload_mount(self, request: web.Request) -> None:
@ -132,12 +132,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
ATTR_CONNECTED: interface.connected,
ATTR_PRIMARY: interface.primary,
ATTR_MAC: interface.mac,
ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting)
if interface.ipv4 and interface.ipv4setting
else None,
ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting)
if interface.ipv6 and interface.ipv6setting
else None,
ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting),
ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting),
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
}
@ -194,14 +190,14 @@ class APINetwork(CoreSysAttributes):
@api_process
async def interface_info(self, request: web.Request) -> dict[str, Any]:
"""Return network information for a interface."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE])
interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))

return interface_struct(interface)

@api_process
async def interface_update(self, request: web.Request) -> None:
"""Update the configuration of an interface."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE])
interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))

# Validate data
body = await api_validate(SCHEMA_UPDATE, request)
@ -247,7 +243,7 @@ class APINetwork(CoreSysAttributes):
@api_process
async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]:
"""Scan and return a list of available networks."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE])
interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))

# Only wlan is supported
if interface.type != InterfaceType.WIRELESS:
@ -260,10 +256,8 @@ class APINetwork(CoreSysAttributes):
@api_process
async def create_vlan(self, request: web.Request) -> None:
"""Create a new vlan."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE])
vlan = int(request.match_info.get(ATTR_VLAN, -1))
if vlan < 0:
raise APIError(f"Invalid vlan specified: {vlan}")
interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
vlan = int(request.match_info.get(ATTR_VLAN))

# Only ethernet is supported
if interface.type != InterfaceType.ETHERNET:
@ -3,7 +3,6 @@
import asyncio
from collections.abc import Awaitable
import logging
import re
from typing import Any

from aiohttp import web
@ -22,14 +21,12 @@ from ..const import (
ATTR_SERIAL,
ATTR_SIZE,
ATTR_STATE,
ATTR_SWAP_SIZE,
ATTR_SWAPPINESS,
ATTR_UPDATE_AVAILABLE,
ATTR_VERSION,
ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APINotFound, BoardInvalidError
from ..exceptions import BoardInvalidError
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..validate import version_tag
from .const import (
@ -68,15 +65,6 @@ SCHEMA_GREEN_OPTIONS = vol.Schema(
vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
}
)

RE_SWAP_SIZE = re.compile(r"^\d+([KMG](i?B)?|B)?$", re.IGNORECASE)

SCHEMA_SWAP_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_SWAP_SIZE): vol.Match(RE_SWAP_SIZE),
vol.Optional(ATTR_SWAPPINESS): vol.All(int, vol.Range(min=0, max=200)),
}
)
# pylint: enable=no-value-for-parameter
@ -181,7 +169,7 @@ class APIOS(CoreSysAttributes):
body[ATTR_SYSTEM_HEALTH_LED]
)

await self.sys_dbus.agent.board.green.save_data()
self.sys_dbus.agent.board.green.save_data()

@api_process
async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
@ -208,7 +196,7 @@ class APIOS(CoreSysAttributes):
if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])

await self.sys_dbus.agent.board.yellow.save_data()
self.sys_dbus.agent.board.yellow.save_data()
self.sys_resolution.create_issue(
IssueType.REBOOT_REQUIRED,
ContextType.SYSTEM,
@ -224,53 +212,3 @@ class APIOS(CoreSysAttributes):
)

return {}

@api_process
async def config_swap_info(self, request: web.Request) -> dict[str, Any]:
"""Get swap settings."""
if (
not self.coresys.os.available
or not self.coresys.os.version
or self.coresys.os.version < "15.0"
):
raise APINotFound(
"Home Assistant OS 15.0 or newer required for swap settings"
)

return {
ATTR_SWAP_SIZE: self.sys_dbus.agent.swap.swap_size,
ATTR_SWAPPINESS: self.sys_dbus.agent.swap.swappiness,
}

@api_process
async def config_swap_options(self, request: web.Request) -> None:
"""Update swap settings."""
if (
not self.coresys.os.available
or not self.coresys.os.version
or self.coresys.os.version < "15.0"
):
raise APINotFound(
"Home Assistant OS 15.0 or newer required for swap settings"
)

body = await api_validate(SCHEMA_SWAP_OPTIONS, request)

reboot_required = False

if ATTR_SWAP_SIZE in body:
old_size = self.sys_dbus.agent.swap.swap_size
await self.sys_dbus.agent.swap.set_swap_size(body[ATTR_SWAP_SIZE])
reboot_required = reboot_required or old_size != body[ATTR_SWAP_SIZE]

if ATTR_SWAPPINESS in body:
old_swappiness = self.sys_dbus.agent.swap.swappiness
await self.sys_dbus.agent.swap.set_swappiness(body[ATTR_SWAPPINESS])
reboot_required = reboot_required or old_swappiness != body[ATTR_SWAPPINESS]

if reboot_required:
self.sys_resolution.create_issue(
IssueType.REBOOT_REQUIRED,
ContextType.SYSTEM,
suggestions=[SuggestionType.EXECUTE_REBOOT],
)
@ -1 +1 @@
!function(){function d(d){var e=document.createElement("script");e.src=d,document.body.appendChild(e)}if(/Edge?\/(12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Firefox\/(12[5-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Chrom(ium|e)\/(109|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|(Maci|X1{2}).+ Version\/(17\.([5-9]|\d{2,})|(1[89]|[2-9]\d|\d{3,})\.\d+)([,.]\d+|)( \(\w+\)|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(1{2}\d|1[2-9]\d|[2-9]\d{2}|\d{4,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(15[._]([6-9]|\d{2,})|(1[6-9]|[2-9]\d|\d{3,})[._]\d+)([._]\d+|)|Android:?[ /-](12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})(\.\d+|)(\.\d+|)|Mobile Safari.+OPR\/([89]\d|\d{3,})\.\d+\.\d+|Android.+Firefox\/(12[5-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+Chrom(ium|e)\/(12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|SamsungBrowser\/(2[5-9]|[3-9]\d|\d{3,})\.\d+|Home As{2}istant\/[\d.]+ \(.+; macOS (1[2-9]|[2-9]\d|\d{3,})\.\d+(\.\d+)?\)/.test(navigator.userAgent))try{new Function("import('/api/hassio/app/frontend_latest/entrypoint.35399ae87c70acf8.js')")()}catch(e){d("/api/hassio/app/frontend_es5/entrypoint.476bfed22da63267.js")}else d("/api/hassio/app/frontend_es5/entrypoint.476bfed22da63267.js")}()
!function(){function d(d){var e=document.createElement("script");e.src=d,document.body.appendChild(e)}if(/Edge?\/(12[1-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Firefox\/(1{2}[5-9]|1[2-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Chrom(ium|e)\/(109|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|(Maci|X1{2}).+ Version\/(17\.([3-9]|\d{2,})|(1[89]|[2-9]\d|\d{3,})\.\d+)([,.]\d+|)( \(\w+\)|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(10[6-9]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(15[._]([6-9]|\d{2,})|(1[6-9]|[2-9]\d|\d{3,})[._]\d+)([._]\d+|)|Android:?[ /-](12[1-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})(\.\d+|)(\.\d+|)|Mobile Safari.+OPR\/([89]\d|\d{3,})\.\d+\.\d+|Android.+Firefox\/(12[1-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+Chrom(ium|e)\/(12[1-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|SamsungBrowser\/(2[4-9]|[3-9]\d|\d{3,})\.\d+|Home As{2}istant\/[\d.]+ \(.+; macOS (1[2-9]|[2-9]\d|\d{3,})\.\d+(\.\d+)?\)/.test(navigator.userAgent))try{new Function("import('/api/hassio/app/frontend_latest/entrypoint.553a7707b827808b.js')")()}catch(e){d("/api/hassio/app/frontend_es5/entrypoint.ff48ee24e0742761.js")}else d("/api/hassio/app/frontend_es5/entrypoint.ff48ee24e0742761.js")}()
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
@ -1 +0,0 @@
{"version":3,"file":"1081.91949d686e61cc12.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250401.0/src/components/ha-button-toggle-group.ts","https://raw.githubusercontent.com/home-assistant/frontend/20250401.0/src/components/ha-selector/ha-selector-button-toggle.ts"],"names":["_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","queryAll","html","_t","_","this","buttons","map","button","iconPath","_t2","label","active","_handleClick","_t3","styleMap","width","fullWidth","length","dense","_this$_buttons","_buttons","forEach","async","updateComplete","shadowRoot","querySelector","style","margin","ev","currentTarget","fireEvent","static","css","_t4","LitElement","HaButtonToggleSelector","_this$selector$button","_this$selector$button2","_this$selector$button3","options","selector","button_toggle","option","translationKey","translation_key","localizeValue","localizedLabel","sort","a","b","caseInsensitiveStringCompare","hass","locale","language","toggleButtons","item","_valueChanged","_ev$detail","_this$value","stopPropagation","detail","target","disabled","undefined"],"mappings":"qXAWgCA,EAAAA,EAAAA,GAAA,EAD/BC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAkIvC,OAAAC,EAlID,cACgCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC7BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,UAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEC,UAAW,aAAcG,KAAMC,WAAUH,IAAA,YAAAC,KAAAA,GAAA,OAClC,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEvBC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,QAAAC,KAAAA,GAAA,OAAgB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEhDO,EAAAA,EAAAA,IAAS,eAAaJ,IAAA,WAAAC,WAAA,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEvB,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,uBAELC,KAAKC,QAAQC,KAAKC,GAClBA,EAAOC,UACHP,EAAAA,EAAAA,IAAIQ,IAAAA,EAAAN,CAAA,2GACOI,EAAOG,MACRH,EAAOC,SACND,EAAOV,MACNO,KAAKO,SAAWJ,EAAOV,MACxBO,KAAKQ,eAEhBX,EAAAA,EAAAA,IAAIY,IAAAA,EAAAV,CAAA,iHACMW,EAAAA,EAAAA,GAAS,CACfC,MAAOX,KAAKY,UACL,IAAMZ,KAAKC,QAAQY,OAAtB,IACA,YAGGb,KAAKc,MACLX,EAAOV,MACNO,KAAKO,SAAWJ,EAAOV,MACxBO,KAAKQ,aACXL,EAAOG,SAKxB,GAAC,CAAAlB,KAAA,SAAAI,IAAA,UAAAC,MAED,WAAoB,IAAAsB,EAEL,QAAbA,EAAAf,KAAKgB,gBAAQ,IAAAD,GAAbA,EAAeE,SAAQC,gBACff,EAAOgB,eAEXhB,EAAOiB,WAAYC,cAAc,UACjCC,MAAMC,OAAS,GAAG,GAExB,GAAC,CAAAnC,KAAA,SAAAI,IAAA,eAAAC,MAED,SAAqB+B,GACnBxB,KAAKO,OAASiB,EAAGC,cAAchC,OAC/BiC,EAAAA,EAAAA,GAAU1B,KAAM,gBAAiB,CAAEP,MAAOO,KAAKO,QACjD,GAAC,CAAAnB,KAAA,QAAAuC,QAAA,EAAAnC,IAAA,SAAAC,KAAAA,GAAA,OAEemC,EAAAA,EAAAA,IAAGC,IAAAA,EAAA9B,CAAA,u0CAzDoB+B,EAAAA,I,MCD5BC,GAAsBnD,EAAAA,EAAAA,GAAA,EADlCC,EAAAA,EAAAA,IAAc,+BAA4B,SAAAC,EAAAC,GA4F1C,OAAAC,EA5FD,cACmCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAChCC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,gBAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAG9BC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAuC,EAAAC,EAAAC,EACjB,MAAMC,GACuB,QAA3BH,EAAAhC,KAAKoC,SAASC,qBAAa,IAAAL,GAAS,QAATA
,EAA3BA,EAA6BG,eAAO,IAAAH,OAAA,EAApCA,EAAsC9B,KAAKoC,GACvB,iBAAXA,EACFA,EACA,CAAE7C,MAAO6C,EAAQhC,MAAOgC,OAC1B,GAEDC,EAA4C,QAA9BN,EAAGjC,KAAKoC,SAASC,qBAAa,IAAAJ,OAAA,EAA3BA,EAA6BO,gBAEhDxC,KAAKyC,eAAiBF,GACxBJ,EAAQlB,SAASqB,IACf,MAAMI,EAAiB1C,KAAKyC,cAC1B,GAAGF,aAA0BD,EAAO7C,SAElCiD,IACFJ,EAAOhC,MAAQoC,EACjB,IAI2B,QAA/BR,EAAIlC,KAAKoC,SAASC,qBAAa,IAAAH,GAA3BA,EAA6BS,MAC/BR,EAAQQ,MAAK,CAACC,EAAGC,KACfC,EAAAA,EAAAA,IACEF,EAAEtC,MACFuC,EAAEvC,MACFN,KAAK+C,KAAKC,OAAOC,YAKvB,MAAMC,EAAgCf,EAAQjC,KAAKiD,IAAkB,CACnE7C,MAAO6C,EAAK7C,MACZb,MAAO0D,EAAK1D,UAGd,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,iHACPC,KAAKM,MAEM4C,EACDlD,KAAKP,MACEO,KAAKoD,cAG5B,GAAC,CAAAhE,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsB+B,GAAI,IAAA6B,EAAAC,EACxB9B,EAAG+B,kBAEH,MAAM9D,GAAiB,QAAT4D,EAAA7B,EAAGgC,cAAM,IAAAH,OAAA,EAATA,EAAW5D,QAAS+B,EAAGiC,OAAOhE,MACxCO,KAAK0D,eAAsBC,IAAVlE,GAAuBA,KAAqB,QAAhB6D,EAAMtD,KAAKP,aAAK,IAAA6D,EAAAA,EAAI,MAGrE5B,EAAAA,EAAAA,GAAU1B,KAAM,gBAAiB,CAC/BP,MAAOA,GAEX,GAAC,CAAAL,KAAA,QAAAuC,QAAA,EAAAnC,IAAA,SAAAC,KAAAA,GAAA,OAEemC,EAAAA,EAAAA,IAAGvB,IAAAA,EAAAN,CAAA,wLA5EuB+B,EAAAA,G"}
2
supervisor/api/panel/frontend_es5/12.cd76ff0e6ff4d214.js
Normal file
@ -0,0 +1,2 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["12"],{5739:function(e,a,t){t.a(e,(async function(e,i){try{t.r(a),t.d(a,{HaNavigationSelector:function(){return b}});var n,r=t(63038),d=t(27862),o=t(52565),l=t(92776),u=t(5776),s=t(21475),c=(t(38419),t(57243)),h=t(50778),v=t(36522),k=t(63297),f=e([k]);k=(f.then?(await f)():f)[0];var b=(0,s.Z)([(0,h.Mo)("ha-selector-navigation")],(function(e,a){var t=function(a){function t(){var a;(0,o.Z)(this,t);for(var i=arguments.length,n=new Array(i),r=0;r<i;r++)n[r]=arguments[r];return a=(0,l.Z)(this,t,[].concat(n)),e(a),a}return(0,u.Z)(t,a),(0,d.Z)(t)}(a);return{F:t,d:[{kind:"field",decorators:[(0,h.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,h.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,h.Cb)()],key:"value",value:void 0},{kind:"field",decorators:[(0,h.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,h.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,h.Cb)({type:Boolean,reflect:!0})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,h.Cb)({type:Boolean})],key:"required",value:function(){return!0}},{kind:"method",key:"render",value:function(){return(0,c.dy)(n||(n=(0,r.Z)([' <ha-navigation-picker .hass="','" .label="','" .value="','" .required="','" .disabled="','" .helper="','" @value-changed="','"></ha-navigation-picker> '])),this.hass,this.label,this.value,this.required,this.disabled,this.helper,this._valueChanged)}},{kind:"method",key:"_valueChanged",value:function(e){(0,v.B)(this,"value-changed",{value:e.detail.value})}}]}}),c.oi);i()}catch(y){i(y)}}))}}]);
//# sourceMappingURL=12.cd76ff0e6ff4d214.js.map
BIN
supervisor/api/panel/frontend_es5/12.cd76ff0e6ff4d214.js.gz
Normal file
Binary file not shown.
@ -0,0 +1 @@
{"version":3,"file":"12.cd76ff0e6ff4d214.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20241127.8/src/components/ha-selector/ha-selector-navigation.ts"],"names":["HaNavigationSelector","_decorate","customElement","_initialize","_LitElement","_LitElement2","_this","_classCallCheck","_len","arguments","length","args","Array","_key","_callSuper","concat","_inherits","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","reflect","html","_templateObject","_taggedTemplateLiteral","this","hass","label","required","disabled","helper","_valueChanged","ev","fireEvent","detail","LitElement"],"mappings":"oYAOA,IACaA,GAAoBC,EAAAA,EAAAA,GAAA,EADhCC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAAA,IAC3BJ,EAAoB,SAAAK,GAAA,SAAAL,IAAA,IAAAM,GAAAC,EAAAA,EAAAA,GAAA,KAAAP,GAAA,QAAAQ,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,GAAAQ,EAAAA,EAAAA,GAAA,KAAAd,EAAA,GAAAe,OAAAJ,IAAAR,EAAAG,GAAAA,CAAA,QAAAU,EAAAA,EAAAA,GAAAhB,EAAAK,IAAAY,EAAAA,EAAAA,GAAAjB,EAAA,EAAAI,GAAA,OAAAc,EAApBlB,EAAoBmB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,QAASC,SAAS,KAAOJ,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAElEC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,+JAECC,KAAKC,KACJD,KAAKE,MACLF,KAAKP,MACFO,KAAKG,SACLH,KAAKI,SACPJ,KAAKK,OACEL,KAAKM,cAG5B,GAAC,CAAAlB,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsBc,IACpBC,EAAAA,EAAAA,GAAUR,KAAM,gBAAiB,CAAEP,MAAOc,EAAGE,OAAOhB,OACtD,IAAC,GA/BuCiB,EAAAA,I"}
@ -1,2 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["12"],{5739:function(e,a,t){t.a(e,(async function(e,i){try{t.r(a),t.d(a,{HaNavigationSelector:()=>c});var d=t(73577),r=(t(71695),t(47021),t(57243)),n=t(50778),l=t(36522),o=t(63297),s=e([o]);o=(s.then?(await s)():s)[0];let u,h=e=>e,c=(0,d.Z)([(0,n.Mo)("ha-selector-navigation")],(function(e,a){return{F:class extends a{constructor(...a){super(...a),e(this)}},d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"value",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,n.Cb)({type:Boolean,reflect:!0})],key:"disabled",value(){return!1}},{kind:"field",decorators:[(0,n.Cb)({type:Boolean})],key:"required",value(){return!0}},{kind:"method",key:"render",value:function(){return(0,r.dy)(u||(u=h` <ha-navigation-picker .hass="${0}" .label="${0}" .value="${0}" .required="${0}" .disabled="${0}" .helper="${0}" @value-changed="${0}"></ha-navigation-picker> `),this.hass,this.label,this.value,this.required,this.disabled,this.helper,this._valueChanged)}},{kind:"method",key:"_valueChanged",value:function(e){(0,l.B)(this,"value-changed",{value:e.detail.value})}}]}}),r.oi);i()}catch(u){i(u)}}))}}]);
//# sourceMappingURL=12.ffa1bdc0a98802fa.js.map
Binary file not shown.
Binary file not shown.
@ -1 +0,0 @@
{"version":3,"file":"12.ffa1bdc0a98802fa.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250401.0/src/components/ha-selector/ha-selector-navigation.ts"],"names":["HaNavigationSelector","_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","reflect","html","_t","_","this","hass","label","required","disabled","helper","_valueChanged","ev","fireEvent","detail","LitElement"],"mappings":"mVAQaA,GAAoBC,EAAAA,EAAAA,GAAA,EADhCC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAiCvC,OAAAC,EAjCD,cACiCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,QAASC,SAAS,KAAOJ,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAElEC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,mKAECC,KAAKC,KACJD,KAAKE,MACLF,KAAKP,MACFO,KAAKG,SACLH,KAAKI,SACPJ,KAAKK,OACEL,KAAKM,cAG5B,GAAC,CAAAlB,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsBc,IACpBC,EAAAA,EAAAA,GAAUR,KAAM,gBAAiB,CAAEP,MAAOc,EAAGE,OAAOhB,OACtD,IAAC,GA/BuCiB,EAAAA,I"}
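For orientation, the `names` and `mappings` in the map above are enough to recover the shape of the original component. A rough reconstruction follows (not the verbatim upstream file; the `fireEvent` helper is a stand-in defined locally for the sketch):

```ts
// Rough reconstruction of ha-selector-navigation.ts from the map's `names`
// and `mappings` -- for orientation only, not the verbatim upstream source.
import { html, LitElement } from "lit";
import { customElement, property } from "lit/decorators.js";

// Minimal stand-in for the frontend's fireEvent helper (assumed here).
const fireEvent = (node: HTMLElement, type: string, detail?: unknown) =>
  node.dispatchEvent(
    new CustomEvent(type, { detail, bubbles: true, composed: true })
  );

@customElement("ha-selector-navigation")
export class HaNavigationSelector extends LitElement {
  @property({ attribute: false }) public hass?: unknown;

  @property({ attribute: false }) public selector?: unknown;

  @property() public value?: string;

  @property() public label?: string;

  @property() public helper?: string;

  @property({ type: Boolean, reflect: true }) public disabled = false;

  @property({ type: Boolean }) public required = true;

  protected render() {
    return html`
      <ha-navigation-picker
        .hass=${this.hass}
        .label=${this.label}
        .value=${this.value}
        .required=${this.required}
        .disabled=${this.disabled}
        .helper=${this.helper}
        @value-changed=${this._valueChanged}
      ></ha-navigation-picker>
    `;
  }

  private _valueChanged(ev: CustomEvent) {
    fireEvent(this, "value-changed", { value: ev.detail.value });
  }
}
```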
@ -1,2 +0,0 @@
(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1236"],{4121:function(){Intl.PluralRules&&"function"==typeof Intl.PluralRules.__addLocaleData&&Intl.PluralRules.__addLocaleData({data:{categories:{cardinal:["one","other"],ordinal:["one","two","few","other"]},fn:function(e,n){var t=String(e).split("."),a=!t[1],l=Number(t[0])==e,o=l&&t[0].slice(-1),r=l&&t[0].slice(-2);return n?1==o&&11!=r?"one":2==o&&12!=r?"two":3==o&&13!=r?"few":"other":1==e&&a?"one":"other"}},locale:"en"})}}]);
//# sourceMappingURL=1236.64ca65d0ea4d76d4.js.map
Binary file not shown.
Binary file not shown.
@ -1 +0,0 @@
{"version":3,"file":"1236.64ca65d0ea4d76d4.js","sources":["/unknown/node_modules/@formatjs/intl-pluralrules/locale-data/en.js"],"names":["Intl","PluralRules","__addLocaleData","n","ord","s","String","split","v0","t0","Number","n10","slice","n100"],"mappings":"wHAEIA,KAAKC,aAA2D,mBAArCD,KAAKC,YAAYC,iBAC9CF,KAAKC,YAAYC,gBAAgB,CAAC,KAAO,CAAC,WAAa,CAAC,SAAW,CAAC,MAAM,SAAS,QAAU,CAAC,MAAM,MAAM,MAAM,UAAU,GAAK,SAASC,EAAGC,GAC3I,IAAIC,EAAIC,OAAOH,GAAGI,MAAM,KAAMC,GAAMH,EAAE,GAAII,EAAKC,OAAOL,EAAE,KAAOF,EAAGQ,EAAMF,GAAMJ,EAAE,GAAGO,OAAO,GAAIC,EAAOJ,GAAMJ,EAAE,GAAGO,OAAO,GACvH,OAAIR,EAAmB,GAAPO,GAAoB,IAARE,EAAa,MAC9B,GAAPF,GAAoB,IAARE,EAAa,MAClB,GAAPF,GAAoB,IAARE,EAAa,MACzB,QACQ,GAALV,GAAUK,EAAK,MAAQ,OAChC,GAAG,OAAS,M"}
@ -0,0 +1,2 @@
(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1236"],{4121:function(e,n,t){t(451),t(23509),Intl.PluralRules&&"function"==typeof Intl.PluralRules.__addLocaleData&&Intl.PluralRules.__addLocaleData({data:{categories:{cardinal:["one","other"],ordinal:["one","two","few","other"]},fn:function(e,n){var t=String(e).split("."),a=!t[1],l=Number(t[0])==e,o=l&&t[0].slice(-1),r=l&&t[0].slice(-2);return n?1==o&&11!=r?"one":2==o&&12!=r?"two":3==o&&13!=r?"few":"other":1==e&&a?"one":"other"}},locale:"en"})}}]);
//# sourceMappingURL=1236.71e83acee5b952f8.js.map
BIN
supervisor/api/panel/frontend_es5/1236.71e83acee5b952f8.js.gz
Normal file
Binary file not shown.
@ -0,0 +1 @@
{"version":3,"file":"1236.71e83acee5b952f8.js","sources":["/unknown/node_modules/@formatjs/intl-pluralrules/locale-data/en.js"],"names":["Intl","PluralRules","__addLocaleData","n","ord","s","String","split","v0","t0","Number","n10","slice","n100"],"mappings":"6IAEIA,KAAKC,aAA2D,mBAArCD,KAAKC,YAAYC,iBAC9CF,KAAKC,YAAYC,gBAAgB,CAAC,KAAO,CAAC,WAAa,CAAC,SAAW,CAAC,MAAM,SAAS,QAAU,CAAC,MAAM,MAAM,MAAM,UAAU,GAAK,SAASC,EAAGC,GAC3I,IAAIC,EAAIC,OAAOH,GAAGI,MAAM,KAAMC,GAAMH,EAAE,GAAII,EAAKC,OAAOL,EAAE,KAAOF,EAAGQ,EAAMF,GAAMJ,EAAE,GAAGO,OAAO,GAAIC,EAAOJ,GAAMJ,EAAE,GAAGO,OAAO,GACvH,OAAIR,EAAmB,GAAPO,GAAoB,IAARE,EAAa,MAC9B,GAAPF,GAAoB,IAARE,EAAa,MAClB,GAAPF,GAAoB,IAARE,EAAa,MACzB,QACQ,GAALV,GAAUK,EAAK,MAAQ,OAChC,GAAG,OAAS,M"}
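Both versions of this chunk ship the same @formatjs/intl-pluralrules locale data for English: cardinal categories `one`/`other` and ordinal categories `one`/`two`/`few`/`other`, selected from the last one or two digits of the number. Where a native `Intl.PluralRules` is available it returns the same categories the polyfill encodes, which is easy to check:

```ts
// The polyfilled English rules above match native Intl.PluralRules output.
const cardinal = new Intl.PluralRules("en");
const ordinal = new Intl.PluralRules("en", { type: "ordinal" });

console.log(cardinal.select(1)); // "one"
console.log(cardinal.select(4)); // "other"

console.log(ordinal.select(1));  // "one"   -> 1st
console.log(ordinal.select(2));  // "two"   -> 2nd
console.log(ordinal.select(3));  // "few"   -> 3rd
console.log(ordinal.select(11)); // "other" -> 11th (the 11/12/13 exception)
console.log(ordinal.select(22)); // "two"   -> 22nd
```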
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1258.641d07368f81a9e1.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1327.3669501922dc6ea0.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -1,47 +0,0 @@
/**
 * @license
 * Copyright 2017 Google LLC
 * SPDX-License-Identifier: BSD-3-Clause
 */

/**
 * @license
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @license
 * Copyright 2021 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @license
 * Copyright 2021 Google LLC
 * SPDX-License-Identifier: BSD-3-Clause
 */

/**
 * @license
 * Copyright 2022 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @license
 * Copyright 2022 Google LLC
 * SPDX-License-Identifier: BSD-3-Clause
 */

/**
 * @license
 * Copyright 2023 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */

/**
 * @license
 * Copyright 2024 Google LLC
 * SPDX-License-Identifier: Apache-2.0
 */
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1369.67dadde16f922a81.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1390.12bba3426dde7337.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/140.1be47d53a95f0c5b.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1403.f77f43b9a4eed93b.js.gz
Normal file
Binary file not shown.
Some files were not shown because too many files have changed in this diff.