Compare commits


1 Commit

Author SHA1 Message Date
ludeeus e415923553 Trigger backup sync when backup is complete 2024-10-16 04:54:29 +00:00
4178 changed files with 505381 additions and 14923 deletions

View File

@@ -1,6 +1,6 @@
 {
   "name": "Supervisor dev",
-  "image": "ghcr.io/home-assistant/devcontainer:2-supervisor",
+  "image": "ghcr.io/home-assistant/devcontainer:supervisor",
   "containerEnv": {
     "WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
   },
@@ -44,8 +44,5 @@
       }
     }
   },
-  "mounts": [
-    "type=volume,target=/var/lib/docker",
-    "type=volume,target=/mnt/supervisor"
-  ]
+  "mounts": ["type=volume,target=/var/lib/docker"]
 }

View File

@@ -26,7 +26,7 @@ body:
     attributes:
       label: What type of installation are you running?
       description: >
-        If you don't know, can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
+        If you don't know, can be found in [Settings -> System -> Repairs -> System Information](https://my.home-assistant.io/redirect/system_health/).
         It is listed as the `Installation Type` value.
       options:
         - Home Assistant OS
@@ -72,9 +72,9 @@ body:
     validations:
       required: true
     attributes:
-      label: System information
+      label: System Health information
       description: >
-        The System information can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
+        System Health information can be found in the top right menu in [Settings -> System -> Repairs](https://my.home-assistant.io/redirect/repairs/).
         Click the copy button at the bottom of the pop-up and paste it here.

         [![Open your Home Assistant instance and show health information about your system.](https://my.home-assistant.io/badges/system_health.svg)](https://my.home-assistant.io/redirect/system_health/)
@@ -83,9 +83,8 @@ body:
       label: Supervisor diagnostics
       placeholder: "drag-and-drop the diagnostics data file here (do not copy-and-paste the content)"
       description: >-
-        Supervisor diagnostics can be found in [Settings -> Devices & services](https://my.home-assistant.io/redirect/integrations/).
-        Find the card that says `Home Assistant Supervisor`, open it, and select the three dot menu of the Supervisor integration entry
-        and select 'Download diagnostics'.
+        Supervisor diagnostics can be found in [Settings -> Integrations](https://my.home-assistant.io/redirect/integrations/).
+        Find the card that says `Home Assistant Supervisor`, open its menu and select 'Download diagnostics'.

         **Please drag-and-drop the downloaded file into the textbox below. Do not copy and paste its contents.**
   - type: textarea

View File

@@ -33,7 +33,7 @@ on:
       - setup.py

 env:
-  DEFAULT_PYTHON: "3.13"
+  DEFAULT_PYTHON: "3.12"
   BUILD_NAME: supervisor
   BUILD_TYPE: supervisor
@@ -53,7 +53,7 @@ jobs:
       requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
         with:
           fetch-depth: 0
@@ -92,7 +92,7 @@ jobs:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
         with:
           fetch-depth: 0
@@ -106,9 +106,9 @@ jobs:
       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2025.03.0
+        uses: home-assistant/wheels@2024.07.1
         with:
-          abi: cp313
+          abi: cp312
           tag: musllinux_1_2
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
@@ -125,13 +125,13 @@ jobs:
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.publish == 'true'
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.2.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Install Cosign
         if: needs.init.outputs.publish == 'true'
-        uses: sigstore/cosign-installer@v3.8.2
+        uses: sigstore/cosign-installer@v3.7.0
         with:
           cosign-release: "v2.4.0"
@@ -149,7 +149,7 @@ jobs:
       - name: Login to GitHub Container Registry
         if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v3.4.0
+        uses: docker/login-action@v3.3.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -160,7 +160,7 @@ jobs:
         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
       - name: Build supervisor
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@2024.08.2
         with:
           args: |
             $BUILD_ARGS \
@@ -178,7 +178,7 @@ jobs:
     steps:
       - name: Checkout the repository
         if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
       - name: Initialize git
         if: needs.init.outputs.publish == 'true'
@@ -203,11 +203,11 @@ jobs:
     timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
       - name: Build the Supervisor
         if: needs.init.outputs.publish != 'true'
-        uses: home-assistant/builder@2025.03.0
+        uses: home-assistant/builder@2024.08.2
         with:
           args: |
             --test \

View File

@@ -8,7 +8,7 @@ on:
   pull_request: ~

 env:
-  DEFAULT_PYTHON: "3.13"
+  DEFAULT_PYTHON: "3.12"
   PRE_COMMIT_CACHE: ~/.cache/pre-commit

 concurrency:
@@ -25,15 +25,15 @@ jobs:
     name: Prepare Python dependencies
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
       - name: Set up Python
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.2.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -47,7 +47,7 @@ jobs:
           pip install -r requirements.txt -r requirements_tests.txt
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
@@ -67,15 +67,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.2.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -87,7 +87,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
@@ -110,15 +110,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.2.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -130,7 +130,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
@@ -153,7 +153,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
       - name: Register hadolint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -168,15 +168,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.2.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -188,7 +188,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
@@ -212,15 +212,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.2.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -232,7 +232,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
@@ -256,15 +256,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.2.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -274,10 +274,6 @@ jobs:
         run: |
           echo "Failed to restore Python virtual environment from cache"
           exit 1
-      - name: Install additional system dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y --no-install-recommends libpulse0
       - name: Register pylint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/pylint.json"
@@ -292,19 +288,19 @@ jobs:
     name: Run tests Python ${{ needs.prepare.outputs.python-version }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.2.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.8.2
+        uses: sigstore/cosign-installer@v3.7.0
         with:
           cosign-release: "v2.4.0"
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -339,7 +335,7 @@ jobs:
             -o console_output_style=count \
             tests
       - name: Upload coverage artifact
-        uses: actions/upload-artifact@v4.6.2
+        uses: actions/upload-artifact@v4.4.3
         with:
           name: coverage-${{ matrix.python-version }}
           path: .coverage
@@ -351,15 +347,15 @@ jobs:
     needs: ["pytest", "prepare"]
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v5.2.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.3
+        uses: actions/cache@v4.1.1
         with:
           path: venv
           key: |
@@ -370,7 +366,7 @@ jobs:
           echo "Failed to restore Python virtual environment from cache"
           exit 1
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.2.1
+        uses: actions/download-artifact@v4.1.8
       - name: Combine coverage results
         run: |
           . venv/bin/activate
@@ -378,4 +374,4 @@ jobs:
           coverage report
           coverage xml
       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v5.4.2
+        uses: codecov/codecov-action@v4.6.0

View File

@@ -11,7 +11,7 @@ jobs:
     name: Release Drafter
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
         with:
           fetch-depth: 0
@@ -36,7 +36,7 @@ jobs:
           echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
       - name: Run Release Drafter
-        uses: release-drafter/release-drafter@v6.1.0
+        uses: release-drafter/release-drafter@v6.0.0
         with:
           tag: ${{ steps.version.outputs.version }}
           name: ${{ steps.version.outputs.version }}

View File

@@ -10,9 +10,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v4.2.1
       - name: Sentry Release
-        uses: getsentry/action-release@v3.1.1
+        uses: getsentry/action-release@v1.7.0
         env:
           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
           SENTRY_ORG: ${{ secrets.SENTRY_ORG }}

View File

@@ -9,7 +9,7 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v9.1.0
+      - uses: actions/stale@v9.0.0
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           days-before-stale: 30

View File

@@ -1,82 +0,0 @@
-name: Update frontend
-
-on:
-  schedule: # once a day
-    - cron: "0 0 * * *"
-  workflow_dispatch:
-
-jobs:
-  check-version:
-    runs-on: ubuntu-latest
-    outputs:
-      skip: ${{ steps.check_version.outputs.skip || steps.check_existing_pr.outputs.skip }}
-      current_version: ${{ steps.check_version.outputs.current_version }}
-      latest_version: ${{ steps.latest_frontend_version.outputs.latest_tag }}
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-      - name: Get latest frontend release
-        id: latest_frontend_version
-        uses: abatilo/release-info-action@v1.3.3
-        with:
-          owner: home-assistant
-          repo: frontend
-      - name: Check if version is up to date
-        id: check_version
-        run: |
-          current_version="$(cat .ha-frontend-version)"
-          latest_version="${{ steps.latest_frontend_version.outputs.latest_tag }}"
-          echo "current_version=${current_version}" >> $GITHUB_OUTPUT
-          echo "LATEST_VERSION=${latest_version}" >> $GITHUB_ENV
-          if [[ ! "$current_version" < "$latest_version" ]]; then
-            echo "Frontend version is up to date"
-            echo "skip=true" >> $GITHUB_OUTPUT
-          fi
-      - name: Check if there is no open PR with this version
-        if: steps.check_version.outputs.skip != 'true'
-        id: check_existing_pr
-        env:
-          GH_TOKEN: ${{ github.token }}
-        run: |
-          PR=$(gh pr list --state open --base main --json title --search "Update frontend to version $LATEST_VERSION")
-          if [[ "$PR" != "[]" ]]; then
-            echo "Skipping - There is already a PR open for version $LATEST_VERSION"
-            echo "skip=true" >> $GITHUB_OUTPUT
-          fi
-  create-pr:
-    runs-on: ubuntu-latest
-    needs: check-version
-    if: needs.check-version.outputs.skip != 'true'
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-      - name: Clear www folder
-        run: |
-          rm -rf supervisor/api/panel/*
-      - name: Update version file
-        run: |
-          echo "${{ needs.check-version.outputs.latest_version }}" > .ha-frontend-version
-      - name: Download release assets
-        uses: robinraju/release-downloader@v1
-        with:
-          repository: 'home-assistant/frontend'
-          tag: ${{ needs.check-version.outputs.latest_version }}
-          fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_version }}.tar.gz
-          extract: true
-          out-file-path: supervisor/api/panel/
-      - name: Remove release assets archive
-        run: |
-          rm -f supervisor/api/panel/home_assistant_frontend_supervisor-*.tar.gz
-      - name: Create PR
-        uses: peter-evans/create-pull-request@v7
-        with:
-          commit-message: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
-          branch: autoupdate-frontend
-          base: main
-          draft: true
-          sign-commits: true
-          title: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
-          body: >
-            Update frontend from ${{ needs.check-version.outputs.current_version }} to
-            [${{ needs.check-version.outputs.latest_version }}](https://github.com/home-assistant/frontend/releases/tag/${{ needs.check-version.outputs.latest_version }})
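
A note on the version check in the removed workflow above: the bash test `[[ ! "$current_version" < "$latest_version" ]]` compares strings lexicographically, which only orders correctly here because the frontend tags are fixed-width date versions. A minimal Python sketch of the same guard (the version values are hypothetical):

    # Lexicographic string comparison, as bash `<` does inside [[ ... ]].
    # Safe for fixed-width date tags; it would misorder variable-width
    # versions such as "9.0" vs "10.0".
    current_version = "20241002.0"  # hypothetical .ha-frontend-version contents
    latest_version = "20250401.0"   # hypothetical latest release tag

    if not current_version < latest_version:
        print("Frontend version is up to date")  # workflow sets skip=true
    else:
        print(f"Update available: {current_version} -> {latest_version}")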

.gitmodules vendored Normal file
View File

@@ -0,0 +1,4 @@
+[submodule "home-assistant-polymer"]
+	path = home-assistant-polymer
+	url = https://github.com/home-assistant/home-assistant-polymer
+	branch = dev

View File

@@ -1 +0,0 @@
-20250401.0

View File

@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.1
+    rev: v0.5.7
     hooks:
       - id: ruff
         args:
@@ -8,7 +8,7 @@ repos:
       - id: ruff-format
         files: ^((supervisor|tests)/.+)?[^/]+\.py$
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v5.0.0
+    rev: v4.5.0
     hooks:
       - id: check-executables-have-shebangs
         stages: [manual]

View File

@@ -9,8 +9,7 @@ ENV \
 ARG \
     COSIGN_VERSION \
-    BUILD_ARCH \
-    QEMU_CPU
+    BUILD_ARCH

 # Install base
 WORKDIR /usr/src
@@ -29,23 +28,22 @@ RUN \
     \
     && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
     && chmod a+x /usr/bin/cosign \
-    && pip3 install uv==0.6.1
+    && pip3 install uv==0.2.21

 # Install requirements
 COPY requirements.txt .
 RUN \
     if [ "${BUILD_ARCH}" = "i386" ]; then \
-        setarch="linux32"; \
+        linux32 uv pip install --no-build -r requirements.txt; \
     else \
-        setarch=""; \
+        uv pip install --no-build -r requirements.txt; \
     fi \
-    && ${setarch} uv pip install --compile-bytecode --no-cache --no-build -r requirements.txt \
     && rm -f requirements.txt

 # Install Home Assistant Supervisor
 COPY . supervisor
 RUN \
-    uv pip install --no-cache -e ./supervisor \
+    pip3 install -e ./supervisor \
     && python3 -m compileall ./supervisor/supervisor

View File

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-hassio-supervisor
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.13-alpine3.21
-  armhf: ghcr.io/home-assistant/armhf-base-python:3.13-alpine3.21
-  armv7: ghcr.io/home-assistant/armv7-base-python:3.13-alpine3.21
-  amd64: ghcr.io/home-assistant/amd64-base-python:3.13-alpine3.21
-  i386: ghcr.io/home-assistant/i386-base-python:3.13-alpine3.21
+  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.20
+  armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.20
+  armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.20
+  amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.20
+  i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.20
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io

View File

@@ -1,5 +1,5 @@
 [build-system]
-requires = ["setuptools~=79.0.0", "wheel~=0.46.1"]
+requires = ["setuptools~=68.0.0", "wheel~=0.40.0"]
 build-backend = "setuptools.build_meta"

 [project]
@@ -12,7 +12,7 @@ authors = [
     { name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
 ]
 keywords = ["docker", "home-assistant", "api"]
-requires-python = ">=3.13.0"
+requires-python = ">=3.12.0"

 [project.urls]
 "Homepage" = "https://www.home-assistant.io/"
@@ -31,7 +31,7 @@ include-package-data = true
 include = ["supervisor*"]

 [tool.pylint.MAIN]
-py-version = "3.13"
+py-version = "3.12"
 # Use a conservative default here; 2 should speed up most setups and not hurt
 # any too bad. Override on command line as appropriate.
 jobs = 2
@@ -147,7 +147,7 @@ disable = [
     # "pointless-statement", # B018, ruff catches new occurrences, needs more work
     "raise-missing-from", # TRY200
     # "redefined-builtin", # A001, ruff is way more stricter, needs work
-    "try-except-raise", # TRY203
+    "try-except-raise", # TRY302
     "unused-argument", # ARG001, we don't use it
     "unused-format-string-argument", #F507
     "unused-format-string-key", # F504
@@ -223,7 +223,6 @@ testpaths = ["tests"]
 norecursedirs = [".git"]
 log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
 log_date_format = "%Y-%m-%d %H:%M:%S"
-asyncio_default_fixture_loop_scope = "function"
 asyncio_mode = "auto"
 filterwarnings = [
     "error",
@@ -290,7 +289,7 @@ lint.select = [
     "T20", # flake8-print
     "TID251", # Banned imports
     "TRY004", # Prefer TypeError exception for invalid type
-    "TRY203", # Remove exception handler; error is immediately re-raised
+    "TRY302", # Remove exception handler; error is immediately re-raised
     "UP", # pyupgrade
     "W", # pycodestyle
 ]
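
For context on the `TRY302`/`TRY203` swap in the hunks above: both IDs carry the same description in this diff ("Remove exception handler; error is immediately re-raised"), i.e. they are the same ruff rule under an old and a new code. A minimal sketch of what the rule flags:

    def load_config(path: str) -> str:
        # Flagged by TRY302 (later renamed TRY203): the handler adds nothing.
        try:
            with open(path, encoding="utf-8") as f:
                return f.read()
        except OSError:
            raise

    def load_config_fixed(path: str) -> str:
        # Equivalent behavior, no redundant handler.
        with open(path, encoding="utf-8") as f:
            return f.read()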

View File

@@ -1,30 +1,29 @@
 aiodns==3.2.0
-aiohttp==3.11.18
+aiohttp==3.10.10
 atomicwrites-homeassistant==1.4.1
-attrs==25.3.0
+attrs==24.2.0
 awesomeversion==24.6.0
-blockbuster==1.5.24
 brotli==1.1.0
-ciso8601==2.3.2
-colorlog==6.9.0
+ciso8601==2.3.1
+colorlog==6.8.2
 cpe==1.3.1
-cryptography==44.0.2
-debugpy==1.8.14
+cryptography==43.0.1
+debugpy==1.8.7
 deepmerge==2.0
 dirhash==0.5.0
 docker==7.1.0
 faust-cchardet==2.1.19
-gitpython==3.1.44
-jinja2==3.1.6
-log-rate-limit==1.4.2
-orjson==3.10.16
-pulsectl==24.12.0
+gitpython==3.1.43
+jinja2==3.1.4
+orjson==3.10.7
+pulsectl==24.8.0
 pyudev==0.24.3
 PyYAML==6.0.2
 requests==2.32.3
-securetar==2025.2.1
-sentry-sdk==2.26.1
-setuptools==79.0.1
+securetar==2024.2.1
+sentry-sdk==2.16.0
+setuptools==75.1.0
 voluptuous==0.15.2
-dbus-fast==2.44.1
-zlib-fast==0.2.1
+dbus-fast==2.24.3
+typing_extensions==4.12.2
+zlib-fast==0.2.0

View File

@@ -1,12 +1,12 @@
-astroid==3.3.9
-coverage==7.8.0
-pre-commit==4.2.0
-pylint==3.3.6
-pytest-aiohttp==1.1.0
-pytest-asyncio==0.25.2
-pytest-cov==6.1.1
+coverage==7.6.3
+pre-commit==4.0.1
+pylint==3.3.1
+pytest-aiohttp==1.0.5
+pytest-asyncio==0.23.6
+pytest-cov==5.0.0
 pytest-timeout==2.3.1
-pytest==8.3.5
-ruff==0.11.6
+pytest==8.3.3
+ruff==0.6.9
 time-machine==2.16.0
-urllib3==2.4.0
+typing_extensions==4.12.2
+urllib3==2.2.3

scripts/update-frontend.sh Executable file
View File

@@ -0,0 +1,30 @@
+#!/bin/bash
+source "/etc/supervisor_scripts/common"
+
+set -e
+
+# Update frontend
+git submodule update --init --recursive --remote
+
+[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
+cd home-assistant-polymer
+nvm install
+script/bootstrap
+
+# Download translations
+start_docker
+./script/translations_download
+
+# build frontend
+cd hassio
+./script/build_hassio
+
+# Copy frontend
+rm -rf ../../supervisor/api/panel/*
+cp -rf build/* ../../supervisor/api/panel/
+
+# Reset frontend git
+cd ..
+git reset --hard HEAD
+
+stop_docker

View File

@@ -19,7 +19,7 @@ def _get_supervisor_version():
     for line in CONSTANTS.split("/n"):
         if match := RE_SUPERVISOR_VERSION.match(line):
             return match.group(1)
-    return "9999.09.9.dev9999"
+    return "99.9.9dev"


 setup(
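
The helper above scans a constants file for the Supervisor version and falls back to a sentinel for dev builds. A self-contained sketch of the pattern; the regex and file contents here are assumptions, not the real `RE_SUPERVISOR_VERSION`. Note that both sides of the diff split on `"/n"` (a literal slash-n rather than a newline), which suggests the fallback sentinel is what actually gets used in practice:

    import re

    # Hypothetical stand-ins for the real regex and const-file contents.
    RE_SUPERVISOR_VERSION = re.compile(r'^SUPERVISOR_VERSION\s*=\s*"(.+)"$')
    CONSTANTS = 'NAME = "supervisor"\nSUPERVISOR_VERSION = "2024.10.2"\n'

    def _get_supervisor_version() -> str:
        for line in CONSTANTS.split("\n"):  # "\n" shows the intended scan
            if match := RE_SUPERVISOR_VERSION.match(line):
                return match.group(1)
        return "99.9.9dev"  # dev-build sentinel, as in the old code

    print(_get_supervisor_version())  # -> 2024.10.2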

View File

@@ -11,12 +11,10 @@ import zlib_fast
 # Enable fast zlib before importing supervisor
 zlib_fast.enable()

-# pylint: disable=wrong-import-position
-from supervisor import bootstrap  # noqa: E402
-from supervisor.utils.blockbuster import activate_blockbuster  # noqa: E402
-from supervisor.utils.logging import activate_log_queue_handler  # noqa: E402
-
-# pylint: enable=wrong-import-position
+from supervisor import bootstrap  # pylint: disable=wrong-import-position # noqa: E402
+from supervisor.utils.logging import (  # pylint: disable=wrong-import-position # noqa: E402
+    activate_log_queue_handler,
+)

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -54,11 +52,10 @@
     _LOGGER.info("Initializing Supervisor setup")
     coresys = loop.run_until_complete(bootstrap.initialize_coresys())
     loop.set_debug(coresys.config.debug)
-    if coresys.config.detect_blocking_io:
-        activate_blockbuster()
     loop.run_until_complete(coresys.core.connect())

-    loop.run_until_complete(bootstrap.supervisor_debugger(coresys))
+    bootstrap.supervisor_debugger(coresys)
+    bootstrap.migrate_system_env(coresys)

     # Signal health startup for container
     run_os_startup_check_cleanup()

View File

@@ -6,7 +6,6 @@ from contextlib import suppress
 from copy import deepcopy
 from datetime import datetime
 import errno
-from functools import partial
 from ipaddress import IPv4Address
 import logging
 from pathlib import Path, PurePath
@@ -18,9 +17,9 @@ from tempfile import TemporaryDirectory
 from typing import Any, Final

 import aiohttp
-from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
+from awesomeversion import AwesomeVersionCompareException
 from deepmerge import Merger
-from securetar import AddFileError, atomic_contents_add, secure_path
+from securetar import atomic_contents_add, secure_path
 import voluptuous as vol
 from voluptuous.humanize import humanize_error
@@ -33,6 +32,8 @@ from ..const import (
     ATTR_AUDIO_OUTPUT,
     ATTR_AUTO_UPDATE,
     ATTR_BOOT,
+    ATTR_DATA,
+    ATTR_EVENT,
     ATTR_IMAGE,
     ATTR_INGRESS_ENTRY,
     ATTR_INGRESS_PANEL,
@@ -48,6 +49,7 @@ from ..const import (
     ATTR_SYSTEM,
     ATTR_SYSTEM_MANAGED,
     ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
+    ATTR_TYPE,
     ATTR_USER,
     ATTR_UUID,
     ATTR_VERSION,
@@ -76,16 +78,15 @@ from ..exceptions import (
     HostAppArmorError,
 )
 from ..hardware.data import Device
-from ..homeassistant.const import WSEvent
+from ..homeassistant.const import WSEvent, WSType
 from ..jobs.const import JobExecutionLimit
 from ..jobs.decorator import Job
-from ..resolution.const import ContextType, IssueType, UnhealthyReason
-from ..resolution.data import Issue
+from ..resolution.const import UnhealthyReason
 from ..store.addon import AddonStore
 from ..utils import check_port
 from ..utils.apparmor import adjust_profile
 from ..utils.json import read_json_file, write_json_file
-from ..utils.sentry import async_capture_exception
+from ..utils.sentry import capture_exception
 from .const import (
     WATCHDOG_MAX_ATTEMPTS,
     WATCHDOG_RETRY_SECONDS,
@@ -137,31 +138,17 @@ class Addon(AddonModel):
         super().__init__(coresys, slug)
         self.instance: DockerAddon = DockerAddon(coresys, self)
         self._state: AddonState = AddonState.UNKNOWN
-        self._manual_stop: bool = False
+        self._manual_stop: bool = (
+            self.sys_hardware.helper.last_boot != self.sys_config.last_boot
+        )
         self._listeners: list[EventListener] = []
         self._startup_event = asyncio.Event()
         self._startup_task: asyncio.Task | None = None
-        self._boot_failed_issue = Issue(
-            IssueType.BOOT_FAIL, ContextType.ADDON, reference=self.slug
-        )
-        self._device_access_missing_issue = Issue(
-            IssueType.DEVICE_ACCESS_MISSING, ContextType.ADDON, reference=self.slug
-        )

     def __repr__(self) -> str:
         """Return internal representation."""
         return f"<Addon: {self.slug}>"

-    @property
-    def boot_failed_issue(self) -> Issue:
-        """Get issue used if start on boot failed."""
-        return self._boot_failed_issue
-
-    @property
-    def device_access_missing_issue(self) -> Issue:
-        """Get issue used if device access is missing and can't be automatically added."""
-        return self._device_access_missing_issue
-
     @property
     def state(self) -> AddonState:
         """Return state of the add-on."""
@@ -179,26 +166,15 @@ class Addon(AddonModel):
         if new_state == AddonState.STARTED or old_state == AddonState.STARTUP:
             self._startup_event.set()

-        # Dismiss boot failed issue if present and we started
-        if (
-            new_state == AddonState.STARTED
-            and self.boot_failed_issue in self.sys_resolution.issues
-        ):
-            self.sys_resolution.dismiss_issue(self.boot_failed_issue)
-
-        # Dismiss device access missing issue if present and we stopped
-        if (
-            new_state == AddonState.STOPPED
-            and self.device_access_missing_issue in self.sys_resolution.issues
-        ):
-            self.sys_resolution.dismiss_issue(self.device_access_missing_issue)
-
-        self.sys_homeassistant.websocket.supervisor_event_custom(
-            WSEvent.ADDON,
+        self.sys_homeassistant.websocket.send_message(
             {
-                ATTR_SLUG: self.slug,
-                ATTR_STATE: new_state,
-            },
+                ATTR_TYPE: WSType.SUPERVISOR_EVENT,
+                ATTR_DATA: {
+                    ATTR_EVENT: WSEvent.ADDON,
+                    ATTR_SLUG: self.slug,
+                    ATTR_STATE: new_state,
+                },
+            }
         )

     @property
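
Both sides of this hunk publish the same add-on state change to Home Assistant; the newer code just goes through a `supervisor_event_custom` helper instead of building the envelope inline. Roughly, the message the older code assembles looks like this (the string values of the `WSType`/`WSEvent`/`ATTR_*` constants are assumptions, not taken from this diff):

    # Sketch of the WebSocket payload for an add-on state change.
    message = {
        "type": "supervisor/event",  # ATTR_TYPE: WSType.SUPERVISOR_EVENT
        "data": {
            "event": "addon",        # ATTR_EVENT: WSEvent.ADDON
            "slug": "core_ssh",      # ATTR_SLUG: hypothetical add-on slug
            "state": "started",      # ATTR_STATE: the new AddonState
        },
    }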
@@ -208,10 +184,6 @@ class Addon(AddonModel):
     async def load(self) -> None:
         """Async initialize of object."""
-        self._manual_stop = (
-            await self.sys_hardware.helper.last_boot() != self.sys_config.last_boot
-        )
-
         if self.is_detached:
             await super().refresh_path_cache()
@@ -239,7 +211,7 @@ class Addon(AddonModel):
             await self.instance.install(self.version, default_image, arch=self.arch)

         self.persist[ATTR_IMAGE] = default_image
-        await self.save_persist()
+        self.save_persist()

     @property
     def ip_address(self) -> IPv4Address:
@@ -279,28 +251,28 @@ class Addon(AddonModel):
     @property
     def with_icon(self) -> bool:
         """Return True if an icon exists."""
-        if self.is_detached or not self.addon_store:
+        if self.is_detached:
             return super().with_icon
         return self.addon_store.with_icon

     @property
     def with_logo(self) -> bool:
         """Return True if a logo exists."""
-        if self.is_detached or not self.addon_store:
+        if self.is_detached:
             return super().with_logo
         return self.addon_store.with_logo

     @property
     def with_changelog(self) -> bool:
         """Return True if a changelog exists."""
-        if self.is_detached or not self.addon_store:
+        if self.is_detached:
             return super().with_changelog
         return self.addon_store.with_changelog

     @property
     def with_documentation(self) -> bool:
         """Return True if a documentation exists."""
-        if self.is_detached or not self.addon_store:
+        if self.is_detached:
             return super().with_documentation
         return self.addon_store.with_documentation
@@ -310,7 +282,7 @@ class Addon(AddonModel):
         return self._available(self.data_store)

     @property
-    def version(self) -> AwesomeVersion:
+    def version(self) -> str | None:
         """Return installed version."""
         return self.persist[ATTR_VERSION]
@@ -350,13 +322,6 @@ class Addon(AddonModel):
"""Store user boot options.""" """Store user boot options."""
self.persist[ATTR_BOOT] = value self.persist[ATTR_BOOT] = value
# Dismiss boot failed issue if present and boot at start disabled
if (
value == AddonBoot.MANUAL
and self._boot_failed_issue in self.sys_resolution.issues
):
self.sys_resolution.dismiss_issue(self._boot_failed_issue)
@property @property
def auto_update(self) -> bool: def auto_update(self) -> bool:
"""Return if auto update is enable.""" """Return if auto update is enable."""
@@ -458,7 +423,7 @@ class Addon(AddonModel):
         return None

     @property
-    def latest_version(self) -> AwesomeVersion:
+    def latest_version(self) -> str:
         """Return version of add-on."""
         return self.data_store[ATTR_VERSION]
@@ -512,8 +477,9 @@ class Addon(AddonModel):
     def webui(self) -> str | None:
         """Return URL to webui or None."""
         url = super().webui
-        if not url or not (webui := RE_WEBUI.match(url)):
+        if not url:
             return None
+        webui = RE_WEBUI.match(url)

         # extract arguments
         t_port = webui.group("t_port")
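
The guard change above is behavioral as well as stylistic: the assignment expression folds the regex match into the condition, so a URL that does not match `RE_WEBUI` returns `None` instead of reaching `webui.group(...)` on a failed match. The pattern in isolation, with a simplified stand-in regex:

    import re

    # Simplified stand-in; the real RE_WEBUI is more involved.
    RE_WEBUI = re.compile(r"https?://\[HOST\]:\[PORT:(?P<t_port>\d+)\]")

    def webui_port(url: str | None) -> str | None:
        # Walrus guard: bail out on a missing URL or a failed match.
        if not url or not (webui := RE_WEBUI.match(url)):
            return None
        return webui.group("t_port")

    print(webui_port("http://[HOST]:[PORT:8099]"))  # -> 8099
    print(webui_port("not-a-webui-url"))            # -> None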
@@ -662,15 +628,16 @@ class Addon(AddonModel):
"""Is add-on loaded.""" """Is add-on loaded."""
return bool(self._listeners) return bool(self._listeners)
async def save_persist(self) -> None: def save_persist(self) -> None:
"""Save data of add-on.""" """Save data of add-on."""
await self.sys_addons.data.save_data() self.sys_addons.data.save_data()
async def watchdog_application(self) -> bool: async def watchdog_application(self) -> bool:
"""Return True if application is running.""" """Return True if application is running."""
url = self.watchdog_url url = super().watchdog
if not url or not (application := RE_WATCHDOG.match(url)): if not url:
return True return True
application = RE_WATCHDOG.match(url)
# extract arguments # extract arguments
t_port = int(application.group("t_port")) t_port = int(application.group("t_port"))
@@ -679,10 +646,8 @@ class Addon(AddonModel):
         s_suffix = application.group("s_suffix") or ""

         # search host port for this docker port
-        if self.host_network and self.ports:
-            port = self.ports.get(f"{t_port}/tcp")
-            if port is None:
-                port = t_port
+        if self.host_network:
+            port = self.ports.get(f"{t_port}/tcp", t_port)
         else:
             port = t_port
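
The subtle point in this hunk is `dict.get`: its default applies only when the key is absent, not when the key is present with a stored `None`, which is presumably how the add-on port map can look for host-network add-ons. The newer code on the left therefore checks for `None` explicitly:

    ports = {"8080/tcp": None}  # hypothetical port map entry
    t_port = 8080

    # Older approach: the default is ignored because the key exists.
    port = ports.get(f"{t_port}/tcp", t_port)
    assert port is None  # not 8080

    # Newer approach: also falls back when the stored value is None.
    port = ports.get(f"{t_port}/tcp")
    if port is None:
        port = t_port
    assert port == 8080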
@@ -716,7 +681,7 @@ class Addon(AddonModel):
         try:
             options = self.schema.validate(self.options)
-            await self.sys_run_in_executor(write_json_file, self.path_options, options)
+            write_json_file(self.path_options, options)
         except vol.Invalid as ex:
             _LOGGER.error(
                 "Add-on %s has invalid options: %s",
@@ -747,12 +712,9 @@ class Addon(AddonModel):
         for listener in self._listeners:
             self.sys_bus.remove_listener(listener)

-        def remove_data_dir():
-            if self.path_data.is_dir():
-                _LOGGER.info("Removing add-on data folder %s", self.path_data)
-                remove_data(self.path_data)
-
-        await self.sys_run_in_executor(remove_data_dir)
+        if self.path_data.is_dir():
+            _LOGGER.info("Removing add-on data folder %s", self.path_data)
+            await remove_data(self.path_data)

     async def _check_ingress_port(self):
         """Assign a ingress port if dynamic port selection is used."""
@@ -771,20 +733,14 @@ class Addon(AddonModel):
     )
     async def install(self) -> None:
         """Install and setup this addon."""
-        if not self.addon_store:
-            raise AddonsError("Missing from store, cannot install!")
-
-        await self.sys_addons.data.install(self.addon_store)
+        self.sys_addons.data.install(self.addon_store)
         await self.load()

-        def setup_data():
-            if not self.path_data.is_dir():
-                _LOGGER.info(
-                    "Creating Home Assistant add-on data folder %s", self.path_data
-                )
-                self.path_data.mkdir()
-
-        await self.sys_run_in_executor(setup_data)
+        if not self.path_data.is_dir():
+            _LOGGER.info(
+                "Creating Home Assistant add-on data folder %s", self.path_data
+            )
+            self.path_data.mkdir()

         # Setup/Fix AppArmor profile
         await self.install_apparmor()
@@ -795,7 +751,7 @@ class Addon(AddonModel):
                 self.latest_version, self.addon_store.image, arch=self.arch
             )
         except DockerError as err:
-            await self.sys_addons.data.uninstall(self)
+            self.sys_addons.data.uninstall(self)
             raise AddonsError() from err

         # Add to addon manager
@@ -823,17 +779,14 @@ class Addon(AddonModel):
         await self.unload()

-        def cleanup_config_and_audio():
-            # Remove config if present and requested
-            if self.addon_config_used and remove_config:
-                remove_data(self.path_config)
-
-            # Cleanup audio settings
-            if self.path_pulse.exists():
-                with suppress(OSError):
-                    self.path_pulse.unlink()
-
-        await self.sys_run_in_executor(cleanup_config_and_audio)
+        # Remove config if present and requested
+        if self.addon_config_used and remove_config:
+            await remove_data(self.path_config)
+
+        # Cleanup audio settings
+        if self.path_pulse.exists():
+            with suppress(OSError):
+                self.path_pulse.unlink()

         # Cleanup AppArmor profile
         with suppress(HostAppArmorError):
@@ -847,23 +800,23 @@ class Addon(AddonModel):
         # Cleanup Ingress dynamic port assignment
         if self.with_ingress:
-            await self.sys_ingress.del_dynamic_port(self.slug)
             self.sys_create_task(self.sys_ingress.reload())
+            self.sys_ingress.del_dynamic_port(self.slug)

         # Cleanup discovery data
         for message in self.sys_discovery.list_messages:
             if message.addon != self.slug:
                 continue
-            await self.sys_discovery.remove(message)
+            self.sys_discovery.remove(message)

         # Cleanup services data
         for service in self.sys_services.list_services:
             if self.slug not in service.active:
                 continue
-            await service.del_service_data(self)
+            service.del_service_data(self)

         # Remove from addon manager
-        await self.sys_addons.data.uninstall(self)
+        self.sys_addons.data.uninstall(self)
         self.sys_addons.local.pop(self.slug)

     @Job(
@@ -877,9 +830,6 @@ class Addon(AddonModel):
         Returns a Task that completes when addon has state 'started' (see start)
         if it was running. Else nothing is returned.
         """
-        if not self.addon_store:
-            raise AddonsError("Missing from store, cannot update!")
-
         old_image = self.image
         # Cache data to prevent races with other updates to global
         store = self.addon_store.clone()
@@ -895,7 +845,7 @@ class Addon(AddonModel):
         try:
             _LOGGER.info("Add-on '%s' successfully updated", self.slug)
-            await self.sys_addons.data.update(store)
+            self.sys_addons.data.update(store)
             await self._check_ingress_port()

             # Cleanup
@@ -936,9 +886,7 @@ class Addon(AddonModel):
         except DockerError as err:
             raise AddonsError() from err

-        if self.addon_store:
-            await self.sys_addons.data.update(self.addon_store)
-
+        self.sys_addons.data.update(self.addon_store)
         await self._check_ingress_port()
         _LOGGER.info("Add-on '%s' successfully rebuilt", self.slug)
@@ -951,25 +899,22 @@ class Addon(AddonModel):
         )
         return out

-    async def write_pulse(self) -> None:
+    def write_pulse(self) -> None:
         """Write asound config to file and return True on success."""
         pulse_config = self.sys_plugins.audio.pulse_client(
             input_profile=self.audio_input, output_profile=self.audio_output
         )

-        def write_pulse_config():
-            # Cleanup wrong maps
-            if self.path_pulse.is_dir():
-                shutil.rmtree(self.path_pulse, ignore_errors=True)
-
-            self.path_pulse.write_text(pulse_config, encoding="utf-8")
-
-        # Write pulse config
-        try:
-            await self.sys_run_in_executor(write_pulse_config)
-        except OSError as err:
-            if err.errno == errno.EBADMSG:
-                self.sys_resolution.add_unhealthy_reason(
-                    UnhealthyReason.OSERROR_BAD_MESSAGE
-                )
+        # Cleanup wrong maps
+        if self.path_pulse.is_dir():
+            shutil.rmtree(self.path_pulse, ignore_errors=True)
+
+        # Write pulse config
+        try:
+            self.path_pulse.write_text(pulse_config, encoding="utf-8")
+        except OSError as err:
+            if err.errno == errno.EBADMSG:
+                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
             _LOGGER.error(
                 "Add-on %s can't write pulse/client.config: %s", self.slug, err
             )
@@ -981,7 +926,7 @@ class Addon(AddonModel):
     async def install_apparmor(self) -> None:
         """Install or Update AppArmor profile for Add-on."""
         exists_local = self.sys_host.apparmor.exists(self.slug)
-        exists_addon = await self.sys_run_in_executor(self.path_apparmor.exists)
+        exists_addon = self.path_apparmor.exists()

         # Nothing to do
         if not exists_local and not exists_addon:
@@ -993,21 +938,11 @@ class Addon(AddonModel):
             return

         # Need install/update
-        tmp_folder: TemporaryDirectory | None = None
-
-        def install_update_profile() -> Path:
-            nonlocal tmp_folder
-            tmp_folder = TemporaryDirectory(dir=self.sys_config.path_tmp)
-            profile_file = Path(tmp_folder.name, "apparmor.txt")
+        with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_folder:
+            profile_file = Path(tmp_folder, "apparmor.txt")
             adjust_profile(self.slug, self.path_apparmor, profile_file)
-            return profile_file
-
-        try:
-            profile_file = await self.sys_run_in_executor(install_update_profile)
             await self.sys_host.apparmor.load_profile(self.slug, profile_file)
-        finally:
-            if tmp_folder:
-                await self.sys_run_in_executor(tmp_folder.cleanup)

     async def uninstall_apparmor(self) -> None:
         """Remove AppArmor profile for Add-on."""
@@ -1079,14 +1014,14 @@ class Addon(AddonModel):
         # Access Token
         self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
-        await self.save_persist()
+        self.save_persist()

         # Options
         await self.write_options()

         # Sound
         if self.with_audio:
-            await self.write_pulse()
+            self.write_pulse()

         def _check_addon_config_dir():
             if self.path_config.is_dir():
@@ -1234,25 +1169,6 @@ class Addon(AddonModel):
         await self._backup_command(self.backup_post)
         return None

-    def _is_excluded_by_filter(
-        self, origin_path: Path, arcname: str, item_arcpath: PurePath
-    ) -> bool:
-        """Filter out files from backup based on filters provided by addon developer.
-
-        This tests the dev provided filters against the full path of the file as
-        Supervisor sees them using match. This is done for legacy reasons, testing
-        against the relative path makes more sense and may be changed in the future.
-        """
-        full_path = origin_path / item_arcpath.relative_to(arcname)
-
-        for exclude in self.backup_exclude:
-            if not full_path.match(exclude):
-                continue
-            _LOGGER.debug("Ignoring %s because of %s", full_path, exclude)
-            return True
-
-        return False
-
     @Job(
         name="addon_backup",
         limit=JobExecutionLimit.GROUP_ONCE,
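
The `_is_excluded_by_filter` helper removed above backs securetar's `file_filter` hook (see the `atomic_contents_add` calls in the next hunks); as its docstring says, it tests developer-supplied globs against the file's full path for legacy reasons, rather than the archive-relative path. Its matching rule, reduced to a self-contained sketch:

    from pathlib import Path, PurePath

    def is_excluded(origin_path: Path, arcname: str,
                    item_arcpath: PurePath, backup_exclude: list[str]) -> bool:
        # Rebuild the on-disk path from the archive path, then test each
        # developer-provided glob with Path.match (full-path semantics).
        full_path = origin_path / item_arcpath.relative_to(arcname)
        return any(full_path.match(exclude) for exclude in backup_exclude)

    # Hypothetical example: exclude *.log files under the data dir.
    print(is_excluded(Path("/data"), "data",
                      PurePath("data/logs/app.log"), ["*.log"]))  # -> True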
@@ -1264,45 +1180,46 @@ class Addon(AddonModel):
         Returns a Task that completes when addon has state 'started' (see start)
         for cold backup. Else nothing is returned.
         """
-        def _addon_backup(
-            store_image: bool,
-            metadata: dict[str, Any],
-            apparmor_profile: str | None,
-            addon_config_used: bool,
-        ):
-            """Start the backup process."""
-            with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
-                temp_path = Path(temp)
-
-                # store local image
-                if store_image:
-                    try:
-                        self.instance.export_image(temp_path.joinpath("image.tar"))
-                    except DockerError as err:
-                        raise AddonsError() from err
-
-                # Store local configs/state
-                try:
-                    write_json_file(temp_path.joinpath("addon.json"), metadata)
-                except ConfigurationFileError as err:
-                    raise AddonsError(
-                        f"Can't save meta for {self.slug}", _LOGGER.error
-                    ) from err
-
-                # Store AppArmor Profile
-                if apparmor_profile:
-                    profile_backup_file = temp_path.joinpath("apparmor.txt")
-                    try:
-                        self.sys_host.apparmor.backup_profile(
-                            apparmor_profile, profile_backup_file
-                        )
-                    except HostAppArmorError as err:
-                        raise AddonsError(
-                            "Can't backup AppArmor profile", _LOGGER.error
-                        ) from err
-
-                # Write tarfile
+        wait_for_start: Awaitable[None] | None = None
+
+        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
+            temp_path = Path(temp)
+
+            # store local image
+            if self.need_build:
+                try:
+                    await self.instance.export_image(temp_path.joinpath("image.tar"))
+                except DockerError as err:
+                    raise AddonsError() from err
+
+            data = {
+                ATTR_USER: self.persist,
+                ATTR_SYSTEM: self.data,
+                ATTR_VERSION: self.version,
+                ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
+            }
+
+            # Store local configs/state
+            try:
+                write_json_file(temp_path.joinpath("addon.json"), data)
+            except ConfigurationFileError as err:
+                raise AddonsError(
+                    f"Can't save meta for {self.slug}", _LOGGER.error
+                ) from err
+
+            # Store AppArmor Profile
+            if self.sys_host.apparmor.exists(self.slug):
+                profile = temp_path.joinpath("apparmor.txt")
+                try:
+                    await self.sys_host.apparmor.backup_profile(self.slug, profile)
+                except HostAppArmorError as err:
+                    raise AddonsError(
+                        "Can't backup AppArmor profile", _LOGGER.error
+                    ) from err
+
+            # write into tarfile
+            def _write_tarfile():
+                """Write tar inside loop."""
                 with tar_file as backup:
                     # Backup metadata
                     backup.add(temp, arcname=".")
@@ -1311,56 +1228,32 @@ class Addon(AddonModel):
                     atomic_contents_add(
                         backup,
                         self.path_data,
-                        file_filter=partial(
-                            self._is_excluded_by_filter, self.path_data, "data"
-                        ),
+                        excludes=self.backup_exclude,
                         arcname="data",
                     )

                     # Backup config
-                    if addon_config_used:
+                    if self.addon_config_used:
                         atomic_contents_add(
                             backup,
                             self.path_config,
-                            file_filter=partial(
-                                self._is_excluded_by_filter, self.path_config, "config"
-                            ),
+                            excludes=self.backup_exclude,
                             arcname="config",
                         )

-        wait_for_start: asyncio.Task | None = None
-
-        data = {
-            ATTR_USER: self.persist,
-            ATTR_SYSTEM: self.data,
-            ATTR_VERSION: self.version,
-            ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
-        }
-        apparmor_profile = (
-            self.slug if self.sys_host.apparmor.exists(self.slug) else None
-        )
-
-        was_running = await self.begin_backup()
-        try:
-            _LOGGER.info("Building backup for add-on %s", self.slug)
-            await self.sys_run_in_executor(
-                partial(
-                    _addon_backup,
-                    store_image=self.need_build,
-                    metadata=data,
-                    apparmor_profile=apparmor_profile,
-                    addon_config_used=self.addon_config_used,
-                )
-            )
-            _LOGGER.info("Finish backup for addon %s", self.slug)
-        except (tarfile.TarError, OSError, AddFileError) as err:
-            raise AddonsError(
-                f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
-            ) from err
-        finally:
-            if was_running:
-                wait_for_start = await self.end_backup()
-
+            is_running = await self.begin_backup()
+            try:
+                _LOGGER.info("Building backup for add-on %s", self.slug)
+                await self.sys_run_in_executor(_write_tarfile)
+            except (tarfile.TarError, OSError) as err:
+                raise AddonsError(
+                    f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
+                ) from err
+            finally:
+                if is_running:
+                    wait_for_start = await self.end_backup()
+
+        _LOGGER.info("Finish backup for addon %s", self.slug)
         return wait_for_start

     @Job(
@@ -1374,37 +1267,31 @@ class Addon(AddonModel):
         Returns a Task that completes when addon has state 'started' (see start)
         if addon is started after restore. Else nothing is returned.
         """
-        wait_for_start: asyncio.Task | None = None
-
-        # Extract backup
-        def _extract_tarfile() -> tuple[TemporaryDirectory, dict[str, Any]]:
-            """Extract tar backup."""
-            tmp = TemporaryDirectory(dir=self.sys_config.path_tmp)
-            try:
-                with tar_file as backup:
-                    backup.extractall(
-                        path=tmp.name,
-                        members=secure_path(backup),
-                        filter="fully_trusted",
-                    )
-                data = read_json_file(Path(tmp.name, "addon.json"))
-            except:
-                tmp.cleanup()
-                raise
-
-            return tmp, data
-
-        try:
-            tmp, data = await self.sys_run_in_executor(_extract_tarfile)
-        except tarfile.TarError as err:
-            raise AddonsError(
-                f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
-            ) from err
-        except ConfigurationFileError as err:
-            raise AddonsError() from err
-
-        try:
+        wait_for_start: Awaitable[None] | None = None
+
+        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
+            # extract backup
+            def _extract_tarfile():
+                """Extract tar backup."""
+                with tar_file as backup:
+                    backup.extractall(
+                        path=Path(temp),
+                        members=secure_path(backup),
+                        filter="fully_trusted",
+                    )
+
+            try:
+                await self.sys_run_in_executor(_extract_tarfile)
+            except tarfile.TarError as err:
+                raise AddonsError(
+                    f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
+                ) from err
+
+            # Read backup data
+            try:
+                data = read_json_file(Path(temp, "addon.json"))
+            except ConfigurationFileError as err:
+                raise AddonsError() from err
+
             # Validate
             try:
                 data = SCHEMA_ADDON_BACKUP(data)
@@ -1424,7 +1311,7 @@ class Addon(AddonModel):
# Restore local add-on information # Restore local add-on information
_LOGGER.info("Restore config for addon %s", self.slug) _LOGGER.info("Restore config for addon %s", self.slug)
restore_image = self._image(data[ATTR_SYSTEM]) restore_image = self._image(data[ATTR_SYSTEM])
await self.sys_addons.data.restore( self.sys_addons.data.restore(
self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image
) )
@@ -1438,7 +1325,7 @@ class Addon(AddonModel):
if not await self.instance.exists(): if not await self.instance.exists():
_LOGGER.info("Restore/Install of image for addon %s", self.slug) _LOGGER.info("Restore/Install of image for addon %s", self.slug)
image_file = Path(tmp.name, "image.tar") image_file = Path(temp, "image.tar")
if image_file.is_file(): if image_file.is_file():
with suppress(DockerError): with suppress(DockerError):
await self.instance.import_image(image_file) await self.instance.import_image(image_file)
@@ -1457,24 +1344,24 @@ class Addon(AddonModel):
# Restore data and config # Restore data and config
def _restore_data(): def _restore_data():
"""Restore data and config.""" """Restore data and config."""
_LOGGER.info("Restoring data and config for addon %s", self.slug) temp_data = Path(temp, "data")
if self.path_data.is_dir():
remove_data(self.path_data)
if self.path_config.is_dir():
remove_data(self.path_config)
temp_data = Path(tmp.name, "data")
if temp_data.is_dir(): if temp_data.is_dir():
shutil.copytree(temp_data, self.path_data, symlinks=True) shutil.copytree(temp_data, self.path_data, symlinks=True)
else: else:
self.path_data.mkdir() self.path_data.mkdir()
temp_config = Path(tmp.name, "config") temp_config = Path(temp, "config")
if temp_config.is_dir(): if temp_config.is_dir():
shutil.copytree(temp_config, self.path_config, symlinks=True) shutil.copytree(temp_config, self.path_config, symlinks=True)
elif self.addon_config_used: elif self.addon_config_used:
self.path_config.mkdir() self.path_config.mkdir()
_LOGGER.info("Restoring data and config for addon %s", self.slug)
if self.path_data.is_dir():
await remove_data(self.path_data)
if self.path_config.is_dir():
await remove_data(self.path_config)
try: try:
await self.sys_run_in_executor(_restore_data) await self.sys_run_in_executor(_restore_data)
except shutil.Error as err: except shutil.Error as err:
@@ -1483,16 +1370,15 @@ class Addon(AddonModel):
) from err ) from err
# Restore AppArmor # Restore AppArmor
profile_file = Path(tmp.name, "apparmor.txt") profile_file = Path(temp, "apparmor.txt")
if await self.sys_run_in_executor(profile_file.exists): if profile_file.exists():
try: try:
await self.sys_host.apparmor.load_profile( await self.sys_host.apparmor.load_profile(
self.slug, profile_file self.slug, profile_file
) )
except HostAppArmorError as err: except HostAppArmorError as err:
_LOGGER.error( _LOGGER.error(
"Can't restore AppArmor profile for add-on %s", "Can't restore AppArmor profile for add-on %s", self.slug
self.slug,
) )
raise AddonsError() from err raise AddonsError() from err
@@ -1504,8 +1390,7 @@ class Addon(AddonModel):
# Run add-on # Run add-on
if data[ATTR_STATE] == AddonState.STARTED: if data[ATTR_STATE] == AddonState.STARTED:
wait_for_start = await self.start() wait_for_start = await self.start()
finally:
await self.sys_run_in_executor(tmp.cleanup)
_LOGGER.info("Finished restore for add-on %s", self.slug) _LOGGER.info("Finished restore for add-on %s", self.slug)
return wait_for_start return wait_for_start
@@ -1546,7 +1431,7 @@ class Addon(AddonModel):
except AddonsError as err: except AddonsError as err:
attempts = attempts + 1 attempts = attempts + 1
_LOGGER.error("Watchdog restart of addon %s failed!", self.name) _LOGGER.error("Watchdog restart of addon %s failed!", self.name)
await async_capture_exception(err) capture_exception(err)
else: else:
break break
@@ -1598,6 +1483,6 @@ class Addon(AddonModel):
def refresh_path_cache(self) -> Awaitable[None]: def refresh_path_cache(self) -> Awaitable[None]:
"""Refresh cache of existing paths.""" """Refresh cache of existing paths."""
if self.is_detached or not self.addon_store: if self.is_detached:
return super().refresh_path_cache() return super().refresh_path_cache()
return self.addon_store.refresh_path_cache() return self.addon_store.refresh_path_cache()
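Both versions of refresh_path_cache() keep the method itself synchronous and return an Awaitable for the caller to schedule; the newer guard additionally falls back when no store add-on is attached. A sketch of that pattern under plain asyncio, with illustrative class and attribute names:

import asyncio
from collections.abc import Awaitable

class ModelSketch:
    def refresh_path_cache(self) -> Awaitable[None]:
        return self._refresh()

    async def _refresh(self) -> None:
        await asyncio.sleep(0)  # stand-in for path checks done in an executor

class InstalledSketch(ModelSketch):
    def __init__(self, addon_store: ModelSketch | None) -> None:
        self.addon_store = addon_store

    def refresh_path_cache(self) -> Awaitable[None]:
        # Detached add-ons (no store copy) fall back to the base implementation.
        if self.addon_store is None:
            return super().refresh_path_cache()
        return self.addon_store.refresh_path_cache()

asyncio.run(InstalledSketch(addon_store=None).refresh_path_cache())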


@@ -4,7 +4,7 @@ from __future__ import annotations
from functools import cached_property from functools import cached_property
from pathlib import Path from pathlib import Path
from typing import TYPE_CHECKING, Any from typing import TYPE_CHECKING
from awesomeversion import AwesomeVersion from awesomeversion import AwesomeVersion
@@ -23,7 +23,7 @@ from ..utils.common import FileConfiguration, find_one_filetype
from .validate import SCHEMA_BUILD_CONFIG from .validate import SCHEMA_BUILD_CONFIG
if TYPE_CHECKING: if TYPE_CHECKING:
from .manager import AnyAddon from . import AnyAddon
class AddonBuild(FileConfiguration, CoreSysAttributes): class AddonBuild(FileConfiguration, CoreSysAttributes):
@@ -34,36 +34,23 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
self.coresys: CoreSys = coresys self.coresys: CoreSys = coresys
self.addon = addon self.addon = addon
# Search for build file later in executor
super().__init__(None, SCHEMA_BUILD_CONFIG)
def _get_build_file(self) -> Path:
"""Get build file.
Must be run in executor.
"""
try: try:
return find_one_filetype( build_file = find_one_filetype(
self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION
) )
except ConfigurationFileError: except ConfigurationFileError:
return self.addon.path_location / "build.json" build_file = self.addon.path_location / "build.json"
async def read_data(self) -> None: super().__init__(build_file, SCHEMA_BUILD_CONFIG)
"""Load data from file."""
if not self._file:
self._file = await self.sys_run_in_executor(self._get_build_file)
await super().read_data() def save_data(self):
async def save_data(self):
"""Ignore save function.""" """Ignore save function."""
raise RuntimeError() raise RuntimeError()
@cached_property @cached_property
def arch(self) -> str: def arch(self) -> str:
"""Return arch of the add-on.""" """Return arch of the add-on."""
return self.sys_arch.match([self.addon.arch]) return self.sys_arch.match(self.addon.arch)
@property @property
def base_image(self) -> str: def base_image(self) -> str:
@@ -81,6 +68,13 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
) )
return self._data[ATTR_BUILD_FROM][self.arch] return self._data[ATTR_BUILD_FROM][self.arch]
@property
def dockerfile(self) -> Path:
"""Return Dockerfile path."""
if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
return self.addon.path_location.joinpath("Dockerfile")
@property @property
def squash(self) -> bool: def squash(self) -> bool:
"""Return True or False if squash is active.""" """Return True or False if squash is active."""
@@ -96,40 +90,25 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
"""Return additional Docker labels.""" """Return additional Docker labels."""
return self._data[ATTR_LABELS] return self._data[ATTR_LABELS]
def get_dockerfile(self) -> Path: @property
"""Return Dockerfile path. def is_valid(self) -> bool:
Must be run in executor.
"""
if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
return self.addon.path_location.joinpath("Dockerfile")
async def is_valid(self) -> bool:
"""Return true if the build env is valid.""" """Return true if the build env is valid."""
try:
def build_is_valid() -> bool:
return all( return all(
[ [
self.addon.path_location.is_dir(), self.addon.path_location.is_dir(),
self.get_dockerfile().is_file(), self.dockerfile.is_file(),
] ]
) )
try:
return await self.sys_run_in_executor(build_is_valid)
except HassioArchNotFound: except HassioArchNotFound:
return False return False
def get_docker_args(self, version: AwesomeVersion, image: str | None = None): def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
"""Create a dict with Docker build arguments. """Create a dict with Docker build arguments."""
args = {
Must be run in executor.
"""
args: dict[str, Any] = {
"path": str(self.addon.path_location), "path": str(self.addon.path_location),
"tag": f"{image or self.addon.image}:{version!s}", "tag": f"{image or self.addon.image}:{version!s}",
"dockerfile": str(self.get_dockerfile()), "dockerfile": str(self.dockerfile),
"pull": True, "pull": True,
"forcerm": not self.sys_dev, "forcerm": not self.sys_dev,
"squash": self.squash, "squash": self.squash,


@@ -38,7 +38,7 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
"""Return local add-on data.""" """Return local add-on data."""
return self._data[ATTR_SYSTEM] return self._data[ATTR_SYSTEM]
async def install(self, addon: AddonStore) -> None: def install(self, addon: AddonStore) -> None:
"""Set addon as installed.""" """Set addon as installed."""
self.system[addon.slug] = deepcopy(addon.data) self.system[addon.slug] = deepcopy(addon.data)
self.user[addon.slug] = { self.user[addon.slug] = {
@@ -46,28 +46,26 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
ATTR_VERSION: addon.version, ATTR_VERSION: addon.version,
ATTR_IMAGE: addon.image, ATTR_IMAGE: addon.image,
} }
await self.save_data() self.save_data()
async def uninstall(self, addon: Addon) -> None: def uninstall(self, addon: Addon) -> None:
"""Set add-on as uninstalled.""" """Set add-on as uninstalled."""
self.system.pop(addon.slug, None) self.system.pop(addon.slug, None)
self.user.pop(addon.slug, None) self.user.pop(addon.slug, None)
await self.save_data() self.save_data()
async def update(self, addon: AddonStore) -> None: def update(self, addon: AddonStore) -> None:
"""Update version of add-on.""" """Update version of add-on."""
self.system[addon.slug] = deepcopy(addon.data) self.system[addon.slug] = deepcopy(addon.data)
self.user[addon.slug].update( self.user[addon.slug].update(
{ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image} {ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
) )
await self.save_data() self.save_data()
async def restore( def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
self, slug: str, user: Config, system: Config, image: str
) -> None:
"""Restore data to add-on.""" """Restore data to add-on."""
self.user[slug] = deepcopy(user) self.user[slug] = deepcopy(user)
self.system[slug] = deepcopy(system) self.system[slug] = deepcopy(system)
self.user[slug][ATTR_IMAGE] = image self.user[slug][ATTR_IMAGE] = image
await self.save_data() self.save_data()
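Every mutation in AddonsData ends with save_data(); the change makes that call awaitable because the JSON write is blocking file I/O. A toy sketch of the async variant, with json.dumps standing in for Supervisor's write_json_file helper:

import asyncio
import json
from pathlib import Path
from tempfile import TemporaryDirectory

class FileConfigSketch:
    """Illustrative stand-in for FileConfiguration."""

    def __init__(self, file: Path, data: dict) -> None:
        self._file = file
        self._data = data

    def _write(self) -> None:
        self._file.write_text(json.dumps(self._data))

    async def save_data(self) -> None:
        # Push the blocking write off the event loop.
        await asyncio.get_running_loop().run_in_executor(None, self._write)

with TemporaryDirectory() as tmp:
    asyncio.run(FileConfigSketch(Path(tmp) / "addons.json", {"user": {}}).save_data())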


@@ -5,25 +5,27 @@ from collections.abc import Awaitable
from contextlib import suppress from contextlib import suppress
import logging import logging
import tarfile import tarfile
from typing import Self, Union from typing import Union
from attr import evolve
from ..const import AddonBoot, AddonStartup, AddonState from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import ( from ..exceptions import (
AddonConfigurationError,
AddonsError, AddonsError,
AddonsJobError, AddonsJobError,
AddonsNotSupportedError, AddonsNotSupportedError,
CoreDNSError, CoreDNSError,
DockerAPIError,
DockerError, DockerError,
DockerNotFound,
HassioError, HassioError,
HomeAssistantAPIError, HomeAssistantAPIError,
) )
from ..jobs.decorator import Job, JobCondition from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore from ..store.addon import AddonStore
from ..utils.sentry import async_capture_exception from ..utils import check_exception_chain
from ..utils.sentry import capture_exception
from .addon import Addon from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS from .const import ADDON_UPDATE_CONDITIONS
from .data import AddonsData from .data import AddonsData
@@ -74,11 +76,6 @@ class AddonManager(CoreSysAttributes):
return addon return addon
return None return None
async def load_config(self) -> Self:
"""Load config in executor."""
await self.data.read_data()
return self
async def load(self) -> None: async def load(self) -> None:
"""Start up add-on management.""" """Start up add-on management."""
# Refresh cache for all store addons # Refresh cache for all store addons
@@ -121,14 +118,15 @@ class AddonManager(CoreSysAttributes):
try: try:
if start_task := await addon.start(): if start_task := await addon.start():
wait_boot.append(start_task) wait_boot.append(start_task)
except AddonsError as err:
# Check if there is an system/user issue
if check_exception_chain(
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
):
addon.boot = AddonBoot.MANUAL
addon.save_persist()
except HassioError: except HassioError:
self.sys_resolution.add_issue( pass # These are already handled
evolve(addon.boot_failed_issue),
suggestions=[
SuggestionType.EXECUTE_START,
SuggestionType.DISABLE_BOOT,
],
)
else: else:
continue continue
@@ -137,19 +135,6 @@ class AddonManager(CoreSysAttributes):
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
await asyncio.gather(*wait_boot, return_exceptions=True) await asyncio.gather(*wait_boot, return_exceptions=True)
# After waiting for startup, create an issue for boot addons that are error or unknown state
# Ignore stopped as single shot addons can be run at boot and this is successful exit
# Timeout waiting for startup is not a failure, addon is probably just slow
for addon in tasks:
if addon.state in {AddonState.ERROR, AddonState.UNKNOWN}:
self.sys_resolution.add_issue(
evolve(addon.boot_failed_issue),
suggestions=[
SuggestionType.EXECUTE_START,
SuggestionType.DISABLE_BOOT,
],
)
async def shutdown(self, stage: AddonStartup) -> None: async def shutdown(self, stage: AddonStartup) -> None:
"""Shutdown addons.""" """Shutdown addons."""
tasks: list[Addon] = [] tasks: list[Addon] = []
@@ -170,7 +155,7 @@ class AddonManager(CoreSysAttributes):
await addon.stop() await addon.stop()
except Exception as err: # pylint: disable=broad-except except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err) _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
await async_capture_exception(err) capture_exception(err)
@Job( @Job(
name="addon_manager_install", name="addon_manager_install",
@@ -194,7 +179,6 @@ class AddonManager(CoreSysAttributes):
_LOGGER.info("Add-on '%s' successfully installed", slug) _LOGGER.info("Add-on '%s' successfully installed", slug)
@Job(name="addon_manager_uninstall")
async def uninstall(self, slug: str, *, remove_config: bool = False) -> None: async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
"""Remove an add-on.""" """Remove an add-on."""
if slug not in self.local: if slug not in self.local:
@@ -314,7 +298,7 @@ class AddonManager(CoreSysAttributes):
if slug not in self.local: if slug not in self.local:
_LOGGER.debug("Add-on %s is not local available for restore", slug) _LOGGER.debug("Add-on %s is not local available for restore", slug)
addon = Addon(self.coresys, slug) addon = Addon(self.coresys, slug)
had_ingress: bool | None = False had_ingress = False
else: else:
_LOGGER.debug("Add-on %s is local available for restore", slug) _LOGGER.debug("Add-on %s is local available for restore", slug)
addon = self.local[slug] addon = self.local[slug]
@@ -389,7 +373,7 @@ class AddonManager(CoreSysAttributes):
reference=addon.slug, reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR], suggestions=[SuggestionType.EXECUTE_REPAIR],
) )
await async_capture_exception(err) capture_exception(err)
else: else:
add_host_coros.append( add_host_coros.append(
self.sys_plugins.dns.add_host( self.sys_plugins.dns.add_host(
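Start-up above collects one start task per boot add-on into wait_boot and awaits them with return_exceptions=True, so a single failing add-on cannot abort the others. Reduced to plain asyncio; start() and the slugs are invented:

import asyncio

async def start(slug: str) -> None:
    if slug == "bad":
        raise RuntimeError(f"{slug} failed to boot")

async def boot(slugs: list[str]) -> None:
    wait_boot = [asyncio.create_task(start(slug)) for slug in slugs]
    # Failures come back as values instead of propagating.
    results = await asyncio.gather(*wait_boot, return_exceptions=True)
    for slug, result in zip(slugs, results):
        if isinstance(result, Exception):
            print(f"{slug}: {result}")

asyncio.run(boot(["good", "bad"]))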


@@ -47,7 +47,7 @@ from ..const import (
ATTR_JOURNALD, ATTR_JOURNALD,
ATTR_KERNEL_MODULES, ATTR_KERNEL_MODULES,
ATTR_LEGACY, ATTR_LEGACY,
ATTR_LOCATION, ATTR_LOCATON,
ATTR_MACHINE, ATTR_MACHINE,
ATTR_MAP, ATTR_MAP,
ATTR_NAME, ATTR_NAME,
@@ -210,6 +210,18 @@ class AddonModel(JobGroup, ABC):
"""Return description of add-on.""" """Return description of add-on."""
return self.data[ATTR_DESCRIPTON] return self.data[ATTR_DESCRIPTON]
@property
def long_description(self) -> str | None:
"""Return README.md as long_description."""
readme = Path(self.path_location, "README.md")
# If readme not exists
if not readme.exists():
return None
# Return data
return readme.read_text(encoding="utf-8")
@property @property
def repository(self) -> str: def repository(self) -> str:
"""Return repository of add-on.""" """Return repository of add-on."""
@@ -294,7 +306,7 @@ class AddonModel(JobGroup, ABC):
return self.data.get(ATTR_WEBUI) return self.data.get(ATTR_WEBUI)
@property @property
def watchdog_url(self) -> str | None: def watchdog(self) -> str | None:
"""Return URL to for watchdog or None.""" """Return URL to for watchdog or None."""
return self.data.get(ATTR_WATCHDOG) return self.data.get(ATTR_WATCHDOG)
@@ -569,7 +581,7 @@ class AddonModel(JobGroup, ABC):
@property @property
def path_location(self) -> Path: def path_location(self) -> Path:
"""Return path to this add-on.""" """Return path to this add-on."""
return Path(self.data[ATTR_LOCATION]) return Path(self.data[ATTR_LOCATON])
@property @property
def path_icon(self) -> Path: def path_icon(self) -> Path:
@@ -606,7 +618,7 @@ class AddonModel(JobGroup, ABC):
return AddonOptions(self.coresys, raw_schema, self.name, self.slug) return AddonOptions(self.coresys, raw_schema, self.name, self.slug)
@property @property
def schema_ui(self) -> list[dict[Any, Any]] | None: def schema_ui(self) -> list[dict[any, any]] | None:
"""Create a UI schema for add-on options.""" """Create a UI schema for add-on options."""
raw_schema = self.data[ATTR_SCHEMA] raw_schema = self.data[ATTR_SCHEMA]
@@ -634,21 +646,6 @@ class AddonModel(JobGroup, ABC):
"""Return breaking versions of addon.""" """Return breaking versions of addon."""
return self.data[ATTR_BREAKING_VERSIONS] return self.data[ATTR_BREAKING_VERSIONS]
async def long_description(self) -> str | None:
"""Return README.md as long_description."""
def read_readme() -> str | None:
readme = Path(self.path_location, "README.md")
# If readme not exists
if not readme.exists():
return None
# Return data
return readme.read_text(encoding="utf-8")
return await self.sys_run_in_executor(read_readme)
def refresh_path_cache(self) -> Awaitable[None]: def refresh_path_cache(self) -> Awaitable[None]:
"""Refresh cache of existing paths.""" """Refresh cache of existing paths."""


@@ -137,7 +137,7 @@ class AddonOptions(CoreSysAttributes):
) from None ) from None
# prepare range # prepare range
range_args: dict[str, Any] = {} range_args = {}
for group_name in _SCHEMA_LENGTH_PARTS: for group_name in _SCHEMA_LENGTH_PARTS:
group_value = match.group(group_name) group_value = match.group(group_name)
if group_value: if group_value:
@@ -390,14 +390,14 @@ class UiOptions(CoreSysAttributes):
multiple: bool = False, multiple: bool = False,
) -> None: ) -> None:
"""UI nested dict items.""" """UI nested dict items."""
ui_node: dict[str, Any] = { ui_node = {
"name": key, "name": key,
"type": "schema", "type": "schema",
"optional": True, "optional": True,
"multiple": multiple, "multiple": multiple,
} }
nested_schema: list[dict[str, Any]] = [] nested_schema = []
for c_key, c_value in option_dict.items(): for c_key, c_value in option_dict.items():
# Nested? # Nested?
if isinstance(c_value, list): if isinstance(c_value, list):
@@ -413,7 +413,7 @@ def _create_device_filter(str_filter: str) -> dict[str, Any]:
"""Generate device Filter.""" """Generate device Filter."""
raw_filter = dict(value.split("=") for value in str_filter.split(";")) raw_filter = dict(value.split("=") for value in str_filter.split(";"))
clean_filter: dict[str, Any] = {} clean_filter = {}
for key, value in raw_filter.items(): for key, value in raw_filter.items():
if key == "subsystem": if key == "subsystem":
clean_filter[key] = UdevSubsystem(value) clean_filter[key] = UdevSubsystem(value)
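_create_device_filter turns a "key=value;key=value" string into a dict, coercing known keys to enums. The parse on its own, with the enum coercion reduced to a comment since UdevSubsystem is not imported here:

from typing import Any

def create_device_filter(str_filter: str) -> dict[str, Any]:
    raw_filter = dict(value.split("=") for value in str_filter.split(";"))
    clean_filter: dict[str, Any] = {}
    for key, value in raw_filter.items():
        # The real code maps known keys to enums, e.g. subsystem -> UdevSubsystem.
        clean_filter[key] = value
    return clean_filter

print(create_device_filter("subsystem=tty;dev_path=/dev/ttyUSB0"))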


@@ -2,9 +2,9 @@
from __future__ import annotations from __future__ import annotations
import asyncio
import logging import logging
from pathlib import Path from pathlib import Path
import subprocess
from typing import TYPE_CHECKING from typing import TYPE_CHECKING
from ..const import ROLE_ADMIN, ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE from ..const import ROLE_ADMIN, ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE
@@ -46,7 +46,6 @@ def rating_security(addon: AddonModel) -> int:
privilege in addon.privileged privilege in addon.privileged
for privilege in ( for privilege in (
Capabilities.BPF, Capabilities.BPF,
Capabilities.CHECKPOINT_RESTORE,
Capabilities.DAC_READ_SEARCH, Capabilities.DAC_READ_SEARCH,
Capabilities.NET_ADMIN, Capabilities.NET_ADMIN,
Capabilities.NET_RAW, Capabilities.NET_RAW,
@@ -86,20 +85,18 @@ def rating_security(addon: AddonModel) -> int:
return max(min(8, rating), 1) return max(min(8, rating), 1)
def remove_data(folder: Path) -> None: async def remove_data(folder: Path) -> None:
"""Remove folder and reset privileged. """Remove folder and reset privileged."""
Must be run in executor.
"""
try: try:
subprocess.run( proc = await asyncio.create_subprocess_exec(
["rm", "-rf", str(folder)], stdout=subprocess.DEVNULL, text=True, check=True "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
) )
_, error_msg = await proc.communicate()
except OSError as err: except OSError as err:
error_msg = str(err) error_msg = str(err)
except subprocess.CalledProcessError as procerr:
error_msg = procerr.stderr.strip()
else: else:
return if proc.returncode == 0:
return
_LOGGER.error("Can't remove Add-on Data: %s", error_msg) _LOGGER.error("Can't remove Add-on Data: %s", error_msg)


@@ -55,7 +55,7 @@ from ..const import (
ATTR_KERNEL_MODULES, ATTR_KERNEL_MODULES,
ATTR_LABELS, ATTR_LABELS,
ATTR_LEGACY, ATTR_LEGACY,
ATTR_LOCATION, ATTR_LOCATON,
ATTR_MACHINE, ATTR_MACHINE,
ATTR_MAP, ATTR_MAP,
ATTR_NAME, ATTR_NAME,
@@ -483,7 +483,7 @@ SCHEMA_ADDON_SYSTEM = vol.All(
_migrate_addon_config(), _migrate_addon_config(),
_SCHEMA_ADDON_CONFIG.extend( _SCHEMA_ADDON_CONFIG.extend(
{ {
vol.Required(ATTR_LOCATION): str, vol.Required(ATTR_LOCATON): str,
vol.Required(ATTR_REPOSITORY): str, vol.Required(ATTR_REPOSITORY): str,
vol.Required(ATTR_TRANSLATIONS, default=dict): { vol.Required(ATTR_TRANSLATIONS, default=dict): {
str: SCHEMA_ADDON_TRANSLATIONS str: SCHEMA_ADDON_TRANSLATIONS
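SCHEMA_ADDON_SYSTEM is built by extending the base add-on schema with keys only the Supervisor writes, such as the (historically misspelled) location constant. The voluptuous extension pattern in isolation, with simplified keys:

import voluptuous as vol

SCHEMA_ADDON_CONFIG = vol.Schema({vol.Required("name"): str}, extra=vol.ALLOW_EXTRA)

SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend(
    {
        vol.Required("location"): str,
        vol.Required("repository"): str,
    }
)

print(SCHEMA_ADDON_SYSTEM({"name": "example", "location": "/addons/x", "repository": "local"}))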


@@ -1,17 +1,16 @@
"""Init file for Supervisor RESTful API.""" """Init file for Supervisor RESTful API."""
from dataclasses import dataclass
from functools import partial from functools import partial
import logging import logging
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any
from aiohttp import hdrs, web from aiohttp import web
from ..const import AddonState from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
from ..utils.sentry import async_capture_exception from ..utils.sentry import capture_exception
from .addons import APIAddons from .addons import APIAddons
from .audio import APIAudio from .audio import APIAudio
from .auth import APIAuth from .auth import APIAuth
@@ -48,14 +47,6 @@ MAX_CLIENT_SIZE: int = 1024**2 * 16
MAX_LINE_SIZE: int = 24570 MAX_LINE_SIZE: int = 24570
@dataclass(slots=True, frozen=True)
class StaticResourceConfig:
"""Configuration for a static resource."""
prefix: str
path: Path
class RestAPI(CoreSysAttributes): class RestAPI(CoreSysAttributes):
"""Handle RESTful API for Supervisor.""" """Handle RESTful API for Supervisor."""
@@ -82,12 +73,12 @@ class RestAPI(CoreSysAttributes):
self._site: web.TCPSite | None = None self._site: web.TCPSite | None = None
# share single host API handler for reuse in logging endpoints # share single host API handler for reuse in logging endpoints
self._api_host: APIHost = APIHost() self._api_host: APIHost | None = None
self._api_host.coresys = coresys
async def load(self) -> None: async def load(self) -> None:
"""Register REST API Calls.""" """Register REST API Calls."""
static_resource_configs: list[StaticResourceConfig] = [] self._api_host = APIHost()
self._api_host.coresys = self.coresys
self._register_addons() self._register_addons()
self._register_audio() self._register_audio()
@@ -107,7 +98,7 @@ class RestAPI(CoreSysAttributes):
self._register_network() self._register_network()
self._register_observer() self._register_observer()
self._register_os() self._register_os()
static_resource_configs.extend(self._register_panel()) self._register_panel()
self._register_proxy() self._register_proxy()
self._register_resolution() self._register_resolution()
self._register_root() self._register_root()
@@ -116,17 +107,6 @@ class RestAPI(CoreSysAttributes):
self._register_store() self._register_store()
self._register_supervisor() self._register_supervisor()
if static_resource_configs:
def process_configs() -> list[web.StaticResource]:
return [
web.StaticResource(config.prefix, config.path)
for config in static_resource_configs
]
for resource in await self.sys_run_in_executor(process_configs):
self.webapp.router.register_resource(resource)
await self.start() await self.start()
def _register_advanced_logs(self, path: str, syslog_identifier: str): def _register_advanced_logs(self, path: str, syslog_identifier: str):
@@ -237,8 +217,6 @@ class RestAPI(CoreSysAttributes):
[ [
web.get("/os/info", api_os.info), web.get("/os/info", api_os.info),
web.post("/os/update", api_os.update), web.post("/os/update", api_os.update),
web.get("/os/config/swap", api_os.config_swap_info),
web.post("/os/config/swap", api_os.config_swap_options),
web.post("/os/config/sync", api_os.config_sync), web.post("/os/config/sync", api_os.config_sync),
web.post("/os/datadisk/move", api_os.migrate_data), web.post("/os/datadisk/move", api_os.migrate_data),
web.get("/os/datadisk/list", api_os.list_data), web.get("/os/datadisk/list", api_os.list_data),
@@ -345,9 +323,6 @@ class RestAPI(CoreSysAttributes):
api_root.coresys = self.coresys api_root.coresys = self.coresys
self.webapp.add_routes([web.get("/info", api_root.info)]) self.webapp.add_routes([web.get("/info", api_root.info)])
self.webapp.add_routes([web.post("/reload_updates", api_root.reload_updates)])
# Discouraged
self.webapp.add_routes([web.post("/refresh_updates", api_root.refresh_updates)]) self.webapp.add_routes([web.post("/refresh_updates", api_root.refresh_updates)])
self.webapp.add_routes( self.webapp.add_routes(
[web.get("/available_updates", api_root.available_updates)] [web.get("/available_updates", api_root.available_updates)]
@@ -437,8 +412,7 @@ class RestAPI(CoreSysAttributes):
if not isinstance(err, HostNotSupportedError): if not isinstance(err, HostNotSupportedError):
# No need to capture HostNotSupportedError to Sentry, the cause # No need to capture HostNotSupportedError to Sentry, the cause
# is known and reported to the user using the resolution center. # is known and reported to the user using the resolution center.
await async_capture_exception(err) capture_exception(err)
kwargs.pop("follow", None) # Follow is not supported for Docker logs
return await api_supervisor.logs(*args, **kwargs) return await api_supervisor.logs(*args, **kwargs)
self.webapp.add_routes( self.webapp.add_routes(
@@ -529,7 +503,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/addons", api_addons.list_addons), web.get("/addons", api_addons.list),
web.post("/addons/{addon}/uninstall", api_addons.uninstall), web.post("/addons/{addon}/uninstall", api_addons.uninstall),
web.post("/addons/{addon}/start", api_addons.start), web.post("/addons/{addon}/start", api_addons.start),
web.post("/addons/{addon}/stop", api_addons.stop), web.post("/addons/{addon}/stop", api_addons.stop),
@@ -597,9 +571,7 @@ class RestAPI(CoreSysAttributes):
web.post("/ingress/session", api_ingress.create_session), web.post("/ingress/session", api_ingress.create_session),
web.post("/ingress/validate_session", api_ingress.validate_session), web.post("/ingress/validate_session", api_ingress.validate_session),
web.get("/ingress/panels", api_ingress.panels), web.get("/ingress/panels", api_ingress.panels),
web.route( web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
hdrs.METH_ANY, "/ingress/{token}/{path:.*}", api_ingress.handler
),
] ]
) )
@@ -610,7 +582,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/backups", api_backups.list_backups), web.get("/backups", api_backups.list),
web.get("/backups/info", api_backups.info), web.get("/backups/info", api_backups.info),
web.post("/backups/options", api_backups.options), web.post("/backups/options", api_backups.options),
web.post("/backups/reload", api_backups.reload), web.post("/backups/reload", api_backups.reload),
@@ -637,7 +609,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/services", api_services.list_services), web.get("/services", api_services.list),
web.get("/services/{service}", api_services.get_service), web.get("/services/{service}", api_services.get_service),
web.post("/services/{service}", api_services.set_service), web.post("/services/{service}", api_services.set_service),
web.delete("/services/{service}", api_services.del_service), web.delete("/services/{service}", api_services.del_service),
@@ -651,7 +623,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/discovery", api_discovery.list_discovery), web.get("/discovery", api_discovery.list),
web.get("/discovery/{uuid}", api_discovery.get_discovery), web.get("/discovery/{uuid}", api_discovery.get_discovery),
web.delete("/discovery/{uuid}", api_discovery.del_discovery), web.delete("/discovery/{uuid}", api_discovery.del_discovery),
web.post("/discovery", api_discovery.set_discovery), web.post("/discovery", api_discovery.set_discovery),
@@ -777,9 +749,10 @@ class RestAPI(CoreSysAttributes):
] ]
) )
def _register_panel(self) -> list[StaticResourceConfig]: def _register_panel(self) -> None:
"""Register panel for Home Assistant.""" """Register panel for Home Assistant."""
return [StaticResourceConfig("/app", Path(__file__).parent.joinpath("panel"))] panel_dir = Path(__file__).parent.joinpath("panel")
self.webapp.add_routes([web.static("/app", panel_dir)])
def _register_docker(self) -> None: def _register_docker(self) -> None:
"""Register docker configuration functions.""" """Register docker configuration functions."""


@@ -3,13 +3,14 @@
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
import logging import logging
from typing import Any, TypedDict from typing import Any
from aiohttp import web from aiohttp import web
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error from voluptuous.humanize import humanize_error
from ..addons.addon import Addon from ..addons.addon import Addon
from ..addons.manager import AnyAddon
from ..addons.utils import rating_security from ..addons.utils import rating_security
from ..const import ( from ..const import (
ATTR_ADDONS, ATTR_ADDONS,
@@ -62,6 +63,7 @@ from ..const import (
ATTR_MEMORY_LIMIT, ATTR_MEMORY_LIMIT,
ATTR_MEMORY_PERCENT, ATTR_MEMORY_PERCENT,
ATTR_MEMORY_USAGE, ATTR_MEMORY_USAGE,
ATTR_MESSAGE,
ATTR_NAME, ATTR_NAME,
ATTR_NETWORK, ATTR_NETWORK,
ATTR_NETWORK_DESCRIPTION, ATTR_NETWORK_DESCRIPTION,
@@ -70,6 +72,7 @@ from ..const import (
ATTR_OPTIONS, ATTR_OPTIONS,
ATTR_PRIVILEGED, ATTR_PRIVILEGED,
ATTR_PROTECTED, ATTR_PROTECTED,
ATTR_PWNED,
ATTR_RATING, ATTR_RATING,
ATTR_REPOSITORY, ATTR_REPOSITORY,
ATTR_SCHEMA, ATTR_SCHEMA,
@@ -87,6 +90,7 @@ from ..const import (
ATTR_UPDATE_AVAILABLE, ATTR_UPDATE_AVAILABLE,
ATTR_URL, ATTR_URL,
ATTR_USB, ATTR_USB,
ATTR_VALID,
ATTR_VERSION, ATTR_VERSION,
ATTR_VERSION_LATEST, ATTR_VERSION_LATEST,
ATTR_VIDEO, ATTR_VIDEO,
@@ -102,7 +106,6 @@ from ..exceptions import (
APIAddonNotInstalled, APIAddonNotInstalled,
APIError, APIError,
APIForbidden, APIForbidden,
APINotFound,
PwnedError, PwnedError,
PwnedSecret, PwnedSecret,
) )
@@ -142,20 +145,12 @@ SCHEMA_UNINSTALL = vol.Schema(
# pylint: enable=no-value-for-parameter # pylint: enable=no-value-for-parameter
class OptionsValidateResponse(TypedDict):
"""Response object for options validate."""
message: str
valid: bool
pwned: bool | None
class APIAddons(CoreSysAttributes): class APIAddons(CoreSysAttributes):
"""Handle RESTful API for add-on functions.""" """Handle RESTful API for add-on functions."""
def get_addon_for_request(self, request: web.Request) -> Addon: def get_addon_for_request(self, request: web.Request) -> Addon:
"""Return addon, throw an exception if it doesn't exist.""" """Return addon, throw an exception if it doesn't exist."""
addon_slug: str = request.match_info["addon"] addon_slug: str = request.match_info.get("addon")
# Lookup itself # Lookup itself
if addon_slug == "self": if addon_slug == "self":
@@ -166,14 +161,14 @@ class APIAddons(CoreSysAttributes):
addon = self.sys_addons.get(addon_slug) addon = self.sys_addons.get(addon_slug)
if not addon: if not addon:
raise APINotFound(f"Addon {addon_slug} does not exist") raise APIError(f"Addon {addon_slug} does not exist")
if not isinstance(addon, Addon) or not addon.is_installed: if not isinstance(addon, Addon) or not addon.is_installed:
raise APIAddonNotInstalled("Addon is not installed") raise APIAddonNotInstalled("Addon is not installed")
return addon return addon
@api_process @api_process
async def list_addons(self, request: web.Request) -> dict[str, Any]: async def list(self, request: web.Request) -> dict[str, Any]:
"""Return all add-ons or repositories.""" """Return all add-ons or repositories."""
data_addons = [ data_addons = [
{ {
@@ -208,7 +203,7 @@ class APIAddons(CoreSysAttributes):
async def info(self, request: web.Request) -> dict[str, Any]: async def info(self, request: web.Request) -> dict[str, Any]:
"""Return add-on information.""" """Return add-on information."""
addon: Addon = self.get_addon_for_request(request) addon: AnyAddon = self.get_addon_for_request(request)
data = { data = {
ATTR_NAME: addon.name, ATTR_NAME: addon.name,
@@ -216,7 +211,7 @@ class APIAddons(CoreSysAttributes):
ATTR_HOSTNAME: addon.hostname, ATTR_HOSTNAME: addon.hostname,
ATTR_DNS: addon.dns, ATTR_DNS: addon.dns,
ATTR_DESCRIPTON: addon.description, ATTR_DESCRIPTON: addon.description,
ATTR_LONG_DESCRIPTION: await addon.long_description(), ATTR_LONG_DESCRIPTION: addon.long_description,
ATTR_ADVANCED: addon.advanced, ATTR_ADVANCED: addon.advanced,
ATTR_STAGE: addon.stage, ATTR_STAGE: addon.stage,
ATTR_REPOSITORY: addon.repository, ATTR_REPOSITORY: addon.repository,
@@ -326,7 +321,7 @@ class APIAddons(CoreSysAttributes):
if ATTR_WATCHDOG in body: if ATTR_WATCHDOG in body:
addon.watchdog = body[ATTR_WATCHDOG] addon.watchdog = body[ATTR_WATCHDOG]
await addon.save_persist() addon.save_persist()
@api_process @api_process
async def sys_options(self, request: web.Request) -> None: async def sys_options(self, request: web.Request) -> None:
@@ -340,13 +335,13 @@ class APIAddons(CoreSysAttributes):
if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body: if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY] addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
await addon.save_persist() addon.save_persist()
@api_process @api_process
async def options_validate(self, request: web.Request) -> OptionsValidateResponse: async def options_validate(self, request: web.Request) -> None:
"""Validate user options for add-on.""" """Validate user options for add-on."""
addon = self.get_addon_for_request(request) addon = self.get_addon_for_request(request)
data = OptionsValidateResponse(message="", valid=True, pwned=False) data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}
options = await request.json(loads=json_loads) or addon.options options = await request.json(loads=json_loads) or addon.options
@@ -355,8 +350,8 @@ class APIAddons(CoreSysAttributes):
try: try:
options_schema.validate(options) options_schema.validate(options)
except vol.Invalid as ex: except vol.Invalid as ex:
data["message"] = humanize_error(options, ex) data[ATTR_MESSAGE] = humanize_error(options, ex)
data["valid"] = False data[ATTR_VALID] = False
if not self.sys_security.pwned: if not self.sys_security.pwned:
return data return data
@@ -367,24 +362,24 @@ class APIAddons(CoreSysAttributes):
await self.sys_security.verify_secret(secret) await self.sys_security.verify_secret(secret)
continue continue
except PwnedSecret: except PwnedSecret:
data["pwned"] = True data[ATTR_PWNED] = True
except PwnedError: except PwnedError:
data["pwned"] = None data[ATTR_PWNED] = None
break break
if self.sys_security.force and data["pwned"] in (None, True): if self.sys_security.force and data[ATTR_PWNED] in (None, True):
data["valid"] = False data[ATTR_VALID] = False
if data["pwned"] is None: if data[ATTR_PWNED] is None:
data["message"] = "Error happening on pwned secrets check!" data[ATTR_MESSAGE] = "Error happening on pwned secrets check!"
else: else:
data["message"] = "Add-on uses pwned secrets!" data[ATTR_MESSAGE] = "Add-on uses pwned secrets!"
return data return data
@api_process @api_process
async def options_config(self, request: web.Request) -> None: async def options_config(self, request: web.Request) -> None:
"""Validate user options for add-on.""" """Validate user options for add-on."""
slug: str = request.match_info["addon"] slug: str = request.match_info.get("addon")
if slug != "self": if slug != "self":
raise APIForbidden("This can be only read by the Add-on itself!") raise APIForbidden("This can be only read by the Add-on itself!")
addon = self.get_addon_for_request(request) addon = self.get_addon_for_request(request)
@@ -406,7 +401,7 @@ class APIAddons(CoreSysAttributes):
_LOGGER.warning("Changing protected flag for %s!", addon.slug) _LOGGER.warning("Changing protected flag for %s!", addon.slug)
addon.protected = body[ATTR_PROTECTED] addon.protected = body[ATTR_PROTECTED]
await addon.save_persist() addon.save_persist()
@api_process @api_process
async def stats(self, request: web.Request) -> dict[str, Any]: async def stats(self, request: web.Request) -> dict[str, Any]:
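OptionsValidateResponse replaces loose ATTR_* dictionary keys with a TypedDict, giving the validate payload a checked shape. Restated standalone with a trivial caller; options_ok is an invented stand-in for the schema check:

from typing import TypedDict

class OptionsValidateResponse(TypedDict):
    """Response object for options validate."""

    message: str
    valid: bool
    pwned: bool | None

def options_validate(options_ok: bool) -> OptionsValidateResponse:
    data = OptionsValidateResponse(message="", valid=True, pwned=False)
    if not options_ok:
        data["message"] = "Invalid options"
        data["valid"] = False
    return data

print(options_validate(False))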


@@ -124,7 +124,7 @@ class APIAudio(CoreSysAttributes):
@api_process @api_process
async def set_volume(self, request: web.Request) -> None: async def set_volume(self, request: web.Request) -> None:
"""Set audio volume on stream.""" """Set audio volume on stream."""
source: StreamType = StreamType(request.match_info["source"]) source: StreamType = StreamType(request.match_info.get("source"))
application: bool = request.path.endswith("application") application: bool = request.path.endswith("application")
body = await api_validate(SCHEMA_VOLUME, request) body = await api_validate(SCHEMA_VOLUME, request)
@@ -137,7 +137,7 @@ class APIAudio(CoreSysAttributes):
@api_process @api_process
async def set_mute(self, request: web.Request) -> None: async def set_mute(self, request: web.Request) -> None:
"""Mute audio volume on stream.""" """Mute audio volume on stream."""
source: StreamType = StreamType(request.match_info["source"]) source: StreamType = StreamType(request.match_info.get("source"))
application: bool = request.path.endswith("application") application: bool = request.path.endswith("application")
body = await api_validate(SCHEMA_MUTE, request) body = await api_validate(SCHEMA_MUTE, request)
@@ -150,7 +150,7 @@ class APIAudio(CoreSysAttributes):
@api_process @api_process
async def set_default(self, request: web.Request) -> None: async def set_default(self, request: web.Request) -> None:
"""Set audio default stream.""" """Set audio default stream."""
source: StreamType = StreamType(request.match_info["source"]) source: StreamType = StreamType(request.match_info.get("source"))
body = await api_validate(SCHEMA_DEFAULT, request) body = await api_validate(SCHEMA_DEFAULT, request)
await asyncio.shield(self.sys_host.sound.set_default(source, body[ATTR_NAME])) await asyncio.shield(self.sys_host.sound.set_default(source, body[ATTR_NAME]))
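The audio handlers switch request.match_info.get("source") to direct indexing: the route pattern guarantees the key exists, and indexing types the value as str rather than str | None. The same idea against a plain dict; StreamType here is a mock of Supervisor's enum:

from enum import Enum

class StreamType(str, Enum):
    INPUT = "input"
    OUTPUT = "output"

match_info = {"source": "input"}  # stand-in for aiohttp's request.match_info
# Indexing raises KeyError on a routing bug instead of passing None along.
source = StreamType(match_info["source"])
print(source)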


@@ -1,7 +1,6 @@
"""Init file for Supervisor auth/SSO RESTful API.""" """Init file for Supervisor auth/SSO RESTful API."""
import asyncio import asyncio
from collections.abc import Awaitable
import logging import logging
from typing import Any from typing import Any
@@ -43,7 +42,7 @@ REALM_HEADER: dict[str, str] = {
class APIAuth(CoreSysAttributes): class APIAuth(CoreSysAttributes):
"""Handle RESTful API for auth functions.""" """Handle RESTful API for auth functions."""
def _process_basic(self, request: web.Request, addon: Addon) -> Awaitable[bool]: def _process_basic(self, request: web.Request, addon: Addon) -> bool:
"""Process login request with basic auth. """Process login request with basic auth.
Return a coroutine. Return a coroutine.
@@ -53,7 +52,7 @@ class APIAuth(CoreSysAttributes):
def _process_dict( def _process_dict(
self, request: web.Request, addon: Addon, data: dict[str, str] self, request: web.Request, addon: Addon, data: dict[str, str]
) -> Awaitable[bool]: ) -> bool:
"""Process login with dict data. """Process login with dict data.
Return a coroutine. Return a coroutine.
@@ -100,7 +99,7 @@ class APIAuth(CoreSysAttributes):
@api_process @api_process
async def cache(self, request: web.Request) -> None: async def cache(self, request: web.Request) -> None:
"""Process cache reset request.""" """Process cache reset request."""
await self.sys_auth.reset_data() self.sys_auth.reset_data()
@api_process @api_process
async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]: async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:


@@ -1,24 +1,19 @@
"""Backups RESTful API.""" """Backups RESTful API."""
from __future__ import annotations
import asyncio import asyncio
from collections.abc import Callable from collections.abc import Callable
import errno import errno
from io import IOBase
import logging import logging
from pathlib import Path from pathlib import Path
import re import re
from tempfile import TemporaryDirectory from tempfile import TemporaryDirectory
from typing import Any, cast from typing import Any
from aiohttp import BodyPartReader, web from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..backups.backup import Backup from ..backups.backup import Backup
from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE
from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
from ..const import ( from ..const import (
ATTR_ADDONS, ATTR_ADDONS,
@@ -27,81 +22,44 @@ from ..const import (
ATTR_CONTENT, ATTR_CONTENT,
ATTR_DATE, ATTR_DATE,
ATTR_DAYS_UNTIL_STALE, ATTR_DAYS_UNTIL_STALE,
ATTR_EXTRA,
ATTR_FILENAME,
ATTR_FOLDERS, ATTR_FOLDERS,
ATTR_HOMEASSISTANT, ATTR_HOMEASSISTANT,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE, ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
ATTR_JOB_ID, ATTR_LOCATON,
ATTR_LOCATION,
ATTR_NAME, ATTR_NAME,
ATTR_PASSWORD, ATTR_PASSWORD,
ATTR_PROTECTED, ATTR_PROTECTED,
ATTR_REPOSITORIES, ATTR_REPOSITORIES,
ATTR_SIZE, ATTR_SIZE,
ATTR_SIZE_BYTES,
ATTR_SLUG, ATTR_SLUG,
ATTR_SUPERVISOR_VERSION, ATTR_SUPERVISOR_VERSION,
ATTR_TIMEOUT, ATTR_TIMEOUT,
ATTR_TYPE, ATTR_TYPE,
ATTR_VERSION, ATTR_VERSION,
REQUEST_FROM,
BusEvent, BusEvent,
CoreState, CoreState,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden, APINotFound from ..exceptions import APIError
from ..jobs import JobSchedulerOptions, SupervisorJob from ..jobs import JobSchedulerOptions
from ..mounts.const import MountUsage from ..mounts.const import MountUsage
from ..mounts.mount import Mount
from ..resolution.const import UnhealthyReason from ..resolution.const import UnhealthyReason
from .const import ( from .const import ATTR_BACKGROUND, ATTR_JOB_ID, CONTENT_TYPE_TAR
ATTR_ADDITIONAL_LOCATIONS,
ATTR_BACKGROUND,
ATTR_LOCATION_ATTRIBUTES,
ATTR_LOCATIONS,
CONTENT_TYPE_TAR,
)
from .utils import api_process, api_validate from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
ALL_ADDONS_FLAG = "ALL"
LOCATION_LOCAL = ".local"
RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+") RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
RE_BACKUP_FILENAME = re.compile(r"^[^\\\/]+\.tar$")
# Backwards compatible # Backwards compatible
# Remove: 2022.08 # Remove: 2022.08
_ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT] _ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]
def _ensure_list(item: Any) -> list:
"""Ensure value is a list."""
if not isinstance(item, list):
return [item]
return item
def _convert_local_location(item: str | None) -> str | None:
"""Convert local location value."""
if item in {LOCATION_LOCAL, ""}:
return None
return item
# pylint: disable=no-value-for-parameter # pylint: disable=no-value-for-parameter
SCHEMA_FOLDERS = vol.All([vol.In(_ALL_FOLDERS)], vol.Unique())
SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())
SCHEMA_RESTORE_FULL = vol.Schema( SCHEMA_RESTORE_FULL = vol.Schema(
{ {
vol.Optional(ATTR_PASSWORD): vol.Maybe(str), vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(), vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION,
} }
) )
@@ -109,36 +67,40 @@ SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
{ {
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(), vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()), vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS, vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
} }
) )
SCHEMA_BACKUP_FULL = vol.Schema( SCHEMA_BACKUP_FULL = vol.Schema(
{ {
vol.Optional(ATTR_NAME): str, vol.Optional(ATTR_NAME): str,
vol.Optional(ATTR_FILENAME): vol.Match(RE_BACKUP_FILENAME),
vol.Optional(ATTR_PASSWORD): vol.Maybe(str), vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()), vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST, vol.Optional(ATTR_LOCATON): vol.Maybe(str),
vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(), vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(), vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
vol.Optional(ATTR_EXTRA): dict,
} }
) )
SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend( SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
{ {
vol.Optional(ATTR_ADDONS): vol.Or( vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
ALL_ADDONS_FLAG, vol.All([str], vol.Unique()) vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
),
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(), vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
} }
) )
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale}) SCHEMA_OPTIONS = vol.Schema(
SCHEMA_FREEZE = vol.Schema({vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1))}) {
SCHEMA_REMOVE = vol.Schema({vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST}) vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale,
}
)
SCHEMA_FREEZE = vol.Schema(
{
vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)),
}
)
class APIBackups(CoreSysAttributes): class APIBackups(CoreSysAttributes):
@@ -148,19 +110,9 @@ class APIBackups(CoreSysAttributes):
"""Return backup, throw an exception if it doesn't exist.""" """Return backup, throw an exception if it doesn't exist."""
backup = self.sys_backups.get(request.match_info.get("slug")) backup = self.sys_backups.get(request.match_info.get("slug"))
if not backup: if not backup:
raise APINotFound("Backup does not exist") raise APIError("Backup does not exist")
return backup return backup
def _make_location_attributes(self, backup: Backup) -> dict[str, dict[str, Any]]:
"""Make location attributes dictionary."""
return {
loc if loc else LOCATION_LOCAL: {
ATTR_PROTECTED: backup.all_locations[loc].protected,
ATTR_SIZE_BYTES: backup.all_locations[loc].size_bytes,
}
for loc in backup.locations
}
def _list_backups(self): def _list_backups(self):
"""Return list of backups.""" """Return list of backups."""
return [ return [
@@ -170,11 +122,8 @@ class APIBackups(CoreSysAttributes):
ATTR_DATE: backup.date, ATTR_DATE: backup.date,
ATTR_TYPE: backup.sys_type, ATTR_TYPE: backup.sys_type,
ATTR_SIZE: backup.size, ATTR_SIZE: backup.size,
ATTR_SIZE_BYTES: backup.size_bytes, ATTR_LOCATON: backup.location,
ATTR_LOCATION: backup.location,
ATTR_LOCATIONS: backup.locations,
ATTR_PROTECTED: backup.protected, ATTR_PROTECTED: backup.protected,
ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
ATTR_COMPRESSED: backup.compressed, ATTR_COMPRESSED: backup.compressed,
ATTR_CONTENT: { ATTR_CONTENT: {
ATTR_HOMEASSISTANT: backup.homeassistant_version is not None, ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
@@ -183,11 +132,10 @@ class APIBackups(CoreSysAttributes):
}, },
} }
for backup in self.sys_backups.list_backups for backup in self.sys_backups.list_backups
if backup.location != LOCATION_CLOUD_BACKUP
] ]
@api_process @api_process
async def list_backups(self, request): async def list(self, request):
"""Return backup list.""" """Return backup list."""
data_backups = self._list_backups() data_backups = self._list_backups()
@@ -213,7 +161,7 @@ class APIBackups(CoreSysAttributes):
if ATTR_DAYS_UNTIL_STALE in body: if ATTR_DAYS_UNTIL_STALE in body:
self.sys_backups.days_until_stale = body[ATTR_DAYS_UNTIL_STALE] self.sys_backups.days_until_stale = body[ATTR_DAYS_UNTIL_STALE]
await self.sys_backups.save_data() self.sys_backups.save_data()
@api_process @api_process
async def reload(self, _): async def reload(self, _):
@@ -243,63 +191,37 @@ class APIBackups(CoreSysAttributes):
ATTR_NAME: backup.name, ATTR_NAME: backup.name,
ATTR_DATE: backup.date, ATTR_DATE: backup.date,
ATTR_SIZE: backup.size, ATTR_SIZE: backup.size,
ATTR_SIZE_BYTES: backup.size_bytes,
ATTR_COMPRESSED: backup.compressed, ATTR_COMPRESSED: backup.compressed,
ATTR_PROTECTED: backup.protected, ATTR_PROTECTED: backup.protected,
ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
ATTR_SUPERVISOR_VERSION: backup.supervisor_version, ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
ATTR_HOMEASSISTANT: backup.homeassistant_version, ATTR_HOMEASSISTANT: backup.homeassistant_version,
ATTR_LOCATION: backup.location, ATTR_LOCATON: backup.location,
ATTR_LOCATIONS: backup.locations,
ATTR_ADDONS: data_addons, ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: backup.repositories, ATTR_REPOSITORIES: backup.repositories,
ATTR_FOLDERS: backup.folders, ATTR_FOLDERS: backup.folders,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database, ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
ATTR_EXTRA: backup.extra,
} }
def _location_to_mount(self, location: str | None) -> LOCATION_TYPE: def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
"""Convert a single location to a mount if possible."""
if not location or location == LOCATION_CLOUD_BACKUP:
return cast(LOCATION_TYPE, location)
mount = self.sys_mounts.get(location)
if mount.usage != MountUsage.BACKUP:
raise APIError(
f"Mount {mount.name} is not used for backups, cannot backup to there"
)
return mount
def _location_field_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
"""Change location field to mount if necessary.""" """Change location field to mount if necessary."""
body[ATTR_LOCATION] = self._location_to_mount(body.get(ATTR_LOCATION)) if not body.get(ATTR_LOCATON):
return body return body
def _validate_cloud_backup_location( body[ATTR_LOCATON] = self.sys_mounts.get(body[ATTR_LOCATON])
self, request: web.Request, location: list[str | None] | str | None if body[ATTR_LOCATON].usage != MountUsage.BACKUP:
) -> None: raise APIError(
"""Cloud backup location is only available to Home Assistant.""" f"Mount {body[ATTR_LOCATON].name} is not used for backups, cannot backup to there"
if not isinstance(location, list):
location = [location]
if (
LOCATION_CLOUD_BACKUP in location
and request.get(REQUEST_FROM) != self.sys_homeassistant
):
raise APIForbidden(
f"Location {LOCATION_CLOUD_BACKUP} is only available for Home Assistant"
) )
return body
async def _background_backup_task( async def _background_backup_task(
self, backup_method: Callable, *args, **kwargs self, backup_method: Callable, *args, **kwargs
) -> tuple[asyncio.Task, str]: ) -> tuple[asyncio.Task, str]:
"""Start backup task in background and return task and job ID.""" """Start backup task in background and return task and job ID."""
event = asyncio.Event() event = asyncio.Event()
job, backup_task = cast( job, backup_task = self.sys_jobs.schedule_job(
tuple[SupervisorJob, asyncio.Task], backup_method, JobSchedulerOptions(), *args, **kwargs
self.sys_jobs.schedule_job(
backup_method, JobSchedulerOptions(), *args, **kwargs
),
) )
async def release_on_freeze(new_state: CoreState): async def release_on_freeze(new_state: CoreState):
@@ -312,39 +234,24 @@ class APIBackups(CoreSysAttributes):
BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
) )
try: try:
event_task = self.sys_create_task(event.wait()) await asyncio.wait(
_, pending = await asyncio.wait( (
(backup_task, event_task), backup_task,
self.sys_create_task(event.wait()),
),
return_when=asyncio.FIRST_COMPLETED, return_when=asyncio.FIRST_COMPLETED,
) )
# It seems backup returned early (error or something), make sure to cancel
# the event task to avoid "Task was destroyed but it is pending!" errors.
if event_task in pending:
event_task.cancel()
return (backup_task, job.uuid) return (backup_task, job.uuid)
finally: finally:
self.sys_bus.remove_listener(listener) self.sys_bus.remove_listener(listener)
@api_process @api_process
async def backup_full(self, request: web.Request): async def backup_full(self, request):
"""Create full backup.""" """Create full backup."""
body = await api_validate(SCHEMA_BACKUP_FULL, request) body = await api_validate(SCHEMA_BACKUP_FULL, request)
locations: list[LOCATION_TYPE] | None = None
if ATTR_LOCATION in body:
location_names: list[str | None] = body.pop(ATTR_LOCATION)
self._validate_cloud_backup_location(request, location_names)
locations = [
self._location_to_mount(location) for location in location_names
]
body[ATTR_LOCATION] = locations.pop(0)
if locations:
body[ATTR_ADDITIONAL_LOCATIONS] = locations
background = body.pop(ATTR_BACKGROUND) background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task( backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_full, **body self.sys_backups.do_backup_full, **self._location_to_mount(body)
) )
if background and not backup_task.done(): if background and not backup_task.done():
@@ -359,28 +266,12 @@ class APIBackups(CoreSysAttributes):
) )
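In the multi-location variant of `backup_full` above, a `location` list from the request is fanned out into one primary target plus `additional_locations`. The split itself is plain list handling, roughly as follows (a sketch, not the Supervisor code):

def split_locations(body: dict, locations: list) -> dict:
    # First entry becomes the main backup location ...
    body["location"] = locations.pop(0)
    # ... and whatever remains is stored as extra copies to write.
    if locations:
        body["additional_locations"] = locations
    return body

print(split_locations({}, ["nas", ".cloud_backup"]))
# {'location': 'nas', 'additional_locations': ['.cloud_backup']}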
@api_process @api_process
async def backup_partial(self, request: web.Request): async def backup_partial(self, request):
"""Create a partial backup.""" """Create a partial backup."""
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request) body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
locations: list[LOCATION_TYPE] | None = None
if ATTR_LOCATION in body:
location_names: list[str | None] = body.pop(ATTR_LOCATION)
self._validate_cloud_backup_location(request, location_names)
locations = [
self._location_to_mount(location) for location in location_names
]
body[ATTR_LOCATION] = locations.pop(0)
if locations:
body[ATTR_ADDITIONAL_LOCATIONS] = locations
if body.get(ATTR_ADDONS) == ALL_ADDONS_FLAG:
body[ATTR_ADDONS] = list(self.sys_addons.local)
background = body.pop(ATTR_BACKGROUND) background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task( backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_partial, **body self.sys_backups.do_backup_partial, **self._location_to_mount(body)
) )
if background and not backup_task.done(): if background and not backup_task.done():
@@ -395,13 +286,10 @@ class APIBackups(CoreSysAttributes):
) )
@api_process @api_process
async def restore_full(self, request: web.Request): async def restore_full(self, request):
"""Full restore of a backup.""" """Full restore of a backup."""
backup = self._extract_slug(request) backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_FULL, request) body = await api_validate(SCHEMA_RESTORE_FULL, request)
self._validate_cloud_backup_location(
request, body.get(ATTR_LOCATION, backup.location)
)
background = body.pop(ATTR_BACKGROUND) background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task( restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_full, backup, **body self.sys_backups.do_restore_full, backup, **body
@@ -415,13 +303,10 @@ class APIBackups(CoreSysAttributes):
) )
@api_process @api_process
async def restore_partial(self, request: web.Request): async def restore_partial(self, request):
"""Partial restore a backup.""" """Partial restore a backup."""
backup = self._extract_slug(request) backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request) body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
self._validate_cloud_backup_location(
request, body.get(ATTR_LOCATION, backup.location)
)
background = body.pop(ATTR_BACKGROUND) background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task( restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_partial, backup, **body self.sys_backups.do_restore_partial, backup, **body
@@ -435,147 +320,59 @@ class APIBackups(CoreSysAttributes):
) )
@api_process @api_process
async def freeze(self, request: web.Request): async def freeze(self, request):
"""Initiate manual freeze for external backup.""" """Initiate manual freeze for external backup."""
body = await api_validate(SCHEMA_FREEZE, request) body = await api_validate(SCHEMA_FREEZE, request)
await asyncio.shield(self.sys_backups.freeze_all(**body)) await asyncio.shield(self.sys_backups.freeze_all(**body))
@api_process @api_process
async def thaw(self, request: web.Request): async def thaw(self, request):
"""Begin thaw after manual freeze.""" """Begin thaw after manual freeze."""
await self.sys_backups.thaw_all() await self.sys_backups.thaw_all()
     @api_process
-    async def remove(self, request: web.Request):
+    async def remove(self, request):
         """Remove a backup."""
         backup = self._extract_slug(request)
-        body = await api_validate(SCHEMA_REMOVE, request)
-        locations: list[LOCATION_TYPE] | None = None
-
-        if ATTR_LOCATION in body:
-            self._validate_cloud_backup_location(request, body[ATTR_LOCATION])
-            locations = [self._location_to_mount(name) for name in body[ATTR_LOCATION]]
-        else:
-            self._validate_cloud_backup_location(request, backup.location)
-
-        await self.sys_backups.remove(backup, locations=locations)
+        return self.sys_backups.remove(backup)

-    @api_process
-    async def download(self, request: web.Request):
+    async def download(self, request):
         """Download a backup file."""
         backup = self._extract_slug(request)
-        # Query will give us '' for /backups, convert value to None
-        location = _convert_local_location(
-            request.query.get(ATTR_LOCATION, backup.location)
-        )
-        self._validate_cloud_backup_location(request, location)
-        if location not in backup.all_locations:
-            raise APIError(f"Backup {backup.slug} is not in location {location}")

         _LOGGER.info("Downloading backup %s", backup.slug)
-        filename = backup.all_locations[location].path
-        # If the file is missing, return 404 and trigger reload of location
-        if not await self.sys_run_in_executor(filename.is_file):
-            self.sys_create_task(self.sys_backups.reload(location))
-            return web.Response(status=404)
-
-        response = web.FileResponse(filename)
+        response = web.FileResponse(backup.tarfile)
         response.content_type = CONTENT_TYPE_TAR
-
-        download_filename = filename.name
-        if download_filename == f"{backup.slug}.tar":
-            download_filename = f"{RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
         response.headers[CONTENT_DISPOSITION] = (
-            f"attachment; filename={download_filename}"
+            f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
         )
         return response
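Both `download` variants stream the tar straight from disk with aiohttp's `FileResponse`, and the `Content-Disposition` header decides the filename the browser saves. A standalone handler in the same shape (the path, route, and filename are made up):

from pathlib import Path

from aiohttp import web

async def download(request: web.Request) -> web.StreamResponse:
    path = Path("/tmp/backups/9a8b7c6d.tar")  # hypothetical backup file
    if not path.is_file():
        # Missing file: answer 404 instead of letting FileResponse fail late.
        raise web.HTTPNotFound()
    response = web.FileResponse(path)
    response.content_type = "application/x-tar"
    # Offer a readable name instead of the slug-based one on disk.
    response.headers["Content-Disposition"] = "attachment; filename=my_backup.tar"
    return response

app = web.Application()
app.router.add_get("/backups/download", download)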
     @api_process
-    async def upload(self, request: web.Request):
+    async def upload(self, request):
         """Upload a backup file."""
-        location: LOCATION_TYPE = None
-        locations: list[LOCATION_TYPE] | None = None
-        tmp_path = self.sys_config.path_tmp
-
-        if ATTR_LOCATION in request.query:
-            location_names: list[str] = request.query.getall(ATTR_LOCATION, [])
-            self._validate_cloud_backup_location(
-                request, cast(list[str | None], location_names)
-            )
-            # Convert empty string to None if necessary
-            locations = [
-                self._location_to_mount(location)
-                if _convert_local_location(location)
-                else None
-                for location in location_names
-            ]
-            location = locations.pop(0)
-
-            if location and location != LOCATION_CLOUD_BACKUP:
-                tmp_path = cast(Mount, location).local_where
-
-        filename: str | None = None
-        if ATTR_FILENAME in request.query:
-            filename = request.query.get(ATTR_FILENAME)
-            try:
-                vol.Match(RE_BACKUP_FILENAME)(filename)
-            except vol.Invalid as ex:
-                raise APIError(humanize_error(filename, ex)) from None
-
-        temp_dir: TemporaryDirectory | None = None
-        backup_file_stream: IOBase | None = None
-
-        def open_backup_file() -> Path:
-            nonlocal temp_dir, backup_file_stream
-            temp_dir = TemporaryDirectory(dir=tmp_path.as_posix())
-            tar_file = Path(temp_dir.name, "backup.tar")
-            backup_file_stream = tar_file.open("wb")
-            return tar_file
-
-        def close_backup_file() -> None:
-            if backup_file_stream:
-                # Make sure it got closed, in case of exception. It is safe to
-                # close the file stream twice.
-                backup_file_stream.close()
-            if temp_dir:
-                temp_dir.cleanup()
-
-        try:
+        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
+            tar_file = Path(temp_dir, "backup.tar")
             reader = await request.multipart()
             contents = await reader.next()
-            if not isinstance(contents, BodyPartReader):
-                raise APIError("Improperly formatted upload, could not read backup")
-
-            tar_file = await self.sys_run_in_executor(open_backup_file)
-            while chunk := await contents.read_chunk(size=2**16):
-                await self.sys_run_in_executor(
-                    cast(IOBase, backup_file_stream).write, chunk
-                )
-            await self.sys_run_in_executor(cast(IOBase, backup_file_stream).close)
-
-            backup = await asyncio.shield(
-                self.sys_backups.import_backup(
-                    tar_file,
-                    filename,
-                    location=location,
-                    additional_locations=locations,
-                )
-            )
-        except OSError as err:
-            if err.errno == errno.EBADMSG and location in {
-                LOCATION_CLOUD_BACKUP,
-                None,
-            }:
-                self.sys_resolution.add_unhealthy_reason(
-                    UnhealthyReason.OSERROR_BAD_MESSAGE
-                )
-            _LOGGER.error("Can't write new backup file: %s", err)
-            return False
-
-        except asyncio.CancelledError:
-            return False
-
-        finally:
-            await self.sys_run_in_executor(close_backup_file)
+            try:
+                with tar_file.open("wb") as backup:
+                    while True:
+                        chunk = await contents.read_chunk()
+                        if not chunk:
+                            break
+                        backup.write(chunk)
+
+            except OSError as err:
+                if err.errno == errno.EBADMSG:
+                    self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
+                _LOGGER.error("Can't write new backup file: %s", err)
+                return False
+
+            except asyncio.CancelledError:
+                return False
+
+            backup = await asyncio.shield(self.sys_backups.import_backup(tar_file))

         if backup:
             return {ATTR_SLUG: backup.slug}
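The longer `upload` variant pushes every blocking file operation (open, write, close, cleanup) into the executor so a slow backup mount cannot stall the event loop. A condensed, self-contained version of that pattern (error handling trimmed; the route and names are illustrative):

import asyncio
from pathlib import Path
from tempfile import TemporaryDirectory

from aiohttp import BodyPartReader, web

async def upload(request: web.Request) -> web.Response:
    reader = await request.multipart()
    part = await reader.next()
    if not isinstance(part, BodyPartReader):
        raise web.HTTPBadRequest(text="expected exactly one file part")

    loop = asyncio.get_running_loop()
    with TemporaryDirectory() as tmp:
        tar_file = Path(tmp, "backup.tar")
        # open() can block on a network mount, so it runs in the executor too.
        stream = await loop.run_in_executor(None, tar_file.open, "wb")
        try:
            while chunk := await part.read_chunk(size=2**16):
                await loop.run_in_executor(None, stream.write, chunk)
        finally:
            await loop.run_in_executor(None, stream.close)
        # ... hand tar_file to the import step before the directory vanishes ...
    return web.Response(text="ok")

app = web.Application()
app.router.add_post("/backups/new/upload", upload)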

View File

@@ -12,7 +12,6 @@ CONTENT_TYPE_X_LOG = "text/x-log"
COOKIE_INGRESS = "ingress_session" COOKIE_INGRESS = "ingress_session"
ATTR_ADDITIONAL_LOCATIONS = "additional_locations"
ATTR_AGENT_VERSION = "agent_version" ATTR_AGENT_VERSION = "agent_version"
ATTR_APPARMOR_VERSION = "apparmor_version" ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_ATTRIBUTES = "attributes" ATTR_ATTRIBUTES = "attributes"
@@ -43,12 +42,11 @@ ATTR_GROUP_IDS = "group_ids"
ATTR_IDENTIFIERS = "identifiers" ATTR_IDENTIFIERS = "identifiers"
ATTR_IS_ACTIVE = "is_active" ATTR_IS_ACTIVE = "is_active"
ATTR_IS_OWNER = "is_owner" ATTR_IS_OWNER = "is_owner"
ATTR_JOB_ID = "job_id"
ATTR_JOBS = "jobs" ATTR_JOBS = "jobs"
ATTR_LLMNR = "llmnr" ATTR_LLMNR = "llmnr"
ATTR_LLMNR_HOSTNAME = "llmnr_hostname" ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
ATTR_LOCAL_ONLY = "local_only" ATTR_LOCAL_ONLY = "local_only"
ATTR_LOCATION_ATTRIBUTES = "location_attributes"
ATTR_LOCATIONS = "locations"
ATTR_MDNS = "mdns" ATTR_MDNS = "mdns"
ATTR_MODEL = "model" ATTR_MODEL = "model"
ATTR_MOUNTS = "mounts" ATTR_MOUNTS = "mounts"
@@ -70,7 +68,6 @@ ATTR_UPDATE_TYPE = "update_type"
ATTR_USAGE = "usage" ATTR_USAGE = "usage"
ATTR_USE_NTP = "use_ntp" ATTR_USE_NTP = "use_ntp"
ATTR_USERS = "users" ATTR_USERS = "users"
ATTR_USER_PATH = "user_path"
ATTR_VENDOR = "vendor" ATTR_VENDOR = "vendor"
ATTR_VIRTUALIZATION = "virtualization" ATTR_VIRTUALIZATION = "virtualization"
@@ -80,11 +77,3 @@ class BootSlot(StrEnum):
A = "A" A = "A"
B = "B" B = "B"
class DetectBlockingIO(StrEnum):
"""Enable/Disable detection for blocking I/O in event loop."""
OFF = "off"
ON = "on"
ON_AT_STARTUP = "on_at_startup"

View File

@@ -1,9 +1,7 @@
"""Init file for Supervisor network RESTful API.""" """Init file for Supervisor network RESTful API."""
import logging import logging
from typing import Any, cast
from aiohttp import web
import voluptuous as vol import voluptuous as vol
from ..addons.addon import Addon from ..addons.addon import Addon
@@ -18,8 +16,7 @@ from ..const import (
AddonState, AddonState,
) )
 from ..coresys import CoreSysAttributes
-from ..discovery import Message
-from ..exceptions import APIForbidden, APINotFound
+from ..exceptions import APIError, APIForbidden
 from .utils import api_process, api_validate, require_home_assistant
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -35,16 +32,16 @@ SCHEMA_DISCOVERY = vol.Schema(
class APIDiscovery(CoreSysAttributes): class APIDiscovery(CoreSysAttributes):
"""Handle RESTful API for discovery functions.""" """Handle RESTful API for discovery functions."""
def _extract_message(self, request: web.Request) -> Message: def _extract_message(self, request):
"""Extract discovery message from URL.""" """Extract discovery message from URL."""
message = self.sys_discovery.get(request.match_info["uuid"]) message = self.sys_discovery.get(request.match_info.get("uuid"))
if not message: if not message:
raise APINotFound("Discovery message not found") raise APIError("Discovery message not found")
return message return message
@api_process @api_process
@require_home_assistant @require_home_assistant
async def list_discovery(self, request: web.Request) -> dict[str, Any]: async def list(self, request):
"""Show registered and available services.""" """Show registered and available services."""
# Get available discovery # Get available discovery
discovery = [ discovery = [
@@ -55,16 +52,12 @@ class APIDiscovery(CoreSysAttributes):
                 ATTR_CONFIG: message.config,
             }
             for message in self.sys_discovery.list_messages
-            if (
-                discovered := cast(
-                    Addon, self.sys_addons.get(message.addon, local_only=True)
-                )
-            )
-            and discovered.state == AddonState.STARTED
+            if (addon := self.sys_addons.get(message.addon, local_only=True))
+            and addon.state == AddonState.STARTED
         ]

         # Get available services/add-ons
-        services: dict[str, list[str]] = {}
+        services = {}
         for addon in self.sys_addons.all:
for name in addon.discovery: for name in addon.discovery:
services.setdefault(name, []).append(addon.slug) services.setdefault(name, []).append(addon.slug)
@@ -72,7 +65,7 @@ class APIDiscovery(CoreSysAttributes):
return {ATTR_DISCOVERY: discovery, ATTR_SERVICES: services} return {ATTR_DISCOVERY: discovery, ATTR_SERVICES: services}
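Both sides of the listing filter with an assignment expression so each add-on lookup happens only once per message; the newer side merely adds a typing `cast`. The `:=` idiom in isolation:

# Look up each key once and keep entries whose value passes a check.
registry = {"mqtt": "started", "deconz": "stopped"}
messages = ["mqtt", "deconz", "unknown"]

active = [
    name
    for name in messages
    if (state := registry.get(name)) and state == "started"
]
assert active == ["mqtt"]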
@api_process @api_process
async def set_discovery(self, request: web.Request) -> dict[str, str]: async def set_discovery(self, request):
"""Write data into a discovery pipeline.""" """Write data into a discovery pipeline."""
body = await api_validate(SCHEMA_DISCOVERY, request) body = await api_validate(SCHEMA_DISCOVERY, request)
addon: Addon = request[REQUEST_FROM] addon: Addon = request[REQUEST_FROM]
@@ -90,13 +83,13 @@ class APIDiscovery(CoreSysAttributes):
) )
# Process discovery message # Process discovery message
message = await self.sys_discovery.send(addon, **body) message = self.sys_discovery.send(addon, **body)
return {ATTR_UUID: message.uuid} return {ATTR_UUID: message.uuid}
@api_process @api_process
@require_home_assistant @require_home_assistant
async def get_discovery(self, request: web.Request) -> dict[str, Any]: async def get_discovery(self, request):
"""Read data into a discovery message.""" """Read data into a discovery message."""
message = self._extract_message(request) message = self._extract_message(request)
@@ -108,7 +101,7 @@ class APIDiscovery(CoreSysAttributes):
} }
@api_process @api_process
async def del_discovery(self, request: web.Request) -> None: async def del_discovery(self, request):
"""Delete data into a discovery message.""" """Delete data into a discovery message."""
message = self._extract_message(request) message = self._extract_message(request)
addon = request[REQUEST_FROM] addon = request[REQUEST_FROM]
@@ -117,4 +110,5 @@ class APIDiscovery(CoreSysAttributes):
if message.addon != addon.slug: if message.addon != addon.slug:
raise APIForbidden("Can't remove discovery message") raise APIForbidden("Can't remove discovery message")
await self.sys_discovery.remove(message) self.sys_discovery.remove(message)
return True

View File

@@ -78,7 +78,7 @@ class APICoreDNS(CoreSysAttributes):
if restart_required: if restart_required:
self.sys_create_task(self.sys_plugins.dns.restart()) self.sys_create_task(self.sys_plugins.dns.restart())
await self.sys_plugins.dns.save_data() self.sys_plugins.dns.save_data()
@api_process @api_process
async def stats(self, request: web.Request) -> dict[str, Any]: async def stats(self, request: web.Request) -> dict[str, Any]:

View File

@@ -16,7 +16,6 @@ from ..const import (
ATTR_VERSION, ATTR_VERSION,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APINotFound
from .utils import api_process, api_validate from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -53,17 +52,14 @@ class APIDocker(CoreSysAttributes):
for hostname, registry in body.items(): for hostname, registry in body.items():
self.sys_docker.config.registries[hostname] = registry self.sys_docker.config.registries[hostname] = registry
await self.sys_docker.config.save_data() self.sys_docker.config.save_data()
@api_process @api_process
async def remove_registry(self, request: web.Request): async def remove_registry(self, request: web.Request):
"""Delete a docker registry.""" """Delete a docker registry."""
hostname = request.match_info.get(ATTR_HOSTNAME) hostname = request.match_info.get(ATTR_HOSTNAME)
if hostname not in self.sys_docker.config.registries:
raise APINotFound(f"Hostname {hostname} does not exist in registries")
del self.sys_docker.config.registries[hostname] del self.sys_docker.config.registries[hostname]
await self.sys_docker.config.save_data() self.sys_docker.config.save_data()
@api_process @api_process
async def info(self, request: web.Request): async def info(self, request: web.Request):

View File

@@ -68,10 +68,7 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
ATTR_NAME: fs_block.id_label, ATTR_NAME: fs_block.id_label,
ATTR_SYSTEM: fs_block.hint_system, ATTR_SYSTEM: fs_block.hint_system,
             ATTR_MOUNT_POINTS: [
-                str(mount_point)
-                for mount_point in (
-                    fs_block.filesystem.mount_points if fs_block.filesystem else []
-                )
+                str(mount_point) for mount_point in fs_block.filesystem.mount_points
             ],
} }

View File

@@ -118,7 +118,7 @@ class APIHomeAssistant(CoreSysAttributes):
body = await api_validate(SCHEMA_OPTIONS, request) body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_IMAGE in body: if ATTR_IMAGE in body:
self.sys_homeassistant.set_image(body[ATTR_IMAGE]) self.sys_homeassistant.image = body[ATTR_IMAGE]
self.sys_homeassistant.override_image = ( self.sys_homeassistant.override_image = (
self.sys_homeassistant.image != self.sys_homeassistant.default_image self.sys_homeassistant.image != self.sys_homeassistant.default_image
) )
@@ -149,7 +149,7 @@ class APIHomeAssistant(CoreSysAttributes):
ATTR_BACKUPS_EXCLUDE_DATABASE ATTR_BACKUPS_EXCLUDE_DATABASE
] ]
await self.sys_homeassistant.save_data() self.sys_homeassistant.save_data()
@api_process @api_process
async def stats(self, request: web.Request) -> dict[Any, str]: async def stats(self, request: web.Request) -> dict[Any, str]:

View File

@@ -3,9 +3,8 @@
import asyncio import asyncio
from contextlib import suppress from contextlib import suppress
import logging import logging
from typing import Any
from aiohttp import ClientConnectionResetError, web from aiohttp import web
from aiohttp.hdrs import ACCEPT, RANGE from aiohttp.hdrs import ACCEPT, RANGE
import voluptuous as vol import voluptuous as vol
from voluptuous.error import CoerceInvalid from voluptuous.error import CoerceInvalid
@@ -37,7 +36,6 @@ from ..host.const import (
LogFormat, LogFormat,
LogFormatter, LogFormatter,
) )
from ..host.logs import SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX
from ..utils.systemd_journal import journal_logs_reader from ..utils.systemd_journal import journal_logs_reader
from .const import ( from .const import (
ATTR_AGENT_VERSION, ATTR_AGENT_VERSION,
@@ -100,10 +98,10 @@ class APIHost(CoreSysAttributes):
ATTR_VIRTUALIZATION: self.sys_host.info.virtualization, ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
ATTR_CPE: self.sys_host.info.cpe, ATTR_CPE: self.sys_host.info.cpe,
ATTR_DEPLOYMENT: self.sys_host.info.deployment, ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_DISK_FREE: await self.sys_host.info.free_space(), ATTR_DISK_FREE: self.sys_host.info.free_space,
ATTR_DISK_TOTAL: await self.sys_host.info.total_space(), ATTR_DISK_TOTAL: self.sys_host.info.total_space,
ATTR_DISK_USED: await self.sys_host.info.used_space(), ATTR_DISK_USED: self.sys_host.info.used_space,
ATTR_DISK_LIFE_TIME: await self.sys_host.info.disk_life_time(), ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
ATTR_FEATURES: self.sys_host.features, ATTR_FEATURES: self.sys_host.features,
ATTR_HOSTNAME: self.sys_host.info.hostname, ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname, ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname,
@@ -197,18 +195,20 @@ class APIHost(CoreSysAttributes):
) -> web.StreamResponse: ) -> web.StreamResponse:
"""Return systemd-journald logs.""" """Return systemd-journald logs."""
log_formatter = LogFormatter.PLAIN log_formatter = LogFormatter.PLAIN
params: dict[str, Any] = {} params = {}
if identifier: if identifier:
params[PARAM_SYSLOG_IDENTIFIER] = identifier params[PARAM_SYSLOG_IDENTIFIER] = identifier
elif IDENTIFIER in request.match_info: elif IDENTIFIER in request.match_info:
params[PARAM_SYSLOG_IDENTIFIER] = request.match_info[IDENTIFIER] params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
else: else:
params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
# host logs should be always verbose, no matter what Accept header is used # host logs should be always verbose, no matter what Accept header is used
log_formatter = LogFormatter.VERBOSE log_formatter = LogFormatter.VERBOSE
         if BOOTID in request.match_info:
-            params[PARAM_BOOT_ID] = await self._get_boot_id(request.match_info[BOOTID])
+            params[PARAM_BOOT_ID] = await self._get_boot_id(
+                request.match_info.get(BOOTID)
+            )
if follow: if follow:
params[PARAM_FOLLOW] = "" params[PARAM_FOLLOW] = ""
@@ -239,11 +239,13 @@ class APIHost(CoreSysAttributes):
             # return 2 lines at minimum.
             lines = max(2, lines)
             # entries=cursor[[:num_skip]:num_entries]
-            range_header = f"entries=:-{lines - 1}:{SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX if follow else lines}"
+            range_header = f"entries=:-{lines-1}:{'' if follow else lines}"
         elif RANGE in request.headers:
-            range_header = request.headers[RANGE]
+            range_header = request.headers.get(RANGE)
         else:
-            range_header = f"entries=:-{DEFAULT_LINES - 1}:{SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX if follow else DEFAULT_LINES}"
+            range_header = (
+                f"entries=:-{DEFAULT_LINES-1}:{'' if follow else DEFAULT_LINES}"
+            )
async with self.sys_host.logs.journald_logs( async with self.sys_host.logs.journald_logs(
params=params, range_header=range_header, accept=LogFormat.JOURNAL params=params, range_header=range_header, accept=LogFormat.JOURNAL
@@ -253,22 +255,12 @@ class APIHost(CoreSysAttributes):
             response.content_type = CONTENT_TYPE_TEXT
             headers_returned = False
             async for cursor, line in journal_logs_reader(resp, log_formatter):
-                try:
-                    if not headers_returned:
-                        if cursor:
-                            response.headers["X-First-Cursor"] = cursor
-                        response.headers["X-Accel-Buffering"] = "no"
-                        await response.prepare(request)
-                        headers_returned = True
-                    await response.write(line.encode("utf-8") + b"\n")
-                except ClientConnectionResetError as err:
-                    # When client closes the connection while reading busy logs, we
-                    # sometimes get this exception. It should be safe to ignore it.
-                    _LOGGER.debug(
-                        "ClientConnectionResetError raised when returning journal logs: %s",
-                        err,
-                    )
-                    break
+                if not headers_returned:
+                    if cursor:
+                        response.headers["X-First-Cursor"] = cursor
+                    await response.prepare(request)
+                    headers_returned = True
+                await response.write(line.encode("utf-8") + b"\n")
         except ConnectionResetError as ex:
             raise APIError(
                 "Connection reset when trying to fetch data from systemd-journald."

View File

@@ -83,7 +83,7 @@ class APIIngress(CoreSysAttributes):
def _extract_addon(self, request: web.Request) -> Addon: def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception it it doesn't exist.""" """Return addon, throw an exception it it doesn't exist."""
token = request.match_info["token"] token = request.match_info.get("token")
# Find correct add-on # Find correct add-on
addon = self.sys_ingress.get(token) addon = self.sys_ingress.get(token)
@@ -132,7 +132,7 @@ class APIIngress(CoreSysAttributes):
@api_process @api_process
@require_home_assistant @require_home_assistant
async def validate_session(self, request: web.Request) -> None: async def validate_session(self, request: web.Request) -> dict[str, Any]:
"""Validate session and extending how long it's valid for.""" """Validate session and extending how long it's valid for."""
data = await api_validate(VALIDATE_SESSION_DATA, request) data = await api_validate(VALIDATE_SESSION_DATA, request)
@@ -147,14 +147,14 @@ class APIIngress(CoreSysAttributes):
"""Route data to Supervisor ingress service.""" """Route data to Supervisor ingress service."""
# Check Ingress Session # Check Ingress Session
session = request.cookies.get(COOKIE_INGRESS, "") session = request.cookies.get(COOKIE_INGRESS)
if not self.sys_ingress.validate_session(session): if not self.sys_ingress.validate_session(session):
_LOGGER.warning("No valid ingress session %s", session) _LOGGER.warning("No valid ingress session %s", session)
raise HTTPUnauthorized() raise HTTPUnauthorized()
# Process requests # Process requests
addon = self._extract_addon(request) addon = self._extract_addon(request)
path = request.match_info.get("path", "") path = request.match_info.get("path")
session_data = self.sys_ingress.get_session_data(session) session_data = self.sys_ingress.get_session_data(session)
try: try:
# Websocket # Websocket
@@ -183,7 +183,7 @@ class APIIngress(CoreSysAttributes):
for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",") for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
] ]
else: else:
req_protocols = [] req_protocols = ()
ws_server = web.WebSocketResponse( ws_server = web.WebSocketResponse(
protocols=req_protocols, autoclose=False, autoping=False protocols=req_protocols, autoclose=False, autoping=False
@@ -277,9 +277,8 @@ class APIIngress(CoreSysAttributes):
response.content_type = content_type response.content_type = content_type
try: try:
response.headers["X-Accel-Buffering"] = "no"
await response.prepare(request) await response.prepare(request)
async for data, _ in result.content.iter_chunks(): async for data in result.content.iter_chunked(4096):
await response.write(data) await response.write(data)
except ( except (
@@ -340,10 +339,9 @@ def _init_header(
             headers[name] = value

     # Update X-Forwarded-For
-    if request.transport:
-        forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
-        connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
-        headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"
+    forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
+    connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
+    headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"

     return headers
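For context on `_init_header`: every proxy hop appends the peer address it actually saw to `X-Forwarded-For`; the newer variant only adds a guard for a missing transport. The chain-building reduced to a standalone sketch (the conditional keeps a clean value when no prior header exists):

def append_forwarded_for(headers: dict[str, str], peer_ip: str) -> dict[str, str]:
    """Append the directly connected peer to the X-Forwarded-For chain."""
    prior = headers.get("X-Forwarded-For")
    headers["X-Forwarded-For"] = f"{prior}, {peer_ip}" if prior else peer_ip
    return headers

# A request that already passed one proxy:
print(append_forwarded_for({"X-Forwarded-For": "203.0.113.7"}, "172.30.32.1"))
# {'X-Forwarded-For': '203.0.113.7, 172.30.32.1'}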

View File

@@ -7,7 +7,7 @@ from aiohttp import web
import voluptuous as vol import voluptuous as vol
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound, JobNotFound from ..exceptions import APIError
from ..jobs import SupervisorJob from ..jobs import SupervisorJob
from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
from .const import ATTR_JOBS from .const import ATTR_JOBS
@@ -23,24 +23,10 @@ SCHEMA_OPTIONS = vol.Schema(
class APIJobs(CoreSysAttributes): class APIJobs(CoreSysAttributes):
"""Handle RESTful API for OS functions.""" """Handle RESTful API for OS functions."""
-    def _extract_job(self, request: web.Request) -> SupervisorJob:
-        """Extract job from request or raise."""
-        try:
-            return self.sys_jobs.get_job(request.match_info["uuid"])
-        except JobNotFound:
-            raise APINotFound("Job does not exist") from None
-
     def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
-        """Return current job tree.
-
-        Jobs are added to cache as they are created so by default they are in oldest to newest.
-        This is correct ordering for child jobs as it makes logical sense to present those in
-        the order they occurred within the parent. For the list as a whole, sort from newest
-        to oldest as its likely any client is most interested in the newer ones.
-        """
-        # Initially sort oldest to newest so all child lists end up in correct order
+        """Return current job tree."""
         jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
-        for job in sorted(self.sys_jobs.jobs):
+        for job in self.sys_jobs.jobs:
             if job.internal:
                 continue
@@ -49,15 +35,11 @@ class APIJobs(CoreSysAttributes):
             else:
                 jobs_by_parent[job.parent_id].append(job)

-        # After parent-child organization, sort the root jobs only from newest to oldest
         job_list: list[dict[str, Any]] = []
         queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
             [(job_list, start)]
             if start
-            else [
-                (job_list, job)
-                for job in sorted(jobs_by_parent.get(None, []), reverse=True)
-            ]
+            else [(job_list, job) for job in jobs_by_parent.get(None, [])]
         )
while queue: while queue:
@@ -71,10 +53,7 @@ class APIJobs(CoreSysAttributes):
             if current_job.uuid in jobs_by_parent:
                 queue.extend(
-                    [
-                        (child_jobs, job)
-                        for job in jobs_by_parent.get(current_job.uuid, [])
-                    ]
+                    [(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)]
                 )

         return job_list
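The removed docstring spells out the ordering contract: children stay oldest-to-newest inside their parent while root jobs are listed newest-first. The group-by-parent-then-walk idea, reduced to plain dicts (the real handler uses an explicit queue; recursion just keeps the sketch short):

from collections import defaultdict

jobs = [  # (uuid, parent_uuid), appended oldest -> newest
    ("a", None), ("b", "a"), ("c", None), ("d", "c"),
]

by_parent: dict[str | None, list[str]] = defaultdict(list)
for uuid, parent in jobs:
    by_parent[parent].append(uuid)  # insertion order keeps children oldest-first

def tree(uuid: str) -> dict:
    return {"uuid": uuid, "child_jobs": [tree(c) for c in by_parent[uuid]]}

roots = [tree(u) for u in reversed(by_parent[None])]  # roots newest-first
assert [r["uuid"] for r in roots] == ["c", "a"]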
@@ -95,25 +74,25 @@ class APIJobs(CoreSysAttributes):
if ATTR_IGNORE_CONDITIONS in body: if ATTR_IGNORE_CONDITIONS in body:
self.sys_jobs.ignore_conditions = body[ATTR_IGNORE_CONDITIONS] self.sys_jobs.ignore_conditions = body[ATTR_IGNORE_CONDITIONS]
await self.sys_jobs.save_data() self.sys_jobs.save_data()
await self.sys_resolution.evaluate.evaluate_system() await self.sys_resolution.evaluate.evaluate_system()
@api_process @api_process
async def reset(self, request: web.Request) -> None: async def reset(self, request: web.Request) -> None:
"""Reset options for JobManager.""" """Reset options for JobManager."""
await self.sys_jobs.reset_data() self.sys_jobs.reset_data()
@api_process @api_process
async def job_info(self, request: web.Request) -> dict[str, Any]: async def job_info(self, request: web.Request) -> dict[str, Any]:
"""Get details of a job by ID.""" """Get details of a job by ID."""
job = self._extract_job(request) job = self.sys_jobs.get_job(request.match_info.get("uuid"))
return self._list_jobs(job)[0] return self._list_jobs(job)[0]
@api_process @api_process
async def remove_job(self, request: web.Request) -> None: async def remove_job(self, request: web.Request) -> None:
"""Remove a completed job.""" """Remove a completed job."""
job = self._extract_job(request) job = self.sys_jobs.get_job(request.match_info.get("uuid"))
if not job.done: if not job.done:
raise APIError(f"Job {job.uuid} is not done!") raise APIError(f"Job {job.uuid} is not done!")

View File

@@ -1,12 +1,11 @@
"""Handle security part of this API.""" """Handle security part of this API."""
from collections.abc import Callable
import logging import logging
import re import re
from typing import Final from typing import Final
from urllib.parse import unquote from urllib.parse import unquote
from aiohttp.web import Request, Response, middleware from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
from awesomeversion import AwesomeVersion from awesomeversion import AwesomeVersion
@@ -24,7 +23,7 @@ from ...const import (
) )
from ...coresys import CoreSys, CoreSysAttributes from ...coresys import CoreSys, CoreSysAttributes
from ...utils import version_is_new_enough from ...utils import version_is_new_enough
from ..utils import api_return_error, extract_supervisor_token from ..utils import api_return_error, excract_supervisor_token
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
_CORE_VERSION: Final = AwesomeVersion("2023.3.4") _CORE_VERSION: Final = AwesomeVersion("2023.3.4")
@@ -180,7 +179,9 @@ class SecurityMiddleware(CoreSysAttributes):
return unquoted return unquoted
     @middleware
-    async def block_bad_requests(self, request: Request, handler: Callable) -> Response:
+    async def block_bad_requests(
+        self, request: Request, handler: RequestHandler
+    ) -> Response:
         """Process request and block commonly known exploit attempts."""
if FILTERS.search(self._recursive_unquote(request.path)): if FILTERS.search(self._recursive_unquote(request.path)):
_LOGGER.warning( _LOGGER.warning(
@@ -198,7 +199,9 @@ class SecurityMiddleware(CoreSysAttributes):
return await handler(request) return await handler(request)
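On the `handler` annotation churn in these hunks (`Callable` on one side, `aiohttp.web.RequestHandler` on the other): an aiohttp middleware is simply an async callable that receives the request plus the next handler and returns a response. A minimal working middleware for reference (the logging itself is illustrative):

import logging

from aiohttp import web

_LOGGER = logging.getLogger(__name__)

@web.middleware
async def logging_middleware(request: web.Request, handler) -> web.StreamResponse:
    """Pass the request through, logging the path on the way in."""
    _LOGGER.debug("handling %s", request.path)
    return await handler(request)

app = web.Application(middlewares=[logging_middleware])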
     @middleware
-    async def system_validation(self, request: Request, handler: Callable) -> Response:
+    async def system_validation(
+        self, request: Request, handler: RequestHandler
+    ) -> Response:
         """Check if core is ready to respond."""
if self.sys_core.state not in ( if self.sys_core.state not in (
CoreState.STARTUP, CoreState.STARTUP,
@@ -212,10 +215,12 @@ class SecurityMiddleware(CoreSysAttributes):
return await handler(request) return await handler(request)
     @middleware
-    async def token_validation(self, request: Request, handler: Callable) -> Response:
+    async def token_validation(
+        self, request: Request, handler: RequestHandler
+    ) -> Response:
         """Check security access of this layer."""
-        request_from: CoreSysAttributes | None = None
-        supervisor_token = extract_supervisor_token(request)
+        request_from = None
+        supervisor_token = excract_supervisor_token(request)
# Blacklist # Blacklist
if BLACKLIST.match(request.path): if BLACKLIST.match(request.path):
@@ -283,7 +288,7 @@ class SecurityMiddleware(CoreSysAttributes):
raise HTTPForbidden() raise HTTPForbidden()
@middleware @middleware
async def core_proxy(self, request: Request, handler: Callable) -> Response: async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
"""Validate user from Core API proxy.""" """Validate user from Core API proxy."""
if ( if (
request[REQUEST_FROM] != self.sys_homeassistant request[REQUEST_FROM] != self.sys_homeassistant

View File

@@ -1,17 +1,17 @@
"""Inits file for supervisor mounts REST API.""" """Inits file for supervisor mounts REST API."""
from typing import Any, cast from typing import Any
from aiohttp import web from aiohttp import web
import voluptuous as vol import voluptuous as vol
from ..const import ATTR_NAME, ATTR_STATE from ..const import ATTR_NAME, ATTR_STATE
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound from ..exceptions import APIError
from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
from ..mounts.mount import Mount from ..mounts.mount import Mount
from ..mounts.validate import SCHEMA_MOUNT_CONFIG, MountData from ..mounts.validate import SCHEMA_MOUNT_CONFIG
from .const import ATTR_MOUNTS, ATTR_USER_PATH from .const import ATTR_MOUNTS
from .utils import api_process, api_validate from .utils import api_process, api_validate
SCHEMA_OPTIONS = vol.Schema( SCHEMA_OPTIONS = vol.Schema(
@@ -24,13 +24,6 @@ SCHEMA_OPTIONS = vol.Schema(
class APIMounts(CoreSysAttributes): class APIMounts(CoreSysAttributes):
"""Handle REST API for mounting options.""" """Handle REST API for mounting options."""
def _extract_mount(self, request: web.Request) -> Mount:
"""Extract mount from request or raise."""
name = request.match_info["mount"]
if name not in self.sys_mounts:
raise APINotFound(f"No mount exists with name {name}")
return self.sys_mounts.get(name)
@api_process @api_process
async def info(self, request: web.Request) -> dict[str, Any]: async def info(self, request: web.Request) -> dict[str, Any]:
"""Return MountManager info.""" """Return MountManager info."""
@@ -39,13 +32,7 @@ class APIMounts(CoreSysAttributes):
if self.sys_mounts.default_backup_mount if self.sys_mounts.default_backup_mount
else None, else None,
             ATTR_MOUNTS: [
-                mount.to_dict()
-                | {
-                    ATTR_STATE: mount.state,
-                    ATTR_USER_PATH: mount.container_where.as_posix()
-                    if mount.container_where
-                    else None,
-                }
+                mount.to_dict() | {ATTR_STATE: mount.state}
                 for mount in self.sys_mounts.mounts
             ],
         }
@@ -66,15 +53,15 @@ class APIMounts(CoreSysAttributes):
else: else:
self.sys_mounts.default_backup_mount = mount self.sys_mounts.default_backup_mount = mount
await self.sys_mounts.save_data() self.sys_mounts.save_data()
@api_process @api_process
async def create_mount(self, request: web.Request) -> None: async def create_mount(self, request: web.Request) -> None:
"""Create a new mount in supervisor.""" """Create a new mount in supervisor."""
body = cast(MountData, await api_validate(SCHEMA_MOUNT_CONFIG, request)) body = await api_validate(SCHEMA_MOUNT_CONFIG, request)
if body["name"] in self.sys_mounts: if body[ATTR_NAME] in self.sys_mounts:
raise APIError(f"A mount already exists with name {body['name']}") raise APIError(f"A mount already exists with name {body[ATTR_NAME]}")
mount = Mount.from_dict(self.coresys, body) mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount) await self.sys_mounts.create_mount(mount)
@@ -87,20 +74,19 @@ class APIMounts(CoreSysAttributes):
if not self.sys_mounts.default_backup_mount: if not self.sys_mounts.default_backup_mount:
self.sys_mounts.default_backup_mount = mount self.sys_mounts.default_backup_mount = mount
await self.sys_mounts.save_data() self.sys_mounts.save_data()
     @api_process
     async def update_mount(self, request: web.Request) -> None:
         """Update an existing mount in supervisor."""
-        current = self._extract_mount(request)
-        name_schema = vol.Schema(
-            {vol.Optional(ATTR_NAME, default=current.name): current.name},
-            extra=vol.ALLOW_EXTRA,
-        )
-        body = cast(
-            MountData,
-            await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request),
-        )
+        name = request.match_info.get("mount")
+        name_schema = vol.Schema(
+            {vol.Optional(ATTR_NAME, default=name): name}, extra=vol.ALLOW_EXTRA
+        )
+        body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)
+
+        if name not in self.sys_mounts:
+            raise APIError(f"No mount exists with name {name}")

         mount = Mount.from_dict(self.coresys, body)
         await self.sys_mounts.create_mount(mount)
@@ -113,26 +99,26 @@ class APIMounts(CoreSysAttributes):
elif self.sys_mounts.default_backup_mount == mount: elif self.sys_mounts.default_backup_mount == mount:
self.sys_mounts.default_backup_mount = None self.sys_mounts.default_backup_mount = None
await self.sys_mounts.save_data() self.sys_mounts.save_data()
@api_process @api_process
async def delete_mount(self, request: web.Request) -> None: async def delete_mount(self, request: web.Request) -> None:
"""Delete an existing mount in supervisor.""" """Delete an existing mount in supervisor."""
current = self._extract_mount(request) name = request.match_info.get("mount")
mount = await self.sys_mounts.remove_mount(current.name) mount = await self.sys_mounts.remove_mount(name)
# If it was a backup mount, reload backups # If it was a backup mount, reload backups
if mount.usage == MountUsage.BACKUP: if mount.usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload()) self.sys_create_task(self.sys_backups.reload())
await self.sys_mounts.save_data() self.sys_mounts.save_data()
@api_process @api_process
async def reload_mount(self, request: web.Request) -> None: async def reload_mount(self, request: web.Request) -> None:
"""Reload an existing mount in supervisor.""" """Reload an existing mount in supervisor."""
mount = self._extract_mount(request) name = request.match_info.get("mount")
await self.sys_mounts.reload_mount(mount.name) await self.sys_mounts.reload_mount(name)
# If it's a backup mount, reload backups # If it's a backup mount, reload backups
if mount.usage == MountUsage.BACKUP: if self.sys_mounts.get(name).usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload()) self.sys_create_task(self.sys_backups.reload())
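In `update_mount` above, both variants stack a small schema in front of `SCHEMA_MOUNT_CONFIG` so the mount name from the URL becomes the body's default and the only accepted value. How that voluptuous composition behaves in isolation (the names here are made up):

import voluptuous as vol

name = "my_nas"  # would come from request.match_info

name_schema = vol.Schema(
    {vol.Optional("name", default=name): name}, extra=vol.ALLOW_EXTRA
)

assert name_schema({}) == {"name": "my_nas"}  # default injected when omitted
assert name_schema({"name": "my_nas", "usage": "backup"})["usage"] == "backup"
try:
    name_schema({"name": "other"})  # a mismatching name is rejected
except vol.Invalid:
    pass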

View File

@@ -42,7 +42,7 @@ from ..const import (
DOCKER_NETWORK_MASK, DOCKER_NETWORK_MASK,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APINotFound, HostNetworkNotFound from ..exceptions import APIError, HostNetworkNotFound
from ..host.configuration import ( from ..host.configuration import (
AccessPoint, AccessPoint,
Interface, Interface,
@@ -132,12 +132,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
         ATTR_CONNECTED: interface.connected,
         ATTR_PRIMARY: interface.primary,
         ATTR_MAC: interface.mac,
-        ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting)
-        if interface.ipv4 and interface.ipv4setting
-        else None,
-        ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting)
-        if interface.ipv6 and interface.ipv6setting
-        else None,
+        ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting),
+        ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting),
         ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
         ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
     }
@@ -171,7 +167,7 @@ class APINetwork(CoreSysAttributes):
except HostNetworkNotFound: except HostNetworkNotFound:
pass pass
raise APINotFound(f"Interface {name} does not exist") from None raise APIError(f"Interface {name} does not exist") from None
@api_process @api_process
async def info(self, request: web.Request) -> dict[str, Any]: async def info(self, request: web.Request) -> dict[str, Any]:
@@ -194,14 +190,14 @@ class APINetwork(CoreSysAttributes):
@api_process @api_process
async def interface_info(self, request: web.Request) -> dict[str, Any]: async def interface_info(self, request: web.Request) -> dict[str, Any]:
"""Return network information for a interface.""" """Return network information for a interface."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE]) interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
return interface_struct(interface) return interface_struct(interface)
@api_process @api_process
async def interface_update(self, request: web.Request) -> None: async def interface_update(self, request: web.Request) -> None:
"""Update the configuration of an interface.""" """Update the configuration of an interface."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE]) interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
# Validate data # Validate data
body = await api_validate(SCHEMA_UPDATE, request) body = await api_validate(SCHEMA_UPDATE, request)
@@ -247,7 +243,7 @@ class APINetwork(CoreSysAttributes):
@api_process @api_process
async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]: async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]:
"""Scan and return a list of available networks.""" """Scan and return a list of available networks."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE]) interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
# Only wlan is supported # Only wlan is supported
if interface.type != InterfaceType.WIRELESS: if interface.type != InterfaceType.WIRELESS:
@@ -260,10 +256,8 @@ class APINetwork(CoreSysAttributes):
@api_process @api_process
async def create_vlan(self, request: web.Request) -> None: async def create_vlan(self, request: web.Request) -> None:
"""Create a new vlan.""" """Create a new vlan."""
interface = self._get_interface(request.match_info[ATTR_INTERFACE]) interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
vlan = int(request.match_info.get(ATTR_VLAN, -1)) vlan = int(request.match_info.get(ATTR_VLAN))
if vlan < 0:
raise APIError(f"Invalid vlan specified: {vlan}")
# Only ethernet is supported # Only ethernet is supported
if interface.type != InterfaceType.ETHERNET: if interface.type != InterfaceType.ETHERNET:

View File

@@ -3,7 +3,6 @@
import asyncio import asyncio
from collections.abc import Awaitable from collections.abc import Awaitable
import logging import logging
import re
from typing import Any from typing import Any
from aiohttp import web from aiohttp import web
@@ -22,14 +21,12 @@ from ..const import (
ATTR_SERIAL, ATTR_SERIAL,
ATTR_SIZE, ATTR_SIZE,
ATTR_STATE, ATTR_STATE,
ATTR_SWAP_SIZE,
ATTR_SWAPPINESS,
ATTR_UPDATE_AVAILABLE, ATTR_UPDATE_AVAILABLE,
ATTR_VERSION, ATTR_VERSION,
ATTR_VERSION_LATEST, ATTR_VERSION_LATEST,
) )
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..exceptions import APINotFound, BoardInvalidError from ..exceptions import BoardInvalidError
from ..resolution.const import ContextType, IssueType, SuggestionType from ..resolution.const import ContextType, IssueType, SuggestionType
from ..validate import version_tag from ..validate import version_tag
from .const import ( from .const import (
@@ -68,15 +65,6 @@ SCHEMA_GREEN_OPTIONS = vol.Schema(
vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(), vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
} }
) )
RE_SWAP_SIZE = re.compile(r"^\d+([KMG](i?B)?|B)?$", re.IGNORECASE)
SCHEMA_SWAP_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_SWAP_SIZE): vol.Match(RE_SWAP_SIZE),
vol.Optional(ATTR_SWAPPINESS): vol.All(int, vol.Range(min=0, max=200)),
}
)
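The removed `RE_SWAP_SIZE` accepts a positive integer with an optional K/M/G multiplier and an optional B/iB unit suffix, case-insensitively, while swappiness is range-checked 0-200. A few quick checks against the same pattern:

import re

RE_SWAP_SIZE = re.compile(r"^\d+([KMG](i?B)?|B)?$", re.IGNORECASE)

for value in ("33554432", "32MiB", "512M", "1G", "2gib", "4096KB"):
    assert RE_SWAP_SIZE.match(value), value
for value in ("32 MiB", "-1G", "1.5G", "1T"):
    assert not RE_SWAP_SIZE.match(value), value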
# pylint: enable=no-value-for-parameter # pylint: enable=no-value-for-parameter
@@ -181,7 +169,7 @@ class APIOS(CoreSysAttributes):
body[ATTR_SYSTEM_HEALTH_LED] body[ATTR_SYSTEM_HEALTH_LED]
) )
await self.sys_dbus.agent.board.green.save_data() self.sys_dbus.agent.board.green.save_data()
@api_process @api_process
async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]: async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
@@ -208,7 +196,7 @@ class APIOS(CoreSysAttributes):
if ATTR_POWER_LED in body: if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED]) await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])
await self.sys_dbus.agent.board.yellow.save_data() self.sys_dbus.agent.board.yellow.save_data()
self.sys_resolution.create_issue( self.sys_resolution.create_issue(
IssueType.REBOOT_REQUIRED, IssueType.REBOOT_REQUIRED,
ContextType.SYSTEM, ContextType.SYSTEM,
@@ -224,53 +212,3 @@ class APIOS(CoreSysAttributes):
) )
return {} return {}
@api_process
async def config_swap_info(self, request: web.Request) -> dict[str, Any]:
"""Get swap settings."""
if (
not self.coresys.os.available
or not self.coresys.os.version
or self.coresys.os.version < "15.0"
):
raise APINotFound(
"Home Assistant OS 15.0 or newer required for swap settings"
)
return {
ATTR_SWAP_SIZE: self.sys_dbus.agent.swap.swap_size,
ATTR_SWAPPINESS: self.sys_dbus.agent.swap.swappiness,
}
@api_process
async def config_swap_options(self, request: web.Request) -> None:
"""Update swap settings."""
if (
not self.coresys.os.available
or not self.coresys.os.version
or self.coresys.os.version < "15.0"
):
raise APINotFound(
"Home Assistant OS 15.0 or newer required for swap settings"
)
body = await api_validate(SCHEMA_SWAP_OPTIONS, request)
reboot_required = False
if ATTR_SWAP_SIZE in body:
old_size = self.sys_dbus.agent.swap.swap_size
await self.sys_dbus.agent.swap.set_swap_size(body[ATTR_SWAP_SIZE])
reboot_required = reboot_required or old_size != body[ATTR_SWAP_SIZE]
if ATTR_SWAPPINESS in body:
old_swappiness = self.sys_dbus.agent.swap.swappiness
await self.sys_dbus.agent.swap.set_swappiness(body[ATTR_SWAPPINESS])
reboot_required = reboot_required or old_swappiness != body[ATTR_SWAPPINESS]
if reboot_required:
self.sys_resolution.create_issue(
IssueType.REBOOT_REQUIRED,
ContextType.SYSTEM,
suggestions=[SuggestionType.EXECUTE_REBOOT],
)

View File

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

File diff suppressed because one or more lines are too long

View File

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

File diff suppressed because one or more lines are too long

4 file diffs suppressed because one or more lines are too long

@@ -1,2 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["12"],{5739:function(e,a,t){t.a(e,(async function(e,i){try{t.r(a),t.d(a,{HaNavigationSelector:()=>c});var d=t(73577),r=(t(71695),t(47021),t(57243)),n=t(50778),l=t(36522),o=t(63297),s=e([o]);o=(s.then?(await s)():s)[0];let u,h=e=>e,c=(0,d.Z)([(0,n.Mo)("ha-selector-navigation")],(function(e,a){return{F:class extends a{constructor(...a){super(...a),e(this)}},d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"value",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,n.Cb)({type:Boolean,reflect:!0})],key:"disabled",value(){return!1}},{kind:"field",decorators:[(0,n.Cb)({type:Boolean})],key:"required",value(){return!0}},{kind:"method",key:"render",value:function(){return(0,r.dy)(u||(u=h` <ha-navigation-picker .hass="${0}" .label="${0}" .value="${0}" .required="${0}" .disabled="${0}" .helper="${0}" @value-changed="${0}"></ha-navigation-picker> `),this.hass,this.label,this.value,this.required,this.disabled,this.helper,this._valueChanged)}},{kind:"method",key:"_valueChanged",value:function(e){(0,l.B)(this,"value-changed",{value:e.detail.value})}}]}}),r.oi);i()}catch(u){i(u)}}))}}]);
//# sourceMappingURL=12.ffa1bdc0a98802fa.js.map

@@ -1 +0,0 @@
{"version":3,"file":"12.ffa1bdc0a98802fa.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250401.0/src/components/ha-selector/ha-selector-navigation.ts"],"names":["HaNavigationSelector","_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","reflect","html","_t","_","this","hass","label","required","disabled","helper","_valueChanged","ev","fireEvent","detail","LitElement"],"mappings":"mVAQaA,GAAoBC,EAAAA,EAAAA,GAAA,EADhCC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAiCvC,OAAAC,EAjCD,cACiCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,QAASC,SAAS,KAAOJ,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAElEC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,mKAECC,KAAKC,KACJD,KAAKE,MACLF,KAAKP,MACFO,KAAKG,SACLH,KAAKI,SACPJ,KAAKK,OACEL,KAAKM,cAG5B,GAAC,CAAAlB,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsBc,IACpBC,EAAAA,EAAAA,GAAUR,KAAM,gBAAiB,CAAEP,MAAOc,EAAGE,OAAOhB,OACtD,IAAC,GA/BuCiB,EAAAA,I"}

@@ -1,2 +0,0 @@
[Deleted minified webpack chunk 1236.64ca65d0ea4d76d4.js — registers English plural-rules data with Intl.PluralRules; minified source and sourceMappingURL comment omitted]

@@ -1 +0,0 @@
{"version":3,"file":"1236.64ca65d0ea4d76d4.js","sources":["/unknown/node_modules/@formatjs/intl-pluralrules/locale-data/en.js"],"names":["Intl","PluralRules","__addLocaleData","n","ord","s","String","split","v0","t0","Number","n10","slice","n100"],"mappings":"wHAEIA,KAAKC,aAA2D,mBAArCD,KAAKC,YAAYC,iBAC9CF,KAAKC,YAAYC,gBAAgB,CAAC,KAAO,CAAC,WAAa,CAAC,SAAW,CAAC,MAAM,SAAS,QAAU,CAAC,MAAM,MAAM,MAAM,UAAU,GAAK,SAASC,EAAGC,GAC3I,IAAIC,EAAIC,OAAOH,GAAGI,MAAM,KAAMC,GAAMH,EAAE,GAAII,EAAKC,OAAOL,EAAE,KAAOF,EAAGQ,EAAMF,GAAMJ,EAAE,GAAGO,OAAO,GAAIC,EAAOJ,GAAMJ,EAAE,GAAGO,OAAO,GACvH,OAAIR,EAAmB,GAAPO,GAAoB,IAARE,EAAa,MAC9B,GAAPF,GAAoB,IAARE,EAAa,MAClB,GAAPF,GAAoB,IAARE,EAAa,MACzB,QACQ,GAALV,GAAUK,EAAK,MAAQ,OAChC,GAAG,OAAS,M"}

6 file diffs suppressed because one or more lines are too long

@@ -1 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1295"],{21393:function(s,n,e){e.r(n)}}]);

3 file diffs suppressed because one or more lines are too long

@@ -1,47 +0,0 @@
/**
* @license
* Copyright 2017 Google LLC
* SPDX-License-Identifier: BSD-3-Clause
*/
/**
* @license
* Copyright 2019 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @license
* Copyright 2021 Google LLC
 * SPDX-License-Identifier: Apache-2.0
*/
/**
* @license
* Copyright 2021 Google LLC
* SPDX-License-Identifier: BSD-3-Clause
*/
/**
* @license
* Copyright 2022 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @license
* Copyright 2022 Google LLC
* SPDX-License-Identifier: BSD-3-Clause
*/
/**
* @license
* Copyright 2023 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/
/**
* @license
* Copyright 2024 Google LLC
* SPDX-License-Identifier: Apache-2.0
*/

2 file diffs suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff.