Compare commits


4 Commits

Author         SHA1        Message             Date
Pascal Vizeli  2470d16d85  Cleanup             2022-08-09 14:36:56 +00:00
Pascal Vizeli  5c0172440a  change name         2022-08-09 11:18:33 +00:00
Pascal Vizeli  006db94cc0  fix gateway         2022-08-09 08:32:16 +00:00
Pascal Vizeli  9f1ab99265  Support basic IPv6  2022-08-09 08:07:52 +00:00
2061 changed files with 26302 additions and 547611 deletions

.devcontainer/devcontainer.json

@@ -1,48 +1,37 @@
{
"name": "Supervisor dev",
"image": "ghcr.io/home-assistant/devcontainer:supervisor",
"containerEnv": {
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
},
"remoteEnv": {
"PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
},
"appPort": ["9123:8123", "7357:4357"],
"postCreateCommand": "bash devcontainer_setup",
"postStartCommand": "bash devcontainer_bootstrap",
"postCreateCommand": "bash devcontainer_bootstrap",
"runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
"customizations": {
"vscode": {
"extensions": [
"charliermarsh.ruff",
"ms-python.pylint",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"redhat.vscode-yaml",
"esbenp.prettier-vscode",
"GitHub.vscode-pull-request-github"
],
"settings": {
"python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
"python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
"python.terminal.activateEnvInCurrentTerminal": true,
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
},
"terminal.integrated.defaultProfile.linux": "zsh",
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
}
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"esbenp.prettier-vscode"
],
"mounts": [ "type=volume,target=/var/lib/docker" ],
"settings": {
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
}
},
"mounts": ["type=volume,target=/var/lib/docker"]
},
"terminal.integrated.defaultProfile.linux": "zsh",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"python.pythonPath": "/usr/local/bin/python3",
"python.linting.pylintEnabled": true,
"python.linting.enabled": true,
"python.formatting.provider": "black",
"python.formatting.blackArgs": ["--target-version", "py39"],
"python.formatting.blackPath": "/usr/local/bin/black",
"python.linting.banditPath": "/usr/local/bin/bandit",
"python.linting.flake8Path": "/usr/local/bin/flake8",
"python.linting.mypyPath": "/usr/local/bin/mypy",
"python.linting.pylintPath": "/usr/local/bin/pylint",
"python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
}
}

.github/ISSUE_TEMPLATE/bug_report.yml

@@ -20,14 +20,22 @@ body:
attributes:
value: |
## Environment
- type: input
validations:
required: true
attributes:
label: What is the used version of the Supervisor?
placeholder: supervisor-
description: >
Can be found in the Supervisor panel -> System tab. Starts with
`supervisor-....`.
- type: dropdown
validations:
required: true
attributes:
label: What type of installation are you running?
description: >
If you don't know, can be found in [Settings -> System -> Repairs -> System Information](https://my.home-assistant.io/redirect/system_health/).
It is listed as the `Installation Type` value.
If you don't know, you can find it in: Configuration panel -> Info.
options:
- Home Assistant OS
- Home Assistant Supervised
@@ -40,6 +48,22 @@ body:
- Home Assistant Operating System
- Debian
- Other (e.g., Raspbian/Raspberry Pi OS/Fedora)
- type: input
validations:
required: true
attributes:
label: What is the version of your installed operating system?
placeholder: "5.11"
description: Can be found in the Supervisor panel -> System tab.
- type: input
validations:
required: true
attributes:
label: What version of Home Assistant Core is installed?
placeholder: core-
description: >
Can be found in the Supervisor panel -> System tab. Starts with
`core-....`.
- type: markdown
attributes:
value: |
@@ -63,30 +87,8 @@ body:
attributes:
label: Anything in the Supervisor logs that might be useful for us?
description: >
Supervisor Logs can be found in [Settings -> System -> Logs](https://my.home-assistant.io/redirect/logs/)
then choose `Supervisor` in the top right.
[![Open your Home Assistant instance and show your Supervisor system logs.](https://my.home-assistant.io/badges/supervisor_logs.svg)](https://my.home-assistant.io/redirect/supervisor_logs/)
The Supervisor logs can be found in the Supervisor panel -> System tab.
render: txt
- type: textarea
validations:
required: true
attributes:
label: System Health information
description: >
System Health information can be found in the top right menu in [Settings -> System -> Repairs](https://my.home-assistant.io/redirect/repairs/).
Click the copy button at the bottom of the pop-up and paste it here.
[![Open your Home Assistant instance and show health information about your system.](https://my.home-assistant.io/badges/system_health.svg)](https://my.home-assistant.io/redirect/system_health/)
- type: textarea
attributes:
label: Supervisor diagnostics
placeholder: "drag-and-drop the diagnostics data file here (do not copy-and-paste the content)"
description: >-
Supervisor diagnostics can be found in [Settings -> Integrations](https://my.home-assistant.io/redirect/integrations/).
Find the card that says `Home Assistant Supervisor`, open its menu and select 'Download diagnostics'.
**Please drag-and-drop the downloaded file into the textbox below. Do not copy and paste its contents.**
- type: textarea
attributes:
label: Additional information

.github/PULL_REQUEST_TEMPLATE.md

@@ -38,7 +38,6 @@
- This PR is related to issue:
- Link to documentation pull request:
- Link to cli pull request:
- Link to client library pull request:
## Checklist
@@ -53,14 +52,12 @@
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist]
- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
- [ ] Tests have been added to verify that the new code works.
If API endpoints or add-on configuration are added/changed:
If API endpoints of add-on configuration are added/changed:
- [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
- [ ] [CLI][cli-repository] updated (if necessary)
- [ ] [Client library][client-library-repository] updated (if necessary)
<!--
Thank you for contributing <3
@@ -70,5 +67,3 @@ If API endpoints or add-on configuration are added/changed:
[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
[docs-repository]: https://github.com/home-assistant/developers.home-assistant
[cli-repository]: https://github.com/home-assistant/cli
[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/

.github/workflows/builder.yml

@@ -33,13 +33,10 @@ on:
- setup.py
env:
DEFAULT_PYTHON: "3.12"
DEFAULT_PYTHON: 3.9
BUILD_NAME: supervisor
BUILD_TYPE: supervisor
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true
WHEELS_TAG: 3.9-alpine3.14
jobs:
init:
@@ -53,7 +50,7 @@ jobs:
requirements: ${{ steps.requirements.outputs.changed }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.1
uses: actions/checkout@v3.0.2
with:
fetch-depth: 0
@@ -70,51 +67,39 @@ jobs:
- name: Get changed files
id: changed_files
if: steps.version.outputs.publish == 'false'
uses: masesgroup/retrieve-changed-files@v3.0.0
uses: jitterbit/get-changed-files@v1
- name: Check if requirements files changed
id: requirements
run: |
if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
echo "changed=true" >> "$GITHUB_OUTPUT"
if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.json) ]]; then
echo "::set-output name=changed::true"
fi
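The paired lines above show the two idioms for emitting step outputs: the newer $GITHUB_OUTPUT environment file and the legacy ::set-output workflow command, which GitHub has deprecated. A minimal Python sketch of the same mechanics, for illustration only (the helper name is hypothetical):

import os

def set_step_output(name: str, value: str) -> None:
    """Hypothetical helper: emit a GitHub Actions step output."""
    output_file = os.environ.get("GITHUB_OUTPUT")
    if output_file:
        # New style: append "name=value" to the file named by $GITHUB_OUTPUT.
        with open(output_file, "a", encoding="utf-8") as fh:
            fh.write(f"{name}={value}\n")
    else:
        # Legacy style: print the deprecated ::set-output workflow command.
        print(f"::set-output name={name}::{value}")

set_step_output("changed", "true")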
build:
name: Build ${{ matrix.arch }} supervisor
needs: init
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
packages: write
strategy:
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.1
uses: actions/checkout@v3.0.2
with:
fetch-depth: 0
- name: Write env-file
if: needs.init.outputs.requirements == 'true'
run: |
(
# Fix out of memory issues with rust
echo "CARGO_NET_GIT_FETCH_WITH_CLI=true"
) > .env_file
- name: Build wheels
if: needs.init.outputs.requirements == 'true'
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2022.01.2
with:
abi: cp312
tag: musllinux_1_2
tag: ${{ env.WHEELS_TAG }}
arch: ${{ matrix.arch }}
wheels-host: wheels.hass.io
wheels-key: ${{ secrets.WHEELS_KEY }}
apk: "libffi-dev;openssl-dev;yaml-dev"
wheels-user: wheels
apk: "build-base;libffi-dev;openssl-dev;cargo"
skip-binary: aiohttp
env-file: true
requirements: "requirements.txt"
- name: Set version
@@ -123,33 +108,16 @@ jobs:
with:
type: ${{ env.BUILD_TYPE }}
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
- name: Login to DockerHub
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v5.2.0
uses: docker/login-action@v2.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Install Cosign
if: needs.init.outputs.publish == 'true'
uses: sigstore/cosign-installer@v3.7.0
with:
cosign-release: "v2.4.0"
- name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true'
run: |
pip3 install setuptools dirhash
dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
echo "${dir_hash}" > rootfs/supervisor.sha256
- name: Sign supervisor SHA256
if: needs.init.outputs.publish == 'true'
run: |
cosign sign-blob --yes rootfs/supervisor.sha256 --bundle rootfs/supervisor.sha256.sig
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v3.3.0
uses: docker/login-action@v2.0.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -160,17 +128,55 @@ jobs:
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
- name: Build supervisor
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2022.06.2
with:
args: |
$BUILD_ARGS \
--${{ matrix.arch }} \
--target /data \
--cosign \
--generic ${{ needs.init.outputs.version }}
env:
CAS_API_KEY: ${{ secrets.CAS_TOKEN }}
codenotary:
name: CAS signature
needs: init
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v3.0.2
with:
fetch-depth: 0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v4.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Set version
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/version@master
with:
type: ${{ env.BUILD_TYPE }}
- name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true'
id: dirhash
run: |
pip3 install dirhash
dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
echo "::set-output name=dirhash::${dir_hash}"
- name: Signing Source
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/codenotary@master
with:
source: hash://${{ steps.dirhash.outputs.dirhash }}
asset: supervisor-${{ needs.init.outputs.version }}
token: ${{ secrets.CAS_TOKEN }}
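Both signing flows start from the same directory hash: dirhash walks the supervisor package and hashes only the Python sources. A minimal sketch of what the dirhash CLI calls above compute, assuming the dirhash package's Python API mirrors its CLI flags:

from dirhash import dirhash

# Equivalent of: dirhash supervisor -a sha256 --match "*.py"
dir_hash = dirhash("supervisor", "sha256", match=["*.py"])
print(dir_hash)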
version:
name: Update version
needs: ["init", "run_supervisor"]
@@ -178,7 +184,7 @@ jobs:
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v4.2.1
uses: actions/checkout@v3.0.2
- name: Initialize git
if: needs.init.outputs.publish == 'true'
@@ -199,15 +205,15 @@ jobs:
run_supervisor:
runs-on: ubuntu-latest
name: Run the Supervisor
needs: ["build", "init"]
needs: ["build", "codenotary", "init"]
timeout-minutes: 60
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.1
uses: actions/checkout@v3.0.2
- name: Build the Supervisor
if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2024.08.2
uses: home-assistant/builder@2022.06.2
with:
args: |
--test \
@@ -219,7 +225,7 @@ jobs:
if: needs.init.outputs.publish == 'true'
run: |
docker pull ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }}
docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} ghcr.io/home-assistant/amd64-hassio-supervisor:runner
docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} homeassistant/amd64-hassio-supervisor:runner
- name: Create the Supervisor
run: |
@@ -236,7 +242,7 @@ jobs:
-e SUPERVISOR_NAME=hassio_supervisor \
-e SUPERVISOR_DEV=1 \
-e SUPERVISOR_MACHINE="qemux86-64" \
ghcr.io/home-assistant/amd64-hassio-supervisor:runner
homeassistant/amd64-hassio-supervisor:runner
- name: Start the Supervisor
run: docker start hassio_supervisor
@@ -284,12 +290,6 @@ jobs:
exit 1
fi
# Make sure its state is started
test="$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.state')"
if [ "$test" != "started" ]; then
exit 1
fi
- name: Check the Supervisor code sign
if: needs.init.outputs.publish == 'true'
run: |
@@ -324,7 +324,7 @@ jobs:
if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
exit 1
fi
echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT"
echo "::set-output name=slug::$(echo $test | jq -r '.data.slug')"
- name: Uninstall SSH add-on
run: |
@@ -362,12 +362,6 @@ jobs:
exit 1
fi
# Make sure its state is started
test="$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.state')"
if [ "$test" != "started" ]; then
exit 1
fi
- name: Restore SSL directory from backup
run: |
test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --folders ssl --no-progress --raw-json | jq -r '.result')

.github/workflows/ci.yaml

@@ -8,32 +8,30 @@ on:
pull_request: ~
env:
DEFAULT_PYTHON: "3.12"
PRE_COMMIT_CACHE: ~/.cache/pre-commit
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true
DEFAULT_PYTHON: 3.9
PRE_COMMIT_HOME: ~/.cache/pre-commit
DEFAULT_CAS: v1.0.2
jobs:
# Separate job to pre-populate the base dependency cache
# This prevent upcoming jobs to do the same individually
prepare:
runs-on: ubuntu-latest
outputs:
python-version: ${{ steps.python.outputs.python-version }}
name: Prepare Python dependencies
strategy:
matrix:
python-version: [3.9]
name: Prepare Python ${{ matrix.python-version }} dependencies
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
- name: Set up Python
uses: actions/checkout@v3.0.2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.2.0
uses: actions/setup-python@v4.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
python-version: ${{ matrix.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.0.6
with:
path: venv
key: |
@@ -47,10 +45,9 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
uses: actions/cache@v3.0.6
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
restore-keys: |
@@ -61,91 +58,34 @@ jobs:
. venv/bin/activate
pre-commit install-hooks
lint-ruff-format:
name: Check ruff-format
lint-black:
name: Check black
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/checkout@v3.0.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.0.6
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run ruff-format
- name: Run black
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
lint-ruff:
name: Check ruff
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run ruff
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
black --target-version py38 --check supervisor tests setup.py
lint-dockerfile:
name: Check Dockerfile
@@ -153,7 +93,7 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v3.0.2
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -168,19 +108,19 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/checkout@v3.0.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.0.6
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
@@ -188,9 +128,9 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
uses: actions/cache@v3.0.6
with:
path: ${{ env.PRE_COMMIT_CACHE }}
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
@@ -206,25 +146,57 @@ jobs:
. venv/bin/activate
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
lint-json:
name: Check JSON
lint-flake8:
name: Check flake8
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/checkout@v3.0.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.0.6
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Register flake8 problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/flake8.json"
- name: Run flake8
run: |
. venv/bin/activate
flake8 supervisor tests
lint-isort:
name: Check isort
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.6
with:
path: venv
key: |
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
@@ -232,9 +204,50 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.1.1
uses: actions/cache@v3.0.6
with:
path: ${{ env.PRE_COMMIT_CACHE }}
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run isort
run: |
. venv/bin/activate
pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
lint-json:
name: Check JSON
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.6
with:
path: venv
key: |
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.0.6
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
@@ -256,19 +269,19 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/checkout@v3.0.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.0.6
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
@@ -282,29 +295,73 @@ jobs:
. venv/bin/activate
pylint supervisor tests
pytest:
lint-pyupgrade:
name: Check pyupgrade
runs-on: ubuntu-latest
needs: prepare
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/checkout@v3.0.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Install Cosign
uses: sigstore/cosign-installer@v3.7.0
with:
cosign-release: "v2.4.0"
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.0.6
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v3.0.6
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run pyupgrade
run: |
. venv/bin/activate
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
pytest:
runs-on: ubuntu-latest
needs: prepare
strategy:
matrix:
python-version: [3.9]
name: Run tests Python ${{ matrix.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v3.0.2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4.2.0
id: python
with:
python-version: ${{ matrix.python-version }}
- name: Install CAS tools
uses: home-assistant/actions/helpers/cas@master
with:
version: ${{ env.DEFAULT_CAS }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v3.0.6
with:
path: venv
key: |
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
@@ -313,7 +370,7 @@ jobs:
- name: Install additional system dependencies
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon
sudo apt-get install -y --no-install-recommends libpulse0 libudev1
- name: Register Python problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/python.json"
@@ -335,38 +392,37 @@ jobs:
-o console_output_style=count \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@v4.4.2
uses: actions/upload-artifact@v3.1.0
with:
name: coverage-${{ matrix.python-version }}
path: .coverage
include-hidden-files: true
coverage:
name: Process test coverage
runs-on: ubuntu-latest
needs: ["pytest", "prepare"]
needs: pytest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.2.0
uses: actions/checkout@v3.0.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.1.1
uses: actions/cache@v3.0.6
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.8
uses: actions/download-artifact@v3
- name: Combine coverage results
run: |
. venv/bin/activate
@@ -374,4 +430,4 @@ jobs:
coverage report
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4.6.0
uses: codecov/codecov-action@v3.1.0

.github/workflows/lock.yml

@@ -9,7 +9,7 @@ jobs:
lock:
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v5.0.1
- uses: dessant/lock-threads@v3.0.0
with:
github-token: ${{ github.token }}
issue-inactive-days: "30"

.github/workflows/matchers/flake8.json (new file)

@@ -0,0 +1,30 @@
{
"problemMatcher": [
{
"owner": "flake8-error",
"severity": "error",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
},
{
"owner": "flake8-warning",
"severity": "warning",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
}
]
}
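The matcher's regexes can be sanity-checked against typical flake8 output; a quick Python check of the error pattern (the sample output line is invented for illustration):

import re

# Error pattern from the problem matcher above (E-codes are errors).
pattern = re.compile(r"^(.*):(\d+):(\d+):\s(E\d{3}\s.*)$")

line = "supervisor/core.py:42:1: E302 expected 2 blank lines, found 1"
match = pattern.match(line)
assert match is not None
# Groups map to file, line, column, message, as declared in the matcher.
print(match.group(1), match.group(2), match.group(3), match.group(4))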

.github/workflows/release-drafter.yml

@@ -11,7 +11,7 @@ jobs:
name: Release Drafter
steps:
- name: Checkout the repository
uses: actions/checkout@v4.2.1
uses: actions/checkout@v3.0.2
with:
fetch-depth: 0
@@ -33,10 +33,10 @@ jobs:
echo Current version: $latest
echo New target version: $datepre.$newpost
echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
echo "::set-output name=version::$datepre.$newpost"
- name: Run Release Drafter
uses: release-drafter/release-drafter@v6.0.0
uses: release-drafter/release-drafter@v5.20.0
with:
tag: ${{ steps.version.outputs.version }}
name: ${{ steps.version.outputs.version }}

.github/workflows/sentry.yaml

@@ -10,9 +10,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.1
uses: actions/checkout@v3.0.2
- name: Sentry Release
uses: getsentry/action-release@v1.7.0
uses: getsentry/action-release@v1.2.0
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}

.github/workflows/stale.yml

@@ -9,10 +9,10 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v9.0.0
- uses: actions/stale@v5.1.1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 30
days-before-stale: 60
days-before-close: 7
stale-issue-label: "stale"
exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,pinned,rfc,security"

.hadolint.yaml

@@ -3,5 +3,4 @@ ignored:
- DL3006
- DL3013
- DL3018
- DL3042
- SC2155

.pre-commit-config.yaml

@@ -1,15 +1,34 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.7
- repo: https://github.com/psf/black
rev: 22.6.0
hooks:
- id: ruff
- id: black
args:
- --fix
- id: ruff-format
- --safe
- --quiet
- --target-version
- py39
files: ^((supervisor|tests)/.+)?[^/]+\.py$
- repo: https://gitlab.com/pycqa/flake8
rev: 3.8.3
hooks:
- id: flake8
additional_dependencies:
- flake8-docstrings==1.5.0
- pydocstyle==5.0.2
files: ^(supervisor|script|tests)/.+\.py$
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v3.1.0
hooks:
- id: check-executables-have-shebangs
stages: [manual]
- id: check-json
- repo: https://github.com/PyCQA/isort
rev: 5.9.3
hooks:
- id: isort
- repo: https://github.com/asottile/pyupgrade
rev: v2.32.1
hooks:
- id: pyupgrade
args: [--py39-plus]

.vscode/launch.json

@@ -13,13 +13,6 @@
"remoteRoot": "/usr/src/supervisor"
}
]
},
{
"name": "Debug Tests",
"type": "python",
"request": "test",
"console": "internalConsole",
"justMyCode": false
}
]
}

.vscode/tasks.json

@@ -58,23 +58,9 @@
"problemMatcher": []
},
{
"label": "Ruff Check",
"label": "Flake8",
"type": "shell",
"command": "ruff check --fix supervisor tests",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Ruff Format",
"type": "shell",
"command": "ruff format supervisor tests",
"command": "flake8 supervisor tests",
"group": {
"kind": "test",
"isDefault": true

Dockerfile

@@ -3,20 +3,17 @@ FROM ${BUILD_FROM}
ENV \
S6_SERVICES_GRACETIME=10000 \
SUPERVISOR_API=http://localhost \
CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \
UV_SYSTEM_PYTHON=true
SUPERVISOR_API=http://localhost
ARG \
COSIGN_VERSION \
BUILD_ARCH
BUILD_ARCH \
CAS_VERSION
# Install base
WORKDIR /usr/src
RUN \
set -x \
&& apk add --no-cache \
findutils \
eudev \
eudev-libs \
git \
@@ -24,26 +21,33 @@ RUN \
libpulse \
musl \
openssl \
yaml \
&& apk add --no-cache --virtual .build-dependencies \
build-base \
go \
\
&& curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
&& chmod a+x /usr/bin/cosign \
&& pip3 install uv==0.2.21
&& git clone -b "v${CAS_VERSION}" --depth 1 \
https://github.com/codenotary/cas \
&& cd cas \
&& make cas \
&& mv cas /usr/bin/cas \
\
&& apk del .build-dependencies \
&& rm -rf /root/go /root/.cache \
&& rm -rf /usr/src/cas
# Install requirements
COPY requirements.txt .
RUN \
if [ "${BUILD_ARCH}" = "i386" ]; then \
linux32 uv pip install --no-build -r requirements.txt; \
else \
uv pip install --no-build -r requirements.txt; \
fi \
export MAKEFLAGS="-j$(nproc)" \
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
"https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
-r ./requirements.txt \
&& rm -f requirements.txt
# Install Home Assistant Supervisor
COPY . supervisor
RUN \
pip3 install -e ./supervisor \
pip3 install --no-cache-dir -e ./supervisor \
&& python3 -m compileall ./supervisor/supervisor

README.md

@@ -30,5 +30,3 @@ Releases are done in 3 stages (channels) with this structure:
[development]: https://developers.home-assistant.io/docs/supervisor/development
[stable]: https://github.com/home-assistant/version/blob/master/stable.json
[![Home Assistant - A project from the Open Home Foundation](https://www.openhomefoundation.org/badges/home-assistant.png)](https://www.openhomefoundation.org/)

build.yaml

@@ -1,18 +1,16 @@
image: ghcr.io/home-assistant/{arch}-hassio-supervisor
image: homeassistant/{arch}-hassio-supervisor
shadow_repository: ghcr.io/home-assistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.20
armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.20
armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.20
amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.20
i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.20
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.9-alpine3.14
armhf: ghcr.io/home-assistant/armhf-base-python:3.9-alpine3.14
armv7: ghcr.io/home-assistant/armv7-base-python:3.9-alpine3.14
amd64: ghcr.io/home-assistant/amd64-base-python:3.9-alpine3.14
i386: ghcr.io/home-assistant/i386-base-python:3.9-alpine3.14
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
cosign:
base_identity: https://github.com/home-assistant/docker-base/.*
identity: https://github.com/home-assistant/supervisor/.*
args:
COSIGN_VERSION: 2.4.0
CAS_VERSION: 1.0.2
labels:
io.hass.type: supervisor
org.opencontainers.image.title: Home Assistant Supervisor

pylintrc (new file)

@@ -0,0 +1,45 @@
[MASTER]
reports=no
jobs=2
good-names=id,i,j,k,ex,Run,_,fp,T,os
extension-pkg-whitelist=
ciso8601
# Reasons disabled:
# format - handled by black
# locally-disabled - it spams too much
# duplicate-code - unavoidable
# cyclic-import - doesn't test if both import on load
# abstract-class-not-used - is flaky, should not show up but does
# unused-argument - generic callbacks and setup methods create a lot of warnings
# too-many-* - are not enforced for the sake of readability
# too-few-* - same as too-many-*
# abstract-method - with intro of async there are always methods missing
disable=
format,
abstract-method,
cyclic-import,
duplicate-code,
locally-disabled,
no-else-return,
not-context-manager,
too-few-public-methods,
too-many-arguments,
too-many-branches,
too-many-instance-attributes,
too-many-lines,
too-many-locals,
too-many-public-methods,
too-many-return-statements,
too-many-statements,
unused-argument,
consider-using-with
[EXCEPTIONS]
overgeneral-exceptions=Exception
[TYPECHECK]
ignored-modules = distutils

pyproject.toml

@@ -1,373 +0,0 @@
[build-system]
requires = ["setuptools~=68.0.0", "wheel~=0.40.0"]
build-backend = "setuptools.build_meta"
[project]
name = "Supervisor"
dynamic = ["version", "dependencies"]
license = { text = "Apache-2.0" }
description = "Open-source private cloud os for Home-Assistant based on HassOS"
readme = "README.md"
authors = [
{ name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
]
keywords = ["docker", "home-assistant", "api"]
requires-python = ">=3.12.0"
[project.urls]
"Homepage" = "https://www.home-assistant.io/"
"Source Code" = "https://github.com/home-assistant/supervisor"
"Bug Reports" = "https://github.com/home-assistant/supervisor/issues"
"Docs: Dev" = "https://developers.home-assistant.io/"
"Discord" = "https://www.home-assistant.io/join-chat/"
"Forum" = "https://community.home-assistant.io/"
[tool.setuptools]
platforms = ["any"]
zip-safe = false
include-package-data = true
[tool.setuptools.packages.find]
include = ["supervisor*"]
[tool.pylint.MAIN]
py-version = "3.12"
# Use a conservative default here; 2 should speed up most setups and not hurt
# any too bad. Override on command line as appropriate.
jobs = 2
persistent = false
extension-pkg-allow-list = ["ciso8601"]
[tool.pylint.BASIC]
class-const-naming-style = "any"
good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]
[tool.pylint."MESSAGES CONTROL"]
# Reasons disabled:
# format - handled by ruff
# abstract-method - with intro of async there are always methods missing
# cyclic-import - doesn't test if both import on load
# duplicate-code - unavoidable
# locally-disabled - it spams too much
# too-many-* - are not enforced for the sake of readability
# too-few-* - same as too-many-*
# unused-argument - generic callbacks and setup methods create a lot of warnings
disable = [
"format",
"abstract-method",
"cyclic-import",
"duplicate-code",
"locally-disabled",
"no-else-return",
"not-context-manager",
"too-few-public-methods",
"too-many-arguments",
"too-many-branches",
"too-many-instance-attributes",
"too-many-lines",
"too-many-locals",
"too-many-public-methods",
"too-many-return-statements",
"too-many-statements",
"unused-argument",
"consider-using-with",
# Handled by ruff
# Ref: <https://github.com/astral-sh/ruff/issues/970>
"await-outside-async", # PLE1142
"bad-str-strip-call", # PLE1310
"bad-string-format-type", # PLE1307
"bidirectional-unicode", # PLE2502
"continue-in-finally", # PLE0116
"duplicate-bases", # PLE0241
"format-needs-mapping", # F502
"function-redefined", # F811
# Needed because ruff does not understand type of __all__ generated by a function
# "invalid-all-format", # PLE0605
"invalid-all-object", # PLE0604
"invalid-character-backspace", # PLE2510
"invalid-character-esc", # PLE2513
"invalid-character-nul", # PLE2514
"invalid-character-sub", # PLE2512
"invalid-character-zero-width-space", # PLE2515
"logging-too-few-args", # PLE1206
"logging-too-many-args", # PLE1205
"missing-format-string-key", # F524
"mixed-format-string", # F506
"no-method-argument", # N805
"no-self-argument", # N805
"nonexistent-operator", # B002
"nonlocal-without-binding", # PLE0117
"not-in-loop", # F701, F702
"notimplemented-raised", # F901
"return-in-init", # PLE0101
"return-outside-function", # F706
"syntax-error", # E999
"too-few-format-args", # F524
"too-many-format-args", # F522
"too-many-star-expressions", # F622
"truncated-format-string", # F501
"undefined-all-variable", # F822
"undefined-variable", # F821
"used-prior-global-declaration", # PLE0118
"yield-inside-async-function", # PLE1700
"yield-outside-function", # F704
"anomalous-backslash-in-string", # W605
"assert-on-string-literal", # PLW0129
"assert-on-tuple", # F631
"bad-format-string", # W1302, F
"bad-format-string-key", # W1300, F
"bare-except", # E722
"binary-op-exception", # PLW0711
"cell-var-from-loop", # B023
# "dangerous-default-value", # B006, ruff catches new occurrences, needs more work
"duplicate-except", # B014
"duplicate-key", # F601
"duplicate-string-formatting-argument", # F
"duplicate-value", # F
"eval-used", # PGH001
"exec-used", # S102
# "expression-not-assigned", # B018, ruff catches new occurrences, needs more work
"f-string-without-interpolation", # F541
"forgotten-debug-statement", # T100
"format-string-without-interpolation", # F
# "global-statement", # PLW0603, ruff catches new occurrences, needs more work
"global-variable-not-assigned", # PLW0602
"implicit-str-concat", # ISC001
"import-self", # PLW0406
"inconsistent-quotes", # Q000
"invalid-envvar-default", # PLW1508
"keyword-arg-before-vararg", # B026
"logging-format-interpolation", # G
"logging-fstring-interpolation", # G
"logging-not-lazy", # G
"misplaced-future", # F404
"named-expr-without-context", # PLW0131
"nested-min-max", # PLW3301
# "pointless-statement", # B018, ruff catches new occurrences, needs more work
"raise-missing-from", # TRY200
# "redefined-builtin", # A001, ruff is way more stricter, needs work
"try-except-raise", # TRY302
"unused-argument", # ARG001, we don't use it
"unused-format-string-argument", #F507
"unused-format-string-key", # F504
"unused-import", # F401
"unused-variable", # F841
"useless-else-on-loop", # PLW0120
"wildcard-import", # F403
"bad-classmethod-argument", # N804
"consider-iterating-dictionary", # SIM118
"empty-docstring", # D419
"invalid-name", # N815
"line-too-long", # E501, disabled globally
"missing-class-docstring", # D101
"missing-final-newline", # W292
"missing-function-docstring", # D103
"missing-module-docstring", # D100
"multiple-imports", #E401
"singleton-comparison", # E711, E712
"subprocess-run-check", # PLW1510
"superfluous-parens", # UP034
"ungrouped-imports", # I001
"unidiomatic-typecheck", # E721
"unnecessary-direct-lambda-call", # PLC3002
"unnecessary-lambda-assignment", # PLC3001
"unneeded-not", # SIM208
"useless-import-alias", # PLC0414
"wrong-import-order", # I001
"wrong-import-position", # E402
"comparison-of-constants", # PLR0133
"comparison-with-itself", # PLR0124
# "consider-alternative-union-syntax", # UP007, typing extension
"consider-merging-isinstance", # PLR1701
# "consider-using-alias", # UP006, typing extension
"consider-using-dict-comprehension", # C402
"consider-using-generator", # C417
"consider-using-get", # SIM401
"consider-using-set-comprehension", # C401
"consider-using-sys-exit", # PLR1722
"consider-using-ternary", # SIM108
"literal-comparison", # F632
"property-with-parameters", # PLR0206
"super-with-arguments", # UP008
"too-many-branches", # PLR0912
"too-many-return-statements", # PLR0911
"too-many-statements", # PLR0915
"trailing-comma-tuple", # COM818
"unnecessary-comprehension", # C416
"use-a-generator", # C417
"use-dict-literal", # C406
"use-list-literal", # C405
"useless-object-inheritance", # UP004
"useless-return", # PLR1711
# "no-self-use", # PLR6301 # Optional plugin, not enabled
]
[tool.pylint.REPORTS]
score = false
[tool.pylint.TYPECHECK]
ignored-modules = ["distutils"]
[tool.pylint.FORMAT]
expected-line-ending-format = "LF"
[tool.pylint.EXCEPTIONS]
overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
[tool.pylint.DESIGN]
max-positional-arguments = 10
[tool.pytest.ini_options]
testpaths = ["tests"]
norecursedirs = [".git"]
log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
log_date_format = "%Y-%m-%d %H:%M:%S"
asyncio_mode = "auto"
filterwarnings = [
"error",
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash",
"ignore::pytest.PytestUnraisableExceptionWarning",
]
[tool.ruff]
lint.select = [
"B002", # Python does not support the unary prefix increment
"B007", # Loop control variable {name} not used within loop body
"B014", # Exception handler with duplicate exception
"B023", # Function definition does not bind loop variable {name}
"B026", # Star-arg unpacking after a keyword argument is strongly discouraged
"B904", # Use raise from to specify exception cause
"C", # complexity
"COM818", # Trailing comma on bare tuple prohibited
"D", # docstrings
"DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow()
"DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
"E", # pycodestyle
"F", # pyflakes/autoflake
"G", # flake8-logging-format
"I", # isort
"ICN001", # import concentions; {name} should be imported as {asname}
"N804", # First argument of a class method should be named cls
"N805", # First argument of a method should be named self
"N815", # Variable {name} in class scope should not be mixedCase
"PGH004", # Use specific rule codes when using noqa
"PLC0414", # Useless import alias. Import alias does not rename original package.
"PLC", # pylint
"PLE", # pylint
"PLR", # pylint
"PLW", # pylint
"Q000", # Double quotes found but single quotes preferred
"RUF006", # Store a reference to the return value of asyncio.create_task
"S102", # Use of exec detected
"S103", # bad-file-permissions
"S108", # hardcoded-temp-file
"S306", # suspicious-mktemp-usage
"S307", # suspicious-eval-usage
"S313", # suspicious-xmlc-element-tree-usage
"S314", # suspicious-xml-element-tree-usage
"S315", # suspicious-xml-expat-reader-usage
"S316", # suspicious-xml-expat-builder-usage
"S317", # suspicious-xml-sax-usage
"S318", # suspicious-xml-mini-dom-usage
"S319", # suspicious-xml-pull-dom-usage
"S320", # suspicious-xmle-tree-usage
"S601", # paramiko-call
"S602", # subprocess-popen-with-shell-equals-true
"S604", # call-with-shell-equals-true
"S608", # hardcoded-sql-expression
"S609", # unix-command-wildcard-injection
"SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
"SIM117", # Merge with-statements that use the same scope
"SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
"SIM201", # Use {left} != {right} instead of not {left} == {right}
"SIM208", # Use {expr} instead of not (not {expr})
"SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
"SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
"SIM401", # Use get from dict with default instead of an if block
"T100", # Trace found: {name} used
"T20", # flake8-print
"TID251", # Banned imports
"TRY004", # Prefer TypeError exception for invalid type
"TRY302", # Remove exception handler; error is immediately re-raised
"UP", # pyupgrade
"W", # pycodestyle
]
lint.ignore = [
"D202", # No blank lines allowed after function docstring
"D203", # 1 blank line required before class docstring
"D213", # Multi-line docstring summary should start at the second line
"D406", # Section name should end with a newline
"D407", # Section name underlining
"E501", # line too long
"E731", # do not assign a lambda expression, use a def
# Ignore ignored, as the rule is now back in preview/nursery, which cannot
# be ignored anymore without warnings.
# https://github.com/astral-sh/ruff/issues/7491
# "PLC1901", # Lots of false positives
# False positives https://github.com/astral-sh/ruff/issues/5386
"PLC0208", # Use a sequence type instead of a `set` when iterating over values
"PLR0911", # Too many return statements ({returns} > {max_returns})
"PLR0912", # Too many branches ({branches} > {max_branches})
"PLR0913", # Too many arguments to function call ({c_args} > {max_args})
"PLR0915", # Too many statements ({statements} > {max_statements})
"PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
"PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
"UP006", # keep type annotation style as is
"UP007", # keep type annotation style as is
# Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
# May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
"W191",
"E111",
"E114",
"E117",
"D206",
"D300",
"Q000",
"Q001",
"Q002",
"Q003",
"COM812",
"COM819",
"ISC001",
"ISC002",
# Disabled because ruff does not understand type of __all__ generated by a function
"PLE0605",
]
[tool.ruff.lint.flake8-import-conventions.extend-aliases]
voluptuous = "vol"
[tool.ruff.lint.flake8-pytest-style]
fixture-parentheses = false
[tool.ruff.lint.flake8-tidy-imports.banned-api]
"pytz".msg = "use zoneinfo instead"
[tool.ruff.lint.isort]
force-sort-within-sections = true
section-order = [
"future",
"standard-library",
"third-party",
"first-party",
"local-folder",
]
forced-separate = ["tests"]
known-first-party = ["supervisor", "tests"]
combine-as-imports = true
split-on-trailing-comma = false
[tool.ruff.lint.per-file-ignores]
# DBus Service Mocks must use typing and names understood by dbus-fast
"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
[tool.ruff.lint.mccabe]
max-complexity = 25
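Many of the selected rules cover patterns that pylint or flake8 previously flagged; SIM118, quoted above, is a representative example. A tiny illustrative snippet (not from the Supervisor codebase):

addons = {"core_ssh": "started"}

# Flagged by SIM118: iterating over .keys() is redundant.
for slug in addons.keys():
    print(slug)

# Preferred form:
for slug in addons:
    print(slug)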

pytest.ini (new file)

@@ -0,0 +1,2 @@
[pytest]
asyncio_mode = auto
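With pytest-asyncio's auto mode, coroutine test functions are collected and run without an explicit marker. A minimal illustrative test (hypothetical, not from the test suite):

import asyncio

async def test_sleep_yields_control():
    # Runs under asyncio_mode = auto without @pytest.mark.asyncio.
    assert await asyncio.sleep(0) is None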

requirements.txt

@@ -1,29 +1,25 @@
aiodns==3.2.0
aiohttp==3.10.9
aiodns==3.0.0
aiohttp==3.8.1
async_timeout==4.0.2
atomicwrites-homeassistant==1.4.1
attrs==24.2.0
awesomeversion==24.6.0
brotli==1.1.0
ciso8601==2.3.1
colorlog==6.8.2
cpe==1.3.1
cryptography==43.0.1
debugpy==1.8.6
deepmerge==2.0
dirhash==0.5.0
docker==7.1.0
faust-cchardet==2.1.19
gitpython==3.1.43
jinja2==3.1.4
orjson==3.10.7
pulsectl==24.8.0
pyudev==0.24.3
PyYAML==6.0.2
requests==2.32.3
securetar==2024.2.1
sentry-sdk==2.16.0
setuptools==75.1.0
voluptuous==0.15.2
dbus-fast==2.24.3
typing_extensions==4.12.2
zlib-fast==0.2.0
attrs==22.1.0
awesomeversion==22.6.0
brotli==1.0.9
cchardet==2.1.7
ciso8601==2.2.0
colorlog==6.6.0
cpe==1.2.1
cryptography==37.0.4
debugpy==1.6.2
deepmerge==1.0.1
dirhash==0.2.1
docker==5.0.3
gitpython==3.1.27
jinja2==3.1.2
pulsectl==22.3.2
pyudev==0.23.2
ruamel.yaml==0.17.17
securetar==2022.2.0
sentry-sdk==1.9.2
voluptuous==0.13.1
dbus-next==0.2.3

requirements_tests.txt

@@ -1,12 +1,15 @@
coverage==7.6.1
pre-commit==4.0.1
pylint==3.3.1
pytest-aiohttp==1.0.5
pytest-asyncio==0.23.6
pytest-cov==5.0.0
pytest-timeout==2.3.1
pytest==8.3.3
ruff==0.6.9
time-machine==2.16.0
typing_extensions==4.12.2
urllib3==2.2.3
black==22.6.0
codecov==2.1.12
coverage==6.4.3
flake8-docstrings==1.6.0
flake8==5.0.4
pre-commit==2.20.0
pydocstyle==6.1.1
pylint==2.14.5
pytest-aiohttp==1.0.4
pytest-asyncio==0.18.3
pytest-cov==3.0.0
pytest-timeout==2.1.0
pytest==7.1.2
pyupgrade==2.37.3
time-machine==2.7.1


@@ -15,7 +15,7 @@ do
if [[ "${supervisor_state}" = "running" ]]; then
# Check API
if bashio::supervisor.ping > /dev/null; then
if bashio::supervisor.ping; then
failed_count=0
else
bashio::log.warning "Maybe found an issue on API healthy"


@@ -0,0 +1,4 @@
-----BEGIN PUBLIC KEY-----
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE03LvYuz79GTJx4uKp3w6NrSe5JZI
iBtgzzYi0YQYtZO/r+xFpgDJEa0gLHkXtl94fpqrFiN89In83lzaszbZtA==
-----END PUBLIC KEY-----


@@ -0,0 +1,8 @@
{
"currentcontext": {
"LcHost": "cas.codenotary.com",
"LcPort": "443"
},
"schemaversion": 3,
"users": null
}

setup.cfg (new file)

@@ -0,0 +1,29 @@
[isort]
multi_line_output = 3
include_trailing_comma=True
force_grid_wrap=0
line_length=88
indent = " "
force_sort_within_sections = true
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
default_section = THIRDPARTY
forced_separate = tests
combine_as_imports = true
use_parentheses = true
known_first_party = supervisor,tests
[flake8]
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
doctests = True
max-line-length = 88
# E501: line too long
# W503: Line break occurred before a binary operator
# E203: Whitespace before ':'
# D202 No blank lines allowed after function docstring
# W504 line break after binary operator
ignore =
E501,
W503,
E203,
D202,
W504

setup.py

@@ -1,28 +1,60 @@
"""Home Assistant Supervisor setup."""
from pathlib import Path
import re
from setuptools import setup
RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")
SUPERVISOR_DIR = Path(__file__).parent
REQUIREMENTS_FILE = SUPERVISOR_DIR / "requirements.txt"
CONST_FILE = SUPERVISOR_DIR / "supervisor/const.py"
REQUIREMENTS = REQUIREMENTS_FILE.read_text(encoding="utf-8")
CONSTANTS = CONST_FILE.read_text(encoding="utf-8")
def _get_supervisor_version():
for line in CONSTANTS.split("/n"):
if match := RE_SUPERVISOR_VERSION.match(line):
return match.group(1)
return "99.9.9dev"
from supervisor.const import SUPERVISOR_VERSION
setup(
version=_get_supervisor_version(),
dependencies=REQUIREMENTS.split("/n"),
name="Supervisor",
version=SUPERVISOR_VERSION,
license="BSD License",
author="The Home Assistant Authors",
author_email="hello@home-assistant.io",
url="https://home-assistant.io/",
description=("Open-source private cloud os for Home-Assistant" " based on HassOS"),
long_description=(
"A maintainless private cloud operator system that"
"setup a Home-Assistant instance. Based on HassOS"
),
classifiers=[
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Topic :: Home Automation",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Scientific/Engineering :: Atmospheric Science",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 3.8",
],
keywords=["docker", "home-assistant", "api"],
zip_safe=False,
platforms="any",
packages=[
"supervisor.addons",
"supervisor.api",
"supervisor.backups",
"supervisor.dbus.network",
"supervisor.dbus.network.setting",
"supervisor.dbus",
"supervisor.discovery.services",
"supervisor.discovery",
"supervisor.docker",
"supervisor.homeassistant",
"supervisor.host",
"supervisor.jobs",
"supervisor.misc",
"supervisor.plugins",
"supervisor.resolution.checks",
"supervisor.resolution.evaluations",
"supervisor.resolution.fixups",
"supervisor.resolution",
"supervisor.security",
"supervisor.services.modules",
"supervisor.services",
"supervisor.store",
"supervisor.utils",
"supervisor",
],
include_package_data=True,
)
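One side of the hunk above reads the version by scanning supervisor/const.py with RE_SUPERVISOR_VERSION. Note that it splits on the literal two-character string "/n" rather than a newline, so unless const.py begins with SUPERVISOR_VERSION the loop falls through to the "99.9.9dev" development fallback. A standalone sketch of the intended extraction, assuming a real line split:

import re
from pathlib import Path

RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")

def get_supervisor_version(const_file: Path) -> str:
    """Sketch: extract SUPERVISOR_VERSION from const.py, line by line."""
    for line in const_file.read_text(encoding="utf-8").splitlines():
        if match := RE_SUPERVISOR_VERSION.match(line):
            return match.group(1)
    return "99.9.9dev"  # development fallback, as in the file above

print(get_supervisor_version(Path("supervisor/const.py")))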

supervisor/__main__.py

@@ -1,20 +1,11 @@
"""Main file for Supervisor."""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging
from pathlib import Path
import sys
import zlib_fast
# Enable fast zlib before importing supervisor
zlib_fast.enable()
from supervisor import bootstrap # pylint: disable=wrong-import-position # noqa: E402
from supervisor.utils.logging import ( # pylint: disable=wrong-import-position # noqa: E402
activate_log_queue_handler,
)
from supervisor import bootstrap
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -37,8 +28,7 @@ if __name__ == "__main__":
bootstrap.initialize_logging()
# Init async event loop
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop = asyncio.get_event_loop()
# Check if all information are available to setup Supervisor
bootstrap.check_environment()
@@ -47,8 +37,6 @@ if __name__ == "__main__":
executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
loop.set_default_executor(executor)
activate_log_queue_handler()
_LOGGER.info("Initializing Supervisor setup")
coresys = loop.run_until_complete(bootstrap.initialize_coresys())
loop.set_debug(coresys.config.debug)

supervisor/addons/__init__.py

@@ -1 +1,439 @@
"""Init file for Supervisor add-ons."""
import asyncio
from contextlib import suppress
import logging
import tarfile
from typing import Optional, Union
from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
AddonConfigurationError,
AddonsError,
AddonsJobError,
AddonsNotSupportedError,
CoreDNSError,
DockerAPIError,
DockerError,
DockerNotFound,
HomeAssistantAPIError,
HostAppArmorError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils import check_exception_chain
from .addon import Addon
from .data import AddonsData
_LOGGER: logging.Logger = logging.getLogger(__name__)
AnyAddon = Union[Addon, AddonStore]
class AddonManager(CoreSysAttributes):
"""Manage add-ons inside Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.coresys: CoreSys = coresys
self.data: AddonsData = AddonsData(coresys)
self.local: dict[str, Addon] = {}
self.store: dict[str, AddonStore] = {}
@property
def all(self) -> list[AnyAddon]:
"""Return a list of all add-ons."""
addons: dict[str, AnyAddon] = {**self.store, **self.local}
return list(addons.values())
@property
def installed(self) -> list[Addon]:
"""Return a list of all installed add-ons."""
return list(self.local.values())
def get(self, addon_slug: str, local_only: bool = False) -> Optional[AnyAddon]:
"""Return an add-on from slug.
Prio:
1 - Local
2 - Store
"""
if addon_slug in self.local:
return self.local[addon_slug]
if not local_only:
return self.store.get(addon_slug)
return None
def from_token(self, token: str) -> Optional[Addon]:
"""Return an add-on from Supervisor token."""
for addon in self.installed:
if token == addon.supervisor_token:
return addon
return None
async def load(self) -> None:
"""Start up add-on management."""
tasks = []
for slug in self.data.system:
addon = self.local[slug] = Addon(self.coresys, slug)
tasks.append(addon.load())
# Run initial tasks
_LOGGER.info("Found %d installed add-ons", len(tasks))
if tasks:
await asyncio.wait(tasks)
# Sync DNS
await self.sync_dns()
async def boot(self, stage: AddonStartup) -> None:
"""Boot add-ons with mode auto."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.boot != AddonBoot.AUTO or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be started
_LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
if not tasks:
return
# Start add-ons sequentially to avoid issues on slow IO
for addon in tasks:
try:
await addon.start()
except AddonsError as err:
# Check if there is a system/user issue
if check_exception_chain(
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
):
addon.boot = AddonBoot.MANUAL
addon.save_persist()
except Exception as err: # pylint: disable=broad-except
self.sys_capture_exception(err)
else:
continue
_LOGGER.warning("Can't start Add-on %s", addon.slug)
await asyncio.sleep(self.sys_config.wait_boot)
async def shutdown(self, stage: AddonStartup) -> None:
"""Shutdown addons."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.state != AddonState.STARTED or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be stopped
_LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
if not tasks:
return
# Stop add-ons sequentially to avoid issues on slow IO
for addon in tasks:
try:
await addon.stop()
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
self.sys_capture_exception(err)
@Job(
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def install(self, slug: str) -> None:
"""Install an add-on."""
if slug in self.local:
raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
store = self.store.get(slug)
if not store:
raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
if not store.available:
raise AddonsNotSupportedError(
f"Add-on {slug} not supported on this platform", _LOGGER.error
)
self.data.install(store)
addon = Addon(self.coresys, slug)
await addon.load()
if not addon.path_data.is_dir():
_LOGGER.info(
"Creating Home Assistant add-on data folder %s", addon.path_data
)
addon.path_data.mkdir()
# Setup/Fix AppArmor profile
await addon.install_apparmor()
try:
await addon.instance.install(store.version, store.image, arch=addon.arch)
except DockerError as err:
self.data.uninstall(addon)
raise AddonsError() from err
else:
self.local[slug] = addon
# Reload ingress tokens
if addon.with_ingress:
await self.sys_ingress.reload()
_LOGGER.info("Add-on '%s' successfully installed", slug)
async def uninstall(self, slug: str) -> None:
"""Remove an add-on."""
if slug not in self.local:
_LOGGER.warning("Add-on %s is not installed", slug)
return
addon = self.local[slug]
try:
await addon.instance.remove()
except DockerError as err:
raise AddonsError() from err
else:
addon.state = AddonState.UNKNOWN
await addon.remove_data()
# Cleanup audio settings
if addon.path_pulse.exists():
with suppress(OSError):
addon.path_pulse.unlink()
# Cleanup AppArmor profile
with suppress(HostAppArmorError):
await addon.uninstall_apparmor()
# Cleanup Ingress panel from sidebar
if addon.ingress_panel:
addon.ingress_panel = False
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
# Cleanup Ingress dynamic port assignment
if addon.with_ingress:
self.sys_create_task(self.sys_ingress.reload())
self.sys_ingress.del_dynamic_port(slug)
# Cleanup discovery data
for message in self.sys_discovery.list_messages:
if message.addon != addon.slug:
continue
self.sys_discovery.remove(message)
# Cleanup services data
for service in self.sys_services.list_services:
if addon.slug not in service.active:
continue
service.del_service_data(addon)
self.data.uninstall(addon)
self.local.pop(slug)
_LOGGER.info("Add-on '%s' successfully removed", slug)
@Job(
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def update(self, slug: str, backup: Optional[bool] = False) -> None:
"""Update add-on."""
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
if addon.version == store.version:
raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
# Check if still available, maybe something has changed
if not store.available:
raise AddonsNotSupportedError(
f"Add-on {slug} not supported on that platform", _LOGGER.error
)
if backup:
await self.sys_backups.do_backup_partial(
name=f"addon_{addon.slug}_{addon.version}",
homeassistant=False,
addons=[addon.slug],
)
# Update instance
last_state: AddonState = addon.state
old_image = addon.image
try:
await addon.instance.update(store.version, store.image)
except DockerError as err:
raise AddonsError() from err
_LOGGER.info("Add-on '%s' successfully updated", slug)
self.data.update(store)
# Cleanup
with suppress(DockerError):
await addon.instance.cleanup(old_image=old_image)
# Setup/Fix AppArmor profile
await addon.install_apparmor()
# restore state
if last_state == AddonState.STARTED:
await addon.start()
@Job(
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def rebuild(self, slug: str) -> None:
"""Perform a rebuild of local build add-on."""
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
# Check if a rebuild is possible now
if addon.version != store.version:
raise AddonsError(
"Version changed, use Update instead of Rebuild", _LOGGER.error
)
if not addon.need_build:
raise AddonsNotSupportedError(
"Can't rebuild an image-based add-on", _LOGGER.error
)
# remove docker container but not addon config
last_state: AddonState = addon.state
try:
await addon.instance.remove()
await addon.instance.install(addon.version)
except DockerError as err:
raise AddonsError() from err
else:
self.data.update(store)
_LOGGER.info("Add-on '%s' successfully rebuilt", slug)
# restore state
if last_state == AddonState.STARTED:
await addon.start()
@Job(
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
"""Restore state of an add-on."""
if slug not in self.local:
_LOGGER.debug("Add-on %s is not local available for restore", slug)
addon = Addon(self.coresys, slug)
else:
_LOGGER.debug("Add-on %s is local available for restore", slug)
addon = self.local[slug]
await addon.restore(tar_file)
# Check if new
if slug not in self.local:
_LOGGER.info("Detect new Add-on after restore %s", slug)
self.local[slug] = addon
# Update ingress
if addon.with_ingress:
await self.sys_ingress.reload()
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
@Job(conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST])
async def repair(self) -> None:
"""Repair local add-ons."""
needs_repair: list[Addon] = []
# Evaluate Add-ons to repair
for addon in self.installed:
if await addon.instance.exists():
continue
needs_repair.append(addon)
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
if not needs_repair:
return
for addon in needs_repair:
_LOGGER.info("Repairing for add-on: %s", addon.slug)
with suppress(DockerError, KeyError):
# Need to pull the image again
if not addon.need_build:
await addon.instance.install(addon.version, addon.image)
continue
# Need local lookup
if addon.need_build and not addon.is_detached:
store = self.store[addon.slug]
# If this add-on is available for rebuild
if addon.version == store.version:
await addon.instance.install(addon.version, addon.image)
continue
_LOGGER.error("Can't repair %s", addon.slug)
with suppress(AddonsError):
await self.uninstall(addon.slug)
async def sync_dns(self) -> None:
"""Sync add-ons DNS names."""
# Update hosts
for addon in self.installed:
try:
if not await addon.instance.is_running():
continue
except DockerError as err:
_LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
self.sys_resolution.create_issue(
IssueType.CORRUPT_DOCKER,
ContextType.ADDON,
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
self.sys_capture_exception(err)
else:
self.sys_plugins.dns.add_host(
ipv4=addon.ip_address, names=[addon.hostname], write=False
)
# Write hosts files
with suppress(CoreDNSError):
self.sys_plugins.dns.write_hosts()
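
boot() above only flips an add-on to manual boot when the failure chain contains a system/user error such as DockerNotFound. A self-contained sketch of such a chain check, assuming the helper walks __cause__/__context__ the way supervisor.utils.check_exception_chain does (stand-in exception classes are hypothetical):

def check_exception_chain(err: BaseException, kinds) -> bool:
    # Walk the exception chain (explicit "raise ... from" and implicit context).
    while err is not None:
        if isinstance(err, kinds):
            return True
        err = err.__cause__ or err.__context__
    return False


class DockerNotFound(Exception):
    """Stand-in for supervisor.exceptions.DockerNotFound."""


class AddonsError(Exception):
    """Stand-in for supervisor.exceptions.AddonsError."""


try:
    try:
        raise DockerNotFound("image missing")
    except DockerNotFound as docker_err:
        raise AddonsError("start failed") from docker_err
except AddonsError as boot_err:
    assert check_exception_chain(boot_err, (DockerNotFound,))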

File diff suppressed because it is too large

View File

@@ -1,8 +1,6 @@
"""Supervisor add-on build environment."""
from __future__ import annotations
from functools import cached_property
from pathlib import Path
from typing import TYPE_CHECKING
@@ -17,8 +15,7 @@ from ..const import (
META_ADDON,
)
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.interface import MAP_ARCH
from ..exceptions import ConfigurationFileError, HassioArchNotFound
from ..exceptions import ConfigurationFileError
from ..utils.common import FileConfiguration, find_one_filetype
from .validate import SCHEMA_BUILD_CONFIG
@@ -47,33 +44,15 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
"""Ignore save function."""
raise RuntimeError()
@cached_property
def arch(self) -> str:
"""Return arch of the add-on."""
return self.sys_arch.match(self.addon.arch)
@property
def base_image(self) -> str:
"""Return base image for this add-on."""
if not self._data[ATTR_BUILD_FROM]:
return f"ghcr.io/home-assistant/{self.sys_arch.default}-base:latest"
if isinstance(self._data[ATTR_BUILD_FROM], str):
return self._data[ATTR_BUILD_FROM]
# Evaluate correct base image
if self.arch not in self._data[ATTR_BUILD_FROM]:
raise HassioArchNotFound(
f"Add-on {self.addon.slug} is not supported on {self.arch}"
)
return self._data[ATTR_BUILD_FROM][self.arch]
@property
def dockerfile(self) -> Path:
"""Return Dockerfile path."""
if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
return self.addon.path_location.joinpath("Dockerfile")
arch = self.sys_arch.match(list(self._data[ATTR_BUILD_FROM].keys()))
return self._data[ATTR_BUILD_FROM][arch]
@property
def squash(self) -> bool:
@@ -93,29 +72,24 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
@property
def is_valid(self) -> bool:
"""Return true if the build env is valid."""
try:
return all(
[
self.addon.path_location.is_dir(),
self.dockerfile.is_file(),
]
)
except HassioArchNotFound:
return False
return all(
[
self.addon.path_location.is_dir(),
Path(self.addon.path_location, "Dockerfile").is_file(),
]
)
def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
def get_docker_args(self, version: AwesomeVersion):
"""Create a dict with Docker build arguments."""
args = {
"path": str(self.addon.path_location),
"tag": f"{image or self.addon.image}:{version!s}",
"dockerfile": str(self.dockerfile),
"tag": f"{self.addon.image}:{version!s}",
"pull": True,
"forcerm": not self.sys_dev,
"squash": self.squash,
"platform": MAP_ARCH[self.arch],
"labels": {
"io.hass.version": version,
"io.hass.arch": self.arch,
"io.hass.arch": self.sys_arch.default,
"io.hass.type": META_ADDON,
"io.hass.name": self._fix_label("name"),
"io.hass.description": self._fix_label("description"),

View File

@@ -1,11 +0,0 @@
"""Confgiuration Objects for Addon Config."""
from dataclasses import dataclass
@dataclass(slots=True)
class FolderMapping:
"""Represent folder mapping configuration."""
path: str | None
read_only: bool

View File

@@ -1,49 +1,14 @@
"""Add-on static data."""
from datetime import timedelta
from enum import StrEnum
from ..jobs.const import JobCondition
from enum import Enum
class AddonBackupMode(StrEnum):
class AddonBackupMode(str, Enum):
"""Backup mode of an Add-on."""
HOT = "hot"
COLD = "cold"
class MappingType(StrEnum):
"""Mapping type of an Add-on Folder."""
DATA = "data"
CONFIG = "config"
SSL = "ssl"
ADDONS = "addons"
BACKUP = "backup"
SHARE = "share"
MEDIA = "media"
HOMEASSISTANT_CONFIG = "homeassistant_config"
ALL_ADDON_CONFIGS = "all_addon_configs"
ADDON_CONFIG = "addon_config"
ATTR_BACKUP = "backup"
ATTR_BREAKING_VERSIONS = "breaking_versions"
ATTR_CODENOTARY = "codenotary"
ATTR_READ_ONLY = "read_only"
ATTR_PATH = "path"
WATCHDOG_RETRY_SECONDS = 10
WATCHDOG_MAX_ATTEMPTS = 5
WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30)
WATCHDOG_THROTTLE_MAX_CALLS = 10
ADDON_UPDATE_CONDITIONS = [
JobCondition.FREE_SPACE,
JobCondition.HEALTHY,
JobCondition.INTERNET_HOST,
JobCondition.PLUGINS_UPDATED,
JobCondition.SUPERVISOR_UPDATED,
]
RE_SLUG = r"[-_.A-Za-z0-9]+"
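
The change from class AddonBackupMode(str, Enum) to StrEnum keeps string comparisons working; StrEnum (Python 3.11+) simply bakes the str mix-in in. A quick sketch of the equivalence:

from enum import Enum, StrEnum  # StrEnum requires Python 3.11+


class OldBackupMode(str, Enum):
    HOT = "hot"


class NewBackupMode(StrEnum):
    HOT = "hot"


# Both spellings compare equal to their raw values and are str instances.
assert OldBackupMode.HOT == "hot"
assert NewBackupMode.HOT == "hot"
assert isinstance(NewBackupMode.HOT, str)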

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor add-on data."""
from copy import deepcopy
from typing import Any

View File

@@ -1,388 +0,0 @@
"""Supervisor add-on manager."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress
import logging
import tarfile
from typing import Union
from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
AddonConfigurationError,
AddonsError,
AddonsJobError,
AddonsNotSupportedError,
CoreDNSError,
DockerAPIError,
DockerError,
DockerNotFound,
HassioError,
HomeAssistantAPIError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils import check_exception_chain
from ..utils.sentry import capture_exception
from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS
from .data import AddonsData
_LOGGER: logging.Logger = logging.getLogger(__name__)
AnyAddon = Union[Addon, AddonStore]
class AddonManager(CoreSysAttributes):
"""Manage add-ons inside Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.coresys: CoreSys = coresys
self.data: AddonsData = AddonsData(coresys)
self.local: dict[str, Addon] = {}
self.store: dict[str, AddonStore] = {}
@property
def all(self) -> list[AnyAddon]:
"""Return a list of all add-ons."""
addons: dict[str, AnyAddon] = {**self.store, **self.local}
return list(addons.values())
@property
def installed(self) -> list[Addon]:
"""Return a list of all installed add-ons."""
return list(self.local.values())
def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
"""Return an add-on from slug.
Prio:
1 - Local
2 - Store
"""
if addon_slug in self.local:
return self.local[addon_slug]
if not local_only:
return self.store.get(addon_slug)
return None
def from_token(self, token: str) -> Addon | None:
"""Return an add-on from Supervisor token."""
for addon in self.installed:
if token == addon.supervisor_token:
return addon
return None
async def load(self) -> None:
"""Start up add-on management."""
# Refresh cache for all store addons
tasks: list[Awaitable[None]] = [
store.refresh_path_cache() for store in self.store.values()
]
# Load all installed addons
for slug in self.data.system:
addon = self.local[slug] = Addon(self.coresys, slug)
tasks.append(addon.load())
# Run initial tasks
_LOGGER.info("Found %d installed add-ons", len(self.data.system))
if tasks:
await asyncio.gather(*tasks)
# Sync DNS
await self.sync_dns()
async def boot(self, stage: AddonStartup) -> None:
"""Boot add-ons with mode auto."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.boot != AddonBoot.AUTO or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be started
_LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
if not tasks:
return
# Start add-ons sequentially to avoid issues on slow IO
# Config.wait_boot is deprecated. Until addons update with healthchecks,
# add a sleep task for it to keep the same minimum wait time
wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)]
for addon in tasks:
try:
if start_task := await addon.start():
wait_boot.append(start_task)
except AddonsError as err:
# Check if there is a system/user issue
if check_exception_chain(
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
):
addon.boot = AddonBoot.MANUAL
addon.save_persist()
except HassioError:
pass # These are already handled
else:
continue
_LOGGER.warning("Can't start Add-on %s", addon.slug)
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
await asyncio.gather(*wait_boot, return_exceptions=True)
async def shutdown(self, stage: AddonStartup) -> None:
"""Shutdown addons."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.state != AddonState.STARTED or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be stopped
_LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
if not tasks:
return
# Stop add-ons sequentially to avoid issues on slow IO
for addon in tasks:
try:
await addon.stop()
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
capture_exception(err)
@Job(
name="addon_manager_install",
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
)
async def install(self, slug: str) -> None:
"""Install an add-on."""
self.sys_jobs.current.reference = slug
if slug in self.local:
raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
store = self.store.get(slug)
if not store:
raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
store.validate_availability()
await Addon(self.coresys, slug).install()
_LOGGER.info("Add-on '%s' successfully installed", slug)
async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
"""Remove an add-on."""
if slug not in self.local:
_LOGGER.warning("Add-on %s is not installed", slug)
return
shared_image = any(
self.local[slug].image == addon.image
and self.local[slug].version == addon.version
for addon in self.installed
if addon.slug != slug
)
await self.local[slug].uninstall(
remove_config=remove_config, remove_image=not shared_image
)
_LOGGER.info("Add-on '%s' successfully removed", slug)
@Job(
name="addon_manager_update",
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
)
async def update(
self, slug: str, backup: bool | None = False
) -> asyncio.Task | None:
"""Update add-on.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after update. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
if addon.version == store.version:
raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
# Check if still available, maybe something has changed
store.validate_availability()
if backup:
await self.sys_backups.do_backup_partial(
name=f"addon_{addon.slug}_{addon.version}",
homeassistant=False,
addons=[addon.slug],
)
return await addon.update()
@Job(
name="addon_manager_rebuild",
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def rebuild(self, slug: str) -> asyncio.Task | None:
"""Perform a rebuild of local build add-on.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after rebuild. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
# Check if a rebuild is possible now
if addon.version != store.version:
raise AddonsError(
"Version changed, use Update instead of Rebuild", _LOGGER.error
)
if not addon.need_build:
raise AddonsNotSupportedError(
"Can't rebuild an image-based add-on", _LOGGER.error
)
return await addon.rebuild()
@Job(
name="addon_manager_restore",
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def restore(
self, slug: str, tar_file: tarfile.TarFile
) -> asyncio.Task | None:
"""Restore state of an add-on.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after restore. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
_LOGGER.debug("Add-on %s is not local available for restore", slug)
addon = Addon(self.coresys, slug)
had_ingress = False
else:
_LOGGER.debug("Add-on %s is local available for restore", slug)
addon = self.local[slug]
had_ingress = addon.ingress_panel
wait_for_start = await addon.restore(tar_file)
# Check if new
if slug not in self.local:
_LOGGER.info("Detect new Add-on after restore %s", slug)
self.local[slug] = addon
# Update ingress
if had_ingress != addon.ingress_panel:
await self.sys_ingress.reload()
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
return wait_for_start
@Job(
name="addon_manager_repair",
conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
)
async def repair(self) -> None:
"""Repair local add-ons."""
needs_repair: list[Addon] = []
# Evaluate Add-ons to repair
for addon in self.installed:
if await addon.instance.exists():
continue
needs_repair.append(addon)
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
if not needs_repair:
return
for addon in needs_repair:
_LOGGER.info("Repairing for add-on: %s", addon.slug)
with suppress(DockerError, KeyError):
# Need to pull the image again
if not addon.need_build:
await addon.instance.install(addon.version, addon.image)
continue
# Need local lookup
if addon.need_build and not addon.is_detached:
store = self.store[addon.slug]
# If this add-on is available for rebuild
if addon.version == store.version:
await addon.instance.install(addon.version, addon.image)
continue
_LOGGER.error("Can't repair %s", addon.slug)
with suppress(AddonsError):
await self.uninstall(addon.slug)
async def sync_dns(self) -> None:
"""Sync add-ons DNS names."""
# Update hosts
add_host_coros: list[Awaitable[None]] = []
for addon in self.installed:
try:
if not await addon.instance.is_running():
continue
except DockerError as err:
_LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
self.sys_resolution.create_issue(
IssueType.CORRUPT_DOCKER,
ContextType.ADDON,
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
capture_exception(err)
else:
add_host_coros.append(
self.sys_plugins.dns.add_host(
ipv4=addon.ip_address, names=[addon.hostname], write=False
)
)
await asyncio.gather(*add_host_coros)
# Write hosts files
with suppress(CoreDNSError):
await self.sys_plugins.dns.write_hosts()
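
The new boot() gathers the per-add-on start tasks together with a minimum wait, using return_exceptions=True so one failing add-on cannot abort the others. A runnable sketch of that pattern, with a hypothetical start coroutine standing in for Addon.start():

import asyncio


async def start_addon(slug: str) -> None:
    # Hypothetical stand-in for Addon.start(); "broken" simulates a failure.
    if slug == "broken":
        raise RuntimeError(f"{slug} failed to start")
    await asyncio.sleep(0.1)


async def boot(wait_boot: float = 0.5) -> None:
    waits = [asyncio.sleep(wait_boot)]  # keep the legacy minimum wait time
    waits += [start_addon(slug) for slug in ("good", "broken")]
    # return_exceptions=True keeps one failing add-on from aborting the others.
    results = await asyncio.gather(*waits, return_exceptions=True)
    errors = [r for r in results if isinstance(r, Exception)]
    print(f"{len(errors)} add-on(s) failed to start")


asyncio.run(boot())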

View File

@@ -1,17 +1,11 @@
"""Init file for Supervisor add-ons."""
from abc import ABC, abstractmethod
from collections import defaultdict
from collections.abc import Awaitable, Callable
from contextlib import suppress
from datetime import datetime
import logging
from pathlib import Path
from typing import Any
from typing import Any, Awaitable, Optional
from awesomeversion import AwesomeVersion, AwesomeVersionException
from supervisor.utils.dt import utc_from_timestamp
from supervisor.addons.const import AddonBackupMode
from ..const import (
ATTR_ADVANCED,
@@ -39,7 +33,6 @@ from ..const import (
ATTR_HOST_IPC,
ATTR_HOST_NETWORK,
ATTR_HOST_PID,
ATTR_HOST_UTS,
ATTR_IMAGE,
ATTR_INGRESS,
ATTR_INGRESS_STREAM,
@@ -69,13 +62,11 @@ from ..const import (
ATTR_TIMEOUT,
ATTR_TMPFS,
ATTR_TRANSLATIONS,
ATTR_TYPE,
ATTR_UART,
ATTR_UDEV,
ATTR_URL,
ATTR_USB,
ATTR_VERSION,
ATTR_VERSION_TIMESTAMP,
ATTR_VIDEO,
ATTR_WATCHDOG,
ATTR_WEBUI,
@@ -83,47 +74,25 @@ from ..const import (
SECURITY_DISABLE,
SECURITY_PROFILE,
AddonBoot,
AddonBootConfig,
AddonStage,
AddonStartup,
)
from ..coresys import CoreSys
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.const import Capabilities
from ..exceptions import AddonsNotSupportedError
from ..jobs.const import JOB_GROUP_ADDON
from ..jobs.job_group import JobGroup
from ..utils import version_is_new_enough
from .configuration import FolderMapping
from .const import (
ATTR_BACKUP,
ATTR_BREAKING_VERSIONS,
ATTR_CODENOTARY,
ATTR_PATH,
ATTR_READ_ONLY,
AddonBackupMode,
MappingType,
)
from .const import ATTR_BACKUP, ATTR_CODENOTARY
from .options import AddonOptions, UiOptions
from .validate import RE_SERVICE
_LOGGER: logging.Logger = logging.getLogger(__name__)
from .validate import RE_SERVICE, RE_VOLUME
Data = dict[str, Any]
class AddonModel(JobGroup, ABC):
class AddonModel(CoreSysAttributes, ABC):
"""Add-on Data layout."""
def __init__(self, coresys: CoreSys, slug: str):
"""Initialize data holder."""
super().__init__(
coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
)
self.coresys: CoreSys = coresys
self.slug: str = slug
self._path_icon_exists: bool = False
self._path_logo_exists: bool = False
self._path_changelog_exists: bool = False
self._path_documentation_exists: bool = False
@property
@abstractmethod
@@ -151,17 +120,12 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_OPTIONS]
@property
def boot_config(self) -> AddonBootConfig:
"""Return boot config."""
def boot(self) -> AddonBoot:
"""Return boot config with prio local settings."""
return self.data[ATTR_BOOT]
@property
def boot(self) -> AddonBoot:
"""Return boot config with prio local settings unless config is forced."""
return AddonBoot(self.data[ATTR_BOOT])
@property
def auto_update(self) -> bool | None:
def auto_update(self) -> Optional[bool]:
"""Return if auto update is enable."""
return None
@@ -186,22 +150,22 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_TIMEOUT]
@property
def uuid(self) -> str | None:
def uuid(self) -> Optional[str]:
"""Return an API token for this add-on."""
return None
@property
def supervisor_token(self) -> str | None:
def supervisor_token(self) -> Optional[str]:
"""Return access token for Supervisor API."""
return None
@property
def ingress_token(self) -> str | None:
def ingress_token(self) -> Optional[str]:
"""Return access token for Supervisor API."""
return None
@property
def ingress_entry(self) -> str | None:
def ingress_entry(self) -> Optional[str]:
"""Return ingress external URL."""
return None
@@ -211,7 +175,7 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_DESCRIPTON]
@property
def long_description(self) -> str | None:
def long_description(self) -> Optional[str]:
"""Return README.md as long_description."""
readme = Path(self.path_location, "README.md")
@@ -237,11 +201,6 @@ class AddonModel(JobGroup, ABC):
"""Return latest version of add-on."""
return self.data[ATTR_VERSION]
@property
def latest_version_timestamp(self) -> datetime:
"""Return when latest version was first seen."""
return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP])
@property
def version(self) -> AwesomeVersion:
"""Return version of add-on."""
@@ -286,32 +245,32 @@ class AddonModel(JobGroup, ABC):
return self.data.get(ATTR_DISCOVERY, [])
@property
def ports_description(self) -> dict[str, str] | None:
def ports_description(self) -> Optional[dict[str, str]]:
"""Return descriptions of ports."""
return self.data.get(ATTR_PORTS_DESCRIPTION)
@property
def ports(self) -> dict[str, int | None] | None:
def ports(self) -> Optional[dict[str, Optional[int]]]:
"""Return ports of add-on."""
return self.data.get(ATTR_PORTS)
@property
def ingress_url(self) -> str | None:
def ingress_url(self) -> Optional[str]:
"""Return URL to ingress url."""
return None
@property
def webui(self) -> str | None:
def webui(self) -> Optional[str]:
"""Return URL to webui or None."""
return self.data.get(ATTR_WEBUI)
@property
def watchdog(self) -> str | None:
def watchdog(self) -> Optional[str]:
"""Return URL to for watchdog or None."""
return self.data.get(ATTR_WATCHDOG)
@property
def ingress_port(self) -> int | None:
def ingress_port(self) -> Optional[int]:
"""Return Ingress port."""
return None
@@ -345,11 +304,6 @@ class AddonModel(JobGroup, ABC):
"""Return True if add-on run on host IPC namespace."""
return self.data[ATTR_HOST_IPC]
@property
def host_uts(self) -> bool:
"""Return True if add-on run on host UTS namespace."""
return self.data[ATTR_HOST_UTS]
@property
def host_dbus(self) -> bool:
"""Return True if add-on run on host D-BUS."""
@@ -361,7 +315,7 @@ class AddonModel(JobGroup, ABC):
return [Path(node) for node in self.data.get(ATTR_DEVICES, [])]
@property
def environment(self) -> dict[str, str] | None:
def environment(self) -> Optional[dict[str, str]]:
"""Return environment of add-on."""
return self.data.get(ATTR_ENVIRONMENT)
@@ -410,12 +364,12 @@ class AddonModel(JobGroup, ABC):
return self.data.get(ATTR_BACKUP_EXCLUDE, [])
@property
def backup_pre(self) -> str | None:
def backup_pre(self) -> Optional[str]:
"""Return pre-backup command."""
return self.data.get(ATTR_BACKUP_PRE)
@property
def backup_post(self) -> str | None:
def backup_post(self) -> Optional[str]:
"""Return post-backup command."""
return self.data.get(ATTR_BACKUP_POST)
@@ -440,7 +394,7 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_INGRESS]
@property
def ingress_panel(self) -> bool | None:
def ingress_panel(self) -> Optional[bool]:
"""Return True if the add-on access support ingress."""
return None
@@ -490,7 +444,7 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_DEVICETREE]
@property
def with_tmpfs(self) -> str | None:
def with_tmpfs(self) -> Optional[str]:
"""Return if tmp is in memory of add-on."""
return self.data[ATTR_TMPFS]
@@ -510,34 +464,34 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_VIDEO]
@property
def homeassistant_version(self) -> str | None:
def homeassistant_version(self) -> Optional[str]:
"""Return min Home Assistant version they needed by Add-on."""
return self.data.get(ATTR_HOMEASSISTANT)
@property
def url(self) -> str | None:
def url(self) -> Optional[str]:
"""Return URL of add-on."""
return self.data.get(ATTR_URL)
@property
def with_icon(self) -> bool:
"""Return True if an icon exists."""
return self._path_icon_exists
return self.path_icon.exists()
@property
def with_logo(self) -> bool:
"""Return True if a logo exists."""
return self._path_logo_exists
return self.path_logo.exists()
@property
def with_changelog(self) -> bool:
"""Return True if a changelog exists."""
return self._path_changelog_exists
return self.path_changelog.exists()
@property
def with_documentation(self) -> bool:
"""Return True if a documentation exists."""
return self._path_documentation_exists
return self.path_documentation.exists()
@property
def supported_arch(self) -> list[str]:
@@ -558,7 +512,7 @@ class AddonModel(JobGroup, ABC):
return self.sys_arch.default
@property
def image(self) -> str | None:
def image(self) -> Optional[str]:
"""Generate image name from data."""
return self._image(self.data)
@@ -568,13 +522,14 @@ class AddonModel(JobGroup, ABC):
return ATTR_IMAGE not in self.data
@property
def map_volumes(self) -> dict[MappingType, FolderMapping]:
"""Return a dict of {MappingType: FolderMapping} from add-on."""
def map_volumes(self) -> dict[str, str]:
"""Return a dict of {volume: policy} from add-on."""
volumes = {}
for volume in self.data[ATTR_MAP]:
volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping(
volume.get(ATTR_PATH), volume[ATTR_READ_ONLY]
)
result = RE_VOLUME.match(volume)
if not result:
continue
volumes[result.group(1)] = result.group(2) or "ro"
return volumes
@@ -618,7 +573,7 @@ class AddonModel(JobGroup, ABC):
return AddonOptions(self.coresys, raw_schema, self.name, self.slug)
@property
def schema_ui(self) -> list[dict[any, any]] | None:
def schema_ui(self) -> Optional[list[dict[any, any]]]:
"""Create a UI schema for add-on options."""
raw_schema = self.data[ATTR_SCHEMA]
@@ -637,76 +592,35 @@ class AddonModel(JobGroup, ABC):
return ATTR_CODENOTARY in self.data
@property
def codenotary(self) -> str | None:
def codenotary(self) -> Optional[str]:
"""Return Signer email address for CAS."""
return self.data.get(ATTR_CODENOTARY)
@property
def breaking_versions(self) -> list[AwesomeVersion]:
"""Return breaking versions of addon."""
return self.data[ATTR_BREAKING_VERSIONS]
def refresh_path_cache(self) -> Awaitable[None]:
"""Refresh cache of existing paths."""
def check_paths():
self._path_icon_exists = self.path_icon.exists()
self._path_logo_exists = self.path_logo.exists()
self._path_changelog_exists = self.path_changelog.exists()
self._path_documentation_exists = self.path_documentation.exists()
return self.sys_run_in_executor(check_paths)
def validate_availability(self) -> None:
"""Validate if addon is available for current system."""
return self._validate_availability(self.data, logger=_LOGGER.error)
def __eq__(self, other):
"""Compaired add-on objects."""
if not isinstance(other, AddonModel):
return False
return self.slug == other.slug
def _validate_availability(
self, config, *, logger: Callable[..., None] | None = None
) -> None:
"""Validate if addon is available for current system."""
def _available(self, config) -> bool:
"""Return True if this add-on is available on this platform."""
# Architecture
if not self.sys_arch.is_supported(config[ATTR_ARCH]):
raise AddonsNotSupportedError(
f"Add-on {self.slug} not supported on this platform, supported architectures: {', '.join(config[ATTR_ARCH])}",
logger,
)
return False
# Machine / Hardware
machine = config.get(ATTR_MACHINE)
if machine and (
f"!{self.sys_machine}" in machine or self.sys_machine not in machine
):
raise AddonsNotSupportedError(
f"Add-on {self.slug} not supported on this machine, supported machine types: {', '.join(machine)}",
logger,
)
# Home Assistant
version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT)
with suppress(AwesomeVersionException, TypeError):
if version and not version_is_new_enough(
self.sys_homeassistant.version, version
):
raise AddonsNotSupportedError(
f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater",
logger,
)
def _available(self, config) -> bool:
"""Return True if this add-on is available on this platform."""
try:
self._validate_availability(config)
except AddonsNotSupportedError:
if machine and f"!{self.sys_machine}" in machine:
return False
elif machine and self.sys_machine not in machine:
return False
return True
# Home Assistant
version: Optional[AwesomeVersion] = config.get(ATTR_HOMEASSISTANT)
try:
return self.sys_homeassistant.version >= version
except (AwesomeVersionException, TypeError):
return True
def _image(self, config) -> str:
"""Generate image name from data."""
@@ -717,3 +631,19 @@ class AddonModel(JobGroup, ABC):
# local build
return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"
def install(self) -> Awaitable[None]:
"""Install this add-on."""
return self.sys_addons.install(self.slug)
def uninstall(self) -> Awaitable[None]:
"""Uninstall this add-on."""
return self.sys_addons.uninstall(self.slug)
def update(self, backup: Optional[bool] = False) -> Awaitable[None]:
"""Update this add-on."""
return self.sys_addons.update(self.slug, backup=backup)
def rebuild(self) -> Awaitable[None]:
"""Rebuild this add-on."""
return self.sys_addons.rebuild(self.slug)
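
refresh_path_cache() above moves the blocking exists() checks off the event loop once, and the with_* properties then read cached booleans. A minimal sketch of the same pattern reduced to a single icon path, using the default executor directly (class and attribute names here are illustrative):

import asyncio
from pathlib import Path


class PathCache:
    """Hypothetical reduction of AddonModel's path cache to one icon path."""

    def __init__(self, location: Path) -> None:
        self.path_icon = location / "icon.png"
        self._path_icon_exists = False

    async def refresh_path_cache(self) -> None:
        # Run the blocking filesystem check in the default executor.
        loop = asyncio.get_running_loop()
        self._path_icon_exists = await loop.run_in_executor(
            None, self.path_icon.exists
        )

    @property
    def with_icon(self) -> bool:
        return self._path_icon_exists


async def main() -> None:
    cache = PathCache(Path("."))
    await cache.refresh_path_cache()
    print(cache.with_icon)


asyncio.run(main())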

View File

@@ -1,10 +1,9 @@
"""Add-on Options / UI rendering."""
import hashlib
import logging
from pathlib import Path
import re
from typing import Any
from typing import Any, Union
import voluptuous as vol
@@ -294,7 +293,7 @@ class UiOptions(CoreSysAttributes):
multiple: bool = False,
) -> None:
"""Validate a single element."""
ui_node: dict[str, str | bool | float | list[str]] = {"name": key}
ui_node: dict[str, Union[str, bool, float, list[str]]] = {"name": key}
# If multiple
if multiple:

View File

@@ -1,5 +1,4 @@
"""Util add-ons functions."""
from __future__ import annotations
import asyncio
@@ -45,15 +44,12 @@ def rating_security(addon: AddonModel) -> int:
any(
privilege in addon.privileged
for privilege in (
Capabilities.BPF,
Capabilities.DAC_READ_SEARCH,
Capabilities.NET_ADMIN,
Capabilities.NET_RAW,
Capabilities.PERFMON,
Capabilities.SYS_ADMIN,
Capabilities.SYS_MODULE,
Capabilities.SYS_PTRACE,
Capabilities.SYS_RAWIO,
Capabilities.SYS_PTRACE,
Capabilities.SYS_MODULE,
Capabilities.DAC_READ_SEARCH,
)
)
or addon.with_kernel_modules
@@ -74,10 +70,6 @@ def rating_security(addon: AddonModel) -> int:
if addon.host_pid:
rating += -2
# UTS host namespace allows setting the hostname only with SYS_ADMIN
if addon.host_uts and Capabilities.SYS_ADMIN in addon.privileged:
rating += -1
# Docker Access & full Access
if addon.access_docker_api or addon.with_full_access:
rating = 1
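
rating_security() above starts from a baseline score, subtracts for risky capabilities and host namespaces, and floors the score when the add-on has Docker or full access. A simplified sketch on an assumed 1-6 scale (weights and capability set here are illustrative; the real function scores many more signals):

def rating_security(privileged: set[str], host_pid: bool, full_access: bool) -> int:
    rating = 5  # assumed baseline
    if privileged & {"SYS_ADMIN", "NET_ADMIN", "SYS_RAWIO", "SYS_PTRACE"}:
        rating -= 1
    if host_pid:
        rating -= 2
    if full_access:
        rating = 1  # full access overrides everything else
    return max(1, min(rating, 6))


print(rating_security({"NET_ADMIN"}, host_pid=False, full_access=False))  # 4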

View File

@@ -1,5 +1,4 @@
"""Validate add-ons options schema."""
import logging
import re
import secrets
@@ -8,6 +7,8 @@ import uuid
import voluptuous as vol
from supervisor.addons.const import AddonBackupMode
from ..const import (
ARCH_ALL,
ATTR_ACCESS_TOKEN,
@@ -42,7 +43,6 @@ from ..const import (
ATTR_HOST_IPC,
ATTR_HOST_NETWORK,
ATTR_HOST_PID,
ATTR_HOST_UTS,
ATTR_IMAGE,
ATTR_INGRESS,
ATTR_INGRESS_ENTRY,
@@ -79,12 +79,9 @@ from ..const import (
ATTR_STATE,
ATTR_STDIN,
ATTR_SYSTEM,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TIMEOUT,
ATTR_TMPFS,
ATTR_TRANSLATIONS,
ATTR_TYPE,
ATTR_UART,
ATTR_UDEV,
ATTR_URL,
@@ -98,11 +95,11 @@ from ..const import (
ROLE_ALL,
ROLE_DEFAULT,
AddonBoot,
AddonBootConfig,
AddonStage,
AddonStartup,
AddonState,
)
from ..discovery.validate import valid_discovery_service
from ..docker.const import Capabilities
from ..validate import (
docker_image,
@@ -113,23 +110,12 @@ from ..validate import (
uuid_match,
version_tag,
)
from .const import (
ATTR_BACKUP,
ATTR_BREAKING_VERSIONS,
ATTR_CODENOTARY,
ATTR_PATH,
ATTR_READ_ONLY,
RE_SLUG,
AddonBackupMode,
MappingType,
)
from .const import ATTR_BACKUP, ATTR_CODENOTARY
from .options import RE_SCHEMA_ELEMENT
_LOGGER: logging.Logger = logging.getLogger(__name__)
RE_VOLUME = re.compile(
r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$"
)
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")
@@ -145,7 +131,6 @@ RE_MACHINE = re.compile(
r"|generic-x86-64"
r"|odroid-c2"
r"|odroid-c4"
r"|odroid-m1"
r"|odroid-n2"
r"|odroid-xu"
r"|qemuarm-64"
@@ -158,15 +143,10 @@ RE_MACHINE = re.compile(
r"|raspberrypi3"
r"|raspberrypi4-64"
r"|raspberrypi4"
r"|raspberrypi5-64"
r"|yellow"
r"|green"
r"|tinker"
r")$"
)
RE_SLUG_FIELD = re.compile(r"^" + RE_SLUG + r"$")
def _warn_addon_config(config: dict[str, Any]):
"""Warn about miss configs."""
@@ -214,9 +194,9 @@ def _migrate_addon_config(protocol=False):
name,
)
if value == "before":
config[ATTR_STARTUP] = AddonStartup.SERVICES
config[ATTR_STARTUP] = AddonStartup.SERVICES.value
elif value == "after":
config[ATTR_STARTUP] = AddonStartup.APPLICATION
config[ATTR_STARTUP] = AddonStartup.APPLICATION.value
# UART 2021-01-20
if "auto_uart" in config:
@@ -262,48 +242,6 @@ def _migrate_addon_config(protocol=False):
name,
)
# 2023-11 "map" entries can also be dict to allow path configuration
volumes = []
for entry in config.get(ATTR_MAP, []):
if isinstance(entry, dict):
volumes.append(entry)
if isinstance(entry, str):
result = RE_VOLUME.match(entry)
if not result:
continue
volumes.append(
{
ATTR_TYPE: result.group(1),
ATTR_READ_ONLY: result.group(2) != "rw",
}
)
if volumes:
config[ATTR_MAP] = volumes
# 2023-10 "config" became "homeassistant" so /config can be used for addon's public config
if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes):
if any(
volume
and volume[ATTR_TYPE]
in {MappingType.ADDON_CONFIG, MappingType.HOMEASSISTANT_CONFIG}
for volume in volumes
):
_LOGGER.warning(
"Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s",
MappingType.ADDON_CONFIG,
MappingType.HOMEASSISTANT_CONFIG,
MappingType.CONFIG,
name,
)
else:
_LOGGER.debug(
"Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s",
MappingType.CONFIG,
MappingType.HOMEASSISTANT_CONFIG,
name,
)
return config
return _migrate
@@ -314,7 +252,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
{
vol.Required(ATTR_NAME): str,
vol.Required(ATTR_VERSION): version_tag,
vol.Required(ATTR_SLUG): vol.Match(RE_SLUG_FIELD),
vol.Required(ATTR_SLUG): str,
vol.Required(ATTR_DESCRIPTON): str,
vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
vol.Optional(ATTR_MACHINE): vol.All([vol.Match(RE_MACHINE)], vol.Unique()),
@@ -322,9 +260,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
AddonStartup
),
vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce(
AddonBootConfig
),
vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
@@ -349,20 +285,11 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_UTS, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
vol.Optional(ATTR_DEVICES): [str],
vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(),
vol.Optional(ATTR_MAP, default=list): [
vol.Schema(
{
vol.Required(ATTR_TYPE): vol.Coerce(MappingType),
vol.Optional(ATTR_READ_ONLY, default=True): bool,
vol.Optional(ATTR_PATH): str,
}
)
],
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str},
vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)],
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
@@ -383,7 +310,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
vol.Optional(ATTR_DISCOVERY): [str],
vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
vol.Optional(ATTR_BACKUP_EXCLUDE): [str],
vol.Optional(ATTR_BACKUP_PRE): str,
vol.Optional(ATTR_BACKUP_POST): str,
@@ -414,7 +341,6 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Coerce(int), vol.Range(min=10, max=300)
),
vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(),
vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag],
},
extra=vol.REMOVE_EXTRA,
)
@@ -427,9 +353,8 @@ SCHEMA_ADDON_CONFIG = vol.All(
# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema(
{
vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Any(
vol.Match(RE_DOCKER_IMAGE_BUILD),
vol.Schema({vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}),
vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema(
{vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}
),
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
vol.Optional(ATTR_ARGS, default=dict): vol.Schema({str: str}),
@@ -473,8 +398,6 @@ SCHEMA_ADDON_USER = vol.Schema(
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED, default=False): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, default=None): vol.Maybe(str),
},
extra=vol.REMOVE_EXTRA,
)
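
RE_VOLUME above turns a legacy "map" entry such as config:rw into a folder name plus an access policy that defaults to read-only. A usage sketch with the older, shorter pattern shown in this diff:

import re

RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")


def parse_volume(entry: str) -> tuple[str, str] | None:
    result = RE_VOLUME.match(entry)
    if not result:
        return None  # invalid entries are skipped by the schema
    return result.group(1), result.group(2) or "ro"


assert parse_volume("config:rw") == ("config", "rw")
assert parse_volume("share") == ("share", "ro")
assert parse_volume("bogus") is None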

View File

@@ -1,22 +1,20 @@
"""Init file for Supervisor RESTful API."""
from functools import partial
import logging
from pathlib import Path
from typing import Any
from typing import Any, Optional
from aiohttp import web
from ..const import AddonState
from supervisor.api.utils import api_process
from supervisor.const import AddonState
from supervisor.exceptions import APIAddonNotInstalled
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
from ..utils.sentry import capture_exception
from .addons import APIAddons
from .audio import APIAudio
from .auth import APIAuth
from .backups import APIBackups
from .cli import APICli
from .const import CONTENT_TYPE_TEXT
from .discovery import APIDiscovery
from .dns import APICoreDNS
from .docker import APIDocker
@@ -26,7 +24,6 @@ from .host import APIHost
from .ingress import APIIngress
from .jobs import APIJobs
from .middleware.security import SecurityMiddleware
from .mounts import APIMounts
from .multicast import APIMulticast
from .network import APINetwork
from .observer import APIObserver
@@ -38,7 +35,6 @@ from .security import APISecurity
from .services import APIServices
from .store import APIStore
from .supervisor import APISupervisor
from .utils import api_process, api_process_raw
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -57,10 +53,8 @@ class RestAPI(CoreSysAttributes):
self.webapp: web.Application = web.Application(
client_max_size=MAX_CLIENT_SIZE,
middlewares=[
self.security.block_bad_requests,
self.security.system_validation,
self.security.token_validation,
self.security.core_proxy,
],
handler_args={
"max_line_size": MAX_LINE_SIZE,
@@ -69,17 +63,11 @@ class RestAPI(CoreSysAttributes):
)
# service stuff
self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
self._site: web.TCPSite | None = None
# share single host API handler for reuse in logging endpoints
self._api_host: APIHost | None = None
self._runner: web.AppRunner = web.AppRunner(self.webapp)
self._site: Optional[web.TCPSite] = None
async def load(self) -> None:
"""Register REST API Calls."""
self._api_host = APIHost()
self._api_host.coresys = self.coresys
self._register_addons()
self._register_audio()
self._register_auth()
@@ -91,93 +79,41 @@ class RestAPI(CoreSysAttributes):
self._register_hardware()
self._register_homeassistant()
self._register_host()
self._register_jobs()
self._register_root()
self._register_ingress()
self._register_mounts()
self._register_multicast()
self._register_network()
self._register_observer()
self._register_os()
self._register_jobs()
self._register_panel()
self._register_proxy()
self._register_resolution()
self._register_root()
self._register_security()
self._register_services()
self._register_store()
self._register_supervisor()
self._register_store()
self._register_security()
await self.start()
def _register_advanced_logs(self, path: str, syslog_identifier: str):
"""Register logs endpoint for a given path, returning logs for single syslog identifier."""
self.webapp.add_routes(
[
web.get(
f"{path}/logs",
partial(self._api_host.advanced_logs, identifier=syslog_identifier),
),
web.get(
f"{path}/logs/follow",
partial(
self._api_host.advanced_logs,
identifier=syslog_identifier,
follow=True,
),
),
web.get(
f"{path}/logs/boots/{{bootid}}",
partial(self._api_host.advanced_logs, identifier=syslog_identifier),
),
web.get(
f"{path}/logs/boots/{{bootid}}/follow",
partial(
self._api_host.advanced_logs,
identifier=syslog_identifier,
follow=True,
),
),
]
)
def _register_host(self) -> None:
"""Register hostcontrol functions."""
api_host = self._api_host
api_host = APIHost()
api_host.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/host/info", api_host.info),
web.get("/host/logs", api_host.advanced_logs),
web.get(
"/host/logs/follow",
partial(api_host.advanced_logs, follow=True),
),
web.get("/host/logs/identifiers", api_host.list_identifiers),
web.get("/host/logs/identifiers/{identifier}", api_host.advanced_logs),
web.get(
"/host/logs/identifiers/{identifier}/follow",
partial(api_host.advanced_logs, follow=True),
),
web.get("/host/logs/boots", api_host.list_boots),
web.get("/host/logs/boots/{bootid}", api_host.advanced_logs),
web.get(
"/host/logs/boots/{bootid}/follow",
partial(api_host.advanced_logs, follow=True),
),
web.get(
"/host/logs/boots/{bootid}/identifiers/{identifier}",
api_host.advanced_logs,
),
web.get(
"/host/logs/boots/{bootid}/identifiers/{identifier}/follow",
partial(api_host.advanced_logs, follow=True),
),
web.get("/host/logs", api_host.logs),
web.post("/host/reboot", api_host.reboot),
web.post("/host/shutdown", api_host.shutdown),
web.post("/host/reload", api_host.reload),
web.post("/host/options", api_host.options),
web.get("/host/services", api_host.services),
web.post("/host/services/{service}/stop", api_host.service_stop),
web.post("/host/services/{service}/start", api_host.service_start),
web.post("/host/services/{service}/restart", api_host.service_restart),
web.post("/host/services/{service}/reload", api_host.service_reload),
]
)
@@ -220,19 +156,6 @@ class RestAPI(CoreSysAttributes):
web.post("/os/config/sync", api_os.config_sync),
web.post("/os/datadisk/move", api_os.migrate_data),
web.get("/os/datadisk/list", api_os.list_data),
web.post("/os/datadisk/wipe", api_os.wipe_data),
web.post("/os/boot-slot", api_os.set_boot_slot),
]
)
# Boards endpoints
self.webapp.add_routes(
[
web.get("/os/boards/green", api_os.boards_green_info),
web.post("/os/boards/green", api_os.boards_green_options),
web.get("/os/boards/yellow", api_os.boards_yellow_info),
web.post("/os/boards/yellow", api_os.boards_yellow_options),
web.get("/os/boards/{board}", api_os.boards_other_info),
]
)
@@ -259,8 +182,6 @@ class RestAPI(CoreSysAttributes):
web.get("/jobs/info", api_jobs.info),
web.post("/jobs/options", api_jobs.options),
web.post("/jobs/reset", api_jobs.reset),
web.get("/jobs/{uuid}", api_jobs.job_info),
web.delete("/jobs/{uuid}", api_jobs.remove_job),
]
)
@@ -299,11 +220,11 @@ class RestAPI(CoreSysAttributes):
[
web.get("/multicast/info", api_multicast.info),
web.get("/multicast/stats", api_multicast.stats),
web.get("/multicast/logs", api_multicast.logs),
web.post("/multicast/update", api_multicast.update),
web.post("/multicast/restart", api_multicast.restart),
]
)
self._register_advanced_logs("/multicast", "hassio_multicast")
def _register_hardware(self) -> None:
"""Register hardware functions."""
@@ -357,10 +278,6 @@ class RestAPI(CoreSysAttributes):
"/resolution/issue/{issue}",
api_resolution.dismiss_issue,
),
web.get(
"/resolution/issue/{issue}/suggestions",
api_resolution.suggestions_for_issue,
),
web.post("/resolution/healthcheck", api_resolution.healthcheck),
]
)
@@ -376,7 +293,6 @@ class RestAPI(CoreSysAttributes):
web.post("/auth", api_auth.auth),
web.post("/auth/reset", api_auth.reset),
web.delete("/auth/cache", api_auth.cache),
web.get("/auth/list", api_auth.list_users),
]
)
@@ -390,6 +306,7 @@ class RestAPI(CoreSysAttributes):
web.get("/supervisor/ping", api_supervisor.ping),
web.get("/supervisor/info", api_supervisor.info),
web.get("/supervisor/stats", api_supervisor.stats),
web.get("/supervisor/logs", api_supervisor.logs),
web.post("/supervisor/update", api_supervisor.update),
web.post("/supervisor/reload", api_supervisor.reload),
web.post("/supervisor/restart", api_supervisor.restart),
@@ -398,38 +315,6 @@ class RestAPI(CoreSysAttributes):
]
)
async def get_supervisor_logs(*args, **kwargs):
try:
return await self._api_host.advanced_logs_handler(
*args, identifier="hassio_supervisor", **kwargs
)
except Exception as err: # pylint: disable=broad-exception-caught
# Supervisor logs are critical, so catch everything, log the exception
# and try to return Docker container logs as the fallback
_LOGGER.exception(
"Failed to get supervisor logs using advanced_logs API"
)
if not isinstance(err, HostNotSupportedError):
# No need to capture HostNotSupportedError to Sentry, the cause
# is known and reported to the user using the resolution center.
capture_exception(err)
return await api_supervisor.logs(*args, **kwargs)
self.webapp.add_routes(
[
web.get("/supervisor/logs", get_supervisor_logs),
web.get(
"/supervisor/logs/follow",
partial(get_supervisor_logs, follow=True),
),
web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
web.get(
"/supervisor/logs/boots/{bootid}/follow",
partial(get_supervisor_logs, follow=True),
),
]
)
def _register_homeassistant(self) -> None:
"""Register Home Assistant functions."""
api_hass = APIHomeAssistant()
@@ -438,6 +323,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes(
[
web.get("/core/info", api_hass.info),
web.get("/core/logs", api_hass.logs),
web.get("/core/stats", api_hass.stats),
web.post("/core/options", api_hass.options),
web.post("/core/update", api_hass.update),
@@ -449,12 +335,11 @@ class RestAPI(CoreSysAttributes):
]
)
self._register_advanced_logs("/core", "homeassistant")
# Reroute from legacy
self.webapp.add_routes(
[
web.get("/homeassistant/info", api_hass.info),
web.get("/homeassistant/logs", api_hass.logs),
web.get("/homeassistant/stats", api_hass.stats),
web.post("/homeassistant/options", api_hass.options),
web.post("/homeassistant/restart", api_hass.restart),
@@ -466,8 +351,6 @@ class RestAPI(CoreSysAttributes):
]
)
self._register_advanced_logs("/homeassistant", "homeassistant")
def _register_proxy(self) -> None:
"""Register Home Assistant API Proxy."""
api_proxy = APIProxy()
@@ -509,39 +392,18 @@ class RestAPI(CoreSysAttributes):
web.post("/addons/{addon}/stop", api_addons.stop),
web.post("/addons/{addon}/restart", api_addons.restart),
web.post("/addons/{addon}/options", api_addons.options),
web.post("/addons/{addon}/sys_options", api_addons.sys_options),
web.post(
"/addons/{addon}/options/validate", api_addons.options_validate
),
web.get("/addons/{addon}/options/config", api_addons.options_config),
web.post("/addons/{addon}/rebuild", api_addons.rebuild),
web.get("/addons/{addon}/logs", api_addons.logs),
web.post("/addons/{addon}/stdin", api_addons.stdin),
web.post("/addons/{addon}/security", api_addons.security),
web.get("/addons/{addon}/stats", api_addons.stats),
]
)
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
async def get_addon_logs(request, *args, **kwargs):
addon = api_addons.get_addon_for_request(request)
kwargs["identifier"] = f"addon_{addon.slug}"
return await self._api_host.advanced_logs(request, *args, **kwargs)
self.webapp.add_routes(
[
web.get("/addons/{addon}/logs", get_addon_logs),
web.get(
"/addons/{addon}/logs/follow",
partial(get_addon_logs, follow=True),
),
web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
web.get(
"/addons/{addon}/logs/boots/{bootid}/follow",
partial(get_addon_logs, follow=True),
),
]
)
# Legacy routing to support requests for not installed addons
api_store = APIStore()
api_store.coresys = self.coresys
@@ -583,15 +445,11 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes(
[
web.get("/backups", api_backups.list),
web.get("/backups/info", api_backups.info),
web.post("/backups/options", api_backups.options),
web.post("/backups/reload", api_backups.reload),
web.post("/backups/freeze", api_backups.freeze),
web.post("/backups/thaw", api_backups.thaw),
web.post("/backups/new/full", api_backups.backup_full),
web.post("/backups/new/partial", api_backups.backup_partial),
web.post("/backups/new/upload", api_backups.upload),
web.get("/backups/{slug}/info", api_backups.backup_info),
web.get("/backups/{slug}/info", api_backups.info),
web.delete("/backups/{slug}", api_backups.remove),
web.post("/backups/{slug}/restore/full", api_backups.restore_full),
web.post(
@@ -639,6 +497,7 @@ class RestAPI(CoreSysAttributes):
[
web.get("/dns/info", api_dns.info),
web.get("/dns/stats", api_dns.stats),
web.get("/dns/logs", api_dns.logs),
web.post("/dns/update", api_dns.update),
web.post("/dns/options", api_dns.options),
web.post("/dns/restart", api_dns.restart),
@@ -646,8 +505,6 @@ class RestAPI(CoreSysAttributes):
]
)
self._register_advanced_logs("/dns", "hassio_dns")
def _register_audio(self) -> None:
"""Register Audio functions."""
api_audio = APIAudio()
@@ -657,6 +514,7 @@ class RestAPI(CoreSysAttributes):
[
web.get("/audio/info", api_audio.info),
web.get("/audio/stats", api_audio.stats),
web.get("/audio/logs", api_audio.logs),
web.post("/audio/update", api_audio.update),
web.post("/audio/restart", api_audio.restart),
web.post("/audio/reload", api_audio.reload),
@@ -669,24 +527,6 @@ class RestAPI(CoreSysAttributes):
]
)
self._register_advanced_logs("/audio", "hassio_audio")
def _register_mounts(self) -> None:
"""Register mounts endpoints."""
api_mounts = APIMounts()
api_mounts.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/mounts", api_mounts.info),
web.post("/mounts/options", api_mounts.options),
web.post("/mounts", api_mounts.create_mount),
web.put("/mounts/{mount}", api_mounts.update_mount),
web.delete("/mounts/{mount}", api_mounts.delete_mount),
web.post("/mounts/{mount}/reload", api_mounts.reload_mount),
]
)
def _register_store(self) -> None:
"""Register store endpoints."""
api_store = APIStore()
@@ -697,6 +537,7 @@ class RestAPI(CoreSysAttributes):
web.get("/store", api_store.store_info),
web.get("/store/addons", api_store.addons_list),
web.get("/store/addons/{addon}", api_store.addons_addon_info),
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
web.get(
@@ -718,8 +559,6 @@ class RestAPI(CoreSysAttributes):
"/store/addons/{addon}/update/{version}",
api_store.addons_addon_update,
),
# Must be below others since it has a wildcard in resource path
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
web.post("/store/reload", api_store.reload),
web.get("/store/repositories", api_store.repositories_list),
web.get(
@@ -771,7 +610,9 @@ class RestAPI(CoreSysAttributes):
async def start(self) -> None:
"""Run RESTful API webserver."""
await self._runner.setup()
self._site = web.TCPSite(self._runner, host="0.0.0.0", port=80)
self._site = web.TCPSite(
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
)
try:
await self._site.start()

View File

@@ -1,16 +1,14 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from typing import Any, Awaitable
from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..addons import AnyAddon
from ..addons.addon import Addon
from ..addons.manager import AnyAddon
from ..addons.utils import rating_security
from ..const import (
ATTR_ADDONS,
@@ -47,7 +45,6 @@ from ..const import (
ATTR_HOST_IPC,
ATTR_HOST_NETWORK,
ATTR_HOST_PID,
ATTR_HOST_UTS,
ATTR_HOSTNAME,
ATTR_ICON,
ATTR_INGRESS,
@@ -82,8 +79,6 @@ from ..const import (
ATTR_STARTUP,
ATTR_STATE,
ATTR_STDIN,
ATTR_SYSTEM_MANAGED,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
ATTR_TRANSLATIONS,
ATTR_UART,
ATTR_UDEV,
@@ -98,7 +93,6 @@ from ..const import (
ATTR_WEBUI,
REQUEST_FROM,
AddonBoot,
AddonBootConfig,
)
from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats
@@ -110,8 +104,8 @@ from ..exceptions import (
PwnedSecret,
)
from ..validate import docker_ports
from .const import ATTR_BOOT_CONFIG, ATTR_REMOVE_CONFIG, ATTR_SIGNED
from .utils import api_process, api_validate, json_loads
from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate, json_loads
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -130,26 +124,15 @@ SCHEMA_OPTIONS = vol.Schema(
}
)
SCHEMA_SYS_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_SYSTEM_MANAGED): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY): vol.Maybe(str),
}
)
# pylint: disable=no-value-for-parameter
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
SCHEMA_UNINSTALL = vol.Schema(
{vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
)
# pylint: enable=no-value-for-parameter
class APIAddons(CoreSysAttributes):
"""Handle RESTful API for add-on functions."""
def get_addon_for_request(self, request: web.Request) -> Addon:
"""Return addon, throw an exception if it doesn't exist."""
def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception it it doesn't exist."""
addon_slug: str = request.match_info.get("addon")
# Lookup itself
@@ -189,7 +172,6 @@ class APIAddons(CoreSysAttributes):
ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
ATTR_SYSTEM_MANAGED: addon.system_managed,
}
for addon in self.sys_addons.installed
]
@@ -203,7 +185,7 @@ class APIAddons(CoreSysAttributes):
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return add-on information."""
addon: AnyAddon = self.get_addon_for_request(request)
addon: AnyAddon = self._extract_addon(request)
data = {
ATTR_NAME: addon.name,
@@ -218,7 +200,6 @@ class APIAddons(CoreSysAttributes):
ATTR_VERSION_LATEST: addon.latest_version,
ATTR_PROTECTED: addon.protected,
ATTR_RATING: rating_security(addon),
ATTR_BOOT_CONFIG: addon.boot_config,
ATTR_BOOT: addon.boot,
ATTR_OPTIONS: addon.options,
ATTR_SCHEMA: addon.schema_ui,
@@ -234,7 +215,6 @@ class APIAddons(CoreSysAttributes):
ATTR_HOST_NETWORK: addon.host_network,
ATTR_HOST_PID: addon.host_pid,
ATTR_HOST_IPC: addon.host_ipc,
ATTR_HOST_UTS: addon.host_uts,
ATTR_HOST_DBUS: addon.host_dbus,
ATTR_PRIVILEGED: addon.privileged,
ATTR_FULL_ACCESS: addon.with_full_access,
@@ -278,8 +258,6 @@ class APIAddons(CoreSysAttributes):
ATTR_WATCHDOG: addon.watchdog,
ATTR_DEVICES: addon.static_devices
+ [device.path for device in addon.devices],
ATTR_SYSTEM_MANAGED: addon.system_managed,
ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry,
}
return data
@@ -287,7 +265,7 @@ class APIAddons(CoreSysAttributes):
@api_process
async def options(self, request: web.Request) -> None:
"""Store user options for add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
# Update secrets for validation
await self.sys_homeassistant.secrets.reload()
@@ -302,10 +280,6 @@ class APIAddons(CoreSysAttributes):
if ATTR_OPTIONS in body:
addon.options = body[ATTR_OPTIONS]
if ATTR_BOOT in body:
if addon.boot_config == AddonBootConfig.MANUAL_ONLY:
raise APIError(
f"Addon {addon.slug} boot option is set to {addon.boot_config} so it cannot be changed"
)
addon.boot = body[ATTR_BOOT]
if ATTR_AUTO_UPDATE in body:
addon.auto_update = body[ATTR_AUTO_UPDATE]
@@ -323,24 +297,10 @@ class APIAddons(CoreSysAttributes):
addon.save_persist()
@api_process
async def sys_options(self, request: web.Request) -> None:
"""Store system options for an add-on."""
addon = self.get_addon_for_request(request)
# Validate/Process Body
body = await api_validate(SCHEMA_SYS_OPTIONS, request)
if ATTR_SYSTEM_MANAGED in body:
addon.system_managed = body[ATTR_SYSTEM_MANAGED]
if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
addon.save_persist()
@api_process
async def options_validate(self, request: web.Request) -> None:
"""Validate user options for add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}
options = await request.json(loads=json_loads) or addon.options
@@ -382,7 +342,7 @@ class APIAddons(CoreSysAttributes):
slug: str = request.match_info.get("addon")
if slug != "self":
raise APIForbidden("This can be only read by the Add-on itself!")
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
# Lookup/reload secrets
await self.sys_homeassistant.secrets.reload()
@@ -394,7 +354,7 @@ class APIAddons(CoreSysAttributes):
@api_process
async def security(self, request: web.Request) -> None:
"""Store security options for add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)
if ATTR_PROTECTED in body:
@@ -406,7 +366,7 @@ class APIAddons(CoreSysAttributes):
@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
"""Return resource information."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
stats: DockerStats = await addon.stats()
@@ -422,47 +382,45 @@ class APIAddons(CoreSysAttributes):
}
@api_process
async def uninstall(self, request: web.Request) -> Awaitable[None]:
def uninstall(self, request: web.Request) -> Awaitable[None]:
"""Uninstall add-on."""
addon = self.get_addon_for_request(request)
body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request)
return await asyncio.shield(
self.sys_addons.uninstall(
addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
)
)
addon = self._extract_addon(request)
return asyncio.shield(addon.uninstall())
@api_process
async def start(self, request: web.Request) -> None:
def start(self, request: web.Request) -> Awaitable[None]:
"""Start add-on."""
addon = self.get_addon_for_request(request)
if start_task := await asyncio.shield(addon.start()):
await start_task
addon = self._extract_addon(request)
return asyncio.shield(addon.start())
@api_process
def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
return asyncio.shield(addon.stop())
@api_process
async def restart(self, request: web.Request) -> None:
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart add-on."""
addon: Addon = self.get_addon_for_request(request)
if start_task := await asyncio.shield(addon.restart()):
await start_task
addon: Addon = self._extract_addon(request)
return asyncio.shield(addon.restart())
@api_process
async def rebuild(self, request: web.Request) -> None:
def rebuild(self, request: web.Request) -> Awaitable[None]:
"""Rebuild local build add-on."""
addon = self.get_addon_for_request(request)
if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)):
await start_task
addon = self._extract_addon(request)
return asyncio.shield(addon.rebuild())
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return logs from add-on."""
addon = self._extract_addon(request)
return addon.logs()
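Several handlers above return asyncio.shield(...) rather than awaiting the operation directly. A hedged sketch of why (toy coroutines, not the Supervisor API): shielding keeps the underlying task running even if aiohttp cancels the handler because the client disconnected.

import asyncio

async def long_operation() -> str:
    await asyncio.sleep(5)  # stands in for addon.stop(), addon.uninstall(), ...
    return "done"

async def handler() -> str:
    # Cancelling handler() cancels this await, but long_operation()
    # itself keeps running to completion in the background.
    return await asyncio.shield(long_operation())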
@api_process
async def stdin(self, request: web.Request) -> None:
"""Write to stdin of add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon(request)
if not addon.with_stdin:
raise APIError(f"STDIN not supported the {addon.slug} add-on")

View File

@@ -1,12 +1,10 @@
"""Init file for Supervisor Audio RESTful API."""
import asyncio
from collections.abc import Awaitable
from dataclasses import asdict
import logging
from typing import Any
from typing import Any, Awaitable
from aiohttp import web
import attr
import voluptuous as vol
from ..const import (
@@ -36,7 +34,8 @@ from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..host.sound import StreamType
from ..validate import version_tag
from .utils import api_process, api_validate
from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -76,11 +75,15 @@ class APIAudio(CoreSysAttributes):
ATTR_UPDATE_AVAILABLE: self.sys_plugins.audio.need_update,
ATTR_HOST: str(self.sys_docker.network.audio),
ATTR_AUDIO: {
ATTR_CARD: [asdict(card) for card in self.sys_host.sound.cards],
ATTR_INPUT: [asdict(stream) for stream in self.sys_host.sound.inputs],
ATTR_OUTPUT: [asdict(stream) for stream in self.sys_host.sound.outputs],
ATTR_CARD: [attr.asdict(card) for card in self.sys_host.sound.cards],
ATTR_INPUT: [
attr.asdict(stream) for stream in self.sys_host.sound.inputs
],
ATTR_OUTPUT: [
attr.asdict(stream) for stream in self.sys_host.sound.outputs
],
ATTR_APPLICATION: [
asdict(stream) for stream in self.sys_host.sound.applications
attr.asdict(stream) for stream in self.sys_host.sound.applications
],
},
}
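The change above swaps dataclasses.asdict for attr.asdict, matching how the sound objects are modelled in each version. A small sketch of the equivalence (illustrative classes, not the real sound models):

from dataclasses import asdict, dataclass

import attr

@dataclass
class CardDC:
    name: str
    index: int

@attr.s(auto_attribs=True)
class CardAttrs:
    name: str
    index: int

# Both serializers yield the same plain dict for the JSON response.
assert asdict(CardDC("analog", 0)) == attr.asdict(CardAttrs("analog", 0))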
@@ -111,6 +114,11 @@ class APIAudio(CoreSysAttributes):
raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.audio.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Audio Docker logs."""
return self.sys_plugins.audio.logs()
@api_process
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Audio plugin."""

View File

@@ -1,8 +1,6 @@
"""Init file for Supervisor auth/SSO RESTful API."""
import asyncio
import logging
from typing import Any
from aiohttp import BasicAuth, web
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
@@ -10,19 +8,10 @@ from aiohttp.web_exceptions import HTTPUnauthorized
import voluptuous as vol
from ..addons.addon import Addon
from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
from ..utils.json import json_loads
from .const import (
ATTR_GROUP_IDS,
ATTR_IS_ACTIVE,
ATTR_IS_OWNER,
ATTR_LOCAL_ONLY,
ATTR_USERS,
CONTENT_TYPE_JSON,
CONTENT_TYPE_URL,
)
from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -78,7 +67,7 @@ class APIAuth(CoreSysAttributes):
# Json
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
data = await request.json(loads=json_loads)
data = await request.json()
return await self._process_dict(request, addon, data)
# URL encoded
@@ -100,21 +89,3 @@ class APIAuth(CoreSysAttributes):
async def cache(self, request: web.Request) -> None:
"""Process cache reset request."""
self.sys_auth.reset_data()
@api_process
async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
"""List users on the Home Assistant instance."""
return {
ATTR_USERS: [
{
ATTR_USERNAME: user[ATTR_USERNAME],
ATTR_NAME: user[ATTR_NAME],
ATTR_IS_OWNER: user[ATTR_IS_OWNER],
ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE],
ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY],
ATTR_GROUP_IDS: user[ATTR_GROUP_IDS],
}
for user in await self.sys_auth.list_users()
if user[ATTR_USERNAME]
]
}

View File

@@ -1,50 +1,35 @@
"""Backups RESTful API."""
import asyncio
from collections.abc import Callable
import errno
import logging
from pathlib import Path
import re
from tempfile import TemporaryDirectory
from typing import Any
from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol
from ..backups.backup import Backup
from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT
from ..const import (
ATTR_ADDONS,
ATTR_BACKUPS,
ATTR_COMPRESSED,
ATTR_CONTENT,
ATTR_DATE,
ATTR_DAYS_UNTIL_STALE,
ATTR_FOLDERS,
ATTR_HOMEASSISTANT,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
ATTR_LOCATON,
ATTR_NAME,
ATTR_PASSWORD,
ATTR_PROTECTED,
ATTR_REPOSITORIES,
ATTR_SIZE,
ATTR_SLUG,
ATTR_SUPERVISOR_VERSION,
ATTR_TIMEOUT,
ATTR_TYPE,
ATTR_VERSION,
BusEvent,
CoreState,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..jobs import JobSchedulerOptions
from ..mounts.const import MountUsage
from ..resolution.const import UnhealthyReason
from .const import ATTR_BACKGROUND, ATTR_JOB_ID, CONTENT_TYPE_TAR
from .const import CONTENT_TYPE_TAR
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -56,29 +41,22 @@ RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
_ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]
# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_FULL = vol.Schema(
SCHEMA_RESTORE_PARTIAL = vol.Schema(
{
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
}
)
SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
{
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
}
)
SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)})
SCHEMA_BACKUP_FULL = vol.Schema(
{
vol.Optional(ATTR_NAME): str,
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
vol.Optional(ATTR_LOCATON): vol.Maybe(str),
vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
}
)
@@ -90,18 +68,6 @@ SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
}
)
SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale,
}
)
SCHEMA_FREEZE = vol.Schema(
{
vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)),
}
)
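The restore schemas above use voluptuous' extend() so the partial schema inherits every key of the full one. A minimal sketch, with illustrative keys:

import voluptuous as vol

SCHEMA_FULL_DEMO = vol.Schema({vol.Optional("password"): vol.Maybe(str)})
SCHEMA_PARTIAL_DEMO = SCHEMA_FULL_DEMO.extend(
    {vol.Optional("addons"): vol.All([str], vol.Unique())}
)

# The extended schema accepts both the inherited and the new keys.
SCHEMA_PARTIAL_DEMO({"password": None, "addons": ["core_ssh"]})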
class APIBackups(CoreSysAttributes):
"""Handle RESTful API for backups functions."""
@@ -113,31 +79,27 @@ class APIBackups(CoreSysAttributes):
raise APIError("Backup does not exist")
return backup
def _list_backups(self):
"""Return list of backups."""
return [
{
ATTR_SLUG: backup.slug,
ATTR_NAME: backup.name,
ATTR_DATE: backup.date,
ATTR_TYPE: backup.sys_type,
ATTR_SIZE: backup.size,
ATTR_LOCATON: backup.location,
ATTR_PROTECTED: backup.protected,
ATTR_COMPRESSED: backup.compressed,
ATTR_CONTENT: {
ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
ATTR_ADDONS: backup.addon_list,
ATTR_FOLDERS: backup.folders,
},
}
for backup in self.sys_backups.list_backups
]
@api_process
async def list(self, request):
"""Return backup list."""
data_backups = self._list_backups()
data_backups = []
for backup in self.sys_backups.list_backups:
data_backups.append(
{
ATTR_SLUG: backup.slug,
ATTR_NAME: backup.name,
ATTR_DATE: backup.date,
ATTR_TYPE: backup.sys_type,
ATTR_SIZE: backup.size,
ATTR_PROTECTED: backup.protected,
ATTR_COMPRESSED: backup.compressed,
ATTR_CONTENT: {
ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
ATTR_ADDONS: backup.addon_list,
ATTR_FOLDERS: backup.folders,
},
}
)
if request.path == "/snapshots":
# Kept for backwards compatibility
@@ -146,31 +108,13 @@ class APIBackups(CoreSysAttributes):
return {ATTR_BACKUPS: data_backups}
@api_process
async def info(self, request):
"""Return backup list and manager info."""
return {
ATTR_BACKUPS: self._list_backups(),
ATTR_DAYS_UNTIL_STALE: self.sys_backups.days_until_stale,
}
@api_process
async def options(self, request):
"""Set backup manager options."""
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_DAYS_UNTIL_STALE in body:
self.sys_backups.days_until_stale = body[ATTR_DAYS_UNTIL_STALE]
self.sys_backups.save_data()
@api_process
async def reload(self, _):
async def reload(self, request):
"""Reload backup list."""
await asyncio.shield(self.sys_backups.reload())
return True
@api_process
async def backup_info(self, request):
async def info(self, request):
"""Return backup info."""
backup = self._extract_slug(request)
@@ -193,142 +137,47 @@ class APIBackups(CoreSysAttributes):
ATTR_SIZE: backup.size,
ATTR_COMPRESSED: backup.compressed,
ATTR_PROTECTED: backup.protected,
ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
ATTR_HOMEASSISTANT: backup.homeassistant_version,
ATTR_LOCATON: backup.location,
ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: backup.repositories,
ATTR_FOLDERS: backup.folders,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
}
def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
"""Change location field to mount if necessary."""
if not body.get(ATTR_LOCATON):
return body
body[ATTR_LOCATON] = self.sys_mounts.get(body[ATTR_LOCATON])
if body[ATTR_LOCATON].usage != MountUsage.BACKUP:
raise APIError(
f"Mount {body[ATTR_LOCATON].name} is not used for backups, cannot backup to there"
)
return body
async def _background_backup_task(
self, backup_method: Callable, *args, **kwargs
) -> tuple[asyncio.Task, str]:
"""Start backup task in background and return task and job ID."""
event = asyncio.Event()
job, backup_task = self.sys_jobs.schedule_job(
backup_method, JobSchedulerOptions(), *args, **kwargs
)
async def release_on_freeze(new_state: CoreState):
if new_state == CoreState.FREEZE:
event.set()
# Wait for system to get into freeze state before returning
# If the backup fails validation it will raise before getting there
listener = self.sys_bus.register_event(
BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
)
try:
await asyncio.wait(
(
backup_task,
self.sys_create_task(event.wait()),
),
return_when=asyncio.FIRST_COMPLETED,
)
return (backup_task, job.uuid)
finally:
self.sys_bus.remove_listener(listener)
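The helper above races the backup task against the freeze event and returns on whichever finishes first. A standalone sketch of that asyncio.wait pattern (names are illustrative):

import asyncio

async def wait_for_start(work: asyncio.Task, started: asyncio.Event) -> None:
    waiter = asyncio.create_task(started.wait())
    try:
        # Returns when the work fails/finishes early OR the system
        # signals it has reached the desired state.
        await asyncio.wait({work, waiter}, return_when=asyncio.FIRST_COMPLETED)
    finally:
        waiter.cancel()  # avoid leaking the event waiter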
@api_process
async def backup_full(self, request):
"""Create full backup."""
body = await api_validate(SCHEMA_BACKUP_FULL, request)
background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_full, **self._location_to_mount(body)
)
backup = await asyncio.shield(self.sys_backups.do_backup_full(**body))
if background and not backup_task.done():
return {ATTR_JOB_ID: job_id}
backup: Backup = await backup_task
if backup:
return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
raise APIError(
f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
return {ATTR_SLUG: backup.slug}
return False
@api_process
async def backup_partial(self, request):
"""Create a partial backup."""
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_partial, **self._location_to_mount(body)
)
backup = await asyncio.shield(self.sys_backups.do_backup_partial(**body))
if background and not backup_task.done():
return {ATTR_JOB_ID: job_id}
backup: Backup = await backup_task
if backup:
return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
raise APIError(
f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
return {ATTR_SLUG: backup.slug}
return False
@api_process
async def restore_full(self, request):
"""Full restore of a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_FULL, request)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_full, backup, **body
)
if background and not restore_task.done() or await restore_task:
return {ATTR_JOB_ID: job_id}
raise APIError(
f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body))
@api_process
async def restore_partial(self, request):
"""Partial restore a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_partial, backup, **body
)
if background and not restore_task.done() or await restore_task:
return {ATTR_JOB_ID: job_id}
raise APIError(
f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
@api_process
async def freeze(self, request):
"""Initiate manual freeze for external backup."""
body = await api_validate(SCHEMA_FREEZE, request)
await asyncio.shield(self.sys_backups.freeze_all(**body))
@api_process
async def thaw(self, request):
"""Begin thaw after manual freeze."""
await self.sys_backups.thaw_all()
return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))
@api_process
async def remove(self, request):
@@ -343,9 +192,9 @@ class APIBackups(CoreSysAttributes):
_LOGGER.info("Downloading backup %s", backup.slug)
response = web.FileResponse(backup.tarfile)
response.content_type = CONTENT_TYPE_TAR
response.headers[CONTENT_DISPOSITION] = (
f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
)
response.headers[
CONTENT_DISPOSITION
] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
return response
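A worked example of the filename slugification used in the Content-Disposition header above:

import re

RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")

# Runs of non-alphanumeric characters collapse to a single underscore.
assert RE_SLUGIFY_NAME.sub("_", "My backup (v2)!") == "My_backup_v2_"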
@api_process
@@ -364,8 +213,6 @@ class APIBackups(CoreSysAttributes):
backup.write(chunk)
except OSError as err:
if err.errno == errno.EBADMSG:
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
_LOGGER.error("Can't write new backup file: %s", err)
return False

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor HA cli RESTful API."""
import asyncio
import logging
from typing import Any

View File

@@ -1,79 +1,39 @@
"""Const for API."""
from enum import StrEnum
CONTENT_TYPE_BINARY = "application/octet-stream"
CONTENT_TYPE_JSON = "application/json"
CONTENT_TYPE_PNG = "image/png"
CONTENT_TYPE_TAR = "application/tar"
CONTENT_TYPE_TEXT = "text/plain"
CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
CONTENT_TYPE_X_LOG = "text/x-log"
COOKIE_INGRESS = "ingress_session"
ATTR_AGENT_VERSION = "agent_version"
HEADER_TOKEN_OLD = "X-Hassio-Key"
HEADER_TOKEN = "X-Supervisor-Token"
ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_ATTRIBUTES = "attributes"
ATTR_AGENT_VERSION = "agent_version"
ATTR_AVAILABLE_UPDATES = "available_updates"
ATTR_BACKGROUND = "background"
ATTR_BOOT_CONFIG = "boot_config"
ATTR_BOOT_SLOT = "boot_slot"
ATTR_BOOT_SLOTS = "boot_slots"
ATTR_BOOT_TIMESTAMP = "boot_timestamp"
ATTR_BOOTS = "boots"
ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
ATTR_BROADCAST_MDNS = "broadcast_mdns"
ATTR_BY_ID = "by_id"
ATTR_CHILDREN = "children"
ATTR_CONNECTION_BUS = "connection_bus"
ATTR_DATA_DISK = "data_disk"
ATTR_DEVICE = "device"
ATTR_DEV_PATH = "dev_path"
ATTR_DISKS = "disks"
ATTR_DRIVES = "drives"
ATTR_DT_SYNCHRONIZED = "dt_synchronized"
ATTR_DT_UTC = "dt_utc"
ATTR_EJECTABLE = "ejectable"
ATTR_FALLBACK = "fallback"
ATTR_FILESYSTEMS = "filesystems"
ATTR_FORCE = "force"
ATTR_GROUP_IDS = "group_ids"
ATTR_IDENTIFIERS = "identifiers"
ATTR_IS_ACTIVE = "is_active"
ATTR_IS_OWNER = "is_owner"
ATTR_JOB_ID = "job_id"
ATTR_JOBS = "jobs"
ATTR_LLMNR = "llmnr"
ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
ATTR_LOCAL_ONLY = "local_only"
ATTR_MDNS = "mdns"
ATTR_MODEL = "model"
ATTR_MOUNTS = "mounts"
ATTR_MOUNT_POINTS = "mount_points"
ATTR_PANEL_PATH = "panel_path"
ATTR_REMOVABLE = "removable"
ATTR_REMOVE_CONFIG = "remove_config"
ATTR_REVISION = "revision"
ATTR_SAFE_MODE = "safe_mode"
ATTR_SEAT = "seat"
ATTR_SIGNED = "signed"
ATTR_STARTUP_TIME = "startup_time"
ATTR_STATUS = "status"
ATTR_UPDATE_TYPE = "update_type"
ATTR_USE_NTP = "use_ntp"
ATTR_BY_ID = "by_id"
ATTR_SUBSYSTEM = "subsystem"
ATTR_SYSFS = "sysfs"
ATTR_SYSTEM_HEALTH_LED = "system_health_led"
ATTR_TIME_DETECTED = "time_detected"
ATTR_UPDATE_TYPE = "update_type"
ATTR_USAGE = "usage"
ATTR_USE_NTP = "use_ntp"
ATTR_USERS = "users"
ATTR_VENDOR = "vendor"
ATTR_VIRTUALIZATION = "virtualization"
class BootSlot(StrEnum):
"""Boot slots used by HAOS."""
A = "A"
B = "B"
ATTR_DEV_PATH = "dev_path"
ATTR_ATTRIBUTES = "attributes"
ATTR_CHILDREN = "children"

View File

@@ -1,10 +1,6 @@
"""Init file for Supervisor network RESTful API."""
import logging
import voluptuous as vol
from ..addons.addon import Addon
from ..const import (
ATTR_ADDON,
ATTR_CONFIG,
@@ -13,18 +9,16 @@ from ..const import (
ATTR_SERVICES,
ATTR_UUID,
REQUEST_FROM,
AddonState,
)
from ..coresys import CoreSysAttributes
from ..discovery.validate import valid_discovery_service
from ..exceptions import APIError, APIForbidden
from .utils import api_process, api_validate, require_home_assistant
_LOGGER: logging.Logger = logging.getLogger(__name__)
SCHEMA_DISCOVERY = vol.Schema(
{
vol.Required(ATTR_SERVICE): str,
vol.Required(ATTR_CONFIG): dict,
vol.Required(ATTR_SERVICE): valid_discovery_service,
vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
}
)
@@ -42,19 +36,19 @@ class APIDiscovery(CoreSysAttributes):
@api_process
@require_home_assistant
async def list(self, request):
"""Show registered and available services."""
"""Show register services."""
# Get available discovery
discovery = [
{
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
}
for message in self.sys_discovery.list_messages
if (addon := self.sys_addons.get(message.addon, local_only=True))
and addon.state == AddonState.STARTED
]
discovery = []
for message in self.sys_discovery.list_messages:
discovery.append(
{
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
}
)
# Get available services/add-ons
services = {}
@@ -68,19 +62,11 @@ class APIDiscovery(CoreSysAttributes):
async def set_discovery(self, request):
"""Write data into a discovery pipeline."""
body = await api_validate(SCHEMA_DISCOVERY, request)
addon: Addon = request[REQUEST_FROM]
service = body[ATTR_SERVICE]
addon = request[REQUEST_FROM]
# Access?
if body[ATTR_SERVICE] not in addon.discovery:
_LOGGER.error(
"Add-on %s attempted to send discovery for service %s which is not listed in its config. Please report this to the maintainer of the add-on",
addon.name,
service,
)
raise APIForbidden(
"Add-ons must list services they provide via discovery in their config!"
)
raise APIForbidden("Can't use discovery!")
# Process discovery message
message = self.sys_discovery.send(addon, **body)

View File

@@ -1,9 +1,7 @@
"""Init file for Supervisor DNS RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from typing import Any, Awaitable
from aiohttp import web
import voluptuous as vol
@@ -27,8 +25,8 @@ from ..const import (
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import dns_server_list, version_tag
from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS
from .utils import api_process, api_validate
from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -106,6 +104,11 @@ class APICoreDNS(CoreSysAttributes):
raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.dns.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return DNS Docker logs."""
return self.sys_plugins.dns.logs()
@api_process
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart CoreDNS plugin."""

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import logging
from typing import Any

View File

@@ -1,45 +1,19 @@
"""Init file for Supervisor hardware RESTful API."""
import logging
from typing import Any
from aiohttp import web
from ..const import (
ATTR_AUDIO,
ATTR_DEVICES,
ATTR_ID,
ATTR_INPUT,
ATTR_NAME,
ATTR_OUTPUT,
ATTR_SERIAL,
ATTR_SIZE,
ATTR_SYSTEM,
)
from ..const import ATTR_AUDIO, ATTR_DEVICES, ATTR_INPUT, ATTR_NAME, ATTR_OUTPUT
from ..coresys import CoreSysAttributes
from ..dbus.udisks2 import UDisks2Manager
from ..dbus.udisks2.block import UDisks2Block
from ..dbus.udisks2.drive import UDisks2Drive
from ..hardware.data import Device
from .const import (
ATTR_ATTRIBUTES,
ATTR_BY_ID,
ATTR_CHILDREN,
ATTR_CONNECTION_BUS,
ATTR_DEV_PATH,
ATTR_DEVICE,
ATTR_DRIVES,
ATTR_EJECTABLE,
ATTR_FILESYSTEMS,
ATTR_MODEL,
ATTR_MOUNT_POINTS,
ATTR_REMOVABLE,
ATTR_REVISION,
ATTR_SEAT,
ATTR_SUBSYSTEM,
ATTR_SYSFS,
ATTR_TIME_DETECTED,
ATTR_VENDOR,
)
from .utils import api_process
@@ -47,7 +21,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
def device_struct(device: Device) -> dict[str, Any]:
"""Return a dict with information of a interface to be used in the API."""
"""Return a dict with information of a interface to be used in th API."""
return {
ATTR_NAME: device.name,
ATTR_SYSFS: device.sysfs,
@@ -59,42 +33,6 @@ def device_struct(device: Device) -> dict[str, Any]:
}
def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
"""Return a dict with information of a filesystem block device to be used in the API."""
return {
ATTR_DEVICE: str(fs_block.device),
ATTR_ID: fs_block.id,
ATTR_SIZE: fs_block.size,
ATTR_NAME: fs_block.id_label,
ATTR_SYSTEM: fs_block.hint_system,
ATTR_MOUNT_POINTS: [
str(mount_point) for mount_point in fs_block.filesystem.mount_points
],
}
def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]:
"""Return a dict with information of a disk to be used in the API."""
return {
ATTR_VENDOR: drive.vendor,
ATTR_MODEL: drive.model,
ATTR_REVISION: drive.revision,
ATTR_SERIAL: drive.serial,
ATTR_ID: drive.id,
ATTR_SIZE: drive.size,
ATTR_TIME_DETECTED: drive.time_detected.isoformat(),
ATTR_CONNECTION_BUS: drive.connection_bus,
ATTR_SEAT: drive.seat,
ATTR_REMOVABLE: drive.removable,
ATTR_EJECTABLE: drive.ejectable,
ATTR_FILESYSTEMS: [
filesystem_struct(block)
for block in udisks2.block_devices
if block.filesystem and block.drive == drive.object_path
],
}
class APIHardware(CoreSysAttributes):
"""Handle RESTful API for hardware functions."""
@@ -104,11 +42,7 @@ class APIHardware(CoreSysAttributes):
return {
ATTR_DEVICES: [
device_struct(device) for device in self.sys_hardware.devices
],
ATTR_DRIVES: [
drive_struct(self.sys_dbus.udisks2, drive)
for drive in self.sys_dbus.udisks2.drives
],
]
}
@api_process

View File

@@ -1,9 +1,7 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from typing import Any, Awaitable
from aiohttp import web
import voluptuous as vol
@@ -13,7 +11,6 @@ from ..const import (
ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT,
ATTR_BACKUP,
ATTR_BACKUPS_EXCLUDE_DATABASE,
ATTR_BLK_READ,
ATTR_BLK_WRITE,
ATTR_BOOT,
@@ -35,10 +32,10 @@ from ..const import (
ATTR_WATCHDOG,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError
from ..exceptions import APIError
from ..validate import docker_image, network_port, version_tag
from .const import ATTR_FORCE, ATTR_SAFE_MODE
from .utils import api_process, api_validate
from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -53,7 +50,6 @@ SCHEMA_OPTIONS = vol.Schema(
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str),
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
vol.Optional(ATTR_BACKUPS_EXCLUDE_DATABASE): vol.Boolean(),
}
)
@@ -64,34 +60,10 @@ SCHEMA_UPDATE = vol.Schema(
}
)
SCHEMA_RESTART = vol.Schema(
{
vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
SCHEMA_STOP = vol.Schema(
{
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
class APIHomeAssistant(CoreSysAttributes):
"""Handle RESTful API for Home Assistant functions."""
async def _check_offline_migration(self, force: bool = False) -> None:
"""Check and raise if there's an offline DB migration in progress."""
if (
not force
and (state := await self.sys_homeassistant.api.get_api_state())
and state.offline_db_migration
):
raise APIDBMigrationInProgress(
"Offline database migration in progress, try again after it has completed"
)
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return host information."""
@@ -109,7 +81,6 @@ class APIHomeAssistant(CoreSysAttributes):
ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input,
ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output,
ATTR_BACKUPS_EXCLUDE_DATABASE: self.sys_homeassistant.backups_exclude_database,
}
@api_process
@@ -119,9 +90,6 @@ class APIHomeAssistant(CoreSysAttributes):
if ATTR_IMAGE in body:
self.sys_homeassistant.image = body[ATTR_IMAGE]
self.sys_homeassistant.override_image = (
self.sys_homeassistant.image != self.sys_homeassistant.default_image
)
if ATTR_BOOT in body:
self.sys_homeassistant.boot = body[ATTR_BOOT]
@@ -144,11 +112,6 @@ class APIHomeAssistant(CoreSysAttributes):
if ATTR_AUDIO_OUTPUT in body:
self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT]
if ATTR_BACKUPS_EXCLUDE_DATABASE in body:
self.sys_homeassistant.backups_exclude_database = body[
ATTR_BACKUPS_EXCLUDE_DATABASE
]
self.sys_homeassistant.save_data()
@api_process
@@ -173,7 +136,6 @@ class APIHomeAssistant(CoreSysAttributes):
async def update(self, request: web.Request) -> None:
"""Update Home Assistant."""
body = await api_validate(SCHEMA_UPDATE, request)
await self._check_offline_migration()
await asyncio.shield(
self.sys_homeassistant.core.update(
@@ -183,12 +145,9 @@ class APIHomeAssistant(CoreSysAttributes):
)
@api_process
async def stop(self, request: web.Request) -> Awaitable[None]:
def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop Home Assistant."""
body = await api_validate(SCHEMA_STOP, request)
await self._check_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_homeassistant.core.stop())
return asyncio.shield(self.sys_homeassistant.core.stop())
@api_process
def start(self, request: web.Request) -> Awaitable[None]:
@@ -196,24 +155,19 @@ class APIHomeAssistant(CoreSysAttributes):
return asyncio.shield(self.sys_homeassistant.core.start())
@api_process
async def restart(self, request: web.Request) -> None:
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request)
await self._check_offline_migration(force=body[ATTR_FORCE])
await asyncio.shield(
self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
)
return asyncio.shield(self.sys_homeassistant.core.restart())
@api_process
async def rebuild(self, request: web.Request) -> None:
def rebuild(self, request: web.Request) -> Awaitable[None]:
"""Rebuild Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request)
await self._check_offline_migration(force=body[ATTR_FORCE])
return asyncio.shield(self.sys_homeassistant.core.rebuild())
await asyncio.shield(
self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE])
)
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Home Assistant Docker logs."""
return self.sys_homeassistant.core.logs()
@api_process
async def check(self, request: web.Request) -> None:

View File

@@ -1,13 +1,9 @@
"""Init file for Supervisor host RESTful API."""
import asyncio
from contextlib import suppress
import logging
from typing import Awaitable
from aiohttp import web
from aiohttp.hdrs import ACCEPT, RANGE
import voluptuous as vol
from voluptuous.error import CoerceInvalid
from ..const import (
ATTR_CHASSIS,
@@ -28,66 +24,29 @@ from ..const import (
ATTR_TIMEZONE,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIDBMigrationInProgress, APIError, HostLogError
from ..host.const import (
PARAM_BOOT_ID,
PARAM_FOLLOW,
PARAM_SYSLOG_IDENTIFIER,
LogFormat,
LogFormatter,
)
from ..utils.systemd_journal import journal_logs_reader
from .const import (
ATTR_AGENT_VERSION,
ATTR_APPARMOR_VERSION,
ATTR_BOOT_TIMESTAMP,
ATTR_BOOTS,
ATTR_BROADCAST_LLMNR,
ATTR_BROADCAST_MDNS,
ATTR_DT_SYNCHRONIZED,
ATTR_DT_UTC,
ATTR_FORCE,
ATTR_IDENTIFIERS,
ATTR_LLMNR_HOSTNAME,
ATTR_STARTUP_TIME,
ATTR_USE_NTP,
ATTR_VIRTUALIZATION,
CONTENT_TYPE_TEXT,
CONTENT_TYPE_X_LOG,
CONTENT_TYPE_BINARY,
)
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
IDENTIFIER = "identifier"
BOOTID = "bootid"
DEFAULT_LINES = 100
SERVICE = "service"
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})
# pylint: disable=no-value-for-parameter
SCHEMA_SHUTDOWN = vol.Schema(
{
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
}
)
# pylint: enable=no-value-for-parameter
class APIHost(CoreSysAttributes):
"""Handle RESTful API for host functions."""
async def _check_ha_offline_migration(self, force: bool) -> None:
"""Check if HA has an offline migration in progress and raise if not forced."""
if (
not force
and (state := await self.sys_homeassistant.api.get_api_state())
and state.offline_db_migration
):
raise APIDBMigrationInProgress(
"Home Assistant offline database migration in progress, please wait until complete before shutting down host"
)
@api_process
async def info(self, request):
"""Return host information."""
@@ -95,7 +54,6 @@ class APIHost(CoreSysAttributes):
ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
ATTR_CHASSIS: self.sys_host.info.chassis,
ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
ATTR_CPE: self.sys_host.info.cpe,
ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_DISK_FREE: self.sys_host.info.free_space,
@@ -129,20 +87,14 @@ class APIHost(CoreSysAttributes):
)
@api_process
async def reboot(self, request):
def reboot(self, request):
"""Reboot host."""
body = await api_validate(SCHEMA_SHUTDOWN, request)
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_host.control.reboot())
return asyncio.shield(self.sys_host.control.reboot())
@api_process
async def shutdown(self, request):
def shutdown(self, request):
"""Poweroff host."""
body = await api_validate(SCHEMA_SHUTDOWN, request)
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
return await asyncio.shield(self.sys_host.control.shutdown())
return asyncio.shield(self.sys_host.control.shutdown())
@api_process
def reload(self, request):
@@ -165,109 +117,30 @@ class APIHost(CoreSysAttributes):
return {ATTR_SERVICES: services}
@api_process
async def list_boots(self, _: web.Request):
"""Return a list of boot IDs."""
boot_ids = await self.sys_host.logs.get_boot_ids()
return {
ATTR_BOOTS: {
str(1 + i - len(boot_ids)): boot_id
for i, boot_id in enumerate(boot_ids)
}
}
def service_start(self, request):
"""Start a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.start(unit))
@api_process
async def list_identifiers(self, _: web.Request):
"""Return a list of syslog identifiers."""
return {ATTR_IDENTIFIERS: await self.sys_host.logs.get_identifiers()}
def service_stop(self, request):
"""Stop a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.stop(unit))
async def _get_boot_id(self, possible_offset: str) -> str:
"""Convert offset into boot ID if required."""
with suppress(CoerceInvalid):
offset = vol.Coerce(int)(possible_offset)
try:
return await self.sys_host.logs.get_boot_id(offset)
except (ValueError, HostLogError) as err:
raise APIError() from err
return possible_offset
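_get_boot_id accepts either a numeric offset or a literal boot ID. A sketch of the coercion idiom, standalone, using the same voluptuous pieces:

from contextlib import suppress

import voluptuous as vol
from voluptuous.error import CoerceInvalid

def parse_boot(value: str) -> int | str:
    # Numeric strings become int offsets; anything else passes through
    # unchanged and is treated as a boot ID.
    with suppress(CoerceInvalid):
        return vol.Coerce(int)(value)
    return value

assert parse_boot("-1") == -1
assert parse_boot("abc123def") == "abc123def"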
@api_process
def service_reload(self, request):
"""Reload a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.reload(unit))
async def advanced_logs_handler(
self, request: web.Request, identifier: str | None = None, follow: bool = False
) -> web.StreamResponse:
"""Return systemd-journald logs."""
log_formatter = LogFormatter.PLAIN
params = {}
if identifier:
params[PARAM_SYSLOG_IDENTIFIER] = identifier
elif IDENTIFIER in request.match_info:
params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
else:
params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
# host logs should always be verbose, no matter what Accept header is used
log_formatter = LogFormatter.VERBOSE
@api_process
def service_restart(self, request):
"""Restart a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.restart(unit))
if BOOTID in request.match_info:
params[PARAM_BOOT_ID] = await self._get_boot_id(
request.match_info.get(BOOTID)
)
if follow:
params[PARAM_FOLLOW] = ""
if ACCEPT in request.headers and request.headers[ACCEPT] not in [
CONTENT_TYPE_TEXT,
CONTENT_TYPE_X_LOG,
"*/*",
]:
raise APIError(
"Invalid content type requested. Only text/plain and text/x-log "
"supported for now."
)
if "verbose" in request.query or request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
log_formatter = LogFormatter.VERBOSE
if "lines" in request.query:
lines = request.query.get("lines", DEFAULT_LINES)
try:
lines = int(lines)
except ValueError:
# If the user passed a non-integer value, just use the default instead of erroring.
lines = DEFAULT_LINES
finally:
# We can't use the entries= Range header syntax to refer to the last 1 line,
# and passing 1 to the calculation below would return the 1st line of the logs
# instead. Since this is really an edge case that doesn't matter much, we'll just
# return 2 lines at minimum.
lines = max(2, lines)
# entries=cursor[[:num_skip]:num_entries]
range_header = f"entries=:-{lines-1}:{lines}"
elif RANGE in request.headers:
range_header = request.headers.get(RANGE)
else:
range_header = f"entries=:-{DEFAULT_LINES-1}:{DEFAULT_LINES}"
async with self.sys_host.logs.journald_logs(
params=params, range_header=range_header, accept=LogFormat.JOURNAL
) as resp:
try:
response = web.StreamResponse()
response.content_type = CONTENT_TYPE_TEXT
headers_returned = False
async for cursor, line in journal_logs_reader(resp, log_formatter):
if not headers_returned:
if cursor:
response.headers["X-First-Cursor"] = cursor
await response.prepare(request)
headers_returned = True
await response.write(line.encode("utf-8") + b"\n")
except ConnectionResetError as ex:
raise APIError(
"Connection reset when trying to fetch data from systemd-journald."
) from ex
return response
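For readers decoding the Range header arithmetic in the handler above (journald syntax entries=cursor[[:num_skip]:num_entries]), a worked example:

def range_for_last(lines: int) -> str:
    # journald can't address exactly the last line with this syntax,
    # so the handler clamps to a minimum of 2 lines.
    lines = max(2, lines)
    return f"entries=:-{lines - 1}:{lines}"

assert range_for_last(100) == "entries=:-99:100"  # skip back 99, return 100
assert range_for_last(1) == "entries=:-1:2"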
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
async def advanced_logs(
self, request: web.Request, identifier: str | None = None, follow: bool = False
) -> web.StreamResponse:
"""Return systemd-journald logs. Wrapped as standard API handler."""
return await self.advanced_logs_handler(request, identifier, follow)
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return host kernel logs."""
return self.sys_host.info.get_dmesg()

View File

@@ -1,9 +1,8 @@
"""Supervisor Add-on ingress service."""
import asyncio
from ipaddress import ip_address
import logging
from typing import Any
from typing import Any, Union
import aiohttp
from aiohttp import ClientTimeout, hdrs, web
@@ -22,65 +21,20 @@ from ..const import (
ATTR_ICON,
ATTR_PANELS,
ATTR_SESSION,
ATTR_SESSION_DATA_USER_ID,
ATTR_TITLE,
HEADER_REMOTE_USER_DISPLAY_NAME,
HEADER_REMOTE_USER_ID,
HEADER_REMOTE_USER_NAME,
HEADER_TOKEN,
HEADER_TOKEN_OLD,
IngressSessionData,
IngressSessionDataUser,
)
from ..coresys import CoreSysAttributes
from ..exceptions import HomeAssistantAPIError
from .const import COOKIE_INGRESS
from .const import COOKIE_INGRESS, HEADER_TOKEN, HEADER_TOKEN_OLD
from .utils import api_process, api_validate, require_home_assistant
_LOGGER: logging.Logger = logging.getLogger(__name__)
VALIDATE_SESSION_DATA = vol.Schema({ATTR_SESSION: str})
"""Expected optional payload of create session request"""
SCHEMA_INGRESS_CREATE_SESSION_DATA = vol.Schema(
{
vol.Optional(ATTR_SESSION_DATA_USER_ID): str,
}
)
# from https://github.com/aio-libs/aiohttp/blob/8ae650bee4add9f131d49b96a0a150311ea58cd1/aiohttp/helpers.py#L1059C1-L1079C1
def must_be_empty_body(method: str, code: int) -> bool:
"""Check if a request must return an empty body."""
return (
status_code_must_be_empty_body(code)
or method_must_be_empty_body(method)
or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
)
def method_must_be_empty_body(method: str) -> bool:
"""Check if a method must return an empty body."""
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
return method.upper() == hdrs.METH_HEAD
def status_code_must_be_empty_body(code: int) -> bool:
"""Check if a status code must return an empty body."""
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
return code in {204, 304} or 100 <= code < 200
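A quick usage check for the empty-body helpers defined above; these keep the ingress proxy from streaming bodies that HTTP forbids:

# HEAD responses and 1xx/204/304 statuses never carry a body.
assert must_be_empty_body("HEAD", 200) is True
assert must_be_empty_body("GET", 204) is True
assert must_be_empty_body("GET", 200) is False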
class APIIngress(CoreSysAttributes):
"""Ingress view to handle add-on webui routing."""
_list_of_users: list[IngressSessionDataUser]
def __init__(self) -> None:
"""Initialize APIIngress."""
self._list_of_users = []
def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception it it doesn't exist."""
token = request.match_info.get("token")
@@ -115,19 +69,7 @@ class APIIngress(CoreSysAttributes):
@require_home_assistant
async def create_session(self, request: web.Request) -> dict[str, Any]:
"""Create a new session."""
schema_ingress_config_session_data = await api_validate(
SCHEMA_INGRESS_CREATE_SESSION_DATA, request
)
data: IngressSessionData | None = None
if ATTR_SESSION_DATA_USER_ID in schema_ingress_config_session_data:
user = await self._find_user_by_id(
schema_ingress_config_session_data[ATTR_SESSION_DATA_USER_ID]
)
if user:
data = IngressSessionData(user)
session = self.sys_ingress.create_session(data)
session = self.sys_ingress.create_session()
return {ATTR_SESSION: session}
@api_process
@@ -141,9 +83,10 @@ class APIIngress(CoreSysAttributes):
_LOGGER.warning("No valid ingress session %s", data[ATTR_SESSION])
raise HTTPUnauthorized()
@require_home_assistant
async def handler(
self, request: web.Request
) -> web.Response | web.StreamResponse | web.WebSocketResponse:
) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
"""Route data to Supervisor ingress service."""
# Check Ingress Session
@@ -155,14 +98,13 @@ class APIIngress(CoreSysAttributes):
# Process requests
addon = self._extract_addon(request)
path = request.match_info.get("path")
session_data = self.sys_ingress.get_session_data(session)
try:
# Websocket
if _is_websocket(request):
return await self._handle_websocket(request, addon, path, session_data)
return await self._handle_websocket(request, addon, path)
# Request
return await self._handle_request(request, addon, path, session_data)
return await self._handle_request(request, addon, path)
except aiohttp.ClientError as err:
_LOGGER.error("Ingress error: %s", err)
@@ -170,11 +112,7 @@ class APIIngress(CoreSysAttributes):
raise HTTPBadGateway()
async def _handle_websocket(
self,
request: web.Request,
addon: Addon,
path: str,
session_data: IngressSessionData | None,
self, request: web.Request, addon: Addon, path: str
) -> web.WebSocketResponse:
"""Ingress route for websocket."""
if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers:
@@ -192,7 +130,7 @@ class APIIngress(CoreSysAttributes):
# Preparing
url = self._create_url(addon, path)
source_header = _init_header(request, addon, session_data)
source_header = _init_header(request, addon)
# Support GET query
if request.query_string:
@@ -209,8 +147,8 @@ class APIIngress(CoreSysAttributes):
# Proxy requests
await asyncio.wait(
[
self.sys_create_task(_websocket_forward(ws_server, ws_client)),
self.sys_create_task(_websocket_forward(ws_client, ws_server)),
_websocket_forward(ws_server, ws_client),
_websocket_forward(ws_client, ws_server),
],
return_when=asyncio.FIRST_COMPLETED,
)
@@ -218,15 +156,11 @@ class APIIngress(CoreSysAttributes):
return ws_server
async def _handle_request(
self,
request: web.Request,
addon: Addon,
path: str,
session_data: IngressSessionData | None,
) -> web.Response | web.StreamResponse:
self, request: web.Request, addon: Addon, path: str
) -> Union[web.Response, web.StreamResponse]:
"""Ingress route for request."""
url = self._create_url(addon, path)
source_header = _init_header(request, addon, session_data)
source_header = _init_header(request, addon)
# Passing the raw stream breaks requests for some webservers
# since we just need it for POST requests really, for all other methods
@@ -246,21 +180,12 @@ class APIIngress(CoreSysAttributes):
allow_redirects=False,
data=data,
timeout=ClientTimeout(total=None),
skip_auto_headers={hdrs.CONTENT_TYPE},
) as result:
headers = _response_header(result)
# Avoid parsing content_type in simple cases for better performance
if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE):
content_type = (maybe_content_type.partition(";"))[0].strip()
else:
content_type = result.content_type
# Simple request
if (
# empty body responses should not be streamed,
# otherwise aiohttp < 3.9.0 may generate
# an invalid "0\r\n\r\n" chunk instead of an empty response.
must_be_empty_body(request.method, result.status)
or hdrs.CONTENT_LENGTH in result.headers
hdrs.CONTENT_LENGTH in result.headers
and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
):
# Return Response
@@ -268,13 +193,13 @@ class APIIngress(CoreSysAttributes):
return web.Response(
headers=headers,
status=result.status,
content_type=content_type,
content_type=result.content_type,
body=body,
)
# Stream response
response = web.StreamResponse(status=result.status, headers=headers)
response.content_type = content_type
response.content_type = result.content_type
try:
await response.prepare(request)
@@ -290,35 +215,13 @@ class APIIngress(CoreSysAttributes):
return response
async def _find_user_by_id(self, user_id: str) -> IngressSessionDataUser | None:
"""Find user object by the user's ID."""
try:
list_of_users = await self.sys_homeassistant.get_users()
except (HomeAssistantAPIError, TypeError) as err:
_LOGGER.error(
"%s error occurred while requesting list of users: %s", type(err), err
)
return None
if list_of_users is not None:
self._list_of_users = list_of_users
return next((user for user in self._list_of_users if user.id == user_id), None)
def _init_header(
request: web.Request, addon: Addon, session_data: IngressSessionData | None
) -> CIMultiDict | dict[str, str]:
request: web.Request, addon: str
) -> Union[CIMultiDict, dict[str, str]]:
"""Create initial header."""
headers = {}
if session_data is not None:
headers[HEADER_REMOTE_USER_ID] = session_data.user.id
if session_data.user.username is not None:
headers[HEADER_REMOTE_USER_NAME] = session_data.user.username
if session_data.user.display_name is not None:
headers[HEADER_REMOTE_USER_DISPLAY_NAME] = session_data.user.display_name
# filter flags
for name, value in request.headers.items():
if name in (
@@ -331,9 +234,6 @@ def _init_header(
hdrs.SEC_WEBSOCKET_KEY,
istr(HEADER_TOKEN),
istr(HEADER_TOKEN_OLD),
istr(HEADER_REMOTE_USER_ID),
istr(HEADER_REMOTE_USER_NAME),
istr(HEADER_REMOTE_USER_DISPLAY_NAME),
):
continue
headers[name] = value
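A condensed sketch of the forwarding rule implemented by the loop above: copy request headers except connection-control and Supervisor auth headers (the drop set here is abbreviated; the real loop also filters websocket and transfer-encoding headers):

def forward_headers(incoming: dict[str, str]) -> dict[str, str]:
    drop = {"connection", "upgrade", "x-supervisor-token", "x-hassio-key"}
    return {k: v for k, v in incoming.items() if k.lower() not in drop}

assert "X-Supervisor-Token" not in forward_headers(
    {"Accept": "text/plain", "X-Supervisor-Token": "secret"}
)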

View File

@@ -1,5 +1,4 @@
"""Init file for Supervisor Jobs RESTful API."""
import logging
from typing import Any
@@ -7,10 +6,7 @@ from aiohttp import web
import voluptuous as vol
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..jobs import SupervisorJob
from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
from .const import ATTR_JOBS
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -23,47 +19,11 @@ SCHEMA_OPTIONS = vol.Schema(
class APIJobs(CoreSysAttributes):
"""Handle RESTful API for OS functions."""
def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
"""Return current job tree."""
jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
for job in self.sys_jobs.jobs:
if job.internal:
continue
if job.parent_id not in jobs_by_parent:
jobs_by_parent[job.parent_id] = [job]
else:
jobs_by_parent[job.parent_id].append(job)
job_list: list[dict[str, Any]] = []
queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
[(job_list, start)]
if start
else [(job_list, job) for job in jobs_by_parent.get(None, [])]
)
while queue:
(current_list, current_job) = queue.pop(0)
child_jobs: list[dict[str, Any]] = []
# We remove parent_id and instead use that info to represent jobs as a tree
job_dict = current_job.as_dict() | {"child_jobs": child_jobs}
job_dict.pop("parent_id")
current_list.append(job_dict)
if current_job.uuid in jobs_by_parent:
queue.extend(
[(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)]
)
return job_list
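A standalone sketch of the tree construction above: group jobs by parent, then breadth-first attach each node's dict to its parent's child list (field names are illustrative, not the Supervisor job schema):

from collections import defaultdict

def build_tree(jobs: list[dict]) -> list[dict]:
    by_parent: dict[str | None, list[dict]] = defaultdict(list)
    for job in jobs:
        by_parent[job["parent_id"]].append(job)

    roots: list[dict] = []
    queue = [(roots, job) for job in by_parent[None]]
    while queue:
        target, job = queue.pop(0)
        node = {"uuid": job["uuid"], "child_jobs": []}
        target.append(node)
        queue.extend((node["child_jobs"], c) for c in by_parent[job["uuid"]])
    return roots

jobs = [
    {"uuid": "a", "parent_id": None},
    {"uuid": "b", "parent_id": "a"},
]
assert build_tree(jobs) == [
    {"uuid": "a", "child_jobs": [{"uuid": "b", "child_jobs": []}]}
]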
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return JobManager information."""
return {
ATTR_IGNORE_CONDITIONS: self.sys_jobs.ignore_conditions,
ATTR_JOBS: self._list_jobs(),
}
@api_process
@@ -82,19 +42,3 @@ class APIJobs(CoreSysAttributes):
async def reset(self, request: web.Request) -> None:
"""Reset options for JobManager."""
self.sys_jobs.reset_data()
@api_process
async def job_info(self, request: web.Request) -> dict[str, Any]:
"""Get details of a job by ID."""
job = self.sys_jobs.get_job(request.match_info.get("uuid"))
return self._list_jobs(job)[0]
@api_process
async def remove_job(self, request: web.Request) -> None:
"""Remove a completed job."""
job = self.sys_jobs.get_job(request.match_info.get("uuid"))
if not job.done:
raise APIError(f"Job {job.uuid} is not done!")
self.sys_jobs.remove_job(job)
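For context, a hypothetical client exercising the two new per-job endpoints; the /jobs/{uuid} paths are inferred from the match_info lookups above, and SUPERVISOR_TOKEN is the environment variable add-ons receive:

import asyncio
import os

import aiohttp


async def inspect_and_remove(job_uuid: str) -> None:
    headers = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}
    async with aiohttp.ClientSession("http://supervisor", headers=headers) as session:
        async with session.get(f"/jobs/{job_uuid}") as resp:
            print(await resp.json())  # job details, children nested under child_jobs
        # A job can only be removed once it is done, per the check above
        async with session.delete(f"/jobs/{job_uuid}") as resp:
            resp.raise_for_status()


asyncio.run(inspect_and_remove("a1b2c3"))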


@@ -1,17 +1,10 @@
"""Handle security part of this API."""
import logging
import re
from typing import Final
from urllib.parse import unquote
from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
from awesomeversion import AwesomeVersion
from aiohttp.web_exceptions import HTTPForbidden, HTTPUnauthorized
from supervisor.homeassistant.const import LANDINGPAGE
from ...addons.const import RE_SLUG
from ...const import (
REQUEST_FROM,
ROLE_ADMIN,
@@ -22,26 +15,14 @@ from ...const import (
CoreState,
)
from ...coresys import CoreSys, CoreSysAttributes
from ...utils import version_is_new_enough
from ..utils import api_return_error, excract_supervisor_token
_LOGGER: logging.Logger = logging.getLogger(__name__)
_CORE_VERSION: Final = AwesomeVersion("2023.3.4")
# fmt: off
_CORE_FRONTEND_PATHS: Final = (
r"|/app/.*\.(?:js|gz|json|map|woff2)"
r"|/(store/)?addons/" + RE_SLUG + r"/(logo|icon)"
)
CORE_FRONTEND: Final = re.compile(
r"^(?:" + _CORE_FRONTEND_PATHS + r")$"
)
# Block Anytime
BLACKLIST: Final = re.compile(
BLACKLIST = re.compile(
r"^(?:"
r"|/homeassistant/api/hassio/.*"
r"|/core/api/hassio/.*"
@@ -49,27 +30,25 @@ BLACKLIST: Final = re.compile(
)
# Free to call or have own security concepts
NO_SECURITY_CHECK: Final = re.compile(
NO_SECURITY_CHECK = re.compile(
r"^(?:"
r"|/homeassistant/api/.*"
r"|/homeassistant/websocket"
r"|/core/api/.*"
r"|/core/websocket"
r"|/supervisor/ping"
r"|/ingress/[-_A-Za-z0-9]+/.*"
+ _CORE_FRONTEND_PATHS
+ r")$"
r")$"
)
# Observer allowed API calls
OBSERVER_CHECK: Final = re.compile(
OBSERVER_CHECK = re.compile(
r"^(?:"
r"|/.+/info"
r")$"
)
# Can be called by every add-on
ADDONS_API_BYPASS: Final = re.compile(
ADDONS_API_BYPASS = re.compile(
r"^(?:"
r"|/addons/self/(?!security|update)[^/]+"
r"|/addons/self/options/config"
@@ -80,15 +59,8 @@ ADDONS_API_BYPASS: Final = re.compile(
r")$"
)
# Home Assistant only
CORE_ONLY_PATHS: Final = re.compile(
r"^(?:"
r"/addons/" + RE_SLUG + "/sys_options"
r")$"
)
# Policy role add-on API access
ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
ADDONS_ROLE_ACCESS = {
ROLE_DEFAULT: re.compile(
r"^(?:"
r"|/.+/info"
@@ -110,11 +82,9 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
ROLE_MANAGER: re.compile(
r"^(?:"
r"|/.+/info"
r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
r"|/addons(?:/[^/]+/(?!security).+|/reload)?"
r"|/audio/.+"
r"|/auth/cache"
r"|/available_updates"
r"|/backups.*"
r"|/cli/.+"
r"|/core/.+"
r"|/dns/.+"
@@ -124,17 +94,16 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
r"|/hassos/.+"
r"|/homeassistant/.+"
r"|/host/.+"
r"|/mounts.*"
r"|/multicast/.+"
r"|/network/.+"
r"|/observer/.+"
r"|/os/(?!datadisk/wipe).+"
r"|/refresh_updates"
r"|/os/.+"
r"|/resolution/.+"
r"|/security/.+"
r"|/backups.*"
r"|/snapshots.*"
r"|/store.*"
r"|/supervisor/.+"
r"|/security/.+"
r")$"
),
ROLE_ADMIN: re.compile(
@@ -142,26 +111,6 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
),
}
FILTERS: Final = re.compile(
r"(?:"
# Common exploits
r"proc/self/environ"
r"|(<|%3C).*script.*(>|%3E)"
# File Injections
r"|(\.\.//?)+" # ../../anywhere
r"|[a-zA-Z0-9_]=/([a-z0-9_.]//?)+" # .html?v=/.//test
# SQL Injections
r"|union.*select.*\("
r"|union.*all.*select.*"
r"|concat.*\("
r")",
flags=re.IGNORECASE,
)
# fmt: on
@@ -172,32 +121,6 @@ class SecurityMiddleware(CoreSysAttributes):
"""Initialize security middleware."""
self.coresys: CoreSys = coresys
def _recursive_unquote(self, value: str) -> str:
"""Handle values that are encoded multiple times."""
if (unquoted := unquote(value)) != value:
unquoted = self._recursive_unquote(unquoted)
return unquoted
@middleware
async def block_bad_requests(
self, request: Request, handler: RequestHandler
) -> Response:
"""Process request and tblock commonly known exploit attempts."""
if FILTERS.search(self._recursive_unquote(request.path)):
_LOGGER.warning(
"Filtered a potential harmful request to: %s", request.raw_path
)
raise HTTPBadRequest
if FILTERS.search(self._recursive_unquote(request.query_string)):
_LOGGER.warning(
"Filtered a request with a potential harmful query string: %s",
request.raw_path,
)
raise HTTPBadRequest
return await handler(request)
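The removed middleware still illustrates nicely why the unquoting must recurse: a doubly percent-encoded traversal only surfaces after the second decode pass. A self-contained demonstration using just the traversal rule from FILTERS:

import re
from urllib.parse import unquote

FILTERS = re.compile(r"(\.\.//?)+", re.IGNORECASE)  # only the traversal rule from above


def recursive_unquote(value: str) -> str:
    """Decode until the string stops changing."""
    if (unquoted := unquote(value)) != value:
        return recursive_unquote(unquoted)
    return unquoted


path = "/api/%252e%252e%252f%252e%252e%252fsecret"
assert FILTERS.search(unquote(path)) is None  # a single decode pass misses it
assert FILTERS.search(recursive_unquote(path))  # recursion reveals ../../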
@middleware
async def system_validation(
self, request: Request, handler: RequestHandler
@@ -209,7 +132,7 @@ class SecurityMiddleware(CoreSysAttributes):
CoreState.FREEZE,
):
return api_return_error(
message=f"System is not ready with state: {self.sys_core.state}"
message=f"System is not ready with state: {self.sys_core.state.value}"
)
return await handler(request)
@@ -230,7 +153,6 @@ class SecurityMiddleware(CoreSysAttributes):
# Ignore security check
if NO_SECURITY_CHECK.match(request.path):
_LOGGER.debug("Passthrough %s", request.path)
request[REQUEST_FROM] = None
return await handler(request)
# Not token
@@ -242,9 +164,6 @@ class SecurityMiddleware(CoreSysAttributes):
if supervisor_token == self.sys_homeassistant.supervisor_token:
_LOGGER.debug("%s access from Home Assistant", request.path)
request_from = self.sys_homeassistant
elif CORE_ONLY_PATHS.match(request.path):
_LOGGER.warning("Attempted access to %s from client besides Home Assistant")
raise HTTPForbidden()
# Host
if supervisor_token == self.sys_plugins.cli.supervisor_token:
@@ -286,46 +205,3 @@ class SecurityMiddleware(CoreSysAttributes):
_LOGGER.error("Invalid token for access %s", request.path)
raise HTTPForbidden()
@middleware
async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
"""Validate user from Core API proxy."""
if (
request[REQUEST_FROM] != self.sys_homeassistant
or self.sys_homeassistant.version == LANDINGPAGE
or version_is_new_enough(self.sys_homeassistant.version, _CORE_VERSION)
):
return await handler(request)
authorization_index: int | None = None
content_type_index: int | None = None
user_request: bool = False
admin_request: bool = False
ingress_request: bool = False
for idx, (key, value) in enumerate(request.raw_headers):
if key in (b"Authorization", b"X-Hassio-Key"):
authorization_index = idx
elif key == b"Content-Type":
content_type_index = idx
elif key == b"X-Hass-User-ID":
user_request = True
elif key == b"X-Hass-Is-Admin":
admin_request = value == b"1"
elif key == b"X-Ingress-Path":
ingress_request = True
if (user_request or admin_request) and not ingress_request:
return await handler(request)
is_proxy_request = (
authorization_index is not None
and content_type_index is not None
and content_type_index - authorization_index == 1
)
if (
not CORE_FRONTEND.match(request.path) and is_proxy_request
) or ingress_request:
raise HTTPBadRequest()
return await handler(request)
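The heuristic above flags a request as having come through Core's API proxy when the auth header is immediately followed by Content-Type in the raw header list. Restated as a pure function over (key, value) byte pairs, for illustration only:

def looks_like_core_proxy(raw_headers: list[tuple[bytes, bytes]]) -> bool:
    authorization_index: int | None = None
    content_type_index: int | None = None
    for idx, (key, _value) in enumerate(raw_headers):
        if key in (b"Authorization", b"X-Hassio-Key"):
            authorization_index = idx
        elif key == b"Content-Type":
            content_type_index = idx
    return (
        authorization_index is not None
        and content_type_index is not None
        and content_type_index - authorization_index == 1  # adjacency is the tell
    )


assert looks_like_core_proxy(
    [(b"Authorization", b"Bearer x"), (b"Content-Type", b"application/json")]
)
assert not looks_like_core_proxy(
    [(b"Content-Type", b"application/json"), (b"Authorization", b"Bearer x")]
)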


@@ -1,124 +0,0 @@
"""Inits file for supervisor mounts REST API."""
from typing import Any
from aiohttp import web
import voluptuous as vol
from ..const import ATTR_NAME, ATTR_STATE
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
from ..mounts.mount import Mount
from ..mounts.validate import SCHEMA_MOUNT_CONFIG
from .const import ATTR_MOUNTS
from .utils import api_process, api_validate
SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_DEFAULT_BACKUP_MOUNT): vol.Maybe(str),
}
)
class APIMounts(CoreSysAttributes):
"""Handle REST API for mounting options."""
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return MountManager info."""
return {
ATTR_DEFAULT_BACKUP_MOUNT: self.sys_mounts.default_backup_mount.name
if self.sys_mounts.default_backup_mount
else None,
ATTR_MOUNTS: [
mount.to_dict() | {ATTR_STATE: mount.state}
for mount in self.sys_mounts.mounts
],
}
@api_process
async def options(self, request: web.Request) -> None:
"""Set Mount Manager options."""
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_DEFAULT_BACKUP_MOUNT in body:
name: str | None = body[ATTR_DEFAULT_BACKUP_MOUNT]
if name is None:
self.sys_mounts.default_backup_mount = None
elif (mount := self.sys_mounts.get(name)).usage != MountUsage.BACKUP:
raise APIError(
f"Mount {name} is not used for backups, cannot use it as default backup mount"
)
else:
self.sys_mounts.default_backup_mount = mount
self.sys_mounts.save_data()
@api_process
async def create_mount(self, request: web.Request) -> None:
"""Create a new mount in supervisor."""
body = await api_validate(SCHEMA_MOUNT_CONFIG, request)
if body[ATTR_NAME] in self.sys_mounts:
raise APIError(f"A mount already exists with name {body[ATTR_NAME]}")
mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount)
# If it's a backup mount, reload backups
if mount.usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload())
# If there's no default backup mount, set it to the new mount
if not self.sys_mounts.default_backup_mount:
self.sys_mounts.default_backup_mount = mount
self.sys_mounts.save_data()
@api_process
async def update_mount(self, request: web.Request) -> None:
"""Update an existing mount in supervisor."""
name = request.match_info.get("mount")
name_schema = vol.Schema(
{vol.Optional(ATTR_NAME, default=name): name}, extra=vol.ALLOW_EXTRA
)
body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)
if name not in self.sys_mounts:
raise APIError(f"No mount exists with name {name}")
mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount)
# If it's a backup mount, reload backups
if mount.usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload())
# If this mount was the default backup mount and isn't for backups any more, remove it
elif self.sys_mounts.default_backup_mount == mount:
self.sys_mounts.default_backup_mount = None
self.sys_mounts.save_data()
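The update handler composes two schemas so the mount name from the URL path is injected as a default and simultaneously pinned: in voluptuous, a literal value used as a validator requires equality. A minimal reproduction (the field names here are placeholders, not the full SCHEMA_MOUNT_CONFIG):

import voluptuous as vol

SCHEMA_MOUNT = vol.Schema(
    {vol.Required("name"): str, vol.Required("usage"): str}, extra=vol.ALLOW_EXTRA
)


def validate_update(path_name: str, body: dict) -> dict:
    name_schema = vol.Schema(
        {vol.Optional("name", default=path_name): path_name}, extra=vol.ALLOW_EXTRA
    )
    return vol.All(name_schema, SCHEMA_MOUNT)(body)


print(validate_update("media", {"usage": "media"}))
# {'name': 'media', 'usage': 'media'} -- a body naming a different mount fails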
@api_process
async def delete_mount(self, request: web.Request) -> None:
"""Delete an existing mount in supervisor."""
name = request.match_info.get("mount")
mount = await self.sys_mounts.remove_mount(name)
# If it was a backup mount, reload backups
if mount.usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload())
self.sys_mounts.save_data()
@api_process
async def reload_mount(self, request: web.Request) -> None:
"""Reload an existing mount in supervisor."""
name = request.match_info.get("mount")
await self.sys_mounts.reload_mount(name)
# If it's a backup mount, reload backups
if self.sys_mounts.get(name).usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload())
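Every backup-mount path in this file ends the same way: schedule a backup reload as a detached task so the HTTP response returns immediately. The plain-asyncio shape of that pattern, with create_task standing in for sys_create_task:

import asyncio


async def reload_backups() -> None:
    await asyncio.sleep(0.1)  # stand-in for rescanning backup mounts
    print("backups reloaded")


async def handle_request() -> str:
    task = asyncio.create_task(reload_backups())  # detached, like sys_create_task
    task.add_done_callback(lambda t: t.exception())  # surface errors, silence warnings
    return "ok"  # respond without waiting for the reload


async def main() -> None:
    print(await handle_request())
    await asyncio.sleep(0.2)  # demo only: let the detached task finish


asyncio.run(main())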


@@ -1,9 +1,7 @@
"""Init file for Supervisor Multicast RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from typing import Any, Awaitable
from aiohttp import web
import voluptuous as vol
@@ -24,7 +22,8 @@ from ..const import (
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import version_tag
from .utils import api_process, api_validate
from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -69,6 +68,11 @@ class APIMulticast(CoreSysAttributes):
raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.multicast.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Multicast Docker logs."""
return self.sys_plugins.multicast.logs()
@api_process
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Multicast plugin."""


@@ -1,11 +1,10 @@
"""REST API for network."""
import asyncio
from collections.abc import Awaitable
from ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface
from typing import Any
from ipaddress import ip_address, ip_interface
from typing import Any, Awaitable
from aiohttp import web
import attr
import voluptuous as vol
from ..const import (
@@ -31,7 +30,6 @@ from ..const import (
ATTR_PARENT,
ATTR_PRIMARY,
ATTR_PSK,
ATTR_READY,
ATTR_SIGNAL,
ATTR_SSID,
ATTR_SUPERVISOR_INTERNET,
@@ -43,33 +41,23 @@ from ..const import (
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, HostNetworkNotFound
from ..host.configuration import (
from ..host.const import AuthMethod, InterfaceType, WifiMode
from ..host.network import (
AccessPoint,
Interface,
InterfaceMethod,
IpConfig,
IpSetting,
VlanConfig,
WifiConfig,
)
from ..host.const import AuthMethod, InterfaceType, WifiMode
from .utils import api_process, api_validate
_SCHEMA_IPV4_CONFIG = vol.Schema(
_SCHEMA_IP_CONFIG = vol.Schema(
{
vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv4Interface)],
vol.Optional(ATTR_ADDRESS): [vol.Coerce(ip_interface)],
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv4Address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv4Address)],
}
)
_SCHEMA_IPV6_CONFIG = vol.Schema(
{
vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv6Interface)],
vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv6Address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv6Address)],
vol.Optional(ATTR_GATEWAY): vol.Coerce(ip_address),
vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(ip_address)],
}
)
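Splitting the previously shared _SCHEMA_IP_CONFIG into per-family schemas means voluptuous now enforces the address family at validation time instead of accepting any ip_address. A compact standalone check (vol.Coerce turns the underlying AddressValueError into vol.Invalid):

from ipaddress import IPv6Address, IPv6Interface

import voluptuous as vol

SCHEMA_IPV6 = vol.Schema(
    {
        vol.Optional("address"): [vol.Coerce(IPv6Interface)],
        vol.Optional("gateway"): vol.Coerce(IPv6Address),
    }
)

SCHEMA_IPV6({"address": ["2001:db8::2/64"], "gateway": "2001:db8::1"})  # passes
try:
    SCHEMA_IPV6({"gateway": "192.168.1.1"})  # an IPv4 gateway no longer coerces
except vol.Invalid as err:
    print("rejected:", err)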
@@ -86,22 +74,21 @@ _SCHEMA_WIFI_CONFIG = vol.Schema(
# pylint: disable=no-value-for-parameter
SCHEMA_UPDATE = vol.Schema(
{
vol.Optional(ATTR_IPV4): _SCHEMA_IPV4_CONFIG,
vol.Optional(ATTR_IPV6): _SCHEMA_IPV6_CONFIG,
vol.Optional(ATTR_IPV4): _SCHEMA_IP_CONFIG,
vol.Optional(ATTR_IPV6): _SCHEMA_IP_CONFIG,
vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG,
vol.Optional(ATTR_ENABLED): vol.Boolean(),
}
)
def ipconfig_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]:
def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
"""Return a dict with information about ip configuration."""
return {
ATTR_METHOD: setting.method,
ATTR_METHOD: config.method,
ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
ATTR_READY: config.ready,
}
@@ -131,9 +118,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
ATTR_ENABLED: interface.enabled,
ATTR_CONNECTED: interface.connected,
ATTR_PRIMARY: interface.primary,
ATTR_MAC: interface.mac,
ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting),
ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting),
ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
}
@@ -207,26 +193,22 @@ class APINetwork(CoreSysAttributes):
# Apply config
for key, config in body.items():
if key == ATTR_IPV4:
interface.ipv4setting = IpSetting(
config.get(ATTR_METHOD, InterfaceMethod.STATIC),
config.get(ATTR_ADDRESS, []),
config.get(ATTR_GATEWAY),
config.get(ATTR_NAMESERVERS, []),
interface.ipv4 = attr.evolve(
interface.ipv4 or IpConfig(InterfaceMethod.STATIC, [], None, []),
**config,
)
elif key == ATTR_IPV6:
interface.ipv6setting = IpSetting(
config.get(ATTR_METHOD, InterfaceMethod.STATIC),
config.get(ATTR_ADDRESS, []),
config.get(ATTR_GATEWAY),
config.get(ATTR_NAMESERVERS, []),
interface.ipv6 = attr.evolve(
interface.ipv6 or IpConfig(InterfaceMethod.STATIC, [], None, []),
**config,
)
elif key == ATTR_WIFI:
interface.wifi = WifiConfig(
config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE),
config.get(ATTR_SSID, ""),
config.get(ATTR_AUTH, AuthMethod.OPEN),
config.get(ATTR_PSK, None),
None,
interface.wifi = attr.evolve(
interface.wifi
or WifiConfig(
WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
),
**config,
)
elif key == ATTR_ENABLED:
interface.enabled = config
@@ -236,9 +218,7 @@ class APINetwork(CoreSysAttributes):
@api_process
def reload(self, request: web.Request) -> Awaitable[None]:
"""Reload network data."""
return asyncio.shield(
self.sys_host.network.update(force_connectivity_check=True)
)
return asyncio.shield(self.sys_host.network.update())
@api_process
async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]:
@@ -268,18 +248,18 @@ class APINetwork(CoreSysAttributes):
vlan_config = VlanConfig(vlan, interface.name)
ipv4_setting = None
ipv4_config = None
if ATTR_IPV4 in body:
ipv4_setting = IpSetting(
ipv4_config = IpConfig(
body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
body[ATTR_IPV4].get(ATTR_ADDRESS, []),
body[ATTR_IPV4].get(ATTR_GATEWAY, None),
body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
)
ipv6_setting = None
ipv6_config = None
if ATTR_IPV6 in body:
ipv6_setting = IpSetting(
ipv6_config = IpConfig(
body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
body[ATTR_IPV6].get(ATTR_ADDRESS, []),
body[ATTR_IPV6].get(ATTR_GATEWAY, None),
@@ -287,17 +267,13 @@ class APINetwork(CoreSysAttributes):
)
vlan_interface = Interface(
"",
"",
"",
True,
True,
False,
InterfaceType.VLAN,
None,
ipv4_setting,
None,
ipv6_setting,
ipv4_config,
ipv6_config,
None,
vlan_config,
)


@@ -1,5 +1,4 @@
"""Init file for Supervisor Observer RESTful API."""
import asyncio
import logging
from typing import Any


@@ -1,71 +1,29 @@
"""Init file for Supervisor HassOS RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from pathlib import Path
from typing import Any, Awaitable
from aiohttp import web
import voluptuous as vol
from ..const import (
ATTR_ACTIVITY_LED,
ATTR_BOARD,
ATTR_BOOT,
ATTR_DEVICES,
ATTR_DISK_LED,
ATTR_HEARTBEAT_LED,
ATTR_ID,
ATTR_NAME,
ATTR_POWER_LED,
ATTR_SERIAL,
ATTR_SIZE,
ATTR_STATE,
ATTR_UPDATE_AVAILABLE,
ATTR_VERSION,
ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from ..exceptions import BoardInvalidError
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..validate import version_tag
from .const import (
ATTR_BOOT_SLOT,
ATTR_BOOT_SLOTS,
ATTR_DATA_DISK,
ATTR_DEV_PATH,
ATTR_DEVICE,
ATTR_DISKS,
ATTR_MODEL,
ATTR_STATUS,
ATTR_SYSTEM_HEALTH_LED,
ATTR_VENDOR,
BootSlot,
)
from .const import ATTR_DATA_DISK, ATTR_DEVICE
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)})
SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})
SCHEMA_YELLOW_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_DISK_LED): vol.Boolean(),
vol.Optional(ATTR_HEARTBEAT_LED): vol.Boolean(),
vol.Optional(ATTR_POWER_LED): vol.Boolean(),
}
)
SCHEMA_GREEN_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_ACTIVITY_LED): vol.Boolean(),
vol.Optional(ATTR_POWER_LED): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
}
)
# pylint: enable=no-value-for-parameter
SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): vol.All(str, vol.Coerce(Path))})
class APIOS(CoreSysAttributes):
@@ -80,16 +38,7 @@ class APIOS(CoreSysAttributes):
ATTR_UPDATE_AVAILABLE: self.sys_os.need_update,
ATTR_BOARD: self.sys_os.board,
ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id,
ATTR_BOOT_SLOTS: {
slot.bootname: {
ATTR_STATE: slot.state,
ATTR_STATUS: slot.boot_status,
ATTR_VERSION: slot.bundle_version,
}
for slot in self.sys_os.slots
if slot.bootname
},
ATTR_DATA_DISK: self.sys_os.datadisk.disk_used,
}
@api_process
@@ -112,103 +61,9 @@ class APIOS(CoreSysAttributes):
await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))
@api_process
def wipe_data(self, request: web.Request) -> Awaitable[None]:
"""Trigger data disk wipe on Host."""
return asyncio.shield(self.sys_os.datadisk.wipe_disk())
@api_process
async def set_boot_slot(self, request: web.Request) -> None:
"""Change the active boot slot and reboot into it."""
body = await api_validate(SCHEMA_SET_BOOT_SLOT, request)
await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT]))
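set_boot_slot validates its body against a tiny enum before shielding the actual slot switch. A stand-in reproduction (the "A"/"B" values are assumed from RAUC's slot naming; the real BootSlot lives in the API consts):

from enum import Enum

import voluptuous as vol


class BootSlot(str, Enum):
    A = "A"
    B = "B"


SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required("boot_slot"): vol.Coerce(BootSlot)})

print(SCHEMA_SET_BOOT_SLOT({"boot_slot": "B"}))  # {'boot_slot': <BootSlot.B: 'B'>}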
@api_process
async def list_data(self, request: web.Request) -> dict[str, Any]:
"""Return possible data targets."""
return {
ATTR_DEVICES: [disk.id for disk in self.sys_os.datadisk.available_disks],
ATTR_DISKS: [
{
ATTR_NAME: disk.name,
ATTR_VENDOR: disk.vendor,
ATTR_MODEL: disk.model,
ATTR_SERIAL: disk.serial,
ATTR_SIZE: disk.size,
ATTR_ID: disk.id,
ATTR_DEV_PATH: disk.device_path.as_posix(),
}
for disk in self.sys_os.datadisk.available_disks
],
ATTR_DEVICES: self.sys_os.datadisk.available_disks,
}
@api_process
async def boards_green_info(self, request: web.Request) -> dict[str, Any]:
"""Get green board settings."""
return {
ATTR_ACTIVITY_LED: self.sys_dbus.agent.board.green.activity_led,
ATTR_POWER_LED: self.sys_dbus.agent.board.green.power_led,
ATTR_SYSTEM_HEALTH_LED: self.sys_dbus.agent.board.green.user_led,
}
@api_process
async def boards_green_options(self, request: web.Request) -> None:
"""Update green board settings."""
body = await api_validate(SCHEMA_GREEN_OPTIONS, request)
if ATTR_ACTIVITY_LED in body:
await self.sys_dbus.agent.board.green.set_activity_led(
body[ATTR_ACTIVITY_LED]
)
if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED])
if ATTR_SYSTEM_HEALTH_LED in body:
await self.sys_dbus.agent.board.green.set_user_led(
body[ATTR_SYSTEM_HEALTH_LED]
)
self.sys_dbus.agent.board.green.save_data()
@api_process
async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
"""Get yellow board settings."""
return {
ATTR_DISK_LED: self.sys_dbus.agent.board.yellow.disk_led,
ATTR_HEARTBEAT_LED: self.sys_dbus.agent.board.yellow.heartbeat_led,
ATTR_POWER_LED: self.sys_dbus.agent.board.yellow.power_led,
}
@api_process
async def boards_yellow_options(self, request: web.Request) -> None:
"""Update yellow board settings."""
body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)
if ATTR_DISK_LED in body:
await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED])
if ATTR_HEARTBEAT_LED in body:
await self.sys_dbus.agent.board.yellow.set_heartbeat_led(
body[ATTR_HEARTBEAT_LED]
)
if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])
self.sys_dbus.agent.board.yellow.save_data()
self.sys_resolution.create_issue(
IssueType.REBOOT_REQUIRED,
ContextType.SYSTEM,
suggestions=[SuggestionType.EXECUTE_REBOOT],
)
@api_process
async def boards_other_info(self, request: web.Request) -> dict[str, Any]:
"""Empty success return if board is in use, error otherwise."""
if request.match_info["board"] != self.sys_os.board:
raise BoardInvalidError(
f"{request.match_info['board']} board is not in use", _LOGGER.error
)
return {}


@@ -1 +1,16 @@
!function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-qzB1D0O4L9U.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js")}}()
function loadES5() {
var el = document.createElement('script');
el.src = '/api/hassio/app/frontend_es5/entrypoint.75b60951.js';
document.body.appendChild(el);
}
if (/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent)) {
loadES5();
} else {
try {
new Function("import('/api/hassio/app/frontend_latest/entrypoint.f358ba39.js')")();
} catch (err) {
loadES5();
}
}



@@ -0,0 +1 @@
!function(){"use strict";var t,n,e={14971:function(t,n,e){var r,i,o=e(93217),u=e(69330),a=(e(58556),e(62173)),c=function(t,n,e){if("input"===t){if("type"===n&&"checkbox"===e||"checked"===n||"disabled"===n)return;return""}},f={renderMarkdown:function(t,n){var e,o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return r||(r=Object.assign({},(0,a.getDefaultWhiteList)(),{input:["type","disabled","checked"],"ha-icon":["icon"],"ha-svg-icon":["path"],"ha-alert":["alert-type","title"]})),o.allowSvg?(i||(i=Object.assign({},r,{svg:["xmlns","height","width"],path:["transform","stroke","d"],img:["src"]})),e=i):e=r,(0,a.filterXSS)((0,u.TU)(t,n),{whiteList:e,onTagAttr:c})}};(0,o.Jj)(f)}},r={};function i(t){var n=r[t];if(void 0!==n)return n.exports;var o=r[t]={exports:{}};return e[t](o,o.exports,i),o.exports}i.m=e,i.x=function(){var t=i.O(void 0,[191,752],(function(){return i(14971)}));return t=i.O(t)},t=[],i.O=function(n,e,r,o){if(!e){var u=1/0;for(s=0;s<t.length;s++){e=t[s][0],r=t[s][1],o=t[s][2];for(var a=!0,c=0;c<e.length;c++)(!1&o||u>=o)&&Object.keys(i.O).every((function(t){return i.O[t](e[c])}))?e.splice(c--,1):(a=!1,o<u&&(u=o));if(a){t.splice(s--,1);var f=r();void 0!==f&&(n=f)}}return n}o=o||0;for(var s=t.length;s>0&&t[s-1][2]>o;s--)t[s]=t[s-1];t[s]=[e,r,o]},i.n=function(t){var n=t&&t.__esModule?function(){return t.default}:function(){return t};return i.d(n,{a:n}),n},i.d=function(t,n){for(var e in n)i.o(n,e)&&!i.o(t,e)&&Object.defineProperty(t,e,{enumerable:!0,get:n[e]})},i.f={},i.e=function(t){return Promise.all(Object.keys(i.f).reduce((function(n,e){return i.f[e](t,n),n}),[]))},i.u=function(t){return{191:"2dbdaab4",752:"829db8ac"}[t]+".js"},i.o=function(t,n){return Object.prototype.hasOwnProperty.call(t,n)},i.p="/api/hassio/app/frontend_es5/",function(){var t={971:1};i.f.i=function(n,e){t[n]||importScripts(i.p+i.u(n))};var n=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],e=n.push.bind(n);n.push=function(n){var r=n[0],o=n[1],u=n[2];for(var a in o)i.o(o,a)&&(i.m[a]=o[a]);for(u&&u(i);r.length;)t[r.pop()]=1;e(n)}}(),n=i.x,i.x=function(){return Promise.all([i.e(191),i.e(752)]).then(n)};i.x()}();



@@ -0,0 +1,14 @@
/*! *****************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License"); you may not use
this file except in compliance with the License. You may obtain a copy of the
License at http://www.apache.org/licenses/LICENSE-2.0
THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
MERCHANTABLITY OR NON-INFRINGEMENT.
See the Apache Version 2.0 License for specific language governing permissions
and limitations under the License.
***************************************************************************** */



@@ -1,2 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[1047],{32594:function(e,t,r){r.d(t,{U:function(){return n}});var n=function(e){return e.stopPropagation()}},75054:function(e,t,r){r.r(t),r.d(t,{HaTimeDuration:function(){return f}});var n,a=r(88962),i=r(33368),o=r(71650),d=r(82390),u=r(69205),l=r(70906),s=r(91808),c=r(68144),v=r(79932),f=(r(47289),(0,s.Z)([(0,v.Mo)("ha-selector-duration")],(function(e,t){var r=function(t){(0,u.Z)(n,t);var r=(0,l.Z)(n);function n(){var t;(0,o.Z)(this,n);for(var a=arguments.length,i=new Array(a),u=0;u<a;u++)i[u]=arguments[u];return t=r.call.apply(r,[this].concat(i)),e((0,d.Z)(t)),t}return(0,i.Z)(n)}(t);return{F:r,d:[{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"value",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"required",value:function(){return!0}},{kind:"method",key:"render",value:function(){var e;return(0,c.dy)(n||(n=(0,a.Z)([' <ha-duration-input .label="','" .helper="','" .data="','" .disabled="','" .required="','" ?enableDay="','"></ha-duration-input> '])),this.label,this.helper,this.value,this.disabled,this.required,null===(e=this.selector.duration)||void 0===e?void 0:e.enable_day)}}]}}),c.oi))}}]);
//# sourceMappingURL=1047-g7fFLS9eP4I.js.map


@@ -1 +0,0 @@
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}



@@ -1,2 +0,0 @@
!function(){"use strict";var n,t,e={14595:function(n,t,e){e(58556);var r,i,o=e(93217),u=e(422),a=e(62173),s=function(n,t,e){if("input"===n){if("type"===t&&"checkbox"===e||"checked"===t||"disabled"===t)return;return""}},c={renderMarkdown:function(n,t){var e,o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return r||(r=Object.assign(Object.assign({},(0,a.getDefaultWhiteList)()),{},{input:["type","disabled","checked"],"ha-icon":["icon"],"ha-svg-icon":["path"],"ha-alert":["alert-type","title"]})),o.allowSvg?(i||(i=Object.assign(Object.assign({},r),{},{svg:["xmlns","height","width"],path:["transform","stroke","d"],img:["src"]})),e=i):e=r,(0,a.filterXSS)((0,u.TU)(n,t),{whiteList:e,onTagAttr:s})}};(0,o.Jj)(c)}},r={};function i(n){var t=r[n];if(void 0!==t)return t.exports;var o=r[n]={exports:{}};return e[n](o,o.exports,i),o.exports}i.m=e,i.x=function(){var n=i.O(void 0,[9191,215],(function(){return i(14595)}));return n=i.O(n)},n=[],i.O=function(t,e,r,o){if(!e){var u=1/0;for(f=0;f<n.length;f++){e=n[f][0],r=n[f][1],o=n[f][2];for(var a=!0,s=0;s<e.length;s++)(!1&o||u>=o)&&Object.keys(i.O).every((function(n){return i.O[n](e[s])}))?e.splice(s--,1):(a=!1,o<u&&(u=o));if(a){n.splice(f--,1);var c=r();void 0!==c&&(t=c)}}return t}o=o||0;for(var f=n.length;f>0&&n[f-1][2]>o;f--)n[f]=n[f-1];n[f]=[e,r,o]},i.n=function(n){var t=n&&n.__esModule?function(){return n.default}:function(){return n};return i.d(t,{a:t}),t},i.d=function(n,t){for(var e in t)i.o(t,e)&&!i.o(n,e)&&Object.defineProperty(n,e,{enumerable:!0,get:t[e]})},i.f={},i.e=function(n){return Promise.all(Object.keys(i.f).reduce((function(t,e){return i.f[e](n,t),t}),[]))},i.u=function(n){return n+"-"+{215:"FPZmDYZTPdk",9191:"37260H-osZ4"}[n]+".js"},i.o=function(n,t){return Object.prototype.hasOwnProperty.call(n,t)},i.p="/api/hassio/app/frontend_es5/",function(){var n={1402:1};i.f.i=function(t,e){n[t]||importScripts(i.p+i.u(t))};var t=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],e=t.push.bind(t);t.push=function(t){var r=t[0],o=t[1],u=t[2];for(var a in o)i.o(o,a)&&(i.m[a]=o[a]);for(u&&u(i);r.length;)n[r.pop()]=1;e(t)}}(),t=i.x,i.x=function(){return Promise.all([i.e(9191),i.e(215)]).then(t)};i.x()}();
//# sourceMappingURL=1402-6WKUruvoXtM.js.map


@@ -1 +0,0 @@
{"version":3,"file":"1402-6WKUruvoXtM.js","mappings":"6BAAIA,ECAAC,E,sCCMAC,EACAC,E,+BAMEC,EAAY,SAChBC,EACAC,EACAC,GAEA,GAAY,UAARF,EAAiB,CACnB,GACY,SAATC,GAA6B,aAAVC,GACX,YAATD,GACS,aAATA,EAEA,OAEF,MAAO,EACT,CAEF,EA0CME,EAAM,CACVC,eAzCqB,SACrBC,EACAC,GAKW,IAWPC,EAfJC,EAGCC,UAAAC,OAAA,QAAAC,IAAAF,UAAA,GAAAA,UAAA,GAAG,CAAC,EA4BL,OA1BKZ,IACHA,EAAee,OAAAC,OAAAD,OAAAC,OAAA,IACVC,EAAAA,EAAAA,wBAAqB,IACxBC,MAAO,CAAC,OAAQ,WAAY,WAC5B,UAAW,CAAC,QACZ,cAAe,CAAC,QAChB,WAAY,CAAC,aAAc,YAM3BP,EAAYQ,UACTlB,IACHA,EAAYc,OAAAC,OAAAD,OAAAC,OAAA,GACPhB,GAAe,IAClBoB,IAAK,CAAC,QAAS,SAAU,SACzBC,KAAM,CAAC,YAAa,SAAU,KAC9BC,IAAK,CAAC,UAGVZ,EAAYT,GAEZS,EAAYV,GAGPuB,EAAAA,EAAAA,YAAUC,EAAAA,EAAAA,IAAOhB,EAASC,GAAgB,CAC/CC,UAAAA,EACAR,UAAAA,GAEJ,IAQAuB,EAAAA,EAAAA,IAAOnB,E,GC5EHoB,EAA2B,CAAC,EAGhC,SAASC,EAAoBC,GAE5B,IAAIC,EAAeH,EAAyBE,GAC5C,QAAqBd,IAAjBe,EACH,OAAOA,EAAaC,QAGrB,IAAIC,EAASL,EAAyBE,GAAY,CAGjDE,QAAS,CAAC,GAOX,OAHAE,EAAoBJ,GAAUG,EAAQA,EAAOD,QAASH,GAG/CI,EAAOD,OACf,CAGAH,EAAoBM,EAAID,EAGxBL,EAAoBO,EAAI,WAGvB,IAAIC,EAAsBR,EAAoBS,OAAEtB,EAAW,CAAC,KAAK,MAAM,WAAa,OAAOa,EAAoB,MAAQ,IAEvH,OADAQ,EAAsBR,EAAoBS,EAAED,EAE7C,EHlCIrC,EAAW,GACf6B,EAAoBS,EAAI,SAASC,EAAQC,EAAUC,EAAIC,GACtD,IAAGF,EAAH,CAMA,IAAIG,EAAeC,IACnB,IAASC,EAAI,EAAGA,EAAI7C,EAASe,OAAQ8B,IAAK,CACrCL,EAAWxC,EAAS6C,GAAG,GACvBJ,EAAKzC,EAAS6C,GAAG,GACjBH,EAAW1C,EAAS6C,GAAG,GAE3B,IAJA,IAGIC,GAAY,EACPC,EAAI,EAAGA,EAAIP,EAASzB,OAAQgC,MACpB,EAAXL,GAAsBC,GAAgBD,IAAazB,OAAO+B,KAAKnB,EAAoBS,GAAGW,OAAM,SAASC,GAAO,OAAOrB,EAAoBS,EAAEY,GAAKV,EAASO,GAAK,IAChKP,EAASW,OAAOJ,IAAK,IAErBD,GAAY,EACTJ,EAAWC,IAAcA,EAAeD,IAG7C,GAAGI,EAAW,CACb9C,EAASmD,OAAON,IAAK,GACrB,IAAIO,EAAIX,SACEzB,IAANoC,IAAiBb,EAASa,EAC/B,CACD,CACA,OAAOb,CArBP,CAJCG,EAAWA,GAAY,EACvB,IAAI,IAAIG,EAAI7C,EAASe,OAAQ8B,EAAI,GAAK7C,EAAS6C,EAAI,GAAG,GAAKH,EAAUG,IAAK7C,EAAS6C,GAAK7C,EAAS6C,EAAI,GACrG7C,EAAS6C,GAAK,CAACL,EAAUC,EAAIC,EAwB/B,EI5BAb,EAAoBwB,EAAI,SAASpB,GAChC,IAAIqB,EAASrB,GAAUA,EAAOsB,WAC7B,WAAa,OAAOtB,EAAgB,OAAG,EACvC,WAAa,OAAOA,CAAQ,EAE7B,OADAJ,EAAoB2B,EAAEF,EAAQ,CAAEG,EAAGH,IAC5BA,CACR,ECNAzB,EAAoB2B,EAAI,SAASxB,EAAS0B,GACzC,IAAI,IAAIR,KAAOQ,EACX7B,EAAoB8B,EAAED,EAAYR,KAASrB,EAAoB8B,EAAE3B,EAASkB,IAC5EjC,OAAO2C,eAAe5B,EAASkB,EAAK,CAAEW,YAAY,EAAMC,IAAKJ,EAAWR,IAG3E,ECPArB,EAAoBkC,EAAI,CAAC,EAGzBlC,EAAoBmC,EAAI,SAASC,GAChC,OAAOC,QAAQC,IAAIlD,OAAO+B,KAAKnB,EAAoBkC,GAAGK,QAAO,SAASC,EAAUnB,GAE/E,OADArB,EAAoBkC,EAAEb,GAAKe,EAASI,GAC7BA,CACR,GAAG,IACJ,ECPAxC,EAAoByC,EAAI,SAASL,GAEhC,OAAYA,EAAU,IAAM,CAAC,IAAM,cAAc,KAAO,eAAeA,GAAW,KACnF,ECJApC,EAAoB8B,EAAI,SAASY,EAAKC,GAAQ,OAAOvD,OAAOwD,UAAUC,eAAeC,KAAKJ,EAAKC,EAAO,ECAtG3C,EAAoB+C,EAAI,gC,WCIxB,IAAIC,EAAkB,CACrB,KAAM,GAkBPhD,EAAoBkC,EAAElB,EAAI,SAASoB,EAASI,GAEvCQ,EAAgBZ,IAElBa,cAAcjD,EAAoB+C,EAAI/C,EAAoByC,EAAEL,GAG/D,EAEA,IAAIc,EAAqBC,KAA0C,oCAAIA,KAA0C,qCAAK,GAClHC,EAA6BF,EAAmBG,KAAKC,KAAKJ,GAC9DA,EAAmBG,KAzBA,SAASE,GAC3B,IAAI5C,EAAW4C,EAAK,GAChBC,EAAcD,EAAK,GACnBE,EAAUF,EAAK,GACnB,IAAI,IAAItD,KAAYuD,EAChBxD,EAAoB8B,EAAE0B,EAAavD,KACrCD,EAAoBM,EAAEL,GAAYuD,EAAYvD,IAIhD,IADGwD,GAASA,EAAQzD,GACdW,EAASzB,QACd8D,EAAgBrC,EAAS+C,OAAS,EACnCN,EAA2BG,EAC5B,C,ITtBInF,EAAO4B,EAAoBO,EAC/BP,EAAoBO,EAAI,WACvB,OAAO8B,QAAQC,IAAI,CAClBtC,EAAoBmC,EAAE,MACtBnC,EAAoBmC,EAAE,OACpBwB,KAAKvF,EACT,EUL0B4B,EAAoBO,G","sources":["no-source/webpack/runtime/chunk loaded","no-source/webpack/runtime/startup chunk dependencies","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/resources/markdown-worker.ts","no-source/webpack/bootstrap","no-source/webpack/runtime/compat get default export","no-source/webpack/runtime/define property 
getters","no-source/webpack/runtime/ensure chunk","no-source/webpack/runtime/get javascript chunk filename","no-source/webpack/runtime/hasOwnProperty shorthand","no-source/webpack/runtime/publicPath","no-source/webpack/runtime/importScripts chunk loading","no-source/webpack/startup"],"names":["deferred","next","whiteListNormal","whiteListSvg","onTagAttr","tag","name","value","api","renderMarkdown","content","markedOptions","whiteList","hassOptions","arguments","length","undefined","Object","assign","getDefaultWhiteList","input","allowSvg","svg","path","img","filterXSS","marked","expose","__webpack_module_cache__","__webpack_require__","moduleId","cachedModule","exports","module","__webpack_modules__","m","x","__webpack_exports__","O","result","chunkIds","fn","priority","notFulfilled","Infinity","i","fulfilled","j","keys","every","key","splice","r","n","getter","__esModule","d","a","definition","o","defineProperty","enumerable","get","f","e","chunkId","Promise","all","reduce","promises","u","obj","prop","prototype","hasOwnProperty","call","p","installedChunks","importScripts","chunkLoadingGlobal","self","parentChunkLoadingFunction","push","bind","data","moreModules","runtime","pop","then"],"sourceRoot":""}

Some files were not shown because too many files have changed in this diff.