Compare commits


8 Commits

Author SHA1 Message Date
Ludeeus c51496ad2f add missing id 2021-04-13 13:06:32 +00:00
Ludeeus fbe409337b Add parrent_id 2021-04-13 12:55:20 +00:00
Ludeeus 443a43cc5b hex is str 2021-04-13 12:48:38 +00:00
Ludeeus 0e25fad1c0 Merge branch 'main' of github.com:home-assistant/supervisor into context 2021-04-13 12:44:45 +00:00
Ludeeus 488a2327fb Add name setter 2020-12-05 12:58:49 +00:00
Ludeeus b99ed631c5 Add data to job 2020-12-05 12:44:17 +00:00
Ludeeus 726dd3a8f9 Merge branch 'main' of github.com:home-assistant/supervisor into context 2020-12-05 12:29:11 +00:00
Ludeeus b94810d044 init 2020-11-27 16:10:39 +00:00
1966 changed files with 21955 additions and 557088 deletions

.devcontainer/Dockerfile Normal file

@@ -0,0 +1,60 @@
FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.8
ENV DEBIAN_FRONTEND=noninteractive
SHELL ["/bin/bash", "-c"]
WORKDIR /workspaces
# Set Docker daemon config
RUN \
mkdir -p /etc/docker \
&& echo '{"storage-driver": "vfs"}' > /etc/docker/daemon.json
# Install Node/Yarn for Frontend
RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
&& apt-get update \
&& apt-get update && apt-get install -y --no-install-recommends \
curl \
git \
apt-utils \
apt-transport-https \
&& echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
&& apt-get update && apt-get install -y --no-install-recommends \
nodejs \
yarn \
&& curl -o - https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash \
&& rm -rf /var/lib/apt/lists/*
ENV NVM_DIR /root/.nvm
# Install docker
# https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
RUN apt-get update && apt-get install -y --no-install-recommends \
apt-transport-https \
ca-certificates \
curl \
software-properties-common \
gpg-agent \
&& curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
&& add-apt-repository "deb https://download.docker.com/linux/debian $(lsb_release -cs) stable" \
&& apt-get update && apt-get install -y --no-install-recommends \
docker-ce \
docker-ce-cli \
containerd.io \
&& rm -rf /var/lib/apt/lists/*
# Install tools
RUN apt-get update && apt-get install -y --no-install-recommends \
jq \
dbus \
network-manager \
libpulse0 \
&& bash <(curl https://getvcn.codenotary.com -L) \
&& rm -rf /var/lib/apt/lists/*
# Install Python dependencies from requirements.txt if it exists
COPY requirements.txt requirements_tests.txt ./
RUN pip3 install -U setuptools pip \
&& pip3 install -r requirements.txt -r requirements_tests.txt \
&& pip3 install tox \
&& rm -f requirements.txt requirements_tests.txt


@@ -1,40 +1,33 @@
{
"name": "Supervisor dev",
"image": "ghcr.io/home-assistant/devcontainer:supervisor",
"containerEnv": {
"WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
},
"appPort": ["9123:8123", "7357:4357"],
"postCreateCommand": "bash devcontainer_bootstrap",
"context": "..",
"dockerFile": "Dockerfile",
"appPort": "9123:8123",
"postCreateCommand": "pre-commit install",
"runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
"customizations": {
"vscode": {
"extensions": [
"charliermarsh.ruff",
"ms-python.pylint",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"redhat.vscode-yaml",
"esbenp.prettier-vscode",
"GitHub.vscode-pull-request-github"
],
"settings": {
"terminal.integrated.profiles.linux": {
"zsh": {
"path": "/usr/bin/zsh"
}
},
"terminal.integrated.defaultProfile.linux": "zsh",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"python.pythonPath": "/usr/local/bin/python3",
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
}
}
}
},
"mounts": ["type=volume,target=/var/lib/docker"]
"containerEnv": {"NVM_DIR":"/usr/local/share/nvm"},
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance",
"visualstudioexptteam.vscodeintellicode",
"esbenp.prettier-vscode"
],
"settings": {
"terminal.integrated.shell.linux": "/bin/bash",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true,
"python.pythonPath": "/usr/local/bin/python3",
"python.linting.pylintEnabled": true,
"python.linting.enabled": true,
"python.formatting.provider": "black",
"python.formatting.blackArgs": ["--target-version", "py38"],
"python.formatting.blackPath": "/usr/local/bin/black",
"python.linting.banditPath": "/usr/local/bin/bandit",
"python.linting.flake8Path": "/usr/local/bin/flake8",
"python.linting.mypyPath": "/usr/local/bin/mypy",
"python.linting.pylintPath": "/usr/local/bin/pylint",
"python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
}
}


@@ -1,6 +1,8 @@
name: Bug Report Form
description: Report an issue related to the Home Assistant Supervisor.
about: Report an issue related to the Home Assistant Supervisor.
labels: bug
title: ""
issue_body: true
body:
- type: markdown
attributes:
@@ -20,14 +22,22 @@ body:
attributes:
value: |
## Environment
- type: input
validations:
required: true
attributes:
label: What is the used version of the Supervisor?
placeholder: supervisor-
description: >
Can be found in the Supervisor panel -> System tab. Starts with
`supervisor-....`.
- type: dropdown
validations:
required: true
attributes:
label: What type of installation are you running?
description: >
If you don't know, can be found in [Settings -> System -> Repairs -> System Information](https://my.home-assistant.io/redirect/system_health/).
It is listed as the `Installation Type` value.
If you don't know, you can find it in: Configuration panel -> Info.
options:
- Home Assistant OS
- Home Assistant Supervised
@@ -40,6 +50,22 @@ body:
- Home Assistant Operating System
- Debian
- Other (e.g., Raspbian/Raspberry Pi OS/Fedora)
- type: input
validations:
required: true
attributes:
label: What is the version of your installed operating system?
placeholder: "5.11"
description: Can be found in the Supervisor panel -> System tab.
- type: input
validations:
required: true
attributes:
label: What version of Home Assistant Core is installed?
placeholder: core-
description: >
Can be found in the Supervisor panel -> System tab. Starts with
`core-....`.
- type: markdown
attributes:
value: |
@@ -63,34 +89,18 @@ body:
attributes:
label: Anything in the Supervisor logs that might be useful for us?
description: >
Supervisor Logs can be found in [Settings -> System -> Logs](https://my.home-assistant.io/redirect/logs/)
then choose `Supervisor` in the top right.
The Supervisor logs can be found in the Supervisor panel -> System tab.
value: |
```txt
# Put your logs below this line
[![Open your Home Assistant instance and show your Supervisor system logs.](https://my.home-assistant.io/badges/supervisor_logs.svg)](https://my.home-assistant.io/redirect/supervisor_logs/)
render: txt
- type: textarea
validations:
required: true
```
- type: markdown
attributes:
label: System Health information
description: >
System Health information can be found in the top right menu in [Settings -> System -> Repairs](https://my.home-assistant.io/redirect/repairs/).
Click the copy button at the bottom of the pop-up and paste it here.
[![Open your Home Assistant instance and show health information about your system.](https://my.home-assistant.io/badges/system_health.svg)](https://my.home-assistant.io/redirect/system_health/)
- type: textarea
value: |
## Additional information
- type: markdown
attributes:
label: Supervisor diagnostics
placeholder: "drag-and-drop the diagnostics data file here (do not copy-and-paste the content)"
description: >-
Supervisor diagnostics can be found in [Settings -> Integrations](https://my.home-assistant.io/redirect/integrations/).
Find the card that says `Home Assistant Supervisor`, open its menu and select 'Download diagnostics'.
**Please drag-and-drop the downloaded file into the textbox below. Do not copy and paste its contents.**
- type: textarea
attributes:
label: Additional information
description: >
value: |
If you have any additional information for us, use the field below.
Please note, you can attach screenshots or screen recordings here, by
dragging and dropping files in the field below.
Please note, you can attach screenshots or screen recordings here.


@@ -52,7 +52,7 @@
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist]
- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
- [ ] Tests have been added to verify that the new code works.
If API endpoints of add-on configuration are added/changed:


@@ -31,7 +31,6 @@ categories:
- title: ":arrow_up: Dependency Updates"
label: "dependencies"
collapse-after: 1
include-labels:
- "breaking-change"


@@ -27,19 +27,15 @@ on:
paths:
- "rootfs/**"
- "supervisor/**"
- build.yaml
- build.json
- Dockerfile
- requirements.txt
- setup.py
env:
DEFAULT_PYTHON: "3.12"
BUILD_NAME: supervisor
BUILD_TYPE: supervisor
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true
WHEELS_TAG: 3.8-alpine3.13
jobs:
init:
@@ -53,7 +49,7 @@ jobs:
requirements: ${{ steps.requirements.outputs.changed }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.6
uses: actions/checkout@v2
with:
fetch-depth: 0
@@ -70,51 +66,39 @@ jobs:
- name: Get changed files
id: changed_files
if: steps.version.outputs.publish == 'false'
uses: masesgroup/retrieve-changed-files@v3.0.0
uses: jitterbit/get-changed-files@v1
- name: Check if requirements files changed
id: requirements
run: |
if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
echo "changed=true" >> "$GITHUB_OUTPUT"
if [[ "${{ steps.changed_files.outputs.all }}" =~ requirements.txt ]]; then
echo "::set-output name=changed::true"
fi
build:
name: Build ${{ matrix.arch }} supervisor
needs: init
runs-on: ubuntu-latest
permissions:
contents: read
id-token: write
packages: write
strategy:
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.6
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Write env-file
if: needs.init.outputs.requirements == 'true'
run: |
(
# Fix out of memory issues with rust
echo "CARGO_NET_GIT_FETCH_WITH_CLI=true"
) > .env_file
- name: Build wheels
if: needs.init.outputs.requirements == 'true'
uses: home-assistant/wheels@2024.01.0
uses: home-assistant/wheels@master
with:
abi: cp312
tag: musllinux_1_2
tag: ${{ env.WHEELS_TAG }}
arch: ${{ matrix.arch }}
wheels-host: ${{ secrets.WHEELS_HOST }}
wheels-key: ${{ secrets.WHEELS_KEY }}
apk: "libffi-dev;openssl-dev;yaml-dev"
wheels-user: wheels
apk: "build-base;libffi-dev;openssl-dev;cargo"
skip-binary: aiohttp
env-file: true
requirements: "requirements.txt"
- name: Set version
@@ -123,53 +107,62 @@ jobs:
with:
type: ${{ env.BUILD_TYPE }}
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
- name: Login to DockerHub
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v5.1.0
uses: docker/login-action@v1
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Install Cosign
if: needs.init.outputs.publish == 'true'
uses: sigstore/cosign-installer@v3.5.0
with:
cosign-release: "v2.2.3"
- name: Install dirhash and calc hash
if: needs.init.outputs.publish == 'true'
run: |
pip3 install setuptools dirhash
dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
echo "${dir_hash}" > rootfs/supervisor.sha256
- name: Sign supervisor SHA256
if: needs.init.outputs.publish == 'true'
run: |
cosign sign-blob --yes rootfs/supervisor.sha256 --bundle rootfs/supervisor.sha256.sig
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v3.2.0
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
username: ${{ secrets.GIT_USER }}
password: ${{ secrets.GIT_TOKEN }}
- name: Set build arguments
if: needs.init.outputs.publish == 'false'
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
- name: Build supervisor
uses: home-assistant/builder@2024.03.5
uses: home-assistant/builder@2021.04.0
with:
args: |
$BUILD_ARGS \
--${{ matrix.arch }} \
--target /data \
--cosign \
--with-codenotary "${{ secrets.VCN_USER }}" "${{ secrets.VCN_PASSWORD }}" "${{ secrets.VCN_ORG }}" \
--validate-from "${{ secrets.VCN_ORG }}" \
--validate-cache "${{ secrets.VCN_ORG }}" \
--generic ${{ needs.init.outputs.version }}
env:
CAS_API_KEY: ${{ secrets.CAS_TOKEN }}
codenotary:
name: CodeNotary signature
needs: init
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Set version
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/version@master
with:
type: ${{ env.BUILD_TYPE }}
- name: Signing image
if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/codenotary@master
with:
source: dir://${{ github.workspace }}
user: ${{ secrets.VCN_USER }}
password: ${{ secrets.VCN_PASSWORD }}
organisation: ${{ secrets.VCN_ORG }}
version:
name: Update version
@@ -178,7 +171,7 @@ jobs:
steps:
- name: Checkout the repository
if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v4.1.6
uses: actions/checkout@v2
- name: Initialize git
if: needs.init.outputs.publish == 'true'
@@ -199,15 +192,13 @@ jobs:
run_supervisor:
runs-on: ubuntu-latest
name: Run the Supervisor
needs: ["build", "init"]
timeout-minutes: 60
needs: ["build", "codenotary"]
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.6
uses: actions/checkout@v2
- name: Build the Supervisor
if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2024.03.5
uses: home-assistant/builder@2021.04.0
with:
args: |
--test \
@@ -215,19 +206,13 @@ jobs:
--target /data \
--generic runner
- name: Pull Supervisor
if: needs.init.outputs.publish == 'true'
run: |
docker pull ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }}
docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} ghcr.io/home-assistant/amd64-hassio-supervisor:runner
- name: Create the Supervisor
run: |
mkdir -p /tmp/supervisor/data
docker create --name hassio_supervisor \
--privileged \
--security-opt seccomp=unconfined \
--security-opt apparmor=unconfined \
--security-opt apparmor:unconfined \
-v /run/docker.sock:/run/docker.sock \
-v /run/dbus:/run/dbus \
-v /tmp/supervisor/data:/data \
@@ -236,7 +221,7 @@ jobs:
-e SUPERVISOR_NAME=hassio_supervisor \
-e SUPERVISOR_DEV=1 \
-e SUPERVISOR_MACHINE="qemux86-64" \
ghcr.io/home-assistant/amd64-hassio-supervisor:runner
homeassistant/amd64-hassio-supervisor:runner
- name: Start the Supervisor
run: docker start hassio_supervisor
@@ -246,135 +231,22 @@ jobs:
SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
ping="error"
while [ "$ping" != "ok" ]; do
ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r '.result')
ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r .result)
sleep 5
done
- name: Check the Supervisor
run: |
echo "Checking supervisor info"
test=$(docker exec hassio_cli ha supervisor info --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
test=$(docker exec hassio_cli ha supervisor info --no-progress --raw-json | jq -r .result)
if [ "$test" != "ok" ];then
docker logs hassio_supervisor
exit 1
fi
echo "Checking supervisor network info"
test=$(docker exec hassio_cli ha network info --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
test=$(docker exec hassio_cli ha network info --no-progress --raw-json | jq -r .result)
if [ "$test" != "ok" ];then
docker logs hassio_supervisor
exit 1
fi
- name: Check the Store / Addon
run: |
echo "Install Core SSH Add-on"
test=$(docker exec hassio_cli ha addons install core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
# Make sure it actually installed
test=$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.version')
if [[ "$test" == "null" ]]; then
exit 1
fi
echo "Start Core SSH Add-on"
test=$(docker exec hassio_cli ha addons start core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
# Make sure its state is started
test="$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.state')"
if [ "$test" != "started" ]; then
exit 1
fi
- name: Check the Supervisor code sign
if: needs.init.outputs.publish == 'true'
run: |
echo "Enable Content-Trust"
test=$(docker exec hassio_cli ha security options --content-trust=true --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
echo "Run supervisor health check"
test=$(docker exec hassio_cli ha resolution healthcheck --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
echo "Check supervisor unhealthy"
test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unhealthy[]')
if [ "$test" != "" ]; then
exit 1
fi
echo "Check supervisor supported"
test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unsupported[]')
if [[ "$test" =~ source_mods ]]; then
exit 1
fi
- name: Create full backup
id: backup
run: |
test=$(docker exec hassio_cli ha backups new --no-progress --raw-json)
if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
exit 1
fi
echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT"
- name: Uninstall SSH add-on
run: |
test=$(docker exec hassio_cli ha addons uninstall core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
- name: Restart supervisor
run: |
test=$(docker exec hassio_cli ha supervisor restart --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
- name: Wait for Supervisor to come up
run: |
SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
ping="error"
while [ "$ping" != "ok" ]; do
ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r '.result')
sleep 5
done
- name: Restore SSH add-on from backup
run: |
test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --addons core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
# Make sure it actually installed
test=$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.version')
if [[ "$test" == "null" ]]; then
exit 1
fi
# Make sure its state is started
test="$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.state')"
if [ "$test" != "started" ]; then
exit 1
fi
- name: Restore SSL directory from backup
run: |
test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --folders ssl --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
- name: Get supervisor logs on failure
if: ${{ cancelled() || failure() }}
run: docker logs hassio_supervisor
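
For context on the "Install dirhash and calc hash" step above: it condenses every Python file under supervisor/ into a single SHA-256 digest, which the next step signs with cosign. Below is a minimal sketch of the same computation using the dirhash package's Python API; the paths are placeholders and the keyword usage assumes dirhash's documented signature, so treat it as illustrative rather than the workflow's actual code.

```python
from dirhash import dirhash

# Hash all *.py files under supervisor/ into one digest, mirroring the CLI call:
#   dirhash supervisor -a sha256 --match "*.py"
# (directory and output path are hypothetical placeholders)
digest = dirhash("supervisor", "sha256", match=["*.py"])

with open("rootfs/supervisor.sha256", "w", encoding="utf-8") as handle:
    handle.write(f"{digest}\n")

print(digest)
```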


@@ -8,36 +8,36 @@ on:
pull_request: ~
env:
DEFAULT_PYTHON: "3.12"
PRE_COMMIT_CACHE: ~/.cache/pre-commit
concurrency:
group: "${{ github.workflow }}-${{ github.ref }}"
cancel-in-progress: true
DEFAULT_PYTHON: 3.8
PRE_COMMIT_HOME: ~/.cache/pre-commit
jobs:
# Separate job to pre-populate the base dependency cache
# This prevents upcoming jobs from doing the same individually
prepare:
runs-on: ubuntu-latest
outputs:
python-version: ${{ steps.python.outputs.python-version }}
name: Prepare Python dependencies
strategy:
matrix:
python-version: [3.8]
name: Prepare Python ${{ matrix.python-version }} dependencies
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.6
- name: Set up Python
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.1.0
uses: actions/setup-python@v2.2.2
with:
python-version: ${{ env.DEFAULT_PYTHON }}
python-version: ${{ matrix.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.0.2
uses: actions/cache@v2.1.5
with:
path: venv
key: |
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
restore-keys: |
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-
- name: Create Python virtual environment
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
@@ -47,10 +47,9 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.0.2
uses: actions/cache@v2.1.5
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
restore-keys: |
@@ -61,91 +60,34 @@ jobs:
. venv/bin/activate
pre-commit install-hooks
lint-ruff-format:
name: Check ruff-format
lint-black:
name: Check black
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.1.0
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2.2.2
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.0.2
uses: actions/cache@v2.1.5
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.0.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run ruff-format
- name: Run black
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
lint-ruff:
name: Check ruff
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.1.0
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.0.2
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.0.2
with:
path: ${{ env.PRE_COMMIT_CACHE }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run ruff
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
black --target-version py38 --check supervisor tests setup.py
lint-dockerfile:
name: Check Dockerfile
@@ -153,7 +95,7 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.6
uses: actions/checkout@v2
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -168,19 +110,19 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.1.0
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2.2.2
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.0.2
uses: actions/cache@v2.1.5
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
@@ -188,9 +130,9 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.0.2
uses: actions/cache@v2.1.5
with:
path: ${{ env.PRE_COMMIT_CACHE }}
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
@@ -206,25 +148,57 @@ jobs:
. venv/bin/activate
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
lint-json:
name: Check JSON
lint-flake8:
name: Check flake8
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.1.0
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2.2.2
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.0.2
uses: actions/cache@v2.1.5
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Register flake8 problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/flake8.json"
- name: Run flake8
run: |
. venv/bin/activate
flake8 supervisor tests
lint-isort:
name: Check isort
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2.2.2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v2.1.5
with:
path: venv
key: |
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
@@ -232,9 +206,50 @@ jobs:
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v4.0.2
uses: actions/cache@v2.1.5
with:
path: ${{ env.PRE_COMMIT_CACHE }}
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run isort
run: |
. venv/bin/activate
pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
lint-json:
name: Check JSON
runs-on: ubuntu-latest
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2.2.2
id: python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v2.1.5
with:
path: venv
key: |
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v2.1.5
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
@@ -256,19 +271,19 @@ jobs:
needs: prepare
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.1.0
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2.2.2
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.0.2
uses: actions/cache@v2.1.5
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
@@ -282,29 +297,73 @@ jobs:
. venv/bin/activate
pylint supervisor tests
pytest:
lint-pyupgrade:
name: Check pyupgrade
runs-on: ubuntu-latest
needs: prepare
name: Run tests Python ${{ needs.prepare.outputs.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.1.0
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2.2.2
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
- name: Install Cosign
uses: sigstore/cosign-installer@v3.5.0
with:
cosign-release: "v2.2.3"
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.0.2
uses: actions/cache@v2.1.5
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: actions/cache@v2.1.5
with:
path: ${{ env.PRE_COMMIT_HOME }}
key: |
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
- name: Fail job if cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Run pyupgrade
run: |
. venv/bin/activate
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
pytest:
runs-on: ubuntu-latest
needs: prepare
strategy:
matrix:
python-version: [3.8]
name: Run tests Python ${{ matrix.python-version }}
steps:
- name: Check out code from GitHub
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2.2.2
id: python
with:
python-version: ${{ matrix.python-version }}
- name: Install CodeNotary
shell: bash
run: |
bash <(curl https://getvcn.codenotary.com -L)
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v2.1.5
with:
path: venv
key: |
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
@@ -313,7 +372,7 @@ jobs:
- name: Install additional system dependencies
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus dbus-x11
sudo apt-get install -y --no-install-recommends libpulse0 libudev1
- name: Register Python problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/python.json"
@@ -335,7 +394,7 @@ jobs:
-o console_output_style=count \
tests
- name: Upload coverage artifact
uses: actions/upload-artifact@v4.3.3
uses: actions/upload-artifact@v2.2.3
with:
name: coverage-${{ matrix.python-version }}
path: .coverage
@@ -343,29 +402,29 @@ jobs:
coverage:
name: Process test coverage
runs-on: ubuntu-latest
needs: ["pytest", "prepare"]
needs: pytest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.6
- name: Set up Python ${{ needs.prepare.outputs.python-version }}
uses: actions/setup-python@v5.1.0
uses: actions/checkout@v2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v2.2.2
id: python
with:
python-version: ${{ needs.prepare.outputs.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment
id: cache-venv
uses: actions/cache@v4.0.2
uses: actions/cache@v2.1.5
with:
path: venv
key: |
${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
- name: Fail job if Python cache restore failed
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
echo "Failed to restore Python virtual environment from cache"
exit 1
- name: Download all coverage artifacts
uses: actions/download-artifact@v4.1.7
uses: actions/download-artifact@v2
- name: Combine coverage results
run: |
. venv/bin/activate
@@ -373,4 +432,4 @@ jobs:
coverage report
coverage xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v4.4.1
uses: codecov/codecov-action@v1.3.2


@@ -9,12 +9,12 @@ jobs:
lock:
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@v5.0.1
- uses: dessant/lock-threads@v2.0.3
with:
github-token: ${{ github.token }}
issue-inactive-days: "30"
exclude-issue-created-before: "2020-10-01T00:00:00Z"
issue-lock-inactive-days: "30"
issue-exclude-created-before: "2020-10-01T00:00:00Z"
issue-lock-reason: ""
pr-inactive-days: "1"
exclude-pr-created-before: "2020-11-01T00:00:00Z"
pr-lock-inactive-days: "1"
pr-exclude-created-before: "2020-11-01T00:00:00Z"
pr-lock-reason: ""

.github/workflows/matchers/flake8.json vendored Normal file

@@ -0,0 +1,30 @@
{
"problemMatcher": [
{
"owner": "flake8-error",
"severity": "error",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
},
{
"owner": "flake8-warning",
"severity": "warning",
"pattern": [
{
"regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
"file": 1,
"line": 2,
"column": 3,
"message": 4
}
]
}
]
}
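
To make the matcher patterns above concrete, here is a small Python check that runs the "flake8-error" regex against a sample output line; the file path and message are made up for illustration and are not taken from the repository.

```python
import re

# Same expression as the "flake8-error" matcher above.
ERROR_PATTERN = re.compile(r"^(.*):(\d+):(\d+):\s(E\d{3}\s.*)$")

# Hypothetical flake8 output line.
sample = "supervisor/core.py:42:80: E501 line too long (92 > 88 characters)"

match = ERROR_PATTERN.match(sample)
if match:
    file_path, line, column, message = match.groups()
    print(f"{file_path} -> line {line}, column {column}: {message}")
```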


@@ -11,7 +11,7 @@ jobs:
name: Release Drafter
steps:
- name: Checkout the repository
uses: actions/checkout@v4.1.6
uses: actions/checkout@v2
with:
fetch-depth: 0
@@ -33,10 +33,10 @@ jobs:
echo Current version: $latest
echo New target version: $datepre.$newpost
echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
echo "::set-output name=version::$datepre.$newpost"
- name: Run Release Drafter
uses: release-drafter/release-drafter@v6.0.0
uses: release-drafter/release-drafter@v5
with:
tag: ${{ steps.version.outputs.version }}
name: ${{ steps.version.outputs.version }}
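
The version step above also shows the general migration from the deprecated `::set-output` workflow command to appending to the `$GITHUB_OUTPUT` file. A hypothetical Python helper illustrating both forms (the output name and value are placeholders):

```python
import os


def set_step_output(name: str, value: str) -> None:
    """Write a GitHub Actions step output, preferring the $GITHUB_OUTPUT file."""
    output_path = os.environ.get("GITHUB_OUTPUT")
    if output_path:
        # Newer runners: append "name=value" lines to the file GitHub provides.
        with open(output_path, "a", encoding="utf-8") as handle:
            handle.write(f"{name}={value}\n")
    else:
        # Deprecated fallback: emit the ::set-output workflow command.
        print(f"::set-output name={name}::{value}")


# Hypothetical usage mirroring the version step above.
set_step_output("version", "2021.4.0")
```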


@@ -10,9 +10,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.1.6
uses: actions/checkout@v2
- name: Sentry Release
uses: getsentry/action-release@v1.7.0
uses: getsentry/action-release@v1.1
env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}


@@ -9,10 +9,10 @@ jobs:
stale:
runs-on: ubuntu-latest
steps:
- uses: actions/stale@v9.0.0
- uses: actions/stale@v3.0.18
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 30
days-before-stale: 60
days-before-close: 7
stale-issue-label: "stale"
exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,pinned,rfc,security"


@@ -3,5 +3,4 @@ ignored:
- DL3006
- DL3013
- DL3018
- DL3042
- SC2155


@@ -1,15 +1,34 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.2.1
- repo: https://github.com/psf/black
rev: 20.8b1
hooks:
- id: ruff
- id: black
args:
- --fix
- id: ruff-format
- --safe
- --quiet
- --target-version
- py38
files: ^((supervisor|tests)/.+)?[^/]+\.py$
- repo: https://gitlab.com/pycqa/flake8
rev: 3.8.3
hooks:
- id: flake8
additional_dependencies:
- flake8-docstrings==1.5.0
- pydocstyle==5.0.2
files: ^(supervisor|script|tests)/.+\.py$
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.5.0
rev: v3.1.0
hooks:
- id: check-executables-have-shebangs
stages: [manual]
- id: check-json
- repo: https://github.com/pre-commit/mirrors-isort
rev: v4.3.21
hooks:
- id: isort
- repo: https://github.com/asottile/pyupgrade
rev: v2.6.2
hooks:
- id: pyupgrade
args: [--py37-plus]

.vscode/launch.json vendored

@@ -13,13 +13,6 @@
"remoteRoot": "/usr/src/supervisor"
}
]
},
{
"name": "Debug Tests",
"type": "python",
"request": "test",
"console": "internalConsole",
"justMyCode": false
}
]
}

.vscode/tasks.json vendored

@@ -4,7 +4,7 @@
{
"label": "Run Supervisor",
"type": "shell",
"command": "supervisor_run",
"command": "./scripts/run-supervisor.sh",
"group": {
"kind": "test",
"isDefault": true
@@ -15,6 +15,20 @@
},
"problemMatcher": []
},
{
"label": "Build Supervisor",
"type": "shell",
"command": "./scripts/build-supervisor.sh",
"group": {
"kind": "build",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Run Supervisor CLI",
"type": "shell",
@@ -32,7 +46,7 @@
{
"label": "Update Supervisor Panel",
"type": "shell",
"command": "LOKALISE_TOKEN='${input:localiseToken}' ./scripts/update-frontend.sh",
"command": "./scripts/update-frontend.sh",
"group": {
"kind": "build",
"isDefault": true
@@ -58,23 +72,9 @@
"problemMatcher": []
},
{
"label": "Ruff Check",
"label": "Flake8",
"type": "shell",
"command": "ruff check --fix supervisor tests",
"group": {
"kind": "test",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Ruff Format",
"type": "shell",
"command": "ruff format supervisor tests",
"command": "flake8 supervisor tests",
"group": {
"kind": "test",
"isDefault": true
@@ -100,12 +100,5 @@
},
"problemMatcher": []
}
],
"inputs": [
{
"id": "localiseToken",
"type": "promptString",
"description": "Paste your lokalise token to download frontend translations"
}
]
}


@@ -3,43 +3,68 @@ FROM ${BUILD_FROM}
ENV \
S6_SERVICES_GRACETIME=10000 \
SUPERVISOR_API=http://localhost \
CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1
SUPERVISOR_API=http://localhost
ARG \
COSIGN_VERSION \
BUILD_ARCH
ARG BUILD_ARCH
ARG VCN_VERSION
WORKDIR /usr/src
# Install base
WORKDIR /usr/src
RUN \
set -x \
&& apk add --no-cache \
findutils \
eudev \
eudev-libs \
git \
glib \
libffi \
libpulse \
musl \
openssl \
yaml \
&& apk add --no-cache --virtual .build-dependencies \
build-base \
go \
\
&& curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
&& chmod a+x /usr/bin/cosign
&& git clone -b v${VCN_VERSION} --depth 1 \
https://github.com/codenotary/vcn \
&& cd vcn \
\
# Fix: https://github.com/codenotary/vcn/issues/131
&& go get github.com/codenotary/immudb@4cf9e2ae06ac2e6ec98a60364c3de3eab5524757 \
\
&& if [ "${BUILD_ARCH}" = "armhf" ]; then \
GOARM=6 GOARCH=arm go build -o vcn -ldflags="-s -w" ./cmd/vcn; \
elif [ "${BUILD_ARCH}" = "armv7" ]; then \
GOARM=7 GOARCH=arm go build -o vcn -ldflags="-s -w" ./cmd/vcn; \
elif [ "${BUILD_ARCH}" = "aarch64" ]; then \
GOARCH=arm64 go build -o vcn -ldflags="-s -w" ./cmd/vcn; \
elif [ "${BUILD_ARCH}" = "i386" ]; then \
GOARCH=386 go build -o vcn -ldflags="-s -w" ./cmd/vcn; \
elif [ "${BUILD_ARCH}" = "amd64" ]; then \
GOARCH=amd64 go build -o vcn -ldflags="-s -w" ./cmd/vcn; \
else \
exit 1; \
fi \
\
&& rm -rf /root/go /root/.cache \
&& mv vcn /usr/bin/vcn \
\
&& apk del .build-dependencies \
&& rm -rf /usr/src/vcn
# Install requirements
COPY requirements.txt .
RUN \
export MAKEFLAGS="-j$(nproc)" \
&& pip3 install --only-binary=:all: \
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
"https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
-r ./requirements.txt \
&& rm -f requirements.txt
# Install Home Assistant Supervisor
COPY . supervisor
RUN \
pip3 install -e ./supervisor \
pip3 install --no-cache-dir -e ./supervisor \
&& python3 -m compileall ./supervisor/supervisor

build.json Normal file

@@ -0,0 +1,18 @@
{
"image": "homeassistant/{arch}-hassio-supervisor",
"shadow_repository": "ghcr.io/home-assistant",
"build_from": {
"aarch64": "ghcr.io/home-assistant/aarch64-base-python:3.8-alpine3.13",
"armhf": "ghcr.io/home-assistant/armhf-base-python:3.8-alpine3.13",
"armv7": "ghcr.io/home-assistant/armv7-base-python:3.8-alpine3.13",
"amd64": "ghcr.io/home-assistant/amd64-base-python:3.8-alpine3.13",
"i386": "ghcr.io/home-assistant/i386-base-python:3.8-alpine3.13"
},
"args": {
"VCN_VERSION": "0.9.4"
},
"labels": {
"io.hass.type": "supervisor",
"org.opencontainers.image.source": "https://github.com/home-assistant/supervisor"
}
}


@@ -1,24 +0,0 @@
image: ghcr.io/home-assistant/{arch}-hassio-supervisor
build_from:
aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.19
armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.19
armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.19
amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.19
i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.19
codenotary:
signer: notary@home-assistant.io
base_image: notary@home-assistant.io
cosign:
base_identity: https://github.com/home-assistant/docker-base/.*
identity: https://github.com/home-assistant/supervisor/.*
args:
COSIGN_VERSION: 2.2.3
labels:
io.hass.type: supervisor
org.opencontainers.image.title: Home Assistant Supervisor
org.opencontainers.image.description: Container-based system for managing Home Assistant Core installation
org.opencontainers.image.source: https://github.com/home-assistant/supervisor
org.opencontainers.image.authors: The Home Assistant Authors
org.opencontainers.image.url: https://www.home-assistant.io/
org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
org.opencontainers.image.licenses: Apache License 2.0

pylintrc Normal file

@@ -0,0 +1,46 @@
[MASTER]
reports=no
jobs=2
good-names=id,i,j,k,ex,Run,_,fp,T
# Reasons disabled:
# format - handled by black
# locally-disabled - it spams too much
# duplicate-code - unavoidable
# cyclic-import - doesn't test if both import on load
# abstract-class-little-used - prevents from setting right foundation
# abstract-class-not-used - is flaky, should not show up but does
# unused-argument - generic callbacks and setup methods create a lot of warnings
# redefined-variable-type - this is Python, we're duck typing!
# too-many-* - are not enforced for the sake of readability
# too-few-* - same as too-many-*
# abstract-method - with intro of async there are always methods missing
disable=
format,
abstract-class-little-used,
abstract-method,
cyclic-import,
duplicate-code,
locally-disabled,
no-else-return,
no-self-use,
not-context-manager,
redefined-variable-type,
too-few-public-methods,
too-many-arguments,
too-many-branches,
too-many-instance-attributes,
too-many-lines,
too-many-locals,
too-many-public-methods,
too-many-return-statements,
too-many-statements,
unused-argument,
[EXCEPTIONS]
overgeneral-exceptions=Exception
[TYPECHECK]
ignored-modules = distutils


@@ -1,371 +0,0 @@
[build-system]
requires = ["setuptools~=68.0.0", "wheel~=0.40.0"]
build-backend = "setuptools.build_meta"
[project]
name = "Supervisor"
dynamic = ["version", "dependencies"]
license = { text = "Apache-2.0" }
description = "Open-source private cloud os for Home-Assistant based on HassOS"
readme = "README.md"
authors = [
{ name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
]
keywords = ["docker", "home-assistant", "api"]
requires-python = ">=3.12.0"
[project.urls]
"Homepage" = "https://www.home-assistant.io/"
"Source Code" = "https://github.com/home-assistant/supervisor"
"Bug Reports" = "https://github.com/home-assistant/supervisor/issues"
"Docs: Dev" = "https://developers.home-assistant.io/"
"Discord" = "https://www.home-assistant.io/join-chat/"
"Forum" = "https://community.home-assistant.io/"
[tool.setuptools]
platforms = ["any"]
zip-safe = false
include-package-data = true
[tool.setuptools.packages.find]
include = ["supervisor*"]
[tool.pylint.MAIN]
py-version = "3.11"
# Use a conservative default here; 2 should speed up most setups and not hurt
# any too bad. Override on command line as appropriate.
jobs = 2
persistent = false
extension-pkg-allow-list = ["ciso8601"]
[tool.pylint.BASIC]
class-const-naming-style = "any"
good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]
[tool.pylint."MESSAGES CONTROL"]
# Reasons disabled:
# format - handled by ruff
# abstract-method - with intro of async there are always methods missing
# cyclic-import - doesn't test if both import on load
# duplicate-code - unavoidable
# locally-disabled - it spams too much
# too-many-* - are not enforced for the sake of readability
# too-few-* - same as too-many-*
# unused-argument - generic callbacks and setup methods create a lot of warnings
disable = [
"format",
"abstract-method",
"cyclic-import",
"duplicate-code",
"locally-disabled",
"no-else-return",
"not-context-manager",
"too-few-public-methods",
"too-many-arguments",
"too-many-branches",
"too-many-instance-attributes",
"too-many-lines",
"too-many-locals",
"too-many-public-methods",
"too-many-return-statements",
"too-many-statements",
"unused-argument",
"consider-using-with",
# Handled by ruff
# Ref: <https://github.com/astral-sh/ruff/issues/970>
"await-outside-async", # PLE1142
"bad-str-strip-call", # PLE1310
"bad-string-format-type", # PLE1307
"bidirectional-unicode", # PLE2502
"continue-in-finally", # PLE0116
"duplicate-bases", # PLE0241
"format-needs-mapping", # F502
"function-redefined", # F811
# Needed because ruff does not understand type of __all__ generated by a function
# "invalid-all-format", # PLE0605
"invalid-all-object", # PLE0604
"invalid-character-backspace", # PLE2510
"invalid-character-esc", # PLE2513
"invalid-character-nul", # PLE2514
"invalid-character-sub", # PLE2512
"invalid-character-zero-width-space", # PLE2515
"logging-too-few-args", # PLE1206
"logging-too-many-args", # PLE1205
"missing-format-string-key", # F524
"mixed-format-string", # F506
"no-method-argument", # N805
"no-self-argument", # N805
"nonexistent-operator", # B002
"nonlocal-without-binding", # PLE0117
"not-in-loop", # F701, F702
"notimplemented-raised", # F901
"return-in-init", # PLE0101
"return-outside-function", # F706
"syntax-error", # E999
"too-few-format-args", # F524
"too-many-format-args", # F522
"too-many-star-expressions", # F622
"truncated-format-string", # F501
"undefined-all-variable", # F822
"undefined-variable", # F821
"used-prior-global-declaration", # PLE0118
"yield-inside-async-function", # PLE1700
"yield-outside-function", # F704
"anomalous-backslash-in-string", # W605
"assert-on-string-literal", # PLW0129
"assert-on-tuple", # F631
"bad-format-string", # W1302, F
"bad-format-string-key", # W1300, F
"bare-except", # E722
"binary-op-exception", # PLW0711
"cell-var-from-loop", # B023
# "dangerous-default-value", # B006, ruff catches new occurrences, needs more work
"duplicate-except", # B014
"duplicate-key", # F601
"duplicate-string-formatting-argument", # F
"duplicate-value", # F
"eval-used", # PGH001
"exec-used", # S102
# "expression-not-assigned", # B018, ruff catches new occurrences, needs more work
"f-string-without-interpolation", # F541
"forgotten-debug-statement", # T100
"format-string-without-interpolation", # F
# "global-statement", # PLW0603, ruff catches new occurrences, needs more work
"global-variable-not-assigned", # PLW0602
"implicit-str-concat", # ISC001
"import-self", # PLW0406
"inconsistent-quotes", # Q000
"invalid-envvar-default", # PLW1508
"keyword-arg-before-vararg", # B026
"logging-format-interpolation", # G
"logging-fstring-interpolation", # G
"logging-not-lazy", # G
"misplaced-future", # F404
"named-expr-without-context", # PLW0131
"nested-min-max", # PLW3301
# "pointless-statement", # B018, ruff catches new occurrences, needs more work
"raise-missing-from", # TRY200
# "redefined-builtin", # A001, ruff is way more stricter, needs work
"try-except-raise", # TRY302
"unused-argument", # ARG001, we don't use it
"unused-format-string-argument", #F507
"unused-format-string-key", # F504
"unused-import", # F401
"unused-variable", # F841
"useless-else-on-loop", # PLW0120
"wildcard-import", # F403
"bad-classmethod-argument", # N804
"consider-iterating-dictionary", # SIM118
"empty-docstring", # D419
"invalid-name", # N815
"line-too-long", # E501, disabled globally
"missing-class-docstring", # D101
"missing-final-newline", # W292
"missing-function-docstring", # D103
"missing-module-docstring", # D100
"multiple-imports", #E401
"singleton-comparison", # E711, E712
"subprocess-run-check", # PLW1510
"superfluous-parens", # UP034
"ungrouped-imports", # I001
"unidiomatic-typecheck", # E721
"unnecessary-direct-lambda-call", # PLC3002
"unnecessary-lambda-assignment", # PLC3001
"unneeded-not", # SIM208
"useless-import-alias", # PLC0414
"wrong-import-order", # I001
"wrong-import-position", # E402
"comparison-of-constants", # PLR0133
"comparison-with-itself", # PLR0124
# "consider-alternative-union-syntax", # UP007, typing extension
"consider-merging-isinstance", # PLR1701
# "consider-using-alias", # UP006, typing extension
"consider-using-dict-comprehension", # C402
"consider-using-generator", # C417
"consider-using-get", # SIM401
"consider-using-set-comprehension", # C401
"consider-using-sys-exit", # PLR1722
"consider-using-ternary", # SIM108
"literal-comparison", # F632
"property-with-parameters", # PLR0206
"super-with-arguments", # UP008
"too-many-branches", # PLR0912
"too-many-return-statements", # PLR0911
"too-many-statements", # PLR0915
"trailing-comma-tuple", # COM818
"unnecessary-comprehension", # C416
"use-a-generator", # C417
"use-dict-literal", # C406
"use-list-literal", # C405
"useless-object-inheritance", # UP004
"useless-return", # PLR1711
# "no-self-use", # PLR6301 # Optional plugin, not enabled
]
[tool.pylint.REPORTS]
score = false
[tool.pylint.TYPECHECK]
ignored-modules = ["distutils"]
[tool.pylint.FORMAT]
expected-line-ending-format = "LF"
[tool.pylint.EXCEPTIONS]
overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
[tool.pytest.ini_options]
testpaths = ["tests"]
norecursedirs = [".git"]
log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
log_date_format = "%Y-%m-%d %H:%M:%S"
asyncio_mode = "auto"
filterwarnings = [
"error",
"ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash",
"ignore::pytest.PytestUnraisableExceptionWarning",
]
[tool.ruff]
select = [
"B002", # Python does not support the unary prefix increment
"B007", # Loop control variable {name} not used within loop body
"B014", # Exception handler with duplicate exception
"B023", # Function definition does not bind loop variable {name}
"B026", # Star-arg unpacking after a keyword argument is strongly discouraged
"C", # complexity
"COM818", # Trailing comma on bare tuple prohibited
"D", # docstrings
"DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow()
"DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
"E", # pycodestyle
"F", # pyflakes/autoflake
"G", # flake8-logging-format
"I", # isort
"ICN001", # import concentions; {name} should be imported as {asname}
"N804", # First argument of a class method should be named cls
"N805", # First argument of a method should be named self
"N815", # Variable {name} in class scope should not be mixedCase
"PGH001", # No builtin eval() allowed
"PGH004", # Use specific rule codes when using noqa
"PLC0414", # Useless import alias. Import alias does not rename original package.
"PLC", # pylint
"PLE", # pylint
"PLR", # pylint
"PLW", # pylint
"Q000", # Double quotes found but single quotes preferred
"RUF006", # Store a reference to the return value of asyncio.create_task
"S102", # Use of exec detected
"S103", # bad-file-permissions
"S108", # hardcoded-temp-file
"S306", # suspicious-mktemp-usage
"S307", # suspicious-eval-usage
"S313", # suspicious-xmlc-element-tree-usage
"S314", # suspicious-xml-element-tree-usage
"S315", # suspicious-xml-expat-reader-usage
"S316", # suspicious-xml-expat-builder-usage
"S317", # suspicious-xml-sax-usage
"S318", # suspicious-xml-mini-dom-usage
"S319", # suspicious-xml-pull-dom-usage
"S320", # suspicious-xmle-tree-usage
"S601", # paramiko-call
"S602", # subprocess-popen-with-shell-equals-true
"S604", # call-with-shell-equals-true
"S608", # hardcoded-sql-expression
"S609", # unix-command-wildcard-injection
"SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
"SIM117", # Merge with-statements that use the same scope
"SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
"SIM201", # Use {left} != {right} instead of not {left} == {right}
"SIM208", # Use {expr} instead of not (not {expr})
"SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
"SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
"SIM401", # Use get from dict with default instead of an if block
"T100", # Trace found: {name} used
"T20", # flake8-print
"TID251", # Banned imports
"TRY004", # Prefer TypeError exception for invalid type
"TRY200", # Use raise from to specify exception cause
"TRY302", # Remove exception handler; error is immediately re-raised
"UP", # pyupgrade
"W", # pycodestyle
]
ignore = [
"D202", # No blank lines allowed after function docstring
"D203", # 1 blank line required before class docstring
"D213", # Multi-line docstring summary should start at the second line
"D406", # Section name should end with a newline
"D407", # Section name underlining
"E501", # line too long
"E731", # do not assign a lambda expression, use a def
# Ignore ignored, as the rule is now back in preview/nursery, which cannot
# be ignored anymore without warnings.
# https://github.com/astral-sh/ruff/issues/7491
# "PLC1901", # Lots of false positives
# False positives https://github.com/astral-sh/ruff/issues/5386
"PLC0208", # Use a sequence type instead of a `set` when iterating over values
"PLR0911", # Too many return statements ({returns} > {max_returns})
"PLR0912", # Too many branches ({branches} > {max_branches})
"PLR0913", # Too many arguments to function call ({c_args} > {max_args})
"PLR0915", # Too many statements ({statements} > {max_statements})
"PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
"PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
"UP006", # keep type annotation style as is
"UP007", # keep type annotation style as is
# Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
# May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
"W191",
"E111",
"E114",
"E117",
"D206",
"D300",
"Q000",
"Q001",
"Q002",
"Q003",
"COM812",
"COM819",
"ISC001",
"ISC002",
# Disabled because ruff does not understand type of __all__ generated by a function
"PLE0605",
]
[tool.ruff.flake8-import-conventions.extend-aliases]
voluptuous = "vol"
[tool.ruff.flake8-pytest-style]
fixture-parentheses = false
[tool.ruff.flake8-tidy-imports.banned-api]
"pytz".msg = "use zoneinfo instead"
[tool.ruff.isort]
force-sort-within-sections = true
section-order = [
"future",
"standard-library",
"third-party",
"first-party",
"local-folder",
]
forced-separate = ["tests"]
known-first-party = ["supervisor", "tests"]
combine-as-imports = true
split-on-trailing-comma = false
[tool.ruff.per-file-ignores]
# DBus Service Mocks must use typing and names understood by dbus-fast
"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
[tool.ruff.mccabe]
max-complexity = 25
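The isort settings above sort plain "import x" and "from x import y" statements together inside each section (standard library, third party, first party), and the flake8-import-conventions alias table expects voluptuous to be imported as vol. A hypothetical Supervisor-style module header ordered by these rules, shown only to illustrate the effect:

from collections.abc import Awaitable
import logging
from pathlib import Path

from awesomeversion import AwesomeVersion
import voluptuous as vol

from supervisor.coresys import CoreSys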

View File

@@ -1,30 +1,20 @@
aiodns==3.2.0
aiohttp==3.9.5
aiohttp-fast-url-dispatcher==0.3.0
atomicwrites-homeassistant==1.4.1
attrs==23.2.0
awesomeversion==24.2.0
brotli==1.1.0
ciso8601==2.3.1
colorlog==6.8.2
aiohttp==3.7.4.post0
async_timeout==3.0.1
atomicwrites==1.4.0
attrs==20.3.0
awesomeversion==21.4.0
brotli==1.0.9
cchardet==2.1.7
colorlog==4.8.0
cpe==1.2.1
cryptography==42.0.8
debugpy==1.8.1
deepmerge==1.1.1
dirhash==0.4.0
docker==7.1.0
faust-cchardet==2.1.19
gitpython==3.1.43
jinja2==3.1.4
orjson==3.9.15
pulsectl==24.4.0
pyudev==0.24.3
PyYAML==6.0.1
requests==2.32.3
securetar==2024.2.1
sentry-sdk==2.5.1
setuptools==70.0.0
voluptuous==0.14.2
dbus-fast==2.21.3
typing_extensions==4.12.2
zlib-fast==0.2.0
cryptography==3.4.6
debugpy==1.2.1
docker==5.0.0
gitpython==3.1.14
jinja2==2.11.3
pulsectl==21.3.4
pytz==2021.1
pyudev==0.22.0
ruamel.yaml==0.15.100
sentry-sdk==1.0.0
voluptuous==0.12.1

View File

@@ -1,12 +1,14 @@
coverage==7.5.3
pre-commit==3.7.1
pylint==3.2.3
pytest-aiohttp==1.0.5
pytest-asyncio==0.23.6
pytest-cov==5.0.0
pytest-timeout==2.3.1
pytest==8.2.2
ruff==0.4.8
time-machine==2.14.1
typing_extensions==4.12.2
urllib3==2.2.1
black==20.8b1
codecov==2.1.11
coverage==5.5
flake8-docstrings==1.6.0
flake8==3.9.0
pre-commit==2.12.0
pydocstyle==6.0.0
pylint==2.7.4
pytest-aiohttp==0.3.0
pytest-asyncio==0.12.0 # NB!: Versions over 0.12.0 breaks pytest-aiohttp (https://github.com/aio-libs/pytest-aiohttp/issues/16)
pytest-cov==2.11.1
pytest-timeout==1.4.2
pytest==6.2.3
pyupgrade==2.12.0

0
rootfs/etc/cont-init.d/udev.sh Executable file → Normal file
View File

11
rootfs/etc/services.d/supervisor/finish Executable file → Normal file
View File

@@ -1,11 +1,8 @@
#!/usr/bin/env bashio
#!/usr/bin/execlineb -S1
# ==============================================================================
# Take down the S6 supervision tree when Supervisor fails
# ==============================================================================
if { s6-test ${1} -ne 100 }
if { s6-test ${1} -ne 256 }
if [[ "$1" -ne 100 ]] && [[ "$1" -ne 256 ]]; then
bashio::log.warning "Halt Supervisor"
/run/s6/basedir/bin/halt
fi
bashio::log.info "Supervisor restart after closing"
redirfd -w 2 /dev/null s6-svscanctl -t /var/run/s6/services

1
rootfs/etc/services.d/supervisor/run Executable file → Normal file
View File

@@ -3,6 +3,5 @@
# Start Supervisor service
# ==============================================================================
export LD_PRELOAD="/usr/local/lib/libjemalloc.so.2"
export MALLOC_CONF="background_thread:true,metadata_thp:auto"
exec python3 -m supervisor

11
rootfs/etc/services.d/watchdog/finish Executable file → Normal file
View File

@@ -1,11 +1,8 @@
#!/usr/bin/env bashio
#!/usr/bin/execlineb -S1
# ==============================================================================
# Take down the S6 supervision tree when Watchdog fails
# ==============================================================================
if { s6-test ${1} -ne 0 }
if { s6-test ${1} -ne 256 }
if [[ "$1" -ne 0 ]] && [[ "$1" -ne 256 ]]; then
bashio::log.warning "Halt Supervisor (Wuff)"
/run/s6/basedir/bin/halt
fi
bashio::log.info "Watchdog restart after closing"
s6-svscanctl -t /var/run/s6/services

4
rootfs/etc/services.d/watchdog/run Executable file → Normal file
View File

@@ -15,7 +15,7 @@ do
if [[ "${supervisor_state}" = "running" ]]; then
# Check API
if bashio::supervisor.ping > /dev/null; then
if bashio::supervisor.ping; then
failed_count=0
else
bashio::log.warning "Maybe found an issue on API healthy"
@@ -31,4 +31,4 @@ do
done
bashio::exit.nok "Watchdog detected issue with Supervisor - taking container down!"
basio::exit.nok "Watchdog detected issue with Supervisor - taking container down!"

28
scripts/build-supervisor.sh Executable file
View File

@@ -0,0 +1,28 @@
#!/bin/bash
source "${BASH_SOURCE[0]%/*}/common.sh"
set -eE
DOCKER_TIMEOUT=30
DOCKER_PID=0
function build_supervisor() {
docker pull homeassistant/amd64-builder:dev
docker run --rm \
--privileged \
-v /run/docker.sock:/run/docker.sock \
-v "$(pwd):/data" \
homeassistant/amd64-builder:dev \
--generic latest \
--target /data \
--test \
--amd64 \
--no-cache
}
echo "Build Supervisor"
start_docker
trap "stop_docker" ERR
build_supervisor

58
scripts/common.sh Normal file
View File

@@ -0,0 +1,58 @@
#!/bin/bash
function start_docker() {
local starttime
local endtime
echo "Starting docker."
dockerd 2> /dev/null &
DOCKER_PID=$!
echo "Waiting for docker to initialize..."
starttime="$(date +%s)"
endtime="$(date +%s)"
until docker info >/dev/null 2>&1; do
if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
sleep 1
endtime=$(date +%s)
else
echo "Timeout while waiting for docker to come up"
exit 1
fi
done
echo "Docker was initialized"
}
function stop_docker() {
local starttime
local endtime
echo "Stopping in container docker..."
if [ "$DOCKER_PID" -gt 0 ] && kill -0 "$DOCKER_PID" 2> /dev/null; then
starttime="$(date +%s)"
endtime="$(date +%s)"
# Now wait for it to die
kill "$DOCKER_PID"
while kill -0 "$DOCKER_PID" 2> /dev/null; do
if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
sleep 1
endtime=$(date +%s)
else
echo "Timeout while waiting for container docker to die"
exit 1
fi
done
else
echo "Your host might have been left with unreleased resources"
fi
}
function cleanup_lastboot() {
if [[ -f /workspaces/test_supervisor/config.json ]]; then
echo "Cleaning up last boot"
cp /workspaces/test_supervisor/config.json /tmp/config.json
jq -rM 'del(.last_boot)' /tmp/config.json > /workspaces/test_supervisor/config.json
rm /tmp/config.json
fi
}

102
scripts/run-supervisor.sh Executable file
View File

@@ -0,0 +1,102 @@
#!/bin/bash
source "${BASH_SOURCE[0]%/*}/common.sh"
source "${BASH_SOURCE[0]%/*}/build-supervisor.sh"
set -eE
DOCKER_TIMEOUT=30
DOCKER_PID=0
function cleanup_docker() {
echo "Cleaning up stopped containers..."
docker rm $(docker ps -a -q) || true
}
function run_supervisor() {
mkdir -p /workspaces/test_supervisor
echo "Start Supervisor"
docker run --rm --privileged \
--name hassio_supervisor \
--privileged \
--security-opt seccomp=unconfined \
--security-opt apparmor:unconfined \
-v /run/docker.sock:/run/docker.sock:rw \
-v /run/dbus:/run/dbus:ro \
-v /run/udev:/run/udev:ro \
-v "/workspaces/test_supervisor":/data:rw \
-v /etc/machine-id:/etc/machine-id:ro \
-v /workspaces/supervisor:/usr/src/supervisor \
-e SUPERVISOR_SHARE="/workspaces/test_supervisor" \
-e SUPERVISOR_NAME=hassio_supervisor \
-e SUPERVISOR_DEV=1 \
-e SUPERVISOR_MACHINE="qemux86-64" \
homeassistant/amd64-hassio-supervisor:latest
}
function init_dbus() {
if pgrep dbus-daemon; then
echo "Dbus is running"
return 0
fi
echo "Startup dbus"
mkdir -p /var/lib/dbus
cp -f /etc/machine-id /var/lib/dbus/machine-id
# cleanups
mkdir -p /run/dbus
rm -f /run/dbus/pid
# run
dbus-daemon --system --print-address
}
function init_udev() {
if pgrep systemd-udevd; then
echo "udev is running"
return 0
fi
echo "Startup udev"
# cleanups
mkdir -p /run/udev
# run
/lib/systemd/systemd-udevd --daemon
sleep 3
udevadm trigger && udevadm settle
}
echo "Run Supervisor"
start_docker
trap "stop_docker" ERR
if [ "$( docker container inspect -f '{{.State.Status}}' hassio_supervisor )" == "running" ]; then
echo "Restarting Supervisor"
docker rm -f hassio_supervisor
init_dbus
init_udev
cleanup_lastboot
run_supervisor
stop_docker
else
echo "Starting Supervisor"
docker system prune -f
build_supervisor
cleanup_lastboot
cleanup_docker
init_dbus
init_udev
run_supervisor
stop_docker
fi

View File

@@ -1,6 +1,4 @@
#!/bin/bash
source "/etc/supervisor_scripts/common"
set -e
# Update frontend
@@ -11,10 +9,6 @@ cd home-assistant-polymer
nvm install
script/bootstrap
# Download translations
start_docker
./script/translations_download
# build frontend
cd hassio
./script/build_hassio
@@ -22,9 +16,3 @@ cd hassio
# Copy frontend
rm -rf ../../supervisor/api/panel/*
cp -rf build/* ../../supervisor/api/panel/
# Reset frontend git
cd ..
git reset --hard HEAD
stop_docker

30
setup.cfg Normal file
View File

@@ -0,0 +1,30 @@
[isort]
multi_line_output = 3
include_trailing_comma=True
force_grid_wrap=0
line_length=88
indent = " "
not_skip = __init__.py
force_sort_within_sections = true
sections = FUTURE,STDLIB,INBETWEENS,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
default_section = THIRDPARTY
forced_separate = tests
combine_as_imports = true
use_parentheses = true
known_first_party = supervisor,tests
[flake8]
exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
doctests = True
max-line-length = 88
# E501: line too long
# W503: Line break occurred before a binary operator
# E203: Whitespace before ':'
# D202 No blank lines allowed after function docstring
# W504 line break after binary operator
ignore =
E501,
W503,
E203,
D202,
W504

View File

@@ -1,27 +1,59 @@
"""Home Assistant Supervisor setup."""
from pathlib import Path
import re
from setuptools import setup
RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")
SUPERVISOR_DIR = Path(__file__).parent
REQUIREMENTS_FILE = SUPERVISOR_DIR / "requirements.txt"
CONST_FILE = SUPERVISOR_DIR / "supervisor/const.py"
REQUIREMENTS = REQUIREMENTS_FILE.read_text(encoding="utf-8")
CONSTANTS = CONST_FILE.read_text(encoding="utf-8")
def _get_supervisor_version():
for line in CONSTANTS.split("/n"):
if match := RE_SUPERVISOR_VERSION.match(line):
return match.group(1)
return "99.9.9dev"
from supervisor.const import SUPERVISOR_VERSION
setup(
version=_get_supervisor_version(),
dependencies=REQUIREMENTS.split("/n"),
name="Supervisor",
version=SUPERVISOR_VERSION,
license="BSD License",
author="The Home Assistant Authors",
author_email="hello@home-assistant.io",
url="https://home-assistant.io/",
description=("Open-source private cloud os for Home-Assistant" " based on HassOS"),
long_description=(
"A maintainless private cloud operator system that"
"setup a Home-Assistant instance. Based on HassOS"
),
classifiers=[
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Topic :: Home Automation",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: Scientific/Engineering :: Atmospheric Science",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 3.8",
],
keywords=["docker", "home-assistant", "api"],
zip_safe=False,
platforms="any",
packages=[
"supervisor.addons",
"supervisor.api",
"supervisor.dbus.network",
"supervisor.dbus.payloads",
"supervisor.dbus",
"supervisor.discovery.services",
"supervisor.discovery",
"supervisor.docker",
"supervisor.homeassistant",
"supervisor.host",
"supervisor.jobs",
"supervisor.misc",
"supervisor.plugins",
"supervisor.resolution.checks",
"supervisor.resolution.evaluations",
"supervisor.resolution.fixups",
"supervisor.resolution",
"supervisor.services.modules",
"supervisor.services",
"supervisor.snapshots",
"supervisor.store",
"supervisor.utils",
"supervisor",
],
include_package_data=True,
)
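Both sides of this setup.py handle the version differently: the longer variant imports SUPERVISOR_VERSION from supervisor.const directly, while the shorter one scans supervisor/const.py with RE_SUPERVISOR_VERSION and falls back to 99.9.9dev. A minimal standalone sketch of that scanning approach, assuming const.py contains a line such as SUPERVISOR_VERSION = "2024.06.0"; it splits on real newlines ("\n") so the anchored regex can see individual lines, which the literal "/n" in the snippet above does not:

import re

RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")

CONST_TEXT = '"""Constants for the sketch."""\nSUPERVISOR_VERSION = "2024.06.0"\n'

def get_supervisor_version(text: str) -> str:
    # Walk the text line by line; return the captured value of the first match.
    for line in text.split("\n"):
        if match := RE_SUPERVISOR_VERSION.match(line):
            return match.group(1)
    return "99.9.9dev"

print(get_supervisor_version(CONST_TEXT))  # prints "2024.06.0" (captured text, quotes included)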

View File

@@ -5,15 +5,7 @@ import logging
from pathlib import Path
import sys
import zlib_fast
# Enable fast zlib before importing supervisor
zlib_fast.enable()
from supervisor import bootstrap # pylint: disable=wrong-import-position # noqa: E402
from supervisor.utils.logging import ( # pylint: disable=wrong-import-position # noqa: E402
activate_log_queue_handler,
)
from supervisor import bootstrap
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -36,8 +28,7 @@ if __name__ == "__main__":
bootstrap.initialize_logging()
# Init async event loop
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
loop = asyncio.get_event_loop()
# Check if all information are available to setup Supervisor
bootstrap.check_environment()
@@ -46,11 +37,8 @@ if __name__ == "__main__":
executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
loop.set_default_executor(executor)
activate_log_queue_handler()
_LOGGER.info("Initializing Supervisor setup")
coresys = loop.run_until_complete(bootstrap.initialize_coresys())
loop.set_debug(coresys.config.debug)
loop.run_until_complete(coresys.core.connect())
bootstrap.supervisor_debugger(coresys)

View File

@@ -1 +1,429 @@
"""Init file for Supervisor add-ons."""
import asyncio
from contextlib import suppress
import logging
import tarfile
from typing import Dict, List, Optional, Union
from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
AddonConfigurationError,
AddonsError,
AddonsJobError,
AddonsNotSupportedError,
CoreDNSError,
DockerAPIError,
DockerError,
DockerNotFound,
HomeAssistantAPIError,
HostAppArmorError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils import check_exception_chain
from .addon import Addon
from .data import AddonsData
_LOGGER: logging.Logger = logging.getLogger(__name__)
AnyAddon = Union[Addon, AddonStore]
class AddonManager(CoreSysAttributes):
"""Manage add-ons inside Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.coresys: CoreSys = coresys
self.data: AddonsData = AddonsData(coresys)
self.local: Dict[str, Addon] = {}
self.store: Dict[str, AddonStore] = {}
@property
def all(self) -> List[AnyAddon]:
"""Return a list of all add-ons."""
addons: Dict[str, AnyAddon] = {**self.store, **self.local}
return list(addons.values())
@property
def installed(self) -> List[Addon]:
"""Return a list of all installed add-ons."""
return list(self.local.values())
def get(self, addon_slug: str, local_only: bool = False) -> Optional[AnyAddon]:
"""Return an add-on from slug.
Prio:
1 - Local
2 - Store
"""
if addon_slug in self.local:
return self.local[addon_slug]
if not local_only:
return self.store.get(addon_slug)
return None
def from_token(self, token: str) -> Optional[Addon]:
"""Return an add-on from Supervisor token."""
for addon in self.installed:
if token == addon.supervisor_token:
return addon
return None
async def load(self) -> None:
"""Start up add-on management."""
tasks = []
for slug in self.data.system:
addon = self.local[slug] = Addon(self.coresys, slug)
tasks.append(addon.load())
# Run initial tasks
_LOGGER.info("Found %d installed add-ons", len(tasks))
if tasks:
await asyncio.wait(tasks)
# Sync DNS
await self.sync_dns()
async def boot(self, stage: AddonStartup) -> None:
"""Boot add-ons with mode auto."""
tasks: List[Addon] = []
for addon in self.installed:
if addon.boot != AddonBoot.AUTO or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be started
_LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
if not tasks:
return
# Start Add-ons sequential
# avoid issue on slow IO
for addon in tasks:
try:
await addon.start()
except AddonsError as err:
# Check if there is an system/user issue
if check_exception_chain(
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
):
addon.boot = AddonBoot.MANUAL
addon.save_persist()
except Exception as err: # pylint: disable=broad-except
self.sys_capture_exception(err)
else:
continue
_LOGGER.warning("Can't start Add-on %s", addon.slug)
await asyncio.sleep(self.sys_config.wait_boot)
async def shutdown(self, stage: AddonStartup) -> None:
"""Shutdown addons."""
tasks: List[Addon] = []
for addon in self.installed:
if addon.state != AddonState.STARTED or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be stopped
_LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
if not tasks:
return
# Stop Add-ons sequential
# avoid issue on slow IO
for addon in tasks:
try:
await addon.stop()
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
self.sys_capture_exception(err)
@Job(
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def install(self, slug: str) -> None:
"""Install an add-on."""
if slug in self.local:
raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
store = self.store.get(slug)
if not store:
raise AddonsError(f"Add-on {slug} not exists", _LOGGER.error)
if not store.available:
raise AddonsNotSupportedError(
f"Add-on {slug} not supported on that platform", _LOGGER.error
)
self.data.install(store)
addon = Addon(self.coresys, slug)
if not addon.path_data.is_dir():
_LOGGER.info(
"Creating Home Assistant add-on data folder %s", addon.path_data
)
addon.path_data.mkdir()
# Setup/Fix AppArmor profile
await addon.install_apparmor()
try:
await addon.instance.install(store.version, store.image)
except DockerError as err:
self.data.uninstall(addon)
raise AddonsError() from err
else:
self.local[slug] = addon
# Reload ingress tokens
if addon.with_ingress:
await self.sys_ingress.reload()
_LOGGER.info("Add-on '%s' successfully installed", slug)
async def uninstall(self, slug: str) -> None:
"""Remove an add-on."""
if slug not in self.local:
_LOGGER.warning("Add-on %s is not installed", slug)
return
addon = self.local[slug]
try:
await addon.instance.remove()
except DockerError as err:
raise AddonsError() from err
else:
addon.state = AddonState.UNKNOWN
await addon.remove_data()
# Cleanup audio settings
if addon.path_pulse.exists():
with suppress(OSError):
addon.path_pulse.unlink()
# Cleanup AppArmor profile
with suppress(HostAppArmorError):
await addon.uninstall_apparmor()
# Cleanup Ingress panel from sidebar
if addon.ingress_panel:
addon.ingress_panel = False
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
# Cleanup Ingress dynamic port assignment
if addon.with_ingress:
self.sys_create_task(self.sys_ingress.reload())
self.sys_ingress.del_dynamic_port(slug)
# Cleanup discovery data
for message in self.sys_discovery.list_messages:
if message.addon != addon.slug:
continue
self.sys_discovery.remove(message)
# Cleanup services data
for service in self.sys_services.list_services:
if addon.slug not in service.active:
continue
service.del_service_data(addon)
self.data.uninstall(addon)
self.local.pop(slug)
_LOGGER.info("Add-on '%s' successfully removed", slug)
@Job(
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def update(self, slug: str) -> None:
"""Update add-on."""
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
if addon.version == store.version:
raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
# Check if available, Maybe something have changed
if not store.available:
raise AddonsNotSupportedError(
f"Add-on {slug} not supported on that platform", _LOGGER.error
)
# Update instance
last_state: AddonState = addon.state
old_image = addon.image
try:
await addon.instance.update(store.version, store.image)
except DockerError as err:
raise AddonsError() from err
_LOGGER.info("Add-on '%s' successfully updated", slug)
self.data.update(store)
# Cleanup
with suppress(DockerError):
await addon.instance.cleanup(old_image=old_image)
# Setup/Fix AppArmor profile
await addon.install_apparmor()
# restore state
if last_state == AddonState.STARTED:
await addon.start()
@Job(
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def rebuild(self, slug: str) -> None:
"""Perform a rebuild of local build add-on."""
if slug not in self.local:
_LOGGER.error("Add-on %s is not installed", slug)
raise AddonsError()
addon = self.local[slug]
if addon.is_detached:
_LOGGER.error("Add-on %s is not available inside store", slug)
raise AddonsError()
store = self.store[slug]
# Check if a rebuild is possible now
if addon.version != store.version:
_LOGGER.error("Version changed, use Update instead Rebuild")
raise AddonsError()
if not addon.need_build:
_LOGGER.error("Can't rebuild a image based add-on")
raise AddonsNotSupportedError()
# remove docker container but not addon config
last_state: AddonState = addon.state
try:
await addon.instance.remove()
await addon.instance.install(addon.version)
except DockerError as err:
raise AddonsError() from err
else:
self.data.update(store)
_LOGGER.info("Add-on '%s' successfully rebuilt", slug)
# restore state
if last_state == AddonState.STARTED:
await addon.start()
@Job(
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
"""Restore state of an add-on."""
if slug not in self.local:
_LOGGER.debug("Add-on %s is not local available for restore", slug)
addon = Addon(self.coresys, slug)
else:
_LOGGER.debug("Add-on %s is local available for restore", slug)
addon = self.local[slug]
await addon.restore(tar_file)
# Check if new
if slug not in self.local:
_LOGGER.info("Detect new Add-on after restore %s", slug)
self.local[slug] = addon
# Update ingress
if addon.with_ingress:
await self.sys_ingress.reload()
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
@Job(conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST])
async def repair(self) -> None:
"""Repair local add-ons."""
needs_repair: List[Addon] = []
# Evaluate Add-ons to repair
for addon in self.installed:
if await addon.instance.exists():
continue
needs_repair.append(addon)
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
if not needs_repair:
return
for addon in needs_repair:
_LOGGER.info("Repairing for add-on: %s", addon.slug)
with suppress(DockerError, KeyError):
# Need pull a image again
if not addon.need_build:
await addon.instance.install(addon.version, addon.image)
continue
# Need local lookup
if addon.need_build and not addon.is_detached:
store = self.store[addon.slug]
# If this add-on is available for rebuild
if addon.version == store.version:
await addon.instance.install(addon.version, addon.image)
continue
_LOGGER.error("Can't repair %s", addon.slug)
with suppress(AddonsError):
await self.uninstall(addon.slug)
async def sync_dns(self) -> None:
"""Sync add-ons DNS names."""
# Update hosts
for addon in self.installed:
try:
if not await addon.instance.is_running():
continue
except DockerError as err:
_LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
self.sys_resolution.create_issue(
IssueType.CORRUPT_DOCKER,
ContextType.ADDON,
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
self.sys_capture_exception(err)
else:
self.sys_plugins.dns.add_host(
ipv4=addon.ip_address, names=[addon.hostname], write=False
)
# Write hosts files
with suppress(CoreDNSError):
self.sys_plugins.dns.write_hosts()
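AddonManager.get() above checks installed (local) add-ons before the store, so a locally installed add-on always shadows its store entry. A standalone sketch of that priority, with plain dicts and example slugs in place of the Supervisor classes:

# Example slugs; the real manager maps slugs to Addon / AddonStore objects.
local = {"core_ssh": "installed Addon"}
store = {"core_ssh": "store AddonStore", "core_mosquitto": "store AddonStore"}

def get(addon_slug, local_only=False):
    # Prio 1: installed add-on, Prio 2: store entry (unless local_only).
    if addon_slug in local:
        return local[addon_slug]
    if not local_only:
        return store.get(addon_slug)
    return None

print(get("core_ssh"))                         # installed Addon
print(get("core_mosquitto"))                   # store AddonStore
print(get("core_mosquitto", local_only=True))  # None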

File diff suppressed because it is too large

View File

@@ -1,23 +1,20 @@
"""Supervisor add-on build environment."""
from __future__ import annotations
from functools import cached_property
from pathlib import Path
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Dict
from awesomeversion import AwesomeVersion
from ..const import (
ATTR_ARGS,
ATTR_BUILD_FROM,
ATTR_LABELS,
ATTR_SQUASH,
FILE_SUFFIX_CONFIGURATION,
META_ADDON,
)
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.interface import MAP_ARCH
from ..exceptions import ConfigurationFileError, HassioArchNotFound
from ..exceptions import ConfigurationFileError
from ..utils.common import FileConfiguration, find_one_filetype
from .validate import SCHEMA_BUILD_CONFIG
@@ -46,33 +43,12 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
"""Ignore save function."""
raise RuntimeError()
@cached_property
def arch(self) -> str:
"""Return arch of the add-on."""
return self.sys_arch.match(self.addon.arch)
@property
def base_image(self) -> str:
"""Return base image for this add-on."""
if not self._data[ATTR_BUILD_FROM]:
return f"ghcr.io/home-assistant/{self.sys_arch.default}-base:latest"
if isinstance(self._data[ATTR_BUILD_FROM], str):
return self._data[ATTR_BUILD_FROM]
# Evaluate correct base image
if self.arch not in self._data[ATTR_BUILD_FROM]:
raise HassioArchNotFound(
f"Add-on {self.addon.slug} is not supported on {self.arch}"
)
return self._data[ATTR_BUILD_FROM][self.arch]
@property
def dockerfile(self) -> Path:
"""Return Dockerfile path."""
if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
return self.addon.path_location.joinpath("Dockerfile")
return self._data[ATTR_BUILD_FROM].get(
self.sys_arch.default, f"homeassistant/{self.sys_arch.default}-base:latest"
)
@property
def squash(self) -> bool:
@@ -80,45 +56,34 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
return self._data[ATTR_SQUASH]
@property
def additional_args(self) -> dict[str, str]:
def additional_args(self) -> Dict[str, str]:
"""Return additional Docker build arguments."""
return self._data[ATTR_ARGS]
@property
def additional_labels(self) -> dict[str, str]:
"""Return additional Docker labels."""
return self._data[ATTR_LABELS]
@property
def is_valid(self) -> bool:
"""Return true if the build env is valid."""
try:
return all(
[
self.addon.path_location.is_dir(),
self.dockerfile.is_file(),
]
)
except HassioArchNotFound:
return False
return all(
[
self.addon.path_location.is_dir(),
Path(self.addon.path_location, "Dockerfile").is_file(),
]
)
def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
def get_docker_args(self, version: AwesomeVersion):
"""Create a dict with Docker build arguments."""
args = {
"path": str(self.addon.path_location),
"tag": f"{image or self.addon.image}:{version!s}",
"dockerfile": str(self.dockerfile),
"tag": f"{self.addon.image}:{version!s}",
"pull": True,
"forcerm": not self.sys_dev,
"forcerm": True,
"squash": self.squash,
"platform": MAP_ARCH[self.arch],
"labels": {
"io.hass.version": version,
"io.hass.arch": self.arch,
"io.hass.arch": self.sys_arch.default,
"io.hass.type": META_ADDON,
"io.hass.name": self._fix_label("name"),
"io.hass.description": self._fix_label("description"),
**self.additional_labels,
},
"buildargs": {
"BUILD_FROM": self.base_image,

View File

@@ -1,11 +0,0 @@
"""Confgiuration Objects for Addon Config."""
from dataclasses import dataclass
@dataclass(slots=True)
class FolderMapping:
"""Represent folder mapping configuration."""
path: str | None
read_only: bool

View File

@@ -1,48 +0,0 @@
"""Add-on static data."""
from datetime import timedelta
from enum import StrEnum
from ..jobs.const import JobCondition
class AddonBackupMode(StrEnum):
"""Backup mode of an Add-on."""
HOT = "hot"
COLD = "cold"
class MappingType(StrEnum):
"""Mapping type of an Add-on Folder."""
DATA = "data"
CONFIG = "config"
SSL = "ssl"
ADDONS = "addons"
BACKUP = "backup"
SHARE = "share"
MEDIA = "media"
HOMEASSISTANT_CONFIG = "homeassistant_config"
ALL_ADDON_CONFIGS = "all_addon_configs"
ADDON_CONFIG = "addon_config"
ATTR_BACKUP = "backup"
ATTR_BREAKING_VERSIONS = "breaking_versions"
ATTR_CODENOTARY = "codenotary"
ATTR_READ_ONLY = "read_only"
ATTR_PATH = "path"
WATCHDOG_RETRY_SECONDS = 10
WATCHDOG_MAX_ATTEMPTS = 5
WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30)
WATCHDOG_THROTTLE_MAX_CALLS = 10
ADDON_UPDATE_CONDITIONS = [
JobCondition.FREE_SPACE,
JobCondition.HEALTHY,
JobCondition.INTERNET_HOST,
JobCondition.PLUGINS_UPDATED,
JobCondition.SUPERVISOR_UPDATED,
]
RE_SLUG = r"[-_.A-Za-z0-9]+"

View File

@@ -1,6 +1,6 @@
"""Init file for Supervisor add-on data."""
from copy import deepcopy
from typing import Any
from typing import Any, Dict
from ..const import (
ATTR_IMAGE,
@@ -16,7 +16,7 @@ from ..utils.common import FileConfiguration
from .addon import Addon
from .validate import SCHEMA_ADDONS_FILE
Config = dict[str, Any]
Config = Dict[str, Any]
class AddonsData(FileConfiguration, CoreSysAttributes):

View File

@@ -1,379 +0,0 @@
"""Supervisor add-on manager."""
import asyncio
from collections.abc import Awaitable
from contextlib import suppress
import logging
import tarfile
from typing import Union
from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
AddonConfigurationError,
AddonsError,
AddonsJobError,
AddonsNotSupportedError,
CoreDNSError,
DockerAPIError,
DockerError,
DockerNotFound,
HassioError,
HomeAssistantAPIError,
)
from ..jobs.decorator import Job, JobCondition
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..store.addon import AddonStore
from ..utils import check_exception_chain
from ..utils.sentry import capture_exception
from .addon import Addon
from .const import ADDON_UPDATE_CONDITIONS
from .data import AddonsData
_LOGGER: logging.Logger = logging.getLogger(__name__)
AnyAddon = Union[Addon, AddonStore]
class AddonManager(CoreSysAttributes):
"""Manage add-ons inside Supervisor."""
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.coresys: CoreSys = coresys
self.data: AddonsData = AddonsData(coresys)
self.local: dict[str, Addon] = {}
self.store: dict[str, AddonStore] = {}
@property
def all(self) -> list[AnyAddon]:
"""Return a list of all add-ons."""
addons: dict[str, AnyAddon] = {**self.store, **self.local}
return list(addons.values())
@property
def installed(self) -> list[Addon]:
"""Return a list of all installed add-ons."""
return list(self.local.values())
def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
"""Return an add-on from slug.
Prio:
1 - Local
2 - Store
"""
if addon_slug in self.local:
return self.local[addon_slug]
if not local_only:
return self.store.get(addon_slug)
return None
def from_token(self, token: str) -> Addon | None:
"""Return an add-on from Supervisor token."""
for addon in self.installed:
if token == addon.supervisor_token:
return addon
return None
async def load(self) -> None:
"""Start up add-on management."""
# Refresh cache for all store addons
tasks: list[Awaitable[None]] = [
store.refresh_path_cache() for store in self.store.values()
]
# Load all installed addons
for slug in self.data.system:
addon = self.local[slug] = Addon(self.coresys, slug)
tasks.append(addon.load())
# Run initial tasks
_LOGGER.info("Found %d installed add-ons", len(self.data.system))
if tasks:
await asyncio.gather(*tasks)
# Sync DNS
await self.sync_dns()
async def boot(self, stage: AddonStartup) -> None:
"""Boot add-ons with mode auto."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.boot != AddonBoot.AUTO or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be started
_LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
if not tasks:
return
# Start Add-ons sequential
# avoid issue on slow IO
# Config.wait_boot is deprecated. Until addons update with healthchecks,
# add a sleep task for it to keep the same minimum amount of wait time
wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)]
for addon in tasks:
try:
if start_task := await addon.start():
wait_boot.append(start_task)
except AddonsError as err:
# Check if there is an system/user issue
if check_exception_chain(
err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
):
addon.boot = AddonBoot.MANUAL
addon.save_persist()
except HassioError:
pass # These are already handled
else:
continue
_LOGGER.warning("Can't start Add-on %s", addon.slug)
# Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
await asyncio.gather(*wait_boot, return_exceptions=True)
async def shutdown(self, stage: AddonStartup) -> None:
"""Shutdown addons."""
tasks: list[Addon] = []
for addon in self.installed:
if addon.state != AddonState.STARTED or addon.startup != stage:
continue
tasks.append(addon)
# Evaluate add-ons which need to be stopped
_LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
if not tasks:
return
# Stop Add-ons sequential
# avoid issue on slow IO
for addon in tasks:
try:
await addon.stop()
except Exception as err: # pylint: disable=broad-except
_LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
capture_exception(err)
@Job(
name="addon_manager_install",
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
)
async def install(self, slug: str) -> None:
"""Install an add-on."""
self.sys_jobs.current.reference = slug
if slug in self.local:
raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
store = self.store.get(slug)
if not store:
raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
store.validate_availability()
await Addon(self.coresys, slug).install()
_LOGGER.info("Add-on '%s' successfully installed", slug)
async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
"""Remove an add-on."""
if slug not in self.local:
_LOGGER.warning("Add-on %s is not installed", slug)
return
await self.local[slug].uninstall(remove_config=remove_config)
_LOGGER.info("Add-on '%s' successfully removed", slug)
@Job(
name="addon_manager_update",
conditions=ADDON_UPDATE_CONDITIONS,
on_condition=AddonsJobError,
)
async def update(
self, slug: str, backup: bool | None = False
) -> asyncio.Task | None:
"""Update add-on.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after update. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
if addon.version == store.version:
raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
# Check if available, Maybe something have changed
store.validate_availability()
if backup:
await self.sys_backups.do_backup_partial(
name=f"addon_{addon.slug}_{addon.version}",
homeassistant=False,
addons=[addon.slug],
)
return await addon.update()
@Job(
name="addon_manager_rebuild",
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def rebuild(self, slug: str) -> asyncio.Task | None:
"""Perform a rebuild of local build add-on.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after rebuild. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
addon = self.local[slug]
if addon.is_detached:
raise AddonsError(
f"Add-on {slug} is not available inside store", _LOGGER.error
)
store = self.store[slug]
# Check if a rebuild is possible now
if addon.version != store.version:
raise AddonsError(
"Version changed, use Update instead Rebuild", _LOGGER.error
)
if not addon.need_build:
raise AddonsNotSupportedError(
"Can't rebuild a image based add-on", _LOGGER.error
)
return await addon.rebuild()
@Job(
name="addon_manager_restore",
conditions=[
JobCondition.FREE_SPACE,
JobCondition.INTERNET_HOST,
JobCondition.HEALTHY,
],
on_condition=AddonsJobError,
)
async def restore(
self, slug: str, tar_file: tarfile.TarFile
) -> asyncio.Task | None:
"""Restore state of an add-on.
Returns a Task that completes when addon has state 'started' (see addon.start)
if addon is started after restore. Else nothing is returned.
"""
self.sys_jobs.current.reference = slug
if slug not in self.local:
_LOGGER.debug("Add-on %s is not local available for restore", slug)
addon = Addon(self.coresys, slug)
had_ingress = False
else:
_LOGGER.debug("Add-on %s is local available for restore", slug)
addon = self.local[slug]
had_ingress = addon.ingress_panel
wait_for_start = await addon.restore(tar_file)
# Check if new
if slug not in self.local:
_LOGGER.info("Detect new Add-on after restore %s", slug)
self.local[slug] = addon
# Update ingress
if had_ingress != addon.ingress_panel:
await self.sys_ingress.reload()
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
return wait_for_start
@Job(
name="addon_manager_repair",
conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
)
async def repair(self) -> None:
"""Repair local add-ons."""
needs_repair: list[Addon] = []
# Evaluate Add-ons to repair
for addon in self.installed:
if await addon.instance.exists():
continue
needs_repair.append(addon)
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
if not needs_repair:
return
for addon in needs_repair:
_LOGGER.info("Repairing for add-on: %s", addon.slug)
with suppress(DockerError, KeyError):
# Need pull a image again
if not addon.need_build:
await addon.instance.install(addon.version, addon.image)
continue
# Need local lookup
if addon.need_build and not addon.is_detached:
store = self.store[addon.slug]
# If this add-on is available for rebuild
if addon.version == store.version:
await addon.instance.install(addon.version, addon.image)
continue
_LOGGER.error("Can't repair %s", addon.slug)
with suppress(AddonsError):
await self.uninstall(addon.slug)
async def sync_dns(self) -> None:
"""Sync add-ons DNS names."""
# Update hosts
add_host_coros: list[Awaitable[None]] = []
for addon in self.installed:
try:
if not await addon.instance.is_running():
continue
except DockerError as err:
_LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
self.sys_resolution.create_issue(
IssueType.CORRUPT_DOCKER,
ContextType.ADDON,
reference=addon.slug,
suggestions=[SuggestionType.EXECUTE_REPAIR],
)
capture_exception(err)
else:
add_host_coros.append(
self.sys_plugins.dns.add_host(
ipv4=addon.ip_address, names=[addon.hostname], write=False
)
)
await asyncio.gather(*add_host_coros)
# Write hosts files
with suppress(CoreDNSError):
await self.sys_plugins.dns.write_hosts()

View File

@@ -1,16 +1,10 @@
"""Init file for Supervisor add-ons."""
from abc import ABC, abstractmethod
from collections import defaultdict
from collections.abc import Awaitable, Callable
from contextlib import suppress
from datetime import datetime
import logging
from pathlib import Path
from typing import Any
from typing import Any, Awaitable, Dict, List, Optional
from awesomeversion import AwesomeVersion, AwesomeVersionException
from supervisor.utils.dt import utc_from_timestamp
import voluptuous as vol
from ..const import (
ATTR_ADVANCED,
@@ -18,9 +12,6 @@ from ..const import (
ATTR_ARCH,
ATTR_AUDIO,
ATTR_AUTH_API,
ATTR_BACKUP_EXCLUDE,
ATTR_BACKUP_POST,
ATTR_BACKUP_PRE,
ATTR_BOOT,
ATTR_DESCRIPTON,
ATTR_DEVICES,
@@ -38,10 +29,8 @@ from ..const import (
ATTR_HOST_IPC,
ATTR_HOST_NETWORK,
ATTR_HOST_PID,
ATTR_HOST_UTS,
ATTR_IMAGE,
ATTR_INGRESS,
ATTR_INGRESS_STREAM,
ATTR_INIT,
ATTR_JOURNALD,
ATTR_KERNEL_MODULES,
@@ -62,19 +51,18 @@ from ..const import (
ATTR_SCHEMA,
ATTR_SERVICES,
ATTR_SLUG,
ATTR_SNAPSHOT_EXCLUDE,
ATTR_STAGE,
ATTR_STARTUP,
ATTR_STDIN,
ATTR_TIMEOUT,
ATTR_TMPFS,
ATTR_TRANSLATIONS,
ATTR_TYPE,
ATTR_UART,
ATTR_UDEV,
ATTR_URL,
ATTR_USB,
ATTR_VERSION,
ATTR_VERSION_TIMESTAMP,
ATTR_VIDEO,
ATTR_WATCHDOG,
ATTR_WEBUI,
@@ -85,43 +73,21 @@ from ..const import (
AddonStage,
AddonStartup,
)
from ..coresys import CoreSys
from ..coresys import CoreSys, CoreSysAttributes
from ..docker.const import Capabilities
from ..exceptions import AddonsNotSupportedError
from ..jobs.const import JOB_GROUP_ADDON
from ..jobs.job_group import JobGroup
from ..utils import version_is_new_enough
from .configuration import FolderMapping
from .const import (
ATTR_BACKUP,
ATTR_BREAKING_VERSIONS,
ATTR_CODENOTARY,
ATTR_PATH,
ATTR_READ_ONLY,
AddonBackupMode,
MappingType,
)
from .options import AddonOptions, UiOptions
from .validate import RE_SERVICE
from .validate import RE_SERVICE, RE_VOLUME
_LOGGER: logging.Logger = logging.getLogger(__name__)
Data = dict[str, Any]
Data = Dict[str, Any]
class AddonModel(JobGroup, ABC):
class AddonModel(CoreSysAttributes, ABC):
"""Add-on Data layout."""
def __init__(self, coresys: CoreSys, slug: str):
"""Initialize data holder."""
super().__init__(
coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
)
self.coresys: CoreSys = coresys
self.slug: str = slug
self._path_icon_exists: bool = False
self._path_logo_exists: bool = False
self._path_changelog_exists: bool = False
self._path_documentation_exists: bool = False
@property
@abstractmethod
@@ -144,7 +110,7 @@ class AddonModel(JobGroup, ABC):
return self._available(self.data)
@property
def options(self) -> dict[str, Any]:
def options(self) -> Dict[str, Any]:
"""Return options with local changes."""
return self.data[ATTR_OPTIONS]
@@ -154,7 +120,7 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_BOOT]
@property
def auto_update(self) -> bool | None:
def auto_update(self) -> Optional[bool]:
"""Return if auto update is enable."""
return None
@@ -169,7 +135,7 @@ class AddonModel(JobGroup, ABC):
return self.slug.replace("_", "-")
@property
def dns(self) -> list[str]:
def dns(self) -> List[str]:
"""Return list of DNS name for that add-on."""
return []
@@ -179,22 +145,22 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_TIMEOUT]
@property
def uuid(self) -> str | None:
def uuid(self) -> Optional[str]:
"""Return an API token for this add-on."""
return None
@property
def supervisor_token(self) -> str | None:
def supervisor_token(self) -> Optional[str]:
"""Return access token for Supervisor API."""
return None
@property
def ingress_token(self) -> str | None:
def ingress_token(self) -> Optional[str]:
"""Return access token for Supervisor API."""
return None
@property
def ingress_entry(self) -> str | None:
def ingress_entry(self) -> Optional[str]:
"""Return ingress external URL."""
return None
@@ -204,7 +170,7 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_DESCRIPTON]
@property
def long_description(self) -> str | None:
def long_description(self) -> Optional[str]:
"""Return README.md as long_description."""
readme = Path(self.path_location, "README.md")
@@ -213,7 +179,8 @@ class AddonModel(JobGroup, ABC):
return None
# Return data
return readme.read_text(encoding="utf-8")
with readme.open("r") as readme_file:
return readme_file.read()
@property
def repository(self) -> str:
@@ -230,11 +197,6 @@ class AddonModel(JobGroup, ABC):
"""Return latest version of add-on."""
return self.data[ATTR_VERSION]
@property
def latest_version_timestamp(self) -> datetime:
"""Return when latest version was first seen."""
return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP])
@property
def version(self) -> AwesomeVersion:
"""Return version of add-on."""
@@ -261,7 +223,7 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_STAGE]
@property
def services_role(self) -> dict[str, str]:
def services_role(self) -> Dict[str, str]:
"""Return dict of services with rights."""
services_list = self.data.get(ATTR_SERVICES, [])
@@ -274,37 +236,37 @@ class AddonModel(JobGroup, ABC):
return services
@property
def discovery(self) -> list[str]:
def discovery(self) -> List[str]:
"""Return list of discoverable components/platforms."""
return self.data.get(ATTR_DISCOVERY, [])
@property
def ports_description(self) -> dict[str, str] | None:
def ports_description(self) -> Optional[Dict[str, str]]:
"""Return descriptions of ports."""
return self.data.get(ATTR_PORTS_DESCRIPTION)
@property
def ports(self) -> dict[str, int | None] | None:
def ports(self) -> Optional[Dict[str, Optional[int]]]:
"""Return ports of add-on."""
return self.data.get(ATTR_PORTS)
@property
def ingress_url(self) -> str | None:
def ingress_url(self) -> Optional[str]:
"""Return URL to ingress url."""
return None
@property
def webui(self) -> str | None:
def webui(self) -> Optional[str]:
"""Return URL to webui or None."""
return self.data.get(ATTR_WEBUI)
@property
def watchdog(self) -> str | None:
def watchdog(self) -> Optional[str]:
"""Return URL to for watchdog or None."""
return self.data.get(ATTR_WATCHDOG)
@property
def ingress_port(self) -> int | None:
def ingress_port(self) -> Optional[int]:
"""Return Ingress port."""
return None
@@ -338,28 +300,23 @@ class AddonModel(JobGroup, ABC):
"""Return True if add-on run on host IPC namespace."""
return self.data[ATTR_HOST_IPC]
@property
def host_uts(self) -> bool:
"""Return True if add-on run on host UTS namespace."""
return self.data[ATTR_HOST_UTS]
@property
def host_dbus(self) -> bool:
"""Return True if add-on run on host D-BUS."""
return self.data[ATTR_HOST_DBUS]
@property
def static_devices(self) -> list[Path]:
def static_devices(self) -> List[Path]:
"""Return static devices of add-on."""
return [Path(node) for node in self.data.get(ATTR_DEVICES, [])]
@property
def environment(self) -> dict[str, str] | None:
def environment(self) -> Optional[Dict[str, str]]:
"""Return environment of add-on."""
return self.data.get(ATTR_ENVIRONMENT)
@property
def privileged(self) -> list[Capabilities]:
def privileged(self) -> List[Capabilities]:
"""Return list of privilege."""
return self.data.get(ATTR_PRIVILEGED, [])
@@ -398,24 +355,9 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_HASSIO_ROLE]
@property
def backup_exclude(self) -> list[str]:
"""Return Exclude list for backup."""
return self.data.get(ATTR_BACKUP_EXCLUDE, [])
@property
def backup_pre(self) -> str | None:
"""Return pre-backup command."""
return self.data.get(ATTR_BACKUP_PRE)
@property
def backup_post(self) -> str | None:
"""Return post-backup command."""
return self.data.get(ATTR_BACKUP_POST)
@property
def backup_mode(self) -> AddonBackupMode:
"""Return if backup is hot/cold."""
return self.data[ATTR_BACKUP]
def snapshot_exclude(self) -> List[str]:
"""Return Exclude list for snapshot."""
return self.data.get(ATTR_SNAPSHOT_EXCLUDE, [])
@property
def default_init(self) -> bool:
@@ -433,15 +375,10 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_INGRESS]
@property
def ingress_panel(self) -> bool | None:
def ingress_panel(self) -> Optional[bool]:
"""Return True if the add-on access support ingress."""
return None
@property
def ingress_stream(self) -> bool:
"""Return True if post requests to ingress should be streamed."""
return self.data[ATTR_INGRESS_STREAM]
@property
def with_gpio(self) -> bool:
"""Return True if the add-on access to GPIO interface."""
@@ -483,7 +420,7 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_DEVICETREE]
@property
def with_tmpfs(self) -> str | None:
def with_tmpfs(self) -> Optional[str]:
"""Return if tmp is in memory of add-on."""
return self.data[ATTR_TMPFS]
@@ -503,55 +440,47 @@ class AddonModel(JobGroup, ABC):
return self.data[ATTR_VIDEO]
@property
def homeassistant_version(self) -> str | None:
def homeassistant_version(self) -> Optional[str]:
"""Return min Home Assistant version they needed by Add-on."""
return self.data.get(ATTR_HOMEASSISTANT)
@property
def url(self) -> str | None:
def url(self) -> Optional[str]:
"""Return URL of add-on."""
return self.data.get(ATTR_URL)
@property
def with_icon(self) -> bool:
"""Return True if an icon exists."""
return self._path_icon_exists
return self.path_icon.exists()
@property
def with_logo(self) -> bool:
"""Return True if a logo exists."""
return self._path_logo_exists
return self.path_logo.exists()
@property
def with_changelog(self) -> bool:
"""Return True if a changelog exists."""
return self._path_changelog_exists
return self.path_changelog.exists()
@property
def with_documentation(self) -> bool:
"""Return True if a documentation exists."""
return self._path_documentation_exists
return self.path_documentation.exists()
@property
def supported_arch(self) -> list[str]:
def supported_arch(self) -> List[str]:
"""Return list of supported arch."""
return self.data[ATTR_ARCH]
@property
def supported_machine(self) -> list[str]:
def supported_machine(self) -> List[str]:
"""Return list of supported machine."""
return self.data.get(ATTR_MACHINE, [])
@property
def arch(self) -> str:
"""Return architecture to use for the addon's image."""
if ATTR_IMAGE in self.data:
return self.sys_arch.match(self.data[ATTR_ARCH])
return self.sys_arch.default
@property
def image(self) -> str | None:
def image(self) -> Optional[str]:
"""Generate image name from data."""
return self._image(self.data)
@@ -561,13 +490,14 @@ class AddonModel(JobGroup, ABC):
return ATTR_IMAGE not in self.data
@property
def map_volumes(self) -> dict[MappingType, FolderMapping]:
"""Return a dict of {MappingType: FolderMapping} from add-on."""
def map_volumes(self) -> Dict[str, str]:
"""Return a dict of {volume: policy} from add-on."""
volumes = {}
for volume in self.data[ATTR_MAP]:
volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping(
volume.get(ATTR_PATH), volume[ATTR_READ_ONLY]
)
result = RE_VOLUME.match(volume)
if not result:
continue
volumes[result.group(1)] = result.group(2) or "ro"
return volumes
@@ -602,16 +532,18 @@ class AddonModel(JobGroup, ABC):
return Path(self.path_location, "apparmor.txt")
@property
def schema(self) -> AddonOptions:
"""Return Addon options validation object."""
def schema(self) -> vol.Schema:
"""Create a schema for add-on options."""
raw_schema = self.data[ATTR_SCHEMA]
if isinstance(raw_schema, bool):
raw_schema = {}
return AddonOptions(self.coresys, raw_schema, self.name, self.slug)
return vol.Schema(
vol.All(dict, AddonOptions(self.coresys, raw_schema, self.name, self.slug))
)
@property
def schema_ui(self) -> list[dict[any, any]] | None:
def schema_ui(self) -> Optional[List[Dict[str, Any]]]:
"""Create a UI schema for add-on options."""
raw_schema = self.data[ATTR_SCHEMA]
@@ -624,82 +556,31 @@ class AddonModel(JobGroup, ABC):
"""Return True if the add-on accesses the system journal."""
return self.data[ATTR_JOURNALD]
@property
def signed(self) -> bool:
"""Return True if the image is signed."""
return ATTR_CODENOTARY in self.data
@property
def codenotary(self) -> str | None:
"""Return Signer email address for CAS."""
return self.data.get(ATTR_CODENOTARY)
@property
def breaking_versions(self) -> list[AwesomeVersion]:
"""Return breaking versions of addon."""
return self.data[ATTR_BREAKING_VERSIONS]
def refresh_path_cache(self) -> Awaitable[None]:
"""Refresh cache of existing paths."""
def check_paths():
self._path_icon_exists = self.path_icon.exists()
self._path_logo_exists = self.path_logo.exists()
self._path_changelog_exists = self.path_changelog.exists()
self._path_documentation_exists = self.path_documentation.exists()
return self.sys_run_in_executor(check_paths)
def validate_availability(self) -> None:
"""Validate if addon is available for current system."""
return self._validate_availability(self.data, logger=_LOGGER.error)
def __eq__(self, other):
"""Compaired add-on objects."""
if not isinstance(other, AddonModel):
return False
return self.slug == other.slug
def _validate_availability(
self, config, *, logger: Callable[..., None] | None = None
) -> None:
"""Validate if addon is available for current system."""
def _available(self, config) -> bool:
"""Return True if this add-on is available on this platform."""
# Architecture
if not self.sys_arch.is_supported(config[ATTR_ARCH]):
raise AddonsNotSupportedError(
f"Add-on {self.slug} not supported on this platform, supported architectures: {', '.join(config[ATTR_ARCH])}",
logger,
)
return False
# Machine / Hardware
machine = config.get(ATTR_MACHINE)
if machine and (
f"!{self.sys_machine}" in machine or self.sys_machine not in machine
):
raise AddonsNotSupportedError(
f"Add-on {self.slug} not supported on this machine, supported machine types: {', '.join(machine)}",
logger,
)
# Home Assistant
version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT)
with suppress(AwesomeVersionException, TypeError):
if version and not version_is_new_enough(
self.sys_homeassistant.version, version
):
raise AddonsNotSupportedError(
f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater",
logger,
)
def _available(self, config) -> bool:
"""Return True if this add-on is available on this platform."""
try:
self._validate_availability(config)
except AddonsNotSupportedError:
if machine and f"!{self.sys_machine}" in machine:
return False
elif machine and self.sys_machine not in machine:
return False
return True
# Home Assistant
version: Optional[AwesomeVersion] = config.get(ATTR_HOMEASSISTANT)
try:
return self.sys_homeassistant.version >= version
except (AwesomeVersionException, TypeError):
return True
def _image(self, config) -> str:
"""Generate image name from data."""
@@ -710,3 +591,19 @@ class AddonModel(JobGroup, ABC):
# local build
return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"
def install(self) -> Awaitable[None]:
"""Install this add-on."""
return self.sys_addons.install(self.slug)
def uninstall(self) -> Awaitable[None]:
"""Uninstall this add-on."""
return self.sys_addons.uninstall(self.slug)
def update(self) -> Awaitable[None]:
"""Update this add-on."""
return self.sys_addons.update(self.slug)
def rebuild(self) -> Awaitable[None]:
"""Rebuild this add-on."""
return self.sys_addons.rebuild(self.slug)
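The older map_volumes above parses entries such as "config:rw" with RE_VOLUME from addons/validate.py and defaults the policy to read-only when no mode is given, whereas the newer variant uses typed MappingType/FolderMapping objects. A standalone sketch of the string-based parsing; the regex here is a stand-in for illustration, not necessarily the repository's exact RE_VOLUME pattern:

import re

# Stand-in pattern: a known folder name with an optional :rw/:ro suffix.
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")

def map_volumes(raw_map):
    volumes = {}
    for volume in raw_map:
        result = RE_VOLUME.match(volume)
        if not result:
            continue
        volumes[result.group(1)] = result.group(2) or "ro"
    return volumes

print(map_volumes(["config:rw", "share", "not-a-volume"]))  # {'config': 'rw', 'share': 'ro'}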

View File

@@ -3,7 +3,7 @@ import hashlib
import logging
from pathlib import Path
import re
from typing import Any
from typing import Any, Dict, List, Set, Union
import voluptuous as vol
@@ -59,21 +59,16 @@ class AddonOptions(CoreSysAttributes):
"""Validate Add-ons Options."""
def __init__(
self, coresys: CoreSys, raw_schema: dict[str, Any], name: str, slug: str
self, coresys: CoreSys, raw_schema: Dict[str, Any], name: str, slug: str
):
"""Validate schema."""
self.coresys: CoreSys = coresys
self.raw_schema: dict[str, Any] = raw_schema
self.devices: set[Device] = set()
self.pwned: set[str] = set()
self.raw_schema: Dict[str, Any] = raw_schema
self.devices: Set[Device] = set()
self.pwned: Set[str] = set()
self._name = name
self._slug = slug
@property
def validate(self) -> vol.Schema:
"""Create a schema for add-on options."""
return vol.Schema(vol.All(dict, self))
def __call__(self, struct):
"""Create schema validator for add-ons options."""
options = {}
@@ -167,7 +162,7 @@ class AddonOptions(CoreSysAttributes):
device = self.sys_hardware.get_by_path(Path(value))
except HardwareNotFound:
raise vol.Invalid(
f"Device '{value}' does not exist in {self._name} ({self._slug})"
f"Device '{value}' does not exists! in {self._name} ({self._slug})"
) from None
# Have filter
@@ -187,7 +182,7 @@ class AddonOptions(CoreSysAttributes):
f"Fatal error for option '{key}' with type '{typ}' in {self._name} ({self._slug})"
) from None
def _nested_validate_list(self, typ: Any, data_list: list[Any], key: str):
def _nested_validate_list(self, typ: Any, data_list: List[Any], key: str):
"""Validate nested items."""
options = []
@@ -209,7 +204,7 @@ class AddonOptions(CoreSysAttributes):
return options
def _nested_validate_dict(
self, typ: dict[Any, Any], data_dict: dict[Any, Any], key: str
self, typ: Dict[Any, Any], data_dict: Dict[Any, Any], key: str
):
"""Validate nested items."""
options = {}
@@ -241,7 +236,7 @@ class AddonOptions(CoreSysAttributes):
return options
def _check_missing_options(
self, origin: dict[Any, Any], exists: dict[Any, Any], root: str
self, origin: Dict[Any, Any], exists: Dict[Any, Any], root: str
) -> None:
"""Check if all options are exists."""
missing = set(origin) - set(exists)
@@ -267,9 +262,9 @@ class UiOptions(CoreSysAttributes):
"""Initialize UI option render."""
self.coresys = coresys
def __call__(self, raw_schema: dict[str, Any]) -> list[dict[str, Any]]:
def __call__(self, raw_schema: Dict[str, Any]) -> List[Dict[str, Any]]:
"""Generate UI schema."""
ui_schema: list[dict[str, Any]] = []
ui_schema: List[Dict[str, Any]] = []
# read options
for key, value in raw_schema.items():
@@ -287,13 +282,13 @@ class UiOptions(CoreSysAttributes):
def _single_ui_option(
self,
ui_schema: list[dict[str, Any]],
ui_schema: List[Dict[str, Any]],
value: str,
key: str,
multiple: bool = False,
) -> None:
"""Validate a single element."""
ui_node: dict[str, str | bool | float | list[str]] = {"name": key}
ui_node: Dict[str, Union[str, bool, float, List[str]]] = {"name": key}
# If multiple
if multiple:
@@ -365,8 +360,8 @@ class UiOptions(CoreSysAttributes):
def _nested_ui_list(
self,
ui_schema: list[dict[str, Any]],
option_list: list[Any],
ui_schema: List[Dict[str, Any]],
option_list: List[Any],
key: str,
) -> None:
"""UI nested list items."""
@@ -383,8 +378,8 @@ class UiOptions(CoreSysAttributes):
def _nested_ui_dict(
self,
ui_schema: list[dict[str, Any]],
option_dict: dict[str, Any],
ui_schema: List[Dict[str, Any]],
option_dict: Dict[str, Any],
key: str,
multiple: bool = False,
) -> None:
@@ -408,7 +403,7 @@ class UiOptions(CoreSysAttributes):
ui_schema.append(ui_node)
def _create_device_filter(str_filter: str) -> dict[str, Any]:
def _create_device_filter(str_filter: str) -> Dict[str, Any]:
"""Generate device Filter."""
raw_filter = dict(value.split("=") for value in str_filter.split(";"))
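Aside: the filter string handled above is a semicolon-separated list of key=value pairs. A minimal standalone sketch of that parsing step (the example string and udev keys are illustrative only):

def parse_device_filter(str_filter: str) -> dict:
    """Split "key=value;key=value" into a plain dict, as _create_device_filter does."""
    return dict(value.split("=") for value in str_filter.split(";"))

# parse_device_filter("subsystem=tty;ID_VENDOR_ID=0403")
# -> {"subsystem": "tty", "ID_VENDOR_ID": "0403"}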

View File

@@ -16,10 +16,10 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
def rating_security(addon: AddonModel) -> int:
"""Return 1-8 for security rating.
"""Return 1-6 for security rating.
1 = not secure
8 = high secure
6 = high secure
"""
rating = 5
@@ -35,24 +35,17 @@ def rating_security(addon: AddonModel) -> int:
elif addon.access_auth_api:
rating += 1
# Signed
if addon.signed:
rating += 1
# Privileged options
if (
any(
privilege in addon.privileged
for privilege in (
Capabilities.BPF,
Capabilities.DAC_READ_SEARCH,
Capabilities.NET_ADMIN,
Capabilities.NET_RAW,
Capabilities.PERFMON,
Capabilities.SYS_ADMIN,
Capabilities.SYS_MODULE,
Capabilities.SYS_PTRACE,
Capabilities.SYS_RAWIO,
Capabilities.SYS_PTRACE,
Capabilities.SYS_MODULE,
Capabilities.DAC_READ_SEARCH,
)
)
or addon.with_kernel_modules
@@ -73,15 +66,11 @@ def rating_security(addon: AddonModel) -> int:
if addon.host_pid:
rating += -2
# UTS host namespace allows to set hostname only with SYS_ADMIN
if addon.host_uts and Capabilities.SYS_ADMIN in addon.privileged:
rating += -1
# Docker Access & full Access
if addon.access_docker_api or addon.with_full_access:
rating = 1
return max(min(8, rating), 1)
return max(min(6, rating), 1)
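For reference, both variants end by clamping the accumulated score into a fixed band; a tiny worked example of that clamp (the numbers are illustrative):

def clamp_rating(rating: int, upper: int = 6) -> int:
    # The newer variant above uses upper=8; the starting score is 5 in both.
    return max(min(upper, rating), 1)

# clamp_rating(7)  -> 6  (bonuses cannot push past the cap)
# clamp_rating(-1) -> 1  (Docker API / full access drop the score to the floor)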
async def remove_data(folder: Path) -> None:

View File

@@ -2,7 +2,7 @@
import logging
import re
import secrets
from typing import Any
from typing import Any, Dict
import uuid
import voluptuous as vol
@@ -19,9 +19,6 @@ from ..const import (
ATTR_AUDIO_OUTPUT,
ATTR_AUTH_API,
ATTR_AUTO_UPDATE,
ATTR_BACKUP_EXCLUDE,
ATTR_BACKUP_POST,
ATTR_BACKUP_PRE,
ATTR_BOOT,
ATTR_BUILD_FROM,
ATTR_CONFIGURATION,
@@ -41,18 +38,15 @@ from ..const import (
ATTR_HOST_IPC,
ATTR_HOST_NETWORK,
ATTR_HOST_PID,
ATTR_HOST_UTS,
ATTR_IMAGE,
ATTR_INGRESS,
ATTR_INGRESS_ENTRY,
ATTR_INGRESS_PANEL,
ATTR_INGRESS_PORT,
ATTR_INGRESS_STREAM,
ATTR_INGRESS_TOKEN,
ATTR_INIT,
ATTR_JOURNALD,
ATTR_KERNEL_MODULES,
ATTR_LABELS,
ATTR_LEGACY,
ATTR_LOCATON,
ATTR_MACHINE,
@@ -72,6 +66,7 @@ from ..const import (
ATTR_SCHEMA,
ATTR_SERVICES,
ATTR_SLUG,
ATTR_SNAPSHOT_EXCLUDE,
ATTR_SQUASH,
ATTR_STAGE,
ATTR_STARTUP,
@@ -81,7 +76,6 @@ from ..const import (
ATTR_TIMEOUT,
ATTR_TMPFS,
ATTR_TRANSLATIONS,
ATTR_TYPE,
ATTR_UART,
ATTR_UDEV,
ATTR_URL,
@@ -99,6 +93,7 @@ from ..const import (
AddonStartup,
AddonState,
)
from ..discovery.validate import valid_discovery_service
from ..docker.const import Capabilities
from ..validate import (
docker_image,
@@ -109,23 +104,11 @@ from ..validate import (
uuid_match,
version_tag,
)
from .const import (
ATTR_BACKUP,
ATTR_BREAKING_VERSIONS,
ATTR_CODENOTARY,
ATTR_PATH,
ATTR_READ_ONLY,
RE_SLUG,
AddonBackupMode,
MappingType,
)
from .options import RE_SCHEMA_ELEMENT
_LOGGER: logging.Logger = logging.getLogger(__name__)
RE_VOLUME = re.compile(
r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$"
)
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")
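Aside: either form of RE_VOLUME exposes the volume name and an optional access mode as match groups, and RE_SERVICE works the same way with named groups; a short illustration using the older, shorter pattern (the sample strings are hypothetical config values):

import re

RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")

m = RE_VOLUME.match("config:rw")
# m.group(1) -> "config", m.group(2) -> "rw"
m = RE_VOLUME.match("share")
# m.group(1) -> "share", m.group(2) -> None (no explicit mode given)

RE_SERVICE.match("mqtt:need").groupdict()
# -> {"service": "mqtt", "rights": "need"}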
@@ -138,10 +121,8 @@ SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
RE_MACHINE = re.compile(
r"^!?(?:"
r"|intel-nuc"
r"|generic-x86-64"
r"|odroid-c2"
r"|odroid-c4"
r"|odroid-m1"
r"|odroid-n2"
r"|odroid-xu"
r"|qemuarm-64"
@@ -154,17 +135,12 @@ RE_MACHINE = re.compile(
r"|raspberrypi3"
r"|raspberrypi4-64"
r"|raspberrypi4"
r"|raspberrypi5-64"
r"|yellow"
r"|green"
r"|tinker"
r")$"
)
RE_SLUG_FIELD = re.compile(r"^" + RE_SLUG + r"$")
def _warn_addon_config(config: dict[str, Any]):
def _warn_addon_config(config: Dict[str, Any]):
"""Warn about miss configs."""
name = config.get(ATTR_NAME)
if not name:
@@ -181,21 +157,13 @@ def _warn_addon_config(config: dict[str, Any]):
name,
)
if config.get(ATTR_BACKUP, AddonBackupMode.HOT) == AddonBackupMode.COLD and (
config.get(ATTR_BACKUP_POST) or config.get(ATTR_BACKUP_PRE)
):
_LOGGER.warning(
"Add-on which only support COLD backups trying to use post/pre commands. Please report this to the maintainer of %s",
name,
)
return config
def _migrate_addon_config(protocol=False):
"""Migrate addon config."""
def _migrate(config: dict[str, Any]):
def _migrate(config: Dict[str, Any]):
name = config.get(ATTR_NAME)
if not name:
raise vol.Invalid("Invalid Add-on config!")
@@ -210,9 +178,9 @@ def _migrate_addon_config(protocol=False):
name,
)
if value == "before":
config[ATTR_STARTUP] = AddonStartup.SERVICES
config[ATTR_STARTUP] = AddonStartup.SERVICES.value
elif value == "after":
config[ATTR_STARTUP] = AddonStartup.APPLICATION
config[ATTR_STARTUP] = AddonStartup.APPLICATION.value
# UART 2021-01-20
if "auto_uart" in config:
@@ -241,65 +209,6 @@ def _migrate_addon_config(protocol=False):
)
config[ATTR_TMPFS] = True
# 2021-06 "snapshot" renamed to "backup"
for entry in (
"snapshot_exclude",
"snapshot_post",
"snapshot_pre",
"snapshot",
):
if entry in config:
new_entry = entry.replace("snapshot", "backup")
config[new_entry] = config.pop(entry)
_LOGGER.warning(
"Add-on config '%s' is deprecated, '%s' should be used instead. Please report this to the maintainer of %s",
entry,
new_entry,
name,
)
# 2023-11 "map" entries can also be dict to allow path configuration
volumes = []
for entry in config.get(ATTR_MAP, []):
if isinstance(entry, dict):
volumes.append(entry)
if isinstance(entry, str):
result = RE_VOLUME.match(entry)
if not result:
continue
volumes.append(
{
ATTR_TYPE: result.group(1),
ATTR_READ_ONLY: result.group(2) != "rw",
}
)
if volumes:
config[ATTR_MAP] = volumes
# 2023-10 "config" became "homeassistant" so /config can be used for addon's public config
if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes):
if any(
volume
and volume[ATTR_TYPE]
in {MappingType.ADDON_CONFIG, MappingType.HOMEASSISTANT_CONFIG}
for volume in volumes
):
_LOGGER.warning(
"Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s",
MappingType.ADDON_CONFIG,
MappingType.HOMEASSISTANT_CONFIG,
MappingType.CONFIG,
name,
)
else:
_LOGGER.debug(
"Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s",
MappingType.CONFIG,
MappingType.HOMEASSISTANT_CONFIG,
name,
)
return config
return _migrate
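Aside: the removed migration block renames legacy snapshot_* keys in place; the same rename, reduced to a compact sketch on a hypothetical config dict:

config = {"name": "Example add-on", "snapshot_exclude": ["*.db"], "snapshot_pre": "pre.sh"}

for entry in ("snapshot_exclude", "snapshot_post", "snapshot_pre", "snapshot"):
    if entry in config:
        config[entry.replace("snapshot", "backup")] = config.pop(entry)

# config -> {"name": "Example add-on", "backup_exclude": ["*.db"], "backup_pre": "pre.sh"}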
@@ -310,7 +219,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
{
vol.Required(ATTR_NAME): str,
vol.Required(ATTR_VERSION): version_tag,
vol.Required(ATTR_SLUG): vol.Match(RE_SLUG_FIELD),
vol.Required(ATTR_SLUG): str,
vol.Required(ATTR_DESCRIPTON): str,
vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
vol.Optional(ATTR_MACHINE): vol.All([vol.Match(RE_MACHINE)], vol.Unique()),
@@ -335,28 +244,18 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
network_port, vol.Equal(0)
),
vol.Optional(ATTR_INGRESS_ENTRY): str,
vol.Optional(ATTR_INGRESS_STREAM, default=False): vol.Boolean(),
vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): str,
vol.Optional(ATTR_PANEL_TITLE): str,
vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
vol.Optional(ATTR_HOMEASSISTANT): version_tag,
vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(version_tag),
vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_UTS, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
vol.Optional(ATTR_DEVICES): [str],
vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(),
vol.Optional(ATTR_MAP, default=list): [
vol.Schema(
{
vol.Required(ATTR_TYPE): vol.Coerce(MappingType),
vol.Optional(ATTR_READ_ONLY, default=True): bool,
vol.Optional(ATTR_PATH): str,
}
)
],
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str},
vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)],
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
@@ -377,14 +276,8 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
vol.Optional(ATTR_DISCOVERY): [str],
vol.Optional(ATTR_BACKUP_EXCLUDE): [str],
vol.Optional(ATTR_BACKUP_PRE): str,
vol.Optional(ATTR_BACKUP_POST): str,
vol.Optional(ATTR_BACKUP, default=AddonBackupMode.HOT): vol.Coerce(
AddonBackupMode
),
vol.Optional(ATTR_CODENOTARY): vol.Email(),
vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
vol.Optional(ATTR_SNAPSHOT_EXCLUDE): [str],
vol.Optional(ATTR_OPTIONS, default={}): dict,
vol.Optional(ATTR_SCHEMA, default={}): vol.Any(
vol.Schema(
@@ -408,7 +301,6 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Coerce(int), vol.Range(min=10, max=300)
),
vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(),
vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag],
},
extra=vol.REMOVE_EXTRA,
)
@@ -421,13 +313,13 @@ SCHEMA_ADDON_CONFIG = vol.All(
# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema(
{
vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Any(
vol.Match(RE_DOCKER_IMAGE_BUILD),
vol.Schema({vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}),
vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema(
{vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}
),
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
vol.Optional(ATTR_ARGS, default=dict): vol.Schema({str: str}),
vol.Optional(ATTR_LABELS, default=dict): vol.Schema({str: str}),
vol.Optional(ATTR_ARGS, default=dict): vol.Schema(
{vol.Coerce(str): vol.Coerce(str)}
),
},
extra=vol.REMOVE_EXTRA,
)
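For reference, both versions of SCHEMA_BUILD_CONFIG lean on voluptuous defaults plus extra=vol.REMOVE_EXTRA; a minimal sketch of that behaviour (the field names are illustrative, not the full schema):

import voluptuous as vol

schema = vol.Schema(
    {
        vol.Optional("squash", default=False): vol.Boolean(),
        vol.Optional("args", default=dict): vol.Schema({str: str}),
    },
    extra=vol.REMOVE_EXTRA,
)

schema({"args": {"TZ": "UTC"}, "unknown_key": 1})
# -> {"squash": False, "args": {"TZ": "UTC"}}  (defaults filled in, unknown keys dropped)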
@@ -494,7 +386,7 @@ SCHEMA_ADDONS_FILE = vol.Schema(
)
SCHEMA_ADDON_BACKUP = vol.Schema(
SCHEMA_ADDON_SNAPSHOT = vol.Schema(
{
vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,

View File

@@ -1,50 +1,40 @@
"""Init file for Supervisor RESTful API."""
from functools import partial
import logging
from pathlib import Path
from typing import Any
from typing import Optional
from aiohttp import web
from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispatcher
from ..const import AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
from ..utils.sentry import capture_exception
from .addons import APIAddons
from .audio import APIAudio
from .auth import APIAuth
from .backups import APIBackups
from .cli import APICli
from .const import CONTENT_TYPE_TEXT
from .discovery import APIDiscovery
from .dns import APICoreDNS
from .docker import APIDocker
from .hardware import APIHardware
from .homeassistant import APIHomeAssistant
from .host import APIHost
from .info import APIInfo
from .ingress import APIIngress
from .jobs import APIJobs
from .middleware.security import SecurityMiddleware
from .mounts import APIMounts
from .multicast import APIMulticast
from .network import APINetwork
from .observer import APIObserver
from .os import APIOS
from .proxy import APIProxy
from .resolution import APIResoulution
from .root import APIRoot
from .security import APISecurity
from .security import SecurityMiddleware
from .services import APIServices
from .snapshots import APISnapshots
from .store import APIStore
from .supervisor import APISupervisor
from .utils import api_process, api_process_raw
_LOGGER: logging.Logger = logging.getLogger(__name__)
MAX_CLIENT_SIZE: int = 1024**2 * 16
MAX_LINE_SIZE: int = 24570
MAX_CLIENT_SIZE: int = 1024 ** 2 * 16
class RestAPI(CoreSysAttributes):
@@ -57,34 +47,20 @@ class RestAPI(CoreSysAttributes):
self.webapp: web.Application = web.Application(
client_max_size=MAX_CLIENT_SIZE,
middlewares=[
self.security.block_bad_requests,
self.security.system_validation,
self.security.token_validation,
self.security.core_proxy,
],
handler_args={
"max_line_size": MAX_LINE_SIZE,
"max_field_size": MAX_LINE_SIZE,
},
)
attach_fast_url_dispatcher(self.webapp, FastUrlDispatcher())
# service stuff
self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
self._site: web.TCPSite | None = None
# share single host API handler for reuse in logging endpoints
self._api_host: APIHost | None = None
self._runner: web.AppRunner = web.AppRunner(self.webapp)
self._site: Optional[web.TCPSite] = None
async def load(self) -> None:
"""Register REST API Calls."""
self._api_host = APIHost()
self._api_host.coresys = self.coresys
self._register_addons()
self._register_audio()
self._register_auth()
self._register_backups()
self._register_cli()
self._register_discovery()
self._register_dns()
@@ -92,93 +68,41 @@ class RestAPI(CoreSysAttributes):
self._register_hardware()
self._register_homeassistant()
self._register_host()
self._register_jobs()
self._register_info()
self._register_ingress()
self._register_mounts()
self._register_multicast()
self._register_network()
self._register_observer()
self._register_os()
self._register_jobs()
self._register_panel()
self._register_proxy()
self._register_resolution()
self._register_root()
self._register_security()
self._register_services()
self._register_store()
self._register_snapshots()
self._register_supervisor()
self._register_store()
await self.start()
def _register_advanced_logs(self, path: str, syslog_identifier: str):
"""Register logs endpoint for a given path, returning logs for single syslog identifier."""
self.webapp.add_routes(
[
web.get(
f"{path}/logs",
partial(self._api_host.advanced_logs, identifier=syslog_identifier),
),
web.get(
f"{path}/logs/follow",
partial(
self._api_host.advanced_logs,
identifier=syslog_identifier,
follow=True,
),
),
web.get(
f"{path}/logs/boots/{{bootid}}",
partial(self._api_host.advanced_logs, identifier=syslog_identifier),
),
web.get(
f"{path}/logs/boots/{{bootid}}/follow",
partial(
self._api_host.advanced_logs,
identifier=syslog_identifier,
follow=True,
),
),
]
)
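Aside: the helper above reuses one coroutine for several routes by pre-binding keyword arguments with functools.partial; the pattern in isolation (the paths and identifier are made up):

from functools import partial

from aiohttp import web

async def logs(request: web.Request, identifier: str = "", follow: bool = False):
    return web.Response(text=f"logs for {identifier}, follow={follow}")

app = web.Application()
app.add_routes(
    [
        web.get("/example/logs", partial(logs, identifier="example")),
        web.get("/example/logs/follow", partial(logs, identifier="example", follow=True)),
    ]
)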
def _register_host(self) -> None:
"""Register hostcontrol functions."""
api_host = self._api_host
api_host = APIHost()
api_host.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/host/info", api_host.info),
web.get("/host/logs", api_host.advanced_logs),
web.get(
"/host/logs/follow",
partial(api_host.advanced_logs, follow=True),
),
web.get("/host/logs/identifiers", api_host.list_identifiers),
web.get("/host/logs/identifiers/{identifier}", api_host.advanced_logs),
web.get(
"/host/logs/identifiers/{identifier}/follow",
partial(api_host.advanced_logs, follow=True),
),
web.get("/host/logs/boots", api_host.list_boots),
web.get("/host/logs/boots/{bootid}", api_host.advanced_logs),
web.get(
"/host/logs/boots/{bootid}/follow",
partial(api_host.advanced_logs, follow=True),
),
web.get(
"/host/logs/boots/{bootid}/identifiers/{identifier}",
api_host.advanced_logs,
),
web.get(
"/host/logs/boots/{bootid}/identifiers/{identifier}/follow",
partial(api_host.advanced_logs, follow=True),
),
web.get("/host/logs", api_host.logs),
web.post("/host/reboot", api_host.reboot),
web.post("/host/shutdown", api_host.shutdown),
web.post("/host/reload", api_host.reload),
web.post("/host/options", api_host.options),
web.get("/host/services", api_host.services),
web.post("/host/services/{service}/stop", api_host.service_stop),
web.post("/host/services/{service}/start", api_host.service_start),
web.post("/host/services/{service}/restart", api_host.service_restart),
web.post("/host/services/{service}/reload", api_host.service_reload),
]
)
@@ -219,34 +143,6 @@ class RestAPI(CoreSysAttributes):
web.get("/os/info", api_os.info),
web.post("/os/update", api_os.update),
web.post("/os/config/sync", api_os.config_sync),
web.post("/os/datadisk/move", api_os.migrate_data),
web.get("/os/datadisk/list", api_os.list_data),
web.post("/os/datadisk/wipe", api_os.wipe_data),
web.post("/os/boot-slot", api_os.set_boot_slot),
]
)
# Boards endpoints
self.webapp.add_routes(
[
web.get("/os/boards/green", api_os.boards_green_info),
web.post("/os/boards/green", api_os.boards_green_options),
web.get("/os/boards/yellow", api_os.boards_yellow_info),
web.post("/os/boards/yellow", api_os.boards_yellow_options),
web.get("/os/boards/{board}", api_os.boards_other_info),
]
)
def _register_security(self) -> None:
"""Register Security functions."""
api_security = APISecurity()
api_security.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/security/info", api_security.info),
web.post("/security/options", api_security.options),
web.post("/security/integrity", api_security.integrity_check),
]
)
@@ -260,8 +156,6 @@ class RestAPI(CoreSysAttributes):
web.get("/jobs/info", api_jobs.info),
web.post("/jobs/options", api_jobs.options),
web.post("/jobs/reset", api_jobs.reset),
web.get("/jobs/{uuid}", api_jobs.job_info),
web.delete("/jobs/{uuid}", api_jobs.remove_job),
]
)
@@ -300,11 +194,11 @@ class RestAPI(CoreSysAttributes):
[
web.get("/multicast/info", api_multicast.info),
web.get("/multicast/stats", api_multicast.stats),
web.get("/multicast/logs", api_multicast.logs),
web.post("/multicast/update", api_multicast.update),
web.post("/multicast/restart", api_multicast.restart),
]
)
self._register_advanced_logs("/multicast", "hassio_multicast")
def _register_hardware(self) -> None:
"""Register hardware functions."""
@@ -315,24 +209,16 @@ class RestAPI(CoreSysAttributes):
[
web.get("/hardware/info", api_hardware.info),
web.get("/hardware/audio", api_hardware.audio),
web.post("/hardware/trigger", api_hardware.trigger),
]
)
def _register_root(self) -> None:
"""Register root functions."""
api_root = APIRoot()
api_root.coresys = self.coresys
def _register_info(self) -> None:
"""Register info functions."""
api_info = APIInfo()
api_info.coresys = self.coresys
self.webapp.add_routes([web.get("/info", api_root.info)])
self.webapp.add_routes([web.post("/refresh_updates", api_root.refresh_updates)])
self.webapp.add_routes(
[web.get("/available_updates", api_root.available_updates)]
)
# Remove: 2023
self.webapp.add_routes(
[web.get("/supervisor/available_updates", api_root.available_updates)]
)
self.webapp.add_routes([web.get("/info", api_info.info)])
def _register_resolution(self) -> None:
"""Register info functions."""
@@ -358,10 +244,6 @@ class RestAPI(CoreSysAttributes):
"/resolution/issue/{issue}",
api_resolution.dismiss_issue,
),
web.get(
"/resolution/issue/{issue}/suggestions",
api_resolution.suggestions_for_issue,
),
web.post("/resolution/healthcheck", api_resolution.healthcheck),
]
)
@@ -377,7 +259,6 @@ class RestAPI(CoreSysAttributes):
web.post("/auth", api_auth.auth),
web.post("/auth/reset", api_auth.reset),
web.delete("/auth/cache", api_auth.cache),
web.get("/auth/list", api_auth.list_users),
]
)
@@ -391,6 +272,7 @@ class RestAPI(CoreSysAttributes):
web.get("/supervisor/ping", api_supervisor.ping),
web.get("/supervisor/info", api_supervisor.info),
web.get("/supervisor/stats", api_supervisor.stats),
web.get("/supervisor/logs", api_supervisor.logs),
web.post("/supervisor/update", api_supervisor.update),
web.post("/supervisor/reload", api_supervisor.reload),
web.post("/supervisor/restart", api_supervisor.restart),
@@ -399,38 +281,6 @@ class RestAPI(CoreSysAttributes):
]
)
async def get_supervisor_logs(*args, **kwargs):
try:
return await self._api_host.advanced_logs_handler(
*args, identifier="hassio_supervisor", **kwargs
)
except Exception as err: # pylint: disable=broad-exception-caught
# Supervisor logs are critical, so catch everything, log the exception
# and try to return Docker container logs as the fallback
_LOGGER.exception(
"Failed to get supervisor logs using advanced_logs API"
)
if not isinstance(err, HostNotSupportedError):
# No need to capture HostNotSupportedError to Sentry, the cause
# is known and reported to the user using the resolution center.
capture_exception(err)
return await api_supervisor.logs(*args, **kwargs)
self.webapp.add_routes(
[
web.get("/supervisor/logs", get_supervisor_logs),
web.get(
"/supervisor/logs/follow",
partial(get_supervisor_logs, follow=True),
),
web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
web.get(
"/supervisor/logs/boots/{bootid}/follow",
partial(get_supervisor_logs, follow=True),
),
]
)
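Aside: the removed block wraps the journal-backed logs handler so that any failure falls back to plain container logs; the general shape of such a fallback, sketched with stand-in handlers:

import logging

_LOGGER = logging.getLogger(__name__)

async def advanced_logs(**kwargs):   # stand-in for the systemd-journal handler
    raise RuntimeError("journal gateway unavailable")

async def container_logs(**kwargs):  # stand-in for the Docker-logs handler
    return b"container log lines"

async def get_logs(**kwargs):
    """Prefer journal logs, but never fail hard: fall back to container logs."""
    try:
        return await advanced_logs(**kwargs)
    except Exception:  # pylint: disable=broad-exception-caught
        _LOGGER.exception("Advanced logs failed, falling back to container logs")
        return await container_logs(**kwargs)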
def _register_homeassistant(self) -> None:
"""Register Home Assistant functions."""
api_hass = APIHomeAssistant()
@@ -439,6 +289,7 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes(
[
web.get("/core/info", api_hass.info),
web.get("/core/logs", api_hass.logs),
web.get("/core/stats", api_hass.stats),
web.post("/core/options", api_hass.options),
web.post("/core/update", api_hass.update),
@@ -447,28 +298,20 @@ class RestAPI(CoreSysAttributes):
web.post("/core/start", api_hass.start),
web.post("/core/check", api_hass.check),
web.post("/core/rebuild", api_hass.rebuild),
]
)
self._register_advanced_logs("/core", "homeassistant")
# Reroute from legacy
self.webapp.add_routes(
[
# Remove with old Supervisor fallback
web.get("/homeassistant/info", api_hass.info),
web.get("/homeassistant/logs", api_hass.logs),
web.get("/homeassistant/stats", api_hass.stats),
web.post("/homeassistant/options", api_hass.options),
web.post("/homeassistant/update", api_hass.update),
web.post("/homeassistant/restart", api_hass.restart),
web.post("/homeassistant/stop", api_hass.stop),
web.post("/homeassistant/start", api_hass.start),
web.post("/homeassistant/update", api_hass.update),
web.post("/homeassistant/rebuild", api_hass.rebuild),
web.post("/homeassistant/check", api_hass.check),
web.post("/homeassistant/rebuild", api_hass.rebuild),
]
)
self._register_advanced_logs("/homeassistant", "homeassistant")
def _register_proxy(self) -> None:
"""Register Home Assistant API Proxy."""
api_proxy = APIProxy()
@@ -482,12 +325,7 @@ class RestAPI(CoreSysAttributes):
web.post("/core/api/{path:.+}", api_proxy.api),
web.get("/core/api/{path:.+}", api_proxy.api),
web.get("/core/api/", api_proxy.api),
]
)
# Reroute from legacy
self.webapp.add_routes(
[
# Remove with old Supervisor fallback
web.get("/homeassistant/api/websocket", api_proxy.websocket),
web.get("/homeassistant/websocket", api_proxy.websocket),
web.get("/homeassistant/api/stream", api_proxy.stream),
@@ -505,6 +343,8 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes(
[
web.get("/addons", api_addons.list),
web.post("/addons/reload", api_addons.reload),
web.get("/addons/{addon}/info", api_addons.info),
web.post("/addons/{addon}/uninstall", api_addons.uninstall),
web.post("/addons/{addon}/start", api_addons.start),
web.post("/addons/{addon}/stop", api_addons.stop),
@@ -515,52 +355,17 @@ class RestAPI(CoreSysAttributes):
),
web.get("/addons/{addon}/options/config", api_addons.options_config),
web.post("/addons/{addon}/rebuild", api_addons.rebuild),
web.get("/addons/{addon}/logs", api_addons.logs),
web.get("/addons/{addon}/icon", api_addons.icon),
web.get("/addons/{addon}/logo", api_addons.logo),
web.get("/addons/{addon}/changelog", api_addons.changelog),
web.get("/addons/{addon}/documentation", api_addons.documentation),
web.post("/addons/{addon}/stdin", api_addons.stdin),
web.post("/addons/{addon}/security", api_addons.security),
web.get("/addons/{addon}/stats", api_addons.stats),
]
)
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
async def get_addon_logs(request, *args, **kwargs):
addon = api_addons.get_addon_for_request(request)
kwargs["identifier"] = f"addon_{addon.slug}"
return await self._api_host.advanced_logs(request, *args, **kwargs)
self.webapp.add_routes(
[
web.get("/addons/{addon}/logs", get_addon_logs),
web.get(
"/addons/{addon}/logs/follow",
partial(get_addon_logs, follow=True),
),
web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
web.get(
"/addons/{addon}/logs/boots/{bootid}/follow",
partial(get_addon_logs, follow=True),
),
]
)
# Legacy routing to support requests for not installed addons
api_store = APIStore()
api_store.coresys = self.coresys
@api_process
async def addons_addon_info(request: web.Request) -> dict[str, Any]:
"""Route to store if info requested for not installed addon."""
try:
return await api_addons.info(request)
except APIAddonNotInstalled:
# Route to store/{addon}/info but add missing fields
return dict(
await api_store.addons_addon_info_wrapped(request),
state=AddonState.UNKNOWN,
options=self.sys_addons.store[request.match_info["addon"]].options,
)
self.webapp.add_routes([web.get("/addons/{addon}/info", addons_addon_info)])
def _register_ingress(self) -> None:
"""Register Ingress functions."""
api_ingress = APIIngress()
@@ -575,30 +380,30 @@ class RestAPI(CoreSysAttributes):
]
)
def _register_backups(self) -> None:
"""Register backups functions."""
api_backups = APIBackups()
api_backups.coresys = self.coresys
def _register_snapshots(self) -> None:
"""Register snapshots functions."""
api_snapshots = APISnapshots()
api_snapshots.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/backups", api_backups.list),
web.get("/backups/info", api_backups.info),
web.post("/backups/options", api_backups.options),
web.post("/backups/reload", api_backups.reload),
web.post("/backups/freeze", api_backups.freeze),
web.post("/backups/thaw", api_backups.thaw),
web.post("/backups/new/full", api_backups.backup_full),
web.post("/backups/new/partial", api_backups.backup_partial),
web.post("/backups/new/upload", api_backups.upload),
web.get("/backups/{slug}/info", api_backups.backup_info),
web.delete("/backups/{slug}", api_backups.remove),
web.post("/backups/{slug}/restore/full", api_backups.restore_full),
web.get("/snapshots", api_snapshots.list),
web.post("/snapshots/reload", api_snapshots.reload),
web.post("/snapshots/new/full", api_snapshots.snapshot_full),
web.post("/snapshots/new/partial", api_snapshots.snapshot_partial),
web.post("/snapshots/new/upload", api_snapshots.upload),
web.get("/snapshots/{snapshot}/info", api_snapshots.info),
web.delete("/snapshots/{snapshot}", api_snapshots.remove),
web.post(
"/backups/{slug}/restore/partial",
api_backups.restore_partial,
"/snapshots/{snapshot}/restore/full", api_snapshots.restore_full
),
web.get("/backups/{slug}/download", api_backups.download),
web.post(
"/snapshots/{snapshot}/restore/partial",
api_snapshots.restore_partial,
),
web.get("/snapshots/{snapshot}/download", api_snapshots.download),
# Old, remove at end of 2020
web.post("/snapshots/{snapshot}/remove", api_snapshots.remove),
]
)
@@ -639,6 +444,7 @@ class RestAPI(CoreSysAttributes):
[
web.get("/dns/info", api_dns.info),
web.get("/dns/stats", api_dns.stats),
web.get("/dns/logs", api_dns.logs),
web.post("/dns/update", api_dns.update),
web.post("/dns/options", api_dns.options),
web.post("/dns/restart", api_dns.restart),
@@ -646,8 +452,6 @@ class RestAPI(CoreSysAttributes):
]
)
self._register_advanced_logs("/dns", "hassio_dns")
def _register_audio(self) -> None:
"""Register Audio functions."""
api_audio = APIAudio()
@@ -657,6 +461,7 @@ class RestAPI(CoreSysAttributes):
[
web.get("/audio/info", api_audio.info),
web.get("/audio/stats", api_audio.stats),
web.get("/audio/logs", api_audio.logs),
web.post("/audio/update", api_audio.update),
web.post("/audio/restart", api_audio.restart),
web.post("/audio/reload", api_audio.reload),
@@ -669,24 +474,6 @@ class RestAPI(CoreSysAttributes):
]
)
self._register_advanced_logs("/audio", "hassio_audio")
def _register_mounts(self) -> None:
"""Register mounts endpoints."""
api_mounts = APIMounts()
api_mounts.coresys = self.coresys
self.webapp.add_routes(
[
web.get("/mounts", api_mounts.info),
web.post("/mounts/options", api_mounts.options),
web.post("/mounts", api_mounts.create_mount),
web.put("/mounts/{mount}", api_mounts.update_mount),
web.delete("/mounts/{mount}", api_mounts.delete_mount),
web.post("/mounts/{mount}/reload", api_mounts.reload_mount),
]
)
def _register_store(self) -> None:
"""Register store endpoints."""
api_store = APIStore()
@@ -697,15 +484,7 @@ class RestAPI(CoreSysAttributes):
web.get("/store", api_store.store_info),
web.get("/store/addons", api_store.addons_list),
web.get("/store/addons/{addon}", api_store.addons_addon_info),
web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
web.get(
"/store/addons/{addon}/changelog", api_store.addons_addon_changelog
),
web.get(
"/store/addons/{addon}/documentation",
api_store.addons_addon_documentation,
),
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
web.post(
"/store/addons/{addon}/install", api_store.addons_addon_install
),
@@ -718,34 +497,20 @@ class RestAPI(CoreSysAttributes):
"/store/addons/{addon}/update/{version}",
api_store.addons_addon_update,
),
# Must be below others since it has a wildcard in resource path
web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
web.post("/store/reload", api_store.reload),
web.get("/store/repositories", api_store.repositories_list),
web.get(
"/store/repositories/{repository}",
api_store.repositories_repository_info,
),
web.post("/store/repositories", api_store.add_repository),
web.delete(
"/store/repositories/{repository}", api_store.remove_repository
),
]
)
# Reroute from legacy
self.webapp.add_routes(
[
web.post("/addons/reload", api_store.reload),
web.post("/addons/{addon}/install", api_store.addons_addon_install),
web.post("/addons/{addon}/update", api_store.addons_addon_update),
web.get("/addons/{addon}/icon", api_store.addons_addon_icon),
web.get("/addons/{addon}/logo", api_store.addons_addon_logo),
web.get("/addons/{addon}/changelog", api_store.addons_addon_changelog),
web.get(
"/addons/{addon}/documentation",
api_store.addons_addon_documentation,
),
]
)
@@ -771,7 +536,9 @@ class RestAPI(CoreSysAttributes):
async def start(self) -> None:
"""Run RESTful API webserver."""
await self._runner.setup()
self._site = web.TCPSite(self._runner, host="0.0.0.0", port=80)
self._site = web.TCPSite(
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
)
try:
await self._site.start()

View File

@@ -1,15 +1,14 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from typing import Any, Awaitable, Dict, List
from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..addons import AnyAddon
from ..addons.addon import Addon
from ..addons.manager import AnyAddon
from ..addons.utils import rating_security
from ..const import (
ATTR_ADDONS,
@@ -46,7 +45,6 @@ from ..const import (
ATTR_HOST_IPC,
ATTR_HOST_NETWORK,
ATTR_HOST_PID,
ATTR_HOST_UTS,
ATTR_HOSTNAME,
ATTR_ICON,
ATTR_INGRESS,
@@ -54,11 +52,13 @@ from ..const import (
ATTR_INGRESS_PANEL,
ATTR_INGRESS_PORT,
ATTR_INGRESS_URL,
ATTR_INSTALLED,
ATTR_IP_ADDRESS,
ATTR_KERNEL_MODULES,
ATTR_LOGO,
ATTR_LONG_DESCRIPTION,
ATTR_MACHINE,
ATTR_MAINTAINER,
ATTR_MEMORY_LIMIT,
ATTR_MEMORY_PERCENT,
ATTR_MEMORY_USAGE,
@@ -71,12 +71,13 @@ from ..const import (
ATTR_OPTIONS,
ATTR_PRIVILEGED,
ATTR_PROTECTED,
ATTR_PWNED,
ATTR_RATING,
ATTR_REPOSITORIES,
ATTR_REPOSITORY,
ATTR_SCHEMA,
ATTR_SERVICES,
ATTR_SLUG,
ATTR_SOURCE,
ATTR_STAGE,
ATTR_STARTUP,
ATTR_STATE,
@@ -93,25 +94,23 @@ from ..const import (
ATTR_VIDEO,
ATTR_WATCHDOG,
ATTR_WEBUI,
CONTENT_TYPE_BINARY,
CONTENT_TYPE_PNG,
CONTENT_TYPE_TEXT,
REQUEST_FROM,
AddonBoot,
AddonState,
)
from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats
from ..exceptions import (
APIAddonNotInstalled,
APIError,
APIForbidden,
PwnedError,
PwnedSecret,
)
from ..exceptions import APIError, APIForbidden, PwnedError, PwnedSecret
from ..utils.pwned import check_pwned_password
from ..validate import docker_ports
from .const import ATTR_REMOVE_CONFIG, ATTR_SIGNED
from .utils import api_process, api_validate, json_loads
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): str})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema(
@@ -119,26 +118,22 @@ SCHEMA_OPTIONS = vol.Schema(
vol.Optional(ATTR_BOOT): vol.Coerce(AddonBoot),
vol.Optional(ATTR_NETWORK): vol.Maybe(docker_ports),
vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
}
)
# pylint: disable=no-value-for-parameter
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
SCHEMA_UNINSTALL = vol.Schema(
{vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
)
# pylint: enable=no-value-for-parameter
class APIAddons(CoreSysAttributes):
"""Handle RESTful API for add-on functions."""
def get_addon_for_request(self, request: web.Request) -> Addon:
"""Return addon, throw an exception if it doesn't exist."""
def _extract_addon(self, request: web.Request) -> AnyAddon:
"""Return addon, throw an exception it it doesn't exist."""
addon_slug: str = request.match_info.get("addon")
# Lookup itself
@@ -151,13 +146,17 @@ class APIAddons(CoreSysAttributes):
addon = self.sys_addons.get(addon_slug)
if not addon:
raise APIError(f"Addon {addon_slug} does not exist")
if not isinstance(addon, Addon) or not addon.is_installed:
raise APIAddonNotInstalled("Addon is not installed")
return addon
def _extract_addon_installed(self, request: web.Request) -> Addon:
addon = self._extract_addon(request)
if not isinstance(addon, Addon) or not addon.is_installed:
raise APIError("Addon is not installed")
return addon
@api_process
async def list(self, request: web.Request) -> dict[str, Any]:
async def list(self, request: web.Request) -> Dict[str, Any]:
"""Return all add-ons or repositories."""
data_addons = [
{
@@ -166,32 +165,45 @@ class APIAddons(CoreSysAttributes):
ATTR_DESCRIPTON: addon.description,
ATTR_ADVANCED: addon.advanced,
ATTR_STAGE: addon.stage,
ATTR_VERSION: addon.version,
ATTR_VERSION: addon.version if addon.is_installed else None,
ATTR_VERSION_LATEST: addon.latest_version,
ATTR_UPDATE_AVAILABLE: addon.need_update,
ATTR_UPDATE_AVAILABLE: addon.need_update
if addon.is_installed
else False,
ATTR_INSTALLED: addon.is_installed,
ATTR_AVAILABLE: addon.available,
ATTR_DETACHED: addon.is_detached,
ATTR_HOMEASSISTANT: addon.homeassistant_version,
ATTR_STATE: addon.state,
ATTR_REPOSITORY: addon.repository,
ATTR_BUILD: addon.need_build,
ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
}
for addon in self.sys_addons.installed
for addon in self.sys_addons.all
]
return {ATTR_ADDONS: data_addons}
data_repositories = [
{
ATTR_SLUG: repository.slug,
ATTR_NAME: repository.name,
ATTR_SOURCE: repository.source,
ATTR_URL: repository.url,
ATTR_MAINTAINER: repository.maintainer,
}
for repository in self.sys_store.all
]
return {ATTR_ADDONS: data_addons, ATTR_REPOSITORIES: data_repositories}
@api_process
async def reload(self, request: web.Request) -> None:
"""Reload all add-on data from store."""
await asyncio.shield(self.sys_store.reload())
async def info(self, request: web.Request) -> dict[str, Any]:
@api_process
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return add-on information."""
addon: AnyAddon = self.get_addon_for_request(request)
addon: AnyAddon = self._extract_addon(request)
data = {
ATTR_NAME: addon.name,
@@ -202,8 +214,11 @@ class APIAddons(CoreSysAttributes):
ATTR_LONG_DESCRIPTION: addon.long_description,
ATTR_ADVANCED: addon.advanced,
ATTR_STAGE: addon.stage,
ATTR_AUTO_UPDATE: None,
ATTR_REPOSITORY: addon.repository,
ATTR_VERSION: None,
ATTR_VERSION_LATEST: addon.latest_version,
ATTR_UPDATE_AVAILABLE: False,
ATTR_PROTECTED: addon.protected,
ATTR_RATING: rating_security(addon),
ATTR_BOOT: addon.boot,
@@ -213,6 +228,7 @@ class APIAddons(CoreSysAttributes):
ATTR_MACHINE: addon.supported_machine,
ATTR_HOMEASSISTANT: addon.homeassistant_version,
ATTR_URL: addon.url,
ATTR_STATE: AddonState.UNKNOWN,
ATTR_DETACHED: addon.is_detached,
ATTR_AVAILABLE: addon.available,
ATTR_BUILD: addon.need_build,
@@ -221,16 +237,17 @@ class APIAddons(CoreSysAttributes):
ATTR_HOST_NETWORK: addon.host_network,
ATTR_HOST_PID: addon.host_pid,
ATTR_HOST_IPC: addon.host_ipc,
ATTR_HOST_UTS: addon.host_uts,
ATTR_HOST_DBUS: addon.host_dbus,
ATTR_PRIVILEGED: addon.privileged,
ATTR_FULL_ACCESS: addon.with_full_access,
ATTR_APPARMOR: addon.apparmor,
ATTR_DEVICES: addon.static_devices,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
ATTR_CHANGELOG: addon.with_changelog,
ATTR_DOCUMENTATION: addon.with_documentation,
ATTR_STDIN: addon.with_stdin,
ATTR_WEBUI: None,
ATTR_HASSIO_API: addon.access_hassio_api,
ATTR_HASSIO_ROLE: addon.hassio_role,
ATTR_AUTH_API: addon.access_auth_api,
@@ -244,42 +261,55 @@ class APIAddons(CoreSysAttributes):
ATTR_DOCKER_API: addon.access_docker_api,
ATTR_VIDEO: addon.with_video,
ATTR_AUDIO: addon.with_audio,
ATTR_AUDIO_INPUT: None,
ATTR_AUDIO_OUTPUT: None,
ATTR_STARTUP: addon.startup,
ATTR_SERVICES: _pretty_services(addon),
ATTR_DISCOVERY: addon.discovery,
ATTR_IP_ADDRESS: None,
ATTR_TRANSLATIONS: addon.translations,
ATTR_INGRESS: addon.with_ingress,
ATTR_SIGNED: addon.signed,
ATTR_STATE: addon.state,
ATTR_WEBUI: addon.webui,
ATTR_INGRESS_ENTRY: addon.ingress_entry,
ATTR_INGRESS_URL: addon.ingress_url,
ATTR_INGRESS_PORT: addon.ingress_port,
ATTR_INGRESS_PANEL: addon.ingress_panel,
ATTR_AUDIO_INPUT: addon.audio_input,
ATTR_AUDIO_OUTPUT: addon.audio_output,
ATTR_AUTO_UPDATE: addon.auto_update,
ATTR_IP_ADDRESS: str(addon.ip_address),
ATTR_VERSION: addon.version,
ATTR_UPDATE_AVAILABLE: addon.need_update,
ATTR_WATCHDOG: addon.watchdog,
ATTR_DEVICES: addon.static_devices
+ [device.path for device in addon.devices],
ATTR_INGRESS_ENTRY: None,
ATTR_INGRESS_URL: None,
ATTR_INGRESS_PORT: None,
ATTR_INGRESS_PANEL: None,
ATTR_WATCHDOG: None,
}
if isinstance(addon, Addon) and addon.is_installed:
data.update(
{
ATTR_STATE: addon.state,
ATTR_WEBUI: addon.webui,
ATTR_INGRESS_ENTRY: addon.ingress_entry,
ATTR_INGRESS_URL: addon.ingress_url,
ATTR_INGRESS_PORT: addon.ingress_port,
ATTR_INGRESS_PANEL: addon.ingress_panel,
ATTR_AUDIO_INPUT: addon.audio_input,
ATTR_AUDIO_OUTPUT: addon.audio_output,
ATTR_AUTO_UPDATE: addon.auto_update,
ATTR_IP_ADDRESS: str(addon.ip_address),
ATTR_VERSION: addon.version,
ATTR_UPDATE_AVAILABLE: addon.need_update,
ATTR_WATCHDOG: addon.watchdog,
ATTR_DEVICES: addon.static_devices
+ [device.path for device in addon.devices],
}
)
return data
@api_process
async def options(self, request: web.Request) -> None:
"""Store user options for add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon_installed(request)
# Update secrets for validation
await self.sys_homeassistant.secrets.reload()
# Extend schema with add-on specific validation
addon_schema = SCHEMA_OPTIONS.extend(
{vol.Optional(ATTR_OPTIONS): vol.Maybe(addon.schema)}
{vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema)}
)
# Validate/Process Body
@@ -307,39 +337,31 @@ class APIAddons(CoreSysAttributes):
@api_process
async def options_validate(self, request: web.Request) -> None:
"""Validate user options for add-on."""
addon = self.get_addon_for_request(request)
data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}
options = await request.json(loads=json_loads) or addon.options
addon = self._extract_addon_installed(request)
data = {ATTR_MESSAGE: "", ATTR_VALID: True}
# Validate config
options_schema = addon.schema
try:
options_schema.validate(options)
addon.schema(addon.options)
except vol.Invalid as ex:
data[ATTR_MESSAGE] = humanize_error(options, ex)
data[ATTR_MESSAGE] = humanize_error(addon.options, ex)
data[ATTR_VALID] = False
if not self.sys_security.pwned:
return data
# Validate security
if self.sys_config.force_security:
for secret in addon.pwned:
try:
await check_pwned_password(self.sys_websession, secret)
continue
except PwnedSecret:
data[ATTR_MESSAGE] = "Add-on use pwned secrets!"
except PwnedError as err:
data[
ATTR_MESSAGE
] = f"Error happening on pwned secrets check: {err!s}!"
# Pwned check
for secret in options_schema.pwned:
try:
await self.sys_security.verify_secret(secret)
continue
except PwnedSecret:
data[ATTR_PWNED] = True
except PwnedError:
data[ATTR_PWNED] = None
break
if self.sys_security.force and data[ATTR_PWNED] in (None, True):
data[ATTR_VALID] = False
if data[ATTR_PWNED] is None:
data[ATTR_MESSAGE] = "Error happening on pwned secrets check!"
else:
data[ATTR_MESSAGE] = "Add-on uses pwned secrets!"
data[ATTR_VALID] = False
break
return data
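Aside: both implementations check option secrets against the Pwned Passwords data set; the underlying k-anonymity idea, sketched without the HTTP call (the hashing step is standard SHA-1 range splitting):

import hashlib

def pwned_range_parts(secret: str) -> tuple:
    """Hash the secret and split it for a k-anonymity range query:
    only the 5-character prefix ever needs to leave the machine."""
    digest = hashlib.sha1(secret.encode()).hexdigest().upper()
    return digest[:5], digest[5:]

prefix, suffix = pwned_range_parts("hunter2")
# The remote service is asked for all hashes starting with `prefix`;
# the secret counts as pwned if `suffix` appears in the response.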
@@ -349,20 +371,18 @@ class APIAddons(CoreSysAttributes):
slug: str = request.match_info.get("addon")
if slug != "self":
raise APIForbidden("This can be only read by the Add-on itself!")
addon = self.get_addon_for_request(request)
# Lookup/reload secrets
await self.sys_homeassistant.secrets.reload()
addon = self._extract_addon_installed(request)
try:
return addon.schema.validate(addon.options)
return addon.schema(addon.options)
except vol.Invalid:
raise APIError("Invalid configuration data for the add-on") from None
@api_process
async def security(self, request: web.Request) -> None:
"""Store security options for add-on."""
addon = self.get_addon_for_request(request)
body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)
addon = self._extract_addon_installed(request)
body: Dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)
if ATTR_PROTECTED in body:
_LOGGER.warning("Changing protected flag for %s!", addon.slug)
@@ -371,9 +391,9 @@ class APIAddons(CoreSysAttributes):
addon.save_persist()
@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
async def stats(self, request: web.Request) -> Dict[str, Any]:
"""Return resource information."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon_installed(request)
stats: DockerStats = await addon.stats()
@@ -389,47 +409,85 @@ class APIAddons(CoreSysAttributes):
}
@api_process
async def uninstall(self, request: web.Request) -> Awaitable[None]:
def uninstall(self, request: web.Request) -> Awaitable[None]:
"""Uninstall add-on."""
addon = self.get_addon_for_request(request)
body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request)
return await asyncio.shield(
self.sys_addons.uninstall(
addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
)
)
addon = self._extract_addon_installed(request)
return asyncio.shield(addon.uninstall())
@api_process
async def start(self, request: web.Request) -> None:
def start(self, request: web.Request) -> Awaitable[None]:
"""Start add-on."""
addon = self.get_addon_for_request(request)
if start_task := await asyncio.shield(addon.start()):
await start_task
addon = self._extract_addon_installed(request)
return asyncio.shield(addon.start())
@api_process
def stop(self, request: web.Request) -> Awaitable[None]:
"""Stop add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon_installed(request)
return asyncio.shield(addon.stop())
@api_process
async def restart(self, request: web.Request) -> None:
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart add-on."""
addon: Addon = self.get_addon_for_request(request)
if start_task := await asyncio.shield(addon.restart()):
await start_task
addon: Addon = self._extract_addon_installed(request)
return asyncio.shield(addon.restart())
@api_process
async def rebuild(self, request: web.Request) -> None:
def rebuild(self, request: web.Request) -> Awaitable[None]:
"""Rebuild local build add-on."""
addon = self.get_addon_for_request(request)
if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)):
await start_task
addon = self._extract_addon_installed(request)
return asyncio.shield(addon.rebuild())
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return logs from add-on."""
addon = self._extract_addon_installed(request)
return addon.logs()
@api_process_raw(CONTENT_TYPE_PNG)
async def icon(self, request: web.Request) -> bytes:
"""Return icon from add-on."""
addon = self._extract_addon(request)
if not addon.with_icon:
raise APIError(f"No icon found for add-on {addon.slug}!")
with addon.path_icon.open("rb") as png:
return png.read()
@api_process_raw(CONTENT_TYPE_PNG)
async def logo(self, request: web.Request) -> bytes:
"""Return logo from add-on."""
addon = self._extract_addon(request)
if not addon.with_logo:
raise APIError(f"No logo found for add-on {addon.slug}!")
with addon.path_logo.open("rb") as png:
return png.read()
@api_process_raw(CONTENT_TYPE_TEXT)
async def changelog(self, request: web.Request) -> str:
"""Return changelog from add-on."""
addon = self._extract_addon(request)
if not addon.with_changelog:
raise APIError(f"No changelog found for add-on {addon.slug}!")
with addon.path_changelog.open("r") as changelog:
return changelog.read()
@api_process_raw(CONTENT_TYPE_TEXT)
async def documentation(self, request: web.Request) -> str:
"""Return documentation from add-on."""
addon = self._extract_addon(request)
if not addon.with_documentation:
raise APIError(f"No documentation found for add-on {addon.slug}!")
with addon.path_documentation.open("r") as documentation:
return documentation.read()
@api_process
async def stdin(self, request: web.Request) -> None:
"""Write to stdin of add-on."""
addon = self.get_addon_for_request(request)
addon = self._extract_addon_installed(request)
if not addon.with_stdin:
raise APIError(f"STDIN not supported the {addon.slug} add-on")
@@ -437,6 +495,6 @@ class APIAddons(CoreSysAttributes):
await asyncio.shield(addon.write_stdin(data))
def _pretty_services(addon: Addon) -> list[str]:
def _pretty_services(addon: AnyAddon) -> List[str]:
"""Return a simplified services role list."""
return [f"{name}:{access}" for name, access in addon.services_role.items()]

View File

@@ -1,11 +1,10 @@
"""Init file for Supervisor Audio RESTful API."""
import asyncio
from collections.abc import Awaitable
from dataclasses import asdict
import logging
from typing import Any
from typing import Any, Awaitable, Dict
from aiohttp import web
import attr
import voluptuous as vol
from ..const import (
@@ -30,12 +29,13 @@ from ..const import (
ATTR_VERSION,
ATTR_VERSION_LATEST,
ATTR_VOLUME,
CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..host.sound import StreamType
from ..validate import version_tag
from .utils import api_process, api_validate
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -56,10 +56,10 @@ SCHEMA_MUTE = vol.Schema(
}
)
SCHEMA_DEFAULT = vol.Schema({vol.Required(ATTR_NAME): str})
SCHEMA_DEFAULT = vol.Schema({vol.Required(ATTR_NAME): vol.Coerce(str)})
SCHEMA_PROFILE = vol.Schema(
{vol.Required(ATTR_CARD): str, vol.Required(ATTR_NAME): str}
{vol.Required(ATTR_CARD): vol.Coerce(str), vol.Required(ATTR_NAME): vol.Coerce(str)}
)
@@ -67,7 +67,7 @@ class APIAudio(CoreSysAttributes):
"""Handle RESTful API for Audio functions."""
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return Audio information."""
return {
ATTR_VERSION: self.sys_plugins.audio.version,
@@ -75,17 +75,21 @@ class APIAudio(CoreSysAttributes):
ATTR_UPDATE_AVAILABLE: self.sys_plugins.audio.need_update,
ATTR_HOST: str(self.sys_docker.network.audio),
ATTR_AUDIO: {
ATTR_CARD: [asdict(card) for card in self.sys_host.sound.cards],
ATTR_INPUT: [asdict(stream) for stream in self.sys_host.sound.inputs],
ATTR_OUTPUT: [asdict(stream) for stream in self.sys_host.sound.outputs],
ATTR_CARD: [attr.asdict(card) for card in self.sys_host.sound.cards],
ATTR_INPUT: [
attr.asdict(stream) for stream in self.sys_host.sound.inputs
],
ATTR_OUTPUT: [
attr.asdict(stream) for stream in self.sys_host.sound.outputs
],
ATTR_APPLICATION: [
asdict(stream) for stream in self.sys_host.sound.applications
attr.asdict(stream) for stream in self.sys_host.sound.applications
],
},
}
@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
async def stats(self, request: web.Request) -> Dict[str, Any]:
"""Return resource information."""
stats = await self.sys_plugins.audio.stats()
@@ -110,6 +114,11 @@ class APIAudio(CoreSysAttributes):
raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.audio.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Audio Docker logs."""
return self.sys_plugins.audio.logs()
@api_process
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Audio plugin."""

View File

@@ -1,7 +1,7 @@
"""Init file for Supervisor auth/SSO RESTful API."""
import asyncio
import logging
from typing import Any
from typing import Dict
from aiohttp import BasicAuth, web
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
@@ -9,31 +9,27 @@ from aiohttp.web_exceptions import HTTPUnauthorized
import voluptuous as vol
from ..addons.addon import Addon
from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
from ..utils.json import json_loads
from .const import (
ATTR_GROUP_IDS,
ATTR_IS_ACTIVE,
ATTR_IS_OWNER,
ATTR_LOCAL_ONLY,
ATTR_USERS,
from ..const import (
ATTR_PASSWORD,
ATTR_USERNAME,
CONTENT_TYPE_JSON,
CONTENT_TYPE_URL,
REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
SCHEMA_PASSWORD_RESET = vol.Schema(
{
vol.Required(ATTR_USERNAME): str,
vol.Required(ATTR_PASSWORD): str,
vol.Required(ATTR_USERNAME): vol.Coerce(str),
vol.Required(ATTR_PASSWORD): vol.Coerce(str),
}
)
REALM_HEADER: dict[str, str] = {
REALM_HEADER: Dict[str, str] = {
WWW_AUTHENTICATE: 'Basic realm="Home Assistant Authentication"'
}
@@ -50,7 +46,7 @@ class APIAuth(CoreSysAttributes):
return self.sys_auth.check_login(addon, auth.login, auth.password)
def _process_dict(
self, request: web.Request, addon: Addon, data: dict[str, str]
self, request: web.Request, addon: Addon, data: Dict[str, str]
) -> bool:
"""Process login with dict data.
@@ -77,7 +73,7 @@ class APIAuth(CoreSysAttributes):
# Json
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
data = await request.json(loads=json_loads)
data = await request.json()
return await self._process_dict(request, addon, data)
# URL encoded
@@ -90,7 +86,7 @@ class APIAuth(CoreSysAttributes):
@api_process
async def reset(self, request: web.Request) -> None:
"""Process reset password request."""
body: dict[str, str] = await api_validate(SCHEMA_PASSWORD_RESET, request)
body: Dict[str, str] = await api_validate(SCHEMA_PASSWORD_RESET, request)
await asyncio.shield(
self.sys_auth.change_password(body[ATTR_USERNAME], body[ATTR_PASSWORD])
)
@@ -99,21 +95,3 @@ class APIAuth(CoreSysAttributes):
async def cache(self, request: web.Request) -> None:
"""Process cache reset request."""
self.sys_auth.reset_data()
@api_process
async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
"""List users on the Home Assistant instance."""
return {
ATTR_USERS: [
{
ATTR_USERNAME: user[ATTR_USERNAME],
ATTR_NAME: user[ATTR_NAME],
ATTR_IS_OWNER: user[ATTR_IS_OWNER],
ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE],
ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY],
ATTR_GROUP_IDS: user[ATTR_GROUP_IDS],
}
for user in await self.sys_auth.list_users()
if user[ATTR_USERNAME]
]
}

View File

@@ -1,378 +0,0 @@
"""Backups RESTful API."""
import asyncio
from collections.abc import Callable
import errno
import logging
from pathlib import Path
import re
from tempfile import TemporaryDirectory
from typing import Any
from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol
from ..backups.backup import Backup
from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
from ..const import (
ATTR_ADDONS,
ATTR_BACKUPS,
ATTR_COMPRESSED,
ATTR_CONTENT,
ATTR_DATE,
ATTR_DAYS_UNTIL_STALE,
ATTR_FOLDERS,
ATTR_HOMEASSISTANT,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
ATTR_LOCATON,
ATTR_NAME,
ATTR_PASSWORD,
ATTR_PROTECTED,
ATTR_REPOSITORIES,
ATTR_SIZE,
ATTR_SLUG,
ATTR_SUPERVISOR_VERSION,
ATTR_TIMEOUT,
ATTR_TYPE,
ATTR_VERSION,
BusEvent,
CoreState,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..jobs import JobSchedulerOptions
from ..mounts.const import MountUsage
from ..resolution.const import UnhealthyReason
from .const import ATTR_BACKGROUND, ATTR_JOB_ID, CONTENT_TYPE_TAR
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
# Backwards compatible
# Remove: 2022.08
_ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]
# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_FULL = vol.Schema(
{
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
}
)
SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
{
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
}
)
SCHEMA_BACKUP_FULL = vol.Schema(
{
vol.Optional(ATTR_NAME): str,
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
vol.Optional(ATTR_LOCATON): vol.Maybe(str),
vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
}
)
SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
{
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
}
)
SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale,
}
)
SCHEMA_FREEZE = vol.Schema(
{
vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)),
}
)
class APIBackups(CoreSysAttributes):
"""Handle RESTful API for backups functions."""
def _extract_slug(self, request):
"""Return backup, throw an exception if it doesn't exist."""
backup = self.sys_backups.get(request.match_info.get("slug"))
if not backup:
raise APIError("Backup does not exist")
return backup
def _list_backups(self):
"""Return list of backups."""
return [
{
ATTR_SLUG: backup.slug,
ATTR_NAME: backup.name,
ATTR_DATE: backup.date,
ATTR_TYPE: backup.sys_type,
ATTR_SIZE: backup.size,
ATTR_LOCATON: backup.location,
ATTR_PROTECTED: backup.protected,
ATTR_COMPRESSED: backup.compressed,
ATTR_CONTENT: {
ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
ATTR_ADDONS: backup.addon_list,
ATTR_FOLDERS: backup.folders,
},
}
for backup in self.sys_backups.list_backups
]
@api_process
async def list(self, request):
"""Return backup list."""
data_backups = self._list_backups()
if request.path == "/snapshots":
# Kept for backwards compatibility
return {"snapshots": data_backups}
return {ATTR_BACKUPS: data_backups}
@api_process
async def info(self, request):
"""Return backup list and manager info."""
return {
ATTR_BACKUPS: self._list_backups(),
ATTR_DAYS_UNTIL_STALE: self.sys_backups.days_until_stale,
}
@api_process
async def options(self, request):
"""Set backup manager options."""
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_DAYS_UNTIL_STALE in body:
self.sys_backups.days_until_stale = body[ATTR_DAYS_UNTIL_STALE]
self.sys_backups.save_data()
@api_process
async def reload(self, _):
"""Reload backup list."""
await asyncio.shield(self.sys_backups.reload())
return True
@api_process
async def backup_info(self, request):
"""Return backup info."""
backup = self._extract_slug(request)
data_addons = []
for addon_data in backup.addons:
data_addons.append(
{
ATTR_SLUG: addon_data[ATTR_SLUG],
ATTR_NAME: addon_data[ATTR_NAME],
ATTR_VERSION: addon_data[ATTR_VERSION],
ATTR_SIZE: addon_data[ATTR_SIZE],
}
)
return {
ATTR_SLUG: backup.slug,
ATTR_TYPE: backup.sys_type,
ATTR_NAME: backup.name,
ATTR_DATE: backup.date,
ATTR_SIZE: backup.size,
ATTR_COMPRESSED: backup.compressed,
ATTR_PROTECTED: backup.protected,
ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
ATTR_HOMEASSISTANT: backup.homeassistant_version,
ATTR_LOCATON: backup.location,
ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: backup.repositories,
ATTR_FOLDERS: backup.folders,
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
}
def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
"""Change location field to mount if necessary."""
if not body.get(ATTR_LOCATON):
return body
body[ATTR_LOCATON] = self.sys_mounts.get(body[ATTR_LOCATON])
if body[ATTR_LOCATON].usage != MountUsage.BACKUP:
raise APIError(
f"Mount {body[ATTR_LOCATON].name} is not used for backups, cannot backup to there"
)
return body
async def _background_backup_task(
self, backup_method: Callable, *args, **kwargs
) -> tuple[asyncio.Task, str]:
"""Start backup task in background and return task and job ID."""
event = asyncio.Event()
job, backup_task = self.sys_jobs.schedule_job(
backup_method, JobSchedulerOptions(), *args, **kwargs
)
async def release_on_freeze(new_state: CoreState):
if new_state == CoreState.FREEZE:
event.set()
# Wait for system to get into freeze state before returning
# If the backup fails validation it will raise before getting there
listener = self.sys_bus.register_event(
BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
)
try:
await asyncio.wait(
(
backup_task,
self.sys_create_task(event.wait()),
),
return_when=asyncio.FIRST_COMPLETED,
)
return (backup_task, job.uuid)
finally:
self.sys_bus.remove_listener(listener)
@api_process
async def backup_full(self, request):
"""Create full backup."""
body = await api_validate(SCHEMA_BACKUP_FULL, request)
background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_full, **self._location_to_mount(body)
)
if background and not backup_task.done():
return {ATTR_JOB_ID: job_id}
backup: Backup = await backup_task
if backup:
return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
raise APIError(
f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
@api_process
async def backup_partial(self, request):
"""Create a partial backup."""
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
background = body.pop(ATTR_BACKGROUND)
backup_task, job_id = await self._background_backup_task(
self.sys_backups.do_backup_partial, **self._location_to_mount(body)
)
if background and not backup_task.done():
return {ATTR_JOB_ID: job_id}
backup: Backup = await backup_task
if backup:
return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
raise APIError(
f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
@api_process
async def restore_full(self, request):
"""Full restore of a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_FULL, request)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_full, backup, **body
)
if background and not restore_task.done() or await restore_task:
return {ATTR_JOB_ID: job_id}
raise APIError(
f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
@api_process
async def restore_partial(self, request):
"""Partial restore a backup."""
backup = self._extract_slug(request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
background = body.pop(ATTR_BACKGROUND)
restore_task, job_id = await self._background_backup_task(
self.sys_backups.do_restore_partial, backup, **body
)
if background and not restore_task.done() or await restore_task:
return {ATTR_JOB_ID: job_id}
raise APIError(
f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
job_id=job_id,
)
@api_process
async def freeze(self, request):
"""Initiate manual freeze for external backup."""
body = await api_validate(SCHEMA_FREEZE, request)
await asyncio.shield(self.sys_backups.freeze_all(**body))
@api_process
async def thaw(self, request):
"""Begin thaw after manual freeze."""
await self.sys_backups.thaw_all()
@api_process
async def remove(self, request):
"""Remove a backup."""
backup = self._extract_slug(request)
return self.sys_backups.remove(backup)
async def download(self, request):
"""Download a backup file."""
backup = self._extract_slug(request)
_LOGGER.info("Downloading backup %s", backup.slug)
response = web.FileResponse(backup.tarfile)
response.content_type = CONTENT_TYPE_TAR
response.headers[
CONTENT_DISPOSITION
] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
return response
@api_process
async def upload(self, request):
"""Upload a backup file."""
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
tar_file = Path(temp_dir, "backup.tar")
reader = await request.multipart()
contents = await reader.next()
try:
with tar_file.open("wb") as backup:
while True:
chunk = await contents.read_chunk()
if not chunk:
break
backup.write(chunk)
except OSError as err:
if err.errno == errno.EBADMSG:
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
_LOGGER.error("Can't write new backup file: %s", err)
return False
except asyncio.CancelledError:
return False
backup = await asyncio.shield(self.sys_backups.import_backup(tar_file))
if backup:
return {ATTR_SLUG: backup.slug}
return False
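A minimal client-side sketch of the background flow implemented above, assuming the usual Supervisor endpoint paths ("/backups/new/full", "/jobs/<uuid>"), the standard {"data": ...} response envelope, and an add-on style bearer token; the names, URLs and fields here are illustrative assumptions, not taken from this diff:

import asyncio
import aiohttp

SUPERVISOR = "http://supervisor"  # assumed base URL when running inside an add-on
HEADERS = {"Authorization": "Bearer <SUPERVISOR_TOKEN>"}  # placeholder token

async def backup_in_background() -> None:
    async with aiohttp.ClientSession(headers=HEADERS) as session:
        # background=True makes the API return once the freeze has started
        async with session.post(
            f"{SUPERVISOR}/backups/new/full",
            json={"name": "nightly", "background": True},
        ) as resp:
            job_id = (await resp.json())["data"]["job_id"]
        # Poll the job until it reports done (field name assumed)
        while True:
            async with session.get(f"{SUPERVISOR}/jobs/{job_id}") as resp:
                if (await resp.json())["data"].get("done"):
                    break
            await asyncio.sleep(5)

asyncio.run(backup_in_background())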


@@ -1,7 +1,7 @@
"""Init file for Supervisor HA cli RESTful API."""
import asyncio
import logging
from typing import Any
from typing import Any, Dict
from aiohttp import web
import voluptuous as vol
@@ -32,7 +32,7 @@ class APICli(CoreSysAttributes):
"""Handle RESTful API for HA Cli functions."""
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return HA cli information."""
return {
ATTR_VERSION: self.sys_plugins.cli.version,
@@ -41,7 +41,7 @@ class APICli(CoreSysAttributes):
}
@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
async def stats(self, request: web.Request) -> Dict[str, Any]:
"""Return resource information."""
stats = await self.sys_plugins.cli.stats()


@@ -1,77 +0,0 @@
"""Const for API."""
from enum import StrEnum
CONTENT_TYPE_BINARY = "application/octet-stream"
CONTENT_TYPE_JSON = "application/json"
CONTENT_TYPE_PNG = "image/png"
CONTENT_TYPE_TAR = "application/tar"
CONTENT_TYPE_TEXT = "text/plain"
CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
CONTENT_TYPE_X_LOG = "text/x-log"
COOKIE_INGRESS = "ingress_session"
ATTR_AGENT_VERSION = "agent_version"
ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_ATTRIBUTES = "attributes"
ATTR_AVAILABLE_UPDATES = "available_updates"
ATTR_BACKGROUND = "background"
ATTR_BOOT_SLOT = "boot_slot"
ATTR_BOOT_SLOTS = "boot_slots"
ATTR_BOOT_TIMESTAMP = "boot_timestamp"
ATTR_BOOTS = "boots"
ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
ATTR_BROADCAST_MDNS = "broadcast_mdns"
ATTR_BY_ID = "by_id"
ATTR_CHILDREN = "children"
ATTR_CONNECTION_BUS = "connection_bus"
ATTR_DATA_DISK = "data_disk"
ATTR_DEVICE = "device"
ATTR_DEV_PATH = "dev_path"
ATTR_DISKS = "disks"
ATTR_DRIVES = "drives"
ATTR_DT_SYNCHRONIZED = "dt_synchronized"
ATTR_DT_UTC = "dt_utc"
ATTR_EJECTABLE = "ejectable"
ATTR_FALLBACK = "fallback"
ATTR_FILESYSTEMS = "filesystems"
ATTR_GROUP_IDS = "group_ids"
ATTR_IDENTIFIERS = "identifiers"
ATTR_IS_ACTIVE = "is_active"
ATTR_IS_OWNER = "is_owner"
ATTR_JOB_ID = "job_id"
ATTR_JOBS = "jobs"
ATTR_LLMNR = "llmnr"
ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
ATTR_LOCAL_ONLY = "local_only"
ATTR_MDNS = "mdns"
ATTR_MODEL = "model"
ATTR_MOUNTS = "mounts"
ATTR_MOUNT_POINTS = "mount_points"
ATTR_PANEL_PATH = "panel_path"
ATTR_REMOVABLE = "removable"
ATTR_REMOVE_CONFIG = "remove_config"
ATTR_REVISION = "revision"
ATTR_SAFE_MODE = "safe_mode"
ATTR_SEAT = "seat"
ATTR_SIGNED = "signed"
ATTR_STARTUP_TIME = "startup_time"
ATTR_STATUS = "status"
ATTR_SUBSYSTEM = "subsystem"
ATTR_SYSFS = "sysfs"
ATTR_SYSTEM_HEALTH_LED = "system_health_led"
ATTR_TIME_DETECTED = "time_detected"
ATTR_UPDATE_TYPE = "update_type"
ATTR_USAGE = "usage"
ATTR_USE_NTP = "use_ntp"
ATTR_USERS = "users"
ATTR_VENDOR = "vendor"
ATTR_VIRTUALIZATION = "virtualization"
class BootSlot(StrEnum):
"""Boot slots used by HAOS."""
A = "A"
B = "B"


@@ -1,9 +1,6 @@
"""Init file for Supervisor network RESTful API."""
import logging
import voluptuous as vol
from ..addons.addon import Addon
from ..const import (
ATTR_ADDON,
ATTR_CONFIG,
@@ -12,18 +9,16 @@ from ..const import (
ATTR_SERVICES,
ATTR_UUID,
REQUEST_FROM,
AddonState,
)
from ..coresys import CoreSysAttributes
from ..discovery.validate import valid_discovery_service
from ..exceptions import APIError, APIForbidden
from .utils import api_process, api_validate, require_home_assistant
_LOGGER: logging.Logger = logging.getLogger(__name__)
SCHEMA_DISCOVERY = vol.Schema(
{
vol.Required(ATTR_SERVICE): str,
vol.Required(ATTR_CONFIG): dict,
vol.Required(ATTR_SERVICE): valid_discovery_service,
vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
}
)
@@ -41,19 +36,19 @@ class APIDiscovery(CoreSysAttributes):
@api_process
@require_home_assistant
async def list(self, request):
"""Show registered and available services."""
"""Show register services."""
# Get available discovery
discovery = [
{
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
}
for message in self.sys_discovery.list_messages
if (addon := self.sys_addons.get(message.addon, local_only=True))
and addon.state == AddonState.STARTED
]
discovery = []
for message in self.sys_discovery.list_messages:
discovery.append(
{
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
}
)
# Get available services/add-ons
services = {}
@@ -67,19 +62,11 @@ class APIDiscovery(CoreSysAttributes):
async def set_discovery(self, request):
"""Write data into a discovery pipeline."""
body = await api_validate(SCHEMA_DISCOVERY, request)
addon: Addon = request[REQUEST_FROM]
service = body[ATTR_SERVICE]
addon = request[REQUEST_FROM]
# Access?
if body[ATTR_SERVICE] not in addon.discovery:
_LOGGER.error(
"Add-on %s attempted to send discovery for service %s which is not listed in its config. Please report this to the maintainer of the add-on",
addon.name,
service,
)
raise APIForbidden(
"Add-ons must list services they provide via discovery in their config!"
)
raise APIForbidden("Can't use discovery!")
# Process discovery message
message = self.sys_discovery.send(addon, **body)


@@ -1,8 +1,7 @@
"""Init file for Supervisor DNS RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from typing import Any, Awaitable, Dict
from aiohttp import web
import voluptuous as vol
@@ -22,22 +21,17 @@ from ..const import (
ATTR_UPDATE_AVAILABLE,
ATTR_VERSION,
ATTR_VERSION_LATEST,
CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import dns_server_list, version_tag
from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS
from .utils import api_process, api_validate
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_SERVERS): dns_server_list,
vol.Optional(ATTR_FALLBACK): vol.Boolean(),
}
)
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): dns_server_list})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
@@ -46,7 +40,7 @@ class APICoreDNS(CoreSysAttributes):
"""Handle RESTful API for DNS functions."""
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return DNS information."""
return {
ATTR_VERSION: self.sys_plugins.dns.version,
@@ -55,32 +49,21 @@ class APICoreDNS(CoreSysAttributes):
ATTR_HOST: str(self.sys_docker.network.dns),
ATTR_SERVERS: self.sys_plugins.dns.servers,
ATTR_LOCALS: self.sys_plugins.dns.locals,
ATTR_MDNS: self.sys_plugins.dns.mdns,
ATTR_LLMNR: self.sys_plugins.dns.llmnr,
ATTR_FALLBACK: self.sys_plugins.dns.fallback,
}
@api_process
async def options(self, request: web.Request) -> None:
"""Set DNS options."""
body = await api_validate(SCHEMA_OPTIONS, request)
restart_required = False
if ATTR_SERVERS in body:
self.sys_plugins.dns.servers = body[ATTR_SERVERS]
restart_required = True
if ATTR_FALLBACK in body:
self.sys_plugins.dns.fallback = body[ATTR_FALLBACK]
restart_required = True
if restart_required:
self.sys_create_task(self.sys_plugins.dns.restart())
self.sys_plugins.dns.save_data()
@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
async def stats(self, request: web.Request) -> Dict[str, Any]:
"""Return resource information."""
stats = await self.sys_plugins.dns.stats()
@@ -105,6 +88,11 @@ class APICoreDNS(CoreSysAttributes):
raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.dns.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return DNS Docker logs."""
return self.sys_plugins.dns.logs()
@api_process
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart CoreDNS plugin."""


@@ -1,6 +1,6 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import logging
from typing import Any
from typing import Any, Dict
from aiohttp import web
import voluptuous as vol
@@ -21,7 +21,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
SCHEMA_DOCKER_REGISTRY = vol.Schema(
{
str: {
vol.Coerce(str): {
vol.Required(ATTR_USERNAME): str,
vol.Required(ATTR_PASSWORD): str,
}
@@ -33,7 +33,7 @@ class APIDocker(CoreSysAttributes):
"""Handle RESTful API for Docker configuration."""
@api_process
async def registries(self, request) -> dict[str, Any]:
async def registries(self, request) -> Dict[str, Any]:
"""Return the list of registries."""
data_registries = {}
for hostname, registry in self.sys_docker.config.registries.items():


@@ -1,52 +1,26 @@
"""Init file for Supervisor hardware RESTful API."""
import logging
from typing import Any
from typing import Any, Awaitable, Dict
from aiohttp import web
from ..const import (
ATTR_AUDIO,
ATTR_DEVICES,
ATTR_ID,
ATTR_INPUT,
ATTR_NAME,
ATTR_OUTPUT,
ATTR_SERIAL,
ATTR_SIZE,
ATTR_SYSTEM,
)
from ..const import ATTR_AUDIO, ATTR_DEVICES, ATTR_INPUT, ATTR_NAME, ATTR_OUTPUT
from ..coresys import CoreSysAttributes
from ..dbus.udisks2 import UDisks2Manager
from ..dbus.udisks2.block import UDisks2Block
from ..dbus.udisks2.drive import UDisks2Drive
from ..hardware.data import Device
from .const import (
from ..hardware.const import (
ATTR_ATTRIBUTES,
ATTR_BY_ID,
ATTR_CHILDREN,
ATTR_CONNECTION_BUS,
ATTR_DEV_PATH,
ATTR_DEVICE,
ATTR_DRIVES,
ATTR_EJECTABLE,
ATTR_FILESYSTEMS,
ATTR_MODEL,
ATTR_MOUNT_POINTS,
ATTR_REMOVABLE,
ATTR_REVISION,
ATTR_SEAT,
ATTR_SUBSYSTEM,
ATTR_SYSFS,
ATTR_TIME_DETECTED,
ATTR_VENDOR,
)
from ..hardware.data import Device
from .utils import api_process
_LOGGER: logging.Logger = logging.getLogger(__name__)
def device_struct(device: Device) -> dict[str, Any]:
"""Return a dict with information of a interface to be used in the API."""
def device_struct(device: Device) -> Dict[str, Any]:
"""Return a dict with information of a interface to be used in th API."""
return {
ATTR_NAME: device.name,
ATTR_SYSFS: device.sysfs,
@@ -54,43 +28,6 @@ def device_struct(device: Device) -> dict[str, Any]:
ATTR_SUBSYSTEM: device.subsystem,
ATTR_BY_ID: device.by_id,
ATTR_ATTRIBUTES: device.attributes,
ATTR_CHILDREN: device.children,
}
def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
"""Return a dict with information of a filesystem block device to be used in the API."""
return {
ATTR_DEVICE: str(fs_block.device),
ATTR_ID: fs_block.id,
ATTR_SIZE: fs_block.size,
ATTR_NAME: fs_block.id_label,
ATTR_SYSTEM: fs_block.hint_system,
ATTR_MOUNT_POINTS: [
str(mount_point) for mount_point in fs_block.filesystem.mount_points
],
}
def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]:
"""Return a dict with information of a disk to be used in the API."""
return {
ATTR_VENDOR: drive.vendor,
ATTR_MODEL: drive.model,
ATTR_REVISION: drive.revision,
ATTR_SERIAL: drive.serial,
ATTR_ID: drive.id,
ATTR_SIZE: drive.size,
ATTR_TIME_DETECTED: drive.time_detected.isoformat(),
ATTR_CONNECTION_BUS: drive.connection_bus,
ATTR_SEAT: drive.seat,
ATTR_REMOVABLE: drive.removable,
ATTR_EJECTABLE: drive.ejectable,
ATTR_FILESYSTEMS: [
filesystem_struct(block)
for block in udisks2.block_devices
if block.filesystem and block.drive == drive.object_path
],
}
@@ -98,20 +35,16 @@ class APIHardware(CoreSysAttributes):
"""Handle RESTful API for hardware functions."""
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Show hardware info."""
return {
ATTR_DEVICES: [
device_struct(device) for device in self.sys_hardware.devices
],
ATTR_DRIVES: [
drive_struct(self.sys_dbus.udisks2, drive)
for drive in self.sys_dbus.udisks2.drives
],
]
}
@api_process
async def audio(self, request: web.Request) -> dict[str, Any]:
async def audio(self, request: web.Request) -> Dict[str, Any]:
"""Show pulse audio profiles."""
return {
ATTR_AUDIO: {
@@ -125,3 +58,8 @@ class APIHardware(CoreSysAttributes):
},
}
}
@api_process
async def trigger(self, request: web.Request) -> Awaitable[None]:
"""Trigger a udev device reload."""
_LOGGER.debug("Ignoring DEPRECATED hardware trigger function call.")


@@ -1,8 +1,7 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from typing import Any, Awaitable, Dict
from aiohttp import web
import voluptuous as vol
@@ -11,8 +10,6 @@ from ..const import (
ATTR_ARCH,
ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT,
ATTR_BACKUP,
ATTR_BACKUPS_EXCLUDE_DATABASE,
ATTR_BLK_READ,
ATTR_BLK_WRITE,
ATTR_BOOT,
@@ -31,13 +28,14 @@ from ..const import (
ATTR_UPDATE_AVAILABLE,
ATTR_VERSION,
ATTR_VERSION_LATEST,
ATTR_WAIT_BOOT,
ATTR_WATCHDOG,
CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import docker_image, network_port, version_tag
from .const import ATTR_SAFE_MODE
from .utils import api_process, api_validate
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -45,36 +43,25 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_BOOT): vol.Boolean(),
vol.Optional(ATTR_IMAGE): vol.Maybe(docker_image),
vol.Optional(ATTR_IMAGE): docker_image,
vol.Optional(ATTR_PORT): network_port,
vol.Optional(ATTR_SSL): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str),
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
vol.Optional(ATTR_BACKUPS_EXCLUDE_DATABASE): vol.Boolean(),
vol.Optional(ATTR_WAIT_BOOT): vol.All(vol.Coerce(int), vol.Range(min=60)),
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
}
)
SCHEMA_UPDATE = vol.Schema(
{
vol.Optional(ATTR_VERSION): version_tag,
vol.Optional(ATTR_BACKUP): bool,
}
)
SCHEMA_RESTART = vol.Schema(
{
vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
}
)
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
class APIHomeAssistant(CoreSysAttributes):
"""Handle RESTful API for Home Assistant functions."""
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return host information."""
return {
ATTR_VERSION: self.sys_homeassistant.version,
@@ -88,9 +75,11 @@ class APIHomeAssistant(CoreSysAttributes):
ATTR_PORT: self.sys_homeassistant.api_port,
ATTR_SSL: self.sys_homeassistant.api_ssl,
ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input,
ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output,
ATTR_BACKUPS_EXCLUDE_DATABASE: self.sys_homeassistant.backups_exclude_database,
# Remove end of Q3 2020
"last_version": self.sys_homeassistant.latest_version,
}
@api_process
@@ -100,9 +89,6 @@ class APIHomeAssistant(CoreSysAttributes):
if ATTR_IMAGE in body:
self.sys_homeassistant.image = body[ATTR_IMAGE]
self.sys_homeassistant.override_image = (
self.sys_homeassistant.image != self.sys_homeassistant.default_image
)
if ATTR_BOOT in body:
self.sys_homeassistant.boot = body[ATTR_BOOT]
@@ -116,6 +102,9 @@ class APIHomeAssistant(CoreSysAttributes):
if ATTR_WATCHDOG in body:
self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]
if ATTR_WAIT_BOOT in body:
self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
if ATTR_REFRESH_TOKEN in body:
self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]
@@ -125,15 +114,10 @@ class APIHomeAssistant(CoreSysAttributes):
if ATTR_AUDIO_OUTPUT in body:
self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT]
if ATTR_BACKUPS_EXCLUDE_DATABASE in body:
self.sys_homeassistant.backups_exclude_database = body[
ATTR_BACKUPS_EXCLUDE_DATABASE
]
self.sys_homeassistant.save_data()
@api_process
async def stats(self, request: web.Request) -> dict[Any, str]:
async def stats(self, request: web.Request) -> Dict[Any, str]:
"""Return resource information."""
stats = await self.sys_homeassistant.core.stats()
if not stats:
@@ -153,14 +137,10 @@ class APIHomeAssistant(CoreSysAttributes):
@api_process
async def update(self, request: web.Request) -> None:
"""Update Home Assistant."""
body = await api_validate(SCHEMA_UPDATE, request)
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.sys_homeassistant.latest_version)
await asyncio.shield(
self.sys_homeassistant.core.update(
version=body.get(ATTR_VERSION, self.sys_homeassistant.latest_version),
backup=body.get(ATTR_BACKUP),
)
)
await asyncio.shield(self.sys_homeassistant.core.update(version))
@api_process
def stop(self, request: web.Request) -> Awaitable[None]:
@@ -173,22 +153,19 @@ class APIHomeAssistant(CoreSysAttributes):
return asyncio.shield(self.sys_homeassistant.core.start())
@api_process
async def restart(self, request: web.Request) -> None:
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request)
await asyncio.shield(
self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
)
return asyncio.shield(self.sys_homeassistant.core.restart())
@api_process
async def rebuild(self, request: web.Request) -> None:
def rebuild(self, request: web.Request) -> Awaitable[None]:
"""Rebuild Home Assistant."""
body = await api_validate(SCHEMA_RESTART, request)
return asyncio.shield(self.sys_homeassistant.core.rebuild())
await asyncio.shield(
self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE])
)
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Home Assistant Docker logs."""
return self.sys_homeassistant.core.logs()
@api_process
async def check(self, request: web.Request) -> None:


@@ -1,13 +1,9 @@
"""Init file for Supervisor host RESTful API."""
import asyncio
from contextlib import suppress
import logging
from typing import Awaitable
from aiohttp import web
from aiohttp.hdrs import ACCEPT, RANGE
import voluptuous as vol
from voluptuous.error import CoerceInvalid
from ..const import (
ATTR_CHASSIS,
@@ -25,44 +21,14 @@ from ..const import (
ATTR_OPERATING_SYSTEM,
ATTR_SERVICES,
ATTR_STATE,
ATTR_TIMEZONE,
CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, HostLogError
from ..host.const import (
PARAM_BOOT_ID,
PARAM_FOLLOW,
PARAM_SYSLOG_IDENTIFIER,
LogFormat,
LogFormatter,
)
from ..utils.systemd_journal import journal_logs_reader
from .const import (
ATTR_AGENT_VERSION,
ATTR_APPARMOR_VERSION,
ATTR_BOOT_TIMESTAMP,
ATTR_BOOTS,
ATTR_BROADCAST_LLMNR,
ATTR_BROADCAST_MDNS,
ATTR_DT_SYNCHRONIZED,
ATTR_DT_UTC,
ATTR_IDENTIFIERS,
ATTR_LLMNR_HOSTNAME,
ATTR_STARTUP_TIME,
ATTR_USE_NTP,
ATTR_VIRTUALIZATION,
CONTENT_TYPE_TEXT,
CONTENT_TYPE_X_LOG,
)
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
SERVICE = "service"
IDENTIFIER = "identifier"
BOOTID = "bootid"
DEFAULT_RANGE = 100
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): vol.Coerce(str)})
class APIHost(CoreSysAttributes):
@@ -72,10 +38,7 @@ class APIHost(CoreSysAttributes):
async def info(self, request):
"""Return host information."""
return {
ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
ATTR_CHASSIS: self.sys_host.info.chassis,
ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
ATTR_CPE: self.sys_host.info.cpe,
ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_DISK_FREE: self.sys_host.info.free_space,
@@ -84,17 +47,8 @@ class APIHost(CoreSysAttributes):
ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
ATTR_FEATURES: self.sys_host.features,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname,
ATTR_KERNEL: self.sys_host.info.kernel,
ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
ATTR_TIMEZONE: self.sys_host.info.timezone,
ATTR_DT_UTC: self.sys_host.info.dt_utc,
ATTR_DT_SYNCHRONIZED: self.sys_host.info.dt_synchronized,
ATTR_USE_NTP: self.sys_host.info.use_ntp,
ATTR_STARTUP_TIME: self.sys_host.info.startup_time,
ATTR_BOOT_TIMESTAMP: self.sys_host.info.boot_timestamp,
ATTR_BROADCAST_LLMNR: self.sys_host.info.broadcast_llmnr,
ATTR_BROADCAST_MDNS: self.sys_host.info.broadcast_mdns,
}
@api_process
@@ -121,7 +75,11 @@ class APIHost(CoreSysAttributes):
@api_process
def reload(self, request):
"""Reload host data."""
return asyncio.shield(self.sys_host.reload())
return asyncio.shield(
asyncio.wait(
[self.sys_host.reload(), self.sys_resolution.evaluate.evaluate_system()]
)
)
@api_process
async def services(self, request):
@@ -139,89 +97,30 @@ class APIHost(CoreSysAttributes):
return {ATTR_SERVICES: services}
@api_process
async def list_boots(self, _: web.Request):
"""Return a list of boot IDs."""
boot_ids = await self.sys_host.logs.get_boot_ids()
return {
ATTR_BOOTS: {
str(1 + i - len(boot_ids)): boot_id
for i, boot_id in enumerate(boot_ids)
}
}
def service_start(self, request):
"""Start a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.start(unit))
@api_process
async def list_identifiers(self, _: web.Request):
"""Return a list of syslog identifiers."""
return {ATTR_IDENTIFIERS: await self.sys_host.logs.get_identifiers()}
def service_stop(self, request):
"""Stop a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.stop(unit))
async def _get_boot_id(self, possible_offset: str) -> str:
"""Convert offset into boot ID if required."""
with suppress(CoerceInvalid):
offset = vol.Coerce(int)(possible_offset)
try:
return await self.sys_host.logs.get_boot_id(offset)
except (ValueError, HostLogError) as err:
raise APIError() from err
return possible_offset
@api_process
def service_reload(self, request):
"""Reload a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.reload(unit))
async def advanced_logs_handler(
self, request: web.Request, identifier: str | None = None, follow: bool = False
) -> web.StreamResponse:
"""Return systemd-journald logs."""
log_formatter = LogFormatter.PLAIN
params = {}
if identifier:
params[PARAM_SYSLOG_IDENTIFIER] = identifier
elif IDENTIFIER in request.match_info:
params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
else:
params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
# host logs should always be verbose, no matter what Accept header is used
log_formatter = LogFormatter.VERBOSE
@api_process
def service_restart(self, request):
"""Restart a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.restart(unit))
if BOOTID in request.match_info:
params[PARAM_BOOT_ID] = await self._get_boot_id(
request.match_info.get(BOOTID)
)
if follow:
params[PARAM_FOLLOW] = ""
if ACCEPT in request.headers and request.headers[ACCEPT] not in [
CONTENT_TYPE_TEXT,
CONTENT_TYPE_X_LOG,
"*/*",
]:
raise APIError(
"Invalid content type requested. Only text/plain and text/x-log "
"supported for now."
)
if request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
log_formatter = LogFormatter.VERBOSE
if RANGE in request.headers:
range_header = request.headers.get(RANGE)
else:
range_header = f"entries=:-{DEFAULT_RANGE}:"
async with self.sys_host.logs.journald_logs(
params=params, range_header=range_header, accept=LogFormat.JOURNAL
) as resp:
try:
response = web.StreamResponse()
response.content_type = CONTENT_TYPE_TEXT
await response.prepare(request)
async for line in journal_logs_reader(resp, log_formatter):
await response.write(line.encode("utf-8") + b"\n")
except ConnectionResetError as ex:
raise APIError(
"Connection reset when trying to fetch data from systemd-journald."
) from ex
return response
@api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
async def advanced_logs(
self, request: web.Request, identifier: str | None = None, follow: bool = False
) -> web.StreamResponse:
"""Return systemd-journald logs. Wrapped as standard API handler."""
return await self.advanced_logs_handler(request, identifier, follow)
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return host kernel logs."""
return self.sys_host.info.get_dmesg()

supervisor/api/info.py Normal file

@@ -0,0 +1,52 @@
"""Init file for Supervisor info RESTful API."""
import logging
from typing import Any, Dict
from aiohttp import web
from ..const import (
ATTR_ARCH,
ATTR_CHANNEL,
ATTR_DOCKER,
ATTR_FEATURES,
ATTR_HASSOS,
ATTR_HOMEASSISTANT,
ATTR_HOSTNAME,
ATTR_LOGGING,
ATTR_MACHINE,
ATTR_OPERATING_SYSTEM,
ATTR_STATE,
ATTR_SUPERVISOR,
ATTR_SUPPORTED,
ATTR_SUPPORTED_ARCH,
ATTR_TIMEZONE,
)
from ..coresys import CoreSysAttributes
from .utils import api_process
_LOGGER: logging.Logger = logging.getLogger(__name__)
class APIInfo(CoreSysAttributes):
"""Handle RESTful API for info functions."""
@api_process
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Show system info."""
return {
ATTR_SUPERVISOR: self.sys_supervisor.version,
ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
ATTR_HASSOS: self.sys_hassos.version,
ATTR_DOCKER: self.sys_docker.info.version,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
ATTR_FEATURES: self.sys_host.features,
ATTR_MACHINE: self.sys_machine,
ATTR_ARCH: self.sys_arch.default,
ATTR_STATE: self.sys_core.state,
ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
ATTR_SUPPORTED: self.sys_core.supported,
ATTR_CHANNEL: self.sys_updater.channel,
ATTR_LOGGING: self.sys_config.logging,
ATTR_TIMEZONE: self.sys_config.timezone,
}


@@ -2,10 +2,10 @@
import asyncio
from ipaddress import ip_address
import logging
from typing import Any
from typing import Any, Dict, Union
import aiohttp
from aiohttp import ClientTimeout, hdrs, web
from aiohttp import hdrs, web
from aiohttp.web_exceptions import (
HTTPBadGateway,
HTTPServiceUnavailable,
@@ -21,65 +21,22 @@ from ..const import (
ATTR_ICON,
ATTR_PANELS,
ATTR_SESSION,
ATTR_SESSION_DATA_USER_ID,
ATTR_TITLE,
HEADER_REMOTE_USER_DISPLAY_NAME,
HEADER_REMOTE_USER_ID,
HEADER_REMOTE_USER_NAME,
COOKIE_INGRESS,
HEADER_TOKEN,
HEADER_TOKEN_OLD,
IngressSessionData,
IngressSessionDataUser,
)
from ..coresys import CoreSysAttributes
from ..exceptions import HomeAssistantAPIError
from .const import COOKIE_INGRESS
from .utils import api_process, api_validate, require_home_assistant
_LOGGER: logging.Logger = logging.getLogger(__name__)
VALIDATE_SESSION_DATA = vol.Schema({ATTR_SESSION: str})
"""Expected optional payload of create session request"""
SCHEMA_INGRESS_CREATE_SESSION_DATA = vol.Schema(
{
vol.Optional(ATTR_SESSION_DATA_USER_ID): str,
}
)
# from https://github.com/aio-libs/aiohttp/blob/8ae650bee4add9f131d49b96a0a150311ea58cd1/aiohttp/helpers.py#L1059C1-L1079C1
def must_be_empty_body(method: str, code: int) -> bool:
"""Check if a request must return an empty body."""
return (
status_code_must_be_empty_body(code)
or method_must_be_empty_body(method)
or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
)
def method_must_be_empty_body(method: str) -> bool:
"""Check if a method must return an empty body."""
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
return method.upper() == hdrs.METH_HEAD
def status_code_must_be_empty_body(code: int) -> bool:
"""Check if a status code must return an empty body."""
# https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
return code in {204, 304} or 100 <= code < 200
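# Illustrative expectations for these helpers (assumed, not part of the diff):
#   must_be_empty_body("HEAD", 200)    -> True   (HEAD never carries a body)
#   must_be_empty_body("GET", 204)     -> True   (204/304/1xx responses are bodiless)
#   must_be_empty_body("CONNECT", 200) -> True   (successful CONNECT tunnels)
#   must_be_empty_body("GET", 200)     -> False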
class APIIngress(CoreSysAttributes):
"""Ingress view to handle add-on webui routing."""
_list_of_users: list[IngressSessionDataUser]
def __init__(self) -> None:
"""Initialize APIIngress."""
self._list_of_users = []
def _extract_addon(self, request: web.Request) -> Addon:
"""Return addon, throw an exception it it doesn't exist."""
token = request.match_info.get("token")
@@ -97,7 +54,7 @@ class APIIngress(CoreSysAttributes):
return f"http://{addon.ip_address}:{addon.ingress_port}/{path}"
@api_process
async def panels(self, request: web.Request) -> dict[str, Any]:
async def panels(self, request: web.Request) -> Dict[str, Any]:
"""Create a list of panel data."""
addons = {}
for addon in self.sys_ingress.addons:
@@ -112,26 +69,14 @@ class APIIngress(CoreSysAttributes):
@api_process
@require_home_assistant
async def create_session(self, request: web.Request) -> dict[str, Any]:
async def create_session(self, request: web.Request) -> Dict[str, Any]:
"""Create a new session."""
schema_ingress_config_session_data = await api_validate(
SCHEMA_INGRESS_CREATE_SESSION_DATA, request
)
data: IngressSessionData | None = None
if ATTR_SESSION_DATA_USER_ID in schema_ingress_config_session_data:
user = await self._find_user_by_id(
schema_ingress_config_session_data[ATTR_SESSION_DATA_USER_ID]
)
if user:
data = IngressSessionData(user)
session = self.sys_ingress.create_session(data)
session = self.sys_ingress.create_session()
return {ATTR_SESSION: session}
@api_process
@require_home_assistant
async def validate_session(self, request: web.Request) -> dict[str, Any]:
async def validate_session(self, request: web.Request) -> Dict[str, Any]:
"""Validate session and extending how long it's valid for."""
data = await api_validate(VALIDATE_SESSION_DATA, request)
@@ -140,9 +85,10 @@ class APIIngress(CoreSysAttributes):
_LOGGER.warning("No valid ingress session %s", data[ATTR_SESSION])
raise HTTPUnauthorized()
@require_home_assistant
async def handler(
self, request: web.Request
) -> web.Response | web.StreamResponse | web.WebSocketResponse:
) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
"""Route data to Supervisor ingress service."""
# Check Ingress Session
@@ -154,14 +100,13 @@ class APIIngress(CoreSysAttributes):
# Process requests
addon = self._extract_addon(request)
path = request.match_info.get("path")
session_data = self.sys_ingress.get_session_data(session)
try:
# Websocket
if _is_websocket(request):
return await self._handle_websocket(request, addon, path, session_data)
return await self._handle_websocket(request, addon, path)
# Request
return await self._handle_request(request, addon, path, session_data)
return await self._handle_request(request, addon, path)
except aiohttp.ClientError as err:
_LOGGER.error("Ingress error: %s", err)
@@ -169,11 +114,7 @@ class APIIngress(CoreSysAttributes):
raise HTTPBadGateway()
async def _handle_websocket(
self,
request: web.Request,
addon: Addon,
path: str,
session_data: IngressSessionData | None,
self, request: web.Request, addon: Addon, path: str
) -> web.WebSocketResponse:
"""Ingress route for websocket."""
if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers:
@@ -191,7 +132,7 @@ class APIIngress(CoreSysAttributes):
# Preparing
url = self._create_url(addon, path)
source_header = _init_header(request, addon, session_data)
source_header = _init_header(request, addon)
# Support GET query
if request.query_string:
@@ -208,8 +149,8 @@ class APIIngress(CoreSysAttributes):
# Proxy requests
await asyncio.wait(
[
self.sys_create_task(_websocket_forward(ws_server, ws_client)),
self.sys_create_task(_websocket_forward(ws_client, ws_server)),
_websocket_forward(ws_server, ws_client),
_websocket_forward(ws_client, ws_server),
],
return_when=asyncio.FIRST_COMPLETED,
)
@@ -217,25 +158,12 @@ class APIIngress(CoreSysAttributes):
return ws_server
async def _handle_request(
self,
request: web.Request,
addon: Addon,
path: str,
session_data: IngressSessionData | None,
) -> web.Response | web.StreamResponse:
self, request: web.Request, addon: Addon, path: str
) -> Union[web.Response, web.StreamResponse]:
"""Ingress route for request."""
url = self._create_url(addon, path)
source_header = _init_header(request, addon, session_data)
# Passing the raw stream breaks requests for some webservers
# since we just need it for POST requests really, for all other methods
# we read the bytes and pass that to the request to the add-on
# add-ons need to add support for that in their configuration
data = (
request.content
if request.method == "POST" and addon.ingress_stream
else await request.read()
)
data = await request.read()
source_header = _init_header(request, addon)
async with self.sys_websession.request(
request.method,
@@ -244,22 +172,12 @@ class APIIngress(CoreSysAttributes):
params=request.query,
allow_redirects=False,
data=data,
timeout=ClientTimeout(total=None),
skip_auto_headers={hdrs.CONTENT_TYPE},
) as result:
headers = _response_header(result)
# Avoid parsing content_type in simple cases for better performance
if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE):
content_type = (maybe_content_type.partition(";"))[0].strip()
else:
content_type = result.content_type
# Simple request
if (
# empty body responses should not be streamed,
# otherwise aiohttp < 3.9.0 may generate
# an invalid "0\r\n\r\n" chunk instead of an empty response.
must_be_empty_body(request.method, result.status)
or hdrs.CONTENT_LENGTH in result.headers
hdrs.CONTENT_LENGTH in result.headers
and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
):
# Return Response
@@ -267,13 +185,13 @@ class APIIngress(CoreSysAttributes):
return web.Response(
headers=headers,
status=result.status,
content_type=content_type,
content_type=result.content_type,
body=body,
)
# Stream response
response = web.StreamResponse(status=result.status, headers=headers)
response.content_type = content_type
response.content_type = result.content_type
try:
await response.prepare(request)
@@ -289,50 +207,24 @@ class APIIngress(CoreSysAttributes):
return response
async def _find_user_by_id(self, user_id: str) -> IngressSessionDataUser | None:
"""Find user object by the user's ID."""
try:
list_of_users = await self.sys_homeassistant.get_users()
except (HomeAssistantAPIError, TypeError) as err:
_LOGGER.error(
"%s error occurred while requesting list of users: %s", type(err), err
)
return None
if list_of_users is not None:
self._list_of_users = list_of_users
return next((user for user in self._list_of_users if user.id == user_id), None)
def _init_header(
request: web.Request, addon: Addon, session_data: IngressSessionData | None
) -> CIMultiDict | dict[str, str]:
request: web.Request, addon: str
) -> Union[CIMultiDict, Dict[str, str]]:
"""Create initial header."""
headers = {}
if session_data is not None:
headers[HEADER_REMOTE_USER_ID] = session_data.user.id
if session_data.user.username is not None:
headers[HEADER_REMOTE_USER_NAME] = session_data.user.username
if session_data.user.display_name is not None:
headers[HEADER_REMOTE_USER_DISPLAY_NAME] = session_data.user.display_name
# filter flags
for name, value in request.headers.items():
if name in (
hdrs.CONTENT_LENGTH,
hdrs.CONTENT_ENCODING,
hdrs.TRANSFER_ENCODING,
hdrs.SEC_WEBSOCKET_EXTENSIONS,
hdrs.SEC_WEBSOCKET_PROTOCOL,
hdrs.SEC_WEBSOCKET_VERSION,
hdrs.SEC_WEBSOCKET_KEY,
istr(HEADER_TOKEN),
istr(HEADER_TOKEN_OLD),
istr(HEADER_REMOTE_USER_ID),
istr(HEADER_REMOTE_USER_NAME),
istr(HEADER_REMOTE_USER_DISPLAY_NAME),
):
continue
headers[name] = value
@@ -345,7 +237,7 @@ def _init_header(
return headers
def _response_header(response: aiohttp.ClientResponse) -> dict[str, str]:
def _response_header(response: aiohttp.ClientResponse) -> Dict[str, str]:
"""Create response header."""
headers = {}


@@ -1,15 +1,12 @@
"""Init file for Supervisor Jobs RESTful API."""
import logging
from typing import Any
from typing import Any, Dict
from aiohttp import web
import voluptuous as vol
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..jobs import SupervisorJob
from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
from .const import ATTR_JOBS
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -22,47 +19,11 @@ SCHEMA_OPTIONS = vol.Schema(
class APIJobs(CoreSysAttributes):
"""Handle RESTful API for OS functions."""
def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
"""Return current job tree."""
jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
for job in self.sys_jobs.jobs:
if job.internal:
continue
if job.parent_id not in jobs_by_parent:
jobs_by_parent[job.parent_id] = [job]
else:
jobs_by_parent[job.parent_id].append(job)
job_list: list[dict[str, Any]] = []
queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
[(job_list, start)]
if start
else [(job_list, job) for job in jobs_by_parent.get(None, [])]
)
while queue:
(current_list, current_job) = queue.pop(0)
child_jobs: list[dict[str, Any]] = []
# We remove parent_id and instead use that info to represent jobs as a tree
job_dict = current_job.as_dict() | {"child_jobs": child_jobs}
job_dict.pop("parent_id")
current_list.append(job_dict)
if current_job.uuid in jobs_by_parent:
queue.extend(
[(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)]
)
return job_list
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return JobManager information."""
return {
ATTR_IGNORE_CONDITIONS: self.sys_jobs.ignore_conditions,
ATTR_JOBS: self._list_jobs(),
}
@api_process
@@ -81,19 +42,3 @@ class APIJobs(CoreSysAttributes):
async def reset(self, request: web.Request) -> None:
"""Reset options for JobManager."""
self.sys_jobs.reset_data()
@api_process
async def job_info(self, request: web.Request) -> dict[str, Any]:
"""Get details of a job by ID."""
job = self.sys_jobs.get_job(request.match_info.get("uuid"))
return self._list_jobs(job)[0]
@api_process
async def remove_job(self, request: web.Request) -> None:
"""Remove a completed job."""
job = self.sys_jobs.get_job(request.match_info.get("uuid"))
if not job.done:
raise APIError(f"Job {job.uuid} is not done!")
self.sys_jobs.remove_job(job)


@@ -1 +0,0 @@
"""API middleware for aiohttp."""


@@ -1,316 +0,0 @@
"""Handle security part of this API."""
import logging
import re
from typing import Final
from urllib.parse import unquote
from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
from awesomeversion import AwesomeVersion
from ...addons.const import RE_SLUG
from ...const import (
REQUEST_FROM,
ROLE_ADMIN,
ROLE_BACKUP,
ROLE_DEFAULT,
ROLE_HOMEASSISTANT,
ROLE_MANAGER,
CoreState,
)
from ...coresys import CoreSys, CoreSysAttributes
from ...utils import version_is_new_enough
from ..utils import api_return_error, excract_supervisor_token
_LOGGER: logging.Logger = logging.getLogger(__name__)
_CORE_VERSION: Final = AwesomeVersion("2023.3.4")
# fmt: off
_CORE_FRONTEND_PATHS: Final = (
r"|/app/.*\.(?:js|gz|json|map|woff2)"
r"|/(store/)?addons/" + RE_SLUG + r"/(logo|icon)"
)
CORE_FRONTEND: Final = re.compile(
r"^(?:" + _CORE_FRONTEND_PATHS + r")$"
)
# Block Anytime
BLACKLIST: Final = re.compile(
r"^(?:"
r"|/homeassistant/api/hassio/.*"
r"|/core/api/hassio/.*"
r")$"
)
# Free to call or have own security concepts
NO_SECURITY_CHECK: Final = re.compile(
r"^(?:"
r"|/homeassistant/api/.*"
r"|/homeassistant/websocket"
r"|/core/api/.*"
r"|/core/websocket"
r"|/supervisor/ping"
r"|/ingress/[-_A-Za-z0-9]+/.*"
+ _CORE_FRONTEND_PATHS
+ r")$"
)
# Observer allow API calls
OBSERVER_CHECK: Final = re.compile(
r"^(?:"
r"|/.+/info"
r")$"
)
# Can called by every add-on
ADDONS_API_BYPASS: Final = re.compile(
r"^(?:"
r"|/addons/self/(?!security|update)[^/]+"
r"|/addons/self/options/config"
r"|/info"
r"|/services.*"
r"|/discovery.*"
r"|/auth"
r")$"
)
# Policy role add-on API access
ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
ROLE_DEFAULT: re.compile(
r"^(?:"
r"|/.+/info"
r")$"
),
ROLE_HOMEASSISTANT: re.compile(
r"^(?:"
r"|/.+/info"
r"|/core/.+"
r"|/homeassistant/.+"
r")$"
),
ROLE_BACKUP: re.compile(
r"^(?:"
r"|/.+/info"
r"|/backups.*"
r")$"
),
ROLE_MANAGER: re.compile(
r"^(?:"
r"|/.+/info"
r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
r"|/audio/.+"
r"|/auth/cache"
r"|/available_updates"
r"|/backups.*"
r"|/cli/.+"
r"|/core/.+"
r"|/dns/.+"
r"|/docker/.+"
r"|/jobs/.+"
r"|/hardware/.+"
r"|/hassos/.+"
r"|/homeassistant/.+"
r"|/host/.+"
r"|/mounts.*"
r"|/multicast/.+"
r"|/network/.+"
r"|/observer/.+"
r"|/os/(?!datadisk/wipe).+"
r"|/refresh_updates"
r"|/resolution/.+"
r"|/security/.+"
r"|/snapshots.*"
r"|/store.*"
r"|/supervisor/.+"
r")$"
),
ROLE_ADMIN: re.compile(
r".*"
),
}
FILTERS: Final = re.compile(
r"(?:"
# Common exploits
r"proc/self/environ"
r"|(<|%3C).*script.*(>|%3E)"
# File Injections
r"|(\.\.//?)+" # ../../anywhere
r"|[a-zA-Z0-9_]=/([a-z0-9_.]//?)+" # .html?v=/.//test
# SQL Injections
r"|union.*select.*\("
r"|union.*all.*select.*"
r"|concat.*\("
r")",
flags=re.IGNORECASE,
)
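# Examples of (unquoted) paths and query strings these patterns are meant to
# reject, derived from the expressions above (illustrative only):
#   "proc/self/environ"                  -> blocked (environ disclosure)
#   "<script>alert(1)</script>"          -> blocked (script injection)
#   "../../etc/passwd"                   -> blocked (path traversal)
#   "id=1 union all select password"     -> blocked (SQL injection probe)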
# fmt: on
class SecurityMiddleware(CoreSysAttributes):
"""Security middleware functions."""
def __init__(self, coresys: CoreSys):
"""Initialize security middleware."""
self.coresys: CoreSys = coresys
def _recursive_unquote(self, value: str) -> str:
"""Handle values that are encoded multiple times."""
if (unquoted := unquote(value)) != value:
unquoted = self._recursive_unquote(unquoted)
return unquoted
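# For example (assumed behaviour, following urllib.parse.unquote semantics):
#   _recursive_unquote("%252e%252e%2f") -> "%2e%2e/" -> "../"
# so a double-encoded traversal still reaches the FILTERS patterns above.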
@middleware
async def block_bad_requests(
self, request: Request, handler: RequestHandler
) -> Response:
"""Process request and tblock commonly known exploit attempts."""
if FILTERS.search(self._recursive_unquote(request.path)):
_LOGGER.warning(
"Filtered a potential harmful request to: %s", request.raw_path
)
raise HTTPBadRequest
if FILTERS.search(self._recursive_unquote(request.query_string)):
_LOGGER.warning(
"Filtered a request with a potential harmful query string: %s",
request.raw_path,
)
raise HTTPBadRequest
return await handler(request)
@middleware
async def system_validation(
self, request: Request, handler: RequestHandler
) -> Response:
"""Check if core is ready to response."""
if self.sys_core.state not in (
CoreState.STARTUP,
CoreState.RUNNING,
CoreState.FREEZE,
):
return api_return_error(
message=f"System is not ready with state: {self.sys_core.state}"
)
return await handler(request)
@middleware
async def token_validation(
self, request: Request, handler: RequestHandler
) -> Response:
"""Check security access of this layer."""
request_from = None
supervisor_token = excract_supervisor_token(request)
# Blacklist
if BLACKLIST.match(request.path):
_LOGGER.error("%s is blacklisted!", request.path)
raise HTTPForbidden()
# Ignore security check
if NO_SECURITY_CHECK.match(request.path):
_LOGGER.debug("Passthrough %s", request.path)
request[REQUEST_FROM] = None
return await handler(request)
# Not token
if not supervisor_token:
_LOGGER.warning("No API token provided for %s", request.path)
raise HTTPUnauthorized()
# Home-Assistant
if supervisor_token == self.sys_homeassistant.supervisor_token:
_LOGGER.debug("%s access from Home Assistant", request.path)
request_from = self.sys_homeassistant
# Host
if supervisor_token == self.sys_plugins.cli.supervisor_token:
_LOGGER.debug("%s access from Host", request.path)
request_from = self.sys_host
# Observer
if supervisor_token == self.sys_plugins.observer.supervisor_token:
if not OBSERVER_CHECK.match(request.path):
_LOGGER.warning("%s invalid Observer access", request.path)
raise HTTPForbidden()
_LOGGER.debug("%s access from Observer", request.path)
request_from = self.sys_plugins.observer
# Add-on
addon = None
if supervisor_token and not request_from:
addon = self.sys_addons.from_token(supervisor_token)
# Check Add-on API access
if addon and ADDONS_API_BYPASS.match(request.path):
_LOGGER.debug("Passthrough %s from %s", request.path, addon.slug)
request_from = addon
elif addon and addon.access_hassio_api:
# Check Role
if ADDONS_ROLE_ACCESS[addon.hassio_role].match(request.path):
_LOGGER.info("%s access from %s", request.path, addon.slug)
request_from = addon
else:
_LOGGER.warning("%s no role for %s", request.path, addon.slug)
elif addon:
_LOGGER.warning(
"%s missing API permission for %s", addon.slug, request.path
)
if request_from:
request[REQUEST_FROM] = request_from
return await handler(request)
_LOGGER.error("Invalid token for access %s", request.path)
raise HTTPForbidden()
@middleware
async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
"""Validate user from Core API proxy."""
if request[REQUEST_FROM] != self.sys_homeassistant or version_is_new_enough(
self.sys_homeassistant.version, _CORE_VERSION
):
return await handler(request)
authorization_index: int | None = None
content_type_index: int | None = None
user_request: bool = False
admin_request: bool = False
ingress_request: bool = False
for idx, (key, value) in enumerate(request.raw_headers):
if key in (b"Authorization", b"X-Hassio-Key"):
authorization_index = idx
elif key == b"Content-Type":
content_type_index = idx
elif key == b"X-Hass-User-ID":
user_request = True
elif key == b"X-Hass-Is-Admin":
admin_request = value == b"1"
elif key == b"X-Ingress-Path":
ingress_request = True
if (user_request or admin_request) and not ingress_request:
return await handler(request)
is_proxy_request = (
authorization_index is not None
and content_type_index is not None
and content_type_index - authorization_index == 1
)
if (
not CORE_FRONTEND.match(request.path) and is_proxy_request
) or ingress_request:
raise HTTPBadRequest()
return await handler(request)


@@ -1,124 +0,0 @@
"""Inits file for supervisor mounts REST API."""
from typing import Any
from aiohttp import web
import voluptuous as vol
from ..const import ATTR_NAME, ATTR_STATE
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
from ..mounts.mount import Mount
from ..mounts.validate import SCHEMA_MOUNT_CONFIG
from .const import ATTR_MOUNTS
from .utils import api_process, api_validate
SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_DEFAULT_BACKUP_MOUNT): vol.Maybe(str),
}
)
class APIMounts(CoreSysAttributes):
"""Handle REST API for mounting options."""
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
"""Return MountManager info."""
return {
ATTR_DEFAULT_BACKUP_MOUNT: self.sys_mounts.default_backup_mount.name
if self.sys_mounts.default_backup_mount
else None,
ATTR_MOUNTS: [
mount.to_dict() | {ATTR_STATE: mount.state}
for mount in self.sys_mounts.mounts
],
}
@api_process
async def options(self, request: web.Request) -> None:
"""Set Mount Manager options."""
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_DEFAULT_BACKUP_MOUNT in body:
name: str | None = body[ATTR_DEFAULT_BACKUP_MOUNT]
if name is None:
self.sys_mounts.default_backup_mount = None
elif (mount := self.sys_mounts.get(name)).usage != MountUsage.BACKUP:
raise APIError(
f"Mount {name} is not used for backups, cannot use it as default backup mount"
)
else:
self.sys_mounts.default_backup_mount = mount
self.sys_mounts.save_data()
@api_process
async def create_mount(self, request: web.Request) -> None:
"""Create a new mount in supervisor."""
body = await api_validate(SCHEMA_MOUNT_CONFIG, request)
if body[ATTR_NAME] in self.sys_mounts:
raise APIError(f"A mount already exists with name {body[ATTR_NAME]}")
mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount)
# If it's a backup mount, reload backups
if mount.usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload())
# If there's no default backup mount, set it to the new mount
if not self.sys_mounts.default_backup_mount:
self.sys_mounts.default_backup_mount = mount
self.sys_mounts.save_data()
@api_process
async def update_mount(self, request: web.Request) -> None:
"""Update an existing mount in supervisor."""
name = request.match_info.get("mount")
name_schema = vol.Schema(
{vol.Optional(ATTR_NAME, default=name): name}, extra=vol.ALLOW_EXTRA
)
body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)
if name not in self.sys_mounts:
raise APIError(f"No mount exists with name {name}")
mount = Mount.from_dict(self.coresys, body)
await self.sys_mounts.create_mount(mount)
# If it's a backup mount, reload backups
if mount.usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload())
# If this mount was the default backup mount and isn't for backups any more, remove it
elif self.sys_mounts.default_backup_mount == mount:
self.sys_mounts.default_backup_mount = None
self.sys_mounts.save_data()
@api_process
async def delete_mount(self, request: web.Request) -> None:
"""Delete an existing mount in supervisor."""
name = request.match_info.get("mount")
mount = await self.sys_mounts.remove_mount(name)
# If it was a backup mount, reload backups
if mount.usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload())
self.sys_mounts.save_data()
@api_process
async def reload_mount(self, request: web.Request) -> None:
"""Reload an existing mount in supervisor."""
name = request.match_info.get("mount")
await self.sys_mounts.reload_mount(name)
# If it's a backup mount, reload backups
if self.sys_mounts.get(name).usage == MountUsage.BACKUP:
self.sys_create_task(self.sys_backups.reload())


@@ -1,8 +1,7 @@
"""Init file for Supervisor Multicast RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from typing import Any, Awaitable, Dict
from aiohttp import web
import voluptuous as vol
@@ -19,11 +18,12 @@ from ..const import (
ATTR_UPDATE_AVAILABLE,
ATTR_VERSION,
ATTR_VERSION_LATEST,
CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import version_tag
from .utils import api_process, api_validate
from .utils import api_process, api_process_raw, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -34,7 +34,7 @@ class APIMulticast(CoreSysAttributes):
"""Handle RESTful API for Multicast functions."""
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return Multicast information."""
return {
ATTR_VERSION: self.sys_plugins.multicast.version,
@@ -43,7 +43,7 @@ class APIMulticast(CoreSysAttributes):
}
@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
async def stats(self, request: web.Request) -> Dict[str, Any]:
"""Return resource information."""
stats = await self.sys_plugins.multicast.stats()
@@ -68,6 +68,11 @@ class APIMulticast(CoreSysAttributes):
raise APIError(f"Version {version} is already in use")
await asyncio.shield(self.sys_plugins.multicast.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return Multicast Docker logs."""
return self.sys_plugins.multicast.logs()
@api_process
def restart(self, request: web.Request) -> Awaitable[None]:
"""Restart Multicast plugin."""


@@ -1,11 +1,10 @@
"""REST API for network."""
import asyncio
from collections.abc import Awaitable
from dataclasses import replace
from ipaddress import ip_address, ip_interface
from typing import Any
from typing import Any, Awaitable, Dict
from aiohttp import web
import attr
import voluptuous as vol
from ..const import (
@@ -31,7 +30,6 @@ from ..const import (
ATTR_PARENT,
ATTR_PRIMARY,
ATTR_PSK,
ATTR_READY,
ATTR_SIGNAL,
ATTR_SSID,
ATTR_SUPERVISOR_INTERNET,
@@ -43,7 +41,8 @@ from ..const import (
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, HostNetworkNotFound
from ..host.configuration import (
from ..host.const import AuthMethod, InterfaceType, WifiMode
from ..host.network import (
AccessPoint,
Interface,
InterfaceMethod,
@@ -51,7 +50,6 @@ from ..host.configuration import (
VlanConfig,
WifiConfig,
)
from ..host.const import AuthMethod, InterfaceType, WifiMode
from .utils import api_process, api_validate
_SCHEMA_IP_CONFIG = vol.Schema(
@@ -84,18 +82,17 @@ SCHEMA_UPDATE = vol.Schema(
)
def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
def ipconfig_struct(config: IpConfig) -> Dict[str, Any]:
"""Return a dict with information about ip configuration."""
return {
ATTR_METHOD: config.method,
ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
ATTR_READY: config.ready,
}
def wifi_struct(config: WifiConfig) -> dict[str, Any]:
def wifi_struct(config: WifiConfig) -> Dict[str, Any]:
"""Return a dict with information about wifi configuration."""
return {
ATTR_MODE: config.mode,
@@ -105,7 +102,7 @@ def wifi_struct(config: WifiConfig) -> dict[str, Any]:
}
def vlan_struct(config: VlanConfig) -> dict[str, Any]:
def vlan_struct(config: VlanConfig) -> Dict[str, Any]:
"""Return a dict with information about VLAN configuration."""
return {
ATTR_ID: config.id,
@@ -113,7 +110,7 @@ def vlan_struct(config: VlanConfig) -> dict[str, Any]:
}
def interface_struct(interface: Interface) -> dict[str, Any]:
def interface_struct(interface: Interface) -> Dict[str, Any]:
"""Return a dict with information of a interface to be used in th API."""
return {
ATTR_INTERFACE: interface.name,
@@ -121,7 +118,6 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
ATTR_ENABLED: interface.enabled,
ATTR_CONNECTED: interface.connected,
ATTR_PRIMARY: interface.primary,
ATTR_MAC: interface.mac,
ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
@@ -129,7 +125,7 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
}
def accesspoint_struct(accesspoint: AccessPoint) -> dict[str, Any]:
def accesspoint_struct(accesspoint: AccessPoint) -> Dict[str, Any]:
"""Return a dict for AccessPoint."""
return {
ATTR_MODE: accesspoint.mode,
@@ -145,7 +141,9 @@ class APINetwork(CoreSysAttributes):
def _get_interface(self, name: str) -> Interface:
"""Get Interface by name or default."""
if name.lower() == "default":
name = name.lower()
if name == "default":
for interface in self.sys_host.network.interfaces:
if not interface.primary:
continue
@@ -160,7 +158,7 @@ class APINetwork(CoreSysAttributes):
raise APIError(f"Interface {name} does not exist") from None
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return network information."""
return {
ATTR_INTERFACES: [
@@ -178,7 +176,7 @@ class APINetwork(CoreSysAttributes):
}
@api_process
async def interface_info(self, request: web.Request) -> dict[str, Any]:
async def interface_info(self, request: web.Request) -> Dict[str, Any]:
"""Return network information for a interface."""
interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
@@ -197,19 +195,17 @@ class APINetwork(CoreSysAttributes):
# Apply config
for key, config in body.items():
if key == ATTR_IPV4:
interface.ipv4 = replace(
interface.ipv4
or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
interface.ipv4 = attr.evolve(
interface.ipv4 or IpConfig(InterfaceMethod.STATIC, [], None, []),
**config,
)
elif key == ATTR_IPV6:
interface.ipv6 = replace(
interface.ipv6
or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
interface.ipv6 = attr.evolve(
interface.ipv6 or IpConfig(InterfaceMethod.STATIC, [], None, []),
**config,
)
elif key == ATTR_WIFI:
interface.wifi = replace(
interface.wifi = attr.evolve(
interface.wifi
or WifiConfig(
WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
@@ -224,12 +220,10 @@ class APINetwork(CoreSysAttributes):
@api_process
def reload(self, request: web.Request) -> Awaitable[None]:
"""Reload network data."""
return asyncio.shield(
self.sys_host.network.update(force_connectivity_check=True)
)
return asyncio.shield(self.sys_host.network.update())
@api_process
async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]:
async def scan_accesspoints(self, request: web.Request) -> Dict[str, Any]:
"""Scan and return a list of available networks."""
interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
@@ -263,7 +257,6 @@ class APINetwork(CoreSysAttributes):
body[ATTR_IPV4].get(ATTR_ADDRESS, []),
body[ATTR_IPV4].get(ATTR_GATEWAY, None),
body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
None,
)
ipv6_config = None
@@ -273,12 +266,9 @@ class APINetwork(CoreSysAttributes):
body[ATTR_IPV6].get(ATTR_ADDRESS, []),
body[ATTR_IPV6].get(ATTR_GATEWAY, None),
body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
None,
)
vlan_interface = Interface(
"",
"",
"",
True,
True,
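
Worth noting in this hunk: the newer branch models host configuration objects as dataclasses and copies them with dataclasses.replace(), while this branch keeps attrs classes and attr.evolve(). Both calls produce an updated copy of an otherwise immutable config. Below is a minimal sketch with a stand-in class, not the actual Supervisor IpConfig.

from dataclasses import dataclass, replace
from ipaddress import IPv4Address
from typing import Optional


@dataclass(frozen=True)
class SimpleIpConfig:
    """Stand-in for IpConfig; the real class carries more fields."""

    method: str
    address: list
    gateway: Optional[IPv4Address]
    nameservers: list


base = SimpleIpConfig("static", [], None, [])
updated = replace(base, gateway=IPv4Address("192.168.1.1"))

# base is untouched; only the copy carries the new gateway.
print(base.gateway, updated.gateway)
# attrs offers the same pattern via attr.evolve(instance, gateway=...).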

View File

@@ -1,7 +1,7 @@
"""Init file for Supervisor Observer RESTful API."""
import asyncio
import logging
from typing import Any
from typing import Any, Dict
from aiohttp import web
import voluptuous as vol
@@ -33,7 +33,7 @@ class APIObserver(CoreSysAttributes):
"""Handle RESTful API for Observer functions."""
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return HA Observer information."""
return {
ATTR_HOST: str(self.sys_docker.network.observer),
@@ -43,7 +43,7 @@ class APIObserver(CoreSysAttributes):
}
@api_process
async def stats(self, request: web.Request) -> dict[str, Any]:
async def stats(self, request: web.Request) -> Dict[str, Any]:
"""Return resource information."""
stats = await self.sys_plugins.observer.stats()

View File

@@ -1,213 +1,50 @@
"""Init file for Supervisor HassOS RESTful API."""
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from typing import Any, Awaitable, Dict
from aiohttp import web
import voluptuous as vol
from ..const import (
ATTR_ACTIVITY_LED,
ATTR_BOARD,
ATTR_BOOT,
ATTR_DEVICES,
ATTR_DISK_LED,
ATTR_HEARTBEAT_LED,
ATTR_ID,
ATTR_NAME,
ATTR_POWER_LED,
ATTR_SERIAL,
ATTR_SIZE,
ATTR_STATE,
ATTR_UPDATE_AVAILABLE,
ATTR_VERSION,
ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from ..exceptions import BoardInvalidError
from ..resolution.const import ContextType, IssueType, SuggestionType
from ..validate import version_tag
from .const import (
ATTR_BOOT_SLOT,
ATTR_BOOT_SLOTS,
ATTR_DATA_DISK,
ATTR_DEV_PATH,
ATTR_DEVICE,
ATTR_DISKS,
ATTR_MODEL,
ATTR_STATUS,
ATTR_SYSTEM_HEALTH_LED,
ATTR_VENDOR,
BootSlot,
)
from .utils import api_process, api_validate
_LOGGER: logging.Logger = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)})
SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})
SCHEMA_YELLOW_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_DISK_LED): vol.Boolean(),
vol.Optional(ATTR_HEARTBEAT_LED): vol.Boolean(),
vol.Optional(ATTR_POWER_LED): vol.Boolean(),
}
)
SCHEMA_GREEN_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_ACTIVITY_LED): vol.Boolean(),
vol.Optional(ATTR_POWER_LED): vol.Boolean(),
vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
}
)
# pylint: enable=no-value-for-parameter
class APIOS(CoreSysAttributes):
"""Handle RESTful API for OS functions."""
@api_process
async def info(self, request: web.Request) -> dict[str, Any]:
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return OS information."""
return {
ATTR_VERSION: self.sys_os.version,
ATTR_VERSION_LATEST: self.sys_os.latest_version,
ATTR_UPDATE_AVAILABLE: self.sys_os.need_update,
ATTR_BOARD: self.sys_os.board,
ATTR_VERSION: self.sys_hassos.version,
ATTR_VERSION_LATEST: self.sys_hassos.latest_version,
ATTR_UPDATE_AVAILABLE: self.sys_hassos.need_update,
ATTR_BOARD: self.sys_hassos.board,
ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id,
ATTR_BOOT_SLOTS: {
slot.bootname: {
ATTR_STATE: slot.state,
ATTR_STATUS: slot.boot_status,
ATTR_VERSION: slot.bundle_version,
}
for slot in self.sys_os.slots
if slot.bootname
},
}
@api_process
async def update(self, request: web.Request) -> None:
"""Update OS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.sys_os.latest_version)
version = body.get(ATTR_VERSION, self.sys_hassos.latest_version)
await asyncio.shield(self.sys_os.update(version))
await asyncio.shield(self.sys_hassos.update(version))
@api_process
def config_sync(self, request: web.Request) -> Awaitable[None]:
"""Trigger config reload on OS."""
return asyncio.shield(self.sys_os.config_sync())
@api_process
async def migrate_data(self, request: web.Request) -> None:
"""Trigger data disk migration on Host."""
body = await api_validate(SCHEMA_DISK, request)
await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))
@api_process
def wipe_data(self, request: web.Request) -> Awaitable[None]:
"""Trigger data disk wipe on Host."""
return asyncio.shield(self.sys_os.datadisk.wipe_disk())
@api_process
async def set_boot_slot(self, request: web.Request) -> None:
"""Change the active boot slot and reboot into it."""
body = await api_validate(SCHEMA_SET_BOOT_SLOT, request)
await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT]))
@api_process
async def list_data(self, request: web.Request) -> dict[str, Any]:
"""Return possible data targets."""
return {
ATTR_DEVICES: [disk.id for disk in self.sys_os.datadisk.available_disks],
ATTR_DISKS: [
{
ATTR_NAME: disk.name,
ATTR_VENDOR: disk.vendor,
ATTR_MODEL: disk.model,
ATTR_SERIAL: disk.serial,
ATTR_SIZE: disk.size,
ATTR_ID: disk.id,
ATTR_DEV_PATH: disk.device_path.as_posix(),
}
for disk in self.sys_os.datadisk.available_disks
],
}
@api_process
async def boards_green_info(self, request: web.Request) -> dict[str, Any]:
"""Get green board settings."""
return {
ATTR_ACTIVITY_LED: self.sys_dbus.agent.board.green.activity_led,
ATTR_POWER_LED: self.sys_dbus.agent.board.green.power_led,
ATTR_SYSTEM_HEALTH_LED: self.sys_dbus.agent.board.green.user_led,
}
@api_process
async def boards_green_options(self, request: web.Request) -> None:
"""Update green board settings."""
body = await api_validate(SCHEMA_GREEN_OPTIONS, request)
if ATTR_ACTIVITY_LED in body:
await self.sys_dbus.agent.board.green.set_activity_led(
body[ATTR_ACTIVITY_LED]
)
if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED])
if ATTR_SYSTEM_HEALTH_LED in body:
await self.sys_dbus.agent.board.green.set_user_led(
body[ATTR_SYSTEM_HEALTH_LED]
)
self.sys_dbus.agent.board.green.save_data()
@api_process
async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
"""Get yellow board settings."""
return {
ATTR_DISK_LED: self.sys_dbus.agent.board.yellow.disk_led,
ATTR_HEARTBEAT_LED: self.sys_dbus.agent.board.yellow.heartbeat_led,
ATTR_POWER_LED: self.sys_dbus.agent.board.yellow.power_led,
}
@api_process
async def boards_yellow_options(self, request: web.Request) -> None:
"""Update yellow board settings."""
body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)
if ATTR_DISK_LED in body:
await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED])
if ATTR_HEARTBEAT_LED in body:
await self.sys_dbus.agent.board.yellow.set_heartbeat_led(
body[ATTR_HEARTBEAT_LED]
)
if ATTR_POWER_LED in body:
await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])
self.sys_dbus.agent.board.yellow.save_data()
self.sys_resolution.create_issue(
IssueType.REBOOT_REQUIRED,
ContextType.SYSTEM,
suggestions=[SuggestionType.EXECUTE_REBOOT],
)
@api_process
async def boards_other_info(self, request: web.Request) -> dict[str, Any]:
"""Empty success return if board is in use, error otherwise."""
if request.match_info["board"] != self.sys_os.board:
raise BoardInvalidError(
f"{request.match_info['board']} board is not in use", _LOGGER.error
)
return {}
return asyncio.shield(self.sys_hassos.config_sync())
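
One pattern shared by the update and restart handlers above (and the other API modules in this diff) is wrapping long-running operations in asyncio.shield() before awaiting them, so a cancelled HTTP request does not abort an OS or plugin update halfway through. Here is a self-contained sketch of that behaviour, using a placeholder update coroutine and version string rather than the Supervisor implementation.

import asyncio


async def update(version: str) -> None:
    """Placeholder for a long-running operation such as an OS update."""
    print(f"starting update to {version}")
    await asyncio.sleep(2)
    print("update finished")


async def handler() -> None:
    """Placeholder request handler that shields the update from cancellation."""
    try:
        await asyncio.shield(update("10.3"))  # "10.3" is an arbitrary example version
    except asyncio.CancelledError:
        print("request cancelled, but the shielded update keeps running")


async def main() -> None:
    request = asyncio.create_task(handler())
    await asyncio.sleep(0.5)
    request.cancel()        # simulate the client dropping the connection
    await asyncio.sleep(2)  # give the shielded update time to finish


asyncio.run(main())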

View File

@@ -1 +1,9 @@
!function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-qzB1D0O4L9U.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js")}}()
try {
new Function("import('/api/hassio/app/frontend_latest/entrypoint.4050b348.js')")();
} catch (err) {
var el = document.createElement('script');
el.src = '/api/hassio/app/frontend_es5/entrypoint.bcf8e8ff.js';
document.body.appendChild(el);
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,2 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[1047],{32594:function(e,t,r){r.d(t,{U:function(){return n}});var n=function(e){return e.stopPropagation()}},75054:function(e,t,r){r.r(t),r.d(t,{HaTimeDuration:function(){return f}});var n,a=r(88962),i=r(33368),o=r(71650),d=r(82390),u=r(69205),l=r(70906),s=r(91808),c=r(68144),v=r(79932),f=(r(47289),(0,s.Z)([(0,v.Mo)("ha-selector-duration")],(function(e,t){var r=function(t){(0,u.Z)(n,t);var r=(0,l.Z)(n);function n(){var t;(0,o.Z)(this,n);for(var a=arguments.length,i=new Array(a),u=0;u<a;u++)i[u]=arguments[u];return t=r.call.apply(r,[this].concat(i)),e((0,d.Z)(t)),t}return(0,i.Z)(n)}(t);return{F:r,d:[{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"value",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"required",value:function(){return!0}},{kind:"method",key:"render",value:function(){var e;return(0,c.dy)(n||(n=(0,a.Z)([' <ha-duration-input .label="','" .helper="','" .data="','" .disabled="','" .required="','" ?enableDay="','"></ha-duration-input> '])),this.label,this.helper,this.value,this.disabled,this.required,null===(e=this.selector.duration)||void 0===e?void 0:e.enable_day)}}]}}),c.oi))}}]);
//# sourceMappingURL=1047-g7fFLS9eP4I.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,2 +0,0 @@
!function(){"use strict";var n,t,e={14595:function(n,t,e){e(58556);var r,i,o=e(93217),u=e(422),a=e(62173),s=function(n,t,e){if("input"===n){if("type"===t&&"checkbox"===e||"checked"===t||"disabled"===t)return;return""}},c={renderMarkdown:function(n,t){var e,o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return r||(r=Object.assign(Object.assign({},(0,a.getDefaultWhiteList)()),{},{input:["type","disabled","checked"],"ha-icon":["icon"],"ha-svg-icon":["path"],"ha-alert":["alert-type","title"]})),o.allowSvg?(i||(i=Object.assign(Object.assign({},r),{},{svg:["xmlns","height","width"],path:["transform","stroke","d"],img:["src"]})),e=i):e=r,(0,a.filterXSS)((0,u.TU)(n,t),{whiteList:e,onTagAttr:s})}};(0,o.Jj)(c)}},r={};function i(n){var t=r[n];if(void 0!==t)return t.exports;var o=r[n]={exports:{}};return e[n](o,o.exports,i),o.exports}i.m=e,i.x=function(){var n=i.O(void 0,[9191,215],(function(){return i(14595)}));return n=i.O(n)},n=[],i.O=function(t,e,r,o){if(!e){var u=1/0;for(f=0;f<n.length;f++){e=n[f][0],r=n[f][1],o=n[f][2];for(var a=!0,s=0;s<e.length;s++)(!1&o||u>=o)&&Object.keys(i.O).every((function(n){return i.O[n](e[s])}))?e.splice(s--,1):(a=!1,o<u&&(u=o));if(a){n.splice(f--,1);var c=r();void 0!==c&&(t=c)}}return t}o=o||0;for(var f=n.length;f>0&&n[f-1][2]>o;f--)n[f]=n[f-1];n[f]=[e,r,o]},i.n=function(n){var t=n&&n.__esModule?function(){return n.default}:function(){return n};return i.d(t,{a:t}),t},i.d=function(n,t){for(var e in t)i.o(t,e)&&!i.o(n,e)&&Object.defineProperty(n,e,{enumerable:!0,get:t[e]})},i.f={},i.e=function(n){return Promise.all(Object.keys(i.f).reduce((function(t,e){return i.f[e](n,t),t}),[]))},i.u=function(n){return n+"-"+{215:"FPZmDYZTPdk",9191:"37260H-osZ4"}[n]+".js"},i.o=function(n,t){return Object.prototype.hasOwnProperty.call(n,t)},i.p="/api/hassio/app/frontend_es5/",function(){var n={1402:1};i.f.i=function(t,e){n[t]||importScripts(i.p+i.u(t))};var t=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],e=t.push.bind(t);t.push=function(t){var r=t[0],o=t[1],u=t[2];for(var a in o)i.o(o,a)&&(i.m[a]=o[a]);for(u&&u(i);r.length;)n[r.pop()]=1;e(t)}}(),t=i.x,i.x=function(){return Promise.all([i.e(9191),i.e(215)]).then(t)};i.x()}();
//# sourceMappingURL=1402-6WKUruvoXtM.js.map

View File

@@ -1 +0,0 @@
{"version":3,"file":"1402-6WKUruvoXtM.js","mappings":"6BAAIA,ECAAC,E,sCCMAC,EACAC,E,+BAMEC,EAAY,SAChBC,EACAC,EACAC,GAEA,GAAY,UAARF,EAAiB,CACnB,GACY,SAATC,GAA6B,aAAVC,GACX,YAATD,GACS,aAATA,EAEA,OAEF,MAAO,EACT,CAEF,EA0CME,EAAM,CACVC,eAzCqB,SACrBC,EACAC,GAKW,IAWPC,EAfJC,EAGCC,UAAAC,OAAA,QAAAC,IAAAF,UAAA,GAAAA,UAAA,GAAG,CAAC,EA4BL,OA1BKZ,IACHA,EAAee,OAAAC,OAAAD,OAAAC,OAAA,IACVC,EAAAA,EAAAA,wBAAqB,IACxBC,MAAO,CAAC,OAAQ,WAAY,WAC5B,UAAW,CAAC,QACZ,cAAe,CAAC,QAChB,WAAY,CAAC,aAAc,YAM3BP,EAAYQ,UACTlB,IACHA,EAAYc,OAAAC,OAAAD,OAAAC,OAAA,GACPhB,GAAe,IAClBoB,IAAK,CAAC,QAAS,SAAU,SACzBC,KAAM,CAAC,YAAa,SAAU,KAC9BC,IAAK,CAAC,UAGVZ,EAAYT,GAEZS,EAAYV,GAGPuB,EAAAA,EAAAA,YAAUC,EAAAA,EAAAA,IAAOhB,EAASC,GAAgB,CAC/CC,UAAAA,EACAR,UAAAA,GAEJ,IAQAuB,EAAAA,EAAAA,IAAOnB,E,GC5EHoB,EAA2B,CAAC,EAGhC,SAASC,EAAoBC,GAE5B,IAAIC,EAAeH,EAAyBE,GAC5C,QAAqBd,IAAjBe,EACH,OAAOA,EAAaC,QAGrB,IAAIC,EAASL,EAAyBE,GAAY,CAGjDE,QAAS,CAAC,GAOX,OAHAE,EAAoBJ,GAAUG,EAAQA,EAAOD,QAASH,GAG/CI,EAAOD,OACf,CAGAH,EAAoBM,EAAID,EAGxBL,EAAoBO,EAAI,WAGvB,IAAIC,EAAsBR,EAAoBS,OAAEtB,EAAW,CAAC,KAAK,MAAM,WAAa,OAAOa,EAAoB,MAAQ,IAEvH,OADAQ,EAAsBR,EAAoBS,EAAED,EAE7C,EHlCIrC,EAAW,GACf6B,EAAoBS,EAAI,SAASC,EAAQC,EAAUC,EAAIC,GACtD,IAAGF,EAAH,CAMA,IAAIG,EAAeC,IACnB,IAASC,EAAI,EAAGA,EAAI7C,EAASe,OAAQ8B,IAAK,CACrCL,EAAWxC,EAAS6C,GAAG,GACvBJ,EAAKzC,EAAS6C,GAAG,GACjBH,EAAW1C,EAAS6C,GAAG,GAE3B,IAJA,IAGIC,GAAY,EACPC,EAAI,EAAGA,EAAIP,EAASzB,OAAQgC,MACpB,EAAXL,GAAsBC,GAAgBD,IAAazB,OAAO+B,KAAKnB,EAAoBS,GAAGW,OAAM,SAASC,GAAO,OAAOrB,EAAoBS,EAAEY,GAAKV,EAASO,GAAK,IAChKP,EAASW,OAAOJ,IAAK,IAErBD,GAAY,EACTJ,EAAWC,IAAcA,EAAeD,IAG7C,GAAGI,EAAW,CACb9C,EAASmD,OAAON,IAAK,GACrB,IAAIO,EAAIX,SACEzB,IAANoC,IAAiBb,EAASa,EAC/B,CACD,CACA,OAAOb,CArBP,CAJCG,EAAWA,GAAY,EACvB,IAAI,IAAIG,EAAI7C,EAASe,OAAQ8B,EAAI,GAAK7C,EAAS6C,EAAI,GAAG,GAAKH,EAAUG,IAAK7C,EAAS6C,GAAK7C,EAAS6C,EAAI,GACrG7C,EAAS6C,GAAK,CAACL,EAAUC,EAAIC,EAwB/B,EI5BAb,EAAoBwB,EAAI,SAASpB,GAChC,IAAIqB,EAASrB,GAAUA,EAAOsB,WAC7B,WAAa,OAAOtB,EAAgB,OAAG,EACvC,WAAa,OAAOA,CAAQ,EAE7B,OADAJ,EAAoB2B,EAAEF,EAAQ,CAAEG,EAAGH,IAC5BA,CACR,ECNAzB,EAAoB2B,EAAI,SAASxB,EAAS0B,GACzC,IAAI,IAAIR,KAAOQ,EACX7B,EAAoB8B,EAAED,EAAYR,KAASrB,EAAoB8B,EAAE3B,EAASkB,IAC5EjC,OAAO2C,eAAe5B,EAASkB,EAAK,CAAEW,YAAY,EAAMC,IAAKJ,EAAWR,IAG3E,ECPArB,EAAoBkC,EAAI,CAAC,EAGzBlC,EAAoBmC,EAAI,SAASC,GAChC,OAAOC,QAAQC,IAAIlD,OAAO+B,KAAKnB,EAAoBkC,GAAGK,QAAO,SAASC,EAAUnB,GAE/E,OADArB,EAAoBkC,EAAEb,GAAKe,EAASI,GAC7BA,CACR,GAAG,IACJ,ECPAxC,EAAoByC,EAAI,SAASL,GAEhC,OAAYA,EAAU,IAAM,CAAC,IAAM,cAAc,KAAO,eAAeA,GAAW,KACnF,ECJApC,EAAoB8B,EAAI,SAASY,EAAKC,GAAQ,OAAOvD,OAAOwD,UAAUC,eAAeC,KAAKJ,EAAKC,EAAO,ECAtG3C,EAAoB+C,EAAI,gC,WCIxB,IAAIC,EAAkB,CACrB,KAAM,GAkBPhD,EAAoBkC,EAAElB,EAAI,SAASoB,EAASI,GAEvCQ,EAAgBZ,IAElBa,cAAcjD,EAAoB+C,EAAI/C,EAAoByC,EAAEL,GAG/D,EAEA,IAAIc,EAAqBC,KAA0C,oCAAIA,KAA0C,qCAAK,GAClHC,EAA6BF,EAAmBG,KAAKC,KAAKJ,GAC9DA,EAAmBG,KAzBA,SAASE,GAC3B,IAAI5C,EAAW4C,EAAK,GAChBC,EAAcD,EAAK,GACnBE,EAAUF,EAAK,GACnB,IAAI,IAAItD,KAAYuD,EAChBxD,EAAoB8B,EAAE0B,EAAavD,KACrCD,EAAoBM,EAAEL,GAAYuD,EAAYvD,IAIhD,IADGwD,GAASA,EAAQzD,GACdW,EAASzB,QACd8D,EAAgBrC,EAAS+C,OAAS,EACnCN,EAA2BG,EAC5B,C,ITtBInF,EAAO4B,EAAoBO,EAC/BP,EAAoBO,EAAI,WACvB,OAAO8B,QAAQC,IAAI,CAClBtC,EAAoBmC,EAAE,MACtBnC,EAAoBmC,EAAE,OACpBwB,KAAKvF,EACT,EUL0B4B,EAAoBO,G","sources":["no-source/webpack/runtime/chunk loaded","no-source/webpack/runtime/startup chunk dependencies","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/resources/markdown-worker.ts","no-source/webpack/bootstrap","no-source/webpack/runtime/compat get default export","no-source/webpack/runtime/define property 
getters","no-source/webpack/runtime/ensure chunk","no-source/webpack/runtime/get javascript chunk filename","no-source/webpack/runtime/hasOwnProperty shorthand","no-source/webpack/runtime/publicPath","no-source/webpack/runtime/importScripts chunk loading","no-source/webpack/startup"],"names":["deferred","next","whiteListNormal","whiteListSvg","onTagAttr","tag","name","value","api","renderMarkdown","content","markedOptions","whiteList","hassOptions","arguments","length","undefined","Object","assign","getDefaultWhiteList","input","allowSvg","svg","path","img","filterXSS","marked","expose","__webpack_module_cache__","__webpack_require__","moduleId","cachedModule","exports","module","__webpack_modules__","m","x","__webpack_exports__","O","result","chunkIds","fn","priority","notFulfilled","Infinity","i","fulfilled","j","keys","every","key","splice","r","n","getter","__esModule","d","a","definition","o","defineProperty","enumerable","get","f","e","chunkId","Promise","all","reduce","promises","u","obj","prop","prototype","hasOwnProperty","call","p","installedChunks","importScripts","chunkLoadingGlobal","self","parentChunkLoadingFunction","push","bind","data","moreModules","runtime","pop","then"],"sourceRoot":""}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[1639],{71639:function(s){s.exports=[]}}]);

Some files were not shown because too many files have changed in this diff.