Compare commits


2 Commits

Author              SHA1        Message                              Date
Paulus Schoutsen    ec897081cd  Update supervisor/api/__init__.py    2022-01-20 14:58:00 -08:00
Paulus Schoutsen    839361133a  Add refresh updates API endpoint     2022-01-20 10:14:37 -08:00
595 changed files with 11243 additions and 22140 deletions
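The headline change in the second commit is a new Supervisor API route for refreshing update information, visible in the supervisor/api/__init__.py routing hunks further down (POST /refresh_updates, mapped to the Supervisor reload handler on this branch). As a minimal, hypothetical sketch of how a client on the add-on network might call it, assuming the usual SUPERVISOR_TOKEN-authenticated access Supervisor exposes to add-ons (the endpoint name comes from the diff below; the call pattern itself is an assumption, not part of this comparison):

    # Hypothetical call against the new endpoint; the token and the
    # "supervisor" hostname follow the standard add-on environment and are
    # not defined anywhere in this comparison.
    curl -sSL -X POST \
      -H "Authorization: Bearer ${SUPERVISOR_TOKEN}" \
      http://supervisor/refresh_updates | jq -r '.result'

On success the Supervisor API conventionally returns a JSON body whose result field is "ok", the same check the CI workflow below applies to its other ha and curl calls.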


@@ -10,7 +10,6 @@
"visualstudioexptteam.vscodeintellicode", "visualstudioexptteam.vscodeintellicode",
"esbenp.prettier-vscode" "esbenp.prettier-vscode"
], ],
"mounts": [ "type=volume,target=/var/lib/docker" ],
"settings": { "settings": {
"terminal.integrated.profiles.linux": { "terminal.integrated.profiles.linux": {
"zsh": { "zsh": {


@@ -31,7 +31,6 @@ categories:
- title: ":arrow_up: Dependency Updates" - title: ":arrow_up: Dependency Updates"
label: "dependencies" label: "dependencies"
collapse-after: 1
include-labels: include-labels:
- "breaking-change" - "breaking-change"


@@ -33,7 +33,6 @@ on:
- setup.py - setup.py
env: env:
DEFAULT_PYTHON: 3.9
BUILD_NAME: supervisor BUILD_NAME: supervisor
BUILD_TYPE: supervisor BUILD_TYPE: supervisor
WHEELS_TAG: 3.9-alpine3.14 WHEELS_TAG: 3.9-alpine3.14
@@ -50,7 +49,7 @@ jobs:
requirements: ${{ steps.requirements.outputs.changed }} requirements: ${{ steps.requirements.outputs.changed }}
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
with: with:
fetch-depth: 0 fetch-depth: 0
@@ -85,13 +84,13 @@ jobs:
arch: ${{ fromJson(needs.init.outputs.architectures) }} arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
with: with:
fetch-depth: 0 fetch-depth: 0
- name: Build wheels - name: Build wheels
if: needs.init.outputs.requirements == 'true' if: needs.init.outputs.requirements == 'true'
uses: home-assistant/wheels@2022.01.2 uses: home-assistant/wheels@master
with: with:
tag: ${{ env.WHEELS_TAG }} tag: ${{ env.WHEELS_TAG }}
arch: ${{ matrix.arch }} arch: ${{ matrix.arch }}
@@ -110,14 +109,14 @@ jobs:
- name: Login to DockerHub - name: Login to DockerHub
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v2.0.0 uses: docker/login-action@v1.12.0
with: with:
username: ${{ secrets.DOCKERHUB_USERNAME }} username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }} password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry - name: Login to GitHub Container Registry
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
uses: docker/login-action@v2.0.0 uses: docker/login-action@v1.12.0
with: with:
registry: ghcr.io registry: ghcr.io
username: ${{ github.repository_owner }} username: ${{ github.repository_owner }}
@@ -128,7 +127,7 @@ jobs:
run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
- name: Build supervisor - name: Build supervisor
uses: home-assistant/builder@2022.06.2 uses: home-assistant/builder@2021.12.0
with: with:
args: | args: |
$BUILD_ARGS \ $BUILD_ARGS \
@@ -139,43 +138,30 @@ jobs:
CAS_API_KEY: ${{ secrets.CAS_TOKEN }} CAS_API_KEY: ${{ secrets.CAS_TOKEN }}
codenotary: codenotary:
name: CAS signature name: CodeNotary signature
needs: init needs: init
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Checkout the repository - name: Checkout the repository
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
with: with:
fetch-depth: 0 fetch-depth: 0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.publish == 'true'
uses: actions/setup-python@v4.2.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
- name: Set version - name: Set version
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/version@master uses: home-assistant/actions/helpers/version@master
with: with:
type: ${{ env.BUILD_TYPE }} type: ${{ env.BUILD_TYPE }}
- name: Install dirhash and calc hash - name: Signing image
if: needs.init.outputs.publish == 'true'
id: dirhash
run: |
pip3 install dirhash
dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
echo "::set-output name=dirhash::${dir_hash}"
- name: Signing Source
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
uses: home-assistant/actions/helpers/codenotary@master uses: home-assistant/actions/helpers/codenotary@master
with: with:
source: hash://${{ steps.dirhash.outputs.dirhash }} source: dir://${{ github.workspace }}
asset: supervisor-${{ needs.init.outputs.version }} user: ${{ secrets.VCN_USER }}
token: ${{ secrets.CAS_TOKEN }} password: ${{ secrets.VCN_PASSWORD }}
organisation: ${{ secrets.VCN_ORG }}
version: version:
name: Update version name: Update version
@@ -184,7 +170,7 @@ jobs:
steps: steps:
- name: Checkout the repository - name: Checkout the repository
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Initialize git - name: Initialize git
if: needs.init.outputs.publish == 'true' if: needs.init.outputs.publish == 'true'
@@ -209,11 +195,11 @@ jobs:
timeout-minutes: 60 timeout-minutes: 60
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Build the Supervisor - name: Build the Supervisor
if: needs.init.outputs.publish != 'true' if: needs.init.outputs.publish != 'true'
uses: home-assistant/builder@2022.06.2 uses: home-assistant/builder@2021.12.0
with: with:
args: | args: |
--test \ --test \
@@ -260,13 +246,13 @@ jobs:
run: | run: |
echo "Checking supervisor info" echo "Checking supervisor info"
test=$(docker exec hassio_cli ha supervisor info --no-progress --raw-json | jq -r '.result') test=$(docker exec hassio_cli ha supervisor info --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then if [ "$test" != "ok" ];then
exit 1 exit 1
fi fi
echo "Checking supervisor network info" echo "Checking supervisor network info"
test=$(docker exec hassio_cli ha network info --no-progress --raw-json | jq -r '.result') test=$(docker exec hassio_cli ha network info --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then if [ "$test" != "ok" ];then
exit 1 exit 1
fi fi
@@ -274,19 +260,13 @@ jobs:
run: | run: |
echo "Install Core SSH Add-on" echo "Install Core SSH Add-on"
test=$(docker exec hassio_cli ha addons install core_ssh --no-progress --raw-json | jq -r '.result') test=$(docker exec hassio_cli ha addons install core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then if [ "$test" != "ok" ];then
exit 1
fi
# Make sure it actually installed
test=$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.version')
if [[ "$test" == "null" ]]; then
exit 1 exit 1
fi fi
echo "Start Core SSH Add-on" echo "Start Core SSH Add-on"
test=$(docker exec hassio_cli ha addons start core_ssh --no-progress --raw-json | jq -r '.result') test=$(docker exec hassio_cli ha addons start core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then if [ "$test" != "ok" ];then
exit 1 exit 1
fi fi
@@ -295,77 +275,19 @@ jobs:
run: | run: |
echo "Enable Content-Trust" echo "Enable Content-Trust"
test=$(docker exec hassio_cli ha security options --content-trust=true --no-progress --raw-json | jq -r '.result') test=$(docker exec hassio_cli ha security options --content-trust=true --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then if [ "$test" != "ok" ];then
exit 1 exit 1
fi fi
echo "Run supervisor health check" echo "Run supervisor health check"
test=$(docker exec hassio_cli ha resolution healthcheck --no-progress --raw-json | jq -r '.result') test=$(docker exec hassio_cli ha resolution healthcheck --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then if [ "$test" != "ok" ];then
exit 1 exit 1
fi fi
echo "Check supervisor unhealthy" echo "Check supervisor unhealthy"
test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unhealthy[]') test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unhealthy[]')
if [ "$test" != "" ]; then if [ "$test" != "" ];then
exit 1
fi
echo "Check supervisor supported"
test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unsupported[]')
if [[ "$test" =~ source_mods ]]; then
exit 1
fi
- name: Create full backup
id: backup
run: |
test=$(docker exec hassio_cli ha backups new --no-progress --raw-json)
if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
exit 1
fi
echo "::set-output name=slug::$(echo $test | jq -r '.data.slug')"
- name: Uninstall SSH add-on
run: |
test=$(docker exec hassio_cli ha addons uninstall core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
- name: Restart supervisor
run: |
test=$(docker exec hassio_cli ha supervisor restart --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
- name: Wait for Supervisor to come up
run: |
SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
ping="error"
while [ "$ping" != "ok" ]; do
ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r '.result')
sleep 5
done
- name: Restore SSH add-on from backup
run: |
test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --addons core_ssh --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1
fi
# Make sure it actually installed
test=$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.version')
if [[ "$test" == "null" ]]; then
exit 1
fi
- name: Restore SSL directory from backup
run: |
test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --folders ssl --no-progress --raw-json | jq -r '.result')
if [ "$test" != "ok" ]; then
exit 1 exit 1
fi fi


@@ -10,7 +10,7 @@ on:
env: env:
DEFAULT_PYTHON: 3.9 DEFAULT_PYTHON: 3.9
PRE_COMMIT_HOME: ~/.cache/pre-commit PRE_COMMIT_HOME: ~/.cache/pre-commit
DEFAULT_CAS: v1.0.2 DEFAULT_VCN: v0.9.8
jobs: jobs:
# Separate job to pre-populate the base dependency cache # Separate job to pre-populate the base dependency cache
@@ -23,15 +23,15 @@ jobs:
name: Prepare Python ${{ matrix.python-version }} dependencies name: Prepare Python ${{ matrix.python-version }} dependencies
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
id: python id: python
uses: actions/setup-python@v4.2.0 uses: actions/setup-python@v2.3.1
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: venv path: venv
key: | key: |
@@ -45,7 +45,7 @@ jobs:
pip install -r requirements.txt -r requirements_tests.txt pip install -r requirements.txt -r requirements_tests.txt
- name: Restore pre-commit environment from cache - name: Restore pre-commit environment from cache
id: cache-precommit id: cache-precommit
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: ${{ env.PRE_COMMIT_HOME }} path: ${{ env.PRE_COMMIT_HOME }}
key: | key: |
@@ -64,15 +64,15 @@ jobs:
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0 uses: actions/setup-python@v2.3.1
id: python id: python
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: venv path: venv
key: | key: |
@@ -93,7 +93,7 @@ jobs:
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Register hadolint problem matcher - name: Register hadolint problem matcher
run: | run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json" echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -108,15 +108,15 @@ jobs:
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0 uses: actions/setup-python@v2.3.1
id: python id: python
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: venv path: venv
key: | key: |
@@ -128,7 +128,7 @@ jobs:
exit 1 exit 1
- name: Restore pre-commit environment from cache - name: Restore pre-commit environment from cache
id: cache-precommit id: cache-precommit
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: ${{ env.PRE_COMMIT_HOME }} path: ${{ env.PRE_COMMIT_HOME }}
key: | key: |
@@ -152,15 +152,15 @@ jobs:
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0 uses: actions/setup-python@v2.3.1
id: python id: python
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: venv path: venv
key: | key: |
@@ -184,15 +184,15 @@ jobs:
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0 uses: actions/setup-python@v2.3.1
id: python id: python
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: venv path: venv
key: | key: |
@@ -204,7 +204,7 @@ jobs:
exit 1 exit 1
- name: Restore pre-commit environment from cache - name: Restore pre-commit environment from cache
id: cache-precommit id: cache-precommit
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: ${{ env.PRE_COMMIT_HOME }} path: ${{ env.PRE_COMMIT_HOME }}
key: | key: |
@@ -225,15 +225,15 @@ jobs:
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0 uses: actions/setup-python@v2.3.1
id: python id: python
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: venv path: venv
key: | key: |
@@ -245,7 +245,7 @@ jobs:
exit 1 exit 1
- name: Restore pre-commit environment from cache - name: Restore pre-commit environment from cache
id: cache-precommit id: cache-precommit
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: ${{ env.PRE_COMMIT_HOME }} path: ${{ env.PRE_COMMIT_HOME }}
key: | key: |
@@ -269,15 +269,15 @@ jobs:
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0 uses: actions/setup-python@v2.3.1
id: python id: python
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: venv path: venv
key: | key: |
@@ -301,15 +301,15 @@ jobs:
needs: prepare needs: prepare
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0 uses: actions/setup-python@v2.3.1
id: python id: python
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: venv path: venv
key: | key: |
@@ -321,7 +321,7 @@ jobs:
exit 1 exit 1
- name: Restore pre-commit environment from cache - name: Restore pre-commit environment from cache
id: cache-precommit id: cache-precommit
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: ${{ env.PRE_COMMIT_HOME }} path: ${{ env.PRE_COMMIT_HOME }}
key: | key: |
@@ -345,19 +345,19 @@ jobs:
name: Run tests Python ${{ matrix.python-version }} name: Run tests Python ${{ matrix.python-version }}
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4.2.0 uses: actions/setup-python@v2.3.1
id: python id: python
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
- name: Install CAS tools - name: Install VCN tools
uses: home-assistant/actions/helpers/cas@master uses: home-assistant/actions/helpers/vcn@master
with: with:
version: ${{ env.DEFAULT_CAS }} vcn_version: ${{ env.DEFAULT_VCN }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: venv path: venv
key: | key: |
@@ -392,7 +392,7 @@ jobs:
-o console_output_style=count \ -o console_output_style=count \
tests tests
- name: Upload coverage artifact - name: Upload coverage artifact
uses: actions/upload-artifact@v3.1.0 uses: actions/upload-artifact@v2.3.1
with: with:
name: coverage-${{ matrix.python-version }} name: coverage-${{ matrix.python-version }}
path: .coverage path: .coverage
@@ -403,15 +403,15 @@ jobs:
needs: pytest needs: pytest
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }} - name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@v4.2.0 uses: actions/setup-python@v2.3.1
id: python id: python
with: with:
python-version: ${{ env.DEFAULT_PYTHON }} python-version: ${{ env.DEFAULT_PYTHON }}
- name: Restore Python virtual environment - name: Restore Python virtual environment
id: cache-venv id: cache-venv
uses: actions/cache@v3.0.6 uses: actions/cache@v2.1.7
with: with:
path: venv path: venv
key: | key: |
@@ -422,7 +422,7 @@ jobs:
echo "Failed to restore Python virtual environment from cache" echo "Failed to restore Python virtual environment from cache"
exit 1 exit 1
- name: Download all coverage artifacts - name: Download all coverage artifacts
uses: actions/download-artifact@v3 uses: actions/download-artifact@v2
- name: Combine coverage results - name: Combine coverage results
run: | run: |
. venv/bin/activate . venv/bin/activate
@@ -430,4 +430,4 @@ jobs:
coverage report coverage report
coverage xml coverage xml
- name: Upload coverage to Codecov - name: Upload coverage to Codecov
uses: codecov/codecov-action@v3.1.0 uses: codecov/codecov-action@v2.1.0


@@ -9,7 +9,7 @@ jobs:
lock: lock:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: dessant/lock-threads@v3.0.0 - uses: dessant/lock-threads@v3
with: with:
github-token: ${{ github.token }} github-token: ${{ github.token }}
issue-inactive-days: "30" issue-inactive-days: "30"


@@ -11,7 +11,7 @@ jobs:
name: Release Drafter name: Release Drafter
steps: steps:
- name: Checkout the repository - name: Checkout the repository
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
with: with:
fetch-depth: 0 fetch-depth: 0
@@ -36,7 +36,7 @@ jobs:
echo "::set-output name=version::$datepre.$newpost" echo "::set-output name=version::$datepre.$newpost"
- name: Run Release Drafter - name: Run Release Drafter
uses: release-drafter/release-drafter@v5.20.0 uses: release-drafter/release-drafter@v5
with: with:
tag: ${{ steps.version.outputs.version }} tag: ${{ steps.version.outputs.version }}
name: ${{ steps.version.outputs.version }} name: ${{ steps.version.outputs.version }}


@@ -10,9 +10,9 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Check out code from GitHub - name: Check out code from GitHub
uses: actions/checkout@v3.0.2 uses: actions/checkout@v2.4.0
- name: Sentry Release - name: Sentry Release
uses: getsentry/action-release@v1.2.0 uses: getsentry/action-release@v1.1.6
env: env:
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
SENTRY_ORG: ${{ secrets.SENTRY_ORG }} SENTRY_ORG: ${{ secrets.SENTRY_ORG }}


@@ -9,7 +9,7 @@ jobs:
stale: stale:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- uses: actions/stale@v5.1.1 - uses: actions/stale@v4
with: with:
repo-token: ${{ secrets.GITHUB_TOKEN }} repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60 days-before-stale: 60


@@ -1,6 +1,6 @@
repos: repos:
- repo: https://github.com/psf/black - repo: https://github.com/psf/black
rev: 22.6.0 rev: 21.12b0
hooks: hooks:
- id: black - id: black
args: args:
@@ -28,7 +28,7 @@ repos:
hooks: hooks:
- id: isort - id: isort
- repo: https://github.com/asottile/pyupgrade - repo: https://github.com/asottile/pyupgrade
rev: v2.32.1 rev: v2.31.0
hooks: hooks:
- id: pyupgrade - id: pyupgrade
args: [--py39-plus] args: [--py39-plus]


@@ -5,12 +5,10 @@ ENV \
S6_SERVICES_GRACETIME=10000 \ S6_SERVICES_GRACETIME=10000 \
SUPERVISOR_API=http://localhost SUPERVISOR_API=http://localhost
ARG \ ARG BUILD_ARCH
BUILD_ARCH \ WORKDIR /usr/src
CAS_VERSION
# Install base # Install base
WORKDIR /usr/src
RUN \ RUN \
set -x \ set -x \
&& apk add --no-cache \ && apk add --no-cache \
@@ -20,20 +18,7 @@ RUN \
libffi \ libffi \
libpulse \ libpulse \
musl \ musl \
openssl \ openssl
&& apk add --no-cache --virtual .build-dependencies \
build-base \
go \
\
&& git clone -b "v${CAS_VERSION}" --depth 1 \
https://github.com/codenotary/cas \
&& cd cas \
&& make cas \
&& mv cas /usr/bin/cas \
\
&& apk del .build-dependencies \
&& rm -rf /root/go /root/.cache \
&& rm -rf /usr/src/cas
# Install requirements # Install requirements
COPY requirements.txt . COPY requirements.txt .


@@ -9,8 +9,6 @@ build_from:
codenotary: codenotary:
signer: notary@home-assistant.io signer: notary@home-assistant.io
base_image: notary@home-assistant.io base_image: notary@home-assistant.io
args:
CAS_VERSION: 1.0.2
labels: labels:
io.hass.type: supervisor io.hass.type: supervisor
org.opencontainers.image.title: Home Assistant Supervisor org.opencontainers.image.title: Home Assistant Supervisor


@@ -12,19 +12,24 @@ extension-pkg-whitelist=
# locally-disabled - it spams too much # locally-disabled - it spams too much
# duplicate-code - unavoidable # duplicate-code - unavoidable
# cyclic-import - doesn't test if both import on load # cyclic-import - doesn't test if both import on load
# abstract-class-little-used - prevents from setting right foundation
# abstract-class-not-used - is flaky, should not show up but does # abstract-class-not-used - is flaky, should not show up but does
# unused-argument - generic callbacks and setup methods create a lot of warnings # unused-argument - generic callbacks and setup methods create a lot of warnings
# redefined-variable-type - this is Python, we're duck typing!
# too-many-* - are not enforced for the sake of readability # too-many-* - are not enforced for the sake of readability
# too-few-* - same as too-many-* # too-few-* - same as too-many-*
# abstract-method - with intro of async there are always methods missing # abstract-method - with intro of async there are always methods missing
disable= disable=
format, format,
abstract-class-little-used,
abstract-method, abstract-method,
cyclic-import, cyclic-import,
duplicate-code, duplicate-code,
locally-disabled, locally-disabled,
no-else-return, no-else-return,
no-self-use,
not-context-manager, not-context-manager,
redefined-variable-type,
too-few-public-methods, too-few-public-methods,
too-many-arguments, too-many-arguments,
too-many-branches, too-many-branches,


@@ -1,2 +0,0 @@
[pytest]
asyncio_mode = auto


@@ -1,25 +1,22 @@
aiodns==3.0.0
aiohttp==3.8.1 aiohttp==3.8.1
async_timeout==4.0.2 async_timeout==4.0.2
atomicwrites-homeassistant==1.4.1 atomicwrites==1.4.0
attrs==22.1.0 attrs==21.2.0
awesomeversion==22.6.0 awesomeversion==22.1.0
brotli==1.0.9 brotli==1.0.9
cchardet==2.1.7 cchardet==2.1.7
ciso8601==2.2.0 ciso8601==2.2.0
colorlog==6.6.0 colorlog==6.6.0
cpe==1.2.1 cpe==1.2.1
cryptography==37.0.4 cryptography==36.0.1
debugpy==1.6.2 debugpy==1.5.1
deepmerge==1.0.1 deepmerge==1.0.1
dirhash==0.2.1
docker==5.0.3 docker==5.0.3
gitpython==3.1.27 gitpython==3.1.26
jinja2==3.1.2 jinja2==3.0.3
pulsectl==22.3.2 pulsectl==21.10.5
pyudev==0.23.2 pyudev==0.22.0
ruamel.yaml==0.17.17 ruamel.yaml==0.17.17
securetar==2022.2.0 sentry-sdk==1.5.2
sentry-sdk==1.9.2 voluptuous==0.12.2
voluptuous==0.13.1
dbus-next==0.2.3 dbus-next==0.2.3


@@ -1,15 +1,14 @@
black==22.6.0 black==21.12b0
codecov==2.1.12 codecov==2.1.12
coverage==6.4.3 coverage==6.2
flake8-docstrings==1.6.0 flake8-docstrings==1.6.0
flake8==5.0.4 flake8==4.0.1
pre-commit==2.20.0 pre-commit==2.17.0
pydocstyle==6.1.1 pydocstyle==6.1.1
pylint==2.14.5 pylint==2.12.2
pytest-aiohttp==1.0.4 pytest-aiohttp==0.3.0
pytest-asyncio==0.18.3 pytest-asyncio==0.12.0 # NB!: Versions over 0.12.0 breaks pytest-aiohttp (https://github.com/aio-libs/pytest-aiohttp/issues/16)
pytest-cov==3.0.0 pytest-cov==3.0.0
pytest-timeout==2.1.0 pytest-timeout==2.0.2
pytest==7.1.2 pytest==6.2.5
pyupgrade==2.37.3 pyupgrade==2.31.0
time-machine==2.7.1

rootfs/etc/cont-init.d/udev.sh (0 changed lines; Executable file → Normal file)

rootfs/etc/services.d/supervisor/finish (11 changed lines; Executable file → Normal file)

@@ -1,11 +1,8 @@
#!/usr/bin/env bashio #!/usr/bin/execlineb -S1
# ============================================================================== # ==============================================================================
# Take down the S6 supervision tree when Supervisor fails # Take down the S6 supervision tree when Supervisor fails
# ============================================================================== # ==============================================================================
if { s6-test ${1} -ne 100 }
if { s6-test ${1} -ne 256 }
if [[ "$1" -ne 100 ]] && [[ "$1" -ne 256 ]]; then redirfd -w 2 /dev/null s6-svscanctl -t /var/run/s6/services
bashio::log.warning "Halt Supervisor"
/run/s6/basedir/bin/halt
fi
bashio::log.info "Supervisor restart after closing"

rootfs/etc/services.d/supervisor/run (1 changed line; Executable file → Normal file)

@@ -3,6 +3,5 @@
# Start Supervisor service # Start Supervisor service
# ============================================================================== # ==============================================================================
export LD_PRELOAD="/usr/local/lib/libjemalloc.so.2" export LD_PRELOAD="/usr/local/lib/libjemalloc.so.2"
export MALLOC_CONF="background_thread:true,metadata_thp:auto"
exec python3 -m supervisor exec python3 -m supervisor

rootfs/etc/services.d/watchdog/finish (11 changed lines; Executable file → Normal file)

@@ -1,11 +1,8 @@
#!/usr/bin/env bashio #!/usr/bin/execlineb -S1
# ============================================================================== # ==============================================================================
# Take down the S6 supervision tree when Watchdog fails # Take down the S6 supervision tree when Watchdog fails
# ============================================================================== # ==============================================================================
if { s6-test ${1} -ne 0 }
if { s6-test ${1} -ne 256 }
if [[ "$1" -ne 0 ]] && [[ "$1" -ne 256 ]]; then s6-svscanctl -t /var/run/s6/services
bashio::log.warning "Halt Supervisor (Wuff)"
/run/s6/basedir/bin/halt
fi
bashio::log.info "Watchdog restart after closing"

rootfs/etc/services.d/watchdog/run (2 changed lines; Executable file → Normal file)

@@ -31,4 +31,4 @@ do
done done
bashio::exit.nok "Watchdog detected issue with Supervisor - taking container down!" basio::exit.nok "Watchdog detected issue with Supervisor - taking container down!"


@@ -1,4 +0,0 @@
-----BEGIN PUBLIC KEY-----
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE03LvYuz79GTJx4uKp3w6NrSe5JZI
iBtgzzYi0YQYtZO/r+xFpgDJEa0gLHkXtl94fpqrFiN89In83lzaszbZtA==
-----END PUBLIC KEY-----


@@ -1,8 +0,0 @@
{
"currentcontext": {
"LcHost": "cas.codenotary.com",
"LcPort": "443"
},
"schemaversion": 3,
"users": null
}


@@ -49,7 +49,6 @@ setup(
"supervisor.resolution.evaluations", "supervisor.resolution.evaluations",
"supervisor.resolution.fixups", "supervisor.resolution.fixups",
"supervisor.resolution", "supervisor.resolution",
"supervisor.security",
"supervisor.services.modules", "supervisor.services.modules",
"supervisor.services", "supervisor.services",
"supervisor.store", "supervisor.store",


@@ -167,7 +167,6 @@ class AddonManager(CoreSysAttributes):
self.data.install(store) self.data.install(store)
addon = Addon(self.coresys, slug) addon = Addon(self.coresys, slug)
await addon.load()
if not addon.path_data.is_dir(): if not addon.path_data.is_dir():
_LOGGER.info( _LOGGER.info(
@@ -179,7 +178,7 @@ class AddonManager(CoreSysAttributes):
await addon.install_apparmor() await addon.install_apparmor()
try: try:
await addon.instance.install(store.version, store.image, arch=addon.arch) await addon.instance.install(store.version, store.image)
except DockerError as err: except DockerError as err:
self.data.uninstall(addon) self.data.uninstall(addon)
raise AddonsError() from err raise AddonsError() from err


@@ -14,7 +14,6 @@ from typing import Any, Awaitable, Final, Optional
import aiohttp import aiohttp
from deepmerge import Merger from deepmerge import Merger
from securetar import atomic_contents_add, secure_path
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error from voluptuous.humanize import humanize_error
@@ -48,12 +47,9 @@ from ..const import (
AddonBoot, AddonBoot,
AddonStartup, AddonStartup,
AddonState, AddonState,
BusEvent,
) )
from ..coresys import CoreSys from ..coresys import CoreSys
from ..docker.addon import DockerAddon from ..docker.addon import DockerAddon
from ..docker.const import ContainerState
from ..docker.monitor import DockerContainerStateEvent
from ..docker.stats import DockerStats from ..docker.stats import DockerStats
from ..exceptions import ( from ..exceptions import (
AddonConfigurationError, AddonConfigurationError,
@@ -61,6 +57,7 @@ from ..exceptions import (
AddonsNotSupportedError, AddonsNotSupportedError,
ConfigurationFileError, ConfigurationFileError,
DockerError, DockerError,
DockerRequestError,
HostAppArmorError, HostAppArmorError,
) )
from ..hardware.data import Device from ..hardware.data import Device
@@ -68,7 +65,8 @@ from ..homeassistant.const import WSEvent, WSType
from ..utils import check_port from ..utils import check_port
from ..utils.apparmor import adjust_profile from ..utils.apparmor import adjust_profile
from ..utils.json import read_json_file, write_json_file from ..utils.json import read_json_file, write_json_file
from .const import WATCHDOG_RETRY_SECONDS, AddonBackupMode from ..utils.tar import atomic_contents_add, secure_path
from .const import AddonBackupMode
from .model import AddonModel, Data from .model import AddonModel, Data
from .options import AddonOptions from .options import AddonOptions
from .utils import remove_data from .utils import remove_data
@@ -86,6 +84,8 @@ RE_WATCHDOG = re.compile(
r":\/\/\[HOST\]:(?:\[PORT:)?(?P<t_port>\d+)\]?(?P<s_suffix>.*)$" r":\/\/\[HOST\]:(?:\[PORT:)?(?P<t_port>\d+)\]?(?P<s_suffix>.*)$"
) )
RE_OLD_AUDIO = re.compile(r"\d+,\d+")
WATCHDOG_TIMEOUT = aiohttp.ClientTimeout(total=10) WATCHDOG_TIMEOUT = aiohttp.ClientTimeout(total=10)
_OPTIONS_MERGER: Final = Merger( _OPTIONS_MERGER: Final = Merger(
@@ -137,16 +137,15 @@ class Addon(AddonModel):
async def load(self) -> None: async def load(self) -> None:
"""Async initialize of object.""" """Async initialize of object."""
self.sys_bus.register_event(
BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.container_state_changed
)
self.sys_bus.register_event(
BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.watchdog_container
)
with suppress(DockerError): with suppress(DockerError):
await self.instance.attach(version=self.version) await self.instance.attach(version=self.version)
# Evaluate state
if await self.instance.is_running():
self.state = AddonState.STARTED
else:
self.state = AddonState.STOPPED
@property @property
def ip_address(self) -> IPv4Address: def ip_address(self) -> IPv4Address:
"""Return IP of add-on instance.""" """Return IP of add-on instance."""
@@ -356,9 +355,6 @@ class Addon(AddonModel):
@property @property
def ingress_panel(self) -> Optional[bool]: def ingress_panel(self) -> Optional[bool]:
"""Return True if the add-on access support ingress.""" """Return True if the add-on access support ingress."""
if not self.with_ingress:
return None
return self.persist[ATTR_INGRESS_PANEL] return self.persist[ATTR_INGRESS_PANEL]
@ingress_panel.setter @ingress_panel.setter
@@ -371,7 +367,13 @@ class Addon(AddonModel):
"""Return a pulse profile for output or None.""" """Return a pulse profile for output or None."""
if not self.with_audio: if not self.with_audio:
return None return None
return self.persist.get(ATTR_AUDIO_OUTPUT)
# Fallback with old audio settings
# Remove after 210
output_data = self.persist.get(ATTR_AUDIO_OUTPUT)
if output_data and RE_OLD_AUDIO.fullmatch(output_data):
return None
return output_data
@audio_output.setter @audio_output.setter
def audio_output(self, value: Optional[str]): def audio_output(self, value: Optional[str]):
@@ -384,7 +386,12 @@ class Addon(AddonModel):
if not self.with_audio: if not self.with_audio:
return None return None
return self.persist.get(ATTR_AUDIO_INPUT) # Fallback with old audio settings
# Remove after 210
input_data = self.persist.get(ATTR_AUDIO_INPUT)
if input_data and RE_OLD_AUDIO.fullmatch(input_data):
return None
return input_data
@audio_input.setter @audio_input.setter
def audio_input(self, value: Optional[str]) -> None: def audio_input(self, value: Optional[str]) -> None:
@@ -619,17 +626,27 @@ class Addon(AddonModel):
# Start Add-on # Start Add-on
try: try:
await self.instance.run() await self.instance.run()
except DockerRequestError as err:
self.state = AddonState.ERROR
raise AddonsError() from err
except DockerError as err: except DockerError as err:
self.state = AddonState.ERROR self.state = AddonState.ERROR
raise AddonsError() from err raise AddonsError() from err
else:
self.state = AddonState.STARTED
async def stop(self) -> None: async def stop(self) -> None:
"""Stop add-on.""" """Stop add-on."""
try: try:
await self.instance.stop() await self.instance.stop()
except DockerRequestError as err:
self.state = AddonState.ERROR
raise AddonsError() from err
except DockerError as err: except DockerError as err:
self.state = AddonState.ERROR self.state = AddonState.ERROR
raise AddonsError() from err raise AddonsError() from err
else:
self.state = AddonState.STOPPED
async def restart(self) -> None: async def restart(self) -> None:
"""Restart add-on.""" """Restart add-on."""
@@ -677,18 +694,16 @@ class Addon(AddonModel):
try: try:
command_return = await self.instance.run_inside(command) command_return = await self.instance.run_inside(command)
if command_return.exit_code != 0: if command_return.exit_code != 0:
_LOGGER.debug( _LOGGER.error(
"Pre-/Post backup command failed with: %s", command_return.output "Pre-/Post backup command returned error code: %s",
) command_return.exit_code,
raise AddonsError(
f"Pre-/Post backup command returned error code: {command_return.exit_code}",
_LOGGER.error,
) )
raise AddonsError()
except DockerError as err: except DockerError as err:
raise AddonsError( _LOGGER.error(
f"Failed running pre-/post backup command {command}: {str(err)}", "Failed running pre-/post backup command %s: %s", command, err
_LOGGER.error, )
) from err raise AddonsError() from err
async def backup(self, tar_file: tarfile.TarFile) -> None: async def backup(self, tar_file: tarfile.TarFile) -> None:
"""Backup state of an add-on.""" """Backup state of an add-on."""
@@ -733,7 +748,8 @@ class Addon(AddonModel):
def _write_tarfile(): def _write_tarfile():
"""Write tar inside loop.""" """Write tar inside loop."""
with tar_file as backup: with tar_file as backup:
# Backup metadata # Backup system
backup.add(temp, arcname=".") backup.add(temp, arcname=".")
# Backup data # Backup data
@@ -800,10 +816,12 @@ class Addon(AddonModel):
try: try:
data = SCHEMA_ADDON_BACKUP(data) data = SCHEMA_ADDON_BACKUP(data)
except vol.Invalid as err: except vol.Invalid as err:
raise AddonsError( _LOGGER.error(
f"Can't validate {self.slug}, backup data: {humanize_error(data, err)}", "Can't validate %s, backup data: %s",
_LOGGER.error, self.slug,
) from err humanize_error(data, err),
)
raise AddonsError() from err
# If available # If available
if not self._available(data[ATTR_SYSTEM]): if not self._available(data[ATTR_SYSTEM]):
@@ -875,69 +893,3 @@ class Addon(AddonModel):
return await self.start() return await self.start()
_LOGGER.info("Finished restore for add-on %s", self.slug) _LOGGER.info("Finished restore for add-on %s", self.slug)
def check_trust(self) -> Awaitable[None]:
"""Calculate Addon docker content trust.
Return Coroutine.
"""
return self.instance.check_trust()
async def container_state_changed(self, event: DockerContainerStateEvent) -> None:
"""Set addon state from container state."""
if event.name != self.instance.name:
return
if event.state in [
ContainerState.RUNNING,
ContainerState.HEALTHY,
ContainerState.UNHEALTHY,
]:
self.state = AddonState.STARTED
elif event.state == ContainerState.STOPPED:
self.state = AddonState.STOPPED
elif event.state == ContainerState.FAILED:
self.state = AddonState.ERROR
async def watchdog_container(self, event: DockerContainerStateEvent) -> None:
"""Process state changes in addon container and restart if necessary."""
if not (event.name == self.instance.name and self.watchdog):
return
if event.state == ContainerState.UNHEALTHY:
while await self.instance.current_state() == event.state:
if not self.in_progress:
_LOGGER.warning(
"Watchdog found addon %s is unhealthy, restarting...", self.name
)
try:
await self.restart()
except AddonsError as err:
_LOGGER.error("Watchdog restart of addon %s failed!", self.name)
self.sys_capture_exception(err)
else:
break
await asyncio.sleep(WATCHDOG_RETRY_SECONDS)
elif event.state == ContainerState.FAILED:
# Ensure failed container is removed before attempting reanimation
with suppress(DockerError):
await self.instance.stop(remove_container=True)
while await self.instance.current_state() == event.state:
if not self.in_progress:
_LOGGER.warning(
"Watchdog found addon %s failed, restarting...", self.name
)
try:
await self.start()
except AddonsError as err:
_LOGGER.error(
"Watchdog reanimation of addon %s failed!", self.name
)
self.sys_capture_exception(err)
else:
break
await asyncio.sleep(WATCHDOG_RETRY_SECONDS)


@@ -10,5 +10,3 @@ class AddonBackupMode(str, Enum):
ATTR_BACKUP = "backup" ATTR_BACKUP = "backup"
ATTR_CODENOTARY = "codenotary"
WATCHDOG_RETRY_SECONDS = 10


@@ -79,7 +79,7 @@ from ..const import (
) )
from ..coresys import CoreSys, CoreSysAttributes from ..coresys import CoreSys, CoreSysAttributes
from ..docker.const import Capabilities from ..docker.const import Capabilities
from .const import ATTR_BACKUP, ATTR_CODENOTARY from .const import ATTR_BACKUP
from .options import AddonOptions, UiOptions from .options import AddonOptions, UiOptions
from .validate import RE_SERVICE, RE_VOLUME from .validate import RE_SERVICE, RE_VOLUME
@@ -503,14 +503,6 @@ class AddonModel(CoreSysAttributes, ABC):
"""Return list of supported machine.""" """Return list of supported machine."""
return self.data.get(ATTR_MACHINE, []) return self.data.get(ATTR_MACHINE, [])
@property
def arch(self) -> str:
"""Return architecture to use for the addon's image."""
if ATTR_IMAGE in self.data:
return self.sys_arch.match(self.data[ATTR_ARCH])
return self.sys_arch.default
@property @property
def image(self) -> Optional[str]: def image(self) -> Optional[str]:
"""Generate image name from data.""" """Generate image name from data."""
@@ -586,16 +578,6 @@ class AddonModel(CoreSysAttributes, ABC):
"""Return True if the add-on accesses the system journal.""" """Return True if the add-on accesses the system journal."""
return self.data[ATTR_JOURNALD] return self.data[ATTR_JOURNALD]
@property
def signed(self) -> bool:
"""Return True if the image is signed."""
return ATTR_CODENOTARY in self.data
@property
def codenotary(self) -> Optional[str]:
"""Return Signer email address for CAS."""
return self.data.get(ATTR_CODENOTARY)
def __eq__(self, other): def __eq__(self, other):
"""Compaired add-on objects.""" """Compaired add-on objects."""
if not isinstance(other, AddonModel): if not isinstance(other, AddonModel):


@@ -16,10 +16,10 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
def rating_security(addon: AddonModel) -> int: def rating_security(addon: AddonModel) -> int:
"""Return 1-8 for security rating. """Return 1-6 for security rating.
1 = not secure 1 = not secure
8 = high secure 6 = high secure
""" """
rating = 5 rating = 5
@@ -35,10 +35,6 @@ def rating_security(addon: AddonModel) -> int:
elif addon.access_auth_api: elif addon.access_auth_api:
rating += 1 rating += 1
# Signed
if addon.signed:
rating += 1
# Privileged options # Privileged options
if ( if (
any( any(
@@ -74,7 +70,7 @@ def rating_security(addon: AddonModel) -> int:
if addon.access_docker_api or addon.with_full_access: if addon.access_docker_api or addon.with_full_access:
rating = 1 rating = 1
return max(min(8, rating), 1) return max(min(6, rating), 1)
async def remove_data(folder: Path) -> None: async def remove_data(folder: Path) -> None:


@@ -110,7 +110,7 @@ from ..validate import (
uuid_match, uuid_match,
version_tag, version_tag,
) )
from .const import ATTR_BACKUP, ATTR_CODENOTARY from .const import ATTR_BACKUP
from .options import RE_SCHEMA_ELEMENT from .options import RE_SCHEMA_ELEMENT
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -317,7 +317,6 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
vol.Optional(ATTR_BACKUP, default=AddonBackupMode.HOT): vol.Coerce( vol.Optional(ATTR_BACKUP, default=AddonBackupMode.HOT): vol.Coerce(
AddonBackupMode AddonBackupMode
), ),
vol.Optional(ATTR_CODENOTARY): vol.Email(),
vol.Optional(ATTR_OPTIONS, default={}): dict, vol.Optional(ATTR_OPTIONS, default={}): dict,
vol.Optional(ATTR_SCHEMA, default={}): vol.Any( vol.Optional(ATTR_SCHEMA, default={}): vol.Any(
vol.Schema( vol.Schema(


@@ -1,14 +1,10 @@
"""Init file for Supervisor RESTful API.""" """Init file for Supervisor RESTful API."""
import logging import logging
from pathlib import Path from pathlib import Path
from typing import Any, Optional from typing import Optional
from aiohttp import web from aiohttp import web
from supervisor.api.utils import api_process
from supervisor.const import AddonState
from supervisor.exceptions import APIAddonNotInstalled
from ..coresys import CoreSys, CoreSysAttributes from ..coresys import CoreSys, CoreSysAttributes
from .addons import APIAddons from .addons import APIAddons
from .audio import APIAudio from .audio import APIAudio
@@ -21,6 +17,7 @@ from .docker import APIDocker
from .hardware import APIHardware from .hardware import APIHardware
from .homeassistant import APIHomeAssistant from .homeassistant import APIHomeAssistant
from .host import APIHost from .host import APIHost
from .info import APIInfo
from .ingress import APIIngress from .ingress import APIIngress
from .jobs import APIJobs from .jobs import APIJobs
from .middleware.security import SecurityMiddleware from .middleware.security import SecurityMiddleware
@@ -30,7 +27,6 @@ from .observer import APIObserver
from .os import APIOS from .os import APIOS
from .proxy import APIProxy from .proxy import APIProxy
from .resolution import APIResoulution from .resolution import APIResoulution
from .root import APIRoot
from .security import APISecurity from .security import APISecurity
from .services import APIServices from .services import APIServices
from .store import APIStore from .store import APIStore
@@ -39,8 +35,7 @@ from .supervisor import APISupervisor
_LOGGER: logging.Logger = logging.getLogger(__name__) _LOGGER: logging.Logger = logging.getLogger(__name__)
MAX_CLIENT_SIZE: int = 1024**2 * 16 MAX_CLIENT_SIZE: int = 1024 ** 2 * 16
MAX_LINE_SIZE: int = 24570
class RestAPI(CoreSysAttributes): class RestAPI(CoreSysAttributes):
@@ -56,10 +51,6 @@ class RestAPI(CoreSysAttributes):
self.security.system_validation, self.security.system_validation,
self.security.token_validation, self.security.token_validation,
], ],
handler_args={
"max_line_size": MAX_LINE_SIZE,
"max_field_size": MAX_LINE_SIZE,
},
) )
# service stuff # service stuff
@@ -79,7 +70,7 @@ class RestAPI(CoreSysAttributes):
self._register_hardware() self._register_hardware()
self._register_homeassistant() self._register_homeassistant()
self._register_host() self._register_host()
self._register_root() self._register_info()
self._register_ingress() self._register_ingress()
self._register_multicast() self._register_multicast()
self._register_network() self._register_network()
@@ -168,7 +159,6 @@ class RestAPI(CoreSysAttributes):
[ [
web.get("/security/info", api_security.info), web.get("/security/info", api_security.info),
web.post("/security/options", api_security.options), web.post("/security/options", api_security.options),
web.post("/security/integrity", api_security.integrity_check),
] ]
) )
@@ -238,21 +228,12 @@ class RestAPI(CoreSysAttributes):
] ]
) )
def _register_root(self) -> None: def _register_info(self) -> None:
"""Register root functions.""" """Register info functions."""
api_root = APIRoot() api_info = APIInfo()
api_root.coresys = self.coresys api_info.coresys = self.coresys
self.webapp.add_routes([web.get("/info", api_root.info)]) self.webapp.add_routes([web.get("/info", api_info.info)])
self.webapp.add_routes([web.post("/refresh_updates", api_root.refresh_updates)])
self.webapp.add_routes(
[web.get("/available_updates", api_root.available_updates)]
)
# Remove: 2023
self.webapp.add_routes(
[web.get("/supervisor/available_updates", api_root.available_updates)]
)
def _register_resolution(self) -> None: def _register_resolution(self) -> None:
"""Register info functions.""" """Register info functions."""
@@ -303,6 +284,10 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get(
"/supervisor/available_updates", api_supervisor.available_updates
),
web.post("/refresh_updates", api_supervisor.reload),
web.get("/supervisor/ping", api_supervisor.ping), web.get("/supervisor/ping", api_supervisor.ping),
web.get("/supervisor/info", api_supervisor.info), web.get("/supervisor/info", api_supervisor.info),
web.get("/supervisor/stats", api_supervisor.stats), web.get("/supervisor/stats", api_supervisor.stats),
@@ -332,22 +317,17 @@ class RestAPI(CoreSysAttributes):
web.post("/core/start", api_hass.start), web.post("/core/start", api_hass.start),
web.post("/core/check", api_hass.check), web.post("/core/check", api_hass.check),
web.post("/core/rebuild", api_hass.rebuild), web.post("/core/rebuild", api_hass.rebuild),
] # Remove with old Supervisor fallback
)
# Reroute from legacy
self.webapp.add_routes(
[
web.get("/homeassistant/info", api_hass.info), web.get("/homeassistant/info", api_hass.info),
web.get("/homeassistant/logs", api_hass.logs), web.get("/homeassistant/logs", api_hass.logs),
web.get("/homeassistant/stats", api_hass.stats), web.get("/homeassistant/stats", api_hass.stats),
web.post("/homeassistant/options", api_hass.options), web.post("/homeassistant/options", api_hass.options),
web.post("/homeassistant/update", api_hass.update),
web.post("/homeassistant/restart", api_hass.restart), web.post("/homeassistant/restart", api_hass.restart),
web.post("/homeassistant/stop", api_hass.stop), web.post("/homeassistant/stop", api_hass.stop),
web.post("/homeassistant/start", api_hass.start), web.post("/homeassistant/start", api_hass.start),
web.post("/homeassistant/update", api_hass.update),
web.post("/homeassistant/rebuild", api_hass.rebuild),
web.post("/homeassistant/check", api_hass.check), web.post("/homeassistant/check", api_hass.check),
web.post("/homeassistant/rebuild", api_hass.rebuild),
] ]
) )
@@ -364,12 +344,7 @@ class RestAPI(CoreSysAttributes):
web.post("/core/api/{path:.+}", api_proxy.api), web.post("/core/api/{path:.+}", api_proxy.api),
web.get("/core/api/{path:.+}", api_proxy.api), web.get("/core/api/{path:.+}", api_proxy.api),
web.get("/core/api/", api_proxy.api), web.get("/core/api/", api_proxy.api),
] # Remove with old Supervisor fallback
)
# Reroute from legacy
self.webapp.add_routes(
[
web.get("/homeassistant/api/websocket", api_proxy.websocket), web.get("/homeassistant/api/websocket", api_proxy.websocket),
web.get("/homeassistant/websocket", api_proxy.websocket), web.get("/homeassistant/websocket", api_proxy.websocket),
web.get("/homeassistant/api/stream", api_proxy.stream), web.get("/homeassistant/api/stream", api_proxy.stream),
@@ -387,6 +362,8 @@ class RestAPI(CoreSysAttributes):
self.webapp.add_routes( self.webapp.add_routes(
[ [
web.get("/addons", api_addons.list), web.get("/addons", api_addons.list),
web.post("/addons/reload", api_addons.reload),
web.get("/addons/{addon}/info", api_addons.info),
web.post("/addons/{addon}/uninstall", api_addons.uninstall), web.post("/addons/{addon}/uninstall", api_addons.uninstall),
web.post("/addons/{addon}/start", api_addons.start), web.post("/addons/{addon}/start", api_addons.start),
web.post("/addons/{addon}/stop", api_addons.stop), web.post("/addons/{addon}/stop", api_addons.stop),
@@ -398,31 +375,16 @@ class RestAPI(CoreSysAttributes):
web.get("/addons/{addon}/options/config", api_addons.options_config), web.get("/addons/{addon}/options/config", api_addons.options_config),
web.post("/addons/{addon}/rebuild", api_addons.rebuild), web.post("/addons/{addon}/rebuild", api_addons.rebuild),
web.get("/addons/{addon}/logs", api_addons.logs), web.get("/addons/{addon}/logs", api_addons.logs),
web.get("/addons/{addon}/icon", api_addons.icon),
web.get("/addons/{addon}/logo", api_addons.logo),
web.get("/addons/{addon}/changelog", api_addons.changelog),
web.get("/addons/{addon}/documentation", api_addons.documentation),
web.post("/addons/{addon}/stdin", api_addons.stdin), web.post("/addons/{addon}/stdin", api_addons.stdin),
web.post("/addons/{addon}/security", api_addons.security), web.post("/addons/{addon}/security", api_addons.security),
web.get("/addons/{addon}/stats", api_addons.stats), web.get("/addons/{addon}/stats", api_addons.stats),
] ]
) )
# Legacy routing to support requests for not installed addons
api_store = APIStore()
api_store.coresys = self.coresys
@api_process
async def addons_addon_info(request: web.Request) -> dict[str, Any]:
"""Route to store if info requested for not installed addon."""
try:
return await api_addons.info(request)
except APIAddonNotInstalled:
# Route to store/{addon}/info but add missing fields
return dict(
await api_store.addons_addon_info_wrapped(request),
state=AddonState.UNKNOWN,
options=self.sys_addons.store[request.match_info["addon"]].options,
)
self.webapp.add_routes([web.get("/addons/{addon}/info", addons_addon_info)])
    def _register_ingress(self) -> None:
        """Register Ingress functions."""
        api_ingress = APIIngress()

@@ -444,6 +406,21 @@ class RestAPI(CoreSysAttributes):
        self.webapp.add_routes(
            [
+                web.get("/snapshots", api_backups.list),
+                web.post("/snapshots/reload", api_backups.reload),
+                web.post("/snapshots/new/full", api_backups.backup_full),
+                web.post("/snapshots/new/partial", api_backups.backup_partial),
+                web.post("/snapshots/new/upload", api_backups.upload),
+                web.get("/snapshots/{slug}/info", api_backups.info),
+                web.delete("/snapshots/{slug}", api_backups.remove),
+                web.post("/snapshots/{slug}/restore/full", api_backups.restore_full),
+                web.post(
+                    "/snapshots/{slug}/restore/partial",
+                    api_backups.restore_partial,
+                ),
+                web.get("/snapshots/{slug}/download", api_backups.download),
+                web.post("/snapshots/{slug}/remove", api_backups.remove),
+                # June 2021: /snapshots was renamed to /backups
                web.get("/backups", api_backups.list),
                web.post("/backups/reload", api_backups.reload),
                web.post("/backups/new/full", api_backups.backup_full),

@@ -538,15 +515,6 @@ class RestAPI(CoreSysAttributes):
                web.get("/store/addons", api_store.addons_list),
                web.get("/store/addons/{addon}", api_store.addons_addon_info),
                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
-                web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
-                web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
-                web.get(
-                    "/store/addons/{addon}/changelog", api_store.addons_addon_changelog
-                ),
-                web.get(
-                    "/store/addons/{addon}/documentation",
-                    api_store.addons_addon_documentation,
-                ),
                web.post(
                    "/store/addons/{addon}/install", api_store.addons_addon_install
                ),

@@ -565,26 +533,14 @@ class RestAPI(CoreSysAttributes):
                    "/store/repositories/{repository}",
                    api_store.repositories_repository_info,
                ),
-                web.post("/store/repositories", api_store.add_repository),
-                web.delete(
-                    "/store/repositories/{repository}", api_store.remove_repository
-                ),
            ]
        )

        # Reroute from legacy
        self.webapp.add_routes(
            [
-                web.post("/addons/reload", api_store.reload),
                web.post("/addons/{addon}/install", api_store.addons_addon_install),
                web.post("/addons/{addon}/update", api_store.addons_addon_update),
-                web.get("/addons/{addon}/icon", api_store.addons_addon_icon),
-                web.get("/addons/{addon}/logo", api_store.addons_addon_logo),
-                web.get("/addons/{addon}/changelog", api_store.addons_addon_changelog),
-                web.get(
-                    "/addons/{addon}/documentation",
-                    api_store.addons_addon_documentation,
-                ),
            ]
        )
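As a usage note for the snapshot/backup route pair registered above: both prefixes are answered by the same APIBackups handlers, with /snapshots kept only as a legacy alias. A minimal client-side sketch, not part of this diff, assuming the standard add-on environment (SUPERVISOR_TOKEN set, API reachable at http://supervisor):

import asyncio
import os

import aiohttp


async def list_backups() -> None:
    headers = {"X-Supervisor-Token": os.environ["SUPERVISOR_TOKEN"]}
    async with aiohttp.ClientSession(headers=headers) as session:
        # Legacy name, answered only while the compatibility routes exist
        async with session.get("http://supervisor/snapshots") as resp:
            print(await resp.json())
        # Preferred name since mid-2021
        async with session.get("http://supervisor/backups") as resp:
            print(await resp.json())


asyncio.run(list_backups())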

View File

@@ -52,11 +52,13 @@ from ..const import (
    ATTR_INGRESS_PANEL,
    ATTR_INGRESS_PORT,
    ATTR_INGRESS_URL,
+    ATTR_INSTALLED,
    ATTR_IP_ADDRESS,
    ATTR_KERNEL_MODULES,
    ATTR_LOGO,
    ATTR_LONG_DESCRIPTION,
    ATTR_MACHINE,
+    ATTR_MAINTAINER,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,

@@ -71,10 +73,12 @@ from ..const import (
    ATTR_PROTECTED,
    ATTR_PWNED,
    ATTR_RATING,
+    ATTR_REPOSITORIES,
    ATTR_REPOSITORY,
    ATTR_SCHEMA,
    ATTR_SERVICES,
    ATTR_SLUG,
+    ATTR_SOURCE,
    ATTR_STAGE,
    ATTR_STARTUP,
    ATTR_STATE,
@@ -91,20 +95,17 @@ from ..const import (
    ATTR_VIDEO,
    ATTR_WATCHDOG,
    ATTR_WEBUI,
+    CONTENT_TYPE_BINARY,
+    CONTENT_TYPE_PNG,
+    CONTENT_TYPE_TEXT,
    REQUEST_FROM,
    AddonBoot,
+    AddonState,
)
from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats
-from ..exceptions import (
-    APIAddonNotInstalled,
-    APIError,
-    APIForbidden,
-    PwnedError,
-    PwnedSecret,
-)
+from ..exceptions import APIError, APIForbidden, PwnedError, PwnedSecret
from ..validate import docker_ports
-from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate, json_loads

_LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -131,7 +132,7 @@ SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
class APIAddons(CoreSysAttributes):
    """Handle RESTful API for add-on functions."""

-    def _extract_addon(self, request: web.Request) -> Addon:
+    def _extract_addon(self, request: web.Request) -> AnyAddon:
        """Return addon, throw an exception it it doesn't exist."""
        addon_slug: str = request.match_info.get("addon")

@@ -145,11 +146,15 @@ class APIAddons(CoreSysAttributes):
        addon = self.sys_addons.get(addon_slug)
        if not addon:
            raise APIError(f"Addon {addon_slug} does not exist")
-        if not isinstance(addon, Addon) or not addon.is_installed:
-            raise APIAddonNotInstalled("Addon is not installed")

        return addon

+    def _extract_addon_installed(self, request: web.Request) -> Addon:
+        addon = self._extract_addon(request)
+        if not isinstance(addon, Addon) or not addon.is_installed:
+            raise APIError("Addon is not installed")
+        return addon
+
    @api_process
    async def list(self, request: web.Request) -> dict[str, Any]:
        """Return all add-ons or repositories."""
@@ -160,29 +165,42 @@ class APIAddons(CoreSysAttributes):
                ATTR_DESCRIPTON: addon.description,
                ATTR_ADVANCED: addon.advanced,
                ATTR_STAGE: addon.stage,
-                ATTR_VERSION: addon.version,
+                ATTR_VERSION: addon.version if addon.is_installed else None,
                ATTR_VERSION_LATEST: addon.latest_version,
-                ATTR_UPDATE_AVAILABLE: addon.need_update,
+                ATTR_UPDATE_AVAILABLE: addon.need_update
+                if addon.is_installed
+                else False,
+                ATTR_INSTALLED: addon.is_installed,
                ATTR_AVAILABLE: addon.available,
                ATTR_DETACHED: addon.is_detached,
                ATTR_HOMEASSISTANT: addon.homeassistant_version,
-                ATTR_STATE: addon.state,
                ATTR_REPOSITORY: addon.repository,
                ATTR_BUILD: addon.need_build,
                ATTR_URL: addon.url,
                ATTR_ICON: addon.with_icon,
                ATTR_LOGO: addon.with_logo,
            }
-            for addon in self.sys_addons.installed
+            for addon in self.sys_addons.all
        ]
-        return {ATTR_ADDONS: data_addons}
+        data_repositories = [
+            {
+                ATTR_SLUG: repository.slug,
+                ATTR_NAME: repository.name,
+                ATTR_SOURCE: repository.source,
+                ATTR_URL: repository.url,
+                ATTR_MAINTAINER: repository.maintainer,
+            }
+            for repository in self.sys_store.all
+        ]
+        return {ATTR_ADDONS: data_addons, ATTR_REPOSITORIES: data_repositories}

    @api_process
    async def reload(self, request: web.Request) -> None:
        """Reload all add-on data from store."""
        await asyncio.shield(self.sys_store.reload())

+    @api_process
    async def info(self, request: web.Request) -> dict[str, Any]:
        """Return add-on information."""
        addon: AnyAddon = self._extract_addon(request)
@@ -196,8 +214,11 @@ class APIAddons(CoreSysAttributes):
            ATTR_LONG_DESCRIPTION: addon.long_description,
            ATTR_ADVANCED: addon.advanced,
            ATTR_STAGE: addon.stage,
+            ATTR_AUTO_UPDATE: None,
            ATTR_REPOSITORY: addon.repository,
+            ATTR_VERSION: None,
            ATTR_VERSION_LATEST: addon.latest_version,
+            ATTR_UPDATE_AVAILABLE: False,
            ATTR_PROTECTED: addon.protected,
            ATTR_RATING: rating_security(addon),
            ATTR_BOOT: addon.boot,

@@ -207,6 +228,7 @@ class APIAddons(CoreSysAttributes):
            ATTR_MACHINE: addon.supported_machine,
            ATTR_HOMEASSISTANT: addon.homeassistant_version,
            ATTR_URL: addon.url,
+            ATTR_STATE: AddonState.UNKNOWN,
            ATTR_DETACHED: addon.is_detached,
            ATTR_AVAILABLE: addon.available,
            ATTR_BUILD: addon.need_build,

@@ -219,11 +241,13 @@ class APIAddons(CoreSysAttributes):
            ATTR_PRIVILEGED: addon.privileged,
            ATTR_FULL_ACCESS: addon.with_full_access,
            ATTR_APPARMOR: addon.apparmor,
+            ATTR_DEVICES: addon.static_devices,
            ATTR_ICON: addon.with_icon,
            ATTR_LOGO: addon.with_logo,
            ATTR_CHANGELOG: addon.with_changelog,
            ATTR_DOCUMENTATION: addon.with_documentation,
            ATTR_STDIN: addon.with_stdin,
+            ATTR_WEBUI: None,
            ATTR_HASSIO_API: addon.access_hassio_api,
            ATTR_HASSIO_ROLE: addon.hassio_role,
            ATTR_AUTH_API: addon.access_auth_api,
@@ -237,35 +261,48 @@ class APIAddons(CoreSysAttributes):
            ATTR_DOCKER_API: addon.access_docker_api,
            ATTR_VIDEO: addon.with_video,
            ATTR_AUDIO: addon.with_audio,
+            ATTR_AUDIO_INPUT: None,
+            ATTR_AUDIO_OUTPUT: None,
            ATTR_STARTUP: addon.startup,
            ATTR_SERVICES: _pretty_services(addon),
            ATTR_DISCOVERY: addon.discovery,
+            ATTR_IP_ADDRESS: None,
            ATTR_TRANSLATIONS: addon.translations,
            ATTR_INGRESS: addon.with_ingress,
-            ATTR_SIGNED: addon.signed,
-            ATTR_STATE: addon.state,
-            ATTR_WEBUI: addon.webui,
-            ATTR_INGRESS_ENTRY: addon.ingress_entry,
-            ATTR_INGRESS_URL: addon.ingress_url,
-            ATTR_INGRESS_PORT: addon.ingress_port,
-            ATTR_INGRESS_PANEL: addon.ingress_panel,
-            ATTR_AUDIO_INPUT: addon.audio_input,
-            ATTR_AUDIO_OUTPUT: addon.audio_output,
-            ATTR_AUTO_UPDATE: addon.auto_update,
-            ATTR_IP_ADDRESS: str(addon.ip_address),
-            ATTR_VERSION: addon.version,
-            ATTR_UPDATE_AVAILABLE: addon.need_update,
-            ATTR_WATCHDOG: addon.watchdog,
-            ATTR_DEVICES: addon.static_devices
-            + [device.path for device in addon.devices],
+            ATTR_INGRESS_ENTRY: None,
+            ATTR_INGRESS_URL: None,
+            ATTR_INGRESS_PORT: None,
+            ATTR_INGRESS_PANEL: None,
+            ATTR_WATCHDOG: None,
        }

+        if isinstance(addon, Addon) and addon.is_installed:
+            data.update(
+                {
+                    ATTR_STATE: addon.state,
+                    ATTR_WEBUI: addon.webui,
+                    ATTR_INGRESS_ENTRY: addon.ingress_entry,
+                    ATTR_INGRESS_URL: addon.ingress_url,
+                    ATTR_INGRESS_PORT: addon.ingress_port,
+                    ATTR_INGRESS_PANEL: addon.ingress_panel,
+                    ATTR_AUDIO_INPUT: addon.audio_input,
+                    ATTR_AUDIO_OUTPUT: addon.audio_output,
+                    ATTR_AUTO_UPDATE: addon.auto_update,
+                    ATTR_IP_ADDRESS: str(addon.ip_address),
+                    ATTR_VERSION: addon.version,
+                    ATTR_UPDATE_AVAILABLE: addon.need_update,
+                    ATTR_WATCHDOG: addon.watchdog,
+                    ATTR_DEVICES: addon.static_devices
+                    + [device.path for device in addon.devices],
+                }
+            )
+
        return data

    @api_process
    async def options(self, request: web.Request) -> None:
        """Store user options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self._extract_addon_installed(request)

        # Update secrets for validation
        await self.sys_homeassistant.secrets.reload()
@@ -300,7 +337,7 @@ class APIAddons(CoreSysAttributes):
    @api_process
    async def options_validate(self, request: web.Request) -> None:
        """Validate user options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self._extract_addon_installed(request)
        data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}

        options = await request.json(loads=json_loads) or addon.options

@@ -342,7 +379,7 @@ class APIAddons(CoreSysAttributes):
        slug: str = request.match_info.get("addon")
        if slug != "self":
            raise APIForbidden("This can be only read by the Add-on itself!")
-        addon = self._extract_addon(request)
+        addon = self._extract_addon_installed(request)

        # Lookup/reload secrets
        await self.sys_homeassistant.secrets.reload()

@@ -354,7 +391,7 @@ class APIAddons(CoreSysAttributes):
    @api_process
    async def security(self, request: web.Request) -> None:
        """Store security options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self._extract_addon_installed(request)
        body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

        if ATTR_PROTECTED in body:

@@ -366,7 +403,7 @@ class APIAddons(CoreSysAttributes):
    @api_process
    async def stats(self, request: web.Request) -> dict[str, Any]:
        """Return resource information."""
-        addon = self._extract_addon(request)
+        addon = self._extract_addon_installed(request)

        stats: DockerStats = await addon.stats()
@@ -384,43 +421,83 @@ class APIAddons(CoreSysAttributes):
    @api_process
    def uninstall(self, request: web.Request) -> Awaitable[None]:
        """Uninstall add-on."""
-        addon = self._extract_addon(request)
+        addon = self._extract_addon_installed(request)
        return asyncio.shield(addon.uninstall())

    @api_process
    def start(self, request: web.Request) -> Awaitable[None]:
        """Start add-on."""
-        addon = self._extract_addon(request)
+        addon = self._extract_addon_installed(request)
        return asyncio.shield(addon.start())

    @api_process
    def stop(self, request: web.Request) -> Awaitable[None]:
        """Stop add-on."""
-        addon = self._extract_addon(request)
+        addon = self._extract_addon_installed(request)
        return asyncio.shield(addon.stop())

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart add-on."""
-        addon: Addon = self._extract_addon(request)
+        addon: Addon = self._extract_addon_installed(request)
        return asyncio.shield(addon.restart())

    @api_process
    def rebuild(self, request: web.Request) -> Awaitable[None]:
        """Rebuild local build add-on."""
-        addon = self._extract_addon(request)
+        addon = self._extract_addon_installed(request)
        return asyncio.shield(addon.rebuild())

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return logs from add-on."""
-        addon = self._extract_addon(request)
+        addon = self._extract_addon_installed(request)
        return addon.logs()

+    @api_process_raw(CONTENT_TYPE_PNG)
+    async def icon(self, request: web.Request) -> bytes:
+        """Return icon from add-on."""
+        addon = self._extract_addon(request)
+        if not addon.with_icon:
+            raise APIError(f"No icon found for add-on {addon.slug}!")
+
+        with addon.path_icon.open("rb") as png:
+            return png.read()
+
+    @api_process_raw(CONTENT_TYPE_PNG)
+    async def logo(self, request: web.Request) -> bytes:
+        """Return logo from add-on."""
+        addon = self._extract_addon(request)
+        if not addon.with_logo:
+            raise APIError(f"No logo found for add-on {addon.slug}!")
+
+        with addon.path_logo.open("rb") as png:
+            return png.read()
+
+    @api_process_raw(CONTENT_TYPE_TEXT)
+    async def changelog(self, request: web.Request) -> str:
+        """Return changelog from add-on."""
+        addon = self._extract_addon(request)
+        if not addon.with_changelog:
+            raise APIError(f"No changelog found for add-on {addon.slug}!")
+
+        with addon.path_changelog.open("r") as changelog:
+            return changelog.read()
+
+    @api_process_raw(CONTENT_TYPE_TEXT)
+    async def documentation(self, request: web.Request) -> str:
+        """Return documentation from add-on."""
+        addon = self._extract_addon(request)
+        if not addon.with_documentation:
+            raise APIError(f"No documentation found for add-on {addon.slug}!")
+
+        with addon.path_documentation.open("r") as documentation:
+            return documentation.read()
+
    @api_process
    async def stdin(self, request: web.Request) -> None:
        """Write to stdin of add-on."""
-        addon = self._extract_addon(request)
+        addon = self._extract_addon_installed(request)
        if not addon.with_stdin:
            raise APIError(f"STDIN not supported the {addon.slug} add-on")
@@ -428,6 +505,6 @@ class APIAddons(CoreSysAttributes):
        await asyncio.shield(addon.write_stdin(data))


-def _pretty_services(addon: Addon) -> list[str]:
+def _pretty_services(addon: AnyAddon) -> list[str]:
    """Return a simplified services role list."""
    return [f"{name}:{access}" for name, access in addon.services_role.items()]
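To illustrate what the unchanged _pretty_services helper produces, a standalone sketch; the add-on object and its services_role mapping are stand-ins rather than real Supervisor objects:

def _pretty_services(addon) -> list[str]:
    """Return a simplified services role list."""
    return [f"{name}:{access}" for name, access in addon.services_role.items()]


class _FakeAddon:
    # "need" and "want" are real Supervisor service role levels
    services_role = {"mysql": "need", "mqtt": "want"}


print(_pretty_services(_FakeAddon()))  # ['mysql:need', 'mqtt:want']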

View File

@@ -29,12 +29,12 @@ from ..const import (
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
    ATTR_VOLUME,
+    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..host.sound import StreamType
from ..validate import version_tag
-from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

View File

@@ -8,10 +8,15 @@ from aiohttp.web_exceptions import HTTPUnauthorized
import voluptuous as vol

from ..addons.addon import Addon
-from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
+from ..const import (
+    ATTR_PASSWORD,
+    ATTR_USERNAME,
+    CONTENT_TYPE_JSON,
+    CONTENT_TYPE_URL,
+    REQUEST_FROM,
+)
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
-from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

View File

@@ -9,11 +9,10 @@ from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol

-from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT
+from ..backups.validate import ALL_FOLDERS
from ..const import (
    ATTR_ADDONS,
    ATTR_BACKUPS,
-    ATTR_COMPRESSED,
    ATTR_CONTENT,
    ATTR_DATE,
    ATTR_FOLDERS,
@@ -26,27 +25,23 @@ from ..const import (
    ATTR_SLUG,
    ATTR_TYPE,
    ATTR_VERSION,
+    CONTENT_TYPE_TAR,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
-from .const import CONTENT_TYPE_TAR
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")

-# Backwards compatible
-# Remove: 2022.08
-_ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]
-
# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_PARTIAL = vol.Schema(
    {
        vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
        vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
-        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
+        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
    }
)
@@ -56,14 +51,13 @@ SCHEMA_BACKUP_FULL = vol.Schema(
    {
        vol.Optional(ATTR_NAME): str,
        vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
-        vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
    }
)

SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
    {
        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
-        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
+        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
        vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
    }
)
@@ -92,7 +86,6 @@ class APIBackups(CoreSysAttributes):
                ATTR_TYPE: backup.sys_type,
                ATTR_SIZE: backup.size,
                ATTR_PROTECTED: backup.protected,
-                ATTR_COMPRESSED: backup.compressed,
                ATTR_CONTENT: {
                    ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
                    ATTR_ADDONS: backup.addon_list,

@@ -135,7 +128,6 @@ class APIBackups(CoreSysAttributes):
            ATTR_NAME: backup.name,
            ATTR_DATE: backup.date,
            ATTR_SIZE: backup.size,
-            ATTR_COMPRESSED: backup.compressed,
            ATTR_PROTECTED: backup.protected,
            ATTR_HOMEASSISTANT: backup.homeassistant_version,
            ATTR_ADDONS: data_addons,
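A standalone sketch of how a partial-backup payload passes the schema above; the plain strings stand in for the ATTR_* constants, and the folder list is an illustrative subset of ALL_FOLDERS from supervisor/backups/validate.py:

import voluptuous as vol

ALL_FOLDERS = ["ssl", "share", "media", "addons/local"]  # illustrative subset

SCHEMA_BACKUP_PARTIAL = vol.Schema(
    {
        vol.Optional("name"): str,
        vol.Optional("password"): vol.Maybe(str),
        vol.Optional("addons"): vol.All([str], vol.Unique()),
        vol.Optional("folders"): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
        vol.Optional("homeassistant"): vol.Boolean(),
    }
)

body = {"name": "partial-2022-01-20", "addons": ["core_ssh"], "folders": ["ssl"]}
print(SCHEMA_BACKUP_PARTIAL(body))  # returns the validated dict unchanged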

View File

@@ -1,39 +1,15 @@
"""Const for API."""
-CONTENT_TYPE_BINARY = "application/octet-stream"
-CONTENT_TYPE_JSON = "application/json"
-CONTENT_TYPE_PNG = "image/png"
-CONTENT_TYPE_TAR = "application/tar"
-CONTENT_TYPE_TEXT = "text/plain"
-CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
-COOKIE_INGRESS = "ingress_session"
-HEADER_TOKEN_OLD = "X-Hassio-Key"
-HEADER_TOKEN = "X-Supervisor-Token"
-ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_AGENT_VERSION = "agent_version"
-ATTR_AVAILABLE_UPDATES = "available_updates"
ATTR_BOOT_TIMESTAMP = "boot_timestamp"
-ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
-ATTR_BROADCAST_MDNS = "broadcast_mdns"
ATTR_DATA_DISK = "data_disk"
ATTR_DEVICE = "device"
ATTR_DT_SYNCHRONIZED = "dt_synchronized"
ATTR_DT_UTC = "dt_utc"
-ATTR_FALLBACK = "fallback"
-ATTR_LLMNR = "llmnr"
-ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
-ATTR_MDNS = "mdns"
-ATTR_PANEL_PATH = "panel_path"
-ATTR_SIGNED = "signed"
ATTR_STARTUP_TIME = "startup_time"
-ATTR_UPDATE_TYPE = "update_type"
ATTR_USE_NTP = "use_ntp"
-ATTR_BY_ID = "by_id"
-ATTR_SUBSYSTEM = "subsystem"
-ATTR_SYSFS = "sysfs"
-ATTR_DEV_PATH = "dev_path"
-ATTR_ATTRIBUTES = "attributes"
-ATTR_CHILDREN = "children"
+ATTR_USE_RTC = "use_rtc"
+ATTR_APPARMOR_VERSION = "apparmor_version"
+ATTR_PANEL_PATH = "panel_path"
+ATTR_UPDATE_TYPE = "update_type"
+ATTR_AVAILABLE_UPDATES = "available_updates"

View File

@@ -21,22 +21,17 @@ from ..const import (
    ATTR_UPDATE_AVAILABLE,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
+    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import dns_server_list, version_tag
-from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

# pylint: disable=no-value-for-parameter
-SCHEMA_OPTIONS = vol.Schema(
-    {
-        vol.Optional(ATTR_SERVERS): dns_server_list,
-        vol.Optional(ATTR_FALLBACK): vol.Boolean(),
-    }
-)
+SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): dns_server_list})

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})

@@ -54,26 +49,15 @@ class APICoreDNS(CoreSysAttributes):
            ATTR_HOST: str(self.sys_docker.network.dns),
            ATTR_SERVERS: self.sys_plugins.dns.servers,
            ATTR_LOCALS: self.sys_plugins.dns.locals,
-            ATTR_MDNS: self.sys_plugins.dns.mdns,
-            ATTR_LLMNR: self.sys_plugins.dns.llmnr,
-            ATTR_FALLBACK: self.sys_plugins.dns.fallback,
        }

    @api_process
    async def options(self, request: web.Request) -> None:
        """Set DNS options."""
        body = await api_validate(SCHEMA_OPTIONS, request)
-        restart_required = False

        if ATTR_SERVERS in body:
            self.sys_plugins.dns.servers = body[ATTR_SERVERS]
-            restart_required = True
-        if ATTR_FALLBACK in body:
-            self.sys_plugins.dns.fallback = body[ATTR_FALLBACK]
-            restart_required = True
-        if restart_required:
            self.sys_create_task(self.sys_plugins.dns.restart())

        self.sys_plugins.dns.save_data()
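The removed lines implement an "only restart when something actually changed" guard. A stripped-down sketch of that pattern with a stand-in plugin object, not the Supervisor class itself:

from types import SimpleNamespace


def apply_dns_options(plugin, body: dict) -> bool:
    """Apply options and report whether a plugin restart is needed."""
    restart_required = False

    if "servers" in body:
        plugin.servers = body["servers"]
        restart_required = True

    if "fallback" in body:
        plugin.fallback = body["fallback"]
        restart_required = True

    return restart_required


plugin = SimpleNamespace(servers=[], fallback=True)
print(apply_dns_options(plugin, {"servers": ["dns://1.1.1.1"]}))  # True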

View File

@@ -6,15 +6,14 @@ from aiohttp import web
from ..const import ATTR_AUDIO, ATTR_DEVICES, ATTR_INPUT, ATTR_NAME, ATTR_OUTPUT
from ..coresys import CoreSysAttributes
-from ..hardware.data import Device
-from .const import (
+from ..hardware.const import (
    ATTR_ATTRIBUTES,
    ATTR_BY_ID,
-    ATTR_CHILDREN,
    ATTR_DEV_PATH,
    ATTR_SUBSYSTEM,
    ATTR_SYSFS,
)
+from ..hardware.data import Device
from .utils import api_process

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -29,7 +28,6 @@ def device_struct(device: Device) -> dict[str, Any]:
        ATTR_SUBSYSTEM: device.subsystem,
        ATTR_BY_ID: device.by_id,
        ATTR_ATTRIBUTES: device.attributes,
-        ATTR_CHILDREN: device.children,
    }

View File

@@ -29,12 +29,13 @@ from ..const import (
    ATTR_UPDATE_AVAILABLE,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
+    ATTR_WAIT_BOOT,
    ATTR_WATCHDOG,
+    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import docker_image, network_port, version_tag
-from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -47,6 +48,7 @@ SCHEMA_OPTIONS = vol.Schema(
        vol.Optional(ATTR_PORT): network_port,
        vol.Optional(ATTR_SSL): vol.Boolean(),
        vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
+        vol.Optional(ATTR_WAIT_BOOT): vol.All(vol.Coerce(int), vol.Range(min=60)),
        vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str),
        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),

@@ -79,8 +81,11 @@ class APIHomeAssistant(CoreSysAttributes):
            ATTR_PORT: self.sys_homeassistant.api_port,
            ATTR_SSL: self.sys_homeassistant.api_ssl,
            ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
+            ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
            ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input,
            ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output,
+            # Remove end of Q3 2020
+            "last_version": self.sys_homeassistant.latest_version,
        }

    @api_process

@@ -103,6 +108,9 @@ class APIHomeAssistant(CoreSysAttributes):
        if ATTR_WATCHDOG in body:
            self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]

+        if ATTR_WAIT_BOOT in body:
+            self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
+
        if ATTR_REFRESH_TOKEN in body:
            self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]

View File

@@ -22,20 +22,18 @@ from ..const import (
    ATTR_SERVICES,
    ATTR_STATE,
    ATTR_TIMEZONE,
+    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from .const import (
    ATTR_AGENT_VERSION,
    ATTR_APPARMOR_VERSION,
    ATTR_BOOT_TIMESTAMP,
-    ATTR_BROADCAST_LLMNR,
-    ATTR_BROADCAST_MDNS,
    ATTR_DT_SYNCHRONIZED,
    ATTR_DT_UTC,
-    ATTR_LLMNR_HOSTNAME,
    ATTR_STARTUP_TIME,
    ATTR_USE_NTP,
-    CONTENT_TYPE_BINARY,
+    ATTR_USE_RTC,
)
from .utils import api_process, api_process_raw, api_validate
@@ -62,17 +60,15 @@ class APIHost(CoreSysAttributes):
            ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
            ATTR_FEATURES: self.sys_host.features,
            ATTR_HOSTNAME: self.sys_host.info.hostname,
-            ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname,
            ATTR_KERNEL: self.sys_host.info.kernel,
            ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
            ATTR_TIMEZONE: self.sys_host.info.timezone,
            ATTR_DT_UTC: self.sys_host.info.dt_utc,
            ATTR_DT_SYNCHRONIZED: self.sys_host.info.dt_synchronized,
            ATTR_USE_NTP: self.sys_host.info.use_ntp,
+            ATTR_USE_RTC: self.sys_host.info.use_rtc,
            ATTR_STARTUP_TIME: self.sys_host.info.startup_time,
            ATTR_BOOT_TIMESTAMP: self.sys_host.info.boot_timestamp,
-            ATTR_BROADCAST_LLMNR: self.sys_host.info.broadcast_llmnr,
-            ATTR_BROADCAST_MDNS: self.sys_host.info.broadcast_mdns,
        }

    @api_process
@@ -99,7 +95,11 @@
    @api_process
    def reload(self, request):
        """Reload host data."""
-        return asyncio.shield(self.sys_host.reload())
+        return asyncio.shield(
+            asyncio.wait(
+                [self.sys_host.reload(), self.sys_resolution.evaluate.evaluate_system()]
+            )
+        )

    @api_process
    async def services(self, request):
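The reload change above shields two coroutines that run concurrently. A self-contained sketch of that asyncio pattern with placeholder coroutines standing in for the Supervisor calls:

import asyncio


async def reload_host() -> None:
    await asyncio.sleep(0.1)  # placeholder for sys_host.reload()


async def evaluate_system() -> None:
    await asyncio.sleep(0.1)  # placeholder for resolution.evaluate.evaluate_system()


async def reload() -> None:
    # shield() keeps the inner work running even if the caller is cancelled;
    # asyncio.wait() waits for both tasks without failing fast on one error.
    await asyncio.shield(
        asyncio.wait(
            [asyncio.create_task(reload_host()), asyncio.create_task(evaluate_system())]
        )
    )


asyncio.run(reload())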

52
supervisor/api/info.py (new file)
View File

@@ -0,0 +1,52 @@
+"""Init file for Supervisor info RESTful API."""
+import logging
+from typing import Any
+
+from aiohttp import web
+
+from ..const import (
+    ATTR_ARCH,
+    ATTR_CHANNEL,
+    ATTR_DOCKER,
+    ATTR_FEATURES,
+    ATTR_HASSOS,
+    ATTR_HOMEASSISTANT,
+    ATTR_HOSTNAME,
+    ATTR_LOGGING,
+    ATTR_MACHINE,
+    ATTR_OPERATING_SYSTEM,
+    ATTR_STATE,
+    ATTR_SUPERVISOR,
+    ATTR_SUPPORTED,
+    ATTR_SUPPORTED_ARCH,
+    ATTR_TIMEZONE,
+)
+from ..coresys import CoreSysAttributes
+from .utils import api_process
+
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+
+class APIInfo(CoreSysAttributes):
+    """Handle RESTful API for info functions."""
+
+    @api_process
+    async def info(self, request: web.Request) -> dict[str, Any]:
+        """Show system info."""
+        return {
+            ATTR_SUPERVISOR: self.sys_supervisor.version,
+            ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
+            ATTR_HASSOS: self.sys_os.version,
+            ATTR_DOCKER: self.sys_docker.info.version,
+            ATTR_HOSTNAME: self.sys_host.info.hostname,
+            ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
+            ATTR_FEATURES: self.sys_host.features,
+            ATTR_MACHINE: self.sys_machine,
+            ATTR_ARCH: self.sys_arch.default,
+            ATTR_STATE: self.sys_core.state,
+            ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
+            ATTR_SUPPORTED: self.sys_core.supported,
+            ATTR_CHANNEL: self.sys_updater.channel,
+            ATTR_LOGGING: self.sys_config.logging,
+            ATTR_TIMEZONE: self.sys_timezone,
+        }
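For completeness, a hypothetical client call against this endpoint; it assumes the usual add-on environment (SUPERVISOR_TOKEN set, API at http://supervisor) and only prints a few of the fields built above:

import asyncio
import os

import aiohttp


async def show_info() -> None:
    headers = {"X-Supervisor-Token": os.environ["SUPERVISOR_TOKEN"]}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.get("http://supervisor/info") as resp:
            body = await resp.json()
    # api_process wraps the payload as {"result": "ok", "data": {...}}
    data = body["data"]
    print(data["supervisor"], data["channel"], data["timezone"])


asyncio.run(show_info())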

View File

@@ -22,9 +22,11 @@ from ..const import (
    ATTR_PANELS,
    ATTR_SESSION,
    ATTR_TITLE,
+    COOKIE_INGRESS,
+    HEADER_TOKEN,
+    HEADER_TOKEN_OLD,
)
from ..coresys import CoreSysAttributes
-from .const import COOKIE_INGRESS, HEADER_TOKEN, HEADER_TOKEN_OLD
from .utils import api_process, api_validate, require_home_assistant

_LOGGER: logging.Logger = logging.getLogger(__name__)

View File

@@ -77,6 +77,7 @@ ADDONS_ROLE_ACCESS = {
        r"^(?:"
        r"|/.+/info"
        r"|/backups.*"
+        r"|/snapshots.*"
        r")$"
    ),
    ROLE_MANAGER: re.compile(
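A quick standalone check of what the extended pattern matches; the enclosing role key is not shown in this hunk, so the variable name below is only illustrative, and the string fragments are concatenated exactly as in the source:

import re

BACKUP_ROLE_ACCESS = re.compile(
    r"^(?:"
    r"|/.+/info"
    r"|/backups.*"
    r"|/snapshots.*"
    r")$"
)

for path in ("/backups/new/full", "/snapshots/reload", "/addons/core_ssh/info", "/host/reboot"):
    print(path, bool(BACKUP_ROLE_ACCESS.match(path)))
# Only /host/reboot falls outside this role's allowed paths.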

View File

@@ -18,11 +18,11 @@ from ..const import (
    ATTR_UPDATE_AVAILABLE,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
+    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import version_tag
-from .const import CONTENT_TYPE_BINARY
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

View File

@@ -141,7 +141,9 @@ class APINetwork(CoreSysAttributes):
    def _get_interface(self, name: str) -> Interface:
        """Get Interface by name or default."""
-        if name.lower() == "default":
+        name = name.lower()
+
+        if name == "default":
            for interface in self.sys_host.network.interfaces:
                if not interface.primary:
                    continue
View File

@@ -1,14 +1,14 @@
function loadES5() {
  var el = document.createElement('script');
-  el.src = '/api/hassio/app/frontend_es5/entrypoint.75b60951.js';
+  el.src = '/api/hassio/app/frontend_es5/entrypoint.5d40ff8b.js';
  document.body.appendChild(el);
}
if (/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent)) {
  loadES5();
} else {
  try {
-    new Function("import('/api/hassio/app/frontend_latest/entrypoint.f358ba39.js')")();
+    new Function("import('/api/hassio/app/frontend_latest/entrypoint.f09e9f8e.js')")();
  } catch (err) {
    loadES5();
  }
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1 +0,0 @@
!function(){"use strict";var t,n,e={14971:function(t,n,e){var r,i,o=e(93217),u=e(69330),a=(e(58556),e(62173)),c=function(t,n,e){if("input"===t){if("type"===n&&"checkbox"===e||"checked"===n||"disabled"===n)return;return""}},f={renderMarkdown:function(t,n){var e,o=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return r||(r=Object.assign({},(0,a.getDefaultWhiteList)(),{input:["type","disabled","checked"],"ha-icon":["icon"],"ha-svg-icon":["path"],"ha-alert":["alert-type","title"]})),o.allowSvg?(i||(i=Object.assign({},r,{svg:["xmlns","height","width"],path:["transform","stroke","d"],img:["src"]})),e=i):e=r,(0,a.filterXSS)((0,u.TU)(t,n),{whiteList:e,onTagAttr:c})}};(0,o.Jj)(f)}},r={};function i(t){var n=r[t];if(void 0!==n)return n.exports;var o=r[t]={exports:{}};return e[t](o,o.exports,i),o.exports}i.m=e,i.x=function(){var t=i.O(void 0,[191,752],(function(){return i(14971)}));return t=i.O(t)},t=[],i.O=function(n,e,r,o){if(!e){var u=1/0;for(s=0;s<t.length;s++){e=t[s][0],r=t[s][1],o=t[s][2];for(var a=!0,c=0;c<e.length;c++)(!1&o||u>=o)&&Object.keys(i.O).every((function(t){return i.O[t](e[c])}))?e.splice(c--,1):(a=!1,o<u&&(u=o));if(a){t.splice(s--,1);var f=r();void 0!==f&&(n=f)}}return n}o=o||0;for(var s=t.length;s>0&&t[s-1][2]>o;s--)t[s]=t[s-1];t[s]=[e,r,o]},i.n=function(t){var n=t&&t.__esModule?function(){return t.default}:function(){return t};return i.d(n,{a:n}),n},i.d=function(t,n){for(var e in n)i.o(n,e)&&!i.o(t,e)&&Object.defineProperty(t,e,{enumerable:!0,get:n[e]})},i.f={},i.e=function(t){return Promise.all(Object.keys(i.f).reduce((function(n,e){return i.f[e](t,n),n}),[]))},i.u=function(t){return{191:"2dbdaab4",752:"829db8ac"}[t]+".js"},i.o=function(t,n){return Object.prototype.hasOwnProperty.call(t,n)},i.p="/api/hassio/app/frontend_es5/",function(){var t={971:1};i.f.i=function(n,e){t[n]||importScripts(i.p+i.u(n))};var n=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],e=n.push.bind(n);n.push=function(n){var r=n[0],o=n[1],u=n[2];for(var a in o)i.o(o,a)&&(i.m[a]=o[a]);for(u&&u(i);r.length;)t[r.pop()]=1;e(n)}}(),n=i.x,i.x=function(){return Promise.all([i.e(191),i.e(752)]).then(n)};i.x()}();

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@@ -1 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[639],{71639:function(s){s.exports=[]}}]);

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,10 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

View File

@@ -1,4 +0,0 @@
/* @preserve
* Leaflet 1.7.1, a JS library for interactive maps. http://leafletjs.com
* (c) 2010-2019 Vladimir Agafonkin, (c) 2010-2011 CloudMade
*/

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff.