Mirror of https://github.com/home-assistant/supervisor.git
Synced 2025-09-02 04:40:21 +00:00

Compare commits: `fix-error-...ipv6` (4 commits)

- 2470d16d85
- 5c0172440a
- 006db94cc0
- 9f1ab99265
```diff
@@ -1,51 +1,37 @@
 {
   "name": "Supervisor dev",
-  "image": "ghcr.io/home-assistant/devcontainer:2-supervisor",
-  "containerEnv": {
-    "WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
-  },
-  "remoteEnv": {
-    "PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
-  },
+  "image": "ghcr.io/home-assistant/devcontainer:supervisor",
   "appPort": ["9123:8123", "7357:4357"],
-  "postCreateCommand": "bash devcontainer_setup",
-  "postStartCommand": "bash devcontainer_bootstrap",
+  "postCreateCommand": "bash devcontainer_bootstrap",
   "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
-  "customizations": {
-    "vscode": {
-      "extensions": [
-        "charliermarsh.ruff",
-        "ms-python.pylint",
-        "ms-python.vscode-pylance",
-        "visualstudioexptteam.vscodeintellicode",
-        "redhat.vscode-yaml",
-        "esbenp.prettier-vscode",
-        "GitHub.vscode-pull-request-github"
-      ],
-      "settings": {
-        "python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
-        "python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
-        "python.terminal.activateEnvInCurrentTerminal": true,
-        "python.testing.pytestArgs": ["--no-cov"],
-        "pylint.importStrategy": "fromEnvironment",
-        "editor.formatOnPaste": false,
-        "editor.formatOnSave": true,
-        "editor.formatOnType": true,
-        "files.trimTrailingWhitespace": true,
-        "terminal.integrated.profiles.linux": {
-          "zsh": {
-            "path": "/usr/bin/zsh"
-          }
-        },
-        "terminal.integrated.defaultProfile.linux": "zsh",
-        "[python]": {
-          "editor.defaultFormatter": "charliermarsh.ruff"
-        }
-      }
-    }
-  },
-  "mounts": [
-    "type=volume,target=/var/lib/docker",
-    "type=volume,target=/mnt/supervisor"
-  ]
+  "extensions": [
+    "ms-python.python",
+    "ms-python.vscode-pylance",
+    "visualstudioexptteam.vscodeintellicode",
+    "esbenp.prettier-vscode"
+  ],
+  "mounts": [ "type=volume,target=/var/lib/docker" ],
+  "settings": {
+    "terminal.integrated.profiles.linux": {
+      "zsh": {
+        "path": "/usr/bin/zsh"
+      }
+    },
+    "terminal.integrated.defaultProfile.linux": "zsh",
+    "editor.formatOnPaste": false,
+    "editor.formatOnSave": true,
+    "editor.formatOnType": true,
+    "files.trimTrailingWhitespace": true,
+    "python.pythonPath": "/usr/local/bin/python3",
+    "python.linting.pylintEnabled": true,
+    "python.linting.enabled": true,
+    "python.formatting.provider": "black",
+    "python.formatting.blackArgs": ["--target-version", "py39"],
+    "python.formatting.blackPath": "/usr/local/bin/black",
+    "python.linting.banditPath": "/usr/local/bin/bandit",
+    "python.linting.flake8Path": "/usr/local/bin/flake8",
+    "python.linting.mypyPath": "/usr/local/bin/mypy",
+    "python.linting.pylintPath": "/usr/local/bin/pylint",
+    "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
+  }
 }
```
.github/ISSUE_TEMPLATE/bug_report.yml (vendored, 53 changed lines)

```diff
@@ -20,14 +20,22 @@ body:
     attributes:
       value: |
         ## Environment
+  - type: input
+    validations:
+      required: true
+    attributes:
+      label: What is the used version of the Supervisor?
+      placeholder: supervisor-
+      description: >
+        Can be found in the Supervisor panel -> System tab. Starts with
+        `supervisor-....`.
   - type: dropdown
     validations:
      required: true
    attributes:
      label: What type of installation are you running?
      description: >
-        If you don't know, can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
-        It is listed as the `Installation Type` value.
+        If you don't know, you can find it in: Configuration panel -> Info.
      options:
        - Home Assistant OS
        - Home Assistant Supervised
@@ -40,6 +48,22 @@ body:
        - Home Assistant Operating System
        - Debian
        - Other (e.g., Raspbian/Raspberry Pi OS/Fedora)
+  - type: input
+    validations:
+      required: true
+    attributes:
+      label: What is the version of your installed operating system?
+      placeholder: "5.11"
+      description: Can be found in the Supervisor panel -> System tab.
+  - type: input
+    validations:
+      required: true
+    attributes:
+      label: What version of Home Assistant Core is installed?
+      placeholder: core-
+      description: >
+        Can be found in the Supervisor panel -> System tab. Starts with
+        `core-....`.
   - type: markdown
     attributes:
       value: |
@@ -63,31 +87,8 @@ body:
     attributes:
       label: Anything in the Supervisor logs that might be useful for us?
       description: >
-        Supervisor Logs can be found in [Settings -> System -> Logs](https://my.home-assistant.io/redirect/logs/)
-        then choose `Supervisor` in the top right.
-
-        [](https://my.home-assistant.io/redirect/supervisor_logs/)
+        The Supervisor logs can be found in the Supervisor panel -> System tab.
       render: txt
-  - type: textarea
-    validations:
-      required: true
-    attributes:
-      label: System information
-      description: >
-        The System information can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
-        Click the copy button at the bottom of the pop-up and paste it here.
-
-        [](https://my.home-assistant.io/redirect/system_health/)
-  - type: textarea
-    attributes:
-      label: Supervisor diagnostics
-      placeholder: "drag-and-drop the diagnostics data file here (do not copy-and-paste the content)"
-      description: >-
-        Supervisor diagnostics can be found in [Settings -> Devices & services](https://my.home-assistant.io/redirect/integrations/).
-        Find the card that says `Home Assistant Supervisor`, open it, and select the three dot menu of the Supervisor integration entry
-        and select 'Download diagnostics'.
-
-        **Please drag-and-drop the downloaded file into the textbox below. Do not copy and paste its contents.**
   - type: textarea
     attributes:
       label: Additional information
```
.github/PULL_REQUEST_TEMPLATE.md (vendored, 9 changed lines)

```diff
@@ -38,7 +38,6 @@
 - This PR is related to issue:
 - Link to documentation pull request:
 - Link to cli pull request:
-- Link to client library pull request:

 ## Checklist

@@ -53,14 +52,12 @@
 - [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
 - [ ] There is no commented out code in this PR.
 - [ ] I have followed the [development checklist][dev-checklist]
-- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
+- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
 - [ ] Tests have been added to verify that the new code works.

-If API endpoints or add-on configuration are added/changed:
+If API endpoints of add-on configuration are added/changed:

 - [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
-- [ ] [CLI][cli-repository] updated (if necessary)
-- [ ] [Client library][client-library-repository] updated (if necessary)

 <!--
 Thank you for contributing <3
@@ -70,5 +67,3 @@ If API endpoints or add-on configuration are added/changed:

 [dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
 [docs-repository]: https://github.com/home-assistant/developers.home-assistant
-[cli-repository]: https://github.com/home-assistant/cli
-[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/
```
.github/workflows/builder.yml (vendored, 134 changed lines)

```diff
@@ -33,13 +33,10 @@ on:
       - setup.py
 
 env:
-  DEFAULT_PYTHON: "3.13"
+  DEFAULT_PYTHON: 3.9
   BUILD_NAME: supervisor
   BUILD_TYPE: supervisor
+  WHEELS_TAG: 3.9-alpine3.14
-
-concurrency:
-  group: "${{ github.workflow }}-${{ github.ref }}"
-  cancel-in-progress: true
 
 jobs:
   init:
@@ -53,7 +50,7 @@ jobs:
       requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v3.0.2
        with:
          fetch-depth: 0
 
@@ -70,51 +67,39 @@ jobs:
       - name: Get changed files
         id: changed_files
         if: steps.version.outputs.publish == 'false'
-        uses: masesgroup/retrieve-changed-files@v3.0.0
+        uses: jitterbit/get-changed-files@v1
 
       - name: Check if requirements files changed
         id: requirements
         run: |
-          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.yaml) ]]; then
-            echo "changed=true" >> "$GITHUB_OUTPUT"
+          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.json) ]]; then
+            echo "::set-output name=changed::true"
           fi
 
   build:
     name: Build ${{ matrix.arch }} supervisor
     needs: init
     runs-on: ubuntu-latest
-    permissions:
-      contents: read
-      id-token: write
-      packages: write
     strategy:
       matrix:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v3.0.2
        with:
          fetch-depth: 0
 
-      - name: Write env-file
-        if: needs.init.outputs.requirements == 'true'
-        run: |
-          (
-            # Fix out of memory issues with rust
-            echo "CARGO_NET_GIT_FETCH_WITH_CLI=true"
-          ) > .env_file
-
       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2024.11.0
+        uses: home-assistant/wheels@2022.01.2
        with:
-          abi: cp313
-          tag: musllinux_1_2
+          tag: ${{ env.WHEELS_TAG }}
          arch: ${{ matrix.arch }}
+          wheels-host: wheels.hass.io
          wheels-key: ${{ secrets.WHEELS_KEY }}
-          apk: "libffi-dev;openssl-dev;yaml-dev"
+          wheels-user: wheels
+          apk: "build-base;libffi-dev;openssl-dev;cargo"
          skip-binary: aiohttp
-          env-file: true
          requirements: "requirements.txt"
 
       - name: Set version
@@ -123,33 +108,16 @@ jobs:
        with:
          type: ${{ env.BUILD_TYPE }}
 
-      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
+      - name: Login to DockerHub
        if: needs.init.outputs.publish == 'true'
-        uses: actions/setup-python@v5.4.0
+        uses: docker/login-action@v2.0.0
        with:
-          python-version: ${{ env.DEFAULT_PYTHON }}
-
-      - name: Install Cosign
-        if: needs.init.outputs.publish == 'true'
-        uses: sigstore/cosign-installer@v3.8.1
-        with:
-          cosign-release: "v2.4.0"
-
-      - name: Install dirhash and calc hash
-        if: needs.init.outputs.publish == 'true'
-        run: |
-          pip3 install setuptools dirhash
-          dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
-          echo "${dir_hash}" > rootfs/supervisor.sha256
-
-      - name: Sign supervisor SHA256
-        if: needs.init.outputs.publish == 'true'
-        run: |
-          cosign sign-blob --yes rootfs/supervisor.sha256 --bundle rootfs/supervisor.sha256.sig
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Login to GitHub Container Registry
        if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@v2.0.0
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
@@ -160,17 +128,55 @@ jobs:
        run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV
 
       - name: Build supervisor
-        uses: home-assistant/builder@2025.02.0
+        uses: home-assistant/builder@2022.06.2
        with:
          args: |
            $BUILD_ARGS \
            --${{ matrix.arch }} \
            --target /data \
-            --cosign \
            --generic ${{ needs.init.outputs.version }}
        env:
          CAS_API_KEY: ${{ secrets.CAS_TOKEN }}
 
+  codenotary:
+    name: CAS signature
+    needs: init
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout the repository
+        if: needs.init.outputs.publish == 'true'
+        uses: actions/checkout@v3.0.2
+        with:
+          fetch-depth: 0
+
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
+        if: needs.init.outputs.publish == 'true'
+        uses: actions/setup-python@v4.2.0
+        with:
+          python-version: ${{ env.DEFAULT_PYTHON }}
+
+      - name: Set version
+        if: needs.init.outputs.publish == 'true'
+        uses: home-assistant/actions/helpers/version@master
+        with:
+          type: ${{ env.BUILD_TYPE }}
+
+      - name: Install dirhash and calc hash
+        if: needs.init.outputs.publish == 'true'
+        id: dirhash
+        run: |
+          pip3 install dirhash
+          dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
+          echo "::set-output name=dirhash::${dir_hash}"
+
+      - name: Signing Source
+        if: needs.init.outputs.publish == 'true'
+        uses: home-assistant/actions/helpers/codenotary@master
+        with:
+          source: hash://${{ steps.dirhash.outputs.dirhash }}
+          asset: supervisor-${{ needs.init.outputs.version }}
+          token: ${{ secrets.CAS_TOKEN }}
+
   version:
     name: Update version
     needs: ["init", "run_supervisor"]
@@ -178,7 +184,7 @@ jobs:
     steps:
       - name: Checkout the repository
        if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v3.0.2
 
       - name: Initialize git
        if: needs.init.outputs.publish == 'true'
@@ -199,15 +205,15 @@ jobs:
   run_supervisor:
     runs-on: ubuntu-latest
     name: Run the Supervisor
-    needs: ["build", "init"]
+    needs: ["build", "codenotary", "init"]
     timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v3.0.2
 
       - name: Build the Supervisor
        if: needs.init.outputs.publish != 'true'
-        uses: home-assistant/builder@2025.02.0
+        uses: home-assistant/builder@2022.06.2
        with:
          args: |
            --test \
@@ -219,7 +225,7 @@ jobs:
        if: needs.init.outputs.publish == 'true'
        run: |
          docker pull ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }}
-          docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} ghcr.io/home-assistant/amd64-hassio-supervisor:runner
+          docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} homeassistant/amd64-hassio-supervisor:runner
 
       - name: Create the Supervisor
        run: |
@@ -236,7 +242,7 @@ jobs:
          -e SUPERVISOR_NAME=hassio_supervisor \
          -e SUPERVISOR_DEV=1 \
          -e SUPERVISOR_MACHINE="qemux86-64" \
-          ghcr.io/home-assistant/amd64-hassio-supervisor:runner
+          homeassistant/amd64-hassio-supervisor:runner
 
       - name: Start the Supervisor
        run: docker start hassio_supervisor
@@ -284,12 +290,6 @@ jobs:
            exit 1
          fi
 
-          # Make sure its state is started
-          test="$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.state')"
-          if [ "$test" != "started" ]; then
-            exit 1
-          fi
-
       - name: Check the Supervisor code sign
        if: needs.init.outputs.publish == 'true'
        run: |
@@ -324,7 +324,7 @@ jobs:
          if [ "$(echo $test | jq -r '.result')" != "ok" ]; then
            exit 1
          fi
-          echo "slug=$(echo $test | jq -r '.data.slug')" >> "$GITHUB_OUTPUT"
+          echo "::set-output name=slug::$(echo $test | jq -r '.data.slug')"
 
       - name: Uninstall SSH add-on
        run: |
@@ -362,12 +362,6 @@ jobs:
            exit 1
          fi
 
-          # Make sure its state is started
-          test="$(docker exec hassio_cli ha addons info core_ssh --no-progress --raw-json | jq -r '.data.state')"
-          if [ "$test" != "started" ]; then
-            exit 1
-          fi
-
       - name: Restore SSL directory from backup
        run: |
          test=$(docker exec hassio_cli ha backups restore ${{ steps.backup.outputs.slug }} --folders ssl --no-progress --raw-json | jq -r '.result')
```
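One pattern recurs across the two sides of this workflow diff: the newer side writes step outputs by appending to the `$GITHUB_OUTPUT` file, while the older side still emits the deprecated `::set-output` workflow command. A minimal bash sketch of the two styles, both taken verbatim from lines in the diff above:

```bash
# Deprecated style (older side of the diff): print a workflow command to stdout.
echo "::set-output name=changed::true"

# Current style (newer side of the diff): append key=value to the file that
# GitHub Actions exposes through the GITHUB_OUTPUT environment variable.
echo "changed=true" >> "$GITHUB_OUTPUT"
```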
.github/workflows/ci.yaml (vendored, 326 changed lines)

```diff
@@ -8,32 +8,30 @@ on:
 pull_request: ~
 
 env:
-  DEFAULT_PYTHON: "3.13"
-  PRE_COMMIT_CACHE: ~/.cache/pre-commit
-
-concurrency:
-  group: "${{ github.workflow }}-${{ github.ref }}"
-  cancel-in-progress: true
+  DEFAULT_PYTHON: 3.9
+  PRE_COMMIT_HOME: ~/.cache/pre-commit
+  DEFAULT_CAS: v1.0.2
 
 jobs:
   # Separate job to pre-populate the base dependency cache
   # This prevent upcoming jobs to do the same individually
   prepare:
     runs-on: ubuntu-latest
-    outputs:
-      python-version: ${{ steps.python.outputs.python-version }}
-    name: Prepare Python dependencies
+    strategy:
+      matrix:
+        python-version: [3.9]
+    name: Prepare Python ${{ matrix.python-version }} dependencies
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
-      - name: Set up Python
+        uses: actions/checkout@v3.0.2
+      - name: Set up Python ${{ matrix.python-version }}
        id: python
-        uses: actions/setup-python@v5.4.0
+        uses: actions/setup-python@v4.2.0
        with:
-          python-version: ${{ env.DEFAULT_PYTHON }}
+          python-version: ${{ matrix.python-version }}
       - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v3.0.6
        with:
          path: venv
          key: |
@@ -47,10 +45,9 @@ jobs:
          pip install -r requirements.txt -r requirements_tests.txt
       - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v3.0.6
        with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
-          lookup-only: true
+          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: |
@@ -61,91 +58,34 @@ jobs:
          . venv/bin/activate
          pre-commit install-hooks
 
-  lint-ruff-format:
-    name: Check ruff-format
+  lint-black:
+    name: Check black
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/checkout@v3.0.2
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
+        uses: actions/setup-python@v4.2.0
        id: python
        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
+          python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v3.0.6
        with:
          path: venv
          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
       - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v4.2.2
-        with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run ruff-format
+      - name: Run black
        run: |
          . venv/bin/activate
-          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
-        env:
-          RUFF_OUTPUT_FORMAT: github
-
-  lint-ruff:
-    name: Check ruff
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.4.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v4.2.2
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v4.2.2
-        with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run ruff
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
-        env:
-          RUFF_OUTPUT_FORMAT: github
+          black --target-version py38 --check supervisor tests setup.py
 
   lint-dockerfile:
     name: Check Dockerfile
@@ -153,7 +93,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v3.0.2
       - name: Register hadolint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -168,19 +108,19 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/checkout@v3.0.2
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
+        uses: actions/setup-python@v4.2.0
        id: python
        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
+          python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v3.0.6
        with:
          path: venv
          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
       - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
@@ -188,9 +128,9 @@ jobs:
          exit 1
       - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v3.0.6
        with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
+          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
       - name: Fail job if cache restore failed
@@ -206,25 +146,57 @@ jobs:
          . venv/bin/activate
          pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
 
-  lint-json:
-    name: Check JSON
+  lint-flake8:
+    name: Check flake8
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/checkout@v3.0.2
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
+        uses: actions/setup-python@v4.2.0
        id: python
        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
+          python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v3.0.6
        with:
          path: venv
          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Register flake8 problem matcher
+        run: |
+          echo "::add-matcher::.github/workflows/matchers/flake8.json"
+      - name: Run flake8
+        run: |
+          . venv/bin/activate
+          flake8 supervisor tests
+
+  lint-isort:
+    name: Check isort
+    runs-on: ubuntu-latest
+    needs: prepare
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v3.0.2
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
+        uses: actions/setup-python@v4.2.0
+        id: python
+        with:
+          python-version: ${{ env.DEFAULT_PYTHON }}
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@v3.0.6
+        with:
+          path: venv
+          key: |
+            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
       - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
@@ -232,9 +204,50 @@ jobs:
          exit 1
       - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v3.0.6
        with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
+          path: ${{ env.PRE_COMMIT_HOME }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run isort
+        run: |
+          . venv/bin/activate
+          pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
+
+  lint-json:
+    name: Check JSON
+    runs-on: ubuntu-latest
+    needs: prepare
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v3.0.2
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
+        uses: actions/setup-python@v4.2.0
+        id: python
+        with:
+          python-version: ${{ env.DEFAULT_PYTHON }}
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@v3.0.6
+        with:
+          path: venv
+          key: |
+            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v3.0.6
+        with:
+          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
       - name: Fail job if cache restore failed
@@ -256,28 +269,24 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/checkout@v3.0.2
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
+        uses: actions/setup-python@v4.2.0
        id: python
        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
+          python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v3.0.6
        with:
          path: venv
          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
       - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
-      - name: Install additional system dependencies
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y --no-install-recommends libpulse0
       - name: Register pylint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/pylint.json"
@@ -286,29 +295,73 @@ jobs:
          . venv/bin/activate
          pylint supervisor tests
 
-  pytest:
+  lint-pyupgrade:
+    name: Check pyupgrade
     runs-on: ubuntu-latest
     needs: prepare
-    name: Run tests Python ${{ needs.prepare.outputs.python-version }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/checkout@v3.0.2
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
+        uses: actions/setup-python@v4.2.0
        id: python
        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.8.1
-        with:
-          cosign-release: "v2.4.0"
       - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v3.0.6
        with:
          path: venv
          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v3.0.6
+        with:
+          path: ${{ env.PRE_COMMIT_HOME }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run pyupgrade
+        run: |
+          . venv/bin/activate
+          pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
+
+  pytest:
+    runs-on: ubuntu-latest
+    needs: prepare
+    strategy:
+      matrix:
+        python-version: [3.9]
+    name: Run tests Python ${{ matrix.python-version }}
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v3.0.2
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v4.2.0
+        id: python
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install CAS tools
+        uses: home-assistant/actions/helpers/cas@master
+        with:
+          version: ${{ env.DEFAULT_CAS }}
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@v3.0.6
+        with:
+          path: venv
+          key: |
+            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
       - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
@@ -317,7 +370,7 @@ jobs:
       - name: Install additional system dependencies
        run: |
          sudo apt-get update
-          sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon
+          sudo apt-get install -y --no-install-recommends libpulse0 libudev1
       - name: Register Python problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/python.json"
@@ -339,38 +392,37 @@ jobs:
            -o console_output_style=count \
            tests
       - name: Upload coverage artifact
-        uses: actions/upload-artifact@v4.6.1
+        uses: actions/upload-artifact@v3.1.0
        with:
          name: coverage-${{ matrix.python-version }}
          path: .coverage
-          include-hidden-files: true
 
   coverage:
     name: Process test coverage
     runs-on: ubuntu-latest
-    needs: ["pytest", "prepare"]
+    needs: pytest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.4.0
+        uses: actions/checkout@v3.0.2
+      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
+        uses: actions/setup-python@v4.2.0
        id: python
        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
+          python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v3.0.6
        with:
          path: venv
          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
       - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v3
       - name: Combine coverage results
        run: |
          . venv/bin/activate
@@ -378,4 +430,4 @@ jobs:
          coverage report
          coverage xml
       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v5.4.0
+        uses: codecov/codecov-action@v3.1.0
```
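Every lint and test job in this workflow repeats the same guard: restore the `venv` the `prepare` job populated, then abort immediately on a cache miss instead of rebuilding dependencies. A hedged shell sketch of that guard's logic (`CACHE_HIT` stands in for the `steps.cache-venv.outputs.cache-hit` expression the YAML uses; it is not a real variable in the workflow):

```bash
# Sketch only: the cache step reports whether the venv was restored, and the
# follow-up step fails fast when it was not.
if [ "$CACHE_HIT" != "true" ]; then
  echo "Failed to restore Python virtual environment from cache"
  exit 1
fi
. venv/bin/activate
pre-commit run --hook-stage manual check-json --all-files
```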
.github/workflows/lock.yml (vendored, 2 changed lines)

```diff
@@ -9,7 +9,7 @@ jobs:
   lock:
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v5.0.1
+      - uses: dessant/lock-threads@v3.0.0
        with:
          github-token: ${{ github.token }}
          issue-inactive-days: "30"
```
.github/workflows/matchers/flake8.json (vendored, new file, 30 lines)

```diff
@@ -0,0 +1,30 @@
+{
+  "problemMatcher": [
+    {
+      "owner": "flake8-error",
+      "severity": "error",
+      "pattern": [
+        {
+          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
+          "file": 1,
+          "line": 2,
+          "column": 3,
+          "message": 4
+        }
+      ]
+    },
+    {
+      "owner": "flake8-warning",
+      "severity": "warning",
+      "pattern": [
+        {
+          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
+          "file": 1,
+          "line": 2,
+          "column": 3,
+          "message": 4
+        }
+      ]
+    }
+  ]
+}
```
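For context, the two matchers differ only in the code prefix they accept: `E…` codes surface as errors, `C/D/F/N/W…` codes as warnings, with capture groups 1-4 mapping to file, line, column, and message. Hypothetical flake8 output lines (not from the diff) that each pattern would capture:

```bash
# Hypothetical output matched by the "flake8-error" pattern (E###):
#   supervisor/core.py:10:1: E302 expected 2 blank lines, found 1
# ...and by the "flake8-warning" pattern ([CDFNW]###):
#   tests/test_api.py:5:1: F401 'os' imported but unused
flake8 supervisor tests
```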
.github/workflows/release-drafter.yml (vendored, 6 changed lines)

```diff
@@ -11,7 +11,7 @@ jobs:
     name: Release Drafter
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v3.0.2
        with:
          fetch-depth: 0
 
@@ -33,10 +33,10 @@ jobs:
 
          echo Current version: $latest
          echo New target version: $datepre.$newpost
-          echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"
+          echo "::set-output name=version::$datepre.$newpost"
 
       - name: Run Release Drafter
-        uses: release-drafter/release-drafter@v6.1.0
+        uses: release-drafter/release-drafter@v5.20.0
        with:
          tag: ${{ steps.version.outputs.version }}
          name: ${{ steps.version.outputs.version }}
```
.github/workflows/sentry.yaml (vendored, 4 changed lines)

```diff
@@ -10,9 +10,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@v3.0.2
       - name: Sentry Release
-        uses: getsentry/action-release@v1.10.4
+        uses: getsentry/action-release@v1.2.0
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
```
.github/workflows/stale.yml (vendored, 4 changed lines)

```diff
@@ -9,10 +9,10 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v9.1.0
+      - uses: actions/stale@v5.1.1
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
-          days-before-stale: 30
+          days-before-stale: 60
          days-before-close: 7
          stale-issue-label: "stale"
          exempt-issue-labels: "no-stale,Help%20wanted,help-wanted,pinned,rfc,security"
```
.github/workflows/update_frontend.yml (vendored, 79 changed lines; whole file removed)

```diff
@@ -1,79 +0,0 @@
-name: Update frontend
-
-on:
-  schedule: # once a day
-    - cron: "0 0 * * *"
-  workflow_dispatch:
-
-jobs:
-  check-version:
-    runs-on: ubuntu-latest
-    outputs:
-      skip: ${{ steps.check_version.outputs.skip || steps.check_existing_pr.outputs.skip }}
-      current_version: ${{ steps.check_version.outputs.current_version }}
-      latest_version: ${{ steps.latest_frontend_version.outputs.latest_tag }}
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-      - name: Get latest frontend release
-        id: latest_frontend_version
-        uses: abatilo/release-info-action@v1.3.3
-        with:
-          owner: home-assistant
-          repo: frontend
-      - name: Check if version is up to date
-        id: check_version
-        run: |
-          current_version="$(cat .ha-frontend-version)"
-          latest_version="${{ steps.latest_frontend_version.outputs.latest_tag }}"
-          echo "current_version=${current_version}" >> $GITHUB_OUTPUT
-          echo "LATEST_VERSION=${latest_version}" >> $GITHUB_ENV
-          if [[ ! "$current_version" < "$latest_version" ]]; then
-            echo "Frontend version is up to date"
-            echo "skip=true" >> $GITHUB_OUTPUT
-          fi
-      - name: Check if there is no open PR with this version
-        if: steps.check_version.outputs.skip != 'true'
-        id: check_existing_pr
-        env:
-          GH_TOKEN: ${{ github.token }}
-        run: |
-          PR=$(gh pr list --state open --base main --json title --search "Update frontend to version $LATEST_VERSION")
-          if [[ "$PR" != "[]" ]]; then
-            echo "Skipping - There is already a PR open for version $LATEST_VERSION"
-            echo "skip=true" >> $GITHUB_OUTPUT
-          fi
-  create-pr:
-    runs-on: ubuntu-latest
-    needs: check-version
-    if: needs.check-version.outputs.skip != 'true'
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v4
-      - name: Clear www folder
-        run: |
-          rm -rf supervisor/api/panel/*
-      - name: Update version file
-        run: |
-          echo "${{ needs.check-version.outputs.latest_version }}" > .ha-frontend-version
-      - name: Download release assets
-        uses: robinraju/release-downloader@v1
-        with:
-          repository: 'home-assistant/frontend'
-          tag: ${{ needs.check-version.outputs.latest_version }}
-          fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_version }}.tar.gz
-          extract: true
-          out-file-path: supervisor/api/panel/
-      - name: Create PR
-        uses: peter-evans/create-pull-request@v7
-        with:
-          commit-message: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
-          branch: autoupdate-frontend
-          base: main
-          draft: true
-          sign-commits: true
-          title: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
-          body: >
-            Update frontend from ${{ needs.check-version.outputs.current_version }} to
-            [${{ needs.check-version.outputs.latest_version }}](https://github.com/home-assistant/frontend/releases/tag/${{ needs.check-version.outputs.latest_version }})
```
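One detail of the deleted `check_version` step worth noting: `[[ "$a" < "$b" ]]` in bash is a lexicographic string comparison, which only behaves like a version comparison here because the frontend tags are fixed-width date strings (`YYYYMMDD.N`); suffixes such as `.9` vs `.10` could still misorder. A small illustration with made-up tags:

```bash
current_version="20250221.0"   # made-up example tags
latest_version="20250301.0"
if [[ ! "$current_version" < "$latest_version" ]]; then
  # not reached here: "20250221.0" sorts before "20250301.0"
  echo "Frontend version is up to date"
fi
```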
.gitmodules (vendored, new file, 4 lines)

```diff
@@ -0,0 +1,4 @@
+[submodule "home-assistant-polymer"]
+	path = home-assistant-polymer
+	url = https://github.com/home-assistant/home-assistant-polymer
+	branch = dev
```
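An entry like this is what `git submodule add` writes into `.gitmodules`; a hedged reconstruction of the command that would produce it:

```bash
# Illustrative only: recreate the .gitmodules entry added in this diff,
# tracking the dev branch of the frontend repository as a submodule.
git submodule add -b dev \
  https://github.com/home-assistant/home-assistant-polymer \
  home-assistant-polymer
```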
```diff
@@ -1 +0,0 @@
-20250221.0
```
```diff
@@ -3,5 +3,4 @@ ignored:
   - DL3006
   - DL3013
   - DL3018
-  - DL3042
   - SC2155
```
```diff
@@ -1,15 +1,34 @@
 repos:
-  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.1
+  - repo: https://github.com/psf/black
+    rev: 22.6.0
     hooks:
-      - id: ruff
+      - id: black
        args:
-          - --fix
-      - id: ruff-format
+          - --safe
+          - --quiet
+          - --target-version
+          - py39
        files: ^((supervisor|tests)/.+)?[^/]+\.py$
+  - repo: https://gitlab.com/pycqa/flake8
+    rev: 3.8.3
+    hooks:
+      - id: flake8
+        additional_dependencies:
+          - flake8-docstrings==1.5.0
+          - pydocstyle==5.0.2
+        files: ^(supervisor|script|tests)/.+\.py$
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v5.0.0
+    rev: v3.1.0
     hooks:
       - id: check-executables-have-shebangs
        stages: [manual]
       - id: check-json
+  - repo: https://github.com/PyCQA/isort
+    rev: 5.9.3
+    hooks:
+      - id: isort
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v2.32.1
+    hooks:
+      - id: pyupgrade
+        args: [--py39-plus]
```
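Both sides of this config are consumed the same way; a quick sketch of the standard pre-commit invocations, matching what the CI jobs above run:

```bash
pip install pre-commit
pre-commit install              # install the git hook so checks run on every commit
pre-commit run --all-files      # one-off run of every configured hook over the tree
pre-commit run --hook-stage manual check-json --all-files   # a single hook, as CI does
```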
.vscode/launch.json (vendored, 7 changed lines)

```diff
@@ -13,13 +13,6 @@
          "remoteRoot": "/usr/src/supervisor"
        }
      ]
-    },
-    {
-      "name": "Debug Tests",
-      "type": "python",
-      "request": "test",
-      "console": "internalConsole",
-      "justMyCode": false
     }
   ]
 }
```
.vscode/tasks.json (vendored, 18 changed lines)

```diff
@@ -58,23 +58,9 @@
      "problemMatcher": []
    },
    {
-      "label": "Ruff Check",
+      "label": "Flake8",
      "type": "shell",
-      "command": "ruff check --fix supervisor tests",
-      "group": {
-        "kind": "test",
-        "isDefault": true
-      },
-      "presentation": {
-        "reveal": "always",
-        "panel": "new"
-      },
-      "problemMatcher": []
-    },
-    {
-      "label": "Ruff Format",
-      "type": "shell",
-      "command": "ruff format supervisor tests",
+      "command": "flake8 supervisor tests",
      "group": {
        "kind": "test",
        "isDefault": true
```
Dockerfile (36 changed lines)
@@ -3,21 +3,17 @@ FROM ${BUILD_FROM}

 ENV \
     S6_SERVICES_GRACETIME=10000 \
-    SUPERVISOR_API=http://localhost \
-    CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \
-    UV_SYSTEM_PYTHON=true
+    SUPERVISOR_API=http://localhost

 ARG \
-    COSIGN_VERSION \
     BUILD_ARCH \
-    QEMU_CPU
+    CAS_VERSION

 # Install base
 WORKDIR /usr/src
 RUN \
     set -x \
     && apk add --no-cache \
-        findutils \
         eudev \
         eudev-libs \
         git \
@@ -25,27 +21,33 @@ RUN \
         libpulse \
         musl \
         openssl \
-        yaml \
+    && apk add --no-cache --virtual .build-dependencies \
+        build-base \
+        go \
     \
-    && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
-    && chmod a+x /usr/bin/cosign \
-    && pip3 install uv==0.6.1
+    && git clone -b "v${CAS_VERSION}" --depth 1 \
+        https://github.com/codenotary/cas \
+    && cd cas \
+    && make cas \
+    && mv cas /usr/bin/cas \
+    \
+    && apk del .build-dependencies \
+    && rm -rf /root/go /root/.cache \
+    && rm -rf /usr/src/cas

 # Install requirements
 COPY requirements.txt .
 RUN \
-    if [ "${BUILD_ARCH}" = "i386" ]; then \
-        setarch="linux32"; \
-    else \
-        setarch=""; \
-    fi \
-    && ${setarch} uv pip install --compile-bytecode --no-cache --no-build -r requirements.txt \
+    export MAKEFLAGS="-j$(nproc)" \
+    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
+        "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
+        -r ./requirements.txt \
     && rm -f requirements.txt

 # Install Home Assistant Supervisor
 COPY . supervisor
 RUN \
-    uv pip install --no-cache -e ./supervisor \
+    pip3 install --no-cache-dir -e ./supervisor \
    && python3 -m compileall ./supervisor/supervisor
README.md
@@ -30,5 +30,3 @@ Releases are done in 3 stages (channels) with this structure:

 [development]: https://developers.home-assistant.io/docs/supervisor/development
 [stable]: https://github.com/home-assistant/version/blob/master/stable.json
-
-[](https://www.openhomefoundation.org/)
build.yaml (18 changed lines)
@@ -1,18 +1,16 @@
-image: ghcr.io/home-assistant/{arch}-hassio-supervisor
+image: homeassistant/{arch}-hassio-supervisor
+shadow_repository: ghcr.io/home-assistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.13-alpine3.21
-  armhf: ghcr.io/home-assistant/armhf-base-python:3.13-alpine3.21
-  armv7: ghcr.io/home-assistant/armv7-base-python:3.13-alpine3.21
-  amd64: ghcr.io/home-assistant/amd64-base-python:3.13-alpine3.21
-  i386: ghcr.io/home-assistant/i386-base-python:3.13-alpine3.21
+  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.9-alpine3.14
+  armhf: ghcr.io/home-assistant/armhf-base-python:3.9-alpine3.14
+  armv7: ghcr.io/home-assistant/armv7-base-python:3.9-alpine3.14
+  amd64: ghcr.io/home-assistant/amd64-base-python:3.9-alpine3.14
+  i386: ghcr.io/home-assistant/i386-base-python:3.9-alpine3.14
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
-cosign:
-  base_identity: https://github.com/home-assistant/docker-base/.*
-  identity: https://github.com/home-assistant/supervisor/.*
 args:
-  COSIGN_VERSION: 2.4.0
+  CAS_VERSION: 1.0.2
 labels:
   io.hass.type: supervisor
   org.opencontainers.image.title: Home Assistant Supervisor
|
1
home-assistant-polymer
Submodule
1
home-assistant-polymer
Submodule
Submodule home-assistant-polymer added at ca28feca80
pylintrc (new file, +45)
@@ -0,0 +1,45 @@
+[MASTER]
+reports=no
+jobs=2
+
+good-names=id,i,j,k,ex,Run,_,fp,T,os
+
+extension-pkg-whitelist=
+    ciso8601
+
+# Reasons disabled:
+# format - handled by black
+# locally-disabled - it spams too much
+# duplicate-code - unavoidable
+# cyclic-import - doesn't test if both import on load
+# abstract-class-not-used - is flaky, should not show up but does
+# unused-argument - generic callbacks and setup methods create a lot of warnings
+# too-many-* - are not enforced for the sake of readability
+# too-few-* - same as too-many-*
+# abstract-method - with intro of async there are always methods missing
+disable=
+    format,
+    abstract-method,
+    cyclic-import,
+    duplicate-code,
+    locally-disabled,
+    no-else-return,
+    not-context-manager,
+    too-few-public-methods,
+    too-many-arguments,
+    too-many-branches,
+    too-many-instance-attributes,
+    too-many-lines,
+    too-many-locals,
+    too-many-public-methods,
+    too-many-return-statements,
+    too-many-statements,
+    unused-argument,
+    consider-using-with
+
+[EXCEPTIONS]
+overgeneral-exceptions=Exception
+
+
+[TYPECHECK]
+ignored-modules = distutils
pyproject.toml (deleted, 374 lines)
@@ -1,374 +0,0 @@
-[build-system]
-requires = ["setuptools~=75.8.0", "wheel~=0.45.0"]
-build-backend = "setuptools.build_meta"
-
-[project]
-name = "Supervisor"
-dynamic = ["version", "dependencies"]
-license = { text = "Apache-2.0" }
-description = "Open-source private cloud os for Home-Assistant based on HassOS"
-readme = "README.md"
-authors = [
-    { name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
-]
-keywords = ["docker", "home-assistant", "api"]
-requires-python = ">=3.13.0"
-
-[project.urls]
-"Homepage" = "https://www.home-assistant.io/"
-"Source Code" = "https://github.com/home-assistant/supervisor"
-"Bug Reports" = "https://github.com/home-assistant/supervisor/issues"
-"Docs: Dev" = "https://developers.home-assistant.io/"
-"Discord" = "https://www.home-assistant.io/join-chat/"
-"Forum" = "https://community.home-assistant.io/"
-
-[tool.setuptools]
-platforms = ["any"]
-zip-safe = false
-include-package-data = true
-
-[tool.setuptools.packages.find]
-include = ["supervisor*"]
-
-[tool.pylint.MAIN]
-py-version = "3.13"
-# Use a conservative default here; 2 should speed up most setups and not hurt
-# any too bad. Override on command line as appropriate.
-jobs = 2
-persistent = false
-extension-pkg-allow-list = ["ciso8601"]
-
-[tool.pylint.BASIC]
-class-const-naming-style = "any"
-good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]
-
-[tool.pylint."MESSAGES CONTROL"]
-# Reasons disabled:
-# format - handled by ruff
-# abstract-method - with intro of async there are always methods missing
-# cyclic-import - doesn't test if both import on load
-# duplicate-code - unavoidable
-# locally-disabled - it spams too much
-# too-many-* - are not enforced for the sake of readability
-# too-few-* - same as too-many-*
-# unused-argument - generic callbacks and setup methods create a lot of warnings
-disable = [
-    "format",
-    "abstract-method",
-    "cyclic-import",
-    "duplicate-code",
-    "locally-disabled",
-    "no-else-return",
-    "not-context-manager",
-    "too-few-public-methods",
-    "too-many-arguments",
-    "too-many-branches",
-    "too-many-instance-attributes",
-    "too-many-lines",
-    "too-many-locals",
-    "too-many-public-methods",
-    "too-many-return-statements",
-    "too-many-statements",
-    "unused-argument",
-    "consider-using-with",
-
-    # Handled by ruff
-    # Ref: <https://github.com/astral-sh/ruff/issues/970>
-    "await-outside-async", # PLE1142
-    "bad-str-strip-call", # PLE1310
-    "bad-string-format-type", # PLE1307
-    "bidirectional-unicode", # PLE2502
-    "continue-in-finally", # PLE0116
-    "duplicate-bases", # PLE0241
-    "format-needs-mapping", # F502
-    "function-redefined", # F811
-    # Needed because ruff does not understand type of __all__ generated by a function
-    # "invalid-all-format", # PLE0605
-    "invalid-all-object", # PLE0604
-    "invalid-character-backspace", # PLE2510
-    "invalid-character-esc", # PLE2513
-    "invalid-character-nul", # PLE2514
-    "invalid-character-sub", # PLE2512
-    "invalid-character-zero-width-space", # PLE2515
-    "logging-too-few-args", # PLE1206
-    "logging-too-many-args", # PLE1205
-    "missing-format-string-key", # F524
-    "mixed-format-string", # F506
-    "no-method-argument", # N805
-    "no-self-argument", # N805
-    "nonexistent-operator", # B002
-    "nonlocal-without-binding", # PLE0117
-    "not-in-loop", # F701, F702
-    "notimplemented-raised", # F901
-    "return-in-init", # PLE0101
-    "return-outside-function", # F706
-    "syntax-error", # E999
-    "too-few-format-args", # F524
-    "too-many-format-args", # F522
-    "too-many-star-expressions", # F622
-    "truncated-format-string", # F501
-    "undefined-all-variable", # F822
-    "undefined-variable", # F821
-    "used-prior-global-declaration", # PLE0118
-    "yield-inside-async-function", # PLE1700
-    "yield-outside-function", # F704
-    "anomalous-backslash-in-string", # W605
-    "assert-on-string-literal", # PLW0129
-    "assert-on-tuple", # F631
-    "bad-format-string", # W1302, F
-    "bad-format-string-key", # W1300, F
-    "bare-except", # E722
-    "binary-op-exception", # PLW0711
-    "cell-var-from-loop", # B023
-    # "dangerous-default-value", # B006, ruff catches new occurrences, needs more work
-    "duplicate-except", # B014
-    "duplicate-key", # F601
-    "duplicate-string-formatting-argument", # F
-    "duplicate-value", # F
-    "eval-used", # PGH001
-    "exec-used", # S102
-    # "expression-not-assigned", # B018, ruff catches new occurrences, needs more work
-    "f-string-without-interpolation", # F541
-    "forgotten-debug-statement", # T100
-    "format-string-without-interpolation", # F
-    # "global-statement", # PLW0603, ruff catches new occurrences, needs more work
-    "global-variable-not-assigned", # PLW0602
-    "implicit-str-concat", # ISC001
-    "import-self", # PLW0406
-    "inconsistent-quotes", # Q000
-    "invalid-envvar-default", # PLW1508
-    "keyword-arg-before-vararg", # B026
-    "logging-format-interpolation", # G
-    "logging-fstring-interpolation", # G
-    "logging-not-lazy", # G
-    "misplaced-future", # F404
-    "named-expr-without-context", # PLW0131
-    "nested-min-max", # PLW3301
-    # "pointless-statement", # B018, ruff catches new occurrences, needs more work
-    "raise-missing-from", # TRY200
-    # "redefined-builtin", # A001, ruff is way more stricter, needs work
-    "try-except-raise", # TRY203
-    "unused-argument", # ARG001, we don't use it
-    "unused-format-string-argument", #F507
-    "unused-format-string-key", # F504
-    "unused-import", # F401
-    "unused-variable", # F841
-    "useless-else-on-loop", # PLW0120
-    "wildcard-import", # F403
-    "bad-classmethod-argument", # N804
-    "consider-iterating-dictionary", # SIM118
-    "empty-docstring", # D419
-    "invalid-name", # N815
-    "line-too-long", # E501, disabled globally
-    "missing-class-docstring", # D101
-    "missing-final-newline", # W292
-    "missing-function-docstring", # D103
-    "missing-module-docstring", # D100
-    "multiple-imports", #E401
-    "singleton-comparison", # E711, E712
-    "subprocess-run-check", # PLW1510
-    "superfluous-parens", # UP034
-    "ungrouped-imports", # I001
-    "unidiomatic-typecheck", # E721
-    "unnecessary-direct-lambda-call", # PLC3002
-    "unnecessary-lambda-assignment", # PLC3001
-    "unneeded-not", # SIM208
-    "useless-import-alias", # PLC0414
-    "wrong-import-order", # I001
-    "wrong-import-position", # E402
-    "comparison-of-constants", # PLR0133
-    "comparison-with-itself", # PLR0124
-    # "consider-alternative-union-syntax", # UP007, typing extension
-    "consider-merging-isinstance", # PLR1701
-    # "consider-using-alias", # UP006, typing extension
-    "consider-using-dict-comprehension", # C402
-    "consider-using-generator", # C417
-    "consider-using-get", # SIM401
-    "consider-using-set-comprehension", # C401
-    "consider-using-sys-exit", # PLR1722
-    "consider-using-ternary", # SIM108
-    "literal-comparison", # F632
-    "property-with-parameters", # PLR0206
-    "super-with-arguments", # UP008
-    "too-many-branches", # PLR0912
-    "too-many-return-statements", # PLR0911
-    "too-many-statements", # PLR0915
-    "trailing-comma-tuple", # COM818
-    "unnecessary-comprehension", # C416
-    "use-a-generator", # C417
-    "use-dict-literal", # C406
-    "use-list-literal", # C405
-    "useless-object-inheritance", # UP004
-    "useless-return", # PLR1711
-    # "no-self-use", # PLR6301 # Optional plugin, not enabled
-]
-
-[tool.pylint.REPORTS]
-score = false
-
-[tool.pylint.TYPECHECK]
-ignored-modules = ["distutils"]
-
-[tool.pylint.FORMAT]
-expected-line-ending-format = "LF"
-
-[tool.pylint.EXCEPTIONS]
-overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
-
-[tool.pylint.DESIGN]
-max-positional-arguments = 10
-
-[tool.pytest.ini_options]
-testpaths = ["tests"]
-norecursedirs = [".git"]
-log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
-log_date_format = "%Y-%m-%d %H:%M:%S"
-asyncio_default_fixture_loop_scope = "function"
-asyncio_mode = "auto"
-filterwarnings = [
-    "error",
-    "ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash",
-    "ignore::pytest.PytestUnraisableExceptionWarning",
-]
-
-[tool.ruff]
-lint.select = [
-    "B002", # Python does not support the unary prefix increment
-    "B007", # Loop control variable {name} not used within loop body
-    "B014", # Exception handler with duplicate exception
-    "B023", # Function definition does not bind loop variable {name}
-    "B026", # Star-arg unpacking after a keyword argument is strongly discouraged
-    "B904", # Use raise from to specify exception cause
-    "C", # complexity
-    "COM818", # Trailing comma on bare tuple prohibited
-    "D", # docstrings
-    "DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow()
-    "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
-    "E", # pycodestyle
-    "F", # pyflakes/autoflake
-    "G", # flake8-logging-format
-    "I", # isort
-    "ICN001", # import concentions; {name} should be imported as {asname}
-    "N804", # First argument of a class method should be named cls
-    "N805", # First argument of a method should be named self
-    "N815", # Variable {name} in class scope should not be mixedCase
-    "PGH004", # Use specific rule codes when using noqa
-    "PLC0414", # Useless import alias. Import alias does not rename original package.
-    "PLC", # pylint
-    "PLE", # pylint
-    "PLR", # pylint
-    "PLW", # pylint
-    "Q000", # Double quotes found but single quotes preferred
-    "RUF006", # Store a reference to the return value of asyncio.create_task
-    "S102", # Use of exec detected
-    "S103", # bad-file-permissions
-    "S108", # hardcoded-temp-file
-    "S306", # suspicious-mktemp-usage
-    "S307", # suspicious-eval-usage
-    "S313", # suspicious-xmlc-element-tree-usage
-    "S314", # suspicious-xml-element-tree-usage
-    "S315", # suspicious-xml-expat-reader-usage
-    "S316", # suspicious-xml-expat-builder-usage
-    "S317", # suspicious-xml-sax-usage
-    "S318", # suspicious-xml-mini-dom-usage
-    "S319", # suspicious-xml-pull-dom-usage
-    "S320", # suspicious-xmle-tree-usage
-    "S601", # paramiko-call
-    "S602", # subprocess-popen-with-shell-equals-true
-    "S604", # call-with-shell-equals-true
-    "S608", # hardcoded-sql-expression
-    "S609", # unix-command-wildcard-injection
-    "SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
-    "SIM117", # Merge with-statements that use the same scope
-    "SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
-    "SIM201", # Use {left} != {right} instead of not {left} == {right}
-    "SIM208", # Use {expr} instead of not (not {expr})
-    "SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
-    "SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
-    "SIM401", # Use get from dict with default instead of an if block
-    "T100", # Trace found: {name} used
-    "T20", # flake8-print
-    "TID251", # Banned imports
-    "TRY004", # Prefer TypeError exception for invalid type
-    "TRY203", # Remove exception handler; error is immediately re-raised
-    "UP", # pyupgrade
-    "W", # pycodestyle
-]
-
-lint.ignore = [
-    "D202", # No blank lines allowed after function docstring
-    "D203", # 1 blank line required before class docstring
-    "D213", # Multi-line docstring summary should start at the second line
-    "D406", # Section name should end with a newline
-    "D407", # Section name underlining
-    "E501", # line too long
-    "E731", # do not assign a lambda expression, use a def
-
-    # Ignore ignored, as the rule is now back in preview/nursery, which cannot
-    # be ignored anymore without warnings.
-    # https://github.com/astral-sh/ruff/issues/7491
-    # "PLC1901", # Lots of false positives
-
-    # False positives https://github.com/astral-sh/ruff/issues/5386
-    "PLC0208", # Use a sequence type instead of a `set` when iterating over values
-    "PLR0911", # Too many return statements ({returns} > {max_returns})
-    "PLR0912", # Too many branches ({branches} > {max_branches})
-    "PLR0913", # Too many arguments to function call ({c_args} > {max_args})
-    "PLR0915", # Too many statements ({statements} > {max_statements})
-    "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
-    "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
-    "UP006", # keep type annotation style as is
-    "UP007", # keep type annotation style as is
-    # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
-    "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
-
-    # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
-    "W191",
-    "E111",
-    "E114",
-    "E117",
-    "D206",
-    "D300",
-    "Q000",
-    "Q001",
-    "Q002",
-    "Q003",
-    "COM812",
-    "COM819",
-    "ISC001",
-    "ISC002",
-
-    # Disabled because ruff does not understand type of __all__ generated by a function
-    "PLE0605",
-]
-
-[tool.ruff.lint.flake8-import-conventions.extend-aliases]
-voluptuous = "vol"
-
-[tool.ruff.lint.flake8-pytest-style]
-fixture-parentheses = false
-
-[tool.ruff.lint.flake8-tidy-imports.banned-api]
-"pytz".msg = "use zoneinfo instead"
-
-[tool.ruff.lint.isort]
-force-sort-within-sections = true
-section-order = [
-    "future",
-    "standard-library",
-    "third-party",
-    "first-party",
-    "local-folder",
-]
-forced-separate = ["tests"]
-known-first-party = ["supervisor", "tests"]
-combine-as-imports = true
-split-on-trailing-comma = false
-
-[tool.ruff.lint.per-file-ignores]
-
-# DBus Service Mocks must use typing and names understood by dbus-fast
-"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
-
-[tool.ruff.lint.mccabe]
-max-complexity = 25
pytest.ini (new file, +2)
@@ -0,0 +1,2 @@
+[pytest]
+asyncio_mode = auto
requirements.txt
@@ -1,29 +1,25 @@
-aiodns==3.2.0
-aiohttp==3.11.13
+aiodns==3.0.0
+aiohttp==3.8.1
+async_timeout==4.0.2
 atomicwrites-homeassistant==1.4.1
-attrs==25.1.0
-awesomeversion==24.6.0
-brotli==1.1.0
-ciso8601==2.3.2
-colorlog==6.9.0
-cpe==1.3.1
-cryptography==44.0.1
-debugpy==1.8.12
-deepmerge==2.0
-dirhash==0.5.0
-docker==7.1.0
-faust-cchardet==2.1.19
-gitpython==3.1.44
-jinja2==3.1.5
-orjson==3.10.12
-pulsectl==24.12.0
-pyudev==0.24.3
-PyYAML==6.0.2
-requests==2.32.3
-securetar==2025.2.1
-sentry-sdk==2.22.0
-setuptools==75.8.2
-voluptuous==0.15.2
-dbus-fast==2.34.0
-typing_extensions==4.12.2
-zlib-fast==0.2.1
+attrs==22.1.0
+awesomeversion==22.6.0
+brotli==1.0.9
+cchardet==2.1.7
+ciso8601==2.2.0
+colorlog==6.6.0
+cpe==1.2.1
+cryptography==37.0.4
+debugpy==1.6.2
+deepmerge==1.0.1
+dirhash==0.2.1
+docker==5.0.3
+gitpython==3.1.27
+jinja2==3.1.2
+pulsectl==22.3.2
+pyudev==0.23.2
+ruamel.yaml==0.17.17
+securetar==2022.2.0
+sentry-sdk==1.9.2
+voluptuous==0.13.1
+dbus-next==0.2.3
requirements_tests.txt
@@ -1,13 +1,15 @@
-astroid==3.3.8
-coverage==7.6.12
-pre-commit==4.1.0
-pylint==3.3.4
-pytest-aiohttp==1.1.0
-pytest-asyncio==0.25.2
-pytest-cov==6.0.0
-pytest-timeout==2.3.1
-pytest==8.3.4
-ruff==0.9.8
-time-machine==2.16.0
-typing_extensions==4.12.2
-urllib3==2.3.0
+black==22.6.0
+codecov==2.1.12
+coverage==6.4.3
+flake8-docstrings==1.6.0
+flake8==5.0.4
+pre-commit==2.20.0
+pydocstyle==6.1.1
+pylint==2.14.5
+pytest-aiohttp==1.0.4
+pytest-asyncio==0.18.3
+pytest-cov==3.0.0
+pytest-timeout==2.1.0
+pytest==7.1.2
+pyupgrade==2.37.3
+time-machine==2.7.1
@@ -15,7 +15,7 @@ do
     if [[ "${supervisor_state}" = "running" ]]; then

         # Check API
-        if bashio::supervisor.ping > /dev/null; then
+        if bashio::supervisor.ping; then
            failed_count=0
        else
            bashio::log.warning "Maybe found an issue on API healthy"
rootfs/root/.cas-trusted-signing-pub-key (new file, +4)
@@ -0,0 +1,4 @@
+-----BEGIN PUBLIC KEY-----
+MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE03LvYuz79GTJx4uKp3w6NrSe5JZI
+iBtgzzYi0YQYtZO/r+xFpgDJEa0gLHkXtl94fpqrFiN89In83lzaszbZtA==
+-----END PUBLIC KEY-----
rootfs/root/.cas/config.json (new file, +8)
@@ -0,0 +1,8 @@
+{
+  "currentcontext": {
+    "LcHost": "cas.codenotary.com",
+    "LcPort": "443"
+  },
+  "schemaversion": 3,
+  "users": null
+}
scripts/update-frontend.sh (new executable file, +30)
@@ -0,0 +1,30 @@
+#!/bin/bash
+source "/etc/supervisor_scripts/common"
+
+set -e
+
+# Update frontend
+git submodule update --init --recursive --remote
+
+[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
+cd home-assistant-polymer
+nvm install
+script/bootstrap
+
+# Download translations
+start_docker
+./script/translations_download
+
+# build frontend
+cd hassio
+./script/build_hassio
+
+# Copy frontend
+rm -rf ../../supervisor/api/panel/*
+cp -rf build/* ../../supervisor/api/panel/
+
+# Reset frontend git
+cd ..
+git reset --hard HEAD
+
+stop_docker
setup.cfg (new file, +29)
@@ -0,0 +1,29 @@
+[isort]
+multi_line_output = 3
+include_trailing_comma=True
+force_grid_wrap=0
+line_length=88
+indent = "    "
+force_sort_within_sections = true
+sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
+default_section = THIRDPARTY
+forced_separate = tests
+combine_as_imports = true
+use_parentheses = true
+known_first_party = supervisor,tests
+
+[flake8]
+exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
+doctests = True
+max-line-length = 88
+# E501: line too long
+# W503: Line break occurred before a binary operator
+# E203: Whitespace before ':'
+# D202 No blank lines allowed after function docstring
+# W504 line break after binary operator
+ignore =
+    E501,
+    W503,
+    E203,
+    D202,
+    W504
setup.py (76 changed lines)
@@ -1,28 +1,60 @@
 """Home Assistant Supervisor setup."""

-from pathlib import Path
-import re
-
 from setuptools import setup

-RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")
-
-SUPERVISOR_DIR = Path(__file__).parent
-REQUIREMENTS_FILE = SUPERVISOR_DIR / "requirements.txt"
-CONST_FILE = SUPERVISOR_DIR / "supervisor/const.py"
-
-REQUIREMENTS = REQUIREMENTS_FILE.read_text(encoding="utf-8")
-CONSTANTS = CONST_FILE.read_text(encoding="utf-8")
-
-
-def _get_supervisor_version():
-    for line in CONSTANTS.split("/n"):
-        if match := RE_SUPERVISOR_VERSION.match(line):
-            return match.group(1)
-    return "9999.09.9.dev9999"
-
+from supervisor.const import SUPERVISOR_VERSION

 setup(
-    version=_get_supervisor_version(),
-    dependencies=REQUIREMENTS.split("/n"),
+    name="Supervisor",
+    version=SUPERVISOR_VERSION,
+    license="BSD License",
+    author="The Home Assistant Authors",
+    author_email="hello@home-assistant.io",
+    url="https://home-assistant.io/",
+    description=("Open-source private cloud os for Home-Assistant" " based on HassOS"),
+    long_description=(
+        "A maintainless private cloud operator system that"
+        "setup a Home-Assistant instance. Based on HassOS"
+    ),
+    classifiers=[
+        "Intended Audience :: End Users/Desktop",
+        "Intended Audience :: Developers",
+        "License :: OSI Approved :: Apache Software License",
+        "Operating System :: OS Independent",
+        "Topic :: Home Automation",
+        "Topic :: Software Development :: Libraries :: Python Modules",
+        "Topic :: Scientific/Engineering :: Atmospheric Science",
+        "Development Status :: 5 - Production/Stable",
+        "Intended Audience :: Developers",
+        "Programming Language :: Python :: 3.8",
+    ],
+    keywords=["docker", "home-assistant", "api"],
+    zip_safe=False,
+    platforms="any",
+    packages=[
+        "supervisor.addons",
+        "supervisor.api",
+        "supervisor.backups",
+        "supervisor.dbus.network",
+        "supervisor.dbus.network.setting",
+        "supervisor.dbus",
+        "supervisor.discovery.services",
+        "supervisor.discovery",
+        "supervisor.docker",
+        "supervisor.homeassistant",
+        "supervisor.host",
+        "supervisor.jobs",
+        "supervisor.misc",
+        "supervisor.plugins",
+        "supervisor.resolution.checks",
+        "supervisor.resolution.evaluations",
+        "supervisor.resolution.fixups",
+        "supervisor.resolution",
+        "supervisor.security",
+        "supervisor.services.modules",
+        "supervisor.services",
+        "supervisor.store",
+        "supervisor.utils",
+        "supervisor",
+    ],
+    include_package_data=True,
 )
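One detail on the removed side of this diff: `_get_supervisor_version` splits `CONSTANTS` on the literal two-character string "/n" (and `dependencies` splits requirements the same way), which looks like a typo for the newline escape "\n"; as written the whole file is treated as one line and the anchored regex falls through to the dev fallback in most cases. A corrected standalone sketch, with the paths and regex taken from the diff and the "\n" fix being my assumption about the intent:

from pathlib import Path
import re

RE_SUPERVISOR_VERSION = re.compile(r"^SUPERVISOR_VERSION =\s*(.+)$")
CONST_FILE = Path(__file__).parent / "supervisor/const.py"

def get_supervisor_version() -> str:
    """Read SUPERVISOR_VERSION out of supervisor/const.py without importing it."""
    for line in CONST_FILE.read_text(encoding="utf-8").split("\n"):  # "\n", not "/n"
        if match := RE_SUPERVISOR_VERSION.match(line):
            return match.group(1)
    return "9999.09.9.dev9999"  # fallback for dev checkouts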
supervisor/__main__.py
@@ -1,20 +1,11 @@
 """Main file for Supervisor."""

 import asyncio
 from concurrent.futures import ThreadPoolExecutor
 import logging
 from pathlib import Path
 import sys

-import zlib_fast
-
-# Enable fast zlib before importing supervisor
-zlib_fast.enable()
-
-from supervisor import bootstrap  # pylint: disable=wrong-import-position # noqa: E402
-from supervisor.utils.logging import (  # pylint: disable=wrong-import-position # noqa: E402
-    activate_log_queue_handler,
-)
+from supervisor import bootstrap

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -37,8 +28,7 @@ if __name__ == "__main__":
     bootstrap.initialize_logging()

     # Init async event loop
-    loop = asyncio.new_event_loop()
-    asyncio.set_event_loop(loop)
+    loop = asyncio.get_event_loop()

     # Check if all information are available to setup Supervisor
     bootstrap.check_environment()
@@ -47,14 +37,13 @@ if __name__ == "__main__":
     executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
     loop.set_default_executor(executor)

-    activate_log_queue_handler()
-
     _LOGGER.info("Initializing Supervisor setup")
     coresys = loop.run_until_complete(bootstrap.initialize_coresys())
     loop.set_debug(coresys.config.debug)
     loop.run_until_complete(coresys.core.connect())

-    loop.run_until_complete(bootstrap.supervisor_debugger(coresys))
+    bootstrap.supervisor_debugger(coresys)
+    bootstrap.migrate_system_env(coresys)

     # Signal health startup for container
     run_os_startup_check_cleanup()
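The loop-setup change above is the usual asyncio migration: calling `asyncio.get_event_loop()` from a main thread with no running loop is deprecated since Python 3.10, so the newer side creates and registers the loop explicitly. A minimal sketch of the pattern, with an illustrative `main` coroutine standing in for the real startup work:

import asyncio

async def main() -> None:
    await asyncio.sleep(0)  # placeholder for real startup work

# Explicit creation replaces the deprecated implicit get_event_loop().
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)  # so code that calls get_event_loop() finds it
try:
    loop.run_until_complete(main())
finally:
    loop.close()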
supervisor/addons/__init__.py
@@ -1 +1,439 @@
 """Init file for Supervisor add-ons."""
+import asyncio
+from contextlib import suppress
+import logging
+import tarfile
+from typing import Optional, Union
+
+from ..const import AddonBoot, AddonStartup, AddonState
+from ..coresys import CoreSys, CoreSysAttributes
+from ..exceptions import (
+    AddonConfigurationError,
+    AddonsError,
+    AddonsJobError,
+    AddonsNotSupportedError,
+    CoreDNSError,
+    DockerAPIError,
+    DockerError,
+    DockerNotFound,
+    HomeAssistantAPIError,
+    HostAppArmorError,
+)
+from ..jobs.decorator import Job, JobCondition
+from ..resolution.const import ContextType, IssueType, SuggestionType
+from ..store.addon import AddonStore
+from ..utils import check_exception_chain
+from .addon import Addon
+from .data import AddonsData
+
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+AnyAddon = Union[Addon, AddonStore]
+
+
+class AddonManager(CoreSysAttributes):
+    """Manage add-ons inside Supervisor."""
+
+    def __init__(self, coresys: CoreSys):
+        """Initialize Docker base wrapper."""
+        self.coresys: CoreSys = coresys
+        self.data: AddonsData = AddonsData(coresys)
+        self.local: dict[str, Addon] = {}
+        self.store: dict[str, AddonStore] = {}
+
+    @property
+    def all(self) -> list[AnyAddon]:
+        """Return a list of all add-ons."""
+        addons: dict[str, AnyAddon] = {**self.store, **self.local}
+        return list(addons.values())
+
+    @property
+    def installed(self) -> list[Addon]:
+        """Return a list of all installed add-ons."""
+        return list(self.local.values())
+
+    def get(self, addon_slug: str, local_only: bool = False) -> Optional[AnyAddon]:
+        """Return an add-on from slug.
+
+        Prio:
+        1 - Local
+        2 - Store
+        """
+        if addon_slug in self.local:
+            return self.local[addon_slug]
+        if not local_only:
+            return self.store.get(addon_slug)
+        return None
+
+    def from_token(self, token: str) -> Optional[Addon]:
+        """Return an add-on from Supervisor token."""
+        for addon in self.installed:
+            if token == addon.supervisor_token:
+                return addon
+        return None
+
+    async def load(self) -> None:
+        """Start up add-on management."""
+        tasks = []
+        for slug in self.data.system:
+            addon = self.local[slug] = Addon(self.coresys, slug)
+            tasks.append(addon.load())
+
+        # Run initial tasks
+        _LOGGER.info("Found %d installed add-ons", len(tasks))
+        if tasks:
+            await asyncio.wait(tasks)
+
+        # Sync DNS
+        await self.sync_dns()
+
+    async def boot(self, stage: AddonStartup) -> None:
+        """Boot add-ons with mode auto."""
+        tasks: list[Addon] = []
+        for addon in self.installed:
+            if addon.boot != AddonBoot.AUTO or addon.startup != stage:
+                continue
+            tasks.append(addon)
+
+        # Evaluate add-ons which need to be started
+        _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
+        if not tasks:
+            return
+
+        # Start Add-ons sequential
+        # avoid issue on slow IO
+        for addon in tasks:
+            try:
+                await addon.start()
+            except AddonsError as err:
+                # Check if there is an system/user issue
+                if check_exception_chain(
+                    err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
+                ):
+                    addon.boot = AddonBoot.MANUAL
+                    addon.save_persist()
+            except Exception as err:  # pylint: disable=broad-except
+                self.sys_capture_exception(err)
+            else:
+                continue
+
+            _LOGGER.warning("Can't start Add-on %s", addon.slug)
+
+        await asyncio.sleep(self.sys_config.wait_boot)
+
+    async def shutdown(self, stage: AddonStartup) -> None:
+        """Shutdown addons."""
+        tasks: list[Addon] = []
+        for addon in self.installed:
+            if addon.state != AddonState.STARTED or addon.startup != stage:
+                continue
+            tasks.append(addon)
+
+        # Evaluate add-ons which need to be stopped
+        _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
+        if not tasks:
+            return
+
+        # Stop Add-ons sequential
+        # avoid issue on slow IO
+        for addon in tasks:
+            try:
+                await addon.stop()
+            except Exception as err:  # pylint: disable=broad-except
+                _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
+                self.sys_capture_exception(err)
+
+    @Job(
+        conditions=[
+            JobCondition.FREE_SPACE,
+            JobCondition.INTERNET_HOST,
+            JobCondition.HEALTHY,
+        ],
+        on_condition=AddonsJobError,
+    )
+    async def install(self, slug: str) -> None:
+        """Install an add-on."""
+        if slug in self.local:
+            raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
+        store = self.store.get(slug)
+
+        if not store:
+            raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
+
+        if not store.available:
+            raise AddonsNotSupportedError(
+                f"Add-on {slug} not supported on this platform", _LOGGER.error
+            )
+
+        self.data.install(store)
+        addon = Addon(self.coresys, slug)
+        await addon.load()
+
+        if not addon.path_data.is_dir():
+            _LOGGER.info(
+                "Creating Home Assistant add-on data folder %s", addon.path_data
+            )
+            addon.path_data.mkdir()
+
+        # Setup/Fix AppArmor profile
+        await addon.install_apparmor()
+
+        try:
+            await addon.instance.install(store.version, store.image, arch=addon.arch)
+        except DockerError as err:
+            self.data.uninstall(addon)
+            raise AddonsError() from err
+        else:
+            self.local[slug] = addon
+
+        # Reload ingress tokens
+        if addon.with_ingress:
+            await self.sys_ingress.reload()
+
+        _LOGGER.info("Add-on '%s' successfully installed", slug)
+
+    async def uninstall(self, slug: str) -> None:
+        """Remove an add-on."""
+        if slug not in self.local:
+            _LOGGER.warning("Add-on %s is not installed", slug)
+            return
+        addon = self.local[slug]
+
+        try:
+            await addon.instance.remove()
+        except DockerError as err:
+            raise AddonsError() from err
+        else:
+            addon.state = AddonState.UNKNOWN
+
+        await addon.remove_data()
+
+        # Cleanup audio settings
+        if addon.path_pulse.exists():
+            with suppress(OSError):
+                addon.path_pulse.unlink()
+
+        # Cleanup AppArmor profile
+        with suppress(HostAppArmorError):
+            await addon.uninstall_apparmor()
+
+        # Cleanup Ingress panel from sidebar
+        if addon.ingress_panel:
+            addon.ingress_panel = False
+            with suppress(HomeAssistantAPIError):
+                await self.sys_ingress.update_hass_panel(addon)
+
+        # Cleanup Ingress dynamic port assignment
+        if addon.with_ingress:
+            self.sys_create_task(self.sys_ingress.reload())
+            self.sys_ingress.del_dynamic_port(slug)
+
+        # Cleanup discovery data
+        for message in self.sys_discovery.list_messages:
+            if message.addon != addon.slug:
+                continue
+            self.sys_discovery.remove(message)
+
+        # Cleanup services data
+        for service in self.sys_services.list_services:
+            if addon.slug not in service.active:
+                continue
+            service.del_service_data(addon)
+
+        self.data.uninstall(addon)
+        self.local.pop(slug)
+
+        _LOGGER.info("Add-on '%s' successfully removed", slug)
+
+    @Job(
+        conditions=[
+            JobCondition.FREE_SPACE,
+            JobCondition.INTERNET_HOST,
+            JobCondition.HEALTHY,
+        ],
+        on_condition=AddonsJobError,
+    )
+    async def update(self, slug: str, backup: Optional[bool] = False) -> None:
+        """Update add-on."""
+        if slug not in self.local:
+            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
+        addon = self.local[slug]
+
+        if addon.is_detached:
+            raise AddonsError(
+                f"Add-on {slug} is not available inside store", _LOGGER.error
+            )
+        store = self.store[slug]
+
+        if addon.version == store.version:
+            raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
+
+        # Check if available, Maybe something have changed
+        if not store.available:
+            raise AddonsNotSupportedError(
+                f"Add-on {slug} not supported on that platform", _LOGGER.error
+            )
+
+        if backup:
+            await self.sys_backups.do_backup_partial(
+                name=f"addon_{addon.slug}_{addon.version}",
+                homeassistant=False,
+                addons=[addon.slug],
+            )
+
+        # Update instance
+        last_state: AddonState = addon.state
+        old_image = addon.image
+        try:
+            await addon.instance.update(store.version, store.image)
+        except DockerError as err:
+            raise AddonsError() from err
+
+        _LOGGER.info("Add-on '%s' successfully updated", slug)
+        self.data.update(store)
+
+        # Cleanup
+        with suppress(DockerError):
+            await addon.instance.cleanup(old_image=old_image)
+
+        # Setup/Fix AppArmor profile
+        await addon.install_apparmor()
+
+        # restore state
+        if last_state == AddonState.STARTED:
+            await addon.start()
+
+    @Job(
+        conditions=[
+            JobCondition.FREE_SPACE,
+            JobCondition.INTERNET_HOST,
+            JobCondition.HEALTHY,
+        ],
+        on_condition=AddonsJobError,
+    )
+    async def rebuild(self, slug: str) -> None:
+        """Perform a rebuild of local build add-on."""
+        if slug not in self.local:
+            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
+        addon = self.local[slug]
+
+        if addon.is_detached:
+            raise AddonsError(
+                f"Add-on {slug} is not available inside store", _LOGGER.error
+            )
+        store = self.store[slug]
+
+        # Check if a rebuild is possible now
+        if addon.version != store.version:
+            raise AddonsError(
+                "Version changed, use Update instead Rebuild", _LOGGER.error
+            )
+        if not addon.need_build:
+            raise AddonsNotSupportedError(
+                "Can't rebuild a image based add-on", _LOGGER.error
+            )
+
+        # remove docker container but not addon config
+        last_state: AddonState = addon.state
+        try:
+            await addon.instance.remove()
+            await addon.instance.install(addon.version)
+        except DockerError as err:
+            raise AddonsError() from err
+        else:
+            self.data.update(store)
+            _LOGGER.info("Add-on '%s' successfully rebuilt", slug)
+
+        # restore state
+        if last_state == AddonState.STARTED:
+            await addon.start()
+
+    @Job(
+        conditions=[
+            JobCondition.FREE_SPACE,
+            JobCondition.INTERNET_HOST,
+            JobCondition.HEALTHY,
+        ],
+        on_condition=AddonsJobError,
+    )
+    async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
+        """Restore state of an add-on."""
+        if slug not in self.local:
+            _LOGGER.debug("Add-on %s is not local available for restore", slug)
+            addon = Addon(self.coresys, slug)
+        else:
+            _LOGGER.debug("Add-on %s is local available for restore", slug)
+            addon = self.local[slug]
+
+        await addon.restore(tar_file)
+
+        # Check if new
+        if slug not in self.local:
+            _LOGGER.info("Detect new Add-on after restore %s", slug)
+            self.local[slug] = addon
+
+        # Update ingress
+        if addon.with_ingress:
+            await self.sys_ingress.reload()
+            with suppress(HomeAssistantAPIError):
+                await self.sys_ingress.update_hass_panel(addon)
+
+    @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST])
+    async def repair(self) -> None:
+        """Repair local add-ons."""
+        needs_repair: list[Addon] = []
+
+        # Evaluate Add-ons to repair
+        for addon in self.installed:
+            if await addon.instance.exists():
+                continue
+            needs_repair.append(addon)
+
+        _LOGGER.info("Found %d add-ons to repair", len(needs_repair))
+        if not needs_repair:
+            return
+
+        for addon in needs_repair:
+            _LOGGER.info("Repairing for add-on: %s", addon.slug)
+            with suppress(DockerError, KeyError):
+                # Need pull a image again
+                if not addon.need_build:
+                    await addon.instance.install(addon.version, addon.image)
+                    continue
+
+                # Need local lookup
+                if addon.need_build and not addon.is_detached:
+                    store = self.store[addon.slug]
+                    # If this add-on is available for rebuild
+                    if addon.version == store.version:
+                        await addon.instance.install(addon.version, addon.image)
+                        continue
+
+            _LOGGER.error("Can't repair %s", addon.slug)
+            with suppress(AddonsError):
+                await self.uninstall(addon.slug)
+
+    async def sync_dns(self) -> None:
+        """Sync add-ons DNS names."""
+        # Update hosts
+        for addon in self.installed:
+            try:
+                if not await addon.instance.is_running():
+                    continue
+            except DockerError as err:
+                _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
+                self.sys_resolution.create_issue(
+                    IssueType.CORRUPT_DOCKER,
+                    ContextType.ADDON,
+                    reference=addon.slug,
+                    suggestions=[SuggestionType.EXECUTE_REPAIR],
+                )
+                self.sys_capture_exception(err)
+            else:
+                self.sys_plugins.dns.add_host(
+                    ipv4=addon.ip_address, names=[addon.hostname], write=False
+                )
+
+        # Write hosts files
+        with suppress(CoreDNSError):
+            self.sys_plugins.dns.write_hosts()
(One file's diff is suppressed because it is too large.)
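One detail in the restored `AddonManager.load()` above: it appends bare coroutines (`addon.load()`) and hands the list to `asyncio.wait`. That was accepted at the time, but passing coroutines to `asyncio.wait` was deprecated in Python 3.8 and removed in 3.11; they must be wrapped in tasks first. A sketch of the later-safe form, with an illustrative coroutine standing in for the real per-add-on load:

import asyncio

async def load_one(slug: str) -> None:
    await asyncio.sleep(0)  # stand-in for the real per-add-on load

async def load_all(slugs: list[str]) -> None:
    # asyncio.wait requires Task/Future objects on Python 3.11+.
    tasks = [asyncio.create_task(load_one(slug)) for slug in slugs]
    if tasks:
        await asyncio.wait(tasks)

asyncio.run(load_all(["core_ssh", "core_samba"]))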
supervisor/addons/build.py
@@ -1,8 +1,6 @@
 """Supervisor add-on build environment."""

 from __future__ import annotations

-from functools import cached_property
 from pathlib import Path
 from typing import TYPE_CHECKING

@@ -17,8 +15,7 @@ from ..const import (
     META_ADDON,
 )
 from ..coresys import CoreSys, CoreSysAttributes
-from ..docker.interface import MAP_ARCH
-from ..exceptions import ConfigurationFileError, HassioArchNotFound
+from ..exceptions import ConfigurationFileError
 from ..utils.common import FileConfiguration, find_one_filetype
 from .validate import SCHEMA_BUILD_CONFIG

@@ -34,59 +31,28 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         self.coresys: CoreSys = coresys
         self.addon = addon

-        # Search for build file later in executor
-        super().__init__(None, SCHEMA_BUILD_CONFIG)
-
-    def _get_build_file(self) -> Path:
-        """Get build file.
-
-        Must be run in executor.
-        """
         try:
-            return find_one_filetype(
+            build_file = find_one_filetype(
                 self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION
             )
         except ConfigurationFileError:
-            return self.addon.path_location / "build.json"
+            build_file = self.addon.path_location / "build.json"

-    async def read_data(self) -> None:
-        """Load data from file."""
-        if not self._file:
-            self._file = await self.sys_run_in_executor(self._get_build_file)
-
-        await super().read_data()
+        super().__init__(build_file, SCHEMA_BUILD_CONFIG)

-    async def save_data(self):
+    def save_data(self):
         """Ignore save function."""
         raise RuntimeError()

-    @cached_property
-    def arch(self) -> str:
-        """Return arch of the add-on."""
-        return self.sys_arch.match(self.addon.arch)
-
     @property
     def base_image(self) -> str:
         """Return base image for this add-on."""
         if not self._data[ATTR_BUILD_FROM]:
             return f"ghcr.io/home-assistant/{self.sys_arch.default}-base:latest"

-        if isinstance(self._data[ATTR_BUILD_FROM], str):
-            return self._data[ATTR_BUILD_FROM]
-
         # Evaluate correct base image
-        if self.arch not in self._data[ATTR_BUILD_FROM]:
-            raise HassioArchNotFound(
-                f"Add-on {self.addon.slug} is not supported on {self.arch}"
-            )
-        return self._data[ATTR_BUILD_FROM][self.arch]
-
-    @property
-    def dockerfile(self) -> Path:
-        """Return Dockerfile path."""
-        if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
-            return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
-        return self.addon.path_location.joinpath("Dockerfile")
+        arch = self.sys_arch.match(list(self._data[ATTR_BUILD_FROM].keys()))
+        return self._data[ATTR_BUILD_FROM][arch]

     @property
     def squash(self) -> bool:
@@ -106,29 +72,24 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
     @property
     def is_valid(self) -> bool:
         """Return true if the build env is valid."""
-        try:
-            return all(
-                [
-                    self.addon.path_location.is_dir(),
-                    self.dockerfile.is_file(),
-                ]
-            )
-        except HassioArchNotFound:
-            return False
+        return all(
+            [
+                self.addon.path_location.is_dir(),
+                Path(self.addon.path_location, "Dockerfile").is_file(),
+            ]
+        )

-    def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
+    def get_docker_args(self, version: AwesomeVersion):
         """Create a dict with Docker build arguments."""
         args = {
             "path": str(self.addon.path_location),
-            "tag": f"{image or self.addon.image}:{version!s}",
-            "dockerfile": str(self.dockerfile),
+            "tag": f"{self.addon.image}:{version!s}",
             "pull": True,
             "forcerm": not self.sys_dev,
             "squash": self.squash,
-            "platform": MAP_ARCH[self.arch],
             "labels": {
                 "io.hass.version": version,
-                "io.hass.arch": self.arch,
+                "io.hass.arch": self.sys_arch.default,
                 "io.hass.type": META_ADDON,
                 "io.hass.name": self._fix_label("name"),
                 "io.hass.description": self._fix_label("description"),
@@ -1,11 +0,0 @@
-"""Confgiuration Objects for Addon Config."""
-
-from dataclasses import dataclass
-
-
-@dataclass(slots=True)
-class FolderMapping:
-    """Represent folder mapping configuration."""
-
-    path: str | None
-    read_only: bool
@@ -1,49 +1,14 @@
 """Add-on static data."""
-from datetime import timedelta
-from enum import StrEnum
-
-from ..jobs.const import JobCondition
+from enum import Enum
 
 
-class AddonBackupMode(StrEnum):
+class AddonBackupMode(str, Enum):
     """Backup mode of an Add-on."""
 
     HOT = "hot"
     COLD = "cold"
 
 
-class MappingType(StrEnum):
-    """Mapping type of an Add-on Folder."""
-
-    DATA = "data"
-    CONFIG = "config"
-    SSL = "ssl"
-    ADDONS = "addons"
-    BACKUP = "backup"
-    SHARE = "share"
-    MEDIA = "media"
-    HOMEASSISTANT_CONFIG = "homeassistant_config"
-    ALL_ADDON_CONFIGS = "all_addon_configs"
-    ADDON_CONFIG = "addon_config"
-
-
 ATTR_BACKUP = "backup"
-ATTR_BREAKING_VERSIONS = "breaking_versions"
 ATTR_CODENOTARY = "codenotary"
-ATTR_READ_ONLY = "read_only"
-ATTR_PATH = "path"
 WATCHDOG_RETRY_SECONDS = 10
-WATCHDOG_MAX_ATTEMPTS = 5
-WATCHDOG_THROTTLE_PERIOD = timedelta(minutes=30)
-WATCHDOG_THROTTLE_MAX_CALLS = 10
-
-ADDON_UPDATE_CONDITIONS = [
-    JobCondition.FREE_SPACE,
-    JobCondition.HEALTHY,
-    JobCondition.INTERNET_HOST,
-    JobCondition.PLUGINS_UPDATED,
-    JobCondition.SUPERVISOR_UPDATED,
-]
-
-RE_SLUG = r"[-_.A-Za-z0-9]+"
@@ -1,5 +1,4 @@
 """Init file for Supervisor add-on data."""
-
 from copy import deepcopy
 from typing import Any
 
@@ -38,7 +37,7 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
         """Return local add-on data."""
         return self._data[ATTR_SYSTEM]
 
-    async def install(self, addon: AddonStore) -> None:
+    def install(self, addon: AddonStore) -> None:
         """Set addon as installed."""
         self.system[addon.slug] = deepcopy(addon.data)
         self.user[addon.slug] = {
@@ -46,28 +45,26 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
             ATTR_VERSION: addon.version,
             ATTR_IMAGE: addon.image,
         }
-        await self.save_data()
+        self.save_data()
 
-    async def uninstall(self, addon: Addon) -> None:
+    def uninstall(self, addon: Addon) -> None:
         """Set add-on as uninstalled."""
         self.system.pop(addon.slug, None)
         self.user.pop(addon.slug, None)
-        await self.save_data()
+        self.save_data()
 
-    async def update(self, addon: AddonStore) -> None:
+    def update(self, addon: AddonStore) -> None:
         """Update version of add-on."""
         self.system[addon.slug] = deepcopy(addon.data)
         self.user[addon.slug].update(
            {ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
         )
-        await self.save_data()
+        self.save_data()
 
-    async def restore(
-        self, slug: str, user: Config, system: Config, image: str
-    ) -> None:
+    def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
         """Restore data to add-on."""
         self.user[slug] = deepcopy(user)
         self.system[slug] = deepcopy(system)
 
         self.user[slug][ATTR_IMAGE] = image
-        await self.save_data()
+        self.save_data()
@@ -1,403 +0,0 @@
-"""Supervisor add-on manager."""
-
-import asyncio
-from collections.abc import Awaitable
-from contextlib import suppress
-import logging
-import tarfile
-from typing import Self, Union
-
-from attr import evolve
-
-from ..const import AddonBoot, AddonStartup, AddonState
-from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import (
-    AddonsError,
-    AddonsJobError,
-    AddonsNotSupportedError,
-    CoreDNSError,
-    DockerError,
-    HassioError,
-    HomeAssistantAPIError,
-)
-from ..jobs.decorator import Job, JobCondition
-from ..resolution.const import ContextType, IssueType, SuggestionType
-from ..store.addon import AddonStore
-from ..utils.sentry import async_capture_exception
-from .addon import Addon
-from .const import ADDON_UPDATE_CONDITIONS
-from .data import AddonsData
-
-_LOGGER: logging.Logger = logging.getLogger(__name__)
-
-AnyAddon = Union[Addon, AddonStore]
-
-
-class AddonManager(CoreSysAttributes):
-    """Manage add-ons inside Supervisor."""
-
-    def __init__(self, coresys: CoreSys):
-        """Initialize Docker base wrapper."""
-        self.coresys: CoreSys = coresys
-        self.data: AddonsData = AddonsData(coresys)
-        self.local: dict[str, Addon] = {}
-        self.store: dict[str, AddonStore] = {}
-
-    @property
-    def all(self) -> list[AnyAddon]:
-        """Return a list of all add-ons."""
-        addons: dict[str, AnyAddon] = {**self.store, **self.local}
-        return list(addons.values())
-
-    @property
-    def installed(self) -> list[Addon]:
-        """Return a list of all installed add-ons."""
-        return list(self.local.values())
-
-    def get(self, addon_slug: str, local_only: bool = False) -> AnyAddon | None:
-        """Return an add-on from slug.
-
-        Prio:
-        1 - Local
-        2 - Store
-        """
-        if addon_slug in self.local:
-            return self.local[addon_slug]
-        if not local_only:
-            return self.store.get(addon_slug)
-        return None
-
-    def from_token(self, token: str) -> Addon | None:
-        """Return an add-on from Supervisor token."""
-        for addon in self.installed:
-            if token == addon.supervisor_token:
-                return addon
-        return None
-
-    async def load_config(self) -> Self:
-        """Load config in executor."""
-        await self.data.read_data()
-        return self
-
-    async def load(self) -> None:
-        """Start up add-on management."""
-        # Refresh cache for all store addons
-        tasks: list[Awaitable[None]] = [
-            store.refresh_path_cache() for store in self.store.values()
-        ]
-
-        # Load all installed addons
-        for slug in self.data.system:
-            addon = self.local[slug] = Addon(self.coresys, slug)
-            tasks.append(addon.load())
-
-        # Run initial tasks
-        _LOGGER.info("Found %d installed add-ons", len(self.data.system))
-        if tasks:
-            await asyncio.gather(*tasks)
-
-        # Sync DNS
-        await self.sync_dns()
-
-    async def boot(self, stage: AddonStartup) -> None:
-        """Boot add-ons with mode auto."""
-        tasks: list[Addon] = []
-        for addon in self.installed:
-            if addon.boot != AddonBoot.AUTO or addon.startup != stage:
-                continue
-            tasks.append(addon)
-
-        # Evaluate add-ons which need to be started
-        _LOGGER.info("Phase '%s' starting %d add-ons", stage, len(tasks))
-        if not tasks:
-            return
-
-        # Start Add-ons sequential
-        # avoid issue on slow IO
-        # Config.wait_boot is deprecated. Until addons update with healthchecks,
-        # add a sleep task for it to keep the same minimum amount of wait time
-        wait_boot: list[Awaitable[None]] = [asyncio.sleep(self.sys_config.wait_boot)]
-        for addon in tasks:
-            try:
-                if start_task := await addon.start():
-                    wait_boot.append(start_task)
-            except HassioError:
-                self.sys_resolution.add_issue(
-                    evolve(addon.boot_failed_issue),
-                    suggestions=[
-                        SuggestionType.EXECUTE_START,
-                        SuggestionType.DISABLE_BOOT,
-                    ],
-                )
-            else:
-                continue
-
-            _LOGGER.warning("Can't start Add-on %s", addon.slug)
-
-        # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
-        await asyncio.gather(*wait_boot, return_exceptions=True)
-
-        # After waiting for startup, create an issue for boot addons that are error or unknown state
-        # Ignore stopped as single shot addons can be run at boot and this is successful exit
-        # Timeout waiting for startup is not a failure, addon is probably just slow
-        for addon in tasks:
-            if addon.state in {AddonState.ERROR, AddonState.UNKNOWN}:
-                self.sys_resolution.add_issue(
-                    evolve(addon.boot_failed_issue),
-                    suggestions=[
-                        SuggestionType.EXECUTE_START,
-                        SuggestionType.DISABLE_BOOT,
-                    ],
-                )
-
-    async def shutdown(self, stage: AddonStartup) -> None:
-        """Shutdown addons."""
-        tasks: list[Addon] = []
-        for addon in self.installed:
-            if addon.state != AddonState.STARTED or addon.startup != stage:
-                continue
-            tasks.append(addon)
-
-        # Evaluate add-ons which need to be stopped
-        _LOGGER.info("Phase '%s' stopping %d add-ons", stage, len(tasks))
-        if not tasks:
-            return
-
-        # Stop Add-ons sequential
-        # avoid issue on slow IO
-        for addon in tasks:
-            try:
-                await addon.stop()
-            except Exception as err:  # pylint: disable=broad-except
-                _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
-                await async_capture_exception(err)
-
-    @Job(
-        name="addon_manager_install",
-        conditions=ADDON_UPDATE_CONDITIONS,
-        on_condition=AddonsJobError,
-    )
-    async def install(self, slug: str) -> None:
-        """Install an add-on."""
-        self.sys_jobs.current.reference = slug
-
-        if slug in self.local:
-            raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
-        store = self.store.get(slug)
-
-        if not store:
-            raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)
-
-        store.validate_availability()
-
-        await Addon(self.coresys, slug).install()
-
-        _LOGGER.info("Add-on '%s' successfully installed", slug)
-
-    async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
-        """Remove an add-on."""
-        if slug not in self.local:
-            _LOGGER.warning("Add-on %s is not installed", slug)
-            return
-
-        shared_image = any(
-            self.local[slug].image == addon.image
-            and self.local[slug].version == addon.version
-            for addon in self.installed
-            if addon.slug != slug
-        )
-        await self.local[slug].uninstall(
-            remove_config=remove_config, remove_image=not shared_image
-        )
-
-        _LOGGER.info("Add-on '%s' successfully removed", slug)
-
-    @Job(
-        name="addon_manager_update",
-        conditions=ADDON_UPDATE_CONDITIONS,
-        on_condition=AddonsJobError,
-    )
-    async def update(
-        self, slug: str, backup: bool | None = False
-    ) -> asyncio.Task | None:
-        """Update add-on.
-
-        Returns a Task that completes when addon has state 'started' (see addon.start)
-        if addon is started after update. Else nothing is returned.
-        """
-        self.sys_jobs.current.reference = slug
-
-        if slug not in self.local:
-            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
-        addon = self.local[slug]
-
-        if addon.is_detached:
-            raise AddonsError(
-                f"Add-on {slug} is not available inside store", _LOGGER.error
-            )
-        store = self.store[slug]
-
-        if addon.version == store.version:
-            raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)
-
-        # Check if available, Maybe something have changed
-        store.validate_availability()
-
-        if backup:
-            await self.sys_backups.do_backup_partial(
-                name=f"addon_{addon.slug}_{addon.version}",
-                homeassistant=False,
-                addons=[addon.slug],
-            )
-
-        return await addon.update()
-
-    @Job(
-        name="addon_manager_rebuild",
-        conditions=[
-            JobCondition.FREE_SPACE,
-            JobCondition.INTERNET_HOST,
-            JobCondition.HEALTHY,
-        ],
-        on_condition=AddonsJobError,
-    )
-    async def rebuild(self, slug: str) -> asyncio.Task | None:
-        """Perform a rebuild of local build add-on.
-
-        Returns a Task that completes when addon has state 'started' (see addon.start)
-        if addon is started after rebuild. Else nothing is returned.
-        """
-        self.sys_jobs.current.reference = slug
-
-        if slug not in self.local:
-            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
-        addon = self.local[slug]
-
-        if addon.is_detached:
-            raise AddonsError(
-                f"Add-on {slug} is not available inside store", _LOGGER.error
-            )
-        store = self.store[slug]
-
-        # Check if a rebuild is possible now
-        if addon.version != store.version:
-            raise AddonsError(
-                "Version changed, use Update instead Rebuild", _LOGGER.error
-            )
-        if not addon.need_build:
-            raise AddonsNotSupportedError(
-                "Can't rebuild a image based add-on", _LOGGER.error
-            )
-
-        return await addon.rebuild()
-
-    @Job(
-        name="addon_manager_restore",
-        conditions=[
-            JobCondition.FREE_SPACE,
-            JobCondition.INTERNET_HOST,
-            JobCondition.HEALTHY,
-        ],
-        on_condition=AddonsJobError,
-    )
-    async def restore(
-        self, slug: str, tar_file: tarfile.TarFile
-    ) -> asyncio.Task | None:
-        """Restore state of an add-on.
-
-        Returns a Task that completes when addon has state 'started' (see addon.start)
-        if addon is started after restore. Else nothing is returned.
-        """
-        self.sys_jobs.current.reference = slug
-
-        if slug not in self.local:
-            _LOGGER.debug("Add-on %s is not local available for restore", slug)
-            addon = Addon(self.coresys, slug)
-            had_ingress = False
-        else:
-            _LOGGER.debug("Add-on %s is local available for restore", slug)
-            addon = self.local[slug]
-            had_ingress = addon.ingress_panel
-
-        wait_for_start = await addon.restore(tar_file)
-
-        # Check if new
-        if slug not in self.local:
-            _LOGGER.info("Detect new Add-on after restore %s", slug)
-            self.local[slug] = addon
-
-        # Update ingress
-        if had_ingress != addon.ingress_panel:
-            await self.sys_ingress.reload()
-            with suppress(HomeAssistantAPIError):
-                await self.sys_ingress.update_hass_panel(addon)
-
-        return wait_for_start
-
-    @Job(
-        name="addon_manager_repair",
-        conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST],
-    )
-    async def repair(self) -> None:
-        """Repair local add-ons."""
-        needs_repair: list[Addon] = []
-
-        # Evaluate Add-ons to repair
-        for addon in self.installed:
-            if await addon.instance.exists():
-                continue
-            needs_repair.append(addon)
-
-        _LOGGER.info("Found %d add-ons to repair", len(needs_repair))
-        if not needs_repair:
-            return
-
-        for addon in needs_repair:
-            _LOGGER.info("Repairing for add-on: %s", addon.slug)
-            with suppress(DockerError, KeyError):
-                # Need pull a image again
-                if not addon.need_build:
-                    await addon.instance.install(addon.version, addon.image)
-                    continue
-
-                # Need local lookup
-                if addon.need_build and not addon.is_detached:
-                    store = self.store[addon.slug]
-                    # If this add-on is available for rebuild
-                    if addon.version == store.version:
-                        await addon.instance.install(addon.version, addon.image)
-                        continue
-
-            _LOGGER.error("Can't repair %s", addon.slug)
-            with suppress(AddonsError):
-                await self.uninstall(addon.slug)
-
-    async def sync_dns(self) -> None:
-        """Sync add-ons DNS names."""
-        # Update hosts
-        add_host_coros: list[Awaitable[None]] = []
-        for addon in self.installed:
-            try:
-                if not await addon.instance.is_running():
-                    continue
-            except DockerError as err:
-                _LOGGER.warning("Add-on %s is corrupt: %s", addon.slug, err)
-                self.sys_resolution.create_issue(
-                    IssueType.CORRUPT_DOCKER,
-                    ContextType.ADDON,
-                    reference=addon.slug,
-                    suggestions=[SuggestionType.EXECUTE_REPAIR],
-                )
-                await async_capture_exception(err)
-            else:
-                add_host_coros.append(
-                    self.sys_plugins.dns.add_host(
-                        ipv4=addon.ip_address, names=[addon.hostname], write=False
-                    )
-                )
-
-        await asyncio.gather(*add_host_coros)
-
-        # Write hosts files
-        with suppress(CoreDNSError):
-            await self.sys_plugins.dns.write_hosts()
@@ -1,17 +1,11 @@
 """Init file for Supervisor add-ons."""
-
 from abc import ABC, abstractmethod
-from collections import defaultdict
-from collections.abc import Awaitable, Callable
-from contextlib import suppress
-from datetime import datetime
-import logging
 from pathlib import Path
-from typing import Any
+from typing import Any, Awaitable, Optional
 
 from awesomeversion import AwesomeVersion, AwesomeVersionException
 
-from supervisor.utils.dt import utc_from_timestamp
+from supervisor.addons.const import AddonBackupMode
 
 from ..const import (
     ATTR_ADVANCED,
@@ -39,7 +33,6 @@ from ..const import (
     ATTR_HOST_IPC,
     ATTR_HOST_NETWORK,
     ATTR_HOST_PID,
-    ATTR_HOST_UTS,
     ATTR_IMAGE,
     ATTR_INGRESS,
     ATTR_INGRESS_STREAM,
@@ -47,7 +40,7 @@ from ..const import (
     ATTR_JOURNALD,
     ATTR_KERNEL_MODULES,
     ATTR_LEGACY,
-    ATTR_LOCATION,
+    ATTR_LOCATON,
     ATTR_MACHINE,
     ATTR_MAP,
     ATTR_NAME,
@@ -69,13 +62,11 @@ from ..const import (
     ATTR_TIMEOUT,
     ATTR_TMPFS,
     ATTR_TRANSLATIONS,
-    ATTR_TYPE,
     ATTR_UART,
     ATTR_UDEV,
     ATTR_URL,
     ATTR_USB,
     ATTR_VERSION,
-    ATTR_VERSION_TIMESTAMP,
     ATTR_VIDEO,
     ATTR_WATCHDOG,
     ATTR_WEBUI,
@@ -83,47 +74,25 @@
     SECURITY_DISABLE,
     SECURITY_PROFILE,
     AddonBoot,
-    AddonBootConfig,
     AddonStage,
     AddonStartup,
 )
-from ..coresys import CoreSys
+from ..coresys import CoreSys, CoreSysAttributes
 from ..docker.const import Capabilities
-from ..exceptions import AddonsNotSupportedError
-from ..jobs.const import JOB_GROUP_ADDON
-from ..jobs.job_group import JobGroup
-from ..utils import version_is_new_enough
-from .configuration import FolderMapping
-from .const import (
-    ATTR_BACKUP,
-    ATTR_BREAKING_VERSIONS,
-    ATTR_CODENOTARY,
-    ATTR_PATH,
-    ATTR_READ_ONLY,
-    AddonBackupMode,
-    MappingType,
-)
+from .const import ATTR_BACKUP, ATTR_CODENOTARY
 from .options import AddonOptions, UiOptions
-from .validate import RE_SERVICE
+from .validate import RE_SERVICE, RE_VOLUME
 
-_LOGGER: logging.Logger = logging.getLogger(__name__)
 
 Data = dict[str, Any]
 
 
-class AddonModel(JobGroup, ABC):
+class AddonModel(CoreSysAttributes, ABC):
     """Add-on Data layout."""
 
     def __init__(self, coresys: CoreSys, slug: str):
         """Initialize data holder."""
-        super().__init__(
-            coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
-        )
+        self.coresys: CoreSys = coresys
         self.slug: str = slug
-        self._path_icon_exists: bool = False
-        self._path_logo_exists: bool = False
-        self._path_changelog_exists: bool = False
-        self._path_documentation_exists: bool = False
 
     @property
     @abstractmethod
@@ -151,17 +120,12 @@ class AddonModel(JobGroup, ABC):
         return self.data[ATTR_OPTIONS]
 
     @property
-    def boot_config(self) -> AddonBootConfig:
-        """Return boot config."""
+    def boot(self) -> AddonBoot:
+        """Return boot config with prio local settings."""
         return self.data[ATTR_BOOT]
 
     @property
-    def boot(self) -> AddonBoot:
-        """Return boot config with prio local settings unless config is forced."""
-        return AddonBoot(self.data[ATTR_BOOT])
-
-    @property
-    def auto_update(self) -> bool | None:
+    def auto_update(self) -> Optional[bool]:
         """Return if auto update is enable."""
         return None
 
@@ -186,22 +150,22 @@
         return self.data[ATTR_TIMEOUT]
 
     @property
-    def uuid(self) -> str | None:
+    def uuid(self) -> Optional[str]:
         """Return an API token for this add-on."""
         return None
 
     @property
-    def supervisor_token(self) -> str | None:
+    def supervisor_token(self) -> Optional[str]:
         """Return access token for Supervisor API."""
         return None
 
     @property
-    def ingress_token(self) -> str | None:
+    def ingress_token(self) -> Optional[str]:
         """Return access token for Supervisor API."""
         return None
 
     @property
-    def ingress_entry(self) -> str | None:
+    def ingress_entry(self) -> Optional[str]:
         """Return ingress external URL."""
         return None
 
@@ -210,6 +174,18 @@
         """Return description of add-on."""
         return self.data[ATTR_DESCRIPTON]
 
+    @property
+    def long_description(self) -> Optional[str]:
+        """Return README.md as long_description."""
+        readme = Path(self.path_location, "README.md")
+
+        # If readme not exists
+        if not readme.exists():
+            return None
+
+        # Return data
+        return readme.read_text(encoding="utf-8")
+
     @property
     def repository(self) -> str:
         """Return repository of add-on."""
@@ -225,11 +201,6 @@
         """Return latest version of add-on."""
         return self.data[ATTR_VERSION]
 
-    @property
-    def latest_version_timestamp(self) -> datetime:
-        """Return when latest version was first seen."""
-        return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP])
-
     @property
     def version(self) -> AwesomeVersion:
         """Return version of add-on."""
@@ -274,32 +245,32 @@
         return self.data.get(ATTR_DISCOVERY, [])
 
     @property
-    def ports_description(self) -> dict[str, str] | None:
+    def ports_description(self) -> Optional[dict[str, str]]:
         """Return descriptions of ports."""
         return self.data.get(ATTR_PORTS_DESCRIPTION)
 
     @property
-    def ports(self) -> dict[str, int | None] | None:
+    def ports(self) -> Optional[dict[str, Optional[int]]]:
         """Return ports of add-on."""
         return self.data.get(ATTR_PORTS)
 
     @property
-    def ingress_url(self) -> str | None:
+    def ingress_url(self) -> Optional[str]:
         """Return URL to ingress url."""
         return None
 
     @property
-    def webui(self) -> str | None:
+    def webui(self) -> Optional[str]:
         """Return URL to webui or None."""
         return self.data.get(ATTR_WEBUI)
 
     @property
-    def watchdog(self) -> str | None:
+    def watchdog(self) -> Optional[str]:
         """Return URL to for watchdog or None."""
         return self.data.get(ATTR_WATCHDOG)
 
     @property
-    def ingress_port(self) -> int | None:
+    def ingress_port(self) -> Optional[int]:
         """Return Ingress port."""
         return None
 
@@ -333,11 +304,6 @@
         """Return True if add-on run on host IPC namespace."""
         return self.data[ATTR_HOST_IPC]
 
-    @property
-    def host_uts(self) -> bool:
-        """Return True if add-on run on host UTS namespace."""
-        return self.data[ATTR_HOST_UTS]
-
     @property
     def host_dbus(self) -> bool:
         """Return True if add-on run on host D-BUS."""
@@ -349,7 +315,7 @@
         return [Path(node) for node in self.data.get(ATTR_DEVICES, [])]
 
     @property
-    def environment(self) -> dict[str, str] | None:
+    def environment(self) -> Optional[dict[str, str]]:
         """Return environment of add-on."""
         return self.data.get(ATTR_ENVIRONMENT)
 
@@ -398,12 +364,12 @@
         return self.data.get(ATTR_BACKUP_EXCLUDE, [])
 
     @property
-    def backup_pre(self) -> str | None:
+    def backup_pre(self) -> Optional[str]:
         """Return pre-backup command."""
         return self.data.get(ATTR_BACKUP_PRE)
 
     @property
-    def backup_post(self) -> str | None:
+    def backup_post(self) -> Optional[str]:
         """Return post-backup command."""
         return self.data.get(ATTR_BACKUP_POST)
 
@@ -428,7 +394,7 @@
         return self.data[ATTR_INGRESS]
 
     @property
-    def ingress_panel(self) -> bool | None:
+    def ingress_panel(self) -> Optional[bool]:
         """Return True if the add-on access support ingress."""
         return None
 
@@ -478,7 +444,7 @@
         return self.data[ATTR_DEVICETREE]
 
     @property
-    def with_tmpfs(self) -> str | None:
+    def with_tmpfs(self) -> Optional[str]:
         """Return if tmp is in memory of add-on."""
         return self.data[ATTR_TMPFS]
 
@@ -498,34 +464,34 @@
         return self.data[ATTR_VIDEO]
 
     @property
-    def homeassistant_version(self) -> str | None:
+    def homeassistant_version(self) -> Optional[str]:
         """Return min Home Assistant version they needed by Add-on."""
         return self.data.get(ATTR_HOMEASSISTANT)
 
     @property
-    def url(self) -> str | None:
+    def url(self) -> Optional[str]:
         """Return URL of add-on."""
         return self.data.get(ATTR_URL)
 
     @property
     def with_icon(self) -> bool:
         """Return True if an icon exists."""
-        return self._path_icon_exists
+        return self.path_icon.exists()
 
     @property
     def with_logo(self) -> bool:
         """Return True if a logo exists."""
-        return self._path_logo_exists
+        return self.path_logo.exists()
 
     @property
     def with_changelog(self) -> bool:
         """Return True if a changelog exists."""
-        return self._path_changelog_exists
+        return self.path_changelog.exists()
 
     @property
     def with_documentation(self) -> bool:
         """Return True if a documentation exists."""
-        return self._path_documentation_exists
+        return self.path_documentation.exists()
 
     @property
     def supported_arch(self) -> list[str]:
@@ -546,7 +512,7 @@
         return self.sys_arch.default
 
     @property
-    def image(self) -> str | None:
+    def image(self) -> Optional[str]:
         """Generate image name from data."""
         return self._image(self.data)
 
@@ -556,20 +522,21 @@
         return ATTR_IMAGE not in self.data
 
     @property
-    def map_volumes(self) -> dict[MappingType, FolderMapping]:
-        """Return a dict of {MappingType: FolderMapping} from add-on."""
+    def map_volumes(self) -> dict[str, str]:
+        """Return a dict of {volume: policy} from add-on."""
         volumes = {}
         for volume in self.data[ATTR_MAP]:
-            volumes[MappingType(volume[ATTR_TYPE])] = FolderMapping(
-                volume.get(ATTR_PATH), volume[ATTR_READ_ONLY]
-            )
+            result = RE_VOLUME.match(volume)
+            if not result:
+                continue
+            volumes[result.group(1)] = result.group(2) or "ro"
 
         return volumes
 
     @property
     def path_location(self) -> Path:
         """Return path to this add-on."""
-        return Path(self.data[ATTR_LOCATION])
+        return Path(self.data[ATTR_LOCATON])
 
     @property
     def path_icon(self) -> Path:
@@ -606,7 +573,7 @@
         return AddonOptions(self.coresys, raw_schema, self.name, self.slug)
 
     @property
-    def schema_ui(self) -> list[dict[any, any]] | None:
+    def schema_ui(self) -> Optional[list[dict[any, any]]]:
         """Create a UI schema for add-on options."""
         raw_schema = self.data[ATTR_SCHEMA]
 
@@ -625,91 +592,35 @@
         return ATTR_CODENOTARY in self.data
 
     @property
-    def codenotary(self) -> str | None:
+    def codenotary(self) -> Optional[str]:
         """Return Signer email address for CAS."""
         return self.data.get(ATTR_CODENOTARY)
 
-    @property
-    def breaking_versions(self) -> list[AwesomeVersion]:
-        """Return breaking versions of addon."""
-        return self.data[ATTR_BREAKING_VERSIONS]
-
-    async def long_description(self) -> str | None:
-        """Return README.md as long_description."""
-
-        def read_readme() -> str | None:
-            readme = Path(self.path_location, "README.md")
-
-            # If readme not exists
-            if not readme.exists():
-                return None
-
-            # Return data
-            return readme.read_text(encoding="utf-8")
-
-        return await self.sys_run_in_executor(read_readme)
-
-    def refresh_path_cache(self) -> Awaitable[None]:
-        """Refresh cache of existing paths."""
-
-        def check_paths():
-            self._path_icon_exists = self.path_icon.exists()
-            self._path_logo_exists = self.path_logo.exists()
-            self._path_changelog_exists = self.path_changelog.exists()
-            self._path_documentation_exists = self.path_documentation.exists()
-
-        return self.sys_run_in_executor(check_paths)
-
-    def validate_availability(self) -> None:
-        """Validate if addon is available for current system."""
-        return self._validate_availability(self.data, logger=_LOGGER.error)
-
     def __eq__(self, other):
         """Compaired add-on objects."""
         if not isinstance(other, AddonModel):
             return False
         return self.slug == other.slug
 
-    def _validate_availability(
-        self, config, *, logger: Callable[..., None] | None = None
-    ) -> None:
-        """Validate if addon is available for current system."""
+    def _available(self, config) -> bool:
+        """Return True if this add-on is available on this platform."""
         # Architecture
         if not self.sys_arch.is_supported(config[ATTR_ARCH]):
-            raise AddonsNotSupportedError(
-                f"Add-on {self.slug} not supported on this platform, supported architectures: {', '.join(config[ATTR_ARCH])}",
-                logger,
-            )
+            return False
 
         # Machine / Hardware
         machine = config.get(ATTR_MACHINE)
-        if machine and (
-            f"!{self.sys_machine}" in machine or self.sys_machine not in machine
-        ):
-            raise AddonsNotSupportedError(
-                f"Add-on {self.slug} not supported on this machine, supported machine types: {', '.join(machine)}",
-                logger,
-            )
-
-        # Home Assistant
-        version: AwesomeVersion | None = config.get(ATTR_HOMEASSISTANT)
-        with suppress(AwesomeVersionException, TypeError):
-            if version and not version_is_new_enough(
-                self.sys_homeassistant.version, version
-            ):
-                raise AddonsNotSupportedError(
-                    f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater",
-                    logger,
-                )
-
-    def _available(self, config) -> bool:
-        """Return True if this add-on is available on this platform."""
-        try:
-            self._validate_availability(config)
-        except AddonsNotSupportedError:
+        if machine and f"!{self.sys_machine}" in machine:
+            return False
+        elif machine and self.sys_machine not in machine:
             return False
 
-        return True
+        # Home Assistant
+        version: Optional[AwesomeVersion] = config.get(ATTR_HOMEASSISTANT)
+        try:
+            return self.sys_homeassistant.version >= version
+        except (AwesomeVersionException, TypeError):
+            return True
 
     def _image(self, config) -> str:
         """Generate image name from data."""
@@ -720,3 +631,19 @@
 
         # local build
         return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"
+
+    def install(self) -> Awaitable[None]:
+        """Install this add-on."""
+        return self.sys_addons.install(self.slug)
+
+    def uninstall(self) -> Awaitable[None]:
+        """Uninstall this add-on."""
+        return self.sys_addons.uninstall(self.slug)
+
+    def update(self, backup: Optional[bool] = False) -> Awaitable[None]:
+        """Update this add-on."""
+        return self.sys_addons.update(self.slug, backup=backup)
+
+    def rebuild(self) -> Awaitable[None]:
+        """Rebuild this add-on."""
+        return self.sys_addons.rebuild(self.slug)
@@ -1,10 +1,9 @@
 """Add-on Options / UI rendering."""
-
 import hashlib
 import logging
 from pathlib import Path
 import re
-from typing import Any
+from typing import Any, Union
 
 import voluptuous as vol
 
@@ -294,7 +293,7 @@ class UiOptions(CoreSysAttributes):
         multiple: bool = False,
     ) -> None:
         """Validate a single element."""
-        ui_node: dict[str, str | bool | float | list[str]] = {"name": key}
+        ui_node: dict[str, Union[str, bool, float, list[str]]] = {"name": key}
 
         # If multiple
         if multiple:
@@ -1,5 +1,4 @@
 """Util add-ons functions."""
-
 from __future__ import annotations
 
 import asyncio
@@ -45,16 +44,12 @@ def rating_security(addon: AddonModel) -> int:
         any(
             privilege in addon.privileged
             for privilege in (
-                Capabilities.BPF,
-                Capabilities.CHECKPOINT_RESTORE,
-                Capabilities.DAC_READ_SEARCH,
                 Capabilities.NET_ADMIN,
-                Capabilities.NET_RAW,
-                Capabilities.PERFMON,
                 Capabilities.SYS_ADMIN,
-                Capabilities.SYS_MODULE,
-                Capabilities.SYS_PTRACE,
                 Capabilities.SYS_RAWIO,
+                Capabilities.SYS_PTRACE,
+                Capabilities.SYS_MODULE,
+                Capabilities.DAC_READ_SEARCH,
             )
         )
         or addon.with_kernel_modules
@@ -75,10 +70,6 @@ def rating_security(addon: AddonModel) -> int:
     if addon.host_pid:
         rating += -2
 
-    # UTS host namespace allows to set hostname only with SYS_ADMIN
-    if addon.host_uts and Capabilities.SYS_ADMIN in addon.privileged:
-        rating += -1
-
     # Docker Access & full Access
     if addon.access_docker_api or addon.with_full_access:
         rating = 1
@@ -1,5 +1,4 @@
 """Validate add-ons options schema."""
-
 import logging
 import re
 import secrets
@@ -8,6 +7,8 @@ import uuid
 
 import voluptuous as vol
 
+from supervisor.addons.const import AddonBackupMode
+
 from ..const import (
     ARCH_ALL,
     ATTR_ACCESS_TOKEN,
@@ -42,7 +43,6 @@
     ATTR_HOST_IPC,
     ATTR_HOST_NETWORK,
     ATTR_HOST_PID,
-    ATTR_HOST_UTS,
     ATTR_IMAGE,
     ATTR_INGRESS,
     ATTR_INGRESS_ENTRY,
@@ -55,7 +55,7 @@
     ATTR_KERNEL_MODULES,
     ATTR_LABELS,
     ATTR_LEGACY,
-    ATTR_LOCATION,
+    ATTR_LOCATON,
     ATTR_MACHINE,
     ATTR_MAP,
     ATTR_NAME,
@@ -79,12 +79,9 @@
     ATTR_STATE,
     ATTR_STDIN,
     ATTR_SYSTEM,
-    ATTR_SYSTEM_MANAGED,
-    ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
     ATTR_TIMEOUT,
     ATTR_TMPFS,
     ATTR_TRANSLATIONS,
-    ATTR_TYPE,
     ATTR_UART,
     ATTR_UDEV,
     ATTR_URL,
@@ -98,11 +95,11 @@
     ROLE_ALL,
     ROLE_DEFAULT,
     AddonBoot,
-    AddonBootConfig,
     AddonStage,
     AddonStartup,
     AddonState,
 )
+from ..discovery.validate import valid_discovery_service
 from ..docker.const import Capabilities
 from ..validate import (
     docker_image,
@@ -113,23 +110,12 @@
     uuid_match,
     version_tag,
 )
-from .const import (
-    ATTR_BACKUP,
-    ATTR_BREAKING_VERSIONS,
-    ATTR_CODENOTARY,
-    ATTR_PATH,
-    ATTR_READ_ONLY,
-    RE_SLUG,
-    AddonBackupMode,
-    MappingType,
-)
+from .const import ATTR_BACKUP, ATTR_CODENOTARY
 from .options import RE_SCHEMA_ELEMENT
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
-RE_VOLUME = re.compile(
-    r"^(data|config|ssl|addons|backup|share|media|homeassistant_config|all_addon_configs|addon_config)(?::(rw|ro))?$"
-)
+RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
 RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")
 
 
@@ -145,7 +131,6 @@ RE_MACHINE = re.compile(
    r"|generic-x86-64"
    r"|odroid-c2"
    r"|odroid-c4"
-    r"|odroid-m1"
    r"|odroid-n2"
    r"|odroid-xu"
    r"|qemuarm-64"
@@ -158,15 +143,10 @@ RE_MACHINE = re.compile(
    r"|raspberrypi3"
    r"|raspberrypi4-64"
    r"|raspberrypi4"
-    r"|raspberrypi5-64"
-    r"|yellow"
-    r"|green"
    r"|tinker"
    r")$"
 )
 
-RE_SLUG_FIELD = re.compile(r"^" + RE_SLUG + r"$")
-
 
 def _warn_addon_config(config: dict[str, Any]):
     """Warn about miss configs."""
@@ -214,9 +194,9 @@ def _migrate_addon_config(protocol=False):
                name,
            )
            if value == "before":
-                config[ATTR_STARTUP] = AddonStartup.SERVICES
+                config[ATTR_STARTUP] = AddonStartup.SERVICES.value
            elif value == "after":
-                config[ATTR_STARTUP] = AddonStartup.APPLICATION
+                config[ATTR_STARTUP] = AddonStartup.APPLICATION.value
 
    # UART 2021-01-20
    if "auto_uart" in config:
@@ -262,48 +242,6 @@ def _migrate_addon_config(protocol=False):
                name,
            )
 
-        # 2023-11 "map" entries can also be dict to allow path configuration
-        volumes = []
-        for entry in config.get(ATTR_MAP, []):
-            if isinstance(entry, dict):
-                volumes.append(entry)
-            if isinstance(entry, str):
-                result = RE_VOLUME.match(entry)
-                if not result:
-                    continue
-                volumes.append(
-                    {
-                        ATTR_TYPE: result.group(1),
-                        ATTR_READ_ONLY: result.group(2) != "rw",
-                    }
-                )
-
-        if volumes:
-            config[ATTR_MAP] = volumes
-
-            # 2023-10 "config" became "homeassistant" so /config can be used for addon's public config
-            if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes):
-                if any(
-                    volume
-                    and volume[ATTR_TYPE]
-                    in {MappingType.ADDON_CONFIG, MappingType.HOMEASSISTANT_CONFIG}
-                    for volume in volumes
-                ):
-                    _LOGGER.warning(
-                        "Add-on config using incompatible map options, '%s' and '%s' are ignored if '%s' is included. Please report this to the maintainer of %s",
-                        MappingType.ADDON_CONFIG,
-                        MappingType.HOMEASSISTANT_CONFIG,
-                        MappingType.CONFIG,
-                        name,
-                    )
-                else:
-                    _LOGGER.debug(
-                        "Add-on config using deprecated map option '%s' instead of '%s'. Please report this to the maintainer of %s",
-                        MappingType.CONFIG,
-                        MappingType.HOMEASSISTANT_CONFIG,
-                        name,
-                    )
-
        return config
 
    return _migrate
@@ -314,7 +252,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
    {
        vol.Required(ATTR_NAME): str,
        vol.Required(ATTR_VERSION): version_tag,
-        vol.Required(ATTR_SLUG): vol.Match(RE_SLUG_FIELD),
+        vol.Required(ATTR_SLUG): str,
        vol.Required(ATTR_DESCRIPTON): str,
        vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
        vol.Optional(ATTR_MACHINE): vol.All([vol.Match(RE_MACHINE)], vol.Unique()),
@@ -322,9 +260,7 @@
        vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
            AddonStartup
        ),
-        vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce(
-            AddonBootConfig
-        ),
+        vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
        vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
        vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
        vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
@@ -349,20 +285,11 @@
        vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
-        vol.Optional(ATTR_HOST_UTS, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICES): [str],
        vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
        vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(),
-        vol.Optional(ATTR_MAP, default=list): [
-            vol.Schema(
-                {
-                    vol.Required(ATTR_TYPE): vol.Coerce(MappingType),
-                    vol.Optional(ATTR_READ_ONLY, default=True): bool,
-                    vol.Optional(ATTR_PATH): str,
-                }
-            )
-        ],
+        vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
        vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str},
        vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)],
        vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
@@ -383,7 +310,7 @@
        vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
-        vol.Optional(ATTR_DISCOVERY): [str],
+        vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
        vol.Optional(ATTR_BACKUP_EXCLUDE): [str],
        vol.Optional(ATTR_BACKUP_PRE): str,
        vol.Optional(ATTR_BACKUP_POST): str,
@@ -414,7 +341,6 @@
            vol.Coerce(int), vol.Range(min=10, max=300)
        ),
        vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(),
-        vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag],
    },
    extra=vol.REMOVE_EXTRA,
 )
@@ -427,9 +353,8 @@ SCHEMA_ADDON_CONFIG = vol.All(
 # pylint: disable=no-value-for-parameter
 SCHEMA_BUILD_CONFIG = vol.Schema(
    {
-        vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Any(
-            vol.Match(RE_DOCKER_IMAGE_BUILD),
-            vol.Schema({vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}),
+        vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema(
+            {vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}
        ),
        vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
        vol.Optional(ATTR_ARGS, default=dict): vol.Schema({str: str}),
@@ -473,8 +398,6 @@ SCHEMA_ADDON_USER = vol.Schema(
|
|||||||
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
|
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
|
||||||
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
|
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
|
vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_SYSTEM_MANAGED, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, default=None): vol.Maybe(str),
|
|
||||||
},
|
},
|
||||||
extra=vol.REMOVE_EXTRA,
|
extra=vol.REMOVE_EXTRA,
|
||||||
)
|
)
|
||||||
@@ -483,7 +406,7 @@ SCHEMA_ADDON_SYSTEM = vol.All(
|
|||||||
_migrate_addon_config(),
|
_migrate_addon_config(),
|
||||||
_SCHEMA_ADDON_CONFIG.extend(
|
_SCHEMA_ADDON_CONFIG.extend(
|
||||||
{
|
{
|
||||||
vol.Required(ATTR_LOCATION): str,
|
vol.Required(ATTR_LOCATON): str,
|
||||||
vol.Required(ATTR_REPOSITORY): str,
|
vol.Required(ATTR_REPOSITORY): str,
|
||||||
vol.Required(ATTR_TRANSLATIONS, default=dict): {
|
vol.Required(ATTR_TRANSLATIONS, default=dict): {
|
||||||
str: SCHEMA_ADDON_TRANSLATIONS
|
str: SCHEMA_ADDON_TRANSLATIONS
|
||||||
|
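Aside: the `map` change above replaces a single string pattern (e.g. `config:rw`) with a structured entry carrying a type, a read-only flag, and an optional path. A minimal voluptuous sketch of the two shapes — the regex and the allowed type values here are illustrative stand-ins, not Supervisor's actual RE_VOLUME or MappingType:

import voluptuous as vol

# Assumed, simplified volume pattern; the real RE_VOLUME covers more targets.
RE_VOLUME_DEMO = r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$"

OLD_MAP = vol.Schema([vol.Match(RE_VOLUME_DEMO)])
NEW_MAP = vol.Schema(
    [
        vol.Schema(
            {
                vol.Required("type"): vol.In(
                    ["config", "ssl", "addons", "backup", "share", "media"]
                ),
                vol.Optional("read_only", default=True): bool,
                vol.Optional("path"): str,
            }
        )
    ]
)

OLD_MAP(["config:rw", "ssl"])                     # old style: plain strings
NEW_MAP([{"type": "share", "read_only": False}])  # new style: structured dicts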
@@ -1,22 +1,20 @@
 """Init file for Supervisor RESTful API."""
-
-from functools import partial
 import logging
 from pathlib import Path
-from typing import Any
+from typing import Any, Optional
 
 from aiohttp import web
 
-from ..const import AddonState
+from supervisor.api.utils import api_process
+from supervisor.const import AddonState
+from supervisor.exceptions import APIAddonNotInstalled
+
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
-from ..utils.sentry import async_capture_exception
 from .addons import APIAddons
 from .audio import APIAudio
 from .auth import APIAuth
 from .backups import APIBackups
 from .cli import APICli
-from .const import CONTENT_TYPE_TEXT
 from .discovery import APIDiscovery
 from .dns import APICoreDNS
 from .docker import APIDocker
@@ -26,7 +24,6 @@ from .host import APIHost
 from .ingress import APIIngress
 from .jobs import APIJobs
 from .middleware.security import SecurityMiddleware
-from .mounts import APIMounts
 from .multicast import APIMulticast
 from .network import APINetwork
 from .observer import APIObserver
@@ -38,7 +35,6 @@ from .security import APISecurity
 from .services import APIServices
 from .store import APIStore
 from .supervisor import APISupervisor
-from .utils import api_process, api_process_raw
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -57,10 +53,8 @@ class RestAPI(CoreSysAttributes):
         self.webapp: web.Application = web.Application(
             client_max_size=MAX_CLIENT_SIZE,
             middlewares=[
-                self.security.block_bad_requests,
                 self.security.system_validation,
                 self.security.token_validation,
-                self.security.core_proxy,
             ],
             handler_args={
                 "max_line_size": MAX_LINE_SIZE,
@@ -69,17 +63,11 @@ class RestAPI(CoreSysAttributes):
         )
 
         # service stuff
-        self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
-        self._site: web.TCPSite | None = None
-
-        # share single host API handler for reuse in logging endpoints
-        self._api_host: APIHost | None = None
+        self._runner: web.AppRunner = web.AppRunner(self.webapp)
+        self._site: Optional[web.TCPSite] = None
 
     async def load(self) -> None:
         """Register REST API Calls."""
-        self._api_host = APIHost()
-        self._api_host.coresys = self.coresys
-
         self._register_addons()
         self._register_audio()
         self._register_auth()
@@ -91,93 +79,41 @@ class RestAPI(CoreSysAttributes):
         self._register_hardware()
         self._register_homeassistant()
         self._register_host()
-        self._register_jobs()
+        self._register_root()
         self._register_ingress()
-        self._register_mounts()
         self._register_multicast()
         self._register_network()
         self._register_observer()
         self._register_os()
+        self._register_jobs()
         self._register_panel()
         self._register_proxy()
         self._register_resolution()
-        self._register_root()
-        self._register_security()
         self._register_services()
-        self._register_store()
         self._register_supervisor()
+        self._register_store()
+        self._register_security()
 
         await self.start()
 
-    def _register_advanced_logs(self, path: str, syslog_identifier: str):
-        """Register logs endpoint for a given path, returning logs for single syslog identifier."""
-
-        self.webapp.add_routes(
-            [
-                web.get(
-                    f"{path}/logs",
-                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
-                ),
-                web.get(
-                    f"{path}/logs/follow",
-                    partial(
-                        self._api_host.advanced_logs,
-                        identifier=syslog_identifier,
-                        follow=True,
-                    ),
-                ),
-                web.get(
-                    f"{path}/logs/boots/{{bootid}}",
-                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
-                ),
-                web.get(
-                    f"{path}/logs/boots/{{bootid}}/follow",
-                    partial(
-                        self._api_host.advanced_logs,
-                        identifier=syslog_identifier,
-                        follow=True,
-                    ),
-                ),
-            ]
-        )
-
     def _register_host(self) -> None:
         """Register hostcontrol functions."""
-        api_host = self._api_host
+        api_host = APIHost()
+        api_host.coresys = self.coresys
 
         self.webapp.add_routes(
             [
                 web.get("/host/info", api_host.info),
-                web.get("/host/logs", api_host.advanced_logs),
-                web.get(
-                    "/host/logs/follow",
-                    partial(api_host.advanced_logs, follow=True),
-                ),
-                web.get("/host/logs/identifiers", api_host.list_identifiers),
-                web.get("/host/logs/identifiers/{identifier}", api_host.advanced_logs),
-                web.get(
-                    "/host/logs/identifiers/{identifier}/follow",
-                    partial(api_host.advanced_logs, follow=True),
-                ),
-                web.get("/host/logs/boots", api_host.list_boots),
-                web.get("/host/logs/boots/{bootid}", api_host.advanced_logs),
-                web.get(
-                    "/host/logs/boots/{bootid}/follow",
-                    partial(api_host.advanced_logs, follow=True),
-                ),
-                web.get(
-                    "/host/logs/boots/{bootid}/identifiers/{identifier}",
-                    api_host.advanced_logs,
-                ),
-                web.get(
-                    "/host/logs/boots/{bootid}/identifiers/{identifier}/follow",
-                    partial(api_host.advanced_logs, follow=True),
-                ),
+                web.get("/host/logs", api_host.logs),
                 web.post("/host/reboot", api_host.reboot),
                 web.post("/host/shutdown", api_host.shutdown),
                 web.post("/host/reload", api_host.reload),
                 web.post("/host/options", api_host.options),
                 web.get("/host/services", api_host.services),
+                web.post("/host/services/{service}/stop", api_host.service_stop),
+                web.post("/host/services/{service}/start", api_host.service_start),
+                web.post("/host/services/{service}/restart", api_host.service_restart),
+                web.post("/host/services/{service}/reload", api_host.service_reload),
             ]
         )
 
@@ -220,19 +156,6 @@ class RestAPI(CoreSysAttributes):
                 web.post("/os/config/sync", api_os.config_sync),
                 web.post("/os/datadisk/move", api_os.migrate_data),
                 web.get("/os/datadisk/list", api_os.list_data),
-                web.post("/os/datadisk/wipe", api_os.wipe_data),
-                web.post("/os/boot-slot", api_os.set_boot_slot),
-            ]
-        )
-
-        # Boards endpoints
-        self.webapp.add_routes(
-            [
-                web.get("/os/boards/green", api_os.boards_green_info),
-                web.post("/os/boards/green", api_os.boards_green_options),
-                web.get("/os/boards/yellow", api_os.boards_yellow_info),
-                web.post("/os/boards/yellow", api_os.boards_yellow_options),
-                web.get("/os/boards/{board}", api_os.boards_other_info),
             ]
         )
 
@@ -259,8 +182,6 @@ class RestAPI(CoreSysAttributes):
                 web.get("/jobs/info", api_jobs.info),
                 web.post("/jobs/options", api_jobs.options),
                 web.post("/jobs/reset", api_jobs.reset),
-                web.get("/jobs/{uuid}", api_jobs.job_info),
-                web.delete("/jobs/{uuid}", api_jobs.remove_job),
             ]
         )
 
@@ -299,11 +220,11 @@ class RestAPI(CoreSysAttributes):
             [
                 web.get("/multicast/info", api_multicast.info),
                 web.get("/multicast/stats", api_multicast.stats),
+                web.get("/multicast/logs", api_multicast.logs),
                 web.post("/multicast/update", api_multicast.update),
                 web.post("/multicast/restart", api_multicast.restart),
             ]
         )
-        self._register_advanced_logs("/multicast", "hassio_multicast")
 
     def _register_hardware(self) -> None:
         """Register hardware functions."""
@@ -357,10 +278,6 @@ class RestAPI(CoreSysAttributes):
                     "/resolution/issue/{issue}",
                     api_resolution.dismiss_issue,
                 ),
-                web.get(
-                    "/resolution/issue/{issue}/suggestions",
-                    api_resolution.suggestions_for_issue,
-                ),
                 web.post("/resolution/healthcheck", api_resolution.healthcheck),
             ]
         )
@@ -376,7 +293,6 @@ class RestAPI(CoreSysAttributes):
                 web.post("/auth", api_auth.auth),
                 web.post("/auth/reset", api_auth.reset),
                 web.delete("/auth/cache", api_auth.cache),
-                web.get("/auth/list", api_auth.list_users),
             ]
         )
 
@@ -390,6 +306,7 @@ class RestAPI(CoreSysAttributes):
                 web.get("/supervisor/ping", api_supervisor.ping),
                 web.get("/supervisor/info", api_supervisor.info),
                 web.get("/supervisor/stats", api_supervisor.stats),
+                web.get("/supervisor/logs", api_supervisor.logs),
                 web.post("/supervisor/update", api_supervisor.update),
                 web.post("/supervisor/reload", api_supervisor.reload),
                 web.post("/supervisor/restart", api_supervisor.restart),
@@ -398,39 +315,6 @@ class RestAPI(CoreSysAttributes):
             ]
         )
 
-        async def get_supervisor_logs(*args, **kwargs):
-            try:
-                return await self._api_host.advanced_logs_handler(
-                    *args, identifier="hassio_supervisor", **kwargs
-                )
-            except Exception as err:  # pylint: disable=broad-exception-caught
-                # Supervisor logs are critical, so catch everything, log the exception
-                # and try to return Docker container logs as the fallback
-                _LOGGER.exception(
-                    "Failed to get supervisor logs using advanced_logs API"
-                )
-                if not isinstance(err, HostNotSupportedError):
-                    # No need to capture HostNotSupportedError to Sentry, the cause
-                    # is known and reported to the user using the resolution center.
-                    await async_capture_exception(err)
-                kwargs.pop("follow", None)  # Follow is not supported for Docker logs
-                return await api_supervisor.logs(*args, **kwargs)
-
-        self.webapp.add_routes(
-            [
-                web.get("/supervisor/logs", get_supervisor_logs),
-                web.get(
-                    "/supervisor/logs/follow",
-                    partial(get_supervisor_logs, follow=True),
-                ),
-                web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
-                web.get(
-                    "/supervisor/logs/boots/{bootid}/follow",
-                    partial(get_supervisor_logs, follow=True),
-                ),
-            ]
-        )
-
     def _register_homeassistant(self) -> None:
         """Register Home Assistant functions."""
         api_hass = APIHomeAssistant()
@@ -439,6 +323,7 @@ class RestAPI(CoreSysAttributes):
         self.webapp.add_routes(
             [
                 web.get("/core/info", api_hass.info),
+                web.get("/core/logs", api_hass.logs),
                 web.get("/core/stats", api_hass.stats),
                 web.post("/core/options", api_hass.options),
                 web.post("/core/update", api_hass.update),
@@ -450,12 +335,11 @@ class RestAPI(CoreSysAttributes):
             ]
         )
 
-        self._register_advanced_logs("/core", "homeassistant")
-
         # Reroute from legacy
         self.webapp.add_routes(
             [
                 web.get("/homeassistant/info", api_hass.info),
+                web.get("/homeassistant/logs", api_hass.logs),
                 web.get("/homeassistant/stats", api_hass.stats),
                 web.post("/homeassistant/options", api_hass.options),
                 web.post("/homeassistant/restart", api_hass.restart),
@@ -467,8 +351,6 @@ class RestAPI(CoreSysAttributes):
             ]
         )
 
-        self._register_advanced_logs("/homeassistant", "homeassistant")
-
     def _register_proxy(self) -> None:
         """Register Home Assistant API Proxy."""
         api_proxy = APIProxy()
@@ -510,39 +392,18 @@ class RestAPI(CoreSysAttributes):
                 web.post("/addons/{addon}/stop", api_addons.stop),
                 web.post("/addons/{addon}/restart", api_addons.restart),
                 web.post("/addons/{addon}/options", api_addons.options),
-                web.post("/addons/{addon}/sys_options", api_addons.sys_options),
                 web.post(
                     "/addons/{addon}/options/validate", api_addons.options_validate
                 ),
                 web.get("/addons/{addon}/options/config", api_addons.options_config),
                 web.post("/addons/{addon}/rebuild", api_addons.rebuild),
+                web.get("/addons/{addon}/logs", api_addons.logs),
                 web.post("/addons/{addon}/stdin", api_addons.stdin),
                 web.post("/addons/{addon}/security", api_addons.security),
                 web.get("/addons/{addon}/stats", api_addons.stats),
             ]
         )
 
-        @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
-        async def get_addon_logs(request, *args, **kwargs):
-            addon = api_addons.get_addon_for_request(request)
-            kwargs["identifier"] = f"addon_{addon.slug}"
-            return await self._api_host.advanced_logs(request, *args, **kwargs)
-
-        self.webapp.add_routes(
-            [
-                web.get("/addons/{addon}/logs", get_addon_logs),
-                web.get(
-                    "/addons/{addon}/logs/follow",
-                    partial(get_addon_logs, follow=True),
-                ),
-                web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
-                web.get(
-                    "/addons/{addon}/logs/boots/{bootid}/follow",
-                    partial(get_addon_logs, follow=True),
-                ),
-            ]
-        )
-
         # Legacy routing to support requests for not installed addons
         api_store = APIStore()
         api_store.coresys = self.coresys
@@ -584,15 +445,11 @@ class RestAPI(CoreSysAttributes):
         self.webapp.add_routes(
             [
                 web.get("/backups", api_backups.list),
-                web.get("/backups/info", api_backups.info),
-                web.post("/backups/options", api_backups.options),
                 web.post("/backups/reload", api_backups.reload),
-                web.post("/backups/freeze", api_backups.freeze),
-                web.post("/backups/thaw", api_backups.thaw),
                 web.post("/backups/new/full", api_backups.backup_full),
                 web.post("/backups/new/partial", api_backups.backup_partial),
                 web.post("/backups/new/upload", api_backups.upload),
-                web.get("/backups/{slug}/info", api_backups.backup_info),
+                web.get("/backups/{slug}/info", api_backups.info),
                 web.delete("/backups/{slug}", api_backups.remove),
                 web.post("/backups/{slug}/restore/full", api_backups.restore_full),
                 web.post(
@@ -640,6 +497,7 @@ class RestAPI(CoreSysAttributes):
             [
                 web.get("/dns/info", api_dns.info),
                 web.get("/dns/stats", api_dns.stats),
+                web.get("/dns/logs", api_dns.logs),
                 web.post("/dns/update", api_dns.update),
                 web.post("/dns/options", api_dns.options),
                 web.post("/dns/restart", api_dns.restart),
@@ -647,8 +505,6 @@ class RestAPI(CoreSysAttributes):
             ]
         )
 
-        self._register_advanced_logs("/dns", "hassio_dns")
-
     def _register_audio(self) -> None:
         """Register Audio functions."""
         api_audio = APIAudio()
@@ -658,6 +514,7 @@ class RestAPI(CoreSysAttributes):
             [
                 web.get("/audio/info", api_audio.info),
                 web.get("/audio/stats", api_audio.stats),
+                web.get("/audio/logs", api_audio.logs),
                 web.post("/audio/update", api_audio.update),
                 web.post("/audio/restart", api_audio.restart),
                 web.post("/audio/reload", api_audio.reload),
@@ -670,24 +527,6 @@ class RestAPI(CoreSysAttributes):
             ]
         )
 
-        self._register_advanced_logs("/audio", "hassio_audio")
-
-    def _register_mounts(self) -> None:
-        """Register mounts endpoints."""
-        api_mounts = APIMounts()
-        api_mounts.coresys = self.coresys
-
-        self.webapp.add_routes(
-            [
-                web.get("/mounts", api_mounts.info),
-                web.post("/mounts/options", api_mounts.options),
-                web.post("/mounts", api_mounts.create_mount),
-                web.put("/mounts/{mount}", api_mounts.update_mount),
-                web.delete("/mounts/{mount}", api_mounts.delete_mount),
-                web.post("/mounts/{mount}/reload", api_mounts.reload_mount),
-            ]
-        )
-
     def _register_store(self) -> None:
         """Register store endpoints."""
         api_store = APIStore()
@@ -698,6 +537,7 @@ class RestAPI(CoreSysAttributes):
                 web.get("/store", api_store.store_info),
                 web.get("/store/addons", api_store.addons_list),
                 web.get("/store/addons/{addon}", api_store.addons_addon_info),
+                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                 web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
                 web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
                 web.get(
@@ -719,8 +559,6 @@ class RestAPI(CoreSysAttributes):
                     "/store/addons/{addon}/update/{version}",
                     api_store.addons_addon_update,
                 ),
-                # Must be below others since it has a wildcard in resource path
-                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                 web.post("/store/reload", api_store.reload),
                 web.get("/store/repositories", api_store.repositories_list),
                 web.get(
@@ -772,7 +610,9 @@ class RestAPI(CoreSysAttributes):
     async def start(self) -> None:
         """Run RESTful API webserver."""
         await self._runner.setup()
-        self._site = web.TCPSite(self._runner, host="0.0.0.0", port=80)
+        self._site = web.TCPSite(
+            self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
+        )
 
         try:
             await self._site.start()
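Aside: one side of this diff funnels all journald log routes through `_register_advanced_logs`, binding a single handler to many routes with `functools.partial`. A runnable, self-contained sketch of that aiohttp pattern (the demo route names and handler body are made up, not Supervisor code):

from functools import partial

from aiohttp import web


async def logs(request: web.Request, identifier: str = "", follow: bool = False):
    # A real handler would stream journal entries for `identifier` here.
    return web.Response(text=f"identifier={identifier} follow={follow}")


app = web.Application()
app.add_routes(
    [
        # Same coroutine, different pre-bound keyword arguments per route.
        web.get("/demo/logs", partial(logs, identifier="demo")),
        web.get("/demo/logs/follow", partial(logs, identifier="demo", follow=True)),
    ]
)

if __name__ == "__main__":
    web.run_app(app)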
@@ -1,16 +1,14 @@
 """Init file for Supervisor Home Assistant RESTful API."""
-
 import asyncio
-from collections.abc import Awaitable
 import logging
-from typing import Any
+from typing import Any, Awaitable
 
 from aiohttp import web
 import voluptuous as vol
 from voluptuous.humanize import humanize_error
 
+from ..addons import AnyAddon
 from ..addons.addon import Addon
-from ..addons.manager import AnyAddon
 from ..addons.utils import rating_security
 from ..const import (
     ATTR_ADDONS,
@@ -47,7 +45,6 @@ from ..const import (
     ATTR_HOST_IPC,
     ATTR_HOST_NETWORK,
     ATTR_HOST_PID,
-    ATTR_HOST_UTS,
     ATTR_HOSTNAME,
     ATTR_ICON,
     ATTR_INGRESS,
@@ -82,8 +79,6 @@ from ..const import (
     ATTR_STARTUP,
     ATTR_STATE,
     ATTR_STDIN,
-    ATTR_SYSTEM_MANAGED,
-    ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
     ATTR_TRANSLATIONS,
     ATTR_UART,
     ATTR_UDEV,
@@ -98,7 +93,6 @@ from ..const import (
     ATTR_WEBUI,
     REQUEST_FROM,
     AddonBoot,
-    AddonBootConfig,
 )
 from ..coresys import CoreSysAttributes
 from ..docker.stats import DockerStats
@@ -106,13 +100,12 @@ from ..exceptions import (
     APIAddonNotInstalled,
     APIError,
     APIForbidden,
-    APINotFound,
     PwnedError,
     PwnedSecret,
 )
 from ..validate import docker_ports
-from .const import ATTR_BOOT_CONFIG, ATTR_REMOVE_CONFIG, ATTR_SIGNED
-from .utils import api_process, api_validate, json_loads
+from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
+from .utils import api_process, api_process_raw, api_validate, json_loads
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -131,26 +124,15 @@ SCHEMA_OPTIONS = vol.Schema(
     }
 )
 
-SCHEMA_SYS_OPTIONS = vol.Schema(
-    {
-        vol.Optional(ATTR_SYSTEM_MANAGED): vol.Boolean(),
-        vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY): vol.Maybe(str),
-    }
-)
-
+# pylint: disable=no-value-for-parameter
 SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
 
-SCHEMA_UNINSTALL = vol.Schema(
-    {vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
-)
-# pylint: enable=no-value-for-parameter
-
 
 class APIAddons(CoreSysAttributes):
     """Handle RESTful API for add-on functions."""
 
-    def get_addon_for_request(self, request: web.Request) -> Addon:
-        """Return addon, throw an exception if it doesn't exist."""
+    def _extract_addon(self, request: web.Request) -> Addon:
+        """Return addon, throw an exception it it doesn't exist."""
         addon_slug: str = request.match_info.get("addon")
 
         # Lookup itself
@@ -162,7 +144,7 @@ class APIAddons(CoreSysAttributes):
 
         addon = self.sys_addons.get(addon_slug)
         if not addon:
-            raise APINotFound(f"Addon {addon_slug} does not exist")
+            raise APIError(f"Addon {addon_slug} does not exist")
         if not isinstance(addon, Addon) or not addon.is_installed:
             raise APIAddonNotInstalled("Addon is not installed")
 
@@ -190,7 +172,6 @@ class APIAddons(CoreSysAttributes):
                 ATTR_URL: addon.url,
                 ATTR_ICON: addon.with_icon,
                 ATTR_LOGO: addon.with_logo,
-                ATTR_SYSTEM_MANAGED: addon.system_managed,
             }
             for addon in self.sys_addons.installed
         ]
@@ -204,7 +185,7 @@ class APIAddons(CoreSysAttributes):
 
     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return add-on information."""
-        addon: AnyAddon = self.get_addon_for_request(request)
+        addon: AnyAddon = self._extract_addon(request)
 
         data = {
             ATTR_NAME: addon.name,
@@ -212,14 +193,13 @@ class APIAddons(CoreSysAttributes):
             ATTR_HOSTNAME: addon.hostname,
             ATTR_DNS: addon.dns,
             ATTR_DESCRIPTON: addon.description,
-            ATTR_LONG_DESCRIPTION: await addon.long_description(),
+            ATTR_LONG_DESCRIPTION: addon.long_description,
             ATTR_ADVANCED: addon.advanced,
             ATTR_STAGE: addon.stage,
             ATTR_REPOSITORY: addon.repository,
             ATTR_VERSION_LATEST: addon.latest_version,
             ATTR_PROTECTED: addon.protected,
             ATTR_RATING: rating_security(addon),
-            ATTR_BOOT_CONFIG: addon.boot_config,
             ATTR_BOOT: addon.boot,
             ATTR_OPTIONS: addon.options,
             ATTR_SCHEMA: addon.schema_ui,
@@ -235,7 +215,6 @@ class APIAddons(CoreSysAttributes):
             ATTR_HOST_NETWORK: addon.host_network,
             ATTR_HOST_PID: addon.host_pid,
             ATTR_HOST_IPC: addon.host_ipc,
-            ATTR_HOST_UTS: addon.host_uts,
             ATTR_HOST_DBUS: addon.host_dbus,
             ATTR_PRIVILEGED: addon.privileged,
             ATTR_FULL_ACCESS: addon.with_full_access,
@@ -279,8 +258,6 @@ class APIAddons(CoreSysAttributes):
             ATTR_WATCHDOG: addon.watchdog,
             ATTR_DEVICES: addon.static_devices
             + [device.path for device in addon.devices],
-            ATTR_SYSTEM_MANAGED: addon.system_managed,
-            ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry,
         }
 
         return data
@@ -288,7 +265,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def options(self, request: web.Request) -> None:
         """Store user options for add-on."""
-        addon = self.get_addon_for_request(request)
+        addon = self._extract_addon(request)
 
         # Update secrets for validation
         await self.sys_homeassistant.secrets.reload()
@@ -303,10 +280,6 @@ class APIAddons(CoreSysAttributes):
         if ATTR_OPTIONS in body:
             addon.options = body[ATTR_OPTIONS]
         if ATTR_BOOT in body:
-            if addon.boot_config == AddonBootConfig.MANUAL_ONLY:
-                raise APIError(
-                    f"Addon {addon.slug} boot option is set to {addon.boot_config} so it cannot be changed"
-                )
             addon.boot = body[ATTR_BOOT]
         if ATTR_AUTO_UPDATE in body:
             addon.auto_update = body[ATTR_AUTO_UPDATE]
@@ -322,26 +295,12 @@ class APIAddons(CoreSysAttributes):
         if ATTR_WATCHDOG in body:
             addon.watchdog = body[ATTR_WATCHDOG]
 
-        await addon.save_persist()
-
-    @api_process
-    async def sys_options(self, request: web.Request) -> None:
-        """Store system options for an add-on."""
-        addon = self.get_addon_for_request(request)
-
-        # Validate/Process Body
-        body = await api_validate(SCHEMA_SYS_OPTIONS, request)
-        if ATTR_SYSTEM_MANAGED in body:
-            addon.system_managed = body[ATTR_SYSTEM_MANAGED]
-        if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
-            addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
-
-        await addon.save_persist()
+        addon.save_persist()
 
     @api_process
     async def options_validate(self, request: web.Request) -> None:
         """Validate user options for add-on."""
-        addon = self.get_addon_for_request(request)
+        addon = self._extract_addon(request)
         data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}
 
         options = await request.json(loads=json_loads) or addon.options
@@ -383,7 +342,7 @@ class APIAddons(CoreSysAttributes):
         slug: str = request.match_info.get("addon")
         if slug != "self":
             raise APIForbidden("This can be only read by the Add-on itself!")
-        addon = self.get_addon_for_request(request)
+        addon = self._extract_addon(request)
 
         # Lookup/reload secrets
         await self.sys_homeassistant.secrets.reload()
@@ -395,19 +354,19 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def security(self, request: web.Request) -> None:
         """Store security options for add-on."""
-        addon = self.get_addon_for_request(request)
+        addon = self._extract_addon(request)
         body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)
 
         if ATTR_PROTECTED in body:
             _LOGGER.warning("Changing protected flag for %s!", addon.slug)
             addon.protected = body[ATTR_PROTECTED]
 
-        await addon.save_persist()
+        addon.save_persist()
 
     @api_process
     async def stats(self, request: web.Request) -> dict[str, Any]:
         """Return resource information."""
-        addon = self.get_addon_for_request(request)
+        addon = self._extract_addon(request)
 
         stats: DockerStats = await addon.stats()
 
@@ -423,47 +382,45 @@ class APIAddons(CoreSysAttributes):
         }
 
     @api_process
-    async def uninstall(self, request: web.Request) -> Awaitable[None]:
+    def uninstall(self, request: web.Request) -> Awaitable[None]:
         """Uninstall add-on."""
-        addon = self.get_addon_for_request(request)
-        body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request)
-        return await asyncio.shield(
-            self.sys_addons.uninstall(
-                addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
-            )
-        )
+        addon = self._extract_addon(request)
+        return asyncio.shield(addon.uninstall())
 
     @api_process
-    async def start(self, request: web.Request) -> None:
+    def start(self, request: web.Request) -> Awaitable[None]:
         """Start add-on."""
-        addon = self.get_addon_for_request(request)
-        if start_task := await asyncio.shield(addon.start()):
-            await start_task
+        addon = self._extract_addon(request)
+        return asyncio.shield(addon.start())
 
     @api_process
     def stop(self, request: web.Request) -> Awaitable[None]:
         """Stop add-on."""
-        addon = self.get_addon_for_request(request)
+        addon = self._extract_addon(request)
         return asyncio.shield(addon.stop())
 
     @api_process
-    async def restart(self, request: web.Request) -> None:
+    def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart add-on."""
-        addon: Addon = self.get_addon_for_request(request)
-        if start_task := await asyncio.shield(addon.restart()):
-            await start_task
+        addon: Addon = self._extract_addon(request)
+        return asyncio.shield(addon.restart())
 
     @api_process
-    async def rebuild(self, request: web.Request) -> None:
+    def rebuild(self, request: web.Request) -> Awaitable[None]:
         """Rebuild local build add-on."""
-        addon = self.get_addon_for_request(request)
-        if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)):
-            await start_task
+        addon = self._extract_addon(request)
+        return asyncio.shield(addon.rebuild())
+
+    @api_process_raw(CONTENT_TYPE_BINARY)
+    def logs(self, request: web.Request) -> Awaitable[bytes]:
+        """Return logs from add-on."""
+        addon = self._extract_addon(request)
+        return addon.logs()
 
     @api_process
     async def stdin(self, request: web.Request) -> None:
         """Write to stdin of add-on."""
-        addon = self.get_addon_for_request(request)
+        addon = self._extract_addon(request)
         if not addon.with_stdin:
             raise APIError(f"STDIN not supported the {addon.slug} add-on")
 
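Aside: the start/restart/rebuild handlers above differ in how they use `asyncio.shield`. One branch returns the shielded awaitable for the `@api_process` decorator to await; the other awaits it directly and then awaits any follow-up task the operation hands back (the walrus form). A small sketch of the latter, with made-up names:

import asyncio


async def begin_start() -> asyncio.Task | None:
    """Kick off a start; return a task that finishes when it is fully up."""
    return asyncio.create_task(asyncio.sleep(0.1))


async def handler() -> None:
    # shield() keeps the start running even if the HTTP request is cancelled;
    # the returned follow-up task (if any) is awaited separately.
    if start_task := await asyncio.shield(begin_start()):
        await start_task


asyncio.run(handler())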
@@ -1,12 +1,10 @@
 """Init file for Supervisor Audio RESTful API."""
-
 import asyncio
-from collections.abc import Awaitable
-from dataclasses import asdict
 import logging
-from typing import Any
+from typing import Any, Awaitable
 
 from aiohttp import web
+import attr
 import voluptuous as vol
 
 from ..const import (
@@ -36,7 +34,8 @@ from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..host.sound import StreamType
 from ..validate import version_tag
-from .utils import api_process, api_validate
+from .const import CONTENT_TYPE_BINARY
+from .utils import api_process, api_process_raw, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -76,11 +75,15 @@ class APIAudio(CoreSysAttributes):
             ATTR_UPDATE_AVAILABLE: self.sys_plugins.audio.need_update,
             ATTR_HOST: str(self.sys_docker.network.audio),
             ATTR_AUDIO: {
-                ATTR_CARD: [asdict(card) for card in self.sys_host.sound.cards],
-                ATTR_INPUT: [asdict(stream) for stream in self.sys_host.sound.inputs],
-                ATTR_OUTPUT: [asdict(stream) for stream in self.sys_host.sound.outputs],
+                ATTR_CARD: [attr.asdict(card) for card in self.sys_host.sound.cards],
+                ATTR_INPUT: [
+                    attr.asdict(stream) for stream in self.sys_host.sound.inputs
+                ],
+                ATTR_OUTPUT: [
+                    attr.asdict(stream) for stream in self.sys_host.sound.outputs
+                ],
                 ATTR_APPLICATION: [
-                    asdict(stream) for stream in self.sys_host.sound.applications
+                    attr.asdict(stream) for stream in self.sys_host.sound.applications
                 ],
             },
         }
@@ -111,6 +114,11 @@ class APIAudio(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.audio.update(version))
 
+    @api_process_raw(CONTENT_TYPE_BINARY)
+    def logs(self, request: web.Request) -> Awaitable[bytes]:
+        """Return Audio Docker logs."""
+        return self.sys_plugins.audio.logs()
+
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart Audio plugin."""
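Aside: the audio info payload is the same data serialized two ways — one branch models cards/streams as attrs classes dumped with `attr.asdict`, the other as stdlib dataclasses dumped with `dataclasses.asdict`. A tiny sketch of the dataclass form (the `AudioCard` type is invented for illustration):

from dataclasses import asdict, dataclass


@dataclass
class AudioCard:
    name: str
    index: int


print(asdict(AudioCard(name="Built-in", index=0)))
# -> {'name': 'Built-in', 'index': 0}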
@@ -1,8 +1,6 @@
 """Init file for Supervisor auth/SSO RESTful API."""
-
 import asyncio
 import logging
-from typing import Any
 
 from aiohttp import BasicAuth, web
 from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
@@ -10,19 +8,10 @@ from aiohttp.web_exceptions import HTTPUnauthorized
 import voluptuous as vol
 
 from ..addons.addon import Addon
-from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
+from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIForbidden
-from ..utils.json import json_loads
-from .const import (
-    ATTR_GROUP_IDS,
-    ATTR_IS_ACTIVE,
-    ATTR_IS_OWNER,
-    ATTR_LOCAL_ONLY,
-    ATTR_USERS,
-    CONTENT_TYPE_JSON,
-    CONTENT_TYPE_URL,
-)
+from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
 from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -78,7 +67,7 @@ class APIAuth(CoreSysAttributes):
 
         # Json
         if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
-            data = await request.json(loads=json_loads)
+            data = await request.json()
             return await self._process_dict(request, addon, data)
 
         # URL encoded
@@ -99,22 +88,4 @@ class APIAuth(CoreSysAttributes):
     @api_process
     async def cache(self, request: web.Request) -> None:
         """Process cache reset request."""
-        await self.sys_auth.reset_data()
-
-    @api_process
-    async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
-        """List users on the Home Assistant instance."""
-        return {
-            ATTR_USERS: [
-                {
-                    ATTR_USERNAME: user[ATTR_USERNAME],
-                    ATTR_NAME: user[ATTR_NAME],
-                    ATTR_IS_OWNER: user[ATTR_IS_OWNER],
-                    ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE],
-                    ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY],
-                    ATTR_GROUP_IDS: user[ATTR_GROUP_IDS],
-                }
-                for user in await self.sys_auth.list_users()
-                if user[ATTR_USERNAME]
-            ]
-        }
+        self.sys_auth.reset_data()
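Aside: the auth handler above dispatches on Content-Type — JSON bodies go through `request.json()` (optionally with a custom `loads`), anything else is treated as form/URL-encoded. A self-contained sketch of that dispatch (endpoint and response shape are invented, not Supervisor's):

from aiohttp import web
from aiohttp.hdrs import CONTENT_TYPE


async def auth(request: web.Request) -> web.Response:
    if request.headers.get(CONTENT_TYPE) == "application/json":
        data = await request.json()          # JSON body -> dict
    else:
        data = dict(await request.post())    # form / URL-encoded body
    return web.json_response({"username": data.get("username")})


app = web.Application()
app.add_routes([web.post("/auth", auth)])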
@@ -1,145 +1,73 @@
 """Backups RESTful API."""
-
-from __future__ import annotations
-
 import asyncio
-from collections.abc import Callable
-import errno
-from io import IOBase
 import logging
 from pathlib import Path
 import re
 from tempfile import TemporaryDirectory
-from typing import Any
 
 from aiohttp import web
 from aiohttp.hdrs import CONTENT_DISPOSITION
 import voluptuous as vol
-from voluptuous.humanize import humanize_error
 
-from ..backups.backup import Backup
-from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE
-from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
+from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT
 from ..const import (
     ATTR_ADDONS,
     ATTR_BACKUPS,
     ATTR_COMPRESSED,
     ATTR_CONTENT,
     ATTR_DATE,
-    ATTR_DAYS_UNTIL_STALE,
-    ATTR_EXTRA,
-    ATTR_FILENAME,
     ATTR_FOLDERS,
     ATTR_HOMEASSISTANT,
-    ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
-    ATTR_JOB_ID,
-    ATTR_LOCATION,
     ATTR_NAME,
     ATTR_PASSWORD,
-    ATTR_PATH,
     ATTR_PROTECTED,
     ATTR_REPOSITORIES,
     ATTR_SIZE,
-    ATTR_SIZE_BYTES,
     ATTR_SLUG,
-    ATTR_SUPERVISOR_VERSION,
-    ATTR_TIMEOUT,
     ATTR_TYPE,
     ATTR_VERSION,
-    REQUEST_FROM,
-    BusEvent,
-    CoreState,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, APIForbidden, APINotFound
-from ..jobs import JobSchedulerOptions
-from ..mounts.const import MountUsage
-from ..resolution.const import UnhealthyReason
-from .const import (
-    ATTR_ADDITIONAL_LOCATIONS,
-    ATTR_BACKGROUND,
-    ATTR_LOCATION_ATTRIBUTES,
-    ATTR_LOCATIONS,
-    CONTENT_TYPE_TAR,
-)
+from ..exceptions import APIError
+from .const import CONTENT_TYPE_TAR
 from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
-ALL_ADDONS_FLAG = "ALL"
-
-LOCATION_LOCAL = ".local"
-
 RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
-RE_BACKUP_FILENAME = re.compile(r"^[^\\\/]+\.tar$")
 
 # Backwards compatible
 # Remove: 2022.08
 _ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]
 
 
-def _ensure_list(item: Any) -> list:
-    """Ensure value is a list."""
-    if not isinstance(item, list):
-        return [item]
-    return item
-
-
-def _convert_local_location(item: str | None) -> str | None:
-    """Convert local location value."""
-    if item in {LOCATION_LOCAL, ""}:
-        return None
-    return item
-
-
 # pylint: disable=no-value-for-parameter
-SCHEMA_FOLDERS = vol.All([vol.In(_ALL_FOLDERS)], vol.Unique())
-SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
-SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())
-
-SCHEMA_RESTORE_FULL = vol.Schema(
+SCHEMA_RESTORE_PARTIAL = vol.Schema(
     {
         vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
-        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
-        vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION,
+        vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
+        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
+        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
     }
 )
 
-SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
-    {
-        vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
-        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
-        vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
-    }
-)
+SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)})
 
 SCHEMA_BACKUP_FULL = vol.Schema(
     {
         vol.Optional(ATTR_NAME): str,
-        vol.Optional(ATTR_FILENAME): vol.Match(RE_BACKUP_FILENAME),
         vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
         vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
-        vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST,
-        vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
-        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
-        vol.Optional(ATTR_EXTRA): dict,
     }
 )
 
 SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
     {
-        vol.Optional(ATTR_ADDONS): vol.Or(
-            ALL_ADDONS_FLAG, vol.All([str], vol.Unique())
-        ),
-        vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
+        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
+        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
         vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
     }
 )
 
-SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale})
-SCHEMA_FREEZE = vol.Schema({vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1))})
-SCHEMA_REMOVE = vol.Schema({vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST})
-
 
 class APIBackups(CoreSysAttributes):
     """Handle RESTful API for backups functions."""
@@ -148,48 +76,30 @@ class APIBackups(CoreSysAttributes):
         """Return backup, throw an exception if it doesn't exist."""
         backup = self.sys_backups.get(request.match_info.get("slug"))
         if not backup:
-            raise APINotFound("Backup does not exist")
+            raise APIError("Backup does not exist")
         return backup
 
-    def _make_location_attributes(self, backup: Backup) -> dict[str, dict[str, Any]]:
-        """Make location attributes dictionary."""
-        return {
-            loc if loc else LOCATION_LOCAL: {
-                ATTR_PROTECTED: backup.all_locations[loc][ATTR_PROTECTED],
-                ATTR_SIZE_BYTES: backup.all_locations[loc][ATTR_SIZE_BYTES],
-            }
-            for loc in backup.locations
-        }
-
-    def _list_backups(self):
-        """Return list of backups."""
-        return [
-            {
-                ATTR_SLUG: backup.slug,
-                ATTR_NAME: backup.name,
-                ATTR_DATE: backup.date,
-                ATTR_TYPE: backup.sys_type,
-                ATTR_SIZE: backup.size,
-                ATTR_SIZE_BYTES: backup.size_bytes,
-                ATTR_LOCATION: backup.location,
-                ATTR_LOCATIONS: backup.locations,
-                ATTR_PROTECTED: backup.protected,
-                ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
-                ATTR_COMPRESSED: backup.compressed,
-                ATTR_CONTENT: {
-                    ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
-                    ATTR_ADDONS: backup.addon_list,
-                    ATTR_FOLDERS: backup.folders,
-                },
-            }
-            for backup in self.sys_backups.list_backups
-            if backup.location != LOCATION_CLOUD_BACKUP
-        ]
-
     @api_process
     async def list(self, request):
         """Return backup list."""
-        data_backups = self._list_backups()
+        data_backups = []
+        for backup in self.sys_backups.list_backups:
+            data_backups.append(
+                {
+                    ATTR_SLUG: backup.slug,
+                    ATTR_NAME: backup.name,
+                    ATTR_DATE: backup.date,
+                    ATTR_TYPE: backup.sys_type,
+                    ATTR_SIZE: backup.size,
+                    ATTR_PROTECTED: backup.protected,
+                    ATTR_COMPRESSED: backup.compressed,
+                    ATTR_CONTENT: {
+                        ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
+                        ATTR_ADDONS: backup.addon_list,
+                        ATTR_FOLDERS: backup.folders,
+                    },
+                }
+            )
 
         if request.path == "/snapshots":
             # Kept for backwards compability
@@ -198,31 +108,13 @@ class APIBackups(CoreSysAttributes):
         return {ATTR_BACKUPS: data_backups}
 
     @api_process
-    async def info(self, request):
-        """Return backup list and manager info."""
-        return {
-            ATTR_BACKUPS: self._list_backups(),
-            ATTR_DAYS_UNTIL_STALE: self.sys_backups.days_until_stale,
-        }
-
-    @api_process
-    async def options(self, request):
-        """Set backup manager options."""
-        body = await api_validate(SCHEMA_OPTIONS, request)
-
-        if ATTR_DAYS_UNTIL_STALE in body:
-            self.sys_backups.days_until_stale = body[ATTR_DAYS_UNTIL_STALE]
-
-        await self.sys_backups.save_data()
-
-    @api_process
-    async def reload(self, _):
+    async def reload(self, request):
         """Reload backup list."""
         await asyncio.shield(self.sys_backups.reload())
         return True
 
     @api_process
-    async def backup_info(self, request):
+    async def info(self, request):
         """Return backup info."""
         backup = self._extract_slug(request)
 
@@ -243,328 +135,91 @@
|
|||||||
ATTR_NAME: backup.name,
|
ATTR_NAME: backup.name,
|
||||||
ATTR_DATE: backup.date,
|
ATTR_DATE: backup.date,
|
||||||
ATTR_SIZE: backup.size,
|
ATTR_SIZE: backup.size,
|
||||||
ATTR_SIZE_BYTES: backup.size_bytes,
|
|
||||||
ATTR_COMPRESSED: backup.compressed,
|
ATTR_COMPRESSED: backup.compressed,
|
||||||
ATTR_PROTECTED: backup.protected,
|
ATTR_PROTECTED: backup.protected,
|
||||||
ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
|
|
||||||
ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
|
|
||||||
ATTR_HOMEASSISTANT: backup.homeassistant_version,
|
ATTR_HOMEASSISTANT: backup.homeassistant_version,
|
||||||
ATTR_LOCATION: backup.location,
|
|
||||||
ATTR_LOCATIONS: backup.locations,
|
|
||||||
ATTR_ADDONS: data_addons,
|
ATTR_ADDONS: data_addons,
|
||||||
ATTR_REPOSITORIES: backup.repositories,
|
ATTR_REPOSITORIES: backup.repositories,
|
||||||
ATTR_FOLDERS: backup.folders,
|
ATTR_FOLDERS: backup.folders,
|
||||||
ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
|
|
||||||
ATTR_EXTRA: backup.extra,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
def _location_to_mount(self, location: str | None) -> LOCATION_TYPE:
|
|
||||||
"""Convert a single location to a mount if possible."""
|
|
||||||
if not location or location == LOCATION_CLOUD_BACKUP:
|
|
||||||
return location
|
|
||||||
|
|
||||||
mount = self.sys_mounts.get(location)
|
|
||||||
if mount.usage != MountUsage.BACKUP:
|
|
||||||
raise APIError(
|
|
||||||
f"Mount {mount.name} is not used for backups, cannot backup to there"
|
|
||||||
)
|
|
||||||
|
|
||||||
return mount
|
|
||||||
|
|
||||||
def _location_field_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
|
|
||||||
"""Change location field to mount if necessary."""
|
|
||||||
body[ATTR_LOCATION] = self._location_to_mount(body.get(ATTR_LOCATION))
|
|
||||||
return body
|
|
||||||
|
|
||||||
def _validate_cloud_backup_location(
|
|
||||||
self, request: web.Request, location: list[str | None] | str | None
|
|
||||||
) -> None:
|
|
||||||
"""Cloud backup location is only available to Home Assistant."""
|
|
||||||
if not isinstance(location, list):
|
|
||||||
location = [location]
|
|
||||||
if (
|
|
||||||
LOCATION_CLOUD_BACKUP in location
|
|
||||||
and request.get(REQUEST_FROM) != self.sys_homeassistant
|
|
||||||
):
|
|
||||||
raise APIForbidden(
|
|
||||||
f"Location {LOCATION_CLOUD_BACKUP} is only available for Home Assistant"
|
|
||||||
)
|
|
||||||
|
|
||||||
async def _background_backup_task(
|
|
||||||
self, backup_method: Callable, *args, **kwargs
|
|
||||||
) -> tuple[asyncio.Task, str]:
|
|
||||||
"""Start backup task in background and return task and job ID."""
|
|
||||||
event = asyncio.Event()
|
|
||||||
job, backup_task = self.sys_jobs.schedule_job(
|
|
||||||
backup_method, JobSchedulerOptions(), *args, **kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
async def release_on_freeze(new_state: CoreState):
|
|
||||||
if new_state == CoreState.FREEZE:
|
|
||||||
event.set()
|
|
||||||
|
|
||||||
# Wait for system to get into freeze state before returning
|
|
||||||
# If the backup fails validation it will raise before getting there
|
|
||||||
listener = self.sys_bus.register_event(
|
|
||||||
BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
event_task = self.sys_create_task(event.wait())
|
|
||||||
_, pending = await asyncio.wait(
|
|
||||||
(
|
|
||||||
backup_task,
|
|
||||||
event_task,
|
|
||||||
),
|
|
||||||
return_when=asyncio.FIRST_COMPLETED,
|
|
||||||
)
|
|
||||||
# It seems backup returned early (error or something), make sure to cancel
|
|
||||||
# the event task to avoid "Task was destroyed but it is pending!" errors.
|
|
||||||
if event_task in pending:
|
|
||||||
event_task.cancel()
|
|
||||||
return (backup_task, job.uuid)
|
|
||||||
finally:
|
|
||||||
self.sys_bus.remove_listener(listener)
|
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def backup_full(self, request: web.Request):
|
async def backup_full(self, request):
|
||||||
"""Create full backup."""
|
"""Create full backup."""
|
||||||
body = await api_validate(SCHEMA_BACKUP_FULL, request)
|
body = await api_validate(SCHEMA_BACKUP_FULL, request)
|
||||||
locations: list[LOCATION_TYPE] | None = None
|
backup = await asyncio.shield(self.sys_backups.do_backup_full(**body))
|
||||||
|
|
||||||
if ATTR_LOCATION in body:
|
if backup:
|
||||||
location_names: list[str | None] = body.pop(ATTR_LOCATION)
|
return {ATTR_SLUG: backup.slug}
|
||||||
self._validate_cloud_backup_location(request, location_names)
|
return False
|
||||||
|
|
||||||
locations = [
|
@api_process
|
||||||
self._location_to_mount(location) for location in location_names
|
async def backup_partial(self, request):
|
||||||
]
|
"""Create a partial backup."""
|
||||||
body[ATTR_LOCATION] = locations.pop(0)
|
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
|
||||||
if locations:
|
backup = await asyncio.shield(self.sys_backups.do_backup_partial(**body))
|
||||||
body[ATTR_ADDITIONAL_LOCATIONS] = locations
|
|
||||||
|
if backup:
|
||||||
background = body.pop(ATTR_BACKGROUND)
|
return {ATTR_SLUG: backup.slug}
|
||||||
backup_task, job_id = await self._background_backup_task(
|
return False
|
||||||
self.sys_backups.do_backup_full, **body
|
|
||||||
)
|
@api_process
|
||||||
|
async def restore_full(self, request):
|
||||||
if background and not backup_task.done():
|
"""Full restore of a backup."""
|
||||||
return {ATTR_JOB_ID: job_id}
|
backup = self._extract_slug(request)
|
||||||
|
body = await api_validate(SCHEMA_RESTORE_FULL, request)
|
||||||
backup: Backup = await backup_task
|
|
||||||
if backup:
|
return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body))
|
||||||
return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
|
|
||||||
raise APIError(
|
@api_process
|
||||||
f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
|
async def restore_partial(self, request):
|
||||||
job_id=job_id,
|
"""Partial restore a backup."""
|
||||||
)
|
backup = self._extract_slug(request)
|
||||||
|
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
|
||||||
@api_process
|
|
||||||
async def backup_partial(self, request: web.Request):
|
return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))
|
||||||
"""Create a partial backup."""
|
|
||||||
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
|
@api_process
|
||||||
locations: list[LOCATION_TYPE] | None = None
|
async def remove(self, request):
|
||||||
|
"""Remove a backup."""
|
||||||
if ATTR_LOCATION in body:
|
backup = self._extract_slug(request)
|
||||||
location_names: list[str | None] = body.pop(ATTR_LOCATION)
|
return self.sys_backups.remove(backup)
|
||||||
self._validate_cloud_backup_location(request, location_names)
|
|
||||||
|
async def download(self, request):
|
||||||
locations = [
|
"""Download a backup file."""
|
||||||
self._location_to_mount(location) for location in location_names
|
backup = self._extract_slug(request)
|
||||||
]
|
|
||||||
body[ATTR_LOCATION] = locations.pop(0)
|
_LOGGER.info("Downloading backup %s", backup.slug)
|
||||||
if locations:
|
response = web.FileResponse(backup.tarfile)
|
||||||
body[ATTR_ADDITIONAL_LOCATIONS] = locations
|
response.content_type = CONTENT_TYPE_TAR
|
||||||
|
response.headers[
|
||||||
if body.get(ATTR_ADDONS) == ALL_ADDONS_FLAG:
|
CONTENT_DISPOSITION
|
||||||
body[ATTR_ADDONS] = list(self.sys_addons.local)
|
] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
|
||||||
|
return response
|
||||||
background = body.pop(ATTR_BACKGROUND)
|
|
||||||
backup_task, job_id = await self._background_backup_task(
|
@api_process
|
||||||
self.sys_backups.do_backup_partial, **body
|
async def upload(self, request):
|
||||||
)
|
"""Upload a backup file."""
|
||||||
|
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
|
||||||
if background and not backup_task.done():
|
tar_file = Path(temp_dir, "backup.tar")
|
||||||
return {ATTR_JOB_ID: job_id}
|
reader = await request.multipart()
|
||||||
|
contents = await reader.next()
|
||||||
backup: Backup = await backup_task
|
try:
|
||||||
if backup:
|
with tar_file.open("wb") as backup:
|
||||||
return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
|
while True:
|
||||||
raise APIError(
|
chunk = await contents.read_chunk()
|
||||||
f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
|
if not chunk:
|
||||||
job_id=job_id,
|
break
|
||||||
)
|
backup.write(chunk)
|
||||||
|
|
||||||
@api_process
|
except OSError as err:
|
||||||
async def restore_full(self, request: web.Request):
|
_LOGGER.error("Can't write new backup file: %s", err)
|
||||||
"""Full restore of a backup."""
|
return False
|
||||||
backup = self._extract_slug(request)
|
|
||||||
body = await api_validate(SCHEMA_RESTORE_FULL, request)
|
except asyncio.CancelledError:
|
||||||
self._validate_cloud_backup_location(
|
return False
|
||||||
request, body.get(ATTR_LOCATION, backup.location)
|
|
||||||
)
|
backup = await asyncio.shield(self.sys_backups.import_backup(tar_file))
|
||||||
background = body.pop(ATTR_BACKGROUND)
|
|
||||||
restore_task, job_id = await self._background_backup_task(
|
|
||||||
self.sys_backups.do_restore_full, backup, **body
|
|
||||||
)
|
|
||||||
|
|
||||||
if background and not restore_task.done() or await restore_task:
|
|
||||||
return {ATTR_JOB_ID: job_id}
|
|
||||||
raise APIError(
|
|
||||||
f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
|
|
||||||
job_id=job_id,
|
|
||||||
)
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def restore_partial(self, request: web.Request):
|
|
||||||
"""Partial restore a backup."""
|
|
||||||
backup = self._extract_slug(request)
|
|
||||||
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
|
|
||||||
self._validate_cloud_backup_location(
|
|
||||||
request, body.get(ATTR_LOCATION, backup.location)
|
|
||||||
)
|
|
||||||
background = body.pop(ATTR_BACKGROUND)
|
|
||||||
restore_task, job_id = await self._background_backup_task(
|
|
||||||
self.sys_backups.do_restore_partial, backup, **body
|
|
||||||
)
|
|
||||||
|
|
||||||
if background and not restore_task.done() or await restore_task:
|
|
||||||
return {ATTR_JOB_ID: job_id}
|
|
||||||
raise APIError(
|
|
||||||
f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
|
|
||||||
job_id=job_id,
|
|
||||||
)
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def freeze(self, request: web.Request):
|
|
||||||
"""Initiate manual freeze for external backup."""
|
|
||||||
body = await api_validate(SCHEMA_FREEZE, request)
|
|
||||||
await asyncio.shield(self.sys_backups.freeze_all(**body))
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def thaw(self, request: web.Request):
|
|
||||||
"""Begin thaw after manual freeze."""
|
|
||||||
await self.sys_backups.thaw_all()
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def remove(self, request: web.Request):
|
|
||||||
"""Remove a backup."""
|
|
||||||
backup = self._extract_slug(request)
|
|
||||||
body = await api_validate(SCHEMA_REMOVE, request)
|
|
||||||
locations: list[LOCATION_TYPE] | None = None
|
|
||||||
|
|
||||||
if ATTR_LOCATION in body:
|
|
||||||
self._validate_cloud_backup_location(request, body[ATTR_LOCATION])
|
|
||||||
locations = [self._location_to_mount(name) for name in body[ATTR_LOCATION]]
|
|
||||||
else:
|
|
||||||
self._validate_cloud_backup_location(request, backup.location)
|
|
||||||
|
|
||||||
await self.sys_backups.remove(backup, locations=locations)
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def download(self, request: web.Request):
|
|
||||||
"""Download a backup file."""
|
|
||||||
backup = self._extract_slug(request)
|
|
||||||
# Query will give us '' for /backups, convert value to None
|
|
||||||
location = _convert_local_location(
|
|
||||||
request.query.get(ATTR_LOCATION, backup.location)
|
|
||||||
)
|
|
||||||
self._validate_cloud_backup_location(request, location)
|
|
||||||
if location not in backup.all_locations:
|
|
||||||
raise APIError(f"Backup {backup.slug} is not in location {location}")
|
|
||||||
|
|
||||||
_LOGGER.info("Downloading backup %s", backup.slug)
|
|
||||||
filename = backup.all_locations[location][ATTR_PATH]
|
|
||||||
# If the file is missing, return 404 and trigger reload of location
|
|
||||||
if not filename.is_file():
|
|
||||||
self.sys_create_task(self.sys_backups.reload(location))
|
|
||||||
return web.Response(status=404)
|
|
||||||
|
|
||||||
response = web.FileResponse(filename)
|
|
||||||
response.content_type = CONTENT_TYPE_TAR
|
|
||||||
|
|
||||||
download_filename = filename.name
|
|
||||||
if download_filename == f"{backup.slug}.tar":
|
|
||||||
download_filename = f"{RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
|
|
||||||
response.headers[CONTENT_DISPOSITION] = (
|
|
||||||
f"attachment; filename={download_filename}"
|
|
||||||
)
|
|
||||||
return response
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def upload(self, request: web.Request):
|
|
||||||
"""Upload a backup file."""
|
|
||||||
location: LOCATION_TYPE = None
|
|
||||||
locations: list[LOCATION_TYPE] | None = None
|
|
||||||
tmp_path = self.sys_config.path_tmp
|
|
||||||
if ATTR_LOCATION in request.query:
|
|
||||||
location_names: list[str] = request.query.getall(ATTR_LOCATION)
|
|
||||||
self._validate_cloud_backup_location(request, location_names)
|
|
||||||
# Convert empty string to None if necessary
|
|
||||||
locations = [
|
|
||||||
self._location_to_mount(location)
|
|
||||||
if _convert_local_location(location)
|
|
||||||
else None
|
|
||||||
for location in location_names
|
|
||||||
]
|
|
||||||
location = locations.pop(0)
|
|
||||||
|
|
||||||
if location and location != LOCATION_CLOUD_BACKUP:
|
|
||||||
tmp_path = location.local_where
|
|
||||||
|
|
||||||
filename: str | None = None
|
|
||||||
if ATTR_FILENAME in request.query:
|
|
||||||
filename = request.query.get(ATTR_FILENAME)
|
|
||||||
try:
|
|
||||||
vol.Match(RE_BACKUP_FILENAME)(filename)
|
|
||||||
except vol.Invalid as ex:
|
|
||||||
raise APIError(humanize_error(filename, ex)) from None
|
|
||||||
|
|
||||||
temp_dir: TemporaryDirectory | None = None
|
|
||||||
backup_file_stream: IOBase | None = None
|
|
||||||
|
|
||||||
def open_backup_file() -> Path:
|
|
||||||
nonlocal temp_dir, backup_file_stream
|
|
||||||
temp_dir = TemporaryDirectory(dir=tmp_path.as_posix())
|
|
||||||
tar_file = Path(temp_dir.name, "backup.tar")
|
|
||||||
backup_file_stream = tar_file.open("wb")
|
|
||||||
return tar_file
|
|
||||||
|
|
||||||
def close_backup_file() -> None:
|
|
||||||
if backup_file_stream:
|
|
||||||
backup_file_stream.close()
|
|
||||||
if temp_dir:
|
|
||||||
temp_dir.cleanup()
|
|
||||||
|
|
||||||
try:
|
|
||||||
reader = await request.multipart()
|
|
||||||
contents = await reader.next()
|
|
||||||
tar_file = await self.sys_run_in_executor(open_backup_file)
|
|
||||||
while chunk := await contents.read_chunk(size=2**16):
|
|
||||||
await self.sys_run_in_executor(backup_file_stream.write, chunk)
|
|
||||||
|
|
||||||
backup = await asyncio.shield(
|
|
||||||
self.sys_backups.import_backup(
|
|
||||||
tar_file,
|
|
||||||
filename,
|
|
||||||
location=location,
|
|
||||||
additional_locations=locations,
|
|
||||||
)
|
|
||||||
)
|
|
||||||
except OSError as err:
|
|
||||||
if err.errno == errno.EBADMSG and location in {
|
|
||||||
LOCATION_CLOUD_BACKUP,
|
|
||||||
None,
|
|
||||||
}:
|
|
||||||
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
|
|
||||||
_LOGGER.error("Can't write new backup file: %s", err)
|
|
||||||
return False
|
|
||||||
|
|
||||||
except asyncio.CancelledError:
|
|
||||||
return False
|
|
||||||
|
|
||||||
finally:
|
|
||||||
if temp_dir or backup:
|
|
||||||
await self.sys_run_in_executor(close_backup_file)
|
|
||||||
|
|
||||||
if backup:
|
if backup:
|
||||||
return {ATTR_SLUG: backup.slug}
|
return {ATTR_SLUG: backup.slug}
|
||||||
|
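
The hunks above also strip out the newer branch's `_background_backup_task` helper, whose core trick is plain asyncio: start the job, then return as soon as either the job finishes or an external signal (the Supervisor reaching the FREEZE state) fires. A minimal, self-contained sketch of that wait-for-first pattern follows; the names here are illustrative, not Supervisor API:

```python
import asyncio


async def start_and_wait_ready(work, ready: asyncio.Event) -> asyncio.Task:
    """Start `work` and return once it is underway or already finished.

    Mirrors the deleted _background_backup_task: wait for whichever comes
    first - the task completing (e.g. it failed validation early) or the
    external "ready" signal (Supervisor entering the FREEZE state).
    """
    work_task = asyncio.create_task(work())
    ready_task = asyncio.create_task(ready.wait())
    _, pending = await asyncio.wait(
        (work_task, ready_task), return_when=asyncio.FIRST_COMPLETED
    )
    # If the work returned first, cancel the signal waiter so it is not
    # destroyed while still pending.
    if ready_task in pending:
        ready_task.cancel()
    return work_task
```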
@@ -1,5 +1,4 @@
 """Init file for Supervisor HA cli RESTful API."""
-
 import asyncio
 import logging
 from typing import Any
@@ -1,82 +1,39 @@
 """Const for API."""

-from enum import StrEnum
-
 CONTENT_TYPE_BINARY = "application/octet-stream"
 CONTENT_TYPE_JSON = "application/json"
 CONTENT_TYPE_PNG = "image/png"
 CONTENT_TYPE_TAR = "application/tar"
 CONTENT_TYPE_TEXT = "text/plain"
 CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
-CONTENT_TYPE_X_LOG = "text/x-log"

 COOKIE_INGRESS = "ingress_session"

-ATTR_ADDITIONAL_LOCATIONS = "additional_locations"
-ATTR_AGENT_VERSION = "agent_version"
+HEADER_TOKEN_OLD = "X-Hassio-Key"
+HEADER_TOKEN = "X-Supervisor-Token"

 ATTR_APPARMOR_VERSION = "apparmor_version"
-ATTR_ATTRIBUTES = "attributes"
+ATTR_AGENT_VERSION = "agent_version"
 ATTR_AVAILABLE_UPDATES = "available_updates"
-ATTR_BACKGROUND = "background"
-ATTR_BOOT_CONFIG = "boot_config"
-ATTR_BOOT_SLOT = "boot_slot"
-ATTR_BOOT_SLOTS = "boot_slots"
 ATTR_BOOT_TIMESTAMP = "boot_timestamp"
-ATTR_BOOTS = "boots"
 ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
 ATTR_BROADCAST_MDNS = "broadcast_mdns"
-ATTR_BY_ID = "by_id"
-ATTR_CHILDREN = "children"
-ATTR_CONNECTION_BUS = "connection_bus"
 ATTR_DATA_DISK = "data_disk"
 ATTR_DEVICE = "device"
-ATTR_DEV_PATH = "dev_path"
-ATTR_DISKS = "disks"
-ATTR_DRIVES = "drives"
 ATTR_DT_SYNCHRONIZED = "dt_synchronized"
 ATTR_DT_UTC = "dt_utc"
-ATTR_EJECTABLE = "ejectable"
 ATTR_FALLBACK = "fallback"
-ATTR_FILESYSTEMS = "filesystems"
-ATTR_FORCE = "force"
-ATTR_GROUP_IDS = "group_ids"
-ATTR_IDENTIFIERS = "identifiers"
-ATTR_IS_ACTIVE = "is_active"
-ATTR_IS_OWNER = "is_owner"
-ATTR_JOBS = "jobs"
 ATTR_LLMNR = "llmnr"
 ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
-ATTR_LOCAL_ONLY = "local_only"
-ATTR_LOCATION_ATTRIBUTES = "location_attributes"
-ATTR_LOCATIONS = "locations"
 ATTR_MDNS = "mdns"
-ATTR_MODEL = "model"
-ATTR_MOUNTS = "mounts"
-ATTR_MOUNT_POINTS = "mount_points"
 ATTR_PANEL_PATH = "panel_path"
-ATTR_REMOVABLE = "removable"
-ATTR_REMOVE_CONFIG = "remove_config"
-ATTR_REVISION = "revision"
-ATTR_SAFE_MODE = "safe_mode"
-ATTR_SEAT = "seat"
 ATTR_SIGNED = "signed"
 ATTR_STARTUP_TIME = "startup_time"
-ATTR_STATUS = "status"
+ATTR_UPDATE_TYPE = "update_type"
+ATTR_USE_NTP = "use_ntp"
+ATTR_BY_ID = "by_id"
 ATTR_SUBSYSTEM = "subsystem"
 ATTR_SYSFS = "sysfs"
-ATTR_SYSTEM_HEALTH_LED = "system_health_led"
-ATTR_TIME_DETECTED = "time_detected"
-ATTR_UPDATE_TYPE = "update_type"
-ATTR_USAGE = "usage"
-ATTR_USE_NTP = "use_ntp"
-ATTR_USERS = "users"
-ATTR_USER_PATH = "user_path"
-ATTR_VENDOR = "vendor"
-ATTR_VIRTUALIZATION = "virtualization"
+ATTR_DEV_PATH = "dev_path"
+ATTR_ATTRIBUTES = "attributes"
+ATTR_CHILDREN = "children"


-class BootSlot(StrEnum):
-    """Boot slots used by HAOS."""
-
-    A = "A"
-    B = "B"
@@ -1,10 +1,6 @@
 """Init file for Supervisor network RESTful API."""
-
-import logging
-
 import voluptuous as vol

-from ..addons.addon import Addon
 from ..const import (
     ATTR_ADDON,
     ATTR_CONFIG,
@@ -13,18 +9,16 @@ from ..const import (
     ATTR_SERVICES,
     ATTR_UUID,
     REQUEST_FROM,
-    AddonState,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIForbidden, APINotFound
+from ..discovery.validate import valid_discovery_service
+from ..exceptions import APIError, APIForbidden
 from .utils import api_process, api_validate, require_home_assistant

-_LOGGER: logging.Logger = logging.getLogger(__name__)
-
 SCHEMA_DISCOVERY = vol.Schema(
     {
-        vol.Required(ATTR_SERVICE): str,
-        vol.Required(ATTR_CONFIG): dict,
+        vol.Required(ATTR_SERVICE): valid_discovery_service,
+        vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
     }
 )

@@ -36,25 +30,25 @@ class APIDiscovery(CoreSysAttributes):
         """Extract discovery message from URL."""
         message = self.sys_discovery.get(request.match_info.get("uuid"))
         if not message:
-            raise APINotFound("Discovery message not found")
+            raise APIError("Discovery message not found")
         return message

     @api_process
     @require_home_assistant
     async def list(self, request):
-        """Show registered and available services."""
+        """Show register services."""

         # Get available discovery
-        discovery = [
-            {
-                ATTR_ADDON: message.addon,
-                ATTR_SERVICE: message.service,
-                ATTR_UUID: message.uuid,
-                ATTR_CONFIG: message.config,
-            }
-            for message in self.sys_discovery.list_messages
-            if (addon := self.sys_addons.get(message.addon, local_only=True))
-            and addon.state == AddonState.STARTED
-        ]
+        discovery = []
+        for message in self.sys_discovery.list_messages:
+            discovery.append(
+                {
+                    ATTR_ADDON: message.addon,
+                    ATTR_SERVICE: message.service,
+                    ATTR_UUID: message.uuid,
+                    ATTR_CONFIG: message.config,
+                }
+            )

         # Get available services/add-ons
         services = {}
@@ -68,22 +62,14 @@ class APIDiscovery(CoreSysAttributes):
     async def set_discovery(self, request):
         """Write data into a discovery pipeline."""
         body = await api_validate(SCHEMA_DISCOVERY, request)
-        addon: Addon = request[REQUEST_FROM]
-        service = body[ATTR_SERVICE]
+        addon = request[REQUEST_FROM]

         # Access?
         if body[ATTR_SERVICE] not in addon.discovery:
-            _LOGGER.error(
-                "Add-on %s attempted to send discovery for service %s which is not listed in its config. Please report this to the maintainer of the add-on",
-                addon.name,
-                service,
-            )
-            raise APIForbidden(
-                "Add-ons must list services they provide via discovery in their config!"
-            )
+            raise APIForbidden("Can't use discovery!")

         # Process discovery message
-        message = await self.sys_discovery.send(addon, **body)
+        message = self.sys_discovery.send(addon, **body)

         return {ATTR_UUID: message.uuid}

@@ -110,5 +96,5 @@ class APIDiscovery(CoreSysAttributes):
         if message.addon != addon.slug:
             raise APIForbidden("Can't remove discovery message")

-        await self.sys_discovery.remove(message)
+        self.sys_discovery.remove(message)
         return True
@@ -1,9 +1,7 @@
 """Init file for Supervisor DNS RESTful API."""
-
 import asyncio
-from collections.abc import Awaitable
 import logging
-from typing import Any
+from typing import Any, Awaitable

 from aiohttp import web
 import voluptuous as vol
@@ -27,8 +25,8 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import dns_server_list, version_tag
-from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS
-from .utils import api_process, api_validate
+from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY
+from .utils import api_process, api_process_raw, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -78,7 +76,7 @@ class APICoreDNS(CoreSysAttributes):
         if restart_required:
             self.sys_create_task(self.sys_plugins.dns.restart())

-        await self.sys_plugins.dns.save_data()
+        self.sys_plugins.dns.save_data()

     @api_process
     async def stats(self, request: web.Request) -> dict[str, Any]:
@@ -106,6 +104,11 @@ class APICoreDNS(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.dns.update(version))

+    @api_process_raw(CONTENT_TYPE_BINARY)
+    def logs(self, request: web.Request) -> Awaitable[bytes]:
+        """Return DNS Docker logs."""
+        return self.sys_plugins.dns.logs()
+
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart CoreDNS plugin."""
@@ -1,5 +1,4 @@
 """Init file for Supervisor Home Assistant RESTful API."""
-
 import logging
 from typing import Any

@@ -16,7 +15,6 @@ from ..const import (
     ATTR_VERSION,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APINotFound
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -53,17 +51,14 @@ class APIDocker(CoreSysAttributes):
         for hostname, registry in body.items():
             self.sys_docker.config.registries[hostname] = registry

-        await self.sys_docker.config.save_data()
+        self.sys_docker.config.save_data()

     @api_process
     async def remove_registry(self, request: web.Request):
         """Delete a docker registry."""
         hostname = request.match_info.get(ATTR_HOSTNAME)
-        if hostname not in self.sys_docker.config.registries:
-            raise APINotFound(f"Hostname {hostname} does not exist in registries")
-
         del self.sys_docker.config.registries[hostname]
-        await self.sys_docker.config.save_data()
+        self.sys_docker.config.save_data()

     @api_process
     async def info(self, request: web.Request):
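
A recurring change across these hunks is `save_data()` going from a plain synchronous call on the old branch to `await ...save_data()` on the newer one. One plausible reading (an assumption on my part, not something this diff states) is that the blocking file write was moved off the event loop into an executor; a sketch of that shape:

```python
import asyncio
import json
from pathlib import Path


class FileConfig:
    """Sketch only: a config store whose save_data() is awaitable because
    the blocking JSON write runs in the default executor. Names here are
    hypothetical, not the Supervisor's actual implementation."""

    def __init__(self, path: Path) -> None:
        self.path = path
        self.registries: dict[str, dict] = {}

    async def save_data(self) -> None:
        loop = asyncio.get_running_loop()
        payload = json.dumps({"registries": self.registries})
        # Path.write_text blocks; hand it to a worker thread.
        await loop.run_in_executor(None, self.path.write_text, payload)
```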
@@ -1,45 +1,19 @@
 """Init file for Supervisor hardware RESTful API."""
-
 import logging
 from typing import Any

 from aiohttp import web

-from ..const import (
-    ATTR_AUDIO,
-    ATTR_DEVICES,
-    ATTR_ID,
-    ATTR_INPUT,
-    ATTR_NAME,
-    ATTR_OUTPUT,
-    ATTR_SERIAL,
-    ATTR_SIZE,
-    ATTR_SYSTEM,
-)
+from ..const import ATTR_AUDIO, ATTR_DEVICES, ATTR_INPUT, ATTR_NAME, ATTR_OUTPUT
 from ..coresys import CoreSysAttributes
-from ..dbus.udisks2 import UDisks2Manager
-from ..dbus.udisks2.block import UDisks2Block
-from ..dbus.udisks2.drive import UDisks2Drive
 from ..hardware.data import Device
 from .const import (
     ATTR_ATTRIBUTES,
     ATTR_BY_ID,
     ATTR_CHILDREN,
-    ATTR_CONNECTION_BUS,
     ATTR_DEV_PATH,
-    ATTR_DEVICE,
-    ATTR_DRIVES,
-    ATTR_EJECTABLE,
-    ATTR_FILESYSTEMS,
-    ATTR_MODEL,
-    ATTR_MOUNT_POINTS,
-    ATTR_REMOVABLE,
-    ATTR_REVISION,
-    ATTR_SEAT,
     ATTR_SUBSYSTEM,
     ATTR_SYSFS,
-    ATTR_TIME_DETECTED,
-    ATTR_VENDOR,
 )
 from .utils import api_process

@@ -47,7 +21,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)


 def device_struct(device: Device) -> dict[str, Any]:
-    """Return a dict with information of a interface to be used in the API."""
+    """Return a dict with information of a interface to be used in th API."""
     return {
         ATTR_NAME: device.name,
         ATTR_SYSFS: device.sysfs,
@@ -59,42 +33,6 @@ def device_struct(device: Device) -> dict[str, Any]:
     }


-def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
-    """Return a dict with information of a filesystem block device to be used in the API."""
-    return {
-        ATTR_DEVICE: str(fs_block.device),
-        ATTR_ID: fs_block.id,
-        ATTR_SIZE: fs_block.size,
-        ATTR_NAME: fs_block.id_label,
-        ATTR_SYSTEM: fs_block.hint_system,
-        ATTR_MOUNT_POINTS: [
-            str(mount_point) for mount_point in fs_block.filesystem.mount_points
-        ],
-    }
-
-
-def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]:
-    """Return a dict with information of a disk to be used in the API."""
-    return {
-        ATTR_VENDOR: drive.vendor,
-        ATTR_MODEL: drive.model,
-        ATTR_REVISION: drive.revision,
-        ATTR_SERIAL: drive.serial,
-        ATTR_ID: drive.id,
-        ATTR_SIZE: drive.size,
-        ATTR_TIME_DETECTED: drive.time_detected.isoformat(),
-        ATTR_CONNECTION_BUS: drive.connection_bus,
-        ATTR_SEAT: drive.seat,
-        ATTR_REMOVABLE: drive.removable,
-        ATTR_EJECTABLE: drive.ejectable,
-        ATTR_FILESYSTEMS: [
-            filesystem_struct(block)
-            for block in udisks2.block_devices
-            if block.filesystem and block.drive == drive.object_path
-        ],
-    }
-
-
 class APIHardware(CoreSysAttributes):
     """Handle RESTful API for hardware functions."""

@@ -104,11 +42,7 @@ class APIHardware(CoreSysAttributes):
         return {
             ATTR_DEVICES: [
                 device_struct(device) for device in self.sys_hardware.devices
-            ],
-            ATTR_DRIVES: [
-                drive_struct(self.sys_dbus.udisks2, drive)
-                for drive in self.sys_dbus.udisks2.drives
-            ],
+            ]
         }

     @api_process
@@ -1,9 +1,7 @@
 """Init file for Supervisor Home Assistant RESTful API."""
-
 import asyncio
-from collections.abc import Awaitable
 import logging
-from typing import Any
+from typing import Any, Awaitable

 from aiohttp import web
 import voluptuous as vol
@@ -13,7 +11,6 @@ from ..const import (
     ATTR_AUDIO_INPUT,
     ATTR_AUDIO_OUTPUT,
     ATTR_BACKUP,
-    ATTR_BACKUPS_EXCLUDE_DATABASE,
     ATTR_BLK_READ,
     ATTR_BLK_WRITE,
     ATTR_BOOT,
@@ -35,10 +32,10 @@ from ..const import (
     ATTR_WATCHDOG,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIDBMigrationInProgress, APIError
+from ..exceptions import APIError
 from ..validate import docker_image, network_port, version_tag
-from .const import ATTR_FORCE, ATTR_SAFE_MODE
-from .utils import api_process, api_validate
+from .const import CONTENT_TYPE_BINARY
+from .utils import api_process, api_process_raw, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -53,7 +50,6 @@ SCHEMA_OPTIONS = vol.Schema(
         vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str),
         vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
         vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
-        vol.Optional(ATTR_BACKUPS_EXCLUDE_DATABASE): vol.Boolean(),
     }
 )

@@ -64,34 +60,10 @@ SCHEMA_UPDATE = vol.Schema(
     }
 )

-SCHEMA_RESTART = vol.Schema(
-    {
-        vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
-        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
-    }
-)
-
-SCHEMA_STOP = vol.Schema(
-    {
-        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
-    }
-)
-

 class APIHomeAssistant(CoreSysAttributes):
     """Handle RESTful API for Home Assistant functions."""

-    async def _check_offline_migration(self, force: bool = False) -> None:
-        """Check and raise if there's an offline DB migration in progress."""
-        if (
-            not force
-            and (state := await self.sys_homeassistant.api.get_api_state())
-            and state.offline_db_migration
-        ):
-            raise APIDBMigrationInProgress(
-                "Offline database migration in progress, try again after it has completed"
-            )
-
     @api_process
     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return host information."""
@@ -109,7 +81,6 @@ class APIHomeAssistant(CoreSysAttributes):
             ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
             ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input,
             ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output,
-            ATTR_BACKUPS_EXCLUDE_DATABASE: self.sys_homeassistant.backups_exclude_database,
         }

     @api_process
@@ -119,9 +90,6 @@ class APIHomeAssistant(CoreSysAttributes):

         if ATTR_IMAGE in body:
             self.sys_homeassistant.image = body[ATTR_IMAGE]
-            self.sys_homeassistant.override_image = (
-                self.sys_homeassistant.image != self.sys_homeassistant.default_image
-            )

         if ATTR_BOOT in body:
             self.sys_homeassistant.boot = body[ATTR_BOOT]
@@ -144,12 +112,7 @@ class APIHomeAssistant(CoreSysAttributes):
         if ATTR_AUDIO_OUTPUT in body:
             self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT]

-        if ATTR_BACKUPS_EXCLUDE_DATABASE in body:
-            self.sys_homeassistant.backups_exclude_database = body[
-                ATTR_BACKUPS_EXCLUDE_DATABASE
-            ]
-
-        await self.sys_homeassistant.save_data()
+        self.sys_homeassistant.save_data()

     @api_process
     async def stats(self, request: web.Request) -> dict[Any, str]:
@@ -173,7 +136,6 @@ class APIHomeAssistant(CoreSysAttributes):
     async def update(self, request: web.Request) -> None:
         """Update Home Assistant."""
         body = await api_validate(SCHEMA_UPDATE, request)
-        await self._check_offline_migration()

         await asyncio.shield(
             self.sys_homeassistant.core.update(
@@ -183,12 +145,9 @@ class APIHomeAssistant(CoreSysAttributes):
         )

     @api_process
-    async def stop(self, request: web.Request) -> Awaitable[None]:
+    def stop(self, request: web.Request) -> Awaitable[None]:
         """Stop Home Assistant."""
-        body = await api_validate(SCHEMA_STOP, request)
-        await self._check_offline_migration(force=body[ATTR_FORCE])
-
-        return await asyncio.shield(self.sys_homeassistant.core.stop())
+        return asyncio.shield(self.sys_homeassistant.core.stop())

     @api_process
     def start(self, request: web.Request) -> Awaitable[None]:
@@ -196,24 +155,19 @@ class APIHomeAssistant(CoreSysAttributes):
         return asyncio.shield(self.sys_homeassistant.core.start())

     @api_process
-    async def restart(self, request: web.Request) -> None:
+    def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart Home Assistant."""
-        body = await api_validate(SCHEMA_RESTART, request)
-        await self._check_offline_migration(force=body[ATTR_FORCE])
-
-        await asyncio.shield(
-            self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
-        )
+        return asyncio.shield(self.sys_homeassistant.core.restart())

     @api_process
-    async def rebuild(self, request: web.Request) -> None:
+    def rebuild(self, request: web.Request) -> Awaitable[None]:
         """Rebuild Home Assistant."""
-        body = await api_validate(SCHEMA_RESTART, request)
-        await self._check_offline_migration(force=body[ATTR_FORCE])
-
-        await asyncio.shield(
-            self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE])
-        )
+        return asyncio.shield(self.sys_homeassistant.core.rebuild())
+
+    @api_process_raw(CONTENT_TYPE_BINARY)
+    def logs(self, request: web.Request) -> Awaitable[bytes]:
+        """Return Home Assistant Docker logs."""
+        return self.sys_homeassistant.core.logs()

     @api_process
     async def check(self, request: web.Request) -> None:
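
Both sides of this file wrap the core lifecycle calls in `asyncio.shield()`. The point of the shield is that cancelling the HTTP handler (for instance when the client disconnects) does not cancel the underlying stop/restart operation. A small self-contained demo of that behaviour, using only the standard library and no Supervisor code:

```python
import asyncio


async def slow_operation() -> str:
    await asyncio.sleep(1.0)
    print("operation finished anyway")
    return "done"


async def handler() -> str:
    # Cancelling handler() cancels this await, but not slow_operation():
    # the inner task keeps running to completion.
    return await asyncio.shield(slow_operation())


async def main() -> None:
    task = asyncio.create_task(handler())
    await asyncio.sleep(0.1)
    task.cancel()  # simulate a dropped request
    try:
        await task
    except asyncio.CancelledError:
        print("handler cancelled")
    await asyncio.sleep(1.5)  # let the shielded operation finish


asyncio.run(main())
```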
@@ -1,13 +1,9 @@
|
|||||||
"""Init file for Supervisor host RESTful API."""
|
"""Init file for Supervisor host RESTful API."""
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
from contextlib import suppress
|
from typing import Awaitable
|
||||||
import logging
|
|
||||||
|
|
||||||
from aiohttp import ClientConnectionResetError, web
|
from aiohttp import web
|
||||||
from aiohttp.hdrs import ACCEPT, RANGE
|
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
from voluptuous.error import CoerceInvalid
|
|
||||||
|
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_CHASSIS,
|
ATTR_CHASSIS,
|
||||||
@@ -28,66 +24,29 @@ from ..const import (
|
|||||||
ATTR_TIMEZONE,
|
ATTR_TIMEZONE,
|
||||||
)
|
)
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
from ..exceptions import APIDBMigrationInProgress, APIError, HostLogError
|
|
||||||
from ..host.const import (
|
|
||||||
PARAM_BOOT_ID,
|
|
||||||
PARAM_FOLLOW,
|
|
||||||
PARAM_SYSLOG_IDENTIFIER,
|
|
||||||
LogFormat,
|
|
||||||
LogFormatter,
|
|
||||||
)
|
|
||||||
from ..utils.systemd_journal import journal_logs_reader
|
|
||||||
from .const import (
|
from .const import (
|
||||||
ATTR_AGENT_VERSION,
|
ATTR_AGENT_VERSION,
|
||||||
ATTR_APPARMOR_VERSION,
|
ATTR_APPARMOR_VERSION,
|
||||||
ATTR_BOOT_TIMESTAMP,
|
ATTR_BOOT_TIMESTAMP,
|
||||||
ATTR_BOOTS,
|
|
||||||
ATTR_BROADCAST_LLMNR,
|
ATTR_BROADCAST_LLMNR,
|
||||||
ATTR_BROADCAST_MDNS,
|
ATTR_BROADCAST_MDNS,
|
||||||
ATTR_DT_SYNCHRONIZED,
|
ATTR_DT_SYNCHRONIZED,
|
||||||
ATTR_DT_UTC,
|
ATTR_DT_UTC,
|
||||||
ATTR_FORCE,
|
|
||||||
ATTR_IDENTIFIERS,
|
|
||||||
ATTR_LLMNR_HOSTNAME,
|
ATTR_LLMNR_HOSTNAME,
|
||||||
ATTR_STARTUP_TIME,
|
ATTR_STARTUP_TIME,
|
||||||
ATTR_USE_NTP,
|
ATTR_USE_NTP,
|
||||||
ATTR_VIRTUALIZATION,
|
CONTENT_TYPE_BINARY,
|
||||||
CONTENT_TYPE_TEXT,
|
|
||||||
CONTENT_TYPE_X_LOG,
|
|
||||||
)
|
)
|
||||||
from .utils import api_process, api_process_raw, api_validate
|
from .utils import api_process, api_process_raw, api_validate
|
||||||
|
|
||||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
SERVICE = "service"
|
||||||
|
|
||||||
IDENTIFIER = "identifier"
|
|
||||||
BOOTID = "bootid"
|
|
||||||
DEFAULT_LINES = 100
|
|
||||||
|
|
||||||
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})
|
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_SHUTDOWN = vol.Schema(
|
|
||||||
{
|
|
||||||
vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
# pylint: enable=no-value-for-parameter
|
|
||||||
|
|
||||||
|
|
||||||
class APIHost(CoreSysAttributes):
|
class APIHost(CoreSysAttributes):
|
||||||
"""Handle RESTful API for host functions."""
|
"""Handle RESTful API for host functions."""
|
||||||
|
|
||||||
async def _check_ha_offline_migration(self, force: bool) -> None:
|
|
||||||
"""Check if HA has an offline migration in progress and raise if not forced."""
|
|
||||||
if (
|
|
||||||
not force
|
|
||||||
and (state := await self.sys_homeassistant.api.get_api_state())
|
|
||||||
and state.offline_db_migration
|
|
||||||
):
|
|
||||||
raise APIDBMigrationInProgress(
|
|
||||||
"Home Assistant offline database migration in progress, please wait until complete before shutting down host"
|
|
||||||
)
|
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def info(self, request):
|
async def info(self, request):
|
||||||
"""Return host information."""
|
"""Return host information."""
|
||||||
@@ -95,13 +54,12 @@ class APIHost(CoreSysAttributes):
|
|||||||
ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
|
ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
|
||||||
ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
|
ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
|
||||||
ATTR_CHASSIS: self.sys_host.info.chassis,
|
ATTR_CHASSIS: self.sys_host.info.chassis,
|
||||||
ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
|
|
||||||
ATTR_CPE: self.sys_host.info.cpe,
|
ATTR_CPE: self.sys_host.info.cpe,
|
||||||
ATTR_DEPLOYMENT: self.sys_host.info.deployment,
|
ATTR_DEPLOYMENT: self.sys_host.info.deployment,
|
||||||
ATTR_DISK_FREE: await self.sys_host.info.free_space(),
|
ATTR_DISK_FREE: self.sys_host.info.free_space,
|
||||||
ATTR_DISK_TOTAL: await self.sys_host.info.total_space(),
|
ATTR_DISK_TOTAL: self.sys_host.info.total_space,
|
||||||
ATTR_DISK_USED: await self.sys_host.info.used_space(),
|
ATTR_DISK_USED: self.sys_host.info.used_space,
|
||||||
ATTR_DISK_LIFE_TIME: await self.sys_host.info.disk_life_time(),
|
ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
|
||||||
ATTR_FEATURES: self.sys_host.features,
|
ATTR_FEATURES: self.sys_host.features,
|
||||||
ATTR_HOSTNAME: self.sys_host.info.hostname,
|
ATTR_HOSTNAME: self.sys_host.info.hostname,
|
||||||
ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname,
|
ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname,
|
||||||
@@ -129,20 +87,14 @@ class APIHost(CoreSysAttributes):
|
|||||||
)
|
)
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def reboot(self, request):
|
def reboot(self, request):
|
||||||
"""Reboot host."""
|
"""Reboot host."""
|
||||||
body = await api_validate(SCHEMA_SHUTDOWN, request)
|
return asyncio.shield(self.sys_host.control.reboot())
|
||||||
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
|
|
||||||
|
|
||||||
return await asyncio.shield(self.sys_host.control.reboot())
|
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def shutdown(self, request):
|
def shutdown(self, request):
|
||||||
"""Poweroff host."""
|
"""Poweroff host."""
|
||||||
body = await api_validate(SCHEMA_SHUTDOWN, request)
|
return asyncio.shield(self.sys_host.control.shutdown())
|
||||||
await self._check_ha_offline_migration(force=body[ATTR_FORCE])
|
|
||||||
|
|
||||||
return await asyncio.shield(self.sys_host.control.shutdown())
|
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
def reload(self, request):
|
def reload(self, request):
|
||||||
@@ -165,115 +117,30 @@ class APIHost(CoreSysAttributes):
|
|||||||
return {ATTR_SERVICES: services}
|
return {ATTR_SERVICES: services}
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def list_boots(self, _: web.Request):
|
def service_start(self, request):
|
||||||
"""Return a list of boot IDs."""
|
"""Start a service."""
|
||||||
boot_ids = await self.sys_host.logs.get_boot_ids()
|
unit = request.match_info.get(SERVICE)
|
||||||
return {
|
return asyncio.shield(self.sys_host.services.start(unit))
|
||||||
ATTR_BOOTS: {
|
|
||||||
-                str(1 + i - len(boot_ids)): boot_id
-                for i, boot_id in enumerate(boot_ids)
-            }
-        }
 
     @api_process
-    async def list_identifiers(self, _: web.Request):
-        """Return a list of syslog identifiers."""
-        return {ATTR_IDENTIFIERS: await self.sys_host.logs.get_identifiers()}
-
-    async def _get_boot_id(self, possible_offset: str) -> str:
-        """Convert offset into boot ID if required."""
-        with suppress(CoerceInvalid):
-            offset = vol.Coerce(int)(possible_offset)
-            try:
-                return await self.sys_host.logs.get_boot_id(offset)
-            except (ValueError, HostLogError) as err:
-                raise APIError() from err
-        return possible_offset
-
-    async def advanced_logs_handler(
-        self, request: web.Request, identifier: str | None = None, follow: bool = False
-    ) -> web.StreamResponse:
-        """Return systemd-journald logs."""
-        log_formatter = LogFormatter.PLAIN
-        params = {}
-        if identifier:
-            params[PARAM_SYSLOG_IDENTIFIER] = identifier
-        elif IDENTIFIER in request.match_info:
-            params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
-        else:
-            params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
-            # host logs should be always verbose, no matter what Accept header is used
-            log_formatter = LogFormatter.VERBOSE
-
-        if BOOTID in request.match_info:
-            params[PARAM_BOOT_ID] = await self._get_boot_id(
-                request.match_info.get(BOOTID)
-            )
-        if follow:
-            params[PARAM_FOLLOW] = ""
-
-        if ACCEPT in request.headers and request.headers[ACCEPT] not in [
-            CONTENT_TYPE_TEXT,
-            CONTENT_TYPE_X_LOG,
-            "*/*",
-        ]:
-            raise APIError(
-                "Invalid content type requested. Only text/plain and text/x-log "
-                "supported for now."
-            )
-
-        if "verbose" in request.query or request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
-            log_formatter = LogFormatter.VERBOSE
-
-        if "lines" in request.query:
-            lines = request.query.get("lines", DEFAULT_LINES)
-            try:
-                lines = int(lines)
-            except ValueError:
-                # If the user passed a non-integer value, just use the default instead of error.
-                lines = DEFAULT_LINES
-            finally:
-                # We can't use the entries= Range header syntax to refer to the last 1 line,
-                # and passing 1 to the calculation below would return the 1st line of the logs
-                # instead. Since this is really an edge case that doesn't matter much, we'll just
-                # return 2 lines at minimum.
-                lines = max(2, lines)
-            # entries=cursor[[:num_skip]:num_entries]
-            range_header = f"entries=:-{lines - 1}:{'' if follow else lines}"
-        elif RANGE in request.headers:
-            range_header = request.headers.get(RANGE)
-        else:
-            range_header = (
-                f"entries=:-{DEFAULT_LINES - 1}:{'' if follow else DEFAULT_LINES}"
-            )
-
-        async with self.sys_host.logs.journald_logs(
-            params=params, range_header=range_header, accept=LogFormat.JOURNAL
-        ) as resp:
-            try:
-                response = web.StreamResponse()
-                response.content_type = CONTENT_TYPE_TEXT
-                headers_returned = False
-                async for cursor, line in journal_logs_reader(resp, log_formatter):
-                    if not headers_returned:
-                        if cursor:
-                            response.headers["X-First-Cursor"] = cursor
-                        response.headers["X-Accel-Buffering"] = "no"
-                        await response.prepare(request)
-                        headers_returned = True
-                    # When client closes the connection while reading busy logs, we
-                    # sometimes get this exception. It should be safe to ignore it.
-                    with suppress(ClientConnectionResetError):
-                        await response.write(line.encode("utf-8") + b"\n")
-            except ConnectionResetError as ex:
-                raise APIError(
-                    "Connection reset when trying to fetch data from systemd-journald."
-                ) from ex
-            return response
-
-    @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
-    async def advanced_logs(
-        self, request: web.Request, identifier: str | None = None, follow: bool = False
-    ) -> web.StreamResponse:
-        """Return systemd-journald logs. Wrapped as standard API handler."""
-        return await self.advanced_logs_handler(request, identifier, follow)
+    def service_stop(self, request):
+        """Stop a service."""
+        unit = request.match_info.get(SERVICE)
+        return asyncio.shield(self.sys_host.services.stop(unit))
+
+    @api_process
+    def service_reload(self, request):
+        """Reload a service."""
+        unit = request.match_info.get(SERVICE)
+        return asyncio.shield(self.sys_host.services.reload(unit))
+
+    @api_process
+    def service_restart(self, request):
+        """Restart a service."""
+        unit = request.match_info.get(SERVICE)
+        return asyncio.shield(self.sys_host.services.restart(unit))
+
+    @api_process_raw(CONTENT_TYPE_BINARY)
+    def logs(self, request: web.Request) -> Awaitable[bytes]:
+        """Return host kernel logs."""
+        return self.sys_host.info.get_dmesg()
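
A note on the `advanced_logs_handler` removed above: journald's `Range: entries=cursor[[:num_skip]:num_entries]` syntax is what drives the `lines` handling, and the hunk's own comment explains why the code clamps to 2 lines. A minimal sketch of just that arithmetic (the helper name is ours, not part of the Supervisor API):

    def journald_range_header(lines: int, follow: bool) -> str:
        """Build a Range header returning the last `lines` journal entries.

        Mirrors the removed hunk: a num_skip of -(lines - 1) backs up to the
        first of the last `lines` entries; when following, num_entries is left
        empty so the stream stays open. The entries= syntax cannot address
        exactly the last 1 line, hence the clamp to 2.
        """
        lines = max(2, lines)
        return f"entries=:-{lines - 1}:{'' if follow else lines}"

    assert journald_range_header(100, follow=False) == "entries=:-99:100"
    assert journald_range_header(100, follow=True) == "entries=:-99:"
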
@@ -1,9 +1,8 @@
 """Supervisor Add-on ingress service."""
-
 import asyncio
 from ipaddress import ip_address
 import logging
-from typing import Any
+from typing import Any, Union
 
 import aiohttp
 from aiohttp import ClientTimeout, hdrs, web
@@ -22,65 +21,20 @@ from ..const import (
     ATTR_ICON,
     ATTR_PANELS,
     ATTR_SESSION,
-    ATTR_SESSION_DATA_USER_ID,
     ATTR_TITLE,
-    HEADER_REMOTE_USER_DISPLAY_NAME,
-    HEADER_REMOTE_USER_ID,
-    HEADER_REMOTE_USER_NAME,
-    HEADER_TOKEN,
-    HEADER_TOKEN_OLD,
-    IngressSessionData,
-    IngressSessionDataUser,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import HomeAssistantAPIError
-from .const import COOKIE_INGRESS
+from .const import COOKIE_INGRESS, HEADER_TOKEN, HEADER_TOKEN_OLD
 from .utils import api_process, api_validate, require_home_assistant
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
 VALIDATE_SESSION_DATA = vol.Schema({ATTR_SESSION: str})
 
-"""Expected optional payload of create session request"""
-SCHEMA_INGRESS_CREATE_SESSION_DATA = vol.Schema(
-    {
-        vol.Optional(ATTR_SESSION_DATA_USER_ID): str,
-    }
-)
-
-
-# from https://github.com/aio-libs/aiohttp/blob/8ae650bee4add9f131d49b96a0a150311ea58cd1/aiohttp/helpers.py#L1059C1-L1079C1
-def must_be_empty_body(method: str, code: int) -> bool:
-    """Check if a request must return an empty body."""
-    return (
-        status_code_must_be_empty_body(code)
-        or method_must_be_empty_body(method)
-        or (200 <= code < 300 and method.upper() == hdrs.METH_CONNECT)
-    )
-
-
-def method_must_be_empty_body(method: str) -> bool:
-    """Check if a method must return an empty body."""
-    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
-    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
-    return method.upper() == hdrs.METH_HEAD
-
-
-def status_code_must_be_empty_body(code: int) -> bool:
-    """Check if a status code must return an empty body."""
-    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
-    return code in {204, 304} or 100 <= code < 200
-
-
 class APIIngress(CoreSysAttributes):
     """Ingress view to handle add-on webui routing."""
 
-    _list_of_users: list[IngressSessionDataUser]
-
-    def __init__(self) -> None:
-        """Initialize APIIngress."""
-        self._list_of_users = []
-
     def _extract_addon(self, request: web.Request) -> Addon:
         """Return addon, throw an exception it it doesn't exist."""
         token = request.match_info.get("token")
@@ -115,19 +69,7 @@ class APIIngress(CoreSysAttributes):
     @require_home_assistant
     async def create_session(self, request: web.Request) -> dict[str, Any]:
         """Create a new session."""
-        schema_ingress_config_session_data = await api_validate(
-            SCHEMA_INGRESS_CREATE_SESSION_DATA, request
-        )
-        data: IngressSessionData | None = None
-
-        if ATTR_SESSION_DATA_USER_ID in schema_ingress_config_session_data:
-            user = await self._find_user_by_id(
-                schema_ingress_config_session_data[ATTR_SESSION_DATA_USER_ID]
-            )
-            if user:
-                data = IngressSessionData(user)
-
-        session = self.sys_ingress.create_session(data)
+        session = self.sys_ingress.create_session()
         return {ATTR_SESSION: session}
 
     @api_process
@@ -141,9 +83,10 @@ class APIIngress(CoreSysAttributes):
             _LOGGER.warning("No valid ingress session %s", data[ATTR_SESSION])
             raise HTTPUnauthorized()
 
+    @require_home_assistant
     async def handler(
         self, request: web.Request
-    ) -> web.Response | web.StreamResponse | web.WebSocketResponse:
+    ) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
         """Route data to Supervisor ingress service."""
 
         # Check Ingress Session
@@ -155,14 +98,13 @@ class APIIngress(CoreSysAttributes):
         # Process requests
         addon = self._extract_addon(request)
         path = request.match_info.get("path")
-        session_data = self.sys_ingress.get_session_data(session)
         try:
             # Websocket
             if _is_websocket(request):
-                return await self._handle_websocket(request, addon, path, session_data)
+                return await self._handle_websocket(request, addon, path)
 
             # Request
-            return await self._handle_request(request, addon, path, session_data)
+            return await self._handle_request(request, addon, path)
 
         except aiohttp.ClientError as err:
             _LOGGER.error("Ingress error: %s", err)
@@ -170,11 +112,7 @@ class APIIngress(CoreSysAttributes):
         raise HTTPBadGateway()
 
     async def _handle_websocket(
-        self,
-        request: web.Request,
-        addon: Addon,
-        path: str,
-        session_data: IngressSessionData | None,
+        self, request: web.Request, addon: Addon, path: str
     ) -> web.WebSocketResponse:
         """Ingress route for websocket."""
         if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers:
@@ -192,7 +130,7 @@ class APIIngress(CoreSysAttributes):
 
         # Preparing
         url = self._create_url(addon, path)
-        source_header = _init_header(request, addon, session_data)
+        source_header = _init_header(request, addon)
 
         # Support GET query
         if request.query_string:
@@ -209,8 +147,8 @@ class APIIngress(CoreSysAttributes):
         # Proxy requests
         await asyncio.wait(
             [
-                self.sys_create_task(_websocket_forward(ws_server, ws_client)),
-                self.sys_create_task(_websocket_forward(ws_client, ws_server)),
+                _websocket_forward(ws_server, ws_client),
+                _websocket_forward(ws_client, ws_server),
             ],
             return_when=asyncio.FIRST_COMPLETED,
         )
@@ -218,15 +156,11 @@ class APIIngress(CoreSysAttributes):
         return ws_server
 
     async def _handle_request(
-        self,
-        request: web.Request,
-        addon: Addon,
-        path: str,
-        session_data: IngressSessionData | None,
-    ) -> web.Response | web.StreamResponse:
+        self, request: web.Request, addon: Addon, path: str
+    ) -> Union[web.Response, web.StreamResponse]:
         """Ingress route for request."""
         url = self._create_url(addon, path)
-        source_header = _init_header(request, addon, session_data)
+        source_header = _init_header(request, addon)
 
         # Passing the raw stream breaks requests for some webservers
         # since we just need it for POST requests really, for all other methods
@@ -246,21 +180,12 @@ class APIIngress(CoreSysAttributes):
             allow_redirects=False,
             data=data,
             timeout=ClientTimeout(total=None),
-            skip_auto_headers={hdrs.CONTENT_TYPE},
         ) as result:
             headers = _response_header(result)
-            # Avoid parsing content_type in simple cases for better performance
-            if maybe_content_type := result.headers.get(hdrs.CONTENT_TYPE):
-                content_type = (maybe_content_type.partition(";"))[0].strip()
-            else:
-                content_type = result.content_type
+
             # Simple request
             if (
-                # empty body responses should not be streamed,
-                # otherwise aiohttp < 3.9.0 may generate
-                # an invalid "0\r\n\r\n" chunk instead of an empty response.
-                must_be_empty_body(request.method, result.status)
-                or hdrs.CONTENT_LENGTH in result.headers
+                hdrs.CONTENT_LENGTH in result.headers
                 and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
             ):
                 # Return Response
@@ -268,16 +193,15 @@ class APIIngress(CoreSysAttributes):
                 return web.Response(
                     headers=headers,
                     status=result.status,
-                    content_type=content_type,
+                    content_type=result.content_type,
                     body=body,
                 )
 
             # Stream response
             response = web.StreamResponse(status=result.status, headers=headers)
-            response.content_type = content_type
+            response.content_type = result.content_type
 
             try:
-                response.headers["X-Accel-Buffering"] = "no"
                 await response.prepare(request)
                 async for data in result.content.iter_chunked(4096):
                     await response.write(data)
@@ -291,35 +215,13 @@ class APIIngress(CoreSysAttributes):
 
         return response
 
-    async def _find_user_by_id(self, user_id: str) -> IngressSessionDataUser | None:
-        """Find user object by the user's ID."""
-        try:
-            list_of_users = await self.sys_homeassistant.get_users()
-        except (HomeAssistantAPIError, TypeError) as err:
-            _LOGGER.error(
-                "%s error occurred while requesting list of users: %s", type(err), err
-            )
-            return None
-
-        if list_of_users is not None:
-            self._list_of_users = list_of_users
-
-        return next((user for user in self._list_of_users if user.id == user_id), None)
-
-
 def _init_header(
-    request: web.Request, addon: Addon, session_data: IngressSessionData | None
-) -> CIMultiDict | dict[str, str]:
+    request: web.Request, addon: str
+) -> Union[CIMultiDict, dict[str, str]]:
     """Create initial header."""
     headers = {}
 
-    if session_data is not None:
-        headers[HEADER_REMOTE_USER_ID] = session_data.user.id
-        if session_data.user.username is not None:
-            headers[HEADER_REMOTE_USER_NAME] = session_data.user.username
-        if session_data.user.display_name is not None:
-            headers[HEADER_REMOTE_USER_DISPLAY_NAME] = session_data.user.display_name
-
     # filter flags
     for name, value in request.headers.items():
         if name in (
@@ -332,9 +234,6 @@ def _init_header(
             hdrs.SEC_WEBSOCKET_KEY,
             istr(HEADER_TOKEN),
             istr(HEADER_TOKEN_OLD),
-            istr(HEADER_REMOTE_USER_ID),
-            istr(HEADER_REMOTE_USER_NAME),
-            istr(HEADER_REMOTE_USER_DISPLAY_NAME),
         ):
             continue
         headers[name] = value
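
The empty-body helpers removed above encode RFC 9112's rules: HEAD requests, 1xx/204/304 statuses, and 2xx replies to CONNECT must not carry a message body, so the ingress proxy returns such responses without streaming a chunked body. A standalone sketch of the same predicate, using plain method strings instead of aiohttp's `hdrs` constants:

    def response_must_be_empty(method: str, code: int) -> bool:
        """True when an HTTP response for (method, code) must carry no body."""
        status_empty = code in {204, 304} or 100 <= code < 200
        method_empty = method.upper() == "HEAD"
        connect_2xx = 200 <= code < 300 and method.upper() == "CONNECT"
        return status_empty or method_empty or connect_2xx

    assert response_must_be_empty("GET", 204)
    assert response_must_be_empty("HEAD", 200)
    assert not response_must_be_empty("POST", 200)
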
@@ -1,5 +1,4 @@
 """Init file for Supervisor Jobs RESTful API."""
-
 import logging
 from typing import Any
 
@@ -7,10 +6,7 @@ from aiohttp import web
 import voluptuous as vol
 
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, APINotFound, JobNotFound
-from ..jobs import SupervisorJob
 from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
-from .const import ATTR_JOBS
 from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -23,65 +19,11 @@ SCHEMA_OPTIONS = vol.Schema(
 class APIJobs(CoreSysAttributes):
     """Handle RESTful API for OS functions."""
 
-    def _extract_job(self, request: web.Request) -> SupervisorJob:
-        """Extract job from request or raise."""
-        try:
-            return self.sys_jobs.get_job(request.match_info.get("uuid"))
-        except JobNotFound:
-            raise APINotFound("Job does not exist") from None
-
-    def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
-        """Return current job tree.
-
-        Jobs are added to cache as they are created so by default they are in oldest to newest.
-        This is correct ordering for child jobs as it makes logical sense to present those in
-        the order they occurred within the parent. For the list as a whole, sort from newest
-        to oldest as its likely any client is most interested in the newer ones.
-        """
-        # Initially sort oldest to newest so all child lists end up in correct order
-        jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
-        for job in sorted(self.sys_jobs.jobs):
-            if job.internal:
-                continue
-
-            if job.parent_id not in jobs_by_parent:
-                jobs_by_parent[job.parent_id] = [job]
-            else:
-                jobs_by_parent[job.parent_id].append(job)
-
-        # After parent-child organization, sort the root jobs only from newest to oldest
-        job_list: list[dict[str, Any]] = []
-        queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
-            [(job_list, start)]
-            if start
-            else [
-                (job_list, job)
-                for job in sorted(jobs_by_parent.get(None, []), reverse=True)
-            ]
-        )
-
-        while queue:
-            (current_list, current_job) = queue.pop(0)
-            child_jobs: list[dict[str, Any]] = []
-
-            # We remove parent_id and instead use that info to represent jobs as a tree
-            job_dict = current_job.as_dict() | {"child_jobs": child_jobs}
-            job_dict.pop("parent_id")
-            current_list.append(job_dict)
-
-            if current_job.uuid in jobs_by_parent:
-                queue.extend(
-                    [(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)]
-                )
-
-        return job_list
-
     @api_process
     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return JobManager information."""
         return {
             ATTR_IGNORE_CONDITIONS: self.sys_jobs.ignore_conditions,
-            ATTR_JOBS: self._list_jobs(),
         }
 
     @api_process
@@ -92,27 +34,11 @@ class APIJobs(CoreSysAttributes):
         if ATTR_IGNORE_CONDITIONS in body:
             self.sys_jobs.ignore_conditions = body[ATTR_IGNORE_CONDITIONS]
 
-        await self.sys_jobs.save_data()
+        self.sys_jobs.save_data()
 
         await self.sys_resolution.evaluate.evaluate_system()
 
     @api_process
     async def reset(self, request: web.Request) -> None:
         """Reset options for JobManager."""
-        await self.sys_jobs.reset_data()
-
-    @api_process
-    async def job_info(self, request: web.Request) -> dict[str, Any]:
-        """Get details of a job by ID."""
-        job = self._extract_job(request)
-        return self._list_jobs(job)[0]
-
-    @api_process
-    async def remove_job(self, request: web.Request) -> None:
-        """Remove a completed job."""
-        job = self._extract_job(request)
-
-        if not job.done:
-            raise APIError(f"Job {job.uuid} is not done!")
-
-        self.sys_jobs.remove_job(job)
+        self.sys_jobs.reset_data()
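
The `_list_jobs` method removed above turns the flat job cache into a tree: jobs are bucketed by `parent_id`, roots are emitted newest-first, and children keep their creation order. A self-contained sketch of that grouping over plain dicts (field names taken from the hunk; this is not the Supervisor API itself):

    from collections import defaultdict

    def build_job_tree(jobs: list[dict]) -> list[dict]:
        """Group flat job records (oldest to newest) into a parent/child tree."""
        by_parent: dict[str | None, list[dict]] = defaultdict(list)
        for job in jobs:
            by_parent[job["parent_id"]].append(job)

        def attach(job: dict) -> dict:
            # Drop parent_id; the nesting itself now carries that information.
            node = {k: v for k, v in job.items() if k != "parent_id"}
            node["child_jobs"] = [attach(child) for child in by_parent[job["uuid"]]]
            return node

        # Roots newest-first; child lists stay in creation order.
        return [attach(job) for job in reversed(by_parent[None])]

    jobs = [
        {"uuid": "a", "parent_id": None},
        {"uuid": "b", "parent_id": "a"},
        {"uuid": "c", "parent_id": None},
    ]
    assert [job["uuid"] for job in build_job_tree(jobs)] == ["c", "a"]
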
@@ -1,17 +1,10 @@
 """Handle security part of this API."""
-
 import logging
 import re
-from typing import Final
-from urllib.parse import unquote
 
 from aiohttp.web import Request, RequestHandler, Response, middleware
-from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
-from awesomeversion import AwesomeVersion
-
-from supervisor.homeassistant.const import LANDINGPAGE
-
-from ...addons.const import RE_SLUG
+from aiohttp.web_exceptions import HTTPForbidden, HTTPUnauthorized
+
 from ...const import (
     REQUEST_FROM,
     ROLE_ADMIN,
@@ -22,26 +15,14 @@ from ...const import (
     CoreState,
 )
 from ...coresys import CoreSys, CoreSysAttributes
-from ...utils import version_is_new_enough
 from ..utils import api_return_error, excract_supervisor_token
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
-_CORE_VERSION: Final = AwesomeVersion("2023.3.4")
 
 # fmt: off
 
-_CORE_FRONTEND_PATHS: Final = (
-    r"|/app/.*\.(?:js|gz|json|map|woff2)"
-    r"|/(store/)?addons/" + RE_SLUG + r"/(logo|icon)"
-)
-
-CORE_FRONTEND: Final = re.compile(
-    r"^(?:" + _CORE_FRONTEND_PATHS + r")$"
-)
-
-
 # Block Anytime
-BLACKLIST: Final = re.compile(
+BLACKLIST = re.compile(
     r"^(?:"
     r"|/homeassistant/api/hassio/.*"
     r"|/core/api/hassio/.*"
@@ -49,27 +30,25 @@ BLACKLIST: Final = re.compile(
 )
 
 # Free to call or have own security concepts
-NO_SECURITY_CHECK: Final = re.compile(
+NO_SECURITY_CHECK = re.compile(
     r"^(?:"
     r"|/homeassistant/api/.*"
     r"|/homeassistant/websocket"
     r"|/core/api/.*"
    r"|/core/websocket"
     r"|/supervisor/ping"
-    r"|/ingress/[-_A-Za-z0-9]+/.*"
-    + _CORE_FRONTEND_PATHS
-    + r")$"
+    r")$"
 )
 
 # Observer allow API calls
-OBSERVER_CHECK: Final = re.compile(
+OBSERVER_CHECK = re.compile(
     r"^(?:"
     r"|/.+/info"
     r")$"
 )
 
 # Can called by every add-on
-ADDONS_API_BYPASS: Final = re.compile(
+ADDONS_API_BYPASS = re.compile(
     r"^(?:"
     r"|/addons/self/(?!security|update)[^/]+"
     r"|/addons/self/options/config"
@@ -80,15 +59,8 @@ ADDONS_API_BYPASS: Final = re.compile(
     r")$"
 )
 
-# Home Assistant only
-CORE_ONLY_PATHS: Final = re.compile(
-    r"^(?:"
-    r"/addons/" + RE_SLUG + "/sys_options"
-    r")$"
-)
-
 # Policy role add-on API access
-ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
+ADDONS_ROLE_ACCESS = {
     ROLE_DEFAULT: re.compile(
         r"^(?:"
         r"|/.+/info"
@@ -110,11 +82,9 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
     ROLE_MANAGER: re.compile(
         r"^(?:"
         r"|/.+/info"
-        r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
+        r"|/addons(?:/[^/]+/(?!security).+|/reload)?"
         r"|/audio/.+"
         r"|/auth/cache"
-        r"|/available_updates"
-        r"|/backups.*"
         r"|/cli/.+"
         r"|/core/.+"
         r"|/dns/.+"
@@ -124,17 +94,16 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
         r"|/hassos/.+"
         r"|/homeassistant/.+"
         r"|/host/.+"
-        r"|/mounts.*"
         r"|/multicast/.+"
         r"|/network/.+"
         r"|/observer/.+"
-        r"|/os/(?!datadisk/wipe).+"
-        r"|/refresh_updates"
+        r"|/os/.+"
         r"|/resolution/.+"
-        r"|/security/.+"
+        r"|/backups.*"
         r"|/snapshots.*"
         r"|/store.*"
         r"|/supervisor/.+"
+        r"|/security/.+"
         r")$"
     ),
     ROLE_ADMIN: re.compile(
@@ -142,26 +111,6 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
     ),
 }
 
-FILTERS: Final = re.compile(
-    r"(?:"
-
-    # Common exploits
-    r"proc/self/environ"
-    r"|(<|%3C).*script.*(>|%3E)"
-
-    # File Injections
-    r"|(\.\.//?)+"  # ../../anywhere
-    r"|[a-zA-Z0-9_]=/([a-z0-9_.]//?)+"  # .html?v=/.//test
-
-    # SQL Injections
-    r"|union.*select.*\("
-    r"|union.*all.*select.*"
-    r"|concat.*\("
-
-    r")",
-    flags=re.IGNORECASE,
-)
-
 # fmt: on
 
 
@@ -172,32 +121,6 @@ class SecurityMiddleware(CoreSysAttributes):
         """Initialize security middleware."""
         self.coresys: CoreSys = coresys
 
-    def _recursive_unquote(self, value: str) -> str:
-        """Handle values that are encoded multiple times."""
-        if (unquoted := unquote(value)) != value:
-            unquoted = self._recursive_unquote(unquoted)
-        return unquoted
-
-    @middleware
-    async def block_bad_requests(
-        self, request: Request, handler: RequestHandler
-    ) -> Response:
-        """Process request and tblock commonly known exploit attempts."""
-        if FILTERS.search(self._recursive_unquote(request.path)):
-            _LOGGER.warning(
-                "Filtered a potential harmful request to: %s", request.raw_path
-            )
-            raise HTTPBadRequest
-
-        if FILTERS.search(self._recursive_unquote(request.query_string)):
-            _LOGGER.warning(
-                "Filtered a request with a potential harmful query string: %s",
-                request.raw_path,
-            )
-            raise HTTPBadRequest
-
-        return await handler(request)
-
     @middleware
     async def system_validation(
         self, request: Request, handler: RequestHandler
@@ -209,7 +132,7 @@ class SecurityMiddleware(CoreSysAttributes):
             CoreState.FREEZE,
         ):
             return api_return_error(
-                message=f"System is not ready with state: {self.sys_core.state}"
+                message=f"System is not ready with state: {self.sys_core.state.value}"
             )
 
         return await handler(request)
@@ -230,7 +153,6 @@ class SecurityMiddleware(CoreSysAttributes):
         # Ignore security check
         if NO_SECURITY_CHECK.match(request.path):
            _LOGGER.debug("Passthrough %s", request.path)
-            request[REQUEST_FROM] = None
            return await handler(request)
 
         # Not token
@@ -242,9 +164,6 @@ class SecurityMiddleware(CoreSysAttributes):
         if supervisor_token == self.sys_homeassistant.supervisor_token:
             _LOGGER.debug("%s access from Home Assistant", request.path)
             request_from = self.sys_homeassistant
-        elif CORE_ONLY_PATHS.match(request.path):
-            _LOGGER.warning("Attempted access to %s from client besides Home Assistant")
-            raise HTTPForbidden()
 
         # Host
         if supervisor_token == self.sys_plugins.cli.supervisor_token:
@@ -286,46 +205,3 @@ class SecurityMiddleware(CoreSysAttributes):
 
         _LOGGER.error("Invalid token for access %s", request.path)
         raise HTTPForbidden()
-
-    @middleware
-    async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
-        """Validate user from Core API proxy."""
-        if (
-            request[REQUEST_FROM] != self.sys_homeassistant
-            or self.sys_homeassistant.version == LANDINGPAGE
-            or version_is_new_enough(self.sys_homeassistant.version, _CORE_VERSION)
-        ):
-            return await handler(request)
-
-        authorization_index: int | None = None
-        content_type_index: int | None = None
-        user_request: bool = False
-        admin_request: bool = False
-        ingress_request: bool = False
-
-        for idx, (key, value) in enumerate(request.raw_headers):
-            if key in (b"Authorization", b"X-Hassio-Key"):
-                authorization_index = idx
-            elif key == b"Content-Type":
-                content_type_index = idx
-            elif key == b"X-Hass-User-ID":
-                user_request = True
-            elif key == b"X-Hass-Is-Admin":
-                admin_request = value == b"1"
-            elif key == b"X-Ingress-Path":
-                ingress_request = True
-
-        if (user_request or admin_request) and not ingress_request:
-            return await handler(request)
-
-        is_proxy_request = (
-            authorization_index is not None
-            and content_type_index is not None
-            and content_type_index - authorization_index == 1
-        )
-
-        if (
-            not CORE_FRONTEND.match(request.path) and is_proxy_request
-        ) or ingress_request:
-            raise HTTPBadRequest()
-        return await handler(request)
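
The `block_bad_requests` middleware removed above unquotes the path repeatedly before matching, so double-percent-encoding (e.g. `%252e%252e%252f` for `../`) cannot slip a traversal or injection past the `FILTERS` pattern. A minimal sketch of that decode-then-match step, with a trimmed-down pattern rather than the full one from the hunk:

    import re
    from urllib.parse import unquote

    EXPLOIT_FILTERS = re.compile(r"proc/self/environ|(\.\.//?)+", flags=re.IGNORECASE)

    def recursive_unquote(value: str) -> str:
        """Unquote until the value stops changing, defeating nested %-encoding."""
        unquoted = unquote(value)
        return value if unquoted == value else recursive_unquote(unquoted)

    assert recursive_unquote("%252e%252e%252f") == "../"
    assert EXPLOIT_FILTERS.search(recursive_unquote("/a/%252e%252e%252fetc/passwd"))
    assert not EXPLOIT_FILTERS.search(recursive_unquote("/supervisor/ping"))
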
@@ -1,135 +0,0 @@
-"""Inits file for supervisor mounts REST API."""
-
-from typing import Any
-
-from aiohttp import web
-import voluptuous as vol
-
-from ..const import ATTR_NAME, ATTR_STATE
-from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, APINotFound
-from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
-from ..mounts.mount import Mount
-from ..mounts.validate import SCHEMA_MOUNT_CONFIG
-from .const import ATTR_MOUNTS, ATTR_USER_PATH
-from .utils import api_process, api_validate
-
-SCHEMA_OPTIONS = vol.Schema(
-    {
-        vol.Optional(ATTR_DEFAULT_BACKUP_MOUNT): vol.Maybe(str),
-    }
-)
-
-
-class APIMounts(CoreSysAttributes):
-    """Handle REST API for mounting options."""
-
-    def _extract_mount(self, request: web.Request) -> Mount:
-        """Extract mount from request or raise."""
-        name = request.match_info.get("mount")
-        if name not in self.sys_mounts:
-            raise APINotFound(f"No mount exists with name {name}")
-        return self.sys_mounts.get(name)
-
-    @api_process
-    async def info(self, request: web.Request) -> dict[str, Any]:
-        """Return MountManager info."""
-        return {
-            ATTR_DEFAULT_BACKUP_MOUNT: self.sys_mounts.default_backup_mount.name
-            if self.sys_mounts.default_backup_mount
-            else None,
-            ATTR_MOUNTS: [
-                mount.to_dict()
-                | {
-                    ATTR_STATE: mount.state,
-                    ATTR_USER_PATH: mount.container_where.as_posix()
-                    if mount.container_where
-                    else None,
-                }
-                for mount in self.sys_mounts.mounts
-            ],
-        }
-
-    @api_process
-    async def options(self, request: web.Request) -> None:
-        """Set Mount Manager options."""
-        body = await api_validate(SCHEMA_OPTIONS, request)
-
-        if ATTR_DEFAULT_BACKUP_MOUNT in body:
-            name: str | None = body[ATTR_DEFAULT_BACKUP_MOUNT]
-            if name is None:
-                self.sys_mounts.default_backup_mount = None
-            elif (mount := self.sys_mounts.get(name)).usage != MountUsage.BACKUP:
-                raise APIError(
-                    f"Mount {name} is not used for backups, cannot use it as default backup mount"
-                )
-            else:
-                self.sys_mounts.default_backup_mount = mount
-
-        await self.sys_mounts.save_data()
-
-    @api_process
-    async def create_mount(self, request: web.Request) -> None:
-        """Create a new mount in supervisor."""
-        body = await api_validate(SCHEMA_MOUNT_CONFIG, request)
-
-        if body[ATTR_NAME] in self.sys_mounts:
-            raise APIError(f"A mount already exists with name {body[ATTR_NAME]}")
-
-        mount = Mount.from_dict(self.coresys, body)
-        await self.sys_mounts.create_mount(mount)
-
-        # If it's a backup mount, reload backups
-        if mount.usage == MountUsage.BACKUP:
-            self.sys_create_task(self.sys_backups.reload())
-
-            # If there's no default backup mount, set it to the new mount
-            if not self.sys_mounts.default_backup_mount:
-                self.sys_mounts.default_backup_mount = mount
-
-        await self.sys_mounts.save_data()
-
-    @api_process
-    async def update_mount(self, request: web.Request) -> None:
-        """Update an existing mount in supervisor."""
-        current = self._extract_mount(request)
-        name_schema = vol.Schema(
-            {vol.Optional(ATTR_NAME, default=current.name): current.name},
-            extra=vol.ALLOW_EXTRA,
-        )
-        body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)
-
-        mount = Mount.from_dict(self.coresys, body)
-        await self.sys_mounts.create_mount(mount)
-
-        # If it's a backup mount, reload backups
-        if mount.usage == MountUsage.BACKUP:
-            self.sys_create_task(self.sys_backups.reload())
-
-        # If this mount was the default backup mount and isn't for backups any more, remove it
-        elif self.sys_mounts.default_backup_mount == mount:
-            self.sys_mounts.default_backup_mount = None
-
-        await self.sys_mounts.save_data()
-
-    @api_process
-    async def delete_mount(self, request: web.Request) -> None:
-        """Delete an existing mount in supervisor."""
-        current = self._extract_mount(request)
-        mount = await self.sys_mounts.remove_mount(current.name)
-
-        # If it was a backup mount, reload backups
-        if mount.usage == MountUsage.BACKUP:
-            self.sys_create_task(self.sys_backups.reload())
-
-        await self.sys_mounts.save_data()
-
-    @api_process
-    async def reload_mount(self, request: web.Request) -> None:
-        """Reload an existing mount in supervisor."""
-        mount = self._extract_mount(request)
-        await self.sys_mounts.reload_mount(mount.name)
-
-        # If it's a backup mount, reload backups
-        if mount.usage == MountUsage.BACKUP:
-            self.sys_create_task(self.sys_backups.reload())
@@ -1,9 +1,7 @@
 """Init file for Supervisor Multicast RESTful API."""
-
 import asyncio
-from collections.abc import Awaitable
 import logging
-from typing import Any
+from typing import Any, Awaitable
 
 from aiohttp import web
 import voluptuous as vol
@@ -24,7 +22,8 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import version_tag
-from .utils import api_process, api_validate
+from .const import CONTENT_TYPE_BINARY
+from .utils import api_process, api_process_raw, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -69,6 +68,11 @@ class APIMulticast(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.multicast.update(version))
 
+    @api_process_raw(CONTENT_TYPE_BINARY)
+    def logs(self, request: web.Request) -> Awaitable[bytes]:
+        """Return Multicast Docker logs."""
+        return self.sys_plugins.multicast.logs()
+
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart Multicast plugin."""
@@ -1,11 +1,10 @@
 """REST API for network."""
-
 import asyncio
-from collections.abc import Awaitable
-from ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface
-from typing import Any
+from ipaddress import ip_address, ip_interface
+from typing import Any, Awaitable
 
 from aiohttp import web
+import attr
 import voluptuous as vol
 
 from ..const import (
@@ -31,7 +30,6 @@ from ..const import (
     ATTR_PARENT,
     ATTR_PRIMARY,
     ATTR_PSK,
-    ATTR_READY,
     ATTR_SIGNAL,
     ATTR_SSID,
     ATTR_SUPERVISOR_INTERNET,
@@ -42,34 +40,24 @@ from ..const import (
     DOCKER_NETWORK_MASK,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, APINotFound, HostNetworkNotFound
-from ..host.configuration import (
+from ..exceptions import APIError, HostNetworkNotFound
+from ..host.const import AuthMethod, InterfaceType, WifiMode
+from ..host.network import (
     AccessPoint,
     Interface,
     InterfaceMethod,
     IpConfig,
-    IpSetting,
     VlanConfig,
     WifiConfig,
 )
-from ..host.const import AuthMethod, InterfaceType, WifiMode
 from .utils import api_process, api_validate
 
-_SCHEMA_IPV4_CONFIG = vol.Schema(
+_SCHEMA_IP_CONFIG = vol.Schema(
     {
-        vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv4Interface)],
+        vol.Optional(ATTR_ADDRESS): [vol.Coerce(ip_interface)],
         vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
-        vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv4Address),
-        vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv4Address)],
-    }
-)
-
-_SCHEMA_IPV6_CONFIG = vol.Schema(
-    {
-        vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv6Interface)],
-        vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
-        vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv6Address),
-        vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv6Address)],
+        vol.Optional(ATTR_GATEWAY): vol.Coerce(ip_address),
+        vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(ip_address)],
     }
 )
 
@@ -86,22 +74,21 @@ _SCHEMA_WIFI_CONFIG = vol.Schema(
 # pylint: disable=no-value-for-parameter
 SCHEMA_UPDATE = vol.Schema(
     {
-        vol.Optional(ATTR_IPV4): _SCHEMA_IPV4_CONFIG,
-        vol.Optional(ATTR_IPV6): _SCHEMA_IPV6_CONFIG,
+        vol.Optional(ATTR_IPV4): _SCHEMA_IP_CONFIG,
+        vol.Optional(ATTR_IPV6): _SCHEMA_IP_CONFIG,
         vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG,
         vol.Optional(ATTR_ENABLED): vol.Boolean(),
     }
 )
 
 
-def ipconfig_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]:
+def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
     """Return a dict with information about ip configuration."""
     return {
-        ATTR_METHOD: setting.method,
+        ATTR_METHOD: config.method,
         ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
         ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
         ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
-        ATTR_READY: config.ready,
     }
 
 
@@ -131,9 +118,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
         ATTR_ENABLED: interface.enabled,
         ATTR_CONNECTED: interface.connected,
         ATTR_PRIMARY: interface.primary,
-        ATTR_MAC: interface.mac,
-        ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting),
-        ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting),
+        ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
+        ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
         ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
         ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
     }
@@ -167,7 +153,7 @@ class APINetwork(CoreSysAttributes):
         except HostNetworkNotFound:
             pass
 
-        raise APINotFound(f"Interface {name} does not exist") from None
+        raise APIError(f"Interface {name} does not exist") from None
 
     @api_process
     async def info(self, request: web.Request) -> dict[str, Any]:
@@ -207,26 +193,22 @@ class APINetwork(CoreSysAttributes):
         # Apply config
         for key, config in body.items():
             if key == ATTR_IPV4:
-                interface.ipv4setting = IpSetting(
-                    config.get(ATTR_METHOD, InterfaceMethod.STATIC),
-                    config.get(ATTR_ADDRESS, []),
-                    config.get(ATTR_GATEWAY),
-                    config.get(ATTR_NAMESERVERS, []),
+                interface.ipv4 = attr.evolve(
+                    interface.ipv4 or IpConfig(InterfaceMethod.STATIC, [], None, []),
+                    **config,
                 )
             elif key == ATTR_IPV6:
-                interface.ipv6setting = IpSetting(
-                    config.get(ATTR_METHOD, InterfaceMethod.STATIC),
-                    config.get(ATTR_ADDRESS, []),
-                    config.get(ATTR_GATEWAY),
-                    config.get(ATTR_NAMESERVERS, []),
+                interface.ipv6 = attr.evolve(
+                    interface.ipv6 or IpConfig(InterfaceMethod.STATIC, [], None, []),
+                    **config,
                 )
             elif key == ATTR_WIFI:
-                interface.wifi = WifiConfig(
-                    config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE),
-                    config.get(ATTR_SSID, ""),
-                    config.get(ATTR_AUTH, AuthMethod.OPEN),
-                    config.get(ATTR_PSK, None),
-                    None,
+                interface.wifi = attr.evolve(
+                    interface.wifi
+                    or WifiConfig(
+                        WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
+                    ),
+                    **config,
                 )
             elif key == ATTR_ENABLED:
                 interface.enabled = config
@@ -236,9 +218,7 @@ class APINetwork(CoreSysAttributes):
     @api_process
     def reload(self, request: web.Request) -> Awaitable[None]:
         """Reload network data."""
-        return asyncio.shield(
-            self.sys_host.network.update(force_connectivity_check=True)
-        )
+        return asyncio.shield(self.sys_host.network.update())
 
     @api_process
     async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]:
@@ -268,18 +248,18 @@ class APINetwork(CoreSysAttributes):
 
         vlan_config = VlanConfig(vlan, interface.name)
 
-        ipv4_setting = None
+        ipv4_config = None
         if ATTR_IPV4 in body:
-            ipv4_setting = IpSetting(
+            ipv4_config = IpConfig(
                 body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
                 body[ATTR_IPV4].get(ATTR_ADDRESS, []),
                 body[ATTR_IPV4].get(ATTR_GATEWAY, None),
                 body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
             )
 
-        ipv6_setting = None
+        ipv6_config = None
         if ATTR_IPV6 in body:
-            ipv6_setting = IpSetting(
+            ipv6_config = IpConfig(
                 body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
                 body[ATTR_IPV6].get(ATTR_ADDRESS, []),
                 body[ATTR_IPV6].get(ATTR_GATEWAY, None),
@@ -287,17 +267,13 @@ class APINetwork(CoreSysAttributes):
             )
 
         vlan_interface = Interface(
-            "",
-            "",
             "",
             True,
             True,
             False,
             InterfaceType.VLAN,
-            None,
-            ipv4_setting,
-            None,
-            ipv6_setting,
+            ipv4_config,
+            ipv6_config,
             None,
             vlan_config,
         )
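
The schema change above is the crux of this branch: a single `_SCHEMA_IP_CONFIG` built on `ip_address`/`ip_interface` accepts both address families, whereas current main validates IPv4 and IPv6 payloads separately with family-typed coercions. A quick voluptuous check of the difference (schema shapes taken from the hunk, reduced to just the address list):

    from ipaddress import IPv4Interface, ip_interface
    import voluptuous as vol

    shared_schema = vol.Schema([vol.Coerce(ip_interface)])    # ipv6-branch style
    v4_only_schema = vol.Schema([vol.Coerce(IPv4Interface)])  # current main style

    shared_schema(["192.168.1.10/24", "2001:db8::1/64"])  # both families pass
    v4_only_schema(["192.168.1.10/24"])                   # passes
    try:
        v4_only_schema(["2001:db8::1/64"])                # family mismatch
    except vol.Invalid:
        pass
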
@@ -1,5 +1,4 @@
 """Init file for Supervisor Observer RESTful API."""
-
 import asyncio
 import logging
 from typing import Any
@@ -1,71 +1,29 @@
|
|||||||
"""Init file for Supervisor HassOS RESTful API."""
|
"""Init file for Supervisor HassOS RESTful API."""
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
from collections.abc import Awaitable
|
|
||||||
import logging
|
import logging
|
||||||
from typing import Any
|
from pathlib import Path
|
||||||
|
from typing import Any, Awaitable
|
||||||
|
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_ACTIVITY_LED,
|
|
||||||
ATTR_BOARD,
|
ATTR_BOARD,
|
||||||
ATTR_BOOT,
|
ATTR_BOOT,
|
||||||
ATTR_DEVICES,
|
ATTR_DEVICES,
|
||||||
ATTR_DISK_LED,
|
|
||||||
ATTR_HEARTBEAT_LED,
|
|
||||||
ATTR_ID,
|
|
||||||
ATTR_NAME,
|
|
||||||
ATTR_POWER_LED,
|
|
||||||
ATTR_SERIAL,
|
|
||||||
ATTR_SIZE,
|
|
||||||
ATTR_STATE,
|
|
||||||
ATTR_UPDATE_AVAILABLE,
|
ATTR_UPDATE_AVAILABLE,
|
||||||
ATTR_VERSION,
|
ATTR_VERSION,
|
||||||
ATTR_VERSION_LATEST,
|
ATTR_VERSION_LATEST,
|
||||||
)
|
)
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
from ..exceptions import BoardInvalidError
|
|
||||||
from ..resolution.const import ContextType, IssueType, SuggestionType
|
|
||||||
from ..validate import version_tag
|
from ..validate import version_tag
|
||||||
from .const import (
|
from .const import ATTR_DATA_DISK, ATTR_DEVICE
|
||||||
ATTR_BOOT_SLOT,
|
|
||||||
ATTR_BOOT_SLOTS,
|
|
||||||
ATTR_DATA_DISK,
|
|
||||||
ATTR_DEV_PATH,
|
|
||||||
ATTR_DEVICE,
|
|
||||||
ATTR_DISKS,
|
|
||||||
ATTR_MODEL,
|
|
||||||
ATTR_STATUS,
|
|
||||||
ATTR_SYSTEM_HEALTH_LED,
|
|
||||||
ATTR_VENDOR,
|
|
||||||
BootSlot,
|
|
||||||
)
|
|
||||||
from .utils import api_process, api_validate
|
from .utils import api_process, api_validate
|
||||||
|
|
||||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
|
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
|
||||||
SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)})
|
SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): vol.All(str, vol.Coerce(Path))})
|
||||||
SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})
|
|
||||||
|
|
||||||
SCHEMA_YELLOW_OPTIONS = vol.Schema(
|
|
||||||
{
|
|
||||||
vol.Optional(ATTR_DISK_LED): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_HEARTBEAT_LED): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_POWER_LED): vol.Boolean(),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
SCHEMA_GREEN_OPTIONS = vol.Schema(
|
|
||||||
{
|
|
||||||
vol.Optional(ATTR_ACTIVITY_LED): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_POWER_LED): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
# pylint: enable=no-value-for-parameter
|
|
||||||
|
|
||||||
|
|
||||||
class APIOS(CoreSysAttributes):
|
class APIOS(CoreSysAttributes):
|
||||||
@@ -80,16 +38,7 @@ class APIOS(CoreSysAttributes):
|
|||||||
             ATTR_UPDATE_AVAILABLE: self.sys_os.need_update,
             ATTR_BOARD: self.sys_os.board,
             ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
-            ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id,
-            ATTR_BOOT_SLOTS: {
-                slot.bootname: {
-                    ATTR_STATE: slot.state,
-                    ATTR_STATUS: slot.boot_status,
-                    ATTR_VERSION: slot.bundle_version,
-                }
-                for slot in self.sys_os.slots
-                if slot.bootname
-            },
+            ATTR_DATA_DISK: self.sys_os.datadisk.disk_used,
         }
 
     @api_process
@@ -112,103 +61,9 @@ class APIOS(CoreSysAttributes):
         await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))
 
-    @api_process
-    def wipe_data(self, request: web.Request) -> Awaitable[None]:
-        """Trigger data disk wipe on Host."""
-        return asyncio.shield(self.sys_os.datadisk.wipe_disk())
-
-    @api_process
-    async def set_boot_slot(self, request: web.Request) -> None:
-        """Change the active boot slot and reboot into it."""
-        body = await api_validate(SCHEMA_SET_BOOT_SLOT, request)
-        await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT]))
-
     @api_process
     async def list_data(self, request: web.Request) -> dict[str, Any]:
         """Return possible data targets."""
         return {
-            ATTR_DEVICES: [disk.id for disk in self.sys_os.datadisk.available_disks],
-            ATTR_DISKS: [
-                {
-                    ATTR_NAME: disk.name,
-                    ATTR_VENDOR: disk.vendor,
-                    ATTR_MODEL: disk.model,
-                    ATTR_SERIAL: disk.serial,
-                    ATTR_SIZE: disk.size,
-                    ATTR_ID: disk.id,
-                    ATTR_DEV_PATH: disk.device_path.as_posix(),
-                }
-                for disk in self.sys_os.datadisk.available_disks
-            ],
+            ATTR_DEVICES: self.sys_os.datadisk.available_disks,
         }
-
-    @api_process
-    async def boards_green_info(self, request: web.Request) -> dict[str, Any]:
-        """Get green board settings."""
-        return {
-            ATTR_ACTIVITY_LED: self.sys_dbus.agent.board.green.activity_led,
-            ATTR_POWER_LED: self.sys_dbus.agent.board.green.power_led,
-            ATTR_SYSTEM_HEALTH_LED: self.sys_dbus.agent.board.green.user_led,
-        }
-
-    @api_process
-    async def boards_green_options(self, request: web.Request) -> None:
-        """Update green board settings."""
-        body = await api_validate(SCHEMA_GREEN_OPTIONS, request)
-
-        if ATTR_ACTIVITY_LED in body:
-            await self.sys_dbus.agent.board.green.set_activity_led(
-                body[ATTR_ACTIVITY_LED]
-            )
-
-        if ATTR_POWER_LED in body:
-            await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED])
-
-        if ATTR_SYSTEM_HEALTH_LED in body:
-            await self.sys_dbus.agent.board.green.set_user_led(
-                body[ATTR_SYSTEM_HEALTH_LED]
-            )
-
-        await self.sys_dbus.agent.board.green.save_data()
-
-    @api_process
-    async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
-        """Get yellow board settings."""
-        return {
-            ATTR_DISK_LED: self.sys_dbus.agent.board.yellow.disk_led,
-            ATTR_HEARTBEAT_LED: self.sys_dbus.agent.board.yellow.heartbeat_led,
-            ATTR_POWER_LED: self.sys_dbus.agent.board.yellow.power_led,
-        }
-
-    @api_process
-    async def boards_yellow_options(self, request: web.Request) -> None:
-        """Update yellow board settings."""
-        body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)
-
-        if ATTR_DISK_LED in body:
-            await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED])
-
-        if ATTR_HEARTBEAT_LED in body:
-            await self.sys_dbus.agent.board.yellow.set_heartbeat_led(
-                body[ATTR_HEARTBEAT_LED]
-            )
-
-        if ATTR_POWER_LED in body:
-            await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])
-
-        await self.sys_dbus.agent.board.yellow.save_data()
-        self.sys_resolution.create_issue(
-            IssueType.REBOOT_REQUIRED,
-            ContextType.SYSTEM,
-            suggestions=[SuggestionType.EXECUTE_REBOOT],
-        )
-
-    @api_process
-    async def boards_other_info(self, request: web.Request) -> dict[str, Any]:
-        """Empty success return if board is in use, error otherwise."""
-        if request.match_info["board"] != self.sys_os.board:
-            raise BoardInvalidError(
-                f"{request.match_info['board']} board is not in use", _LOGGER.error
-            )
-
-        return {}
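For orientation, the list_data change above alters the payload shape: the newer side returns disk ids under "devices" plus a detailed "disks" array, while the older side returns the available disk objects directly. Below is a minimal sketch of how a frontend client might read the newer shape; the /api/hassio/os/datadisk/list path, the bearer-token header, and the {result, data} envelope are assumptions about the proxied Supervisor API, not something this diff shows.

async function listDataDisks(token) {
  // Query the Supervisor's data-disk listing through the Home Assistant
  // proxy (assumed path) and unwrap the assumed {result, data} envelope.
  const resp = await fetch("/api/hassio/os/datadisk/list", {
    headers: { Authorization: `Bearer ${token}` },
  });
  const { data } = await resp.json();
  // Newer payload: data.devices is a list of disk ids; data.disks carries
  // name, vendor, model, serial, size, id and dev_path per disk.
  return data.devices;
}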
@@ -1 +1,16 @@
-!function(){function d(d){var e=document.createElement("script");e.src=d,document.body.appendChild(e)}if(/Edge?\/(12[2-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Firefox\/(12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Chrom(ium|e)\/(109|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|(Maci|X1{2}).+ Version\/(17\.([4-9]|\d{2,})|(1[89]|[2-9]\d|\d{3,})\.\d+)([,.]\d+|)( \(\w+\)|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(10[89]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(15[._]([6-9]|\d{2,})|(1[6-9]|[2-9]\d|\d{3,})[._]\d+)([._]\d+|)|Android:?[ /-](12[3-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})(\.\d+|)(\.\d+|)|Mobile Safari.+OPR\/([89]\d|\d{3,})\.\d+\.\d+|Android.+Firefox\/(12[4-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+Chrom(ium|e)\/(12[3-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|SamsungBrowser\/(2[4-9]|[3-9]\d|\d{3,})\.\d+|Home As{2}istant\/[\d.]+ \(.+; macOS (1[2-9]|[2-9]\d|\d{3,})\.\d+(\.\d+)?\)/.test(navigator.userAgent))try{new Function("import('/api/hassio/app/frontend_latest/entrypoint.9ac99222ee42fbb3.js')")()}catch(e){d("/api/hassio/app/frontend_es5/entrypoint.85ccafe1fda9d9a5.js")}else d("/api/hassio/app/frontend_es5/entrypoint.85ccafe1fda9d9a5.js")}()
+
+function loadES5() {
+  var el = document.createElement('script');
+  el.src = '/api/hassio/app/frontend_es5/entrypoint.75b60951.js';
+  document.body.appendChild(el);
+}
+
+if (/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent)) {
+  loadES5();
+} else {
+  try {
+    new Function("import('/api/hassio/app/frontend_latest/entrypoint.f358ba39.js')")();
+  } catch (err) {
+    loadES5();
+  }
+}
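Both sides of the entrypoint diff above implement the same fallback: try the modern frontend_latest bundle, and fall back to the transpiled frontend_es5 bundle when the engine is too old. A minimal sketch of the detection trick, with un-hashed placeholder bundle paths (the real entrypoints are content-hashed, as seen above):

function loadScript(src) {
  // Append a classic <script> tag; this works on every engine.
  var el = document.createElement("script");
  el.src = src;
  document.body.appendChild(el);
}

try {
  // Dynamic import() is a syntax feature: on engines that cannot parse it,
  // constructing the function throws a catchable SyntaxError, whereas a
  // plain inline import() would be a parse error for the whole script.
  new Function("import('/api/hassio/app/frontend_latest/entrypoint.js')")();
} catch (err) {
  loadScript("/api/hassio/app/frontend_es5/entrypoint.js");
}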
[Remaining files: regenerated frontend build artifacts under supervisor/api/panel/ (hashed frontend_es5 and frontend_latest JS bundles, matching .gz files, source maps, and bundled license texts). The diff viewer suppressed these as binary or over-long, and some files were not shown because too many files changed in this diff.]