Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-09-08 04:36:21 +00:00)
Compare commits
171 Commits
Commits in this comparison (SHA1 only):

a00e81c03f
776e6bb418
b31fca656e
fa783a0d2c
96c0fbaf10
24f7801ddc
8e83e007e9
d0db466e67
3010bd4eb6
069bed8815
d2088ae5f8
0ca5a241bb
dff32a8e84
4a20344652
98b969ef06
c8cb8aecf7
73e8875018
02aed9c084
89148f8fff
6bde527f5c
d62aabc01b
82299a3799
c02f30dd7e
e91983adb4
ff88359429
5a60d5cbe8
2b41ffe019
1c23e26f93
3d555f951d
6d39b4d7cd
4fe5d09f01
e52af3bfb4
0467b33cd5
14167f6e13
7a1aba6f81
920f7f2ece
06fadbd70f
d4f486864f
d3a21303d9
e1cbfdd84b
87170a4497
ae6f8bd345
b9496e0972
c36a6dcd65
19ca836b78
8a6ea7ab50
6721b8f265
9393521f98
398b24e0ab
374bcf8073
7e3859e2f5
490ec0d462
15bf1ee50e
6376d92a0d
10230b0b4c
2495cda5ec
ae8ddca040
0212d027fb
a3096153ab
7434ca9e99
4ac7f7dcf0
e9f5b13aa5
1fbb6d46ea
8dbfea75b1
3b3840c087
a21353909d
5497ed885a
39baea759a
80ddb1d262
e24987a610
9e5c276e3b
c33d31996d
aa1f08fe8a
d78689554a
5bee1d851c
ddb8eef4d1
da513e7347
4279d7fd16
934eab2e8c
2a31edc768
fcdd66dc6e
a65d3222b9
36179596a0
c083c850c1
ff903d7b5a
dd603e1ec2
a2f06b1553
8115d2b3d3
4f97bb9e0b
84d24a2c4d
b709061656
cd9034b3f1
25d324c73a
3a834d1a73
e9fecb817d
56e70d7ec4
2e73a85aa9
1e119e9c03
6f6e5c97df
6ef99974cf
8984b9aef6
63e08b15bc
319b2b5d4c
bae7bb8ce4
0b44df366c
f253c797af
0a8b1c2797
3b45fb417b
2a2d92e3c5
a320e42ed5
fdef712e01
5717ac19d7
33d7d76fee
73bdaa623c
8ca8f59a0b
745af3c039
5d17e1011a
826464c41b
a643df8cac
24ded99286
6646eee504
f55c10914e
b1e768f69e
4702f8bd5e
69959b2c97
9d6f4f5392
36b9a609bf
36ae0c82b6
e11011ee51
9125211a57
3a4ef6ceb3
ca82993278
0925af91e3
80bc32243c
f0d232880d
7c790dbbd9
899b17e992
d1b4521290
9bb4feef29
4bcdc98a31
26f8c1df92
a481ad73f3
e4ac17fea6
bcd940e95b
5365aa4466
a0d106529c
bf1a9ec42d
fc5d97562f
f5c171e44f
a3c3f15806
ef58a219ec
6708fe36e3
e02fa2824c
a20f927082
6d71e3fe81
4056fcd75d
1e723cf0e3
ce3f670597
ce3d3d58ec
a92cab48e0
ee76317392
380ca13be1
93f4c5e207
e438858da0
428a4dd849
39cc8aaa13
39a62864de
71a162a871
05d7eff09a
7b8ad0782d
df3e9e3a5e
@@ -33,10 +33,18 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
containerd.io \
&& rm -rf /var/lib/apt/lists/*

# Install tools
RUN apt-get update && apt-get install -y --no-install-recommends \
jq \
dbus \
network-manager \
libpulse0 \
&& rm -rf /var/lib/apt/lists/*

# Install Python dependencies from requirements.txt if it exists
COPY requirements.txt requirements_tests.txt ./
RUN pip3 install -r requirements.txt -r requirements_tests.txt \
&& pip3 install black tox \
&& pip3 install tox \
&& rm -f requirements.txt requirements_tests.txt

# Set the default shell to bash instead of sh
@@ -1,31 +1,24 @@
// See https://aka.ms/vscode-remote/devcontainer.json for format details.
{
  "name": "Hass.io dev",
  "context": "..",
  "dockerFile": "Dockerfile",
  "appPort": "9123:8123",
  "runArgs": [
    "-e",
    "GIT_EDITOR=code --wait",
    "--privileged"
  ],
  "extensions": [
    "ms-python.python",
    "visualstudioexptteam.vscodeintellicode",
    "esbenp.prettier-vscode"
  ],
  "settings": {
    "python.pythonPath": "/usr/local/bin/python",
    "python.linting.pylintEnabled": true,
    "python.linting.enabled": true,
    "python.formatting.provider": "black",
    "python.formatting.blackArgs": [
      "--target-version",
      "py37"
    ],
    "editor.formatOnPaste": false,
    "editor.formatOnSave": true,
    "editor.formatOnType": true,
    "files.trimTrailingWhitespace": true
  }
}
  "name": "Supervisor dev",
  "context": "..",
  "dockerFile": "Dockerfile",
  "appPort": "9123:8123",
  "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
  "extensions": [
    "ms-python.python",
    "visualstudioexptteam.vscodeintellicode",
    "esbenp.prettier-vscode"
  ],
  "settings": {
    "python.pythonPath": "/usr/local/bin/python",
    "python.linting.pylintEnabled": true,
    "python.linting.enabled": true,
    "python.formatting.provider": "black",
    "python.formatting.blackArgs": ["--target-version", "py37"],
    "editor.formatOnPaste": false,
    "editor.formatOnSave": true,
    "editor.formatOnType": true,
    "files.trimTrailingWhitespace": true
  }
}
@@ -14,10 +14,10 @@

# virtualenv
venv/

# HA
home-assistant-polymer/*
misc/*
script/*
# Data
home-assistant-polymer/
script/
tests/

# Test ENV
data/
27 .github/lock.yml vendored Normal file
@@ -0,0 +1,27 @@
# Configuration for Lock Threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 1

# Skip issues and pull requests created before a given timestamp. Timestamp must
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
skipCreatedBefore: 2020-01-01

# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
exemptLabels: []

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: false

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: false

# Limit to only `issues` or `pulls`
only: pulls

# Optionally, specify configuration settings just for `issues` or `pulls`
issues:
  daysUntilLock: 30
180 .vscode/tasks.json vendored
@@ -1,92 +1,90 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Run Testenv",
|
||||
"type": "shell",
|
||||
"command": "./scripts/test_env.sh",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true,
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Run Testenv CLI",
|
||||
"type": "shell",
|
||||
"command": "docker run --rm -ti -v /etc/machine-id:/etc/machine-id --network=hassio --add-host hassio:172.30.32.2 homeassistant/amd64-hassio-cli:dev",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true,
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Update UI",
|
||||
"type": "shell",
|
||||
"command": "./scripts/update-frontend.sh",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Pytest",
|
||||
"type": "shell",
|
||||
"command": "pytest --timeout=10 tests",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true,
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Flake8",
|
||||
"type": "shell",
|
||||
"command": "flake8 hassio tests",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true,
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Pylint",
|
||||
"type": "shell",
|
||||
"command": "pylint hassio",
|
||||
"dependsOn": [
|
||||
"Install all Requirements"
|
||||
],
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true,
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
}
|
||||
]
|
||||
}
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Run Testenv",
|
||||
"type": "shell",
|
||||
"command": "./scripts/test_env.sh",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Run Testenv CLI",
|
||||
"type": "shell",
|
||||
"command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Update UI",
|
||||
"type": "shell",
|
||||
"command": "./scripts/update-frontend.sh",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Pytest",
|
||||
"type": "shell",
|
||||
"command": "pytest --timeout=10 tests",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Flake8",
|
||||
"type": "shell",
|
||||
"command": "flake8 hassio tests",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Pylint",
|
||||
"type": "shell",
|
||||
"command": "pylint hassio",
|
||||
"dependsOn": ["Install all Requirements"],
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
}
|
||||
]
|
||||
}
|
||||
|
30 Dockerfile
@@ -3,15 +3,15 @@ FROM $BUILD_FROM

# Install base
RUN apk add --no-cache \
openssl \
libffi \
musl \
git \
socat \
glib \
libstdc++ \
eudev \
eudev-libs
eudev-libs \
git \
glib \
libffi \
libpulse \
musl \
openssl \
socat

ARG BUILD_ARCH
WORKDIR /usr/src
@@ -24,15 +24,11 @@ RUN export MAKEFLAGS="-j$(nproc)" \
-r ./requirements.txt \
&& rm -f requirements.txt

# Install HassIO
COPY . hassio
RUN pip3 install --no-cache-dir -e ./hassio \
&& python3 -m compileall ./hassio/hassio
# Install Home Assistant Supervisor
COPY . supervisor
RUN pip3 install --no-cache-dir -e ./supervisor \
&& python3 -m compileall ./supervisor/supervisor


# Initialize udev daemon, handle CMD
COPY entry.sh /bin/
ENTRYPOINT ["/bin/entry.sh"]

WORKDIR /
CMD [ "python3", "-m", "hassio" ]
COPY rootfs /
@@ -1,3 +1,3 @@
include LICENSE.md
graft hassio
graft supervisor
recursive-exclude * *.py[co]
@@ -1,6 +1,6 @@
[](https://dev.azure.com/home-assistant/Hass.io/_build/latest?definitionId=2&branchName=dev)

# Hass.io
# Home Assistant Supervisor

## First private cloud solution for home automation

@@ -10,8 +10,6 @@ communicates with the Supervisor. The Supervisor provides an API to manage the
installation. This includes changing network settings or installing
and updating software.



## Installation

Installation instructions can be found at <https://home-assistant.io/hassio>.
52 azure-pipelines-ci.yml Normal file
@@ -0,0 +1,52 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- master
|
||||
- dev
|
||||
pr:
|
||||
- dev
|
||||
variables:
|
||||
- name: versionHadolint
|
||||
value: "v1.16.3"
|
||||
|
||||
jobs:
|
||||
- job: "Tox"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- script: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libpulse0 libudev1
|
||||
displayName: "Install Host library"
|
||||
- task: UsePythonVersion@0
|
||||
displayName: "Use Python 3.7"
|
||||
inputs:
|
||||
versionSpec: "3.7"
|
||||
- script: pip install tox
|
||||
displayName: "Install Tox"
|
||||
- script: tox
|
||||
displayName: "Run Tox"
|
||||
- job: "JQ"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- script: sudo apt-get install -y jq
|
||||
displayName: "Install JQ"
|
||||
- bash: |
|
||||
shopt -s globstar
|
||||
cat **/*.json | jq '.'
|
||||
displayName: "Run JQ"
|
||||
- job: "Hadolint"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- script: sudo docker pull hadolint/hadolint:$(versionHadolint)
|
||||
displayName: "Install Hadolint"
|
||||
- script: |
|
||||
sudo docker run --rm -i \
|
||||
-v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
|
||||
hadolint/hadolint:$(versionHadolint) < Dockerfile
|
||||
displayName: "Run Hadolint"
|
53 azure-pipelines-release.yml Normal file
@@ -0,0 +1,53 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- dev
|
||||
tags:
|
||||
include:
|
||||
- "*"
|
||||
pr: none
|
||||
variables:
|
||||
- name: versionBuilder
|
||||
value: "7.0"
|
||||
- group: docker
|
||||
|
||||
jobs:
|
||||
- job: "VersionValidate"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
displayName: "Use Python 3.7"
|
||||
inputs:
|
||||
versionSpec: "3.7"
|
||||
- script: |
|
||||
setup_version="$(python setup.py -V)"
|
||||
branch_version="$(Build.SourceBranchName)"
|
||||
|
||||
if [ "${branch_version}" == "dev" ]; then
|
||||
exit 0
|
||||
elif [ "${setup_version}" != "${branch_version}" ]; then
|
||||
echo "Version of tag ${branch_version} don't match with ${setup_version}!"
|
||||
exit 1
|
||||
fi
|
||||
displayName: "Check version of branch/tag"
|
||||
- job: "Release"
|
||||
dependsOn:
|
||||
- "VersionValidate"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
|
||||
displayName: "Docker hub login"
|
||||
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
|
||||
displayName: "Install Builder"
|
||||
- script: |
|
||||
sudo docker run --rm --privileged \
|
||||
-v ~/.docker:/root/.docker \
|
||||
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
|
||||
homeassistant/amd64-builder:$(versionBuilder) \
|
||||
--generic $(Build.SourceBranchName) --all -t /data
|
||||
displayName: "Build Release"
|
26 azure-pipelines-wheels.yml Normal file
@@ -0,0 +1,26 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- dev
|
||||
pr: none
|
||||
variables:
|
||||
- name: versionWheels
|
||||
value: '1.6.1-3.7-alpine3.11'
|
||||
resources:
|
||||
repositories:
|
||||
- repository: azure
|
||||
type: github
|
||||
name: 'home-assistant/ci-azure'
|
||||
endpoint: 'home-assistant'
|
||||
|
||||
|
||||
jobs:
|
||||
- template: templates/azp-job-wheels.yaml@azure
|
||||
parameters:
|
||||
builderVersion: '$(versionWheels)'
|
||||
builderApk: 'build-base;libffi-dev;openssl-dev'
|
||||
builderPip: 'Cython'
|
||||
wheelsRequirement: 'requirements.txt'
|
@@ -1,168 +0,0 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- master
|
||||
- dev
|
||||
tags:
|
||||
include:
|
||||
- '*'
|
||||
exclude:
|
||||
- untagged*
|
||||
pr:
|
||||
- dev
|
||||
variables:
|
||||
- name: basePythonTag
|
||||
value: '3.7-alpine3.10'
|
||||
- name: versionHadolint
|
||||
value: 'v1.16.3'
|
||||
- name: versionBuilder
|
||||
value: '4.4'
|
||||
- name: versionWheels
|
||||
value: '1.0-3.7-alpine3.10'
|
||||
- group: docker
|
||||
- group: wheels
|
||||
|
||||
|
||||
stages:
|
||||
|
||||
- stage: 'Test'
|
||||
jobs:
|
||||
- job: 'Tox'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
displayName: 'Use Python 3.7'
|
||||
inputs:
|
||||
versionSpec: '3.7'
|
||||
- script: pip install tox
|
||||
displayName: 'Install Tox'
|
||||
- script: tox
|
||||
displayName: 'Run Tox'
|
||||
- job: 'Black'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
displayName: 'Use Python $(python.version)'
|
||||
inputs:
|
||||
versionSpec: '3.7'
|
||||
- script: pip install black
|
||||
displayName: 'Install black'
|
||||
- script: black --target-version py37 --check hassio tests
|
||||
displayName: 'Run Black'
|
||||
- job: 'JQ'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
steps:
|
||||
- script: sudo apt-get install -y jq
|
||||
displayName: 'Install JQ'
|
||||
- bash: |
|
||||
shopt -s globstar
|
||||
cat **/*.json | jq '.'
|
||||
displayName: 'Run JQ'
|
||||
- job: 'Hadolint'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
steps:
|
||||
- script: sudo docker pull hadolint/hadolint:$(versionHadolint)
|
||||
displayName: 'Install Hadolint'
|
||||
- script: |
|
||||
sudo docker run --rm -i \
|
||||
-v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
|
||||
hadolint/hadolint:$(versionHadolint) < Dockerfile
|
||||
displayName: 'Run Hadolint'
|
||||
|
||||
- stage: 'Wheels'
|
||||
jobs:
|
||||
- job: 'Wheels'
|
||||
condition: eq(variables['Build.SourceBranchName'], 'dev')
|
||||
timeoutInMinutes: 360
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
strategy:
|
||||
maxParallel: 5
|
||||
matrix:
|
||||
amd64:
|
||||
buildArch: 'amd64'
|
||||
i386:
|
||||
buildArch: 'i386'
|
||||
armhf:
|
||||
buildArch: 'armhf'
|
||||
armv7:
|
||||
buildArch: 'armv7'
|
||||
aarch64:
|
||||
buildArch: 'aarch64'
|
||||
steps:
|
||||
- script: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends \
|
||||
qemu-user-static \
|
||||
binfmt-support \
|
||||
curl
|
||||
|
||||
sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
|
||||
sudo update-binfmts --enable qemu-arm
|
||||
sudo update-binfmts --enable qemu-aarch64
|
||||
displayName: 'Initial cross build'
|
||||
- script: |
|
||||
mkdir -p .ssh
|
||||
echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
|
||||
ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
|
||||
chmod 600 .ssh/*
|
||||
displayName: 'Install ssh key'
|
||||
- script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
|
||||
displayName: 'Install wheels builder'
|
||||
- script: |
|
||||
sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
|
||||
homeassistant/$(buildArch)-wheels:$(versionWheels) \
|
||||
--apk "build-base;libffi-dev;openssl-dev" \
|
||||
--index $(wheelsIndex) \
|
||||
--requirement requirements.txt \
|
||||
--upload rsync \
|
||||
--remote wheels@$(wheelsHost):/opt/wheels
|
||||
displayName: 'Run wheels build'
|
||||
|
||||
- stage: 'Deploy'
|
||||
jobs:
|
||||
- job: 'VersionValidate'
|
||||
condition: or(startsWith(variables['Build.SourceBranch'], 'refs/tags'), eq(variables['Build.SourceBranchName'], 'dev'))
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
displayName: 'Use Python 3.7'
|
||||
inputs:
|
||||
versionSpec: '3.7'
|
||||
- script: |
|
||||
setup_version="$(python setup.py -V)"
|
||||
branch_version="$(Build.SourceBranchName)"
|
||||
|
||||
if [ "${branch_version}" == "dev" ]; then
|
||||
exit 0
|
||||
elif [ "${setup_version}" != "${branch_version}" ]; then
|
||||
echo "Version of tag ${branch_version} don't match with ${setup_version}!"
|
||||
exit 1
|
||||
fi
|
||||
displayName: 'Check version of branch/tag'
|
||||
- job: 'Release'
|
||||
dependsOn:
|
||||
- 'VersionValidate'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
steps:
|
||||
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
|
||||
displayName: 'Docker hub login'
|
||||
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
|
||||
displayName: 'Install Builder'
|
||||
- script: |
|
||||
sudo docker run --rm --privileged \
|
||||
-v ~/.docker:/root/.docker \
|
||||
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
|
||||
homeassistant/amd64-builder:$(versionBuilder) \
|
||||
--supervisor $(basePythonTag) --version $(Build.SourceBranchName) \
|
||||
--all -t /data --docker-hub homeassistant
|
||||
displayName: 'Build Release'
|
13 build.json Normal file
@@ -0,0 +1,13 @@
{
  "image": "homeassistant/{arch}-hassio-supervisor",
  "build_from": {
    "aarch64": "homeassistant/aarch64-base-python:3.7-alpine3.11",
    "armhf": "homeassistant/armhf-base-python:3.7-alpine3.11",
    "armv7": "homeassistant/armv7-base-python:3.7-alpine3.11",
    "amd64": "homeassistant/amd64-base-python:3.7-alpine3.11",
    "i386": "homeassistant/i386-base-python:3.7-alpine3.11"
  },
  "labels": {
    "io.hass.type": "supervisor"
  }
}
13 entry.sh
@@ -1,13 +0,0 @@
#!/bin/bash
set -e

udevd --daemon
udevadm trigger

if CMD="$(command -v "$1")"; then
  shift
  exec "$CMD" "$@"
else
  echo "Command not found: $1"
  exit 1
fi
@@ -1 +0,0 @@
"""Init file for Hass.io."""
8 frontend panel asset files not shown (3 binary files; 5 diffs suppressed because one or more lines are too long).
@@ -1 +0,0 @@
|
||||
{"version":3,"sources":["webpack:///./src/ingress-view/hassio-ingress-view.ts"],"names":["customElement","HassioIngressView","property","this","_addon","html","_templateObject2","name","ingress_url","_templateObject","changedProps","_get","_getPrototypeOf","prototype","call","has","addon","route","path","substr","oldRoute","get","oldAddon","undefined","_fetchData","_callee","addonSlug","_ref","_ref2","regeneratorRuntime","wrap","_context","prev","next","Promise","all","fetchHassioAddonInfo","hass","catch","Error","createHassioSession","sent","_slicedToArray","ingress","t0","console","error","alert","message","history","back","stop","css","_templateObject3","LitElement"],"mappings":"4gSAmBCA,YAAc,0CACTC,smBACHC,kEACAA,mEACAA,4EAED,WACE,OAAKC,KAAKC,OAMHC,YAAPC,IAC0BH,KAAKC,OAAOG,KACpBJ,KAAKC,OAAOI,aAPrBH,YAAPI,0CAYJ,SAAkBC,GAGhB,GAFAC,EAAAC,EApBEX,EAoBFY,WAAA,eAAAV,MAAAW,KAAAX,KAAmBO,GAEdA,EAAaK,IAAI,SAAtB,CAIA,IAAMC,EAAQb,KAAKc,MAAMC,KAAKC,OAAO,GAE/BC,EAAWV,EAAaW,IAAI,SAC5BC,EAAWF,EAAWA,EAASF,KAAKC,OAAO,QAAKI,EAElDP,GAASA,IAAUM,GACrBnB,KAAKqB,WAAWR,0FAIpB,SAAAS,EAAyBC,GAAzB,IAAAC,EAAAC,EAAAZ,EAAA,OAAAa,mBAAAC,KAAA,SAAAC,GAAA,cAAAA,EAAAC,KAAAD,EAAAE,MAAA,cAAAF,EAAAC,KAAA,EAAAD,EAAAE,KAAA,EAE0BC,QAAQC,IAAI,CAChCC,YAAqBjC,KAAKkC,KAAMX,GAAWY,MAAM,WAC/C,MAAM,IAAIC,MAAM,iCAElBC,YAAoBrC,KAAKkC,MAAMC,MAAM,WACnC,MAAM,IAAIC,MAAM,2CAPxB,UAAAZ,EAAAI,EAAAU,KAAAb,EAAAc,EAAAf,EAAA,IAEWX,EAFXY,EAAA,IAWee,QAXf,CAAAZ,EAAAE,KAAA,cAYY,IAAIM,MAAM,wCAZtB,OAeIpC,KAAKC,OAASY,EAflBe,EAAAE,KAAA,iBAAAF,EAAAC,KAAA,GAAAD,EAAAa,GAAAb,EAAA,SAkBIc,QAAQC,MAARf,EAAAa,IACAG,MAAMhB,EAAAa,GAAII,SAAW,mCACrBC,QAAQC,OApBZ,yBAAAnB,EAAAoB,SAAA1B,EAAAtB,KAAA,yRAwBA,WACE,OAAOiD,YAAPC,UA7D4BC","file":"chunk.5dd33a3a20657ed46a19.js","sourcesContent":["import {\n LitElement,\n customElement,\n property,\n TemplateResult,\n html,\n PropertyValues,\n CSSResult,\n css,\n} from \"lit-element\";\nimport { HomeAssistant, Route } from \"../../../src/types\";\nimport {\n createHassioSession,\n HassioAddonDetails,\n fetchHassioAddonInfo,\n} from \"../../../src/data/hassio\";\nimport \"../../../src/layouts/hass-loading-screen\";\nimport \"../../../src/layouts/hass-subpage\";\n\n@customElement(\"hassio-ingress-view\")\nclass HassioIngressView extends LitElement {\n @property() public hass!: HomeAssistant;\n @property() public route!: Route;\n @property() private _addon?: HassioAddonDetails;\n\n protected render(): TemplateResult | void {\n if (!this._addon) {\n return html`\n <hass-loading-screen></hass-loading-screen>\n `;\n }\n\n return html`\n <hass-subpage .header=${this._addon.name} hassio>\n <iframe src=${this._addon.ingress_url}></iframe>\n </hass-subpage>\n `;\n }\n\n protected updated(changedProps: PropertyValues) {\n super.firstUpdated(changedProps);\n\n if (!changedProps.has(\"route\")) {\n return;\n }\n\n const addon = this.route.path.substr(1);\n\n const oldRoute = changedProps.get(\"route\") as this[\"route\"] | undefined;\n const oldAddon = oldRoute ? 
oldRoute.path.substr(1) : undefined;\n\n if (addon && addon !== oldAddon) {\n this._fetchData(addon);\n }\n }\n\n private async _fetchData(addonSlug: string) {\n try {\n const [addon] = await Promise.all([\n fetchHassioAddonInfo(this.hass, addonSlug).catch(() => {\n throw new Error(\"Failed to fetch add-on info\");\n }),\n createHassioSession(this.hass).catch(() => {\n throw new Error(\"Failed to create an ingress session\");\n }),\n ]);\n\n if (!addon.ingress) {\n throw new Error(\"This add-on does not support ingress\");\n }\n\n this._addon = addon;\n } catch (err) {\n // tslint:disable-next-line\n console.error(err);\n alert(err.message || \"Unknown error starting ingress.\");\n history.back();\n }\n }\n\n static get styles(): CSSResult {\n return css`\n iframe {\n display: block;\n width: 100%;\n height: 100%;\n border: 0;\n }\n paper-icon-button {\n color: var(--text-primary-color);\n }\n `;\n }\n}\n\ndeclare global {\n interface HTMLElementTagNameMap {\n \"hassio-ingress-view\": HassioIngressView;\n }\n}\n"],"sourceRoot":""}
|
12 frontend panel asset files not shown (4 binary files; 8 diffs suppressed because one or more lines are too long).
@@ -1,2 +0,0 @@
|
||||
(window.webpackJsonp=window.webpackJsonp||[]).push([[9],{101:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(124),i=t.n(e),o=t(126),u=t.n(o),a=i.a,c=u.a}}]);
|
||||
//# sourceMappingURL=chunk.7f8cce5798f837214ef8.js.map
|
Binary file not shown.
@@ -1 +0,0 @@
|
||||
{"version":3,"sources":["webpack:///../src/resources/load_markdown.js"],"names":["__webpack_require__","r","__webpack_exports__","d","marked","filterXSS","marked__WEBPACK_IMPORTED_MODULE_0__","marked__WEBPACK_IMPORTED_MODULE_0___default","n","xss__WEBPACK_IMPORTED_MODULE_1__","xss__WEBPACK_IMPORTED_MODULE_1___default","marked_","filterXSS_"],"mappings":"0FAAAA,EAAAC,EAAAC,GAAAF,EAAAG,EAAAD,EAAA,2BAAAE,IAAAJ,EAAAG,EAAAD,EAAA,8BAAAG,IAAA,IAAAC,EAAAN,EAAA,KAAAO,EAAAP,EAAAQ,EAAAF,GAAAG,EAAAT,EAAA,KAAAU,EAAAV,EAAAQ,EAAAC,GAGaL,EAASO,IACTN,EAAYO","file":"chunk.7f8cce5798f837214ef8.js","sourcesContent":["import marked_ from \"marked\";\nimport filterXSS_ from \"xss\";\n\nexport const marked = marked_;\nexport const filterXSS = filterXSS_;\n"],"sourceRoot":""}
|
15 frontend panel asset files not shown (5 binary files; 10 diffs suppressed because one or more lines are too long).
@@ -1,2 +0,0 @@
|
||||
!function(e){function n(n){for(var t,o,a=n[0],i=n[1],f=0,c=[];f<a.length;f++)o=a[f],r[o]&&c.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(u&&u(n);c.length;)c.shift()()}var t={},r={4:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,f=document.createElement("script");f.charset="utf-8",f.timeout=120,o.nc&&f.setAttribute("nonce",o.nc),f.src=function(e){return o.p+"chunk."+{0:"7f411ffa9df152cb8f05",1:"598ae99dfd641ab3a30c",2:"af7784dbf07df8e24819",3:"b15efbd4fb2c8cac0ad4",5:"87d3a6d0178fb26762cf",6:"6f4702eafe52425373ed",7:"5dd33a3a20657ed46a19",8:"7c785f796f428abae18d",9:"7f8cce5798f837214ef8",10:"04bcaa18b59728e10be9",11:"9d7374dae6137783dda4",12:"6685a7f98b13655ab808",13:"f1156b978f6f3143a651"}[e]+".js"}(e),i=function(n){f.onerror=f.onload=null,clearTimeout(u);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src,i=new Error("Loading chunk "+e+" failed.\n("+o+": "+a+")");i.type=o,i.request=a,t[1](i)}r[e]=void 0}};var u=setTimeout(function(){i({type:"timeout",target:f})},12e4);f.onerror=f.onload=i,document.head.appendChild(f)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=window.webpackJsonp=window.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var f=0;f<a.length;f++)n(a[f]);var u=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(1),t.e(6)]).then(t.bind(null,2)),Promise.all([t.e(1),t.e(12),t.e(8)]).then(t.bind(null,1))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
|
||||
//# sourceMappingURL=entrypoint.js.map
|
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,58 +0,0 @@
|
||||
{
|
||||
"raspberrypi": [
|
||||
"armhf"
|
||||
],
|
||||
"raspberrypi2": [
|
||||
"armv7",
|
||||
"armhf"
|
||||
],
|
||||
"raspberrypi3": [
|
||||
"armv7",
|
||||
"armhf"
|
||||
],
|
||||
"raspberrypi3-64": [
|
||||
"aarch64",
|
||||
"armv7",
|
||||
"armhf"
|
||||
],
|
||||
"raspberrypi4": [
|
||||
"armv7",
|
||||
"armhf"
|
||||
],
|
||||
"raspberrypi4-64": [
|
||||
"aarch64",
|
||||
"armv7",
|
||||
"armhf"
|
||||
],
|
||||
"tinker": [
|
||||
"armv7",
|
||||
"armhf"
|
||||
],
|
||||
"odroid-c2": [
|
||||
"aarch64"
|
||||
],
|
||||
"odroid-xu": [
|
||||
"armv7",
|
||||
"armhf"
|
||||
],
|
||||
"orangepi-prime": [
|
||||
"aarch64"
|
||||
],
|
||||
"qemux86": [
|
||||
"i386"
|
||||
],
|
||||
"qemux86-64": [
|
||||
"amd64",
|
||||
"i386"
|
||||
],
|
||||
"qemuarm": [
|
||||
"armhf"
|
||||
],
|
||||
"qemuarm-64": [
|
||||
"aarch64"
|
||||
],
|
||||
"intel-nuc": [
|
||||
"amd64",
|
||||
"i386"
|
||||
]
|
||||
}
|
@@ -1,17 +0,0 @@
|
||||
pcm.!default {
|
||||
type asym
|
||||
capture.pcm "mic"
|
||||
playback.pcm "speaker"
|
||||
}
|
||||
pcm.mic {
|
||||
type plug
|
||||
slave {
|
||||
pcm "hw:$input"
|
||||
}
|
||||
}
|
||||
pcm.speaker {
|
||||
type plug
|
||||
slave {
|
||||
pcm "hw:$output"
|
||||
}
|
||||
}
|
@@ -1,18 +0,0 @@
|
||||
{
|
||||
"raspberrypi3": {
|
||||
"bcm2835 - bcm2835 ALSA": {
|
||||
"0,0": "Raspberry Jack",
|
||||
"0,1": "Raspberry HDMI"
|
||||
},
|
||||
"output": "0,0",
|
||||
"input": null
|
||||
},
|
||||
"raspberrypi2": {
|
||||
"output": "0,0",
|
||||
"input": null
|
||||
},
|
||||
"raspberrypi": {
|
||||
"output": "0,0",
|
||||
"input": null
|
||||
}
|
||||
}
|
@@ -1,15 +0,0 @@
|
||||
.:53 {
|
||||
log
|
||||
errors
|
||||
hosts /config/hosts {
|
||||
fallthrough
|
||||
}
|
||||
template ANY AAAA local.hass.io hassio {
|
||||
rcode NOERROR
|
||||
}
|
||||
forward . $servers {
|
||||
except local.hass.io
|
||||
policy sequential
|
||||
health_check 10s
|
||||
}
|
||||
}
|
@@ -1,57 +0,0 @@
|
||||
"""D-Bus interface for rauc."""
|
||||
import logging
|
||||
|
||||
from .interface import DBusInterface
|
||||
from .utils import dbus_connected
|
||||
from ..exceptions import DBusError, DBusInterfaceError
|
||||
from ..utils.gdbus import DBus
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
DBUS_NAME = "de.pengutronix.rauc"
|
||||
DBUS_OBJECT = "/"
|
||||
|
||||
|
||||
class Rauc(DBusInterface):
|
||||
"""Handle D-Bus interface for rauc."""
|
||||
|
||||
async def connect(self):
|
||||
"""Connect to D-Bus."""
|
||||
try:
|
||||
self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
|
||||
except DBusError:
|
||||
_LOGGER.warning("Can't connect to rauc")
|
||||
except DBusInterfaceError:
|
||||
_LOGGER.warning("Host has no rauc support. OTA updates have been disabled.")
|
||||
|
||||
@dbus_connected
|
||||
def install(self, raucb_file):
|
||||
"""Install rauc bundle file.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.dbus.Installer.Install(raucb_file)
|
||||
|
||||
@dbus_connected
|
||||
def get_slot_status(self):
|
||||
"""Get slot status.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.dbus.Installer.GetSlotStatus()
|
||||
|
||||
@dbus_connected
|
||||
def get_properties(self):
|
||||
"""Return rauc informations.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.dbus.get_properties(f"{DBUS_NAME}.Installer")
|
||||
|
||||
@dbus_connected
|
||||
def signal_completed(self):
|
||||
"""Return a signal wrapper for completed signal.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.dbus.wait_signal(f"{DBUS_NAME}.Installer.Completed")
|
@@ -1,38 +0,0 @@
|
||||
"""HassOS Cli docker object."""
|
||||
import logging
|
||||
|
||||
import docker
|
||||
|
||||
from ..coresys import CoreSysAttributes
|
||||
from .interface import DockerInterface
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DockerHassOSCli(DockerInterface, CoreSysAttributes):
|
||||
"""Docker Hass.io wrapper for HassOS Cli."""
|
||||
|
||||
@property
|
||||
def image(self):
|
||||
"""Return name of HassOS CLI image."""
|
||||
return f"homeassistant/{self.sys_arch.supervisor}-hassio-cli"
|
||||
|
||||
def _stop(self, remove_container=True):
|
||||
"""Don't need stop."""
|
||||
return True
|
||||
|
||||
def _attach(self, tag: str):
|
||||
"""Attach to running Docker container.
|
||||
Need run inside executor.
|
||||
"""
|
||||
try:
|
||||
image = self.sys_docker.images.get(f"{self.image}:{tag}")
|
||||
|
||||
except docker.errors.DockerException:
|
||||
_LOGGER.warning("Can't find a HassOS CLI %s", self.image)
|
||||
|
||||
else:
|
||||
self._meta = image.attrs
|
||||
_LOGGER.info(
|
||||
"Found HassOS CLI %s with version %s", self.image, self.version
|
||||
)
|
@@ -1,138 +0,0 @@
|
||||
"""Host Audio support."""
|
||||
import logging
|
||||
import json
|
||||
from pathlib import Path
|
||||
from string import Template
|
||||
|
||||
import attr
|
||||
|
||||
from ..const import ATTR_INPUT, ATTR_OUTPUT, ATTR_DEVICES, ATTR_NAME, CHAN_ID, CHAN_TYPE
|
||||
from ..coresys import CoreSysAttributes
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@attr.s()
|
||||
class DefaultConfig:
|
||||
"""Default config input/output ALSA channel."""
|
||||
|
||||
input: str = attr.ib()
|
||||
output: str = attr.ib()
|
||||
|
||||
|
||||
AUDIODB_JSON: Path = Path(__file__).parents[1].joinpath("data/audiodb.json")
|
||||
ASOUND_TMPL: Path = Path(__file__).parents[1].joinpath("data/asound.tmpl")
|
||||
|
||||
|
||||
class AlsaAudio(CoreSysAttributes):
|
||||
"""Handle Audio ALSA host data."""
|
||||
|
||||
def __init__(self, coresys):
|
||||
"""Initialize ALSA audio system."""
|
||||
self.coresys = coresys
|
||||
self._data = {ATTR_INPUT: {}, ATTR_OUTPUT: {}}
|
||||
self._cache = 0
|
||||
self._default = None
|
||||
|
||||
@property
|
||||
def input_devices(self):
|
||||
"""Return list of ALSA input devices."""
|
||||
self._update_device()
|
||||
return self._data[ATTR_INPUT]
|
||||
|
||||
@property
|
||||
def output_devices(self):
|
||||
"""Return list of ALSA output devices."""
|
||||
self._update_device()
|
||||
return self._data[ATTR_OUTPUT]
|
||||
|
||||
def _update_device(self):
|
||||
"""Update Internal device DB."""
|
||||
current_id = hash(frozenset(self.sys_hardware.audio_devices))
|
||||
|
||||
# Need rebuild?
|
||||
if current_id == self._cache:
|
||||
return
|
||||
|
||||
# Clean old stuff
|
||||
self._data[ATTR_INPUT].clear()
|
||||
self._data[ATTR_OUTPUT].clear()
|
||||
|
||||
# Init database
|
||||
_LOGGER.info("Update ALSA device list")
|
||||
database = self._audio_database()
|
||||
|
||||
# Process devices
|
||||
for dev_id, dev_data in self.sys_hardware.audio_devices.items():
|
||||
for chan_info in dev_data[ATTR_DEVICES]:
|
||||
chan_id = chan_info[CHAN_ID]
|
||||
chan_type = chan_info[CHAN_TYPE]
|
||||
alsa_id = f"{dev_id},{chan_id}"
|
||||
dev_name = dev_data[ATTR_NAME]
|
||||
|
||||
# Lookup type
|
||||
if chan_type.endswith("playback"):
|
||||
key = ATTR_OUTPUT
|
||||
elif chan_type.endswith("capture"):
|
||||
key = ATTR_INPUT
|
||||
else:
|
||||
_LOGGER.warning("Unknown channel type: %s", chan_type)
|
||||
continue
|
||||
|
||||
# Use name from DB or a generic name
|
||||
self._data[key][alsa_id] = (
|
||||
database.get(self.sys_machine, {})
|
||||
.get(dev_name, {})
|
||||
.get(alsa_id, f"{dev_name}: {chan_id}")
|
||||
)
|
||||
|
||||
self._cache = current_id
|
||||
|
||||
@staticmethod
|
||||
def _audio_database():
|
||||
"""Read local json audio data into dict."""
|
||||
try:
|
||||
return json.loads(AUDIODB_JSON.read_text())
|
||||
except (ValueError, OSError) as err:
|
||||
_LOGGER.warning("Can't read audio DB: %s", err)
|
||||
|
||||
return {}
|
||||
|
||||
@property
|
||||
def default(self):
|
||||
"""Generate ALSA default setting."""
|
||||
# Init defaults
|
||||
if self._default is None:
|
||||
database = self._audio_database()
|
||||
alsa_input = database.get(self.sys_machine, {}).get(ATTR_INPUT)
|
||||
alsa_output = database.get(self.sys_machine, {}).get(ATTR_OUTPUT)
|
||||
|
||||
self._default = DefaultConfig(alsa_input, alsa_output)
|
||||
|
||||
# Search exists/new output
|
||||
if self._default.output is None and self.output_devices:
|
||||
self._default.output = next(iter(self.output_devices))
|
||||
_LOGGER.info("Detect output device %s", self._default.output)
|
||||
|
||||
# Search exists/new input
|
||||
if self._default.input is None and self.input_devices:
|
||||
self._default.input = next(iter(self.input_devices))
|
||||
_LOGGER.info("Detect input device %s", self._default.input)
|
||||
|
||||
return self._default
|
||||
|
||||
def asound(self, alsa_input=None, alsa_output=None):
|
||||
"""Generate an asound data."""
|
||||
alsa_input = alsa_input or self.default.input
|
||||
alsa_output = alsa_output or self.default.output
|
||||
|
||||
# Read Template
|
||||
try:
|
||||
asound_data = ASOUND_TMPL.read_text()
|
||||
except OSError as err:
|
||||
_LOGGER.error("Can't read asound.tmpl: %s", err)
|
||||
return ""
|
||||
|
||||
# Process Template
|
||||
asound_template = Template(asound_data)
|
||||
return asound_template.safe_substitute(input=alsa_input, output=alsa_output)
|
@@ -1 +0,0 @@
"""Special object and tools for Hass.io."""
@@ -1,12 +0,0 @@
|
||||
"""Validate services schema."""
|
||||
import voluptuous as vol
|
||||
|
||||
from ..utils.validate import schema_or
|
||||
from .const import SERVICE_MQTT
|
||||
from .modules.mqtt import SCHEMA_CONFIG_MQTT
|
||||
|
||||
|
||||
SCHEMA_SERVICES_CONFIG = vol.Schema(
|
||||
{vol.Optional(SERVICE_MQTT, default=dict): schema_or(SCHEMA_CONFIG_MQTT)},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
)
|
Submodule home-assistant-polymer updated: 8238b700b0...554c0b692d
BIN misc/hassio.png (binary file not shown; before: 37 KiB)
@@ -1 +0,0 @@
|
||||
<mxfile userAgent="Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36" version="7.9.5" editor="www.draw.io" type="device"><diagram name="Page-1" id="535f6c39-9b73-04c2-941c-82630de90f1a">5VrLcqM4FP0aLzsFiOcycefRVTPVqfFippdYKLYmMvII4cd8/QiQDEKQ4Bicnmp7Yevqybk691zJnoH55vDI4u36d5ogMnOs5DADX2eOY1vAER+F5VhZ3DCoDCuGE9moNizwv0j1lNYcJyjTGnJKCcdb3QhpmiLINVvMGN3rzV4o0WfdxitkGBYwJqb1T5zwtbTaflRXPCG8WsupQ8evKpYxfF0xmqdyvpkDXspXVb2J1VjyQbN1nNB9wwTuZ2DOKOXVt81hjkiBrYKt6vfQU3taN0MpH9JB+mkXkxypFftEdL17oWIEsUB+lKD4/+RUVXzJSpfdigZitoP4EBUl0KJuL4EpalPKNjGpO4tvq+Lz+0LNI9ZWTVVVSFhOszr7NeZosY1hUd6L7SYarfmGiJJdrAYTMqeEsrK1ABv5EAp7xhl9RY0aq3zJ9S/k+B14SdMOMY4ODZPE7xHRDeLsKJqo2ghUXeRe9yLp2329c1wF9LqxaXzZLpabdXUaunaY+CJ91u0/YPjvW4oLvy2OGUebC9GECVqGyy40gQ8ikJz8NS6AwUAAnREAdA0Av1L4itilyEHkCdJfGznXRM7pQg6MgJxvIPc0N1ArQyEqehTUO5PLIUTdXF6GnutZ42Do+p6OoW9i6FkmhN4IEEYGXigROiSLlPE1XdE0Jve19U5HtIEeOmD+V2G+CTxZ/KGqUrGwqs5TxR9yhL8R50epwHHOqTDVE/9G6VaO0Qt1RnMG5fKlyvOYrRDXtknxYG+6gyESc7zTBfgScFUuMTa6zhvoRiLxaeFbFp4Rw+IBELsS6O5ngR705hPLWuHPSzBsv0gw2gnEIt8itsOZCAlqAqbqnuIs+/a9N8E4mZe9SUe9Dez3w5YRnuZz369SDT2gJR4KE3ecsAU8PWyBjqzDDjvilj2GatrOFNyyG8RSUezELY1XZRgbSqJMMIPfFqcCYYBEbA4MlfkBE7WKQVyz1WmkQbbgs8gGpolwmhd0J7Tkoy62A9xAzIe6EKWJOZgwNobqTPjn80sc64Sfpl0qHjSSKzHKl1vx6ALDIppdJ2LFKHyBYyWresRyOtL8U3DS0nx3jIjlX5kr9o2l5wI3dhhemg8MpFWDLilNkcaVN9NmjRHAZITal9dnhDuJ4kifNZK5kRAe7tC+awqYs92Jzx922Kdpk2veTHzAgRoIvd4832d9InK52zrx/rjrrqE1pqduk4SmmeGvbB1vi69bRiHKsvd1RhelwarzIF6lcleHAMFSy/EDEDnA90InDC0XTJRFd2mSY3umJkUjSJK6vJsypNWltuRcmtTJsNck2Sgn2/FClez6THF50JQuV2ei9rlJjVDRUnZyGjfnZ45TUdkYp9wUp6cZtk9Ck6CQU/OKUvEz35CqAbgrqIChQD5eIvJMM8wxTUWTJeWcbkQDUlTcnX610K7Sy98t6jFuCV4VfTk9j+b1zXv7rl5OMAKRW5d4oOMSD3SklqNcwZs0HkBSK9BY6r7HUtvk6BA6XkXzztTxQYqofkH8KZIZtZgGA/f7vRm9CcHbrHSDZCIkNE8u1smrECjS45lrdZzOgqnuk8DbN+Fyc3/gOHYmRybK5RtaW58Bq0U6vWo7jCauSRO1WydXUre1ZdrRdDwJBP0/01lP+bJXCWHMLqefX7466OcV73HoF4FWOtFFv67r3FEULJiIfc19H4yZZU5P2WHs867BvsFu9AySPGK+npoefeqE7MRDwTT0cNWh9Sr0CH8VcYp8naPBZdrk/xraZP4R4g+0LY5alGHUf4vy/yWfusifgHyiWP/5rXJG/Q9DcP8f</diagram></mxfile>
|
@@ -1,16 +1,18 @@
aiohttp==3.6.1
async_timeout==3.0.1
attrs==19.3.0
cchardet==2.1.4
colorlog==4.0.2
cchardet==2.1.6
colorlog==4.1.0
cpe==1.2.1
cryptography==2.8
docker==4.1.0
gitpython==3.0.4
packaging==19.2
pytz==2019.3
pyudev==0.21.0
ruamel.yaml==0.15.100
uvloop==0.13.0
voluptuous==0.11.7
docker==4.2.0
gitpython==3.1.0
jinja2==2.11.1
packaging==20.3
ptvsd==4.3.2
pulsectl==20.2.4
pytz==2019.3
pyudev==0.22.0
ruamel.yaml==0.15.100
uvloop==0.14.0
voluptuous==0.11.7
@@ -1,5 +1,6 @@
flake8==3.7.8
pylint==2.4.3
pytest==5.2.1
pytest-timeout==1.3.3
flake8==3.7.9
pylint==2.4.4
pytest==5.4.1
pytest-timeout==1.3.4
pytest-aiohttp==0.3.0
black==19.10b0
9 rootfs/etc/cont-init.d/udev.sh Normal file
@@ -0,0 +1,9 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Start udev service
# ==============================================================================
udevd --daemon

bashio::log.info "Update udev informations"
udevadm trigger
udevadm settle
35 rootfs/etc/pulse/client.conf Normal file
@@ -0,0 +1,35 @@
|
||||
# This file is part of PulseAudio.
|
||||
#
|
||||
# PulseAudio is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# PulseAudio is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with PulseAudio; if not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
## Configuration file for PulseAudio clients. See pulse-client.conf(5) for
|
||||
## more information. Default values are commented out. Use either ; or # for
|
||||
## commenting.
|
||||
|
||||
; default-sink =
|
||||
; default-source =
|
||||
default-server = unix://data/audio/external/pulse.sock
|
||||
; default-dbus-server =
|
||||
|
||||
autospawn = no
|
||||
; daemon-binary = /usr/bin/pulseaudio
|
||||
; extra-arguments = --log-target=syslog
|
||||
|
||||
; cookie-file =
|
||||
|
||||
; enable-shm = yes
|
||||
; shm-size-bytes = 0 # setting this 0 will use the system-default, usually 64 MiB
|
||||
|
||||
; auto-connect-localhost = no
|
||||
; auto-connect-display = no
|
5 rootfs/etc/services.d/supervisor/finish Normal file
@@ -0,0 +1,5 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Take down the S6 supervision tree when Supervisor fails
# ==============================================================================
s6-svscanctl -t /var/run/s6/services
7 rootfs/etc/services.d/supervisor/run Normal file
@@ -0,0 +1,7 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Start Service service
# ==============================================================================
export LD_PRELOAD="/usr/local/lib/libjemalloc.so.2"

exec python3 -m supervisor
@@ -61,33 +61,63 @@ function build_supervisor() {
|
||||
docker run --rm --privileged \
|
||||
-v /run/docker.sock:/run/docker.sock -v "$(pwd):/data" \
|
||||
homeassistant/amd64-builder:dev \
|
||||
--supervisor 3.7-alpine3.10 --version dev \
|
||||
-t /data --test --amd64 \
|
||||
--no-cache --docker-hub homeassistant
|
||||
--generic dev -t /data --test --amd64 --no-cache
|
||||
}
|
||||
|
||||
|
||||
function install_cli() {
|
||||
docker pull homeassistant/amd64-hassio-cli:dev
|
||||
function cleanup_lastboot() {
|
||||
if [[ -f /workspaces/test_supervisor/config.json ]]; then
|
||||
echo "Cleaning up last boot"
|
||||
cp /workspaces/test_supervisor/config.json /tmp/config.json
|
||||
jq -rM 'del(.last_boot)' /tmp/config.json > /workspaces/test_supervisor/config.json
|
||||
rm /tmp/config.json
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
function cleanup_docker() {
|
||||
echo "Cleaning up stopped containers..."
|
||||
docker rm $(docker ps -a -q) || true
|
||||
}
|
||||
|
||||
|
||||
function setup_test_env() {
|
||||
mkdir -p /workspaces/test_hassio
|
||||
mkdir -p /workspaces/test_supervisor
|
||||
|
||||
echo "Start Supervisor"
|
||||
docker run --rm --privileged \
|
||||
--name hassio_supervisor \
|
||||
--security-opt seccomp=unconfined \
|
||||
--security-opt apparmor:unconfined \
|
||||
-v /run/docker.sock:/run/docker.sock \
|
||||
-v /run/dbus:/run/dbus \
|
||||
-v "/workspaces/test_hassio":/data \
|
||||
-v "/workspaces/test_supervisor":/data \
|
||||
-v /etc/machine-id:/etc/machine-id:ro \
|
||||
-e SUPERVISOR_SHARE="/workspaces/test_hassio" \
|
||||
-e SUPERVISOR_SHARE="/workspaces/test_supervisor" \
|
||||
-e SUPERVISOR_NAME=hassio_supervisor \
|
||||
-e SUPERVISOR_DEV=1 \
|
||||
-e HOMEASSISTANT_REPOSITORY="homeassistant/qemux86-64-homeassistant" \
|
||||
homeassistant/amd64-hassio-supervisor:latest
|
||||
|
||||
}
|
||||
|
||||
|
||||
function init_dbus() {
|
||||
if pgrep dbus-daemon; then
|
||||
echo "Dbus is running"
|
||||
return 0
|
||||
fi
|
||||
|
||||
echo "Startup dbus"
|
||||
mkdir -p /var/lib/dbus
|
||||
cp -f /etc/machine-id /var/lib/dbus/machine-id
|
||||
|
||||
# cleanups
|
||||
mkdir -p /run/dbus
|
||||
rm -f /run/dbus/pid
|
||||
|
||||
# run
|
||||
dbus-daemon --system --print-address
|
||||
}
|
||||
|
||||
echo "Start Test-Env"
|
||||
@@ -95,8 +125,9 @@ echo "Start Test-Env"
|
||||
start_docker
|
||||
trap "stop_docker" ERR
|
||||
|
||||
|
||||
build_supervisor
|
||||
install_cli
|
||||
cleanup_lastboot
|
||||
cleanup_docker
|
||||
init_dbus
|
||||
setup_test_env
|
||||
stop_docker
|
||||
|
@@ -14,5 +14,5 @@ cd hassio
./script/build_hassio

# Copy frontend
rm -f ../../hassio/api/panel/chunk.*
cp -rf build/* ../../hassio/api/panel/
rm -f ../../supervisor/hassio/api/panel/chunk.*
cp -rf build/* ../../supervisor/api/panel/
23 setup.py
@@ -1,10 +1,11 @@
"""Home Assistant Supervisor setup."""
from setuptools import setup

from hassio.const import HASSIO_VERSION
from supervisor.const import SUPERVISOR_VERSION

setup(
    name="HassIO",
    version=HASSIO_VERSION,
    name="Supervisor",
    version=SUPERVISOR_VERSION,
    license="BSD License",
    author="The Home Assistant Authors",
    author_email="hello@home-assistant.io",
@@ -24,19 +25,19 @@ setup(
        "Topic :: Scientific/Engineering :: Atmospheric Science",
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
    ],
    keywords=["docker", "home-assistant", "api"],
    zip_safe=False,
    platforms="any",
    packages=[
        "hassio",
        "hassio.docker",
        "hassio.addons",
        "hassio.api",
        "hassio.misc",
        "hassio.utils",
        "hassio.snapshots",
        "supervisor",
        "supervisor.docker",
        "supervisor.addons",
        "supervisor.api",
        "supervisor.misc",
        "supervisor.utils",
        "supervisor.snapshots",
    ],
    include_package_data=True,
)
1 supervisor/__init__.py Normal file
@@ -0,0 +1 @@
"""Init file for Supervisor."""
@@ -1,10 +1,10 @@
|
||||
"""Main file for Hass.io."""
|
||||
"""Main file for Supervisor."""
|
||||
import asyncio
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from hassio import bootstrap
|
||||
from supervisor import bootstrap
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -29,35 +29,34 @@ if __name__ == "__main__":
|
||||
# Init async event loop
|
||||
loop = initialize_event_loop()
|
||||
|
||||
# Check if all information are available to setup Hass.io
|
||||
if not bootstrap.check_environment():
|
||||
sys.exit(1)
|
||||
# Check if all information are available to setup Supervisor
|
||||
bootstrap.check_environment()
|
||||
|
||||
# init executor pool
|
||||
executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
|
||||
loop.set_default_executor(executor)
|
||||
|
||||
_LOGGER.info("Initialize Hass.io setup")
|
||||
_LOGGER.info("Initialize Supervisor setup")
|
||||
coresys = loop.run_until_complete(bootstrap.initialize_coresys())
|
||||
loop.run_until_complete(coresys.core.connect())
|
||||
|
||||
bootstrap.supervisor_debugger(coresys)
|
||||
bootstrap.migrate_system_env(coresys)
|
||||
|
||||
_LOGGER.info("Setup HassIO")
|
||||
_LOGGER.info("Setup Supervisor")
|
||||
loop.run_until_complete(coresys.core.setup())
|
||||
|
||||
loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
|
||||
loop.call_soon_threadsafe(bootstrap.reg_signal, loop)
|
||||
|
||||
try:
|
||||
_LOGGER.info("Run Hass.io")
|
||||
_LOGGER.info("Run Supervisor")
|
||||
loop.run_forever()
|
||||
finally:
|
||||
_LOGGER.info("Stopping Hass.io")
|
||||
_LOGGER.info("Stopping Supervisor")
|
||||
loop.run_until_complete(coresys.core.stop())
|
||||
executor.shutdown(wait=False)
|
||||
loop.close()
|
||||
|
||||
_LOGGER.info("Close Hass.io")
|
||||
_LOGGER.info("Close Supervisor")
|
||||
sys.exit(0)
|
@@ -1,4 +1,4 @@
|
||||
"""Init file for Hass.io add-ons."""
|
||||
"""Init file for Supervisor add-ons."""
|
||||
import asyncio
|
||||
from contextlib import suppress
|
||||
import logging
|
||||
@@ -25,7 +25,7 @@ AnyAddon = Union[Addon, AddonStore]
|
||||
|
||||
|
||||
class AddonManager(CoreSysAttributes):
|
||||
"""Manage add-ons inside Hass.io."""
|
||||
"""Manage add-ons inside Supervisor."""
|
||||
|
||||
def __init__(self, coresys: CoreSys):
|
||||
"""Initialize Docker base wrapper."""
|
||||
@@ -57,9 +57,9 @@ class AddonManager(CoreSysAttributes):
|
||||
return self.store.get(addon_slug)
|
||||
|
||||
def from_token(self, token: str) -> Optional[Addon]:
|
||||
"""Return an add-on from Hass.io token."""
|
||||
"""Return an add-on from Supervisor token."""
|
||||
for addon in self.installed:
|
||||
if token == addon.hassio_token:
|
||||
if token == addon.supervisor_token:
|
||||
return addon
|
||||
return None
|
||||
|
||||
@@ -152,9 +152,9 @@ class AddonManager(CoreSysAttributes):
|
||||
await addon.remove_data()
|
||||
|
||||
# Cleanup audio settings
|
||||
if addon.path_asound.exists():
|
||||
if addon.path_pulse.exists():
|
||||
with suppress(OSError):
|
||||
addon.path_asound.unlink()
|
||||
addon.path_pulse.unlink()
|
||||
|
||||
# Cleanup AppArmor profile
|
||||
with suppress(HostAppArmorError):
|
||||
@@ -166,8 +166,17 @@ class AddonManager(CoreSysAttributes):
|
||||
with suppress(HomeAssistantAPIError):
|
||||
await self.sys_ingress.update_hass_panel(addon)
|
||||
|
||||
# Cleanup internal data
|
||||
addon.remove_discovery()
|
||||
# Cleanup discovery data
|
||||
for message in self.sys_discovery.list_messages:
|
||||
if message.addon != addon.slug:
|
||||
continue
|
||||
self.sys_discovery.remove(message)
|
||||
|
||||
# Cleanup services data
|
||||
for service in self.sys_services.list_services:
|
||||
if addon.slug not in service.active:
|
||||
continue
|
||||
service.del_service_data(addon)
|
||||
|
||||
self.data.uninstall(addon)
|
||||
self.local.pop(slug)
|
||||
@@ -245,7 +254,7 @@ class AddonManager(CoreSysAttributes):
|
||||
raise AddonsError() from None
|
||||
else:
|
||||
self.data.update(store)
|
||||
_LOGGER.info("Add-on '%s' successfully rebuilded", slug)
|
||||
_LOGGER.info("Add-on '%s' successfully rebuilt", slug)
|
||||
|
||||
# restore state
|
||||
if last_state == STATE_STARTED:
|
||||
@@ -254,20 +263,23 @@ class AddonManager(CoreSysAttributes):
|
||||
async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
|
||||
"""Restore state of an add-on."""
|
||||
if slug not in self.local:
|
||||
_LOGGER.debug("Add-on %s is not local available for restore")
|
||||
_LOGGER.debug("Add-on %s is not local available for restore", slug)
|
||||
addon = Addon(self.coresys, slug)
|
||||
else:
|
||||
_LOGGER.debug("Add-on %s is local available for restore")
|
||||
_LOGGER.debug("Add-on %s is local available for restore", slug)
|
||||
addon = self.local[slug]
|
||||
|
||||
await addon.restore(tar_file)
|
||||
|
||||
# Check if new
|
||||
if slug in self.local:
|
||||
return
|
||||
if slug not in self.local:
|
||||
_LOGGER.info("Detect new Add-on after restore %s", slug)
|
||||
self.local[slug] = addon
|
||||
|
||||
_LOGGER.info("Detect new Add-on after restore %s", slug)
|
||||
self.local[slug] = addon
|
||||
# Update ingress
|
||||
if addon.with_ingress:
|
||||
with suppress(HomeAssistantAPIError):
|
||||
await self.sys_ingress.update_hass_panel(addon)
|
||||
|
||||
async def repair(self) -> None:
|
||||
"""Repair local add-ons."""
|
@@ -1,4 +1,4 @@
|
||||
"""Init file for Hass.io add-ons."""
|
||||
"""Init file for Supervisor add-ons."""
|
||||
from contextlib import suppress
|
||||
from copy import deepcopy
|
||||
from ipaddress import IPv4Address
|
||||
@@ -63,9 +63,11 @@ RE_WEBUI = re.compile(
|
||||
r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$"
|
||||
)
|
||||
|
||||
RE_OLD_AUDIO = re.compile(r"\d+,\d+")
|
||||
|
||||
|
||||
class Addon(AddonModel):
|
||||
"""Hold data for add-on inside Hass.io."""
|
||||
"""Hold data for add-on inside Supervisor."""
|
||||
|
||||
def __init__(self, coresys: CoreSys, slug: str):
|
||||
"""Initialize data holder."""
|
||||
@@ -162,13 +164,13 @@ class Addon(AddonModel):
|
||||
return self.persist[ATTR_UUID]
|
||||
|
||||
@property
|
||||
def hassio_token(self) -> Optional[str]:
|
||||
"""Return access token for Hass.io API."""
|
||||
def supervisor_token(self) -> Optional[str]:
|
||||
"""Return access token for Supervisor API."""
|
||||
return self.persist.get(ATTR_ACCESS_TOKEN)
|
||||
|
||||
@property
|
||||
def ingress_token(self) -> Optional[str]:
|
||||
"""Return access token for Hass.io API."""
|
||||
"""Return access token for Supervisor API."""
|
||||
return self.persist.get(ATTR_INGRESS_TOKEN)
|
||||
|
||||
@property
|
||||
@@ -250,7 +252,7 @@ class Addon(AddonModel):
|
||||
|
||||
# lookup the correct protocol from config
|
||||
if t_proto:
|
||||
proto = "https" if self.options[t_proto] else "http"
|
||||
proto = "https" if self.options.get(t_proto) else "http"
|
||||
else:
|
||||
proto = s_prefix
|
||||
|
||||
@@ -279,33 +281,39 @@ class Addon(AddonModel):
|
||||
|
||||
@property
|
||||
def audio_output(self) -> Optional[str]:
|
||||
"""Return ALSA config for output or None."""
|
||||
"""Return a pulse profile for output or None."""
|
||||
if not self.with_audio:
|
||||
return None
|
||||
return self.persist.get(ATTR_AUDIO_OUTPUT, self.sys_host.alsa.default.output)
|
||||
|
||||
# Fallback with old audio settings
|
||||
# Remove after 210
|
||||
output_data = self.persist.get(ATTR_AUDIO_OUTPUT)
|
||||
if output_data and RE_OLD_AUDIO.fullmatch(output_data):
|
||||
return None
|
||||
return output_data
|
||||
|
||||
@audio_output.setter
|
||||
def audio_output(self, value: Optional[str]):
|
||||
"""Set/reset audio output settings."""
|
||||
if value is None:
|
||||
self.persist.pop(ATTR_AUDIO_OUTPUT, None)
|
||||
else:
|
||||
self.persist[ATTR_AUDIO_OUTPUT] = value
|
||||
"""Set audio output profile settings."""
|
||||
self.persist[ATTR_AUDIO_OUTPUT] = value
|
||||
|
||||
@property
|
||||
def audio_input(self) -> Optional[str]:
|
||||
"""Return ALSA config for input or None."""
|
||||
"""Return pulse profile for input or None."""
|
||||
if not self.with_audio:
|
||||
return None
|
||||
return self.persist.get(ATTR_AUDIO_INPUT, self.sys_host.alsa.default.input)
|
||||
|
||||
# Fallback with old audio settings
|
||||
# Remove after 210
|
||||
input_data = self.persist.get(ATTR_AUDIO_INPUT)
|
||||
if input_data and RE_OLD_AUDIO.fullmatch(input_data):
|
||||
return None
|
||||
return input_data
|
||||
|
||||
@audio_input.setter
|
||||
def audio_input(self, value: Optional[str]):
|
||||
"""Set/reset audio input settings."""
|
||||
if value is None:
|
||||
self.persist.pop(ATTR_AUDIO_INPUT, None)
|
||||
else:
|
||||
self.persist[ATTR_AUDIO_INPUT] = value
|
||||
"""Set audio input settings."""
|
||||
self.persist[ATTR_AUDIO_INPUT] = value
|
||||
|
||||
@property
|
||||
def image(self):
|
||||
@@ -333,14 +341,14 @@ class Addon(AddonModel):
|
||||
return Path(self.path_data, "options.json")
|
||||
|
||||
@property
|
||||
def path_asound(self):
|
||||
def path_pulse(self):
|
||||
"""Return path to asound config."""
|
||||
return Path(self.sys_config.path_tmp, f"{self.slug}_asound")
|
||||
return Path(self.sys_config.path_tmp, f"{self.slug}_pulse")
|
||||
|
||||
@property
|
||||
def path_extern_asound(self):
|
||||
def path_extern_pulse(self):
|
||||
"""Return path to asound config for Docker."""
|
||||
return Path(self.sys_config.path_extern_tmp, f"{self.slug}_asound")
|
||||
return Path(self.sys_config.path_extern_tmp, f"{self.slug}_pulse")
|
||||
|
||||
def save_persist(self):
|
||||
"""Save data of add-on."""
|
||||
@@ -371,13 +379,6 @@ class Addon(AddonModel):
|
||||
|
||||
raise AddonsError()
|
||||
|
||||
def remove_discovery(self):
|
||||
"""Remove all discovery message from add-on."""
|
||||
for message in self.sys_discovery.list_messages:
|
||||
if message.addon != self.slug:
|
||||
continue
|
||||
self.sys_discovery.remove(message)
|
||||
|
||||
async def remove_data(self):
|
||||
"""Remove add-on data."""
|
||||
if not self.path_data.is_dir():
|
||||
@@ -386,20 +387,28 @@ class Addon(AddonModel):
|
||||
_LOGGER.info("Remove add-on data folder %s", self.path_data)
|
||||
await remove_data(self.path_data)
|
||||
|
||||
def write_asound(self):
|
||||
def write_pulse(self):
|
||||
"""Write asound config to file and return True on success."""
|
||||
asound_config = self.sys_host.alsa.asound(
|
||||
alsa_input=self.audio_input, alsa_output=self.audio_output
|
||||
pulse_config = self.sys_audio.pulse_client(
|
||||
input_profile=self.audio_input, output_profile=self.audio_output
|
||||
)
|
||||
|
||||
try:
|
||||
with self.path_asound.open("w") as config_file:
|
||||
config_file.write(asound_config)
|
||||
except OSError as err:
|
||||
_LOGGER.error("Add-on %s can't write asound: %s", self.slug, err)
|
||||
raise AddonsError()
|
||||
# Cleanup wrong maps
|
||||
if self.path_pulse.is_dir():
|
||||
shutil.rmtree(self.path_pulse, ignore_errors=True)
|
||||
|
||||
_LOGGER.debug("Add-on %s write asound: %s", self.slug, self.path_asound)
|
||||
# Write pulse config
|
||||
try:
|
||||
with self.path_pulse.open("w") as config_file:
|
||||
config_file.write(pulse_config)
|
||||
except OSError as err:
|
||||
_LOGGER.error(
|
||||
"Add-on %s can't write pulse/client.config: %s", self.slug, err
|
||||
)
|
||||
else:
|
||||
_LOGGER.debug(
|
||||
"Add-on %s write pulse/client.config: %s", self.slug, self.path_pulse
|
||||
)
|
||||
|
||||
async def install_apparmor(self) -> None:
|
||||
"""Install or Update AppArmor profile for Add-on."""
|
||||
@@ -475,7 +484,7 @@ class Addon(AddonModel):
|
||||
|
||||
# Sound
|
||||
if self.with_audio:
|
||||
self.write_asound()
|
||||
self.write_pulse()
|
||||
|
||||
# Start Add-on
|
||||
try:
|
@@ -1,4 +1,4 @@
|
||||
"""Hass.io add-on build environment."""
|
||||
"""Supervisor add-on build environment."""
|
||||
from __future__ import annotations
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Dict
|
||||
@@ -16,7 +16,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
|
||||
"""Handle build options for add-ons."""
|
||||
|
||||
def __init__(self, coresys: CoreSys, addon: AnyAddon) -> None:
|
||||
"""Initialize Hass.io add-on builder."""
|
||||
"""Initialize Supervisor add-on builder."""
|
||||
self.coresys: CoreSys = coresys
|
||||
self.addon = addon
|
||||
|
@@ -1,4 +1,4 @@
|
||||
"""Init file for Hass.io add-on data."""
|
||||
"""Init file for Supervisor add-on data."""
|
||||
from copy import deepcopy
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
@@ -23,7 +23,7 @@ Config = Dict[str, Any]
|
||||
|
||||
|
||||
class AddonsData(JsonConfig, CoreSysAttributes):
|
||||
"""Hold data for installed Add-ons inside Hass.io."""
|
||||
"""Hold data for installed Add-ons inside Supervisor."""
|
||||
|
||||
def __init__(self, coresys: CoreSys):
|
||||
"""Initialize data holder."""
|
@@ -1,4 +1,4 @@
|
||||
"""Init file for Hass.io add-ons."""
|
||||
"""Init file for Supervisor add-ons."""
|
||||
from pathlib import Path
|
||||
from typing import Any, Awaitable, Dict, List, Optional
|
||||
|
||||
@@ -6,6 +6,7 @@ from packaging import version as pkg_version
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
ATTR_ADVANCED,
|
||||
ATTR_APPARMOR,
|
||||
ATTR_ARCH,
|
||||
ATTR_AUDIO,
|
||||
@@ -30,6 +31,7 @@ from ..const import (
|
||||
ATTR_HOST_PID,
|
||||
ATTR_IMAGE,
|
||||
ATTR_INGRESS,
|
||||
ATTR_INIT,
|
||||
ATTR_KERNEL_MODULES,
|
||||
ATTR_LEGACY,
|
||||
ATTR_LOCATON,
|
||||
@@ -48,6 +50,7 @@ from ..const import (
|
||||
ATTR_SERVICES,
|
||||
ATTR_SLUG,
|
||||
ATTR_SNAPSHOT_EXCLUDE,
|
||||
ATTR_STAGE,
|
||||
ATTR_STARTUP,
|
||||
ATTR_STDIN,
|
||||
ATTR_TIMEOUT,
|
||||
@@ -55,13 +58,15 @@ from ..const import (
|
||||
ATTR_UDEV,
|
||||
ATTR_URL,
|
||||
ATTR_VERSION,
|
||||
ATTR_VIDEO,
|
||||
ATTR_WEBUI,
|
||||
SECURITY_DEFAULT,
|
||||
SECURITY_DISABLE,
|
||||
SECURITY_PROFILE,
|
||||
AddonStages,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from .validate import RE_SERVICE, RE_VOLUME, validate_options
|
||||
from .validate import RE_SERVICE, RE_VOLUME, schema_ui_options, validate_options
|
||||
|
||||
Data = Dict[str, Any]
|
||||
|
||||
@@ -132,13 +137,13 @@ class AddonModel(CoreSysAttributes):
|
||||
return None
|
||||
|
||||
@property
|
||||
def hassio_token(self) -> Optional[str]:
|
||||
"""Return access token for Hass.io API."""
|
||||
def supervisor_token(self) -> Optional[str]:
|
||||
"""Return access token for Supervisor API."""
|
||||
return None
|
||||
|
||||
@property
|
||||
def ingress_token(self) -> Optional[str]:
|
||||
"""Return access token for Hass.io API."""
|
||||
"""Return access token for Supervisor API."""
|
||||
return None
|
||||
|
||||
@property
|
||||
@@ -189,6 +194,16 @@ class AddonModel(CoreSysAttributes):
|
||||
"""Return startup type of add-on."""
|
||||
return self.data.get(ATTR_STARTUP)
|
||||
|
||||
@property
|
||||
def advanced(self) -> bool:
|
||||
"""Return advanced mode of add-on."""
|
||||
return self.data[ATTR_ADVANCED]
|
||||
|
||||
@property
|
||||
def stage(self) -> AddonStages:
|
||||
"""Return stage mode of add-on."""
|
||||
return self.data[ATTR_STAGE]
|
||||
|
||||
@property
|
||||
def services_role(self) -> Dict[str, str]:
|
||||
"""Return dict of services with rights."""
|
||||
@@ -312,7 +327,7 @@ class AddonModel(CoreSysAttributes):
|
||||
|
||||
@property
|
||||
def access_hassio_api(self) -> bool:
|
||||
"""Return True if the add-on access to Hass.io REASTful API."""
|
||||
"""Return True if the add-on access to Supervisor REASTful API."""
|
||||
return self.data[ATTR_HASSIO_API]
|
||||
|
||||
@property
|
||||
@@ -322,7 +337,7 @@ class AddonModel(CoreSysAttributes):
|
||||
|
||||
@property
|
||||
def hassio_role(self) -> str:
|
||||
"""Return Hass.io role for API."""
|
||||
"""Return Supervisor role for API."""
|
||||
return self.data[ATTR_HASSIO_ROLE]
|
||||
|
||||
@property
|
||||
@@ -330,6 +345,11 @@ class AddonModel(CoreSysAttributes):
|
||||
"""Return Exclude list for snapshot."""
|
||||
return self.data.get(ATTR_SNAPSHOT_EXCLUDE, [])
|
||||
|
||||
@property
|
||||
def default_init(self) -> bool:
|
||||
"""Return True if the add-on have no own init."""
|
||||
return self.data[ATTR_INIT]
|
||||
|
||||
@property
|
||||
def with_stdin(self) -> bool:
|
||||
"""Return True if the add-on access use stdin input."""
|
||||
@@ -380,6 +400,11 @@ class AddonModel(CoreSysAttributes):
|
||||
"""Return True if the add-on access to audio."""
|
||||
return self.data[ATTR_AUDIO]
|
||||
|
||||
@property
|
||||
def with_video(self) -> bool:
|
||||
"""Return True if the add-on access to video."""
|
||||
return self.data[ATTR_VIDEO]
|
||||
|
||||
@property
|
||||
def homeassistant_version(self) -> Optional[str]:
|
||||
"""Return min Home Assistant version they needed by Add-on."""
|
||||
@@ -405,6 +430,11 @@ class AddonModel(CoreSysAttributes):
|
||||
"""Return True if a changelog exists."""
|
||||
return self.path_changelog.exists()
|
||||
|
||||
@property
|
||||
def with_documentation(self) -> bool:
|
||||
"""Return True if a documentation exists."""
|
||||
return self.path_documentation.exists()
|
||||
|
||||
@property
|
||||
def supported_arch(self) -> List[str]:
|
||||
"""Return list of supported arch."""
|
||||
@@ -455,6 +485,11 @@ class AddonModel(CoreSysAttributes):
|
||||
"""Return path to add-on changelog."""
|
||||
return Path(self.path_location, "CHANGELOG.md")
|
||||
|
||||
@property
|
||||
def path_documentation(self) -> Path:
|
||||
"""Return path to add-on changelog."""
|
||||
return Path(self.path_location, "DOCS.md")
|
||||
|
||||
@property
|
||||
def path_apparmor(self) -> Path:
|
||||
"""Return path to custom AppArmor profile."""
|
||||
@@ -469,6 +504,15 @@ class AddonModel(CoreSysAttributes):
|
||||
return vol.Schema(dict)
|
||||
return vol.Schema(vol.All(dict, validate_options(self.coresys, raw_schema)))
|
||||
|
||||
@property
|
||||
def schema_ui(self) -> Optional[List[Dict[str, Any]]]:
|
||||
"""Create a UI schema for add-on options."""
|
||||
raw_schema = self.data[ATTR_SCHEMA]
|
||||
|
||||
if isinstance(raw_schema, bool):
|
||||
return None
|
||||
return schema_ui_options(raw_schema)
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Compaired add-on objects."""
|
||||
if not isinstance(other, AddonModel):
|
@@ -39,8 +39,10 @@ def rating_security(addon: AddonModel) -> int:
|
||||
elif addon.apparmor == SECURITY_PROFILE:
|
||||
rating += 1
|
||||
|
||||
# Home Assistant Login
|
||||
if addon.access_auth_api:
|
||||
# Home Assistant Login & Ingress
|
||||
if addon.with_ingress:
|
||||
rating += 2
|
||||
elif addon.access_auth_api:
|
||||
rating += 1
|
||||
|
||||
# Privileged options
|
||||
@@ -57,7 +59,7 @@ def rating_security(addon: AddonModel) -> int:
|
||||
):
|
||||
rating += -1
|
||||
|
||||
# API Hass.io role
|
||||
# API Supervisor role
|
||||
if addon.hassio_role == ROLE_MANAGER:
|
||||
rating += -1
|
||||
elif addon.hassio_role == ROLE_ADMIN:
|
@@ -2,7 +2,7 @@
|
||||
import logging
|
||||
import re
|
||||
import secrets
|
||||
from typing import Any, Dict
|
||||
from typing import Any, Dict, List
|
||||
import uuid
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -10,6 +10,7 @@ import voluptuous as vol
|
||||
from ..const import (
|
||||
ARCH_ALL,
|
||||
ATTR_ACCESS_TOKEN,
|
||||
ATTR_ADVANCED,
|
||||
ATTR_APPARMOR,
|
||||
ATTR_ARCH,
|
||||
ATTR_ARGS,
|
||||
@@ -43,6 +44,7 @@ from ..const import (
|
||||
ATTR_INGRESS_PANEL,
|
||||
ATTR_INGRESS_PORT,
|
||||
ATTR_INGRESS_TOKEN,
|
||||
ATTR_INIT,
|
||||
ATTR_KERNEL_MODULES,
|
||||
ATTR_LEGACY,
|
||||
ATTR_LOCATON,
|
||||
@@ -64,6 +66,7 @@ from ..const import (
|
||||
ATTR_SLUG,
|
||||
ATTR_SNAPSHOT_EXCLUDE,
|
||||
ATTR_SQUASH,
|
||||
ATTR_STAGE,
|
||||
ATTR_STARTUP,
|
||||
ATTR_STATE,
|
||||
ATTR_STDIN,
|
||||
@@ -75,6 +78,7 @@ from ..const import (
|
||||
ATTR_USER,
|
||||
ATTR_UUID,
|
||||
ATTR_VERSION,
|
||||
ATTR_VIDEO,
|
||||
ATTR_WEBUI,
|
||||
BOOT_AUTO,
|
||||
BOOT_MANUAL,
|
||||
@@ -86,28 +90,29 @@ from ..const import (
|
||||
STARTUP_SERVICES,
|
||||
STATE_STARTED,
|
||||
STATE_STOPPED,
|
||||
AddonStages,
|
||||
)
|
||||
from ..coresys import CoreSys
|
||||
from ..discovery.validate import valid_discovery_service
|
||||
from ..validate import (
|
||||
ALSA_DEVICE,
|
||||
DOCKER_PORTS,
|
||||
DOCKER_PORTS_DESCRIPTION,
|
||||
NETWORK_PORT,
|
||||
TOKEN,
|
||||
UUID_MATCH,
|
||||
network_port,
|
||||
token,
|
||||
uuid_match,
|
||||
)
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
|
||||
RE_SERVICE = re.compile(r"^(?P<service>mqtt):(?P<rights>provide|want|need)$")
|
||||
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")
|
||||
|
||||
V_STR = "str"
|
||||
V_INT = "int"
|
||||
V_FLOAT = "float"
|
||||
V_BOOL = "bool"
|
||||
V_PASSWORD = "password"
|
||||
V_EMAIL = "email"
|
||||
V_URL = "url"
|
||||
V_PORT = "port"
|
||||
@@ -118,6 +123,7 @@ RE_SCHEMA_ELEMENT = re.compile(
|
||||
r"^(?:"
|
||||
r"|bool|email|url|port"
|
||||
r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
|
||||
r"|password(?:\((?P<p_min>\d+)?,(?P<p_max>\d+)?\))?"
|
||||
r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
|
||||
r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
|
||||
r"|match\((?P<match>.*)\)"
|
||||
@@ -125,7 +131,16 @@ RE_SCHEMA_ELEMENT = re.compile(
|
||||
r")\??$"
|
||||
)
|
||||
|
||||
_SCHEMA_LENGTH_PARTS = ("i_min", "i_max", "f_min", "f_max", "s_min", "s_max")
|
||||
_SCHEMA_LENGTH_PARTS = (
|
||||
"i_min",
|
||||
"i_max",
|
||||
"f_min",
|
||||
"f_max",
|
||||
"s_min",
|
||||
"s_max",
|
||||
"p_min",
|
||||
"p_max",
|
||||
)
|
||||
|
||||
RE_DOCKER_IMAGE = re.compile(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
|
||||
RE_DOCKER_IMAGE_BUILD = re.compile(
|
||||
@@ -138,18 +153,18 @@ SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
|
||||
MACHINE_ALL = [
|
||||
"intel-nuc",
|
||||
"odroid-c2",
|
||||
"odroid-n2",
|
||||
"odroid-xu",
|
||||
"orangepi-prime",
|
||||
"qemux86",
|
||||
"qemux86-64",
|
||||
"qemuarm",
|
||||
"qemuarm-64",
|
||||
"qemuarm",
|
||||
"qemux86-64",
|
||||
"qemux86",
|
||||
"raspberrypi",
|
||||
"raspberrypi2",
|
||||
"raspberrypi3",
|
||||
"raspberrypi3-64",
|
||||
"raspberrypi4",
|
||||
"raspberrypi3",
|
||||
"raspberrypi4-64",
|
||||
"raspberrypi4",
|
||||
"tinker",
|
||||
]
|
||||
|
||||
@@ -175,6 +190,9 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
|
||||
vol.Optional(ATTR_URL): vol.Url(),
|
||||
vol.Required(ATTR_STARTUP): vol.All(_simple_startup, vol.In(STARTUP_ALL)),
|
||||
vol.Required(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
|
||||
vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_STAGE, default=AddonStages.STABLE): vol.Coerce(AddonStages),
|
||||
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
|
||||
vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
|
||||
vol.Optional(ATTR_WEBUI): vol.Match(
|
||||
@@ -182,7 +200,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
|
||||
),
|
||||
vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(
|
||||
NETWORK_PORT, vol.Equal(0)
|
||||
network_port, vol.Equal(0)
|
||||
),
|
||||
vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
|
||||
vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
|
||||
@@ -203,6 +221,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
|
||||
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
|
||||
vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_VIDEO, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
|
||||
@@ -242,7 +261,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
|
||||
),
|
||||
vol.Optional(ATTR_IMAGE): vol.Match(RE_DOCKER_IMAGE),
|
||||
vol.Optional(ATTR_TIMEOUT, default=10): vol.All(
|
||||
vol.Coerce(int), vol.Range(min=10, max=120)
|
||||
vol.Coerce(int), vol.Range(min=10, max=300)
|
||||
),
|
||||
},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
@@ -269,8 +288,8 @@ SCHEMA_ADDON_USER = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||
vol.Optional(ATTR_IMAGE): vol.Coerce(str),
|
||||
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
|
||||
vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
|
||||
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): uuid_match,
|
||||
vol.Optional(ATTR_ACCESS_TOKEN): token,
|
||||
vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(
|
||||
str
|
||||
),
|
||||
@@ -278,8 +297,8 @@ SCHEMA_ADDON_USER = vol.Schema(
|
||||
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
|
||||
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
|
||||
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
|
||||
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
|
||||
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
|
||||
},
|
||||
@@ -373,7 +392,7 @@ def _single_validate(coresys: CoreSys, typ: str, value: Any, key: str):
|
||||
if group_value:
|
||||
range_args[group_name[2:]] = float(group_value)
|
||||
|
||||
if typ.startswith(V_STR):
|
||||
if typ.startswith(V_STR) or typ.startswith(V_PASSWORD):
|
||||
return vol.All(str(value), vol.Range(**range_args))(value)
|
||||
elif typ.startswith(V_INT):
|
||||
return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
|
||||
@@ -386,7 +405,7 @@ def _single_validate(coresys: CoreSys, typ: str, value: Any, key: str):
|
||||
elif typ.startswith(V_URL):
|
||||
return vol.Url()(value)
|
||||
elif typ.startswith(V_PORT):
|
||||
return NETWORK_PORT(value)
|
||||
return network_port(value)
|
||||
elif typ.startswith(V_MATCH):
|
||||
return vol.Match(match.group("match"))(str(value))
|
||||
elif typ.startswith(V_LIST):
|
||||
@@ -439,3 +458,117 @@ def _check_missing_options(origin, exists, root):
|
||||
if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
|
||||
continue
|
||||
raise vol.Invalid(f"Missing option {miss_opt} in {root}")
|
||||
|
||||
|
||||
def schema_ui_options(raw_schema: Dict[str, Any]) -> List[Dict[str, Any]]:
|
||||
"""Generate UI schema."""
|
||||
ui_schema = []
|
||||
|
||||
# read options
|
||||
for key, value in raw_schema.items():
|
||||
if isinstance(value, list):
|
||||
# nested value list
|
||||
_nested_ui_list(ui_schema, value, key)
|
||||
elif isinstance(value, dict):
|
||||
# nested value dict
|
||||
_nested_ui_dict(ui_schema, value, key)
|
||||
else:
|
||||
# normal value
|
||||
_single_ui_option(ui_schema, value, key)
|
||||
|
||||
return ui_schema
|
||||
|
||||
|
||||
def _single_ui_option(
|
||||
ui_schema: List[Dict[str, Any]], value: str, key: str, multiple: bool = False
|
||||
) -> None:
|
||||
"""Validate a single element."""
|
||||
ui_node = {"name": key}
|
||||
|
||||
# If multiple
|
||||
if multiple:
|
||||
ui_node["multiple"] = True
|
||||
|
||||
# Parse extend data from type
|
||||
match = RE_SCHEMA_ELEMENT.match(value)
|
||||
|
||||
# Prepare range
|
||||
for group_name in _SCHEMA_LENGTH_PARTS:
|
||||
group_value = match.group(group_name)
|
||||
if not group_value:
|
||||
continue
|
||||
if group_name[2:] == "min":
|
||||
ui_node["lengthMin"] = float(group_value)
|
||||
elif group_name[2:] == "max":
|
||||
ui_node["lengthMax"] = float(group_value)
|
||||
|
||||
# If required
|
||||
if value.endswith("?"):
|
||||
ui_node["optional"] = True
|
||||
else:
|
||||
ui_node["required"] = True
|
||||
|
||||
# Data types
|
||||
if value.startswith(V_STR):
|
||||
ui_node["type"] = "string"
|
||||
elif value.startswith(V_PASSWORD):
|
||||
ui_node["type"] = "string"
|
||||
ui_node["format"] = "password"
|
||||
elif value.startswith(V_INT):
|
||||
ui_node["type"] = "integer"
|
||||
elif value.startswith(V_FLOAT):
|
||||
ui_node["type"] = "float"
|
||||
elif value.startswith(V_BOOL):
|
||||
ui_node["type"] = "boolean"
|
||||
elif value.startswith(V_EMAIL):
|
||||
ui_node["type"] = "string"
|
||||
ui_node["format"] = "email"
|
||||
elif value.startswith(V_URL):
|
||||
ui_node["type"] = "string"
|
||||
ui_node["format"] = "url"
|
||||
elif value.startswith(V_PORT):
|
||||
ui_node["type"] = "integer"
|
||||
elif value.startswith(V_MATCH):
|
||||
ui_node["type"] = "string"
|
||||
elif value.startswith(V_LIST):
|
||||
ui_node["type"] = "select"
|
||||
ui_node["options"] = match.group("list").split("|")
|
||||
|
||||
ui_schema.append(ui_node)
|
||||
|
||||
|
||||
def _nested_ui_list(
|
||||
ui_schema: List[Dict[str, Any]], option_list: List[Any], key: str
|
||||
) -> None:
|
||||
"""UI nested list items."""
|
||||
try:
|
||||
element = option_list[0]
|
||||
except IndexError:
|
||||
_LOGGER.error("Invalid schema %s", key)
|
||||
return
|
||||
|
||||
if isinstance(element, dict):
|
||||
_nested_ui_dict(ui_schema, element, key, multiple=True)
|
||||
else:
|
||||
_single_ui_option(ui_schema, element, key, multiple=True)
|
||||
|
||||
|
||||
def _nested_ui_dict(
|
||||
ui_schema: List[Dict[str, Any]],
|
||||
option_dict: Dict[str, Any],
|
||||
key: str,
|
||||
multiple: bool = False,
|
||||
) -> None:
|
||||
"""UI nested dict items."""
|
||||
ui_node = {"name": key, "type": "schema", "optional": True, "multiple": multiple}
|
||||
|
||||
nested_schema = []
|
||||
for c_key, c_value in option_dict.items():
|
||||
# Nested?
|
||||
if isinstance(c_value, list):
|
||||
_nested_ui_list(nested_schema, c_value, c_key)
|
||||
else:
|
||||
_single_ui_option(nested_schema, c_value, c_key)
|
||||
|
||||
ui_node["schema"] = nested_schema
|
||||
ui_schema.append(ui_node)
|
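For illustration only, a minimal sketch of what the new schema_ui_options() helper above would produce for a small, hypothetical add-on options schema. The input keys and values below are invented; the output field names (name, required, optional, type, format, lengthMin) follow the helper code above.

# Hypothetical add-on options schema (the "schema" section of an add-on config)
raw_schema = {"ssl": "bool", "port": "port", "password": "password(8,)?"}

# Per _single_ui_option() above, schema_ui_options(raw_schema) should return roughly:
# [
#     {"name": "ssl", "required": True, "type": "boolean"},
#     {"name": "port", "required": True, "type": "integer"},
#     {"name": "password", "lengthMin": 8.0, "optional": True,
#      "type": "string", "format": "password"},
# ]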
@@ -1,4 +1,4 @@
|
||||
"""Init file for Hass.io RESTful API."""
|
||||
"""Init file for Supervisor RESTful API."""
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
@@ -7,11 +7,13 @@ from aiohttp import web
|
||||
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from .addons import APIAddons
|
||||
from .audio import APIAudio
|
||||
from .auth import APIAuth
|
||||
from .cli import APICli
|
||||
from .discovery import APIDiscovery
|
||||
from .dns import APICoreDNS
|
||||
from .hardware import APIHardware
|
||||
from .hassos import APIHassOS
|
||||
from .os import APIOS
|
||||
from .homeassistant import APIHomeAssistant
|
||||
from .host import APIHost
|
||||
from .info import APIInfo
|
||||
@@ -25,15 +27,19 @@ from .supervisor import APISupervisor
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
MAX_CLIENT_SIZE: int = 1024 ** 2 * 16
|
||||
|
||||
|
||||
class RestAPI(CoreSysAttributes):
|
||||
"""Handle RESTful API for Hass.io."""
|
||||
"""Handle RESTful API for Supervisor."""
|
||||
|
||||
def __init__(self, coresys: CoreSys):
|
||||
"""Initialize Docker base wrapper."""
|
||||
self.coresys: CoreSys = coresys
|
||||
self.security: SecurityMiddleware = SecurityMiddleware(coresys)
|
||||
self.webapp: web.Application = web.Application(
|
||||
middlewares=[self.security.token_validation]
|
||||
client_max_size=MAX_CLIENT_SIZE,
|
||||
middlewares=[self.security.token_validation],
|
||||
)
|
||||
|
||||
# service stuff
|
||||
@@ -44,7 +50,8 @@ class RestAPI(CoreSysAttributes):
|
||||
"""Register REST API Calls."""
|
||||
self._register_supervisor()
|
||||
self._register_host()
|
||||
self._register_hassos()
|
||||
self._register_os()
|
||||
self._register_cli()
|
||||
self._register_hardware()
|
||||
self._register_homeassistant()
|
||||
self._register_proxy()
|
||||
@@ -57,6 +64,7 @@ class RestAPI(CoreSysAttributes):
|
||||
self._register_info()
|
||||
self._register_auth()
|
||||
self._register_dns()
|
||||
self._register_audio()
|
||||
|
||||
def _register_host(self) -> None:
|
||||
"""Register hostcontrol functions."""
|
||||
@@ -78,17 +86,29 @@ class RestAPI(CoreSysAttributes):
|
||||
]
|
||||
)
|
||||
|
||||
def _register_hassos(self) -> None:
|
||||
"""Register HassOS functions."""
|
||||
api_hassos = APIHassOS()
|
||||
api_hassos.coresys = self.coresys
|
||||
def _register_os(self) -> None:
|
||||
"""Register OS functions."""
|
||||
api_os = APIOS()
|
||||
api_os.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/hassos/info", api_hassos.info),
|
||||
web.post("/hassos/update", api_hassos.update),
|
||||
web.post("/hassos/update/cli", api_hassos.update_cli),
|
||||
web.post("/hassos/config/sync", api_hassos.config_sync),
|
||||
web.get("/os/info", api_os.info),
|
||||
web.post("/os/update", api_os.update),
|
||||
web.post("/os/config/sync", api_os.config_sync),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_cli(self) -> None:
|
||||
"""Register HA cli functions."""
|
||||
api_cli = APICli()
|
||||
api_cli.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/cli/info", api_cli.info),
|
||||
web.get("/cli/stats", api_cli.stats),
|
||||
web.post("/cli/update", api_cli.update),
|
||||
]
|
||||
)
|
||||
|
||||
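As a hedged illustration of the renamed routes registered above: a client that previously called /hassos/info would now call /os/info, and the HA cli plugin is exposed under /cli/*. The base URL http://supervisor and the SUPERVISOR_TOKEN environment variable below are assumptions for the sketch, not taken from this diff.

import os

import requests

headers = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}

# Renamed OS endpoints (the old /hassos/* routes were dropped above)
os_info = requests.get("http://supervisor/os/info", headers=headers).json()

# New cli plugin endpoints
cli_stats = requests.get("http://supervisor/cli/stats", headers=headers).json()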
@@ -117,7 +137,9 @@ class RestAPI(CoreSysAttributes):
|
||||
api_auth = APIAuth()
|
||||
api_auth.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([web.post("/auth", api_auth.auth)])
|
||||
self.webapp.add_routes(
|
||||
[web.post("/auth", api_auth.auth), web.post("/auth/reset", api_auth.reset)]
|
||||
)
|
||||
|
||||
def _register_supervisor(self) -> None:
|
||||
"""Register Supervisor functions."""
|
||||
@@ -144,6 +166,17 @@ class RestAPI(CoreSysAttributes):
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/core/info", api_hass.info),
|
||||
web.get("/core/logs", api_hass.logs),
|
||||
web.get("/core/stats", api_hass.stats),
|
||||
web.post("/core/options", api_hass.options),
|
||||
web.post("/core/update", api_hass.update),
|
||||
web.post("/core/restart", api_hass.restart),
|
||||
web.post("/core/stop", api_hass.stop),
|
||||
web.post("/core/start", api_hass.start),
|
||||
web.post("/core/check", api_hass.check),
|
||||
web.post("/core/rebuild", api_hass.rebuild),
|
||||
# Remove with old Supervisor fallback
|
||||
web.get("/homeassistant/info", api_hass.info),
|
||||
web.get("/homeassistant/logs", api_hass.logs),
|
||||
web.get("/homeassistant/stats", api_hass.stats),
|
||||
@@ -164,6 +197,13 @@ class RestAPI(CoreSysAttributes):
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/core/api/websocket", api_proxy.websocket),
|
||||
web.get("/core/websocket", api_proxy.websocket),
|
||||
web.get("/core/api/stream", api_proxy.stream),
|
||||
web.post("/core/api/{path:.+}", api_proxy.api),
|
||||
web.get("/core/api/{path:.+}", api_proxy.api),
|
||||
web.get("/core/api/", api_proxy.api),
|
||||
# Remove with old Supervisor fallback
|
||||
web.get("/homeassistant/api/websocket", api_proxy.websocket),
|
||||
web.get("/homeassistant/websocket", api_proxy.websocket),
|
||||
web.get("/homeassistant/api/stream", api_proxy.stream),
|
||||
@@ -195,6 +235,7 @@ class RestAPI(CoreSysAttributes):
|
||||
web.get("/addons/{addon}/icon", api_addons.icon),
|
||||
web.get("/addons/{addon}/logo", api_addons.logo),
|
||||
web.get("/addons/{addon}/changelog", api_addons.changelog),
|
||||
web.get("/addons/{addon}/documentation", api_addons.documentation),
|
||||
web.post("/addons/{addon}/stdin", api_addons.stdin),
|
||||
web.post("/addons/{addon}/security", api_addons.security),
|
||||
web.get("/addons/{addon}/stats", api_addons.stats),
|
||||
@@ -280,6 +321,29 @@ class RestAPI(CoreSysAttributes):
|
||||
web.post("/dns/update", api_dns.update),
|
||||
web.post("/dns/options", api_dns.options),
|
||||
web.post("/dns/restart", api_dns.restart),
|
||||
web.post("/dns/reset", api_dns.reset),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_audio(self) -> None:
|
||||
"""Register Audio functions."""
|
||||
api_audio = APIAudio()
|
||||
api_audio.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/audio/info", api_audio.info),
|
||||
web.get("/audio/stats", api_audio.stats),
|
||||
web.get("/audio/logs", api_audio.logs),
|
||||
web.post("/audio/update", api_audio.update),
|
||||
web.post("/audio/restart", api_audio.restart),
|
||||
web.post("/audio/reload", api_audio.reload),
|
||||
web.post("/audio/profile", api_audio.set_profile),
|
||||
web.post("/audio/volume/{source}/application", api_audio.set_volume),
|
||||
web.post("/audio/volume/{source}", api_audio.set_volume),
|
||||
web.post("/audio/mute/{source}/application", api_audio.set_mute),
|
||||
web.post("/audio/mute/{source}", api_audio.set_mute),
|
||||
web.post("/audio/default/{source}", api_audio.set_default),
|
||||
]
|
||||
)
|
||||
|
@@ -1,4 +1,4 @@
|
||||
"""Init file for Hass.io Home Assistant RESTful API."""
|
||||
"""Init file for Supervisor Home Assistant RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any, Awaitable, Dict, List
|
||||
@@ -7,10 +7,11 @@ from aiohttp import web
|
||||
import voluptuous as vol
|
||||
|
||||
from ..addons import AnyAddon
|
||||
from ..docker.stats import DockerStats
|
||||
from ..addons.addon import Addon
|
||||
from ..addons.utils import rating_security
|
||||
from ..const import (
|
||||
ATTR_ADDONS,
|
||||
ATTR_ADVANCED,
|
||||
ATTR_APPARMOR,
|
||||
ATTR_ARCH,
|
||||
ATTR_AUDIO,
|
||||
@@ -32,6 +33,7 @@ from ..const import (
|
||||
ATTR_DISCOVERY,
|
||||
ATTR_DNS,
|
||||
ATTR_DOCKER_API,
|
||||
ATTR_DOCUMENTATION,
|
||||
ATTR_FULL_ACCESS,
|
||||
ATTR_GPIO,
|
||||
ATTR_HASSIO_API,
|
||||
@@ -52,7 +54,7 @@ from ..const import (
|
||||
ATTR_INSTALLED,
|
||||
ATTR_IP_ADDRESS,
|
||||
ATTR_KERNEL_MODULES,
|
||||
ATTR_LAST_VERSION,
|
||||
ATTR_VERSION_LATEST,
|
||||
ATTR_LOGO,
|
||||
ATTR_LONG_DESCRIPTION,
|
||||
ATTR_MACHINE,
|
||||
@@ -71,14 +73,17 @@ from ..const import (
|
||||
ATTR_RATING,
|
||||
ATTR_REPOSITORIES,
|
||||
ATTR_REPOSITORY,
|
||||
ATTR_SCHEMA,
|
||||
ATTR_SERVICES,
|
||||
ATTR_SLUG,
|
||||
ATTR_SOURCE,
|
||||
ATTR_STAGE,
|
||||
ATTR_STATE,
|
||||
ATTR_STDIN,
|
||||
ATTR_UDEV,
|
||||
ATTR_URL,
|
||||
ATTR_VERSION,
|
||||
ATTR_VIDEO,
|
||||
ATTR_WEBUI,
|
||||
BOOT_AUTO,
|
||||
BOOT_MANUAL,
|
||||
@@ -89,8 +94,9 @@ from ..const import (
|
||||
STATE_NONE,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..docker.stats import DockerStats
|
||||
from ..exceptions import APIError
|
||||
from ..validate import ALSA_DEVICE, DOCKER_PORTS
|
||||
from ..validate import DOCKER_PORTS
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
@@ -101,10 +107,10 @@ SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
|
||||
SCHEMA_OPTIONS = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
|
||||
vol.Optional(ATTR_NETWORK): vol.Maybe(DOCKER_PORTS),
|
||||
vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
|
||||
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
|
||||
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
|
||||
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
|
||||
}
|
||||
)
|
||||
@@ -124,7 +130,10 @@ class APIAddons(CoreSysAttributes):
|
||||
|
||||
# Lookup itself
|
||||
if addon_slug == "self":
|
||||
return request.get(REQUEST_FROM)
|
||||
addon = request.get(REQUEST_FROM)
|
||||
if not isinstance(addon, Addon):
|
||||
raise APIError("Self is not an Addon")
|
||||
return addon
|
||||
|
||||
addon = self.sys_addons.get(addon_slug)
|
||||
if not addon:
|
||||
@@ -145,6 +154,8 @@ class APIAddons(CoreSysAttributes):
|
||||
ATTR_NAME: addon.name,
|
||||
ATTR_SLUG: addon.slug,
|
||||
ATTR_DESCRIPTON: addon.description,
|
||||
ATTR_ADVANCED: addon.advanced,
|
||||
ATTR_STAGE: addon.stage,
|
||||
ATTR_VERSION: addon.latest_version,
|
||||
ATTR_INSTALLED: addon.version if addon.is_installed else None,
|
||||
ATTR_AVAILABLE: addon.available,
|
||||
@@ -188,14 +199,17 @@ class APIAddons(CoreSysAttributes):
|
||||
ATTR_DNS: addon.dns,
|
||||
ATTR_DESCRIPTON: addon.description,
|
||||
ATTR_LONG_DESCRIPTION: addon.long_description,
|
||||
ATTR_ADVANCED: addon.advanced,
|
||||
ATTR_STAGE: addon.stage,
|
||||
ATTR_AUTO_UPDATE: None,
|
||||
ATTR_REPOSITORY: addon.repository,
|
||||
ATTR_VERSION: None,
|
||||
ATTR_LAST_VERSION: addon.latest_version,
|
||||
ATTR_VERSION_LATEST: addon.latest_version,
|
||||
ATTR_PROTECTED: addon.protected,
|
||||
ATTR_RATING: rating_security(addon),
|
||||
ATTR_BOOT: addon.boot,
|
||||
ATTR_OPTIONS: addon.options,
|
||||
ATTR_SCHEMA: addon.schema_ui,
|
||||
ATTR_ARCH: addon.supported_arch,
|
||||
ATTR_MACHINE: addon.supported_machine,
|
||||
ATTR_HOMEASSISTANT: addon.homeassistant_version,
|
||||
@@ -217,6 +231,7 @@ class APIAddons(CoreSysAttributes):
|
||||
ATTR_ICON: addon.with_icon,
|
||||
ATTR_LOGO: addon.with_logo,
|
||||
ATTR_CHANGELOG: addon.with_changelog,
|
||||
ATTR_DOCUMENTATION: addon.with_documentation,
|
||||
ATTR_STDIN: addon.with_stdin,
|
||||
ATTR_WEBUI: None,
|
||||
ATTR_HASSIO_API: addon.access_hassio_api,
|
||||
@@ -228,6 +243,7 @@ class APIAddons(CoreSysAttributes):
|
||||
ATTR_DEVICETREE: addon.with_devicetree,
|
||||
ATTR_UDEV: addon.with_udev,
|
||||
ATTR_DOCKER_API: addon.access_docker_api,
|
||||
ATTR_VIDEO: addon.with_video,
|
||||
ATTR_AUDIO: addon.with_audio,
|
||||
ATTR_AUDIO_INPUT: None,
|
||||
ATTR_AUDIO_OUTPUT: None,
|
||||
@@ -407,6 +423,16 @@ class APIAddons(CoreSysAttributes):
|
||||
with addon.path_changelog.open("r") as changelog:
|
||||
return changelog.read()
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_TEXT)
|
||||
async def documentation(self, request: web.Request) -> str:
|
||||
"""Return documentation from add-on."""
|
||||
addon: AnyAddon = self._extract_addon(request, check_installed=False)
|
||||
if not addon.with_documentation:
|
||||
raise APIError("No documentation found!")
|
||||
|
||||
with addon.path_documentation.open("r") as documentation:
|
||||
return documentation.read()
|
||||
|
||||
@api_process
|
||||
async def stdin(self, request: web.Request) -> None:
|
||||
"""Write to stdin of add-on."""
|
supervisor/api/audio.py (new file, 170 lines)
@@ -0,0 +1,170 @@
|
||||
"""Init file for Supervisor Audio RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any, Awaitable, Dict
|
||||
|
||||
from aiohttp import web
|
||||
import attr
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
ATTR_ACTIVE,
|
||||
ATTR_APPLICATION,
|
||||
ATTR_AUDIO,
|
||||
ATTR_BLK_READ,
|
||||
ATTR_BLK_WRITE,
|
||||
ATTR_CARD,
|
||||
ATTR_CPU_PERCENT,
|
||||
ATTR_HOST,
|
||||
ATTR_INDEX,
|
||||
ATTR_INPUT,
|
||||
ATTR_VERSION_LATEST,
|
||||
ATTR_MEMORY_LIMIT,
|
||||
ATTR_MEMORY_PERCENT,
|
||||
ATTR_MEMORY_USAGE,
|
||||
ATTR_NAME,
|
||||
ATTR_NETWORK_RX,
|
||||
ATTR_NETWORK_TX,
|
||||
ATTR_OUTPUT,
|
||||
ATTR_VERSION,
|
||||
ATTR_VOLUME,
|
||||
CONTENT_TYPE_BINARY,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..exceptions import APIError
|
||||
from ..host.sound import StreamType
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
|
||||
|
||||
SCHEMA_VOLUME = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_INDEX): vol.Coerce(int),
|
||||
vol.Required(ATTR_VOLUME): vol.Coerce(float),
|
||||
}
|
||||
)
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_MUTE = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_INDEX): vol.Coerce(int),
|
||||
vol.Required(ATTR_ACTIVE): vol.Boolean(),
|
||||
}
|
||||
)
|
||||
|
||||
SCHEMA_DEFAULT = vol.Schema({vol.Required(ATTR_NAME): vol.Coerce(str)})
|
||||
|
||||
SCHEMA_PROFILE = vol.Schema(
|
||||
{vol.Required(ATTR_CARD): vol.Coerce(str), vol.Required(ATTR_NAME): vol.Coerce(str)}
|
||||
)
|
||||
|
||||
|
||||
class APIAudio(CoreSysAttributes):
|
||||
"""Handle RESTful API for Audio functions."""
|
||||
|
||||
@api_process
|
||||
async def info(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return Audio information."""
|
||||
return {
|
||||
ATTR_VERSION: self.sys_audio.version,
|
||||
ATTR_VERSION_LATEST: self.sys_audio.latest_version,
|
||||
ATTR_HOST: str(self.sys_docker.network.audio),
|
||||
ATTR_AUDIO: {
|
||||
ATTR_CARD: [attr.asdict(card) for card in self.sys_host.sound.cards],
|
||||
ATTR_INPUT: [
|
||||
attr.asdict(stream) for stream in self.sys_host.sound.inputs
|
||||
],
|
||||
ATTR_OUTPUT: [
|
||||
attr.asdict(stream) for stream in self.sys_host.sound.outputs
|
||||
],
|
||||
ATTR_APPLICATION: [
|
||||
attr.asdict(stream) for stream in self.sys_host.sound.applications
|
||||
],
|
||||
},
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def stats(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return resource information."""
|
||||
stats = await self.sys_audio.stats()
|
||||
|
||||
return {
|
||||
ATTR_CPU_PERCENT: stats.cpu_percent,
|
||||
ATTR_MEMORY_USAGE: stats.memory_usage,
|
||||
ATTR_MEMORY_LIMIT: stats.memory_limit,
|
||||
ATTR_MEMORY_PERCENT: stats.memory_percent,
|
||||
ATTR_NETWORK_RX: stats.network_rx,
|
||||
ATTR_NETWORK_TX: stats.network_tx,
|
||||
ATTR_BLK_READ: stats.blk_read,
|
||||
ATTR_BLK_WRITE: stats.blk_write,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def update(self, request: web.Request) -> None:
|
||||
"""Update Audio plugin."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.sys_audio.latest_version)
|
||||
|
||||
if version == self.sys_audio.version:
|
||||
raise APIError("Version {} is already in use".format(version))
|
||||
await asyncio.shield(self.sys_audio.update(version))
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request: web.Request) -> Awaitable[bytes]:
|
||||
"""Return Audio Docker logs."""
|
||||
return self.sys_audio.logs()
|
||||
|
||||
@api_process
|
||||
def restart(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Restart Audio plugin."""
|
||||
return asyncio.shield(self.sys_audio.restart())
|
||||
|
||||
@api_process
|
||||
def reload(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Reload Audio information."""
|
||||
return asyncio.shield(self.sys_host.sound.update())
|
||||
|
||||
@api_process
|
||||
async def set_volume(self, request: web.Request) -> None:
|
||||
"""Set audio volume on stream."""
|
||||
source: StreamType = StreamType(request.match_info.get("source"))
|
||||
application: bool = request.path.endswith("application")
|
||||
body = await api_validate(SCHEMA_VOLUME, request)
|
||||
|
||||
await asyncio.shield(
|
||||
self.sys_host.sound.set_volume(
|
||||
source, body[ATTR_INDEX], body[ATTR_VOLUME], application
|
||||
)
|
||||
)
|
||||
|
||||
@api_process
|
||||
async def set_mute(self, request: web.Request) -> None:
|
||||
"""Mute audio volume on stream."""
|
||||
source: StreamType = StreamType(request.match_info.get("source"))
|
||||
application: bool = request.path.endswith("application")
|
||||
body = await api_validate(SCHEMA_MUTE, request)
|
||||
|
||||
await asyncio.shield(
|
||||
self.sys_host.sound.set_mute(
|
||||
source, body[ATTR_INDEX], body[ATTR_ACTIVE], application
|
||||
)
|
||||
)
|
||||
|
||||
@api_process
|
||||
async def set_default(self, request: web.Request) -> None:
|
||||
"""Set audio default stream."""
|
||||
source: StreamType = StreamType(request.match_info.get("source"))
|
||||
body = await api_validate(SCHEMA_DEFAULT, request)
|
||||
|
||||
await asyncio.shield(self.sys_host.sound.set_default(source, body[ATTR_NAME]))
|
||||
|
||||
@api_process
|
||||
async def set_profile(self, request: web.Request) -> None:
|
||||
"""Set audio default sources."""
|
||||
body = await api_validate(SCHEMA_PROFILE, request)
|
||||
|
||||
await asyncio.shield(
|
||||
self.sys_host.sound.ativate_profile(body[ATTR_CARD], body[ATTR_NAME])
|
||||
)
|
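A hedged usage sketch for the audio endpoints defined in this new file and wired up in _register_audio above. The JSON keys follow SCHEMA_VOLUME and SCHEMA_MUTE; the base URL, token variable, and the "output" stream-type value are assumptions for illustration.

import os

import requests

headers = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}
base = "http://supervisor"

# Set volume on an output stream (index/volume per SCHEMA_VOLUME)
requests.post(f"{base}/audio/volume/output", headers=headers,
              json={"index": 0, "volume": 0.5})

# Mute the same stream (index/active per SCHEMA_MUTE)
requests.post(f"{base}/audio/mute/output", headers=headers,
              json={"index": 0, "active": True})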
@@ -1,22 +1,39 @@
|
||||
"""Init file for Hass.io auth/SSO RESTful API."""
|
||||
"""Init file for Supervisor auth/SSO RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
from aiohttp import BasicAuth
|
||||
from aiohttp import BasicAuth, web
|
||||
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
|
||||
from aiohttp.web_exceptions import HTTPUnauthorized
|
||||
from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION, WWW_AUTHENTICATE
|
||||
import voluptuous as vol
|
||||
|
||||
from .utils import api_process
|
||||
from ..const import REQUEST_FROM, CONTENT_TYPE_JSON, CONTENT_TYPE_URL
|
||||
from ..addons.addon import Addon
|
||||
from ..const import (
|
||||
ATTR_PASSWORD,
|
||||
ATTR_USERNAME,
|
||||
CONTENT_TYPE_JSON,
|
||||
CONTENT_TYPE_URL,
|
||||
REQUEST_FROM,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..exceptions import APIForbidden
|
||||
from .utils import api_process, api_validate
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_PASSWORD_RESET = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_USERNAME): vol.Coerce(str),
|
||||
vol.Required(ATTR_PASSWORD): vol.Coerce(str),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class APIAuth(CoreSysAttributes):
|
||||
"""Handle RESTful API for auth functions."""
|
||||
|
||||
def _process_basic(self, request, addon):
|
||||
def _process_basic(self, request: web.Request, addon: Addon) -> bool:
|
||||
"""Process login request with basic auth.
|
||||
|
||||
Return a coroutine.
|
||||
@@ -24,7 +41,9 @@ class APIAuth(CoreSysAttributes):
|
||||
auth = BasicAuth.decode(request.headers[AUTHORIZATION])
|
||||
return self.sys_auth.check_login(addon, auth.login, auth.password)
|
||||
|
||||
def _process_dict(self, request, addon, data):
|
||||
def _process_dict(
|
||||
self, request: web.Request, addon: Addon, data: Dict[str, str]
|
||||
) -> bool:
|
||||
"""Process login with dict data.
|
||||
|
||||
Return a coroutine.
|
||||
@@ -35,7 +54,7 @@ class APIAuth(CoreSysAttributes):
|
||||
return self.sys_auth.check_login(addon, username, password)
|
||||
|
||||
@api_process
|
||||
async def auth(self, request):
|
||||
async def auth(self, request: web.Request) -> bool:
|
||||
"""Process login request."""
|
||||
addon = request[REQUEST_FROM]
|
||||
|
||||
@@ -57,5 +76,13 @@ class APIAuth(CoreSysAttributes):
|
||||
return await self._process_dict(request, addon, data)
|
||||
|
||||
raise HTTPUnauthorized(
|
||||
headers={WWW_AUTHENTICATE: 'Basic realm="Hass.io Authentication"'}
|
||||
headers={WWW_AUTHENTICATE: 'Basic realm="Home Assistant Authentication"'}
|
||||
)
|
||||
|
||||
@api_process
|
||||
async def reset(self, request: web.Request) -> None:
|
||||
"""Process reset password request."""
|
||||
body: Dict[str, str] = await api_validate(SCHEMA_PASSWORD_RESET, request)
|
||||
await asyncio.shield(
|
||||
self.sys_auth.change_password(body[ATTR_USERNAME], body[ATTR_PASSWORD])
|
||||
)
|
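For completeness, a hedged sketch of the new password-reset endpoint registered earlier as POST /auth/reset. The body keys follow SCHEMA_PASSWORD_RESET above; the credentials, base URL, and token variable are placeholders, and whether a given caller may use this endpoint depends on its API role, which this diff does not show.

import os

import requests

headers = {"Authorization": f"Bearer {os.environ['SUPERVISOR_TOKEN']}"}

# Reset a Home Assistant user's password via the Supervisor auth API
requests.post("http://supervisor/auth/reset", headers=headers,
              json={"username": "example-user", "password": "new-secret"})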
supervisor/api/cli.py (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
"""Init file for Supervisor HA cli RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from aiohttp import web
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
ATTR_VERSION,
|
||||
ATTR_VERSION_LATEST,
|
||||
ATTR_BLK_READ,
|
||||
ATTR_BLK_WRITE,
|
||||
ATTR_CPU_PERCENT,
|
||||
ATTR_MEMORY_LIMIT,
|
||||
ATTR_MEMORY_PERCENT,
|
||||
ATTR_MEMORY_USAGE,
|
||||
ATTR_NETWORK_RX,
|
||||
ATTR_NETWORK_TX,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from .utils import api_process, api_validate
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
|
||||
|
||||
|
||||
class APICli(CoreSysAttributes):
|
||||
"""Handle RESTful API for HA Cli functions."""
|
||||
|
||||
@api_process
|
||||
async def info(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return HA cli information."""
|
||||
return {
|
||||
ATTR_VERSION: self.sys_cli.version,
|
||||
ATTR_VERSION_LATEST: self.sys_cli.latest_version,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def stats(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return resource information."""
|
||||
stats = await self.sys_cli.stats()
|
||||
|
||||
return {
|
||||
ATTR_CPU_PERCENT: stats.cpu_percent,
|
||||
ATTR_MEMORY_USAGE: stats.memory_usage,
|
||||
ATTR_MEMORY_LIMIT: stats.memory_limit,
|
||||
ATTR_MEMORY_PERCENT: stats.memory_percent,
|
||||
ATTR_NETWORK_RX: stats.network_rx,
|
||||
ATTR_NETWORK_TX: stats.network_tx,
|
||||
ATTR_BLK_READ: stats.blk_read,
|
||||
ATTR_BLK_WRITE: stats.blk_write,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def update(self, request: web.Request) -> None:
|
||||
"""Update HA CLI."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.sys_cli.latest_version)
|
||||
|
||||
await asyncio.shield(self.sys_cli.update(version))
|
@@ -1,4 +1,4 @@
|
||||
"""Init file for Hass.io network RESTful API."""
|
||||
"""Init file for Supervisor network RESTful API."""
|
||||
import voluptuous as vol
|
||||
|
||||
from .utils import api_process, api_validate
|
@@ -1,4 +1,4 @@
|
||||
"""Init file for Hass.io DNS RESTful API."""
|
||||
"""Init file for Supervisor DNS RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any, Awaitable, Dict
|
||||
@@ -11,7 +11,7 @@ from ..const import (
|
||||
ATTR_BLK_WRITE,
|
||||
ATTR_CPU_PERCENT,
|
||||
ATTR_HOST,
|
||||
ATTR_LATEST_VERSION,
|
||||
ATTR_VERSION_LATEST,
|
||||
ATTR_LOCALS,
|
||||
ATTR_MEMORY_LIMIT,
|
||||
ATTR_MEMORY_PERCENT,
|
||||
@@ -24,13 +24,13 @@ from ..const import (
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..exceptions import APIError
|
||||
from ..validate import DNS_SERVER_LIST
|
||||
from ..validate import dns_server_list
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): DNS_SERVER_LIST})
|
||||
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): dns_server_list})
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
|
||||
|
||||
@@ -43,7 +43,7 @@ class APICoreDNS(CoreSysAttributes):
|
||||
"""Return DNS information."""
|
||||
return {
|
||||
ATTR_VERSION: self.sys_dns.version,
|
||||
ATTR_LATEST_VERSION: self.sys_dns.latest_version,
|
||||
ATTR_VERSION_LATEST: self.sys_dns.latest_version,
|
||||
ATTR_HOST: str(self.sys_docker.network.dns),
|
||||
ATTR_SERVERS: self.sys_dns.servers,
|
||||
ATTR_LOCALS: self.sys_host.network.dns_servers,
|
||||
@@ -95,3 +95,8 @@ class APICoreDNS(CoreSysAttributes):
|
||||
def restart(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Restart CoreDNS plugin."""
|
||||
return asyncio.shield(self.sys_dns.restart())
|
||||
|
||||
@api_process
|
||||
def reset(self, request: web.Request) -> Awaitable[None]:
|
||||
"""Reset CoreDNS plugin."""
|
||||
return asyncio.shield(self.sys_dns.reset())
|
@@ -1,4 +1,4 @@
|
||||
"""Init file for Hass.io hardware RESTful API."""
|
||||
"""Init file for Supervisor hardware RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
@@ -37,11 +37,17 @@ class APIHardware(CoreSysAttributes):
|
||||
|
||||
@api_process
|
||||
async def audio(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Show ALSA audio devices."""
|
||||
"""Show pulse audio profiles."""
|
||||
return {
|
||||
ATTR_AUDIO: {
|
||||
ATTR_INPUT: self.sys_host.alsa.input_devices,
|
||||
ATTR_OUTPUT: self.sys_host.alsa.output_devices,
|
||||
ATTR_INPUT: {
|
||||
profile.name: profile.description
|
||||
for profile in self.sys_host.sound.inputs
|
||||
},
|
||||
ATTR_OUTPUT: {
|
||||
profile.name: profile.description
|
||||
for profile in self.sys_host.sound.outputs
|
||||
},
|
||||
}
|
||||
}
|
||||
|
@@ -1,39 +1,40 @@
|
||||
"""Init file for Hass.io Home Assistant RESTful API."""
|
||||
"""Init file for Supervisor Home Assistant RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Coroutine, Dict, Any
|
||||
from typing import Any, Coroutine, Dict
|
||||
|
||||
import voluptuous as vol
|
||||
from aiohttp import web
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
ATTR_ARCH,
|
||||
ATTR_AUDIO_INPUT,
|
||||
ATTR_AUDIO_OUTPUT,
|
||||
ATTR_BLK_READ,
|
||||
ATTR_BLK_WRITE,
|
||||
ATTR_BOOT,
|
||||
ATTR_CPU_PERCENT,
|
||||
ATTR_CUSTOM,
|
||||
ATTR_IMAGE,
|
||||
ATTR_LAST_VERSION,
|
||||
ATTR_IP_ADDRESS,
|
||||
ATTR_VERSION_LATEST,
|
||||
ATTR_MACHINE,
|
||||
ATTR_MEMORY_LIMIT,
|
||||
ATTR_MEMORY_USAGE,
|
||||
ATTR_MEMORY_PERCENT,
|
||||
ATTR_MEMORY_USAGE,
|
||||
ATTR_NETWORK_RX,
|
||||
ATTR_NETWORK_TX,
|
||||
ATTR_PASSWORD,
|
||||
ATTR_PORT,
|
||||
ATTR_REFRESH_TOKEN,
|
||||
ATTR_SSL,
|
||||
ATTR_VERSION,
|
||||
ATTR_WAIT_BOOT,
|
||||
ATTR_WATCHDOG,
|
||||
ATTR_IP_ADDRESS,
|
||||
CONTENT_TYPE_BINARY,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..exceptions import APIError
|
||||
from ..validate import DOCKER_IMAGE, NETWORK_PORT
|
||||
from ..validate import docker_image, network_port
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
@@ -42,14 +43,15 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
SCHEMA_OPTIONS = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_BOOT): vol.Boolean(),
|
||||
vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(DOCKER_IMAGE),
|
||||
vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_PORT): NETWORK_PORT,
|
||||
vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(docker_image),
|
||||
vol.Inclusive(ATTR_VERSION_LATEST, "custom_hass"): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_PORT): network_port,
|
||||
vol.Optional(ATTR_SSL): vol.Boolean(),
|
||||
vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
|
||||
vol.Optional(ATTR_WAIT_BOOT): vol.All(vol.Coerce(int), vol.Range(min=60)),
|
||||
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -64,7 +66,7 @@ class APIHomeAssistant(CoreSysAttributes):
|
||||
"""Return host information."""
|
||||
return {
|
||||
ATTR_VERSION: self.sys_homeassistant.version,
|
||||
ATTR_LAST_VERSION: self.sys_homeassistant.latest_version,
|
||||
ATTR_VERSION_LATEST: self.sys_homeassistant.latest_version,
|
||||
ATTR_MACHINE: self.sys_homeassistant.machine,
|
||||
ATTR_IP_ADDRESS: str(self.sys_homeassistant.ip_address),
|
||||
ATTR_ARCH: self.sys_homeassistant.arch,
|
||||
@@ -75,6 +77,10 @@ class APIHomeAssistant(CoreSysAttributes):
            ATTR_SSL: self.sys_homeassistant.api_ssl,
            ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
            ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
            ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input,
            ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output,
            # Remove end of Q3 2020
            "last_version": self.sys_homeassistant.latest_version,
        }

    @api_process
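The info payload now exposes version_latest while still mirroring the value under the deprecated last_version key until it is dropped (the comment targets end of Q3 2020). A small illustration of that transition pattern, with made-up version strings:

from typing import Any, Dict


def build_info(version: str, latest: str) -> Dict[str, Any]:
    """Build an info payload that keeps a deprecated alias for compatibility."""
    info = {
        "version": version,
        "version_latest": latest,
    }
    # Mirror the new key under the old name until clients have migrated.
    info["last_version"] = info["version_latest"]
    return info


print(build_info("0.108.0", "0.109.0"))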
@@ -82,9 +88,9 @@ class APIHomeAssistant(CoreSysAttributes):
        """Set Home Assistant options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
        if ATTR_IMAGE in body and ATTR_VERSION_LATEST in body:
            self.sys_homeassistant.image = body[ATTR_IMAGE]
            self.sys_homeassistant.latest_version = body[ATTR_LAST_VERSION]
            self.sys_homeassistant.latest_version = body[ATTR_VERSION_LATEST]

        if ATTR_BOOT in body:
            self.sys_homeassistant.boot = body[ATTR_BOOT]
@@ -92,10 +98,6 @@ class APIHomeAssistant(CoreSysAttributes):
        if ATTR_PORT in body:
            self.sys_homeassistant.api_port = body[ATTR_PORT]

        if ATTR_PASSWORD in body:
            self.sys_homeassistant.api_password = body[ATTR_PASSWORD]
            self.sys_homeassistant.refresh_token = None

        if ATTR_SSL in body:
            self.sys_homeassistant.api_ssl = body[ATTR_SSL]
@@ -107,7 +109,12 @@ class APIHomeAssistant(CoreSysAttributes):
        if ATTR_REFRESH_TOKEN in body:
            self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]
            self.sys_homeassistant.api_password = None

        if ATTR_AUDIO_INPUT in body:
            self.sys_homeassistant.audio_input = body[ATTR_AUDIO_INPUT]

        if ATTR_AUDIO_OUTPUT in body:
            self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT]

        self.sys_homeassistant.save_data()
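The options handler applies only the keys that survived validation, so any option omitted from the request keeps its current value, and the configuration is persisted at the end via save_data(). A compact sketch of that update pattern, using a hypothetical config object rather than the Supervisor's sys_homeassistant:

from dataclasses import dataclass
from typing import Any, Dict, Optional


@dataclass
class HomeAssistantConfig:
    """Hypothetical stand-in for the persisted Home Assistant settings."""

    boot: bool = True
    audio_input: Optional[str] = None
    audio_output: Optional[str] = None


def apply_options(config: HomeAssistantConfig, body: Dict[str, Any]) -> None:
    """Apply only the options that were actually submitted."""
    if "boot" in body:
        config.boot = body["boot"]
    if "audio_input" in body:
        config.audio_input = body["audio_input"]
    if "audio_output" in body:
        config.audio_output = body["audio_output"]
    # A real implementation would persist the change afterwards,
    # as the diff does with self.sys_homeassistant.save_data().


config = HomeAssistantConfig()
apply_options(config, {"audio_output": "alsa_output.default"})
print(config)  # only audio_output changed; boot and audio_input keep their defaults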
@@ -1,4 +1,4 @@
"""Init file for Hass.io host RESTful API."""
"""Init file for Supervisor host RESTful API."""
import asyncio
import logging
@@ -1,4 +1,4 @@
"""Init file for Hass.io info RESTful API."""
"""Init file for Supervisor info RESTful API."""
import logging
from typing import Any, Dict
@@ -1,4 +1,4 @@
"""Hass.io Add-on ingress service."""
"""Supervisor Add-on ingress service."""
import asyncio
from ipaddress import ip_address
import logging
@@ -23,6 +23,7 @@ from ..const import (
    ATTR_ENABLE,
    COOKIE_INGRESS,
    HEADER_TOKEN,
    HEADER_TOKEN_OLD,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
@@ -80,7 +81,7 @@ class APIIngress(CoreSysAttributes):
    async def handler(
        self, request: web.Request
    ) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
        """Route data to Hass.io ingress service."""
        """Route data to Supervisor ingress service."""
        self._check_ha_access(request)

        # Check Ingress Session
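The handler's return annotation covers plain responses, streamed responses, and WebSocket upgrades. A minimal aiohttp example of a handler that returns either a WebSocket echo or a normal HTTP response, purely to illustrate that Union return type (the /echo endpoint and its behaviour are assumptions, not the Supervisor's ingress logic):

from typing import Union

from aiohttp import WSMsgType, web


async def handler(request: web.Request) -> Union[web.Response, web.WebSocketResponse]:
    """Upgrade to a WebSocket echo if requested, otherwise answer over HTTP."""
    if request.headers.get("Upgrade", "").lower() == "websocket":
        ws = web.WebSocketResponse()
        await ws.prepare(request)
        async for msg in ws:
            if msg.type == WSMsgType.TEXT:
                await ws.send_str(msg.data)  # echo text frames back
        return ws
    return web.Response(text="ok")


app = web.Application()
app.add_routes([web.get("/echo", handler)])
# web.run_app(app)  # uncomment to serve on http://localhost:8080/echo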
@@ -204,7 +205,16 @@ def _init_header(
    # filter flags
    for name, value in request.headers.items():
        if name in (hdrs.CONTENT_LENGTH, hdrs.CONTENT_ENCODING, istr(HEADER_TOKEN)):
        if name in (
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_ENCODING,
            hdrs.SEC_WEBSOCKET_EXTENSIONS,
            hdrs.SEC_WEBSOCKET_PROTOCOL,
            hdrs.SEC_WEBSOCKET_VERSION,
            hdrs.SEC_WEBSOCKET_KEY,
            istr(HEADER_TOKEN),
            istr(HEADER_TOKEN_OLD),
        ):
            continue
        headers[name] = value
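_init_header() now also drops the WebSocket handshake headers and the old token header before a request is proxied to the add-on. A self-contained sketch of that filtering idea; the token header names below are assumptions for the example, not necessarily the Supervisor's constants:

from aiohttp import hdrs
from multidict import CIMultiDict

# Assumed header names for illustration only.
HEADER_TOKEN = "X-Supervisor-Token"
HEADER_TOKEN_OLD = "X-Hassio-Key"

_FILTERED = {
    str(name).lower()
    for name in (
        hdrs.CONTENT_LENGTH,
        hdrs.CONTENT_ENCODING,
        hdrs.SEC_WEBSOCKET_EXTENSIONS,
        hdrs.SEC_WEBSOCKET_PROTOCOL,
        hdrs.SEC_WEBSOCKET_VERSION,
        hdrs.SEC_WEBSOCKET_KEY,
        HEADER_TOKEN,
        HEADER_TOKEN_OLD,
    )
}


def filter_headers(source: CIMultiDict) -> CIMultiDict:
    """Copy headers for proxying, dropping entity, handshake, and token headers."""
    headers = CIMultiDict()
    for name, value in source.items():
        if name.lower() in _FILTERED:
            continue
        headers[name] = value
    return headers


source = CIMultiDict({"Content-Length": "10", "X-Custom": "1"})
print(list(filter_headers(source).items()))  # [('X-Custom', '1')]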
Some files were not shown because too many files have changed in this diff.