Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-08-16 04:29:22 +00:00
Compare commits
133 Commits
Commit SHAs included in this comparison:

2e53a48504, 8e4db0c3ec, 4072b06faf, a2cf7ece70, 734fe3afde, 7f3bc91c1d, 9c2c95757d, b5ed6c586a,
35033d1f76, 9e41d0c5b0, 62e92fada9, ae0a1a657f, 81e511ba8e, d89cb91c8c, dc31b6e6fe, 930a32de1a,
e40f2ed8e3, abbd3d1078, 63c9948456, b6c81d779a, 2480c83169, 334cc66cf6, 3cf189ad94, 6ffb94a0f5,
3593826441, 0a0a62f238, 41ce9913d2, b77c42384d, 138bb12f98, 4fe2859f4e, 0768b2b4bc, e6f1772a93,
5374b2b3b9, 1196788856, 9f3f47eb80, 1a90a478f2, ee773f3b63, 5ffc27f60c, 4c13dfb43c, bc099f0d81,
b26dd0af19, 0dee5bd763, 0765387ad8, a07517bd3c, e5f0d80d96, 2fc5e3b7d9, 778bc46848, 882586b246,
b7c07a2555, 814b504fa9, 7ae430e7a8, 0e7e95ba20, e577d8acb2, 0a76ab5054, 03c5596e04, 3af4e14e83,
7c8cf57820, 8d84a8a62e, 08c45060bd, 7ca8d2811b, bb6898b032, cd86c6814e, b67e116650, 57ce411fb6,
85ed4d9e8d, ccb39da569, dd7ba64d32, de3edb1654, d262151727, a37c90af96, 0a3a752b4c, 0a34f427f8,
157740e374, b0e994f3f5, f374852801, 709f034f2e, 6d6deb8c66, 5771b417bc, 51efcefdab, d31ab5139d,
ce18183daa, b8b73cf880, 5291e6c1f3, 626a9f06c4, 72338eb5b8, 7bd77c6e99, 69151b962a, 86305d4fe4,
d5c3850a3f, 3e645b6175, 89dc78bc05, 164c403d05, 5e8007453f, 0a0d97b084, eb604ed92d, c47828dbaa,
ea437dc745, c16a208b39, 55d803b2a0, 611f6f2829, b94df76731, 218619e7f0, 273eed901a, 8ea712a937,
658449a7a0, 968c471591, b4665f3907, 496cee1ec4, 0f8c80f3ba, 6c28f82239, def32abb57, f57a241b9e,
11a7e8b15d, fa4f7697b7, 6098b7de8e, 0a382ce54d, dd53aaa30c, 31e175a15a, 4c80727bcc, b2c3157361,
dc4f38ebd0, 7c9437c6ee, 9ce9e10dfd, 4e94043bca, 749d45bf13, ce99b3e259, 2c84daefab, dc1933fa88,
6970cebf80, a234006de2, 2484149323, 778148424c, 55f4a2395e
43  .devcontainer/Dockerfile  (Normal file)
@@ -0,0 +1,43 @@
+FROM python:3.7
+
+WORKDIR /workspaces
+
+# Install Node/Yarn for Frontent
+RUN apt-get update && apt-get install -y --no-install-recommends \
+        curl \
+        git \
+        apt-utils \
+        apt-transport-https \
+    && curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
+    && echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
+    && apt-get update && apt-get install -y --no-install-recommends \
+        nodejs \
+        yarn \
+    && curl -o - https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
+    && rm -rf /var/lib/apt/lists/*
+ENV NVM_DIR /root/.nvm
+
+# Install docker
+# https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
+RUN apt-get update && apt-get install -y --no-install-recommends \
+        apt-transport-https \
+        ca-certificates \
+        curl \
+        software-properties-common \
+        gpg-agent \
+    && curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
+    && add-apt-repository "deb https://download.docker.com/linux/debian $(lsb_release -cs) stable" \
+    && apt-get update && apt-get install -y --no-install-recommends \
+        docker-ce \
+        docker-ce-cli \
+        containerd.io \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install Python dependencies from requirements.txt if it exists
+COPY requirements.txt requirements_tests.txt /workspaces/
+RUN pip install -r requirements.txt \
+    && pip3 install -r requirements_tests.txt \
+    && pip install black tox
+
+# Set the default shell to bash instead of sh
+ENV SHELL /bin/bash
29  .devcontainer/devcontainer.json  (Normal file)
@@ -0,0 +1,29 @@
+// See https://aka.ms/vscode-remote/devcontainer.json for format details.
+{
+    "name": "Hass.io dev",
+    "context": "..",
+    "dockerFile": "Dockerfile",
+    "appPort": "9123:8123",
+    "runArgs": [
+        "-e",
+        "GIT_EDITOR='code --wait'",
+        "--privileged"
+    ],
+    "extensions": [
+        "ms-python.python"
+    ],
+    "settings": {
+        "python.pythonPath": "/usr/local/bin/python",
+        "python.linting.pylintEnabled": true,
+        "python.linting.enabled": true,
+        "python.formatting.provider": "black",
+        "python.formatting.blackArgs": [
+            "--target-version",
+            "py37"
+        ],
+        "editor.formatOnPaste": false,
+        "editor.formatOnSave": true,
+        "editor.formatOnType": true,
+        "files.trimTrailingWhitespace": true
+    }
+}
.dockerignore
@@ -1,13 +1,23 @@
 # General files
 .git
 .github
+.devcontainer
+.vscode
 
 # Test related files
 .tox
 
 # Temporary files
 **/__pycache__
+.pytest_cache
 
 # virtualenv
 venv/
-ENV/
+
+# HA
+home-assistant-polymer/*
+misc/*
+script/*
+
+# Test ENV
+data/
4  .gitignore  (vendored)
@@ -92,4 +92,6 @@ ENV/
 .pylint.d/
 
 # VS Code
-.vscode/
+.vscode/*
+!.vscode/cSpell.json
+!.vscode/tasks.json
92  .vscode/tasks.json  (vendored, Normal file)
@@ -0,0 +1,92 @@
+{
+    "version": "2.0.0",
+    "tasks": [
+        {
+            "label": "Run Testenv",
+            "type": "shell",
+            "command": "./scripts/test_env.sh",
+            "group": {
+                "kind": "test",
+                "isDefault": true,
+            },
+            "presentation": {
+                "reveal": "always",
+                "panel": "new"
+            },
+            "problemMatcher": []
+        },
+        {
+            "label": "Run Testenv CLI",
+            "type": "shell",
+            "command": "docker run --rm -ti -v /etc/machine-id:/etc/machine-id --network=hassio --add-host hassio:172.30.32.2 homeassistant/amd64-hassio-cli:dev",
+            "group": {
+                "kind": "test",
+                "isDefault": true,
+            },
+            "presentation": {
+                "reveal": "always",
+                "panel": "new"
+            },
+            "problemMatcher": []
+        },
+        {
+            "label": "Update UI",
+            "type": "shell",
+            "command": "./scripts/update-frontend.sh",
+            "group": {
+                "kind": "build",
+                "isDefault": true
+            },
+            "presentation": {
+                "reveal": "always",
+                "panel": "new"
+            },
+            "problemMatcher": []
+        },
+        {
+            "label": "Pytest",
+            "type": "shell",
+            "command": "pytest --timeout=10 tests",
+            "group": {
+                "kind": "test",
+                "isDefault": true,
+            },
+            "presentation": {
+                "reveal": "always",
+                "panel": "new"
+            },
+            "problemMatcher": []
+        },
+        {
+            "label": "Flake8",
+            "type": "shell",
+            "command": "flake8 hassio tests",
+            "group": {
+                "kind": "test",
+                "isDefault": true,
+            },
+            "presentation": {
+                "reveal": "always",
+                "panel": "new"
+            },
+            "problemMatcher": []
+        },
+        {
+            "label": "Pylint",
+            "type": "shell",
+            "command": "pylint hassio",
+            "dependsOn": [
+                "Install all Requirements"
+            ],
+            "group": {
+                "kind": "test",
+                "isDefault": true,
+            },
+            "presentation": {
+                "reveal": "always",
+                "panel": "new"
+            },
+            "problemMatcher": []
+        }
+    ]
+}
59  API.md
@@ -44,6 +44,8 @@ The addons from `addons` are only installed one.
     "logging": "debug|info|warning|error|critical",
     "ip_address": "ip address",
     "wait_boot": "int",
+    "debug": "bool",
+    "debug_block": "bool",
     "addons": [
         {
             "name": "xy bla",
@@ -80,6 +82,8 @@ Optional:
     "channel": "stable|beta|dev",
     "timezone": "TIMEZONE",
     "wait_boot": "int",
+    "debug": "bool",
+    "debug_block": "bool",
     "logging": "debug|info|warning|error|critical",
     "addons_repositories": [
         "REPO_URL"
@@ -101,6 +105,7 @@ Output is the raw docker log.
     "cpu_percent": 0.0,
     "memory_usage": 283123,
     "memory_limit": 329392,
+    "memory_percent": 1.4,
     "network_tx": 0,
     "network_rx": 0,
     "blk_read": 0,
@@ -108,6 +113,10 @@ Output is the raw docker log.
 }
 ```
 
+- GET `/supervisor/repair`
+
+Repair overlayfs issue and restore lost images
+
 ### Snapshot
 
 - GET `/snapshots`
@@ -413,6 +422,7 @@ Proxy to real websocket instance.
     "cpu_percent": 0.0,
     "memory_usage": 283123,
     "memory_limit": 329392,
+    "memory_percent": 1.4,
     "network_tx": 0,
     "network_rx": 0,
     "blk_read": 0,
@@ -465,6 +475,8 @@ Get all available addons.
     {
         "name": "xy bla",
         "slug": "xdssd_xybla",
+        "hostname": "xdssd-xybla",
+        "dns": [],
         "description": "description",
         "long_description": "null|markdown",
         "auto_update": "bool",
@@ -490,6 +502,7 @@ Get all available addons.
     "privileged": ["NET_ADMIN", "SYS_ADMIN"],
     "apparmor": "disable|default|profile",
     "devices": ["/dev/xy"],
+    "udev": "bool",
     "auto_uart": "bool",
     "icon": "bool",
     "logo": "bool",
@@ -585,6 +598,7 @@ Write data to add-on stdin
     "cpu_percent": 0.0,
     "memory_usage": 283123,
     "memory_limit": 329392,
+    "memory_percent": 1.4,
     "network_tx": 0,
     "network_rx": 0,
     "blk_read": 0,
@@ -726,7 +740,50 @@ return:
     "arch": "arch",
     "supported_arch": ["arch1", "arch2"],
     "channel": "stable|beta|dev",
-    "logging": "debug|info|warning|error|critical"
+    "logging": "debug|info|warning|error|critical",
+    "timezone": "Europe/Zurich"
+}
+```
+
+### DNS
+
+- GET `/dns/info`
+```json
+{
+    "host": "ip-address",
+    "version": "1",
+    "latest_version": "2",
+    "servers": ["dns://8.8.8.8"]
+}
+```
+
+- POST `/dns/options`
+```json
+{
+    "servers": ["dns://8.8.8.8"]
+}
+```
+
+- POST `/dns/update`
+```json
+{
+    "version": "VERSION"
+}
+```
+
+- GET `/dns/logs`
+
+- GET `/dns/stats`
+```json
+{
+    "cpu_percent": 0.0,
+    "memory_usage": 283123,
+    "memory_limit": 329392,
+    "memory_percent": 1.4,
+    "network_tx": 0,
+    "network_rx": 0,
+    "blk_read": 0,
+    "blk_write": 0
 }
 ```
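As a usage illustration only (not part of the diff above): a minimal Python sketch of an add-on polling the new `/dns/info` endpoint. The `http://hassio` host and the `HASSIO_TOKEN`/`X-Hassio-Key` auth convention are assumptions about the surrounding Hass.io API, not something this change defines.

```python
import os

import requests  # assumption: requests is available in the add-on environment

HASSIO_API = "http://hassio"  # assumed add-on-facing API host
TOKEN = os.environ.get("HASSIO_TOKEN", "")  # assumed token environment variable


def dns_info() -> dict:
    """Fetch the CoreDNS plugin info documented above (GET /dns/info)."""
    resp = requests.get(
        f"{HASSIO_API}/dns/info",
        headers={"X-Hassio-Key": TOKEN},  # assumed auth header name
        timeout=10,
    )
    resp.raise_for_status()
    return resp.json()


if __name__ == "__main__":
    print(dns_info())
```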
33  Dockerfile
@@ -10,24 +10,29 @@ RUN apk add --no-cache \
     socat \
     glib \
     libstdc++ \
+    eudev \
     eudev-libs
 
+ARG BUILD_ARCH
+WORKDIR /usr/src
+
 # Install requirements
-COPY requirements.txt /usr/src/
-RUN apk add --no-cache --virtual .build-dependencies \
-    make \
-    g++ \
-    openssl-dev \
-    libffi-dev \
-    musl-dev \
-    && export MAKEFLAGS="-j$(nproc)" \
-    && pip3 install --no-cache-dir -r /usr/src/requirements.txt \
-    && apk del .build-dependencies \
-    && rm -f /usr/src/requirements.txt
+COPY requirements.txt .
+RUN export MAKEFLAGS="-j$(nproc)" \
+    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
+    "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
+    -r ./requirements.txt \
+    && rm -f requirements.txt
 
 # Install HassIO
-COPY . /usr/src/hassio
-RUN pip3 install --no-cache-dir /usr/src/hassio \
-    && rm -rf /usr/src/hassio
+COPY . hassio
+RUN pip3 install --no-cache-dir -e ./hassio \
+    && python3 -m compileall ./hassio/hassio
+
+# Initialize udev daemon, handle CMD
+COPY entry.sh /bin/
+ENTRYPOINT ["/bin/entry.sh"]
 
+WORKDIR /
 CMD [ "python3", "-m", "hassio" ]
README.md
@@ -1,4 +1,4 @@
-[](https://dev.azure.com/home-assistant/Home%20Assistant/_build/latest?definitionId=2&branchName=dev)
+[](https://dev.azure.com/home-assistant/Hass.io/_build/latest?definitionId=2&branchName=dev)
 
 # Hass.io
 
azure-pipelines.yml
@@ -1,6 +1,7 @@
 # https://dev.azure.com/home-assistant
 
 trigger:
+  batch: true
   branches:
     include:
       - master
@@ -11,120 +12,157 @@ trigger:
     exclude:
       - untagged*
 pr:
   - dev
 
 variables:
-  versionHadolint: 'v1.16.3'
-  versionBuilder: '1.1'
-
-jobs:
-
-- job: 'Tox'
-  pool:
-    vmImage: 'ubuntu-16.04'
-  steps:
-  - task: UsePythonVersion@0
-    displayName: 'Use Python $(python.version)'
-    inputs:
-      versionSpec: '3.7'
-  - script: pip install tox
-    displayName: 'Install Tox'
-  - script: tox
-    displayName: 'Run Tox'
-
-
-- job: 'JQ'
-  pool:
-    vmImage: 'ubuntu-16.04'
-  steps:
-  - script: sudo apt-get install -y jq
-    displayName: 'Install JQ'
-  - bash: |
-      shopt -s globstar
-      cat **/*.json | jq '.'
-    displayName: 'Run JQ'
-
-
-- job: 'Hadolint'
-  pool:
-    vmImage: 'ubuntu-16.04'
-  steps:
-  - script: sudo docker pull hadolint/hadolint:$(versionHadolint)
-    displayName: 'Install Hadolint'
-  - script: |
-      sudo docker run --rm -i \
-        -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
-        hadolint/hadolint:$(versionHadolint) < Dockerfile
-    displayName: 'Run Hadolint'
-
-
-- job: 'ReleaseDEV'
-  condition: and(eq(variables['Build.SourceBranchName'], 'dev'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'))
-  dependsOn:
-  - 'JQ'
-  - 'Tox'
-  - 'Hadolint'
-  pool:
-    vmImage: 'ubuntu-16.04'
-  strategy:
-    maxParallel: 2
-    matrix:
-      amd64:
-        buildArch: 'amd64'
-      i386:
-        buildArch: 'i386'
-      armhf:
-        buildArch: 'armhf'
-      armv7:
-        buildArch: 'armv7'
-      aarch64:
-        buildArch: 'aarch64'
-  steps:
-  - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
-    displayName: 'Docker hub login'
-  - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
-    displayName: 'Install Builder'
-  - script: |
-      sudo docker run --rm --privileged \
-        -v ~/.docker:/root/.docker \
-        -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
-        homeassistant/amd64-builder:$(versionBuilder) \
-        --supervisor "--$(buildArch)" \
-        -t /data --version dev --docker-hub homeassistant
-    displayName: 'Build DEV'
-
-
-- job: 'Release'
-  condition: and(startsWith(variables['Build.SourceBranch'], 'refs/tags'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'))
-  dependsOn:
-  - 'JQ'
-  - 'Tox'
-  - 'Hadolint'
-  pool:
-    vmImage: 'ubuntu-16.04'
-  strategy:
-    maxParallel: 5
-    matrix:
-      amd64:
-        buildArch: 'amd64'
-      i386:
-        buildArch: 'i386'
-      armhf:
-        buildArch: 'armhf'
-      armv7:
-        buildArch: 'armv7'
-      aarch64:
-        buildArch: 'aarch64'
-  steps:
-  - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
-    displayName: 'Docker hub login'
-  - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
-    displayName: 'Install Builder'
-  - script: |
-      sudo docker run --rm --privileged \
-        -v ~/.docker:/root/.docker \
-        -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
-        homeassistant/amd64-builder:$(versionBuilder) \
-        --supervisor "--$(buildArch)" \
-        -t /data --docker-hub homeassistant
-    displayName: 'Build Release'
+- name: basePythonTag
+  value: '3.7-alpine3.10'
+- name: versionHadolint
+  value: 'v1.16.3'
+- name: versionBuilder
+  value: '4.4'
+- name: versionWheels
+  value: '1.0-3.7-alpine3.10'
+- group: docker
+- group: wheels
+
+
+stages:
+
+- stage: 'Test'
+  jobs:
+  - job: 'Tox'
+    pool:
+      vmImage: 'ubuntu-latest'
+    steps:
+    - task: UsePythonVersion@0
+      displayName: 'Use Python 3.7'
+      inputs:
+        versionSpec: '3.7'
+    - script: pip install tox
+      displayName: 'Install Tox'
+    - script: tox
+      displayName: 'Run Tox'
+  - job: 'Black'
+    pool:
+      vmImage: 'ubuntu-latest'
+    steps:
+    - task: UsePythonVersion@0
+      displayName: 'Use Python $(python.version)'
+      inputs:
+        versionSpec: '3.7'
+    - script: pip install black
+      displayName: 'Install black'
+    - script: black --target-version py37 --check hassio tests
+      displayName: 'Run Black'
+  - job: 'JQ'
+    pool:
+      vmImage: 'ubuntu-latest'
+    steps:
+    - script: sudo apt-get install -y jq
+      displayName: 'Install JQ'
+    - bash: |
+        shopt -s globstar
+        cat **/*.json | jq '.'
+      displayName: 'Run JQ'
+  - job: 'Hadolint'
+    pool:
+      vmImage: 'ubuntu-latest'
+    steps:
+    - script: sudo docker pull hadolint/hadolint:$(versionHadolint)
+      displayName: 'Install Hadolint'
+    - script: |
+        sudo docker run --rm -i \
+          -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
+          hadolint/hadolint:$(versionHadolint) < Dockerfile
+      displayName: 'Run Hadolint'
+
+- stage: 'Wheels'
+  jobs:
+  - job: 'Wheels'
+    condition: eq(variables['Build.SourceBranchName'], 'dev')
+    timeoutInMinutes: 360
+    pool:
+      vmImage: 'ubuntu-latest'
+    strategy:
+      maxParallel: 5
+      matrix:
+        amd64:
+          buildArch: 'amd64'
+        i386:
+          buildArch: 'i386'
+        armhf:
+          buildArch: 'armhf'
+        armv7:
+          buildArch: 'armv7'
+        aarch64:
+          buildArch: 'aarch64'
+    steps:
+    - script: |
+        sudo apt-get update
+        sudo apt-get install -y --no-install-recommends \
+          qemu-user-static \
+          binfmt-support \
+          curl
+
+        sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
+        sudo update-binfmts --enable qemu-arm
+        sudo update-binfmts --enable qemu-aarch64
+      displayName: 'Initial cross build'
+    - script: |
+        mkdir -p .ssh
+        echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
+        ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
+        chmod 600 .ssh/*
+      displayName: 'Install ssh key'
+    - script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
+      displayName: 'Install wheels builder'
+    - script: |
+        sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
+          homeassistant/$(buildArch)-wheels:$(versionWheels) \
+          --apk "build-base;libffi-dev;openssl-dev" \
+          --index $(wheelsIndex) \
+          --requirement requirements.txt \
+          --upload rsync \
+          --remote wheels@$(wheelsHost):/opt/wheels
+      displayName: 'Run wheels build'
+
+- stage: 'Deploy'
+  jobs:
+  - job: 'VersionValidate'
+    condition: or(startsWith(variables['Build.SourceBranch'], 'refs/tags'), eq(variables['Build.SourceBranchName'], 'dev'))
+    pool:
+      vmImage: 'ubuntu-latest'
+    steps:
+    - task: UsePythonVersion@0
+      displayName: 'Use Python 3.7'
+      inputs:
+        versionSpec: '3.7'
+    - script: |
+        setup_version="$(python setup.py -V)"
+        branch_version="$(Build.SourceBranchName)"
+
+        if [ "${branch_version}" == "dev" ]; then
+          exit 0
+        elif [ "${setup_version}" != "${branch_version}" ]; then
+          echo "Version of tag ${branch_version} don't match with ${setup_version}!"
+          exit 1
+        fi
+      displayName: 'Check version of branch/tag'
+  - job: 'Release'
+    dependsOn:
+    - 'VersionValidate'
+    pool:
+      vmImage: 'ubuntu-latest'
+    steps:
+    - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
+      displayName: 'Docker hub login'
+    - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
+      displayName: 'Install Builder'
+    - script: |
+        sudo docker run --rm --privileged \
+          -v ~/.docker:/root/.docker \
+          -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
+          homeassistant/amd64-builder:$(versionBuilder) \
+          --supervisor $(basePythonTag) --version $(Build.SourceBranchName) \
+          --all -t /data --docker-hub homeassistant
+      displayName: 'Build Release'
13  entry.sh  (Executable file)
@@ -0,0 +1,13 @@
+#!/bin/bash
+set -e
+
+udevd --daemon
+udevadm trigger
+
+if CMD="$(command -v "$1")"; then
+    shift
+    exec "$CMD" "$@"
+else
+    echo "Command not found: $1"
+    exit 1
+fi
hassio/__main__.py
@@ -38,7 +38,9 @@ if __name__ == "__main__":
 
     _LOGGER.info("Initialize Hass.io setup")
     coresys = loop.run_until_complete(bootstrap.initialize_coresys())
+    loop.run_until_complete(coresys.core.connect())
 
+    bootstrap.supervisor_debugger(coresys)
     bootstrap.migrate_system_env(coresys)
 
     _LOGGER.info("Setup HassIO")
hassio/addons/__init__.py
@@ -1,158 +1,319 @@
 """Init file for Hass.io add-ons."""
 import asyncio
+from contextlib import suppress
 import logging
+import tarfile
+from typing import Dict, List, Optional, Union
+
+from ..const import BOOT_AUTO, STATE_STARTED
+from ..coresys import CoreSys, CoreSysAttributes
+from ..exceptions import (
+    AddonsError,
+    AddonsNotSupportedError,
+    CoreDNSError,
+    DockerAPIError,
+    HomeAssistantAPIError,
+    HostAppArmorError,
+)
+from ..store.addon import AddonStore
 from .addon import Addon
-from .repository import Repository
 from .data import AddonsData
-from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED
-from ..coresys import CoreSysAttributes
 
 _LOGGER = logging.getLogger(__name__)
 
-BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
+AnyAddon = Union[Addon, AddonStore]
 
 
 class AddonManager(CoreSysAttributes):
     """Manage add-ons inside Hass.io."""
 
-    def __init__(self, coresys):
+    def __init__(self, coresys: CoreSys):
         """Initialize Docker base wrapper."""
-        self.coresys = coresys
-        self.data = AddonsData(coresys)
-        self.addons_obj = {}
-        self.repositories_obj = {}
+        self.coresys: CoreSys = coresys
+        self.data: AddonsData = AddonsData(coresys)
+        self.local: Dict[str, Addon] = {}
+        self.store: Dict[str, AddonStore] = {}
 
     @property
-    def list_addons(self):
+    def all(self) -> List[AnyAddon]:
         """Return a list of all add-ons."""
-        return list(self.addons_obj.values())
+        addons = {**self.store, **self.local}
+        return list(addons.values())
 
     @property
-    def list_installed(self):
-        """Return a list of installed add-ons."""
-        return [addon for addon in self.addons_obj.values()
-                if addon.is_installed]
-
-    @property
-    def list_repositories(self):
-        """Return list of add-on repositories."""
-        return list(self.repositories_obj.values())
-
-    def get(self, addon_slug):
-        """Return an add-on from slug."""
-        return self.addons_obj.get(addon_slug)
+    def installed(self) -> List[Addon]:
+        """Return a list of all installed add-ons."""
+        return list(self.local.values())
+
+    def get(self, addon_slug: str) -> Optional[AnyAddon]:
+        """Return an add-on from slug.
+
+        Prio:
+        1 - Local
+        2 - Store
+        """
+        if addon_slug in self.local:
+            return self.local[addon_slug]
+        return self.store.get(addon_slug)
 
-    def from_token(self, token):
+    def from_token(self, token: str) -> Optional[Addon]:
         """Return an add-on from Hass.io token."""
-        for addon in self.list_addons:
-            if addon.is_installed and token == addon.hassio_token:
+        for addon in self.installed:
+            if token == addon.hassio_token:
                 return addon
         return None
 
-    async def load(self):
+    async def load(self) -> None:
         """Start up add-on management."""
-        self.data.reload()
-
-        # Init Hass.io built-in repositories
-        repositories = \
-            set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES
-
-        # Init custom repositories and load add-ons
-        await self.load_repositories(repositories)
-
-    async def reload(self):
-        """Update add-ons from repository and reload list."""
-        tasks = [repository.update() for repository in
-                 self.repositories_obj.values()]
-        if tasks:
-            await asyncio.wait(tasks)
-
-        # read data from repositories
-        self.data.reload()
-
-        # update addons
-        await self.load_addons()
-
-    async def load_repositories(self, list_repositories):
-        """Add a new custom repository."""
-        new_rep = set(list_repositories)
-        old_rep = set(self.repositories_obj)
-
-        # add new repository
-        async def _add_repository(url):
-            """Helper function to async add repository."""
-            repository = Repository(self.coresys, url)
-            if not await repository.load():
-                _LOGGER.error("Can't load from repository %s", url)
-                return
-            self.repositories_obj[url] = repository
-
-            # don't add built-in repository to config
-            if url not in BUILTIN_REPOSITORIES:
-                self.sys_config.add_addon_repository(url)
-
-        tasks = [_add_repository(url) for url in new_rep - old_rep]
-        if tasks:
-            await asyncio.wait(tasks)
-
-        # del new repository
-        for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
-            self.repositories_obj.pop(url).remove()
-            self.sys_config.drop_addon_repository(url)
-
-        # update data
-        self.data.reload()
-        await self.load_addons()
-
-    async def load_addons(self):
-        """Update/add internal add-on store."""
-        all_addons = set(self.data.system) | set(self.data.cache)
-
-        # calc diff
-        add_addons = all_addons - set(self.addons_obj)
-        del_addons = set(self.addons_obj) - all_addons
-
-        _LOGGER.info("Load add-ons: %d all - %d new - %d remove",
-                     len(all_addons), len(add_addons), len(del_addons))
-
-        # new addons
         tasks = []
-        for addon_slug in add_addons:
-            addon = Addon(self.coresys, addon_slug)
+        for slug in self.data.system:
+            addon = self.local[slug] = Addon(self.coresys, slug)
             tasks.append(addon.load())
-            self.addons_obj[addon_slug] = addon
 
+        # Run initial tasks
+        _LOGGER.info("Found %d installed add-ons", len(tasks))
         if tasks:
             await asyncio.wait(tasks)
 
-        # remove
-        for addon_slug in del_addons:
-            self.addons_obj.pop(addon_slug)
+        # Sync DNS
+        await self.sync_dns()
 
-    async def boot(self, stage):
+    async def boot(self, stage: str) -> None:
         """Boot add-ons with mode auto."""
         tasks = []
-        for addon in self.addons_obj.values():
-            if addon.is_installed and addon.boot == BOOT_AUTO and \
-                    addon.startup == stage:
-                tasks.append(addon.start())
+        for addon in self.installed:
+            if addon.boot != BOOT_AUTO or addon.startup != stage:
+                continue
+            tasks.append(addon.start())
 
-        _LOGGER.info("Startup %s run %d add-ons", stage, len(tasks))
+        _LOGGER.info("Phase '%s' start %d add-ons", stage, len(tasks))
         if tasks:
             await asyncio.wait(tasks)
             await asyncio.sleep(self.sys_config.wait_boot)
 
-    async def shutdown(self, stage):
+    async def shutdown(self, stage: str) -> None:
         """Shutdown addons."""
        tasks = []
-        for addon in self.addons_obj.values():
-            if addon.is_installed and \
-                    await addon.state() == STATE_STARTED and \
-                    addon.startup == stage:
-                tasks.append(addon.stop())
+        for addon in self.installed:
+            if await addon.state() != STATE_STARTED or addon.startup != stage:
+                continue
+            tasks.append(addon.stop())
 
-        _LOGGER.info("Shutdown %s stop %d add-ons", stage, len(tasks))
+        _LOGGER.info("Phase '%s' stop %d add-ons", stage, len(tasks))
         if tasks:
             await asyncio.wait(tasks)
+
+    async def install(self, slug: str) -> None:
+        """Install an add-on."""
+        if slug in self.local:
+            _LOGGER.warning("Add-on %s is already installed", slug)
+            return
+        store = self.store.get(slug)
+
+        if not store:
+            _LOGGER.error("Add-on %s not exists", slug)
+            raise AddonsError()
+
+        if not store.available:
+            _LOGGER.error("Add-on %s not supported on that platform", slug)
+            raise AddonsNotSupportedError()
+
+        self.data.install(store)
+        addon = Addon(self.coresys, slug)
+
+        if not addon.path_data.is_dir():
+            _LOGGER.info("Create Home Assistant add-on data folder %s", addon.path_data)
+            addon.path_data.mkdir()
+
+        # Setup/Fix AppArmor profile
+        await addon.install_apparmor()
+
+        try:
+            await addon.instance.install(store.version, store.image)
+        except DockerAPIError:
+            self.data.uninstall(addon)
+            raise AddonsError() from None
+        else:
+            self.local[slug] = addon
+            _LOGGER.info("Add-on '%s' successfully installed", slug)
+
+    async def uninstall(self, slug: str) -> None:
+        """Remove an add-on."""
+        if slug not in self.local:
+            _LOGGER.warning("Add-on %s is not installed", slug)
+            return
+        addon = self.local.get(slug)
+
+        try:
+            await addon.instance.remove()
+        except DockerAPIError:
+            raise AddonsError() from None
+
+        await addon.remove_data()
+
+        # Cleanup audio settings
+        if addon.path_asound.exists():
+            with suppress(OSError):
+                addon.path_asound.unlink()
+
+        # Cleanup AppArmor profile
+        with suppress(HostAppArmorError):
+            await addon.uninstall_apparmor()
+
+        # Cleanup Ingress panel from sidebar
+        if addon.ingress_panel:
+            addon.ingress_panel = False
+            with suppress(HomeAssistantAPIError):
+                await self.sys_ingress.update_hass_panel(addon)
+
+        # Cleanup internal data
+        addon.remove_discovery()
+
+        self.data.uninstall(addon)
+        self.local.pop(slug)
+
+        _LOGGER.info("Add-on '%s' successfully removed", slug)
+
+    async def update(self, slug: str) -> None:
+        """Update add-on."""
+        if slug not in self.local:
+            _LOGGER.error("Add-on %s is not installed", slug)
+            raise AddonsError()
+        addon = self.local.get(slug)
+
+        if addon.is_detached:
+            _LOGGER.error("Add-on %s is not available inside store", slug)
+            raise AddonsError()
+        store = self.store.get(slug)
+
+        if addon.version == store.version:
+            _LOGGER.warning("No update available for add-on %s", slug)
+            return
+
+        # Check if available, Maybe something have changed
+        if not store.available:
+            _LOGGER.error("Add-on %s not supported on that platform", slug)
+            raise AddonsNotSupportedError()
+
+        # Update instance
+        last_state = await addon.state()
+        try:
+            await addon.instance.update(store.version, store.image)
+
+            # Cleanup
+            with suppress(DockerAPIError):
+                await addon.instance.cleanup()
+        except DockerAPIError:
+            raise AddonsError() from None
+        else:
+            self.data.update(store)
+            _LOGGER.info("Add-on '%s' successfully updated", slug)
+
+        # Setup/Fix AppArmor profile
+        await addon.install_apparmor()
+
+        # restore state
+        if last_state == STATE_STARTED:
+            await addon.start()
+
+    async def rebuild(self, slug: str) -> None:
+        """Perform a rebuild of local build add-on."""
+        if slug not in self.local:
+            _LOGGER.error("Add-on %s is not installed", slug)
+            raise AddonsError()
+        addon = self.local.get(slug)
+
+        if addon.is_detached:
+            _LOGGER.error("Add-on %s is not available inside store", slug)
+            raise AddonsError()
+        store = self.store.get(slug)
+
+        # Check if a rebuild is possible now
+        if addon.version != store.version:
+            _LOGGER.error("Version changed, use Update instead Rebuild")
+            raise AddonsError()
+        if not addon.need_build:
+            _LOGGER.error("Can't rebuild a image based add-on")
+            raise AddonsNotSupportedError()
+
+        # remove docker container but not addon config
+        last_state = await addon.state()
+        try:
+            await addon.instance.remove()
+            await addon.instance.install(addon.version)
+        except DockerAPIError:
+            raise AddonsError() from None
+        else:
+            self.data.update(store)
+            _LOGGER.info("Add-on '%s' successfully rebuilded", slug)
+
+        # restore state
+        if last_state == STATE_STARTED:
+            await addon.start()
+
+    async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
+        """Restore state of an add-on."""
+        if slug not in self.local:
+            _LOGGER.debug("Add-on %s is not local available for restore")
+            addon = Addon(self.coresys, slug)
+        else:
+            _LOGGER.debug("Add-on %s is local available for restore")
+            addon = self.local[slug]
+
+        await addon.restore(tar_file)
+
+        # Check if new
+        if slug in self.local:
+            return
+
+        _LOGGER.info("Detect new Add-on after restore %s", slug)
+        self.local[slug] = addon
+
+    async def repair(self) -> None:
+        """Repair local add-ons."""
+        needs_repair: List[Addon] = []
+
+        # Evaluate Add-ons to repair
+        for addon in self.installed:
+            if await addon.instance.exists():
+                continue
+            needs_repair.append(addon)
+
+        _LOGGER.info("Found %d add-ons to repair", len(needs_repair))
+        if not needs_repair:
+            return
+
+        for addon in needs_repair:
+            _LOGGER.info("Start repair for add-on: %s", addon.slug)
+
+            with suppress(DockerAPIError, KeyError):
+                # Need pull a image again
+                if not addon.need_build:
+                    await addon.instance.install(addon.version, addon.image)
+                    continue
+
+                # Need local lookup
+                elif addon.need_build and not addon.is_detached:
+                    store = self.store[addon.slug]
+                    # If this add-on is available for rebuild
+                    if addon.version == store.version:
+                        await addon.instance.install(addon.version, addon.image)
+                        continue
+
+            _LOGGER.error("Can't repair %s", addon.slug)
+            with suppress(AddonsError):
+                await self.uninstall(addon.slug)
+
+    async def sync_dns(self) -> None:
+        """Sync add-ons DNS names."""
+        # Update hosts
+        for addon in self.installed:
+            if not await addon.instance.is_running():
+                continue
+            self.sys_dns.add_host(
+                ipv4=addon.ip_address, names=[addon.hostname], write=False
+            )
+
+        # Write hosts files
+        with suppress(CoreDNSError):
+            self.sys_dns.write_hosts()
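Illustration only (not part of the diff): the new `get()` above documents a lookup priority, installed add-on first, then store entry. A self-contained sketch of just that rule, with plain dicts standing in for the real `Addon`/`AddonStore` objects:

```python
from typing import Dict, Optional

# Stand-ins for the local/store mappings held by AddonManager
local: Dict[str, str] = {"core_ssh": "installed add-on"}
store: Dict[str, str] = {"core_ssh": "store add-on", "core_samba": "store add-on"}


def get(addon_slug: str) -> Optional[str]:
    """Return an add-on by slug: 1 - Local, 2 - Store (as in the diff)."""
    if addon_slug in local:
        return local[addon_slug]
    return store.get(addon_slug)


assert get("core_ssh") == "installed add-on"   # a local install shadows the store
assert get("core_samba") == "store add-on"     # otherwise fall back to the store
assert get("missing") is None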
File diff suppressed because it is too large
hassio/addons/build.py
@@ -9,34 +9,31 @@ from ..utils.json import JsonConfig
 from .validate import SCHEMA_BUILD_CONFIG
 
 if TYPE_CHECKING:
-    from .addon import Addon
+    from . import AnyAddon
 
 
 class AddonBuild(JsonConfig, CoreSysAttributes):
     """Handle build options for add-ons."""
 
-    def __init__(self, coresys: CoreSys, slug: str) -> None:
+    def __init__(self, coresys: CoreSys, addon: AnyAddon) -> None:
         """Initialize Hass.io add-on builder."""
         self.coresys: CoreSys = coresys
-        self._id: str = slug
+        self.addon = addon
 
         super().__init__(
-            Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
+            Path(self.addon.path_location, "build.json"), SCHEMA_BUILD_CONFIG
+        )
 
     def save_data(self):
         """Ignore save function."""
-
-    @property
-    def addon(self) -> Addon:
-        """Return add-on of build data."""
-        return self.sys_addons.get(self._id)
+        raise RuntimeError()
 
     @property
     def base_image(self) -> str:
         """Base images for this add-on."""
         return self._data[ATTR_BUILD_FROM].get(
-            self.sys_arch.default,
-            f"homeassistant/{self.sys_arch.default}-base:latest")
+            self.sys_arch.default, f"homeassistant/{self.sys_arch.default}-base:latest"
+        )
 
     @property
     def squash(self) -> bool:
@@ -51,28 +48,28 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
     def get_docker_args(self, version):
         """Create a dict with Docker build arguments."""
         args = {
-            'path': str(self.addon.path_location),
-            'tag': f"{self.addon.image}:{version}",
-            'pull': True,
-            'forcerm': True,
-            'squash': self.squash,
-            'labels': {
-                'io.hass.version': version,
-                'io.hass.arch': self.sys_arch.default,
-                'io.hass.type': META_ADDON,
-                'io.hass.name': self._fix_label('name'),
-                'io.hass.description': self._fix_label('description'),
+            "path": str(self.addon.path_location),
+            "tag": f"{self.addon.image}:{version}",
+            "pull": True,
+            "forcerm": True,
+            "squash": self.squash,
+            "labels": {
+                "io.hass.version": version,
+                "io.hass.arch": self.sys_arch.default,
+                "io.hass.type": META_ADDON,
+                "io.hass.name": self._fix_label("name"),
+                "io.hass.description": self._fix_label("description"),
             },
-            'buildargs': {
-                'BUILD_FROM': self.base_image,
-                'BUILD_VERSION': version,
-                'BUILD_ARCH': self.sys_arch.default,
+            "buildargs": {
+                "BUILD_FROM": self.base_image,
+                "BUILD_VERSION": version,
+                "BUILD_ARCH": self.sys_arch.default,
                 **self.additional_args,
-            }
+            },
         }
 
         if self.addon.url:
-            args['labels']['io.hass.url'] = self.addon.url
+            args["labels"]["io.hass.url"] = self.addon.url
 
         return args
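For context only: `get_docker_args()` above assembles keyword arguments shaped for a Docker image build. A hedged sketch of how such a dict could be fed to docker-py's high-level build call; the surrounding Supervisor plumbing is assumed, not shown in this diff, and `squash` additionally requires an experimental Docker daemon.

```python
import docker  # docker-py; assumed to be available where the Supervisor runs


def build_addon_image(args: dict) -> None:
    """Pass an AddonBuild.get_docker_args()-style dict straight to docker-py.

    `args` is expected to carry path/tag/pull/forcerm/squash/labels/buildargs,
    matching the keys constructed in the diff above.
    """
    client = docker.from_env()
    image, logs = client.images.build(**args)
    for chunk in logs:
        # Each chunk is a dict from the Docker build log stream
        if "stream" in chunk:
            print(chunk["stream"], end="")
    print(f"Built {image.tags}")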
hassio/addons/data.py
@@ -1,38 +1,34 @@
 """Init file for Hass.io add-on data."""
+from copy import deepcopy
 import logging
-from pathlib import Path
-
-import voluptuous as vol
-from voluptuous.humanize import humanize_error
+from typing import Any, Dict
 
 from ..const import (
-    ATTR_LOCATON,
-    ATTR_REPOSITORY,
-    ATTR_SLUG,
+    ATTR_IMAGE,
+    ATTR_OPTIONS,
     ATTR_SYSTEM,
     ATTR_USER,
+    ATTR_VERSION,
     FILE_HASSIO_ADDONS,
-    REPOSITORY_CORE,
-    REPOSITORY_LOCAL,
 )
-from ..coresys import CoreSysAttributes
-from ..exceptions import JsonFileError
-from ..utils.json import JsonConfig, read_json_file
-from .utils import extract_hash_from_path
-from .validate import SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG
+from ..coresys import CoreSys, CoreSysAttributes
+from ..utils.json import JsonConfig
+from ..store.addon import AddonStore
+from .addon import Addon
+from .validate import SCHEMA_ADDONS_FILE
 
 _LOGGER = logging.getLogger(__name__)
 
+Config = Dict[str, Any]
+
 
 class AddonsData(JsonConfig, CoreSysAttributes):
-    """Hold data for Add-ons inside Hass.io."""
+    """Hold data for installed Add-ons inside Hass.io."""
 
-    def __init__(self, coresys):
+    def __init__(self, coresys: CoreSys):
         """Initialize data holder."""
         super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDONS_FILE)
-        self.coresys = coresys
-        self._repositories = {}
-        self._cache = {}
+        self.coresys: CoreSys = coresys
 
     @property
     def user(self):
@@ -44,93 +40,34 @@ class AddonsData(JsonConfig, CoreSysAttributes):
         """Return local add-on data."""
         return self._data[ATTR_SYSTEM]
 
-    @property
-    def cache(self):
-        """Return add-on data from cache/repositories."""
-        return self._cache
-
-    @property
-    def repositories(self):
-        """Return add-on data from repositories."""
-        return self._repositories
-
-    def reload(self):
-        """Read data from add-on repository."""
-        self._cache = {}
-        self._repositories = {}
-
-        # read core repository
-        self._read_addons_folder(self.sys_config.path_addons_core, REPOSITORY_CORE)
-
-        # read local repository
-        self._read_addons_folder(self.sys_config.path_addons_local, REPOSITORY_LOCAL)
-
-        # add built-in repositories information
-        self._set_builtin_repositories()
-
-        # read custom git repositories
-        for repository_element in self.sys_config.path_addons_git.iterdir():
-            if repository_element.is_dir():
-                self._read_git_repository(repository_element)
-
-    def _read_git_repository(self, path):
-        """Process a custom repository folder."""
-        slug = extract_hash_from_path(path)
-
-        # exists repository json
-        repository_file = Path(path, "repository.json")
-        try:
-            repository_info = SCHEMA_REPOSITORY_CONFIG(read_json_file(repository_file))
-        except JsonFileError:
-            _LOGGER.warning(
-                "Can't read repository information from %s", repository_file
-            )
-            return
-        except vol.Invalid:
-            _LOGGER.warning("Repository parse error %s", repository_file)
-            return
-
-        # process data
-        self._repositories[slug] = repository_info
-        self._read_addons_folder(path, slug)
-
-    def _read_addons_folder(self, path, repository):
-        """Read data from add-ons folder."""
-        for addon in path.glob("**/config.json"):
-            try:
-                addon_config = read_json_file(addon)
-            except JsonFileError:
-                _LOGGER.warning("Can't read %s from repository %s", addon, repository)
-                continue
-
-            # validate
-            try:
-                addon_config = SCHEMA_ADDON_CONFIG(addon_config)
-            except vol.Invalid as ex:
-                _LOGGER.warning(
-                    "Can't read %s: %s", addon, humanize_error(addon_config, ex)
-                )
-                continue
-
-            # Generate slug
-            addon_slug = "{}_{}".format(repository, addon_config[ATTR_SLUG])
-
-            # store
-            addon_config[ATTR_REPOSITORY] = repository
-            addon_config[ATTR_LOCATON] = str(addon.parent)
-            self._cache[addon_slug] = addon_config
-
-    def _set_builtin_repositories(self):
-        """Add local built-in repository into dataset."""
-        try:
-            builtin_file = Path(__file__).parent.joinpath("built-in.json")
-            builtin_data = read_json_file(builtin_file)
-        except JsonFileError:
-            _LOGGER.warning("Can't read built-in json")
-            return
-
-        # core repository
-        self._repositories[REPOSITORY_CORE] = builtin_data[REPOSITORY_CORE]
-
-        # local repository
-        self._repositories[REPOSITORY_LOCAL] = builtin_data[REPOSITORY_LOCAL]
+    def install(self, addon: AddonStore) -> None:
+        """Set addon as installed."""
+        self.system[addon.slug] = deepcopy(addon.data)
+        self.user[addon.slug] = {
+            ATTR_OPTIONS: {},
+            ATTR_VERSION: addon.version,
+            ATTR_IMAGE: addon.image,
+        }
+        self.save_data()
+
+    def uninstall(self, addon: Addon) -> None:
+        """Set add-on as uninstalled."""
+        self.system.pop(addon.slug, None)
+        self.user.pop(addon.slug, None)
+        self.save_data()
+
+    def update(self, addon: AddonStore) -> None:
+        """Update version of add-on."""
+        self.system[addon.slug] = deepcopy(addon.data)
+        self.user[addon.slug].update(
+            {ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
+        )
+        self.save_data()
+
+    def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
+        """Restore data to add-on."""
+        self.user[slug] = deepcopy(user)
+        self.system[slug] = deepcopy(system)
+
+        self.user[slug][ATTR_IMAGE] = image
+        self.save_data()
514
hassio/addons/model.py
Normal file
514
hassio/addons/model.py
Normal file
@@ -0,0 +1,514 @@
|
|||||||
|
"""Init file for Hass.io add-ons."""
|
||||||
|
from distutils.version import StrictVersion
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Awaitable, Dict, List, Optional
|
||||||
|
|
||||||
|
import voluptuous as vol
|
||||||
|
|
||||||
|
from ..const import (
|
||||||
|
ATTR_APPARMOR,
|
||||||
|
ATTR_ARCH,
|
||||||
|
ATTR_AUDIO,
|
||||||
|
ATTR_AUTH_API,
|
||||||
|
ATTR_AUTO_UART,
|
||||||
|
ATTR_BOOT,
|
||||||
|
ATTR_DESCRIPTON,
|
||||||
|
ATTR_DEVICES,
|
||||||
|
ATTR_DEVICETREE,
|
||||||
|
ATTR_DISCOVERY,
|
||||||
|
ATTR_DOCKER_API,
|
||||||
|
ATTR_ENVIRONMENT,
|
||||||
|
ATTR_FULL_ACCESS,
|
||||||
|
ATTR_GPIO,
|
||||||
|
ATTR_HASSIO_API,
|
||||||
|
ATTR_HASSIO_ROLE,
|
||||||
|
ATTR_HOMEASSISTANT,
|
||||||
|
ATTR_HOMEASSISTANT_API,
|
||||||
|
ATTR_HOST_DBUS,
|
||||||
|
ATTR_HOST_IPC,
|
||||||
|
ATTR_HOST_NETWORK,
|
||||||
|
ATTR_HOST_PID,
|
||||||
|
ATTR_IMAGE,
|
||||||
|
ATTR_INGRESS,
|
||||||
|
ATTR_KERNEL_MODULES,
|
||||||
|
ATTR_LEGACY,
|
||||||
|
ATTR_LOCATON,
|
||||||
|
ATTR_MACHINE,
|
||||||
|
ATTR_MAP,
|
||||||
|
ATTR_NAME,
|
||||||
|
ATTR_OPTIONS,
|
||||||
|
ATTR_PANEL_ADMIN,
|
||||||
|
ATTR_PANEL_ICON,
|
||||||
|
ATTR_PANEL_TITLE,
|
||||||
|
ATTR_PORTS,
|
||||||
|
ATTR_PORTS_DESCRIPTION,
|
||||||
|
ATTR_PRIVILEGED,
|
||||||
|
ATTR_REPOSITORY,
|
||||||
|
ATTR_SCHEMA,
|
||||||
|
ATTR_SERVICES,
|
||||||
|
ATTR_SLUG,
|
||||||
|
ATTR_STARTUP,
|
||||||
|
ATTR_STDIN,
|
||||||
|
ATTR_TIMEOUT,
|
||||||
|
ATTR_TMPFS,
|
||||||
|
ATTR_UDEV,
|
||||||
|
ATTR_URL,
|
||||||
|
ATTR_VERSION,
|
||||||
|
ATTR_WEBUI,
|
||||||
|
SECURITY_DEFAULT,
|
||||||
|
SECURITY_DISABLE,
|
||||||
|
SECURITY_PROFILE,
|
||||||
|
)
|
||||||
|
from ..coresys import CoreSysAttributes
|
||||||
|
from .validate import RE_SERVICE, RE_VOLUME, validate_options
|
||||||
|
|
||||||
|
Data = Dict[str, Any]
|
||||||
|
|
||||||
|
|
||||||
|
class AddonModel(CoreSysAttributes):
|
||||||
|
"""Add-on Data layout."""
|
||||||
|
|
||||||
|
slug: str = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def data(self) -> Data:
|
||||||
|
"""Return Add-on config/data."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_installed(self) -> bool:
|
||||||
|
"""Return True if an add-on is installed."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_detached(self) -> bool:
|
||||||
|
"""Return True if add-on is detached."""
|
||||||
|
raise NotImplementedError()
|
||||||
|
|
||||||
|
    @property
    def available(self) -> bool:
        """Return True if this add-on is available on this platform."""
        return self._available(self.data)

    @property
    def options(self) -> Dict[str, Any]:
        """Return options with local changes."""
        return self.data[ATTR_OPTIONS]

    @property
    def boot(self) -> bool:
        """Return boot config, preferring local settings."""
        return self.data[ATTR_BOOT]

    @property
    def auto_update(self) -> Optional[bool]:
        """Return True if auto update is enabled."""
        return None

    @property
    def name(self) -> str:
        """Return name of add-on."""
        return self.data[ATTR_NAME]

    @property
    def hostname(self) -> str:
        """Return hostname of add-on."""
        return self.slug.replace("_", "-")

    @property
    def dns(self) -> List[str]:
        """Return list of DNS names for this add-on."""
        return []

    @property
    def timeout(self) -> int:
        """Return timeout of add-on for Docker stop."""
        return self.data[ATTR_TIMEOUT]

    @property
    def uuid(self) -> Optional[str]:
        """Return an API token for this add-on."""
        return None

    @property
    def hassio_token(self) -> Optional[str]:
        """Return access token for Hass.io API."""
        return None

    @property
    def ingress_token(self) -> Optional[str]:
        """Return access token for ingress."""
        return None

    @property
    def ingress_entry(self) -> Optional[str]:
        """Return ingress external URL."""
        return None

    @property
    def description(self) -> str:
        """Return description of add-on."""
        return self.data[ATTR_DESCRIPTON]

    @property
    def long_description(self) -> Optional[str]:
        """Return README.md as long_description."""
        readme = Path(self.path_location, "README.md")

        # If the readme file does not exist
        if not readme.exists():
            return None

        # Return data
        with readme.open("r") as readme_file:
            return readme_file.read()

    @property
    def repository(self) -> str:
        """Return repository of add-on."""
        return self.data[ATTR_REPOSITORY]

    @property
    def latest_version(self) -> str:
        """Return latest version of add-on."""
        return self.data[ATTR_VERSION]

    @property
    def version(self) -> str:
        """Return version of add-on."""
        return self.data[ATTR_VERSION]

    @property
    def protected(self) -> bool:
        """Return if add-on is in protected mode."""
        return True

    @property
    def startup(self) -> Optional[str]:
        """Return startup type of add-on."""
        return self.data.get(ATTR_STARTUP)

    @property
    def services_role(self) -> Dict[str, str]:
        """Return dict of services with rights."""
        services_list = self.data.get(ATTR_SERVICES, [])

        services = {}
        for data in services_list:
            service = RE_SERVICE.match(data)
            services[service.group("service")] = service.group("rights")

        return services

    @property
    def discovery(self) -> List[str]:
        """Return list of discoverable components/platforms."""
        return self.data.get(ATTR_DISCOVERY, [])

    @property
    def ports_description(self) -> Optional[Dict[str, str]]:
        """Return descriptions of ports."""
        return self.data.get(ATTR_PORTS_DESCRIPTION)

    @property
    def ports(self) -> Optional[Dict[str, Optional[int]]]:
        """Return ports of add-on."""
        return self.data.get(ATTR_PORTS)

    @property
    def ingress_url(self) -> Optional[str]:
        """Return the ingress URL."""
        return None

    @property
    def webui(self) -> Optional[str]:
        """Return URL to webui or None."""
        return self.data.get(ATTR_WEBUI)

    @property
    def ingress_port(self) -> Optional[int]:
        """Return Ingress port."""
        return None

    @property
    def panel_icon(self) -> str:
        """Return panel icon for Ingress frame."""
        return self.data[ATTR_PANEL_ICON]

    @property
    def panel_title(self) -> str:
        """Return panel title for Ingress frame."""
        return self.data.get(ATTR_PANEL_TITLE, self.name)

    @property
    def panel_admin(self) -> str:
        """Return panel admin setting for Ingress frame."""
        return self.data[ATTR_PANEL_ADMIN]

    @property
    def host_network(self) -> bool:
        """Return True if add-on runs on host network."""
        return self.data[ATTR_HOST_NETWORK]

    @property
    def host_pid(self) -> bool:
        """Return True if add-on runs on host PID namespace."""
        return self.data[ATTR_HOST_PID]

    @property
    def host_ipc(self) -> bool:
        """Return True if add-on runs on host IPC namespace."""
        return self.data[ATTR_HOST_IPC]

    @property
    def host_dbus(self) -> bool:
        """Return True if add-on runs on host D-Bus."""
        return self.data[ATTR_HOST_DBUS]

    @property
    def devices(self) -> Optional[List[str]]:
        """Return devices of add-on."""
        return self.data.get(ATTR_DEVICES, [])

    @property
    def auto_uart(self) -> bool:
        """Return True if we should map all UART devices."""
        return self.data[ATTR_AUTO_UART]

    @property
    def tmpfs(self) -> Optional[str]:
        """Return tmpfs of add-on."""
        return self.data.get(ATTR_TMPFS)

    @property
    def environment(self) -> Optional[Dict[str, str]]:
        """Return environment of add-on."""
        return self.data.get(ATTR_ENVIRONMENT)

    @property
    def privileged(self) -> List[str]:
        """Return list of privileges."""
        return self.data.get(ATTR_PRIVILEGED, [])

    @property
    def apparmor(self) -> str:
        """Return the AppArmor security mode."""
        if not self.data.get(ATTR_APPARMOR):
            return SECURITY_DISABLE
        elif self.sys_host.apparmor.exists(self.slug):
            return SECURITY_PROFILE
        return SECURITY_DEFAULT

    @property
    def legacy(self) -> bool:
        """Return True if the add-on doesn't support Home Assistant labels."""
        return self.data[ATTR_LEGACY]

    @property
    def access_docker_api(self) -> bool:
        """Return True if the add-on needs read-only Docker API access."""
        return self.data[ATTR_DOCKER_API]

    @property
    def access_hassio_api(self) -> bool:
        """Return True if the add-on has access to the Hass.io RESTful API."""
        return self.data[ATTR_HASSIO_API]

    @property
    def access_homeassistant_api(self) -> bool:
        """Return True if the add-on has access to the Home Assistant API proxy."""
        return self.data[ATTR_HOMEASSISTANT_API]

    @property
    def hassio_role(self) -> str:
        """Return Hass.io role for API."""
        return self.data[ATTR_HASSIO_ROLE]

    @property
    def with_stdin(self) -> bool:
        """Return True if the add-on uses stdin input."""
        return self.data[ATTR_STDIN]

    @property
    def with_ingress(self) -> bool:
        """Return True if the add-on supports ingress."""
        return self.data[ATTR_INGRESS]

    @property
    def ingress_panel(self) -> Optional[bool]:
        """Return True if the add-on Ingress panel is enabled."""
        return None

    @property
    def with_gpio(self) -> bool:
        """Return True if the add-on has access to the GPIO interface."""
        return self.data[ATTR_GPIO]

    @property
    def with_udev(self) -> bool:
        """Return True if the add-on has its own udev."""
        return self.data[ATTR_UDEV]

    @property
    def with_kernel_modules(self) -> bool:
        """Return True if the add-on has access to kernel modules."""
        return self.data[ATTR_KERNEL_MODULES]

    @property
    def with_full_access(self) -> bool:
        """Return True if the add-on wants full access to hardware."""
        return self.data[ATTR_FULL_ACCESS]

    @property
    def with_devicetree(self) -> bool:
        """Return True if the add-on has read access to the devicetree."""
        return self.data[ATTR_DEVICETREE]

    @property
    def access_auth_api(self) -> bool:
        """Return True if the add-on has access to the login/auth backend."""
        return self.data[ATTR_AUTH_API]

    @property
    def with_audio(self) -> bool:
        """Return True if the add-on has access to audio."""
        return self.data[ATTR_AUDIO]

    @property
    def homeassistant_version(self) -> Optional[str]:
        """Return the minimum Home Assistant version required by the add-on."""
        return self.data.get(ATTR_HOMEASSISTANT)

    @property
    def url(self) -> Optional[str]:
        """Return URL of add-on."""
        return self.data.get(ATTR_URL)

    @property
    def with_icon(self) -> bool:
        """Return True if an icon exists."""
        return self.path_icon.exists()

    @property
    def with_logo(self) -> bool:
        """Return True if a logo exists."""
        return self.path_logo.exists()

    @property
    def with_changelog(self) -> bool:
        """Return True if a changelog exists."""
        return self.path_changelog.exists()

    @property
    def supported_arch(self) -> List[str]:
        """Return list of supported architectures."""
        return self.data[ATTR_ARCH]

    @property
    def supported_machine(self) -> List[str]:
        """Return list of supported machines."""
        return self.data.get(ATTR_MACHINE, [])

    @property
    def image(self) -> str:
        """Generate image name from data."""
        return self._image(self.data)

    @property
    def need_build(self) -> bool:
        """Return True if this add-on needs a local build."""
        return ATTR_IMAGE not in self.data

    @property
    def map_volumes(self) -> Dict[str, str]:
        """Return a dict of {volume: policy} from add-on."""
        volumes = {}
        for volume in self.data[ATTR_MAP]:
            result = RE_VOLUME.match(volume)
            volumes[result.group(1)] = result.group(2) or "ro"

        return volumes

    @property
    def path_location(self) -> Path:
        """Return path to this add-on."""
        return Path(self.data[ATTR_LOCATON])

    @property
    def path_icon(self) -> Path:
        """Return path to add-on icon."""
        return Path(self.path_location, "icon.png")

    @property
    def path_logo(self) -> Path:
        """Return path to add-on logo."""
        return Path(self.path_location, "logo.png")

    @property
    def path_changelog(self) -> Path:
        """Return path to add-on changelog."""
        return Path(self.path_location, "CHANGELOG.md")

    @property
    def path_apparmor(self) -> Path:
        """Return path to custom AppArmor profile."""
        return Path(self.path_location, "apparmor.txt")

    @property
    def schema(self) -> vol.Schema:
        """Create a schema for add-on options."""
        raw_schema = self.data[ATTR_SCHEMA]

        if isinstance(raw_schema, bool):
            return vol.Schema(dict)
        return vol.Schema(vol.All(dict, validate_options(raw_schema)))
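
    # --- Illustrative sketch (not part of this file) ---
    # The `schema` property turns the raw schema from an add-on's config into a
    # voluptuous validator for user options. Hypothetical usage:
    #
    #     raw_schema = {"ssl": "bool", "port": "port"}   # e.g. from config.json
    #     validator = vol.Schema(vol.All(dict, validate_options(raw_schema)))
    #     options = validator({"ssl": True, "port": 8123})
    #     # invalid input raises vol.Invalid
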
    def __eq__(self, other):
        """Compare add-on objects."""
        if not isinstance(other, AddonModel):
            return False
        return self.slug == other.slug

    def _available(self, config) -> bool:
        """Return True if this add-on is available on this platform."""
        # Architecture
        if not self.sys_arch.is_supported(config[ATTR_ARCH]):
            return False

        # Machine / Hardware
        machine = config.get(ATTR_MACHINE)
        if machine and self.sys_machine not in machine:
            return False

        # Home Assistant
        version = config.get(ATTR_HOMEASSISTANT) or self.sys_homeassistant.version
        if StrictVersion(self.sys_homeassistant.version) < StrictVersion(version):
            return False

        return True

    def _image(self, config) -> str:
        """Generate image name from data."""
        # Repository with Docker Hub images
        if ATTR_IMAGE in config:
            arch = self.sys_arch.match(config[ATTR_ARCH])
            return config[ATTR_IMAGE].format(arch=arch)

        # local build
        return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"
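
    # --- Illustrative sketch (not part of this file) ---
    # How _image() resolves the Docker image name (values are hypothetical):
    #
    #     {"image": "example/{arch}-addon-demo", ...}
    #         -> sys_arch.match() picks the best supported arch, e.g. "amd64",
    #            giving "example/amd64-addon-demo"
    #     {"repository": "local", "slug": "demo", ...}   # no "image" key
    #         -> local build name "local/{default arch}-addon-demo",
    #            e.g. "local/amd64-addon-demo"
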
    def install(self) -> Awaitable[None]:
        """Install this add-on."""
        return self.sys_addons.install(self.slug)

    def uninstall(self) -> Awaitable[None]:
        """Uninstall this add-on."""
        return self.sys_addons.uninstall(self.slug)

    def update(self) -> Awaitable[None]:
        """Update this add-on."""
        return self.sys_addons.update(self.slug)

    def rebuild(self) -> Awaitable[None]:
        """Rebuild this add-on."""
        return self.sys_addons.rebuild(self.slug)
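
# --- Illustrative sketch (not part of this file) ---
# The lifecycle helpers above delegate to the add-on manager and return
# awaitables, so a caller would typically do something like (hypothetical):
#
#     if addon.available and not addon.is_installed:
#         await addon.install()
#     elif addon.version != addon.latest_version:
#         await addon.update()
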
@@ -2,10 +2,8 @@
 from __future__ import annotations
 
 import asyncio
-import hashlib
 import logging
 from pathlib import Path
-import re
 from typing import TYPE_CHECKING
 
 from ..const import (
@@ -20,16 +18,14 @@ from ..const import (
     SECURITY_DISABLE,
     SECURITY_PROFILE,
 )
-from ..exceptions import AddonsNotSupportedError
 
 if TYPE_CHECKING:
-    from .addon import Addon
+    from .model import AddonModel
 
-RE_SHA1 = re.compile(r"[a-f0-9]{8}")
 _LOGGER = logging.getLogger(__name__)
 
 
-def rating_security(addon: Addon) -> int:
+def rating_security(addon: AddonModel) -> int:
     """Return 1-6 for security rating.
 
     1 = not secure
@@ -86,34 +82,6 @@ def rating_security(addon: Addon) -> int:
     return max(min(6, rating), 1)
 
 
-def get_hash_from_repository(name: str) -> str:
-    """Generate a hash from repository."""
-    key = name.lower().encode()
-    return hashlib.sha1(key).hexdigest()[:8]
-
-
-def extract_hash_from_path(path: Path) -> str:
-    """Extract repo id from path."""
-    repository_dir = path.parts[-1]
-
-    if not RE_SHA1.match(repository_dir):
-        return get_hash_from_repository(repository_dir)
-    return repository_dir
-
-
-def check_installed(method):
-    """Wrap function with check if add-on is installed."""
-
-    async def wrap_check(addon, *args, **kwargs):
-        """Return False if not installed or the function."""
-        if not addon.is_installed:
-            _LOGGER.error("Addon %s is not installed", addon.slug)
-            raise AddonsNotSupportedError()
-        return await method(addon, *args, **kwargs)
-
-    return wrap_check
-
-
 async def remove_data(folder: Path) -> None:
     """Remove folder and reset privileged."""
     try:
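
# --- Illustrative sketch (not part of this diff) ---
# The helpers removed above derived a stable 8-character repository id from a
# repository name/URL; presumably that logic moved elsewhere in this refactor.
# For reference, the behaviour was equivalent to:
import hashlib


def repo_hash(name: str) -> str:
    # SHA1 of the lower-cased name, truncated to 8 hex characters
    return hashlib.sha1(name.lower().encode()).hexdigest()[:8]

# repo_hash("https://github.com/hassio-addons/repository")
# -> an 8-character hex id that stays stable for a given repository URL
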
@@ -39,21 +39,20 @@ from ..const import (
     ATTR_IMAGE,
     ATTR_INGRESS,
     ATTR_INGRESS_ENTRY,
+    ATTR_INGRESS_PANEL,
     ATTR_INGRESS_PORT,
     ATTR_INGRESS_TOKEN,
-    ATTR_INGRESS_PANEL,
-    ATTR_PANEL_ADMIN,
-    ATTR_PANEL_ICON,
-    ATTR_PANEL_TITLE,
     ATTR_KERNEL_MODULES,
     ATTR_LEGACY,
     ATTR_LOCATON,
     ATTR_MACHINE,
-    ATTR_MAINTAINER,
     ATTR_MAP,
     ATTR_NAME,
     ATTR_NETWORK,
     ATTR_OPTIONS,
+    ATTR_PANEL_ADMIN,
+    ATTR_PANEL_ICON,
+    ATTR_PANEL_TITLE,
     ATTR_PORTS,
     ATTR_PORTS_DESCRIPTION,
     ATTR_PRIVILEGED,
@@ -69,6 +68,7 @@ from ..const import (
     ATTR_SYSTEM,
     ATTR_TIMEOUT,
     ATTR_TMPFS,
+    ATTR_UDEV,
     ATTR_URL,
     ATTR_USER,
     ATTR_UUID,
@@ -101,14 +101,14 @@ _LOGGER = logging.getLogger(__name__)
 RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
 RE_SERVICE = re.compile(r"^(?P<service>mqtt):(?P<rights>provide|want|need)$")
 
-V_STR = 'str'
-V_INT = 'int'
-V_FLOAT = 'float'
-V_BOOL = 'bool'
-V_EMAIL = 'email'
-V_URL = 'url'
-V_PORT = 'port'
-V_MATCH = 'match'
+V_STR = "str"
+V_INT = "int"
+V_FLOAT = "float"
+V_BOOL = "bool"
+V_EMAIL = "email"
+V_URL = "url"
+V_PORT = "port"
+V_MATCH = "match"
 
 RE_SCHEMA_ELEMENT = re.compile(
     r"^(?:"
@@ -119,18 +119,30 @@ RE_SCHEMA_ELEMENT = re.compile(
     r")\??$"
 )
 
-RE_DOCKER_IMAGE = re.compile(
-    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
-RE_DOCKER_IMAGE_BUILD = re.compile(
-    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$")
+RE_DOCKER_IMAGE = re.compile(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
+RE_DOCKER_IMAGE_BUILD = re.compile(
+    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$"
+)
 
 SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
 
 
 MACHINE_ALL = [
-    'intel-nuc', 'odroid-c2', 'odroid-xu', 'orangepi-prime', 'qemux86',
-    'qemux86-64', 'qemuarm', 'qemuarm-64', 'raspberrypi', 'raspberrypi2',
-    'raspberrypi3', 'raspberrypi3-64', 'tinker',
+    "intel-nuc",
+    "odroid-c2",
+    "odroid-xu",
+    "orangepi-prime",
+    "qemux86",
+    "qemux86-64",
+    "qemuarm",
+    "qemuarm-64",
+    "raspberrypi",
+    "raspberrypi2",
+    "raspberrypi3",
+    "raspberrypi3-64",
+    "raspberrypi4",
+    "raspberrypi4-64",
+    "tinker",
 ]
 
 
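# --- Illustrative sketch (not part of this diff) ---
# What the volume and service regexes above accept (example values):
import re

RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt):(?P<rights>provide|want|need)$")

volume = RE_VOLUME.match("config:rw")              # "config" mounted read-write
print(volume.group(1), volume.group(2) or "ro")    # -> config rw

service = RE_SERVICE.match("mqtt:need")            # add-on requires mqtt
print(service.group("service"), service.group("rights"))  # -> mqtt need
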
@@ -144,138 +156,158 @@ def _simple_startup(value):
|
|||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
# pylint: disable=no-value-for-parameter
|
||||||
SCHEMA_ADDON_CONFIG = vol.Schema({
|
SCHEMA_ADDON_CONFIG = vol.Schema(
|
||||||
vol.Required(ATTR_NAME): vol.Coerce(str),
|
{
|
||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
vol.Required(ATTR_NAME): vol.Coerce(str),
|
||||||
vol.Required(ATTR_SLUG): vol.Coerce(str),
|
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||||
vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
|
vol.Required(ATTR_SLUG): vol.Coerce(str),
|
||||||
vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
|
vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
|
||||||
vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
|
vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
|
||||||
vol.Optional(ATTR_URL): vol.Url(),
|
vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
|
||||||
vol.Required(ATTR_STARTUP):
|
vol.Optional(ATTR_URL): vol.Url(),
|
||||||
vol.All(_simple_startup, vol.In(STARTUP_ALL)),
|
vol.Required(ATTR_STARTUP): vol.All(_simple_startup, vol.In(STARTUP_ALL)),
|
||||||
vol.Required(ATTR_BOOT):
|
vol.Required(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
|
||||||
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
|
vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
|
||||||
vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
|
vol.Optional(ATTR_WEBUI): vol.Match(
|
||||||
vol.Optional(ATTR_WEBUI):
|
r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"
|
||||||
vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
|
),
|
||||||
vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
|
vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(NETWORK_PORT, vol.Equal(0)),
|
vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(
|
||||||
vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
|
NETWORK_PORT, vol.Equal(0)
|
||||||
vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
|
),
|
||||||
vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str),
|
vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
|
||||||
vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
|
vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
|
||||||
vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
|
vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str),
|
||||||
vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
|
vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
|
||||||
vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
|
vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
|
||||||
vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
|
vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
|
vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
|
vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
|
vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_TMPFS):
|
vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
|
||||||
vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
|
vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
|
vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
|
vol.Optional(ATTR_TMPFS): vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
|
||||||
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
|
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
|
||||||
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
|
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
|
||||||
vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
|
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
|
||||||
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
|
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
|
||||||
vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
|
vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
|
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
|
vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
|
vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
|
vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
|
vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
|
vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
|
||||||
vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
|
vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
|
vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
|
vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
|
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
|
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
|
||||||
vol.Required(ATTR_OPTIONS): dict,
|
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
|
||||||
vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
|
vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
|
||||||
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
|
vol.Required(ATTR_OPTIONS): dict,
|
||||||
vol.Any(
|
vol.Required(ATTR_SCHEMA): vol.Any(
|
||||||
SCHEMA_ELEMENT,
|
vol.Schema(
|
||||||
{vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
|
{
|
||||||
|
vol.Coerce(str): vol.Any(
|
||||||
|
SCHEMA_ELEMENT,
|
||||||
|
[
|
||||||
|
vol.Any(
|
||||||
|
SCHEMA_ELEMENT,
|
||||||
|
{
|
||||||
|
vol.Coerce(str): vol.Any(
|
||||||
|
SCHEMA_ELEMENT, [SCHEMA_ELEMENT]
|
||||||
|
)
|
||||||
|
},
|
||||||
|
)
|
||||||
|
],
|
||||||
|
vol.Schema(
|
||||||
|
{vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
|
||||||
|
),
|
||||||
|
)
|
||||||
|
}
|
||||||
),
|
),
|
||||||
], vol.Schema({
|
False,
|
||||||
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
|
),
|
||||||
}))
|
vol.Optional(ATTR_IMAGE): vol.Match(RE_DOCKER_IMAGE),
|
||||||
}), False),
|
vol.Optional(ATTR_TIMEOUT, default=10): vol.All(
|
||||||
vol.Optional(ATTR_IMAGE):
|
vol.Coerce(int), vol.Range(min=10, max=120)
|
||||||
vol.Match(RE_DOCKER_IMAGE),
|
),
|
||||||
vol.Optional(ATTR_TIMEOUT, default=10):
|
|
||||||
vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_REPOSITORY_CONFIG = vol.Schema({
|
|
||||||
vol.Required(ATTR_NAME): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_URL): vol.Url(),
|
|
||||||
vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_BUILD_CONFIG = vol.Schema({
|
|
||||||
vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema({
|
|
||||||
vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD),
|
|
||||||
}),
|
|
||||||
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
|
|
||||||
vol.Coerce(str): vol.Coerce(str)
|
|
||||||
}),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_ADDON_USER = vol.Schema({
|
|
||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_IMAGE): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
|
|
||||||
vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
|
|
||||||
vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_OPTIONS, default=dict): dict,
|
|
||||||
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_BOOT):
|
|
||||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
|
||||||
vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
|
|
||||||
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
|
|
||||||
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
|
|
||||||
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
|
|
||||||
vol.Required(ATTR_LOCATON): vol.Coerce(str),
|
|
||||||
vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_ADDONS_FILE = vol.Schema({
|
|
||||||
vol.Optional(ATTR_USER, default=dict): {
|
|
||||||
vol.Coerce(str): SCHEMA_ADDON_USER,
|
|
||||||
},
|
},
|
||||||
vol.Optional(ATTR_SYSTEM, default=dict): {
|
extra=vol.REMOVE_EXTRA,
|
||||||
vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# pylint: disable=no-value-for-parameter
|
||||||
|
SCHEMA_BUILD_CONFIG = vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema(
|
||||||
|
{vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}
|
||||||
|
),
|
||||||
|
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_ARGS, default=dict): vol.Schema(
|
||||||
|
{vol.Coerce(str): vol.Coerce(str)}
|
||||||
|
),
|
||||||
|
},
|
||||||
|
extra=vol.REMOVE_EXTRA,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# pylint: disable=no-value-for-parameter
|
||||||
|
SCHEMA_ADDON_USER = vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||||
|
vol.Optional(ATTR_IMAGE): vol.Coerce(str),
|
||||||
|
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
|
||||||
|
vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
|
||||||
|
vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(
|
||||||
|
str
|
||||||
|
),
|
||||||
|
vol.Optional(ATTR_OPTIONS, default=dict): dict,
|
||||||
|
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||||
|
vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
|
||||||
|
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
|
||||||
|
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
|
||||||
|
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
|
||||||
|
},
|
||||||
|
extra=vol.REMOVE_EXTRA,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend(
|
||||||
|
{
|
||||||
|
vol.Required(ATTR_LOCATON): vol.Coerce(str),
|
||||||
|
vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
|
||||||
}
|
}
|
||||||
})
|
)
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_ADDON_SNAPSHOT = vol.Schema({
|
SCHEMA_ADDONS_FILE = vol.Schema(
|
||||||
vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
|
{
|
||||||
vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
|
vol.Optional(ATTR_USER, default=dict): {vol.Coerce(str): SCHEMA_ADDON_USER},
|
||||||
vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
|
vol.Optional(ATTR_SYSTEM, default=dict): {vol.Coerce(str): SCHEMA_ADDON_SYSTEM},
|
||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
}
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
SCHEMA_ADDON_SNAPSHOT = vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
|
||||||
|
vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
|
||||||
|
vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
|
||||||
|
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||||
|
},
|
||||||
|
extra=vol.REMOVE_EXTRA,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def validate_options(raw_schema):
|
def validate_options(raw_schema):
|
||||||
"""Validate schema."""
|
"""Validate schema."""
|
||||||
|
|
||||||
def validate(struct):
|
def validate(struct):
|
||||||
"""Create schema validator for add-ons options."""
|
"""Create schema validator for add-ons options."""
|
||||||
options = {}
|
options = {}
|
||||||
@@ -301,7 +333,7 @@ def validate_options(raw_schema):
             except (IndexError, KeyError):
                 raise vol.Invalid(f"Type error for {key}") from None
 
-        _check_missing_options(raw_schema, options, 'root')
+        _check_missing_options(raw_schema, options, "root")
         return options
 
     return validate
@@ -320,7 +352,7 @@ def _single_validate(typ, value, key):
 
     # prepare range
     range_args = {}
-    for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
+    for group_name in ("i_min", "i_max", "f_min", "f_max"):
         group_value = match.group(group_name)
         if group_value:
             range_args[group_name[2:]] = float(group_value)
@@ -340,7 +372,7 @@ def _single_validate(typ, value, key):
     elif typ.startswith(V_PORT):
         return NETWORK_PORT(value)
     elif typ.startswith(V_MATCH):
-        return vol.Match(match.group('match'))(str(value))
+        return vol.Match(match.group("match"))(str(value))
 
     raise vol.Invalid(f"Fatal error for {key} type {typ}")
 
@@ -372,8 +404,7 @@ def _nested_validate_dict(typ, data_dict, key):
 
         # Nested?
         if isinstance(typ[c_key], list):
-            options[c_key] = _nested_validate_list(typ[c_key][0],
-                                                   c_value, c_key)
+            options[c_key] = _nested_validate_list(typ[c_key][0], c_value, c_key)
         else:
            options[c_key] = _single_validate(typ[c_key], c_value, c_key)
 
@@ -385,7 +416,6 @@ def _check_missing_options(origin, exists, root):
     """Check if all options exist."""
     missing = set(origin) - set(exists)
     for miss_opt in missing:
-        if isinstance(origin[miss_opt], str) and \
-                origin[miss_opt].endswith("?"):
+        if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
             continue
         raise vol.Invalid(f"Missing option {miss_opt} in {root}")
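
# --- Illustrative sketch (not part of this diff) ---
# _check_missing_options() treats schema entries whose type string ends in "?"
# as optional, so validated user options may omit them. Hypothetical example:
raw_schema = {"ssl": "bool", "certfile": "str?"}

# {"ssl": True} passes: "certfile" is optional because of the trailing "?".
# {} fails with vol.Invalid("Missing option ssl in root").
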
@@ -9,6 +9,7 @@ from ..coresys import CoreSys, CoreSysAttributes
 from .addons import APIAddons
 from .auth import APIAuth
 from .discovery import APIDiscovery
+from .dns import APICoreDNS
 from .hardware import APIHardware
 from .hassos import APIHassOS
 from .homeassistant import APIHomeAssistant
@@ -32,7 +33,8 @@ class RestAPI(CoreSysAttributes):
         self.coresys: CoreSys = coresys
         self.security: SecurityMiddleware = SecurityMiddleware(coresys)
         self.webapp: web.Application = web.Application(
-            middlewares=[self.security.token_validation])
+            middlewares=[self.security.token_validation]
+        )
 
         # service stuff
         self._runner: web.AppRunner = web.AppRunner(self.webapp)
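
    # --- Illustrative sketch (not part of this diff) ---
    # Every API feature in this file follows the same registration pattern:
    # instantiate the handler, attach coresys, then add aiohttp routes.
    # A hypothetical example for a new "example" endpoint:
    #
    #     def _register_example(self) -> None:
    #         """Register example functions."""
    #         api_example = APIExample()          # hypothetical handler class
    #         api_example.coresys = self.coresys
    #
    #         self.webapp.add_routes(
    #             [
    #                 web.get("/example/info", api_example.info),
    #                 web.post("/example/update", api_example.update),
    #             ]
    #         )
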
@@ -54,234 +56,247 @@ class RestAPI(CoreSysAttributes):
|
|||||||
self._register_services()
|
self._register_services()
|
||||||
self._register_info()
|
self._register_info()
|
||||||
self._register_auth()
|
self._register_auth()
|
||||||
|
self._register_dns()
|
||||||
|
|
||||||
def _register_host(self) -> None:
|
def _register_host(self) -> None:
|
||||||
"""Register hostcontrol functions."""
|
"""Register hostcontrol functions."""
|
||||||
api_host = APIHost()
|
api_host = APIHost()
|
||||||
api_host.coresys = self.coresys
|
api_host.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes(
|
||||||
web.get('/host/info', api_host.info),
|
[
|
||||||
web.post('/host/reboot', api_host.reboot),
|
web.get("/host/info", api_host.info),
|
||||||
web.post('/host/shutdown', api_host.shutdown),
|
web.post("/host/reboot", api_host.reboot),
|
||||||
web.post('/host/reload', api_host.reload),
|
web.post("/host/shutdown", api_host.shutdown),
|
||||||
web.post('/host/options', api_host.options),
|
web.post("/host/reload", api_host.reload),
|
||||||
web.get('/host/services', api_host.services),
|
web.post("/host/options", api_host.options),
|
||||||
web.post('/host/services/{service}/stop', api_host.service_stop),
|
web.get("/host/services", api_host.services),
|
||||||
web.post('/host/services/{service}/start', api_host.service_start),
|
web.post("/host/services/{service}/stop", api_host.service_stop),
|
||||||
web.post('/host/services/{service}/restart',
|
web.post("/host/services/{service}/start", api_host.service_start),
|
||||||
api_host.service_restart),
|
web.post("/host/services/{service}/restart", api_host.service_restart),
|
||||||
web.post('/host/services/{service}/reload',
|
web.post("/host/services/{service}/reload", api_host.service_reload),
|
||||||
api_host.service_reload),
|
]
|
||||||
])
|
)
|
||||||
|
|
||||||
def _register_hassos(self) -> None:
|
def _register_hassos(self) -> None:
|
||||||
"""Register HassOS functions."""
|
"""Register HassOS functions."""
|
||||||
api_hassos = APIHassOS()
|
api_hassos = APIHassOS()
|
||||||
api_hassos.coresys = self.coresys
|
api_hassos.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes(
|
||||||
web.get('/hassos/info', api_hassos.info),
|
[
|
||||||
web.post('/hassos/update', api_hassos.update),
|
web.get("/hassos/info", api_hassos.info),
|
||||||
web.post('/hassos/update/cli', api_hassos.update_cli),
|
web.post("/hassos/update", api_hassos.update),
|
||||||
web.post('/hassos/config/sync', api_hassos.config_sync),
|
web.post("/hassos/update/cli", api_hassos.update_cli),
|
||||||
])
|
web.post("/hassos/config/sync", api_hassos.config_sync),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
def _register_hardware(self) -> None:
|
def _register_hardware(self) -> None:
|
||||||
"""Register hardware functions."""
|
"""Register hardware functions."""
|
||||||
api_hardware = APIHardware()
|
api_hardware = APIHardware()
|
||||||
api_hardware.coresys = self.coresys
|
api_hardware.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes(
|
||||||
web.get('/hardware/info', api_hardware.info),
|
[
|
||||||
web.get('/hardware/audio', api_hardware.audio),
|
web.get("/hardware/info", api_hardware.info),
|
||||||
])
|
web.get("/hardware/audio", api_hardware.audio),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
def _register_info(self) -> None:
|
def _register_info(self) -> None:
|
||||||
"""Register info functions."""
|
"""Register info functions."""
|
||||||
api_info = APIInfo()
|
api_info = APIInfo()
|
||||||
api_info.coresys = self.coresys
|
api_info.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes([web.get("/info", api_info.info)])
|
||||||
web.get('/info', api_info.info),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_auth(self) -> None:
|
def _register_auth(self) -> None:
|
||||||
"""Register auth functions."""
|
"""Register auth functions."""
|
||||||
api_auth = APIAuth()
|
api_auth = APIAuth()
|
||||||
api_auth.coresys = self.coresys
|
api_auth.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes([web.post("/auth", api_auth.auth)])
|
||||||
web.post('/auth', api_auth.auth),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_supervisor(self) -> None:
|
def _register_supervisor(self) -> None:
|
||||||
"""Register Supervisor functions."""
|
"""Register Supervisor functions."""
|
||||||
api_supervisor = APISupervisor()
|
api_supervisor = APISupervisor()
|
||||||
api_supervisor.coresys = self.coresys
|
api_supervisor.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes(
|
||||||
web.get('/supervisor/ping', api_supervisor.ping),
|
[
|
||||||
web.get('/supervisor/info', api_supervisor.info),
|
web.get("/supervisor/ping", api_supervisor.ping),
|
||||||
web.get('/supervisor/stats', api_supervisor.stats),
|
web.get("/supervisor/info", api_supervisor.info),
|
||||||
web.get('/supervisor/logs', api_supervisor.logs),
|
web.get("/supervisor/stats", api_supervisor.stats),
|
||||||
web.post('/supervisor/update', api_supervisor.update),
|
web.get("/supervisor/logs", api_supervisor.logs),
|
||||||
web.post('/supervisor/reload', api_supervisor.reload),
|
web.post("/supervisor/update", api_supervisor.update),
|
||||||
web.post('/supervisor/options', api_supervisor.options),
|
web.post("/supervisor/reload", api_supervisor.reload),
|
||||||
])
|
web.post("/supervisor/options", api_supervisor.options),
|
||||||
|
web.post("/supervisor/repair", api_supervisor.repair),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
def _register_homeassistant(self) -> None:
|
def _register_homeassistant(self) -> None:
|
||||||
"""Register Home Assistant functions."""
|
"""Register Home Assistant functions."""
|
||||||
api_hass = APIHomeAssistant()
|
api_hass = APIHomeAssistant()
|
||||||
api_hass.coresys = self.coresys
|
api_hass.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes(
|
||||||
web.get('/homeassistant/info', api_hass.info),
|
[
|
||||||
web.get('/homeassistant/logs', api_hass.logs),
|
web.get("/homeassistant/info", api_hass.info),
|
||||||
web.get('/homeassistant/stats', api_hass.stats),
|
web.get("/homeassistant/logs", api_hass.logs),
|
||||||
web.post('/homeassistant/options', api_hass.options),
|
web.get("/homeassistant/stats", api_hass.stats),
|
||||||
web.post('/homeassistant/update', api_hass.update),
|
web.post("/homeassistant/options", api_hass.options),
|
||||||
web.post('/homeassistant/restart', api_hass.restart),
|
web.post("/homeassistant/update", api_hass.update),
|
||||||
web.post('/homeassistant/stop', api_hass.stop),
|
web.post("/homeassistant/restart", api_hass.restart),
|
||||||
web.post('/homeassistant/start', api_hass.start),
|
web.post("/homeassistant/stop", api_hass.stop),
|
||||||
web.post('/homeassistant/check', api_hass.check),
|
web.post("/homeassistant/start", api_hass.start),
|
||||||
web.post('/homeassistant/rebuild', api_hass.rebuild),
|
web.post("/homeassistant/check", api_hass.check),
|
||||||
])
|
web.post("/homeassistant/rebuild", api_hass.rebuild),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
def _register_proxy(self) -> None:
|
def _register_proxy(self) -> None:
|
||||||
"""Register Home Assistant API Proxy."""
|
"""Register Home Assistant API Proxy."""
|
||||||
api_proxy = APIProxy()
|
api_proxy = APIProxy()
|
||||||
api_proxy.coresys = self.coresys
|
api_proxy.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes(
|
||||||
web.get('/homeassistant/api/websocket', api_proxy.websocket),
|
[
|
||||||
web.get('/homeassistant/websocket', api_proxy.websocket),
|
web.get("/homeassistant/api/websocket", api_proxy.websocket),
|
||||||
web.get('/homeassistant/api/stream', api_proxy.stream),
|
web.get("/homeassistant/websocket", api_proxy.websocket),
|
||||||
web.post('/homeassistant/api/{path:.+}', api_proxy.api),
|
web.get("/homeassistant/api/stream", api_proxy.stream),
|
||||||
web.get('/homeassistant/api/{path:.+}', api_proxy.api),
|
web.post("/homeassistant/api/{path:.+}", api_proxy.api),
|
||||||
web.get('/homeassistant/api/', api_proxy.api),
|
web.get("/homeassistant/api/{path:.+}", api_proxy.api),
|
||||||
])
|
web.get("/homeassistant/api/", api_proxy.api),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
def _register_addons(self) -> None:
|
def _register_addons(self) -> None:
|
||||||
"""Register Add-on functions."""
|
"""Register Add-on functions."""
|
||||||
api_addons = APIAddons()
|
api_addons = APIAddons()
|
||||||
api_addons.coresys = self.coresys
|
api_addons.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes(
|
||||||
web.get('/addons', api_addons.list),
|
[
|
||||||
web.post('/addons/reload', api_addons.reload),
|
web.get("/addons", api_addons.list),
|
||||||
web.get('/addons/{addon}/info', api_addons.info),
|
web.post("/addons/reload", api_addons.reload),
|
||||||
web.post('/addons/{addon}/install', api_addons.install),
|
web.get("/addons/{addon}/info", api_addons.info),
|
||||||
web.post('/addons/{addon}/uninstall', api_addons.uninstall),
|
web.post("/addons/{addon}/install", api_addons.install),
|
||||||
web.post('/addons/{addon}/start', api_addons.start),
|
web.post("/addons/{addon}/uninstall", api_addons.uninstall),
|
||||||
web.post('/addons/{addon}/stop', api_addons.stop),
|
web.post("/addons/{addon}/start", api_addons.start),
|
||||||
web.post('/addons/{addon}/restart', api_addons.restart),
|
web.post("/addons/{addon}/stop", api_addons.stop),
|
||||||
web.post('/addons/{addon}/update', api_addons.update),
|
web.post("/addons/{addon}/restart", api_addons.restart),
|
||||||
web.post('/addons/{addon}/options', api_addons.options),
|
web.post("/addons/{addon}/update", api_addons.update),
|
||||||
web.post('/addons/{addon}/rebuild', api_addons.rebuild),
|
web.post("/addons/{addon}/options", api_addons.options),
|
||||||
web.get('/addons/{addon}/logs', api_addons.logs),
|
web.post("/addons/{addon}/rebuild", api_addons.rebuild),
|
||||||
web.get('/addons/{addon}/icon', api_addons.icon),
|
web.get("/addons/{addon}/logs", api_addons.logs),
|
||||||
web.get('/addons/{addon}/logo', api_addons.logo),
|
web.get("/addons/{addon}/icon", api_addons.icon),
|
||||||
web.get('/addons/{addon}/changelog', api_addons.changelog),
|
web.get("/addons/{addon}/logo", api_addons.logo),
|
||||||
web.post('/addons/{addon}/stdin', api_addons.stdin),
|
web.get("/addons/{addon}/changelog", api_addons.changelog),
|
||||||
web.post('/addons/{addon}/security', api_addons.security),
|
web.post("/addons/{addon}/stdin", api_addons.stdin),
|
||||||
web.get('/addons/{addon}/stats', api_addons.stats),
|
web.post("/addons/{addon}/security", api_addons.security),
|
||||||
])
|
web.get("/addons/{addon}/stats", api_addons.stats),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
def _register_ingress(self) -> None:
|
def _register_ingress(self) -> None:
|
||||||
"""Register Ingress functions."""
|
"""Register Ingress functions."""
|
||||||
api_ingress = APIIngress()
|
api_ingress = APIIngress()
|
||||||
api_ingress.coresys = self.coresys
|
api_ingress.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes(
|
||||||
web.post('/ingress/session', api_ingress.create_session),
|
[
|
||||||
web.get('/ingress/panels', api_ingress.panels),
|
web.post("/ingress/session", api_ingress.create_session),
|
||||||
web.view('/ingress/{token}/{path:.*}', api_ingress.handler),
|
web.get("/ingress/panels", api_ingress.panels),
|
||||||
])
|
web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
def _register_snapshots(self) -> None:
|
def _register_snapshots(self) -> None:
|
||||||
"""Register snapshots functions."""
|
"""Register snapshots functions."""
|
||||||
api_snapshots = APISnapshots()
|
api_snapshots = APISnapshots()
|
||||||
api_snapshots.coresys = self.coresys
|
api_snapshots.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes(
|
||||||
web.get('/snapshots', api_snapshots.list),
|
[
|
||||||
web.post('/snapshots/reload', api_snapshots.reload),
|
web.get("/snapshots", api_snapshots.list),
|
||||||
web.post('/snapshots/new/full', api_snapshots.snapshot_full),
|
web.post("/snapshots/reload", api_snapshots.reload),
|
||||||
web.post('/snapshots/new/partial', api_snapshots.snapshot_partial),
|
web.post("/snapshots/new/full", api_snapshots.snapshot_full),
|
||||||
web.post('/snapshots/new/upload', api_snapshots.upload),
|
web.post("/snapshots/new/partial", api_snapshots.snapshot_partial),
|
||||||
web.get('/snapshots/{snapshot}/info', api_snapshots.info),
|
web.post("/snapshots/new/upload", api_snapshots.upload),
|
||||||
web.post('/snapshots/{snapshot}/remove', api_snapshots.remove),
|
web.get("/snapshots/{snapshot}/info", api_snapshots.info),
|
||||||
web.post('/snapshots/{snapshot}/restore/full',
|
web.post("/snapshots/{snapshot}/remove", api_snapshots.remove),
|
||||||
api_snapshots.restore_full),
|
web.post(
|
||||||
web.post('/snapshots/{snapshot}/restore/partial',
|
"/snapshots/{snapshot}/restore/full", api_snapshots.restore_full
|
||||||
api_snapshots.restore_partial),
|
),
|
||||||
web.get('/snapshots/{snapshot}/download', api_snapshots.download),
|
web.post(
|
||||||
])
|
"/snapshots/{snapshot}/restore/partial",
|
||||||
|
api_snapshots.restore_partial,
|
||||||
|
),
|
||||||
|
web.get("/snapshots/{snapshot}/download", api_snapshots.download),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
def _register_services(self) -> None:
|
def _register_services(self) -> None:
|
||||||
"""Register services functions."""
|
"""Register services functions."""
|
||||||
api_services = APIServices()
|
api_services = APIServices()
|
||||||
api_services.coresys = self.coresys
|
api_services.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes(
|
||||||
web.get('/services', api_services.list),
|
[
|
||||||
web.get('/services/{service}', api_services.get_service),
|
web.get("/services", api_services.list),
|
||||||
web.post('/services/{service}', api_services.set_service),
|
web.get("/services/{service}", api_services.get_service),
|
||||||
web.delete('/services/{service}', api_services.del_service),
|
web.post("/services/{service}", api_services.set_service),
|
||||||
])
|
web.delete("/services/{service}", api_services.del_service),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
def _register_discovery(self) -> None:
|
def _register_discovery(self) -> None:
|
||||||
"""Register discovery functions."""
|
"""Register discovery functions."""
|
||||||
api_discovery = APIDiscovery()
|
api_discovery = APIDiscovery()
|
||||||
api_discovery.coresys = self.coresys
|
api_discovery.coresys = self.coresys
|
||||||
|
|
||||||
self.webapp.add_routes([
|
self.webapp.add_routes(
|
||||||
web.get('/discovery', api_discovery.list),
|
[
|
||||||
web.get('/discovery/{uuid}', api_discovery.get_discovery),
|
web.get("/discovery", api_discovery.list),
|
||||||
web.delete('/discovery/{uuid}', api_discovery.del_discovery),
|
web.get("/discovery/{uuid}", api_discovery.get_discovery),
|
||||||
web.post('/discovery', api_discovery.set_discovery),
|
web.delete("/discovery/{uuid}", api_discovery.del_discovery),
|
||||||
])
|
web.post("/discovery", api_discovery.set_discovery),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
def _register_dns(self) -> None:
|
||||||
|
"""Register DNS functions."""
|
||||||
|
api_dns = APICoreDNS()
|
||||||
|
api_dns.coresys = self.coresys
|
||||||
|
|
||||||
|
self.webapp.add_routes(
|
||||||
|
[
|
||||||
|
web.get("/dns/info", api_dns.info),
|
||||||
|
web.get("/dns/stats", api_dns.stats),
|
||||||
|
web.get("/dns/logs", api_dns.logs),
|
||||||
|
web.post("/dns/update", api_dns.update),
|
||||||
|
web.post("/dns/options", api_dns.options),
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
def _register_panel(self) -> None:
|
def _register_panel(self) -> None:
|
||||||
"""Register panel for Home Assistant."""
|
"""Register panel for Home Assistant."""
|
||||||
panel_dir = Path(__file__).parent.joinpath("panel")
|
panel_dir = Path(__file__).parent.joinpath("panel")
|
||||||
|
self.webapp.add_routes([web.static("/app", panel_dir)])
|
||||||
def create_response(panel_file):
|
|
||||||
"""Create a function to generate a response."""
|
|
||||||
path = panel_dir.joinpath(f"{panel_file!s}.html")
|
|
||||||
return lambda request: web.FileResponse(path)
|
|
||||||
|
|
||||||
# This route is for backwards compatibility with HA < 0.58
|
|
||||||
self.webapp.add_routes(
|
|
||||||
[web.get('/panel', create_response('hassio-main-es5'))])
|
|
||||||
|
|
||||||
# This route is for backwards compatibility with HA 0.58 - 0.61
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/panel_es5', create_response('hassio-main-es5')),
|
|
||||||
web.get('/panel_latest', create_response('hassio-main-latest')),
|
|
||||||
])
|
|
||||||
|
|
||||||
# This route is for backwards compatibility with HA 0.62 - 0.70
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/app-es5/index.html', create_response('index')),
|
|
||||||
web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
|
|
||||||
])
|
|
||||||
|
|
||||||
# This route is for HA > 0.70
|
|
||||||
self.webapp.add_routes([web.static('/app', panel_dir)])
|
|
||||||
|
|
||||||
async def start(self) -> None:
|
async def start(self) -> None:
|
||||||
"""Run RESTful API webserver."""
|
"""Run RESTful API webserver."""
|
||||||
await self._runner.setup()
|
await self._runner.setup()
|
||||||
self._site = web.TCPSite(
|
self._site = web.TCPSite(
|
||||||
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5)
|
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
|
||||||
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
await self._site.start()
|
await self._site.start()
|
||||||
except OSError as err:
|
except OSError as err:
|
||||||
_LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s",
|
_LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
|
||||||
err)
|
|
||||||
else:
|
else:
|
||||||
_LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
|
_LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
|
||||||
|
|
||||||
|
@@ -7,7 +7,8 @@ from aiohttp import web
|
|||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
from voluptuous.humanize import humanize_error
|
from voluptuous.humanize import humanize_error
|
||||||
|
|
||||||
from ..addons.addon import Addon
|
from ..addons import AnyAddon
|
||||||
|
from ..docker.stats import DockerStats
|
||||||
from ..addons.utils import rating_security
|
from ..addons.utils import rating_security
|
||||||
from ..const import (
|
from ..const import (
|
||||||
ATTR_ADDONS,
|
ATTR_ADDONS,
|
||||||
@@ -30,6 +31,7 @@ from ..const import (
|
|||||||
ATTR_DEVICES,
|
ATTR_DEVICES,
|
||||||
ATTR_DEVICETREE,
|
ATTR_DEVICETREE,
|
||||||
ATTR_DISCOVERY,
|
ATTR_DISCOVERY,
|
||||||
|
ATTR_DNS,
|
||||||
ATTR_DOCKER_API,
|
ATTR_DOCKER_API,
|
||||||
ATTR_FULL_ACCESS,
|
ATTR_FULL_ACCESS,
|
||||||
ATTR_GPIO,
|
ATTR_GPIO,
|
||||||
@@ -41,12 +43,13 @@ from ..const import (
|
|||||||
ATTR_HOST_IPC,
|
ATTR_HOST_IPC,
|
||||||
ATTR_HOST_NETWORK,
|
ATTR_HOST_NETWORK,
|
||||||
ATTR_HOST_PID,
|
ATTR_HOST_PID,
|
||||||
|
ATTR_HOSTNAME,
|
||||||
ATTR_ICON,
|
ATTR_ICON,
|
||||||
ATTR_INGRESS,
|
ATTR_INGRESS,
|
||||||
ATTR_INGRESS_ENTRY,
|
ATTR_INGRESS_ENTRY,
|
||||||
|
ATTR_INGRESS_PANEL,
|
||||||
ATTR_INGRESS_PORT,
|
ATTR_INGRESS_PORT,
|
||||||
ATTR_INGRESS_URL,
|
ATTR_INGRESS_URL,
|
||||||
ATTR_INGRESS_PANEL,
|
|
||||||
ATTR_INSTALLED,
|
ATTR_INSTALLED,
|
||||||
ATTR_IP_ADDRESS,
|
ATTR_IP_ADDRESS,
|
||||||
ATTR_KERNEL_MODULES,
|
ATTR_KERNEL_MODULES,
|
||||||
@@ -56,6 +59,7 @@ from ..const import (
     ATTR_MACHINE,
     ATTR_MAINTAINER,
     ATTR_MEMORY_LIMIT,
+    ATTR_MEMORY_PERCENT,
     ATTR_MEMORY_USAGE,
     ATTR_NAME,
     ATTR_NETWORK,
@@ -73,6 +77,7 @@ from ..const import (
     ATTR_SOURCE,
     ATTR_STATE,
     ATTR_STDIN,
+    ATTR_UDEV,
     ATTR_URL,
     ATTR_VERSION,
     ATTR_WEBUI,
@@ -82,6 +87,7 @@ from ..const import (
     CONTENT_TYPE_PNG,
     CONTENT_TYPE_TEXT,
     REQUEST_FROM,
+    STATE_NONE,
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
@@ -90,35 +96,35 @@ from .utils import api_process, api_process_raw, api_validate

 _LOGGER = logging.getLogger(__name__)

-SCHEMA_VERSION = vol.Schema({
-    vol.Optional(ATTR_VERSION): vol.Coerce(str),
-})
+SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})

 # pylint: disable=no-value-for-parameter
-SCHEMA_OPTIONS = vol.Schema({
-    vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
-    vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
-    vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
-    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
-    vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
-    vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
-})
+SCHEMA_OPTIONS = vol.Schema(
+    {
+        vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
+        vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
+        vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
+        vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
+        vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
+        vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
+    }
+)

 # pylint: disable=no-value-for-parameter
-SCHEMA_SECURITY = vol.Schema({
-    vol.Optional(ATTR_PROTECTED): vol.Boolean(),
-})
+SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})


 class APIAddons(CoreSysAttributes):
     """Handle RESTful API for add-on functions."""

-    def _extract_addon(self, request: web.Request, check_installed: bool = True) -> Addon:
+    def _extract_addon(
+        self, request: web.Request, check_installed: bool = True
+    ) -> AnyAddon:
         """Return addon, throw an exception it it doesn't exist."""
-        addon_slug = request.match_info.get('addon')
+        addon_slug: str = request.match_info.get("addon")

         # Lookup itself
-        if addon_slug == 'self':
+        if addon_slug == "self":
             return request.get(REQUEST_FROM)

         addon = self.sys_addons.get(addon_slug)
@@ -134,57 +140,59 @@ class APIAddons(CoreSysAttributes):
     async def list(self, request: web.Request) -> Dict[str, Any]:
         """Return all add-ons or repositories."""
         data_addons = []
-        for addon in self.sys_addons.list_addons:
-            data_addons.append({
-                ATTR_NAME: addon.name,
-                ATTR_SLUG: addon.slug,
-                ATTR_DESCRIPTON: addon.description,
-                ATTR_VERSION: addon.latest_version,
-                ATTR_INSTALLED: addon.version_installed,
-                ATTR_AVAILABLE: addon.available,
-                ATTR_DETACHED: addon.is_detached,
-                ATTR_REPOSITORY: addon.repository,
-                ATTR_BUILD: addon.need_build,
-                ATTR_URL: addon.url,
-                ATTR_ICON: addon.with_icon,
-                ATTR_LOGO: addon.with_logo,
-            })
+        for addon in self.sys_addons.all:
+            data_addons.append(
+                {
+                    ATTR_NAME: addon.name,
+                    ATTR_SLUG: addon.slug,
+                    ATTR_DESCRIPTON: addon.description,
+                    ATTR_VERSION: addon.latest_version,
+                    ATTR_INSTALLED: addon.version if addon.is_installed else None,
+                    ATTR_AVAILABLE: addon.available,
+                    ATTR_DETACHED: addon.is_detached,
+                    ATTR_REPOSITORY: addon.repository,
+                    ATTR_BUILD: addon.need_build,
+                    ATTR_URL: addon.url,
+                    ATTR_ICON: addon.with_icon,
+                    ATTR_LOGO: addon.with_logo,
+                }
+            )

         data_repositories = []
-        for repository in self.sys_addons.list_repositories:
-            data_repositories.append({
-                ATTR_SLUG: repository.slug,
-                ATTR_NAME: repository.name,
-                ATTR_SOURCE: repository.source,
-                ATTR_URL: repository.url,
-                ATTR_MAINTAINER: repository.maintainer,
-            })
+        for repository in self.sys_store.all:
+            data_repositories.append(
+                {
+                    ATTR_SLUG: repository.slug,
+                    ATTR_NAME: repository.name,
+                    ATTR_SOURCE: repository.source,
+                    ATTR_URL: repository.url,
+                    ATTR_MAINTAINER: repository.maintainer,
+                }
+            )

-        return {
-            ATTR_ADDONS: data_addons,
-            ATTR_REPOSITORIES: data_repositories,
-        }
+        return {ATTR_ADDONS: data_addons, ATTR_REPOSITORIES: data_repositories}

     @api_process
     async def reload(self, request: web.Request) -> None:
-        """Reload all add-on data."""
-        await asyncio.shield(self.sys_addons.reload())
+        """Reload all add-on data from store."""
+        await asyncio.shield(self.sys_store.reload())

     @api_process
     async def info(self, request: web.Request) -> Dict[str, Any]:
         """Return add-on information."""
-        addon = self._extract_addon(request, check_installed=False)
+        addon: AnyAddon = self._extract_addon(request, check_installed=False)

-        return {
+        data = {
             ATTR_NAME: addon.name,
             ATTR_SLUG: addon.slug,
+            ATTR_HOSTNAME: addon.hostname,
+            ATTR_DNS: addon.dns,
             ATTR_DESCRIPTON: addon.description,
             ATTR_LONG_DESCRIPTION: addon.long_description,
-            ATTR_VERSION: addon.version_installed,
-            ATTR_AUTO_UPDATE: addon.auto_update,
+            ATTR_AUTO_UPDATE: None,
             ATTR_REPOSITORY: addon.repository,
+            ATTR_VERSION: None,
             ATTR_LAST_VERSION: addon.latest_version,
-            ATTR_STATE: await addon.state(),
             ATTR_PROTECTED: addon.protected,
             ATTR_RATING: rating_security(addon),
             ATTR_BOOT: addon.boot,
@@ -193,6 +201,7 @@ class APIAddons(CoreSysAttributes):
             ATTR_MACHINE: addon.supported_machine,
             ATTR_HOMEASSISTANT: addon.homeassistant_version,
             ATTR_URL: addon.url,
+            ATTR_STATE: STATE_NONE,
             ATTR_DETACHED: addon.is_detached,
             ATTR_AVAILABLE: addon.available,
             ATTR_BUILD: addon.need_build,
@@ -209,8 +218,8 @@ class APIAddons(CoreSysAttributes):
             ATTR_ICON: addon.with_icon,
             ATTR_LOGO: addon.with_logo,
             ATTR_CHANGELOG: addon.with_changelog,
-            ATTR_WEBUI: addon.webui,
             ATTR_STDIN: addon.with_stdin,
+            ATTR_WEBUI: None,
             ATTR_HASSIO_API: addon.access_hassio_api,
             ATTR_HASSIO_ROLE: addon.hassio_role,
             ATTR_AUTH_API: addon.access_auth_api,
@@ -218,29 +227,49 @@ class APIAddons(CoreSysAttributes):
             ATTR_GPIO: addon.with_gpio,
             ATTR_KERNEL_MODULES: addon.with_kernel_modules,
             ATTR_DEVICETREE: addon.with_devicetree,
+            ATTR_UDEV: addon.with_udev,
             ATTR_DOCKER_API: addon.access_docker_api,
             ATTR_AUDIO: addon.with_audio,
-            ATTR_AUDIO_INPUT: addon.audio_input,
-            ATTR_AUDIO_OUTPUT: addon.audio_output,
+            ATTR_AUDIO_INPUT: None,
+            ATTR_AUDIO_OUTPUT: None,
             ATTR_SERVICES: _pretty_services(addon),
             ATTR_DISCOVERY: addon.discovery,
-            ATTR_IP_ADDRESS: str(addon.ip_address),
+            ATTR_IP_ADDRESS: None,
             ATTR_INGRESS: addon.with_ingress,
-            ATTR_INGRESS_ENTRY: addon.ingress_entry,
-            ATTR_INGRESS_URL: addon.ingress_url,
-            ATTR_INGRESS_PORT: addon.ingress_port,
-            ATTR_INGRESS_PANEL: addon.ingress_panel,
+            ATTR_INGRESS_ENTRY: None,
+            ATTR_INGRESS_URL: None,
+            ATTR_INGRESS_PORT: None,
+            ATTR_INGRESS_PANEL: None,
         }

+        if addon.is_installed:
+            data.update(
+                {
+                    ATTR_STATE: await addon.state(),
+                    ATTR_WEBUI: addon.webui,
+                    ATTR_INGRESS_ENTRY: addon.ingress_entry,
+                    ATTR_INGRESS_URL: addon.ingress_url,
+                    ATTR_INGRESS_PORT: addon.ingress_port,
+                    ATTR_INGRESS_PANEL: addon.ingress_panel,
+                    ATTR_AUDIO_INPUT: addon.audio_input,
+                    ATTR_AUDIO_OUTPUT: addon.audio_output,
+                    ATTR_AUTO_UPDATE: addon.auto_update,
+                    ATTR_IP_ADDRESS: str(addon.ip_address),
+                    ATTR_VERSION: addon.version,
+                }
+            )
+
+        return data
+
     @api_process
     async def options(self, request: web.Request) -> None:
         """Store user options for add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)

-        addon_schema = SCHEMA_OPTIONS.extend({
-            vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
-        })
-        body = await api_validate(addon_schema, request)
+        addon_schema = SCHEMA_OPTIONS.extend(
+            {vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema)}
+        )
+        body: Dict[str, Any] = await api_validate(addon_schema, request)

         if ATTR_OPTIONS in body:
             addon.options = body[ATTR_OPTIONS]
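The reworked info() handler above always returns the full key set: a store-only add-on gets None placeholders for runtime fields such as version, state, webui and the ingress details, and data.update(...) overwrites those placeholders only when the add-on is installed, so a client can branch on the null values instead of probing for missing keys. A hedged sketch of how a consumer might read this (the /addons/{slug}/info path, the "data" envelope, and the token handling are assumptions based on the handler above, not shown verbatim in this diff):

import asyncio

import aiohttp

ADDON_INFO_URL = "http://hassio/addons/core_ssh/info"  # assumed endpoint path
TOKEN = "<supervisor token>"  # placeholder


async def describe_addon() -> None:
    headers = {"Authorization": f"Bearer {TOKEN}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.get(ADDON_INFO_URL) as resp:
            body = await resp.json()

    data = body["data"]
    if data["version"] is None:
        # Not installed: only store metadata (name, description, rating, ...) is real.
        print(f"{data['name']} is available but not installed")
    else:
        # Installed: runtime fields were filled in by data.update(...).
        print(f"{data['name']} {data['version']} is {data['state']}")


asyncio.run(describe_addon())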
@@ -258,30 +287,31 @@ class APIAddons(CoreSysAttributes):
             addon.ingress_panel = body[ATTR_INGRESS_PANEL]
             await self.sys_ingress.update_hass_panel(addon)

-        addon.save_data()
+        addon.save_persist()

     @api_process
     async def security(self, request: web.Request) -> None:
         """Store security options for add-on."""
-        addon = self._extract_addon(request)
-        body = await api_validate(SCHEMA_SECURITY, request)
+        addon: AnyAddon = self._extract_addon(request)
+        body: Dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

         if ATTR_PROTECTED in body:
             _LOGGER.warning("Protected flag changing for %s!", addon.slug)
             addon.protected = body[ATTR_PROTECTED]

-        addon.save_data()
+        addon.save_persist()

     @api_process
     async def stats(self, request: web.Request) -> Dict[str, Any]:
         """Return resource information."""
-        addon = self._extract_addon(request)
-        stats = await addon.stats()
+        addon: AnyAddon = self._extract_addon(request)
+        stats: DockerStats = await addon.stats()

         return {
             ATTR_CPU_PERCENT: stats.cpu_percent,
             ATTR_MEMORY_USAGE: stats.memory_usage,
             ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_MEMORY_PERCENT: stats.memory_percent,
             ATTR_NETWORK_RX: stats.network_rx,
             ATTR_NETWORK_TX: stats.network_tx,
             ATTR_BLK_READ: stats.blk_read,
@@ -291,19 +321,19 @@ class APIAddons(CoreSysAttributes):
     @api_process
     def install(self, request: web.Request) -> Awaitable[None]:
         """Install add-on."""
-        addon = self._extract_addon(request, check_installed=False)
+        addon: AnyAddon = self._extract_addon(request, check_installed=False)
         return asyncio.shield(addon.install())

     @api_process
     def uninstall(self, request: web.Request) -> Awaitable[None]:
         """Uninstall add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)
         return asyncio.shield(addon.uninstall())

     @api_process
     def start(self, request: web.Request) -> Awaitable[None]:
         """Start add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)

         # check options
         options = addon.options
@@ -317,15 +347,15 @@ class APIAddons(CoreSysAttributes):
     @api_process
     def stop(self, request: web.Request) -> Awaitable[None]:
         """Stop add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)
         return asyncio.shield(addon.stop())

     @api_process
     def update(self, request: web.Request) -> Awaitable[None]:
         """Update add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)

-        if addon.latest_version == addon.version_installed:
+        if addon.latest_version == addon.version:
             raise APIError("No update available!")

         return asyncio.shield(addon.update())
@@ -333,13 +363,13 @@ class APIAddons(CoreSysAttributes):
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)
         return asyncio.shield(addon.restart())

     @api_process
     def rebuild(self, request: web.Request) -> Awaitable[None]:
         """Rebuild local build add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)
         if not addon.need_build:
             raise APIError("Only local build addons are supported")

@@ -348,43 +378,43 @@ class APIAddons(CoreSysAttributes):
     @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request: web.Request) -> Awaitable[bytes]:
         """Return logs from add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)
         return addon.logs()

     @api_process_raw(CONTENT_TYPE_PNG)
     async def icon(self, request: web.Request) -> bytes:
         """Return icon from add-on."""
-        addon = self._extract_addon(request, check_installed=False)
+        addon: AnyAddon = self._extract_addon(request, check_installed=False)
         if not addon.with_icon:
             raise APIError("No icon found!")

-        with addon.path_icon.open('rb') as png:
+        with addon.path_icon.open("rb") as png:
             return png.read()

     @api_process_raw(CONTENT_TYPE_PNG)
     async def logo(self, request: web.Request) -> bytes:
         """Return logo from add-on."""
-        addon = self._extract_addon(request, check_installed=False)
+        addon: AnyAddon = self._extract_addon(request, check_installed=False)
         if not addon.with_logo:
             raise APIError("No logo found!")

-        with addon.path_logo.open('rb') as png:
+        with addon.path_logo.open("rb") as png:
             return png.read()

     @api_process_raw(CONTENT_TYPE_TEXT)
     async def changelog(self, request: web.Request) -> str:
         """Return changelog from add-on."""
-        addon = self._extract_addon(request, check_installed=False)
+        addon: AnyAddon = self._extract_addon(request, check_installed=False)
         if not addon.with_changelog:
             raise APIError("No changelog found!")

-        with addon.path_changelog.open('r') as changelog:
+        with addon.path_changelog.open("r") as changelog:
             return changelog.read()

     @api_process
     async def stdin(self, request: web.Request) -> None:
         """Write to stdin of add-on."""
-        addon = self._extract_addon(request)
+        addon: AnyAddon = self._extract_addon(request)
         if not addon.with_stdin:
             raise APIError("STDIN not supported by add-on")

@@ -392,15 +422,15 @@ class APIAddons(CoreSysAttributes):
         await asyncio.shield(addon.write_stdin(data))


-def _pretty_devices(addon: Addon) -> List[str]:
+def _pretty_devices(addon: AnyAddon) -> List[str]:
     """Return a simplified device list."""
     dev_list = addon.devices
     if not dev_list:
         return None
-    return [row.split(':')[0] for row in dev_list]
+    return [row.split(":")[0] for row in dev_list]


-def _pretty_services(addon: Addon) -> List[str]:
+def _pretty_services(addon: AnyAddon) -> List[str]:
     """Return a simplified services role list."""
     services = []
     for name, access in addon.services_role.items():
@@ -29,8 +29,8 @@ class APIAuth(CoreSysAttributes):

         Return a coroutine.
         """
-        username = data.get('username') or data.get('user')
-        password = data.get('password')
+        username = data.get("username") or data.get("user")
+        password = data.get("password")

         return self.sys_auth.check_login(addon, username, password)

@@ -56,6 +56,6 @@ class APIAuth(CoreSysAttributes):
             data = await request.post()
             return await self._process_dict(request, addon, data)

-        raise HTTPUnauthorized(headers={
-            WWW_AUTHENTICATE: "Basic realm=\"Hass.io Authentication\""
-        })
+        raise HTTPUnauthorized(
+            headers={WWW_AUTHENTICATE: 'Basic realm="Hass.io Authentication"'}
+        )
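The reformatted HTTPUnauthorized above is what a client sees when the login check falls through: a 401 response carrying WWW-Authenticate: Basic realm="Hass.io Authentication", which asks the caller to retry with HTTP Basic credentials. A small illustrative client, not part of this diff (the URL and credentials are placeholders):

import asyncio

import aiohttp

AUTH_URL = "http://hassio/auth"  # placeholder URL for the auth backend


async def login(username: str, password: str) -> bool:
    auth = aiohttp.BasicAuth(username, password)
    async with aiohttp.ClientSession() as session:
        async with session.post(AUTH_URL, auth=auth) as resp:
            if resp.status == 401:
                # Server answered with the Basic challenge raised above.
                print(resp.headers.get("WWW-Authenticate"))
                return False
            return resp.status == 200


print(asyncio.run(login("homeassistant", "correct horse battery staple")))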
 hassio/api/dns.py (new file, 89 lines)
@@ -0,0 +1,89 @@
+"""Init file for Hass.io DNS RESTful API."""
+import asyncio
+import logging
+from typing import Any, Awaitable, Dict
+
+from aiohttp import web
+import voluptuous as vol
+
+from ..const import (
+    ATTR_BLK_READ,
+    ATTR_BLK_WRITE,
+    ATTR_CPU_PERCENT,
+    ATTR_HOST,
+    ATTR_LATEST_VERSION,
+    ATTR_MEMORY_LIMIT,
+    ATTR_MEMORY_USAGE,
+    ATTR_MEMORY_PERCENT,
+    ATTR_NETWORK_RX,
+    ATTR_NETWORK_TX,
+    ATTR_SERVERS,
+    ATTR_VERSION,
+    CONTENT_TYPE_BINARY,
+)
+from ..coresys import CoreSysAttributes
+from ..exceptions import APIError
+from ..validate import DNS_SERVER_LIST
+from .utils import api_process, api_process_raw, api_validate
+
+_LOGGER = logging.getLogger(__name__)
+
+# pylint: disable=no-value-for-parameter
+SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): DNS_SERVER_LIST})
+
+SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
+
+
+class APICoreDNS(CoreSysAttributes):
+    """Handle RESTful API for DNS functions."""
+
+    @api_process
+    async def info(self, request: web.Request) -> Dict[str, Any]:
+        """Return DNS information."""
+        return {
+            ATTR_VERSION: self.sys_dns.version,
+            ATTR_LATEST_VERSION: self.sys_dns.latest_version,
+            ATTR_HOST: str(self.sys_docker.network.dns),
+            ATTR_SERVERS: self.sys_dns.servers,
+        }
+
+    @api_process
+    async def options(self, request: web.Request) -> None:
+        """Set DNS options."""
+        body = await api_validate(SCHEMA_OPTIONS, request)
+
+        if ATTR_SERVERS in body:
+            self.sys_dns.servers = body[ATTR_SERVERS]
+
+        self.sys_dns.save_data()
+
+    @api_process
+    async def stats(self, request: web.Request) -> Dict[str, Any]:
+        """Return resource information."""
+        stats = await self.sys_dns.stats()
+
+        return {
+            ATTR_CPU_PERCENT: stats.cpu_percent,
+            ATTR_MEMORY_USAGE: stats.memory_usage,
+            ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_MEMORY_PERCENT: stats.memory_percent,
+            ATTR_NETWORK_RX: stats.network_rx,
+            ATTR_NETWORK_TX: stats.network_tx,
+            ATTR_BLK_READ: stats.blk_read,
+            ATTR_BLK_WRITE: stats.blk_write,
+        }
+
+    @api_process
+    async def update(self, request: web.Request) -> None:
+        """Update DNS plugin."""
+        body = await api_validate(SCHEMA_VERSION, request)
+        version = body.get(ATTR_VERSION, self.sys_dns.latest_version)
+
+        if version == self.sys_dns.version:
+            raise APIError("Version {} is already in use".format(version))
+        await asyncio.shield(self.sys_dns.update(version))
+
+    @api_process_raw(CONTENT_TYPE_BINARY)
+    def logs(self, request: web.Request) -> Awaitable[bytes]:
+        """Return DNS Docker logs."""
+        return self.sys_dns.logs()
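The new APICoreDNS handlers follow the usual Supervisor REST layout. A hedged example of exercising them from a client, assuming the plugin is mounted under /dns (the route registration happens elsewhere in the API setup and is not part of this hunk) and assuming DNS_SERVER_LIST accepts dns:// URLs:

import asyncio

import aiohttp

SUPERVISOR = "http://hassio"  # placeholder base URL
HEADERS = {"Authorization": "Bearer <supervisor token>"}  # placeholder


async def reconfigure_dns() -> None:
    async with aiohttp.ClientSession(headers=HEADERS) as session:
        # Read current plugin state (version, host IP, configured servers).
        async with session.get(f"{SUPERVISOR}/dns/info") as resp:
            print(await resp.json())

        # Point the DNS plugin at custom upstream servers.
        payload = {"servers": ["dns://8.8.8.8", "dns://1.1.1.1"]}
        async with session.post(f"{SUPERVISOR}/dns/options", json=payload) as resp:
            resp.raise_for_status()


asyncio.run(reconfigure_dns())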
@@ -3,7 +3,13 @@ import logging

 from .utils import api_process
 from ..const import (
-    ATTR_SERIAL, ATTR_DISK, ATTR_GPIO, ATTR_AUDIO, ATTR_INPUT, ATTR_OUTPUT)
+    ATTR_SERIAL,
+    ATTR_DISK,
+    ATTR_GPIO,
+    ATTR_AUDIO,
+    ATTR_INPUT,
+    ATTR_OUTPUT,
+)
 from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)
@@ -16,7 +22,9 @@ class APIHardware(CoreSysAttributes):
     async def info(self, request):
         """Show hardware info."""
         return {
-            ATTR_SERIAL: list(self.sys_hardware.serial_devices),
+            ATTR_SERIAL: list(
+                self.sys_hardware.serial_devices | self.sys_hardware.serial_by_id
+            ),
             ATTR_INPUT: list(self.sys_hardware.input_devices),
             ATTR_DISK: list(self.sys_hardware.disk_devices),
             ATTR_GPIO: list(self.sys_hardware.gpio_devices),
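The ATTR_SERIAL change above merges two collections with the | operator before converting to a list, so stable by-id aliases are reported alongside the raw device nodes. That only works because both properties behave like sets; a small hedged sketch of the idea (the device paths are made up):

# Both collections are assumed to be sets of device paths, as the | union implies.
serial_devices = {"/dev/ttyUSB0", "/dev/ttyACM0"}
serial_by_id = {"/dev/serial/by-id/usb-1a86_USB2.0-Serial-if00-port0"}

# Same shape as the handler: one flat list with raw nodes and stable aliases.
combined = list(serial_devices | serial_by_id)
print(sorted(combined))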
@@ -18,6 +18,7 @@ from ..const import (
     ATTR_MACHINE,
     ATTR_MEMORY_LIMIT,
     ATTR_MEMORY_USAGE,
+    ATTR_MEMORY_PERCENT,
     ATTR_NETWORK_RX,
     ATTR_NETWORK_TX,
     ATTR_PASSWORD,
@@ -41,8 +42,8 @@ _LOGGER = logging.getLogger(__name__)
 SCHEMA_OPTIONS = vol.Schema(
     {
         vol.Optional(ATTR_BOOT): vol.Boolean(),
-        vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(vol.Coerce(str)),
-        vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Any(None, DOCKER_IMAGE),
+        vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(DOCKER_IMAGE),
+        vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Maybe(vol.Coerce(str)),
         vol.Optional(ATTR_PORT): NETWORK_PORT,
         vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
         vol.Optional(ATTR_SSL): vol.Boolean(),
@@ -106,6 +107,7 @@ class APIHomeAssistant(CoreSysAttributes):

         if ATTR_REFRESH_TOKEN in body:
             self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]
+            self.sys_homeassistant.api_password = None

         self.sys_homeassistant.save_data()

@@ -120,6 +122,7 @@ class APIHomeAssistant(CoreSysAttributes):
             ATTR_CPU_PERCENT: stats.cpu_percent,
             ATTR_MEMORY_USAGE: stats.memory_usage,
             ATTR_MEMORY_LIMIT: stats.memory_limit,
+            ATTR_MEMORY_PERCENT: stats.memory_percent,
             ATTR_NETWORK_RX: stats.network_rx,
             ATTR_NETWORK_TX: stats.network_tx,
             ATTR_BLK_READ: stats.blk_read,
@@ -6,18 +6,25 @@ import voluptuous as vol

 from .utils import api_process, api_validate
 from ..const import (
-    ATTR_HOSTNAME, ATTR_FEATURES, ATTR_KERNEL, ATTR_OPERATING_SYSTEM,
-    ATTR_CHASSIS, ATTR_DEPLOYMENT, ATTR_STATE, ATTR_NAME, ATTR_DESCRIPTON,
-    ATTR_SERVICES, ATTR_CPE)
+    ATTR_HOSTNAME,
+    ATTR_FEATURES,
+    ATTR_KERNEL,
+    ATTR_OPERATING_SYSTEM,
+    ATTR_CHASSIS,
+    ATTR_DEPLOYMENT,
+    ATTR_STATE,
+    ATTR_NAME,
+    ATTR_DESCRIPTON,
+    ATTR_SERVICES,
+    ATTR_CPE,
+)
 from ..coresys import CoreSysAttributes

 _LOGGER = logging.getLogger(__name__)

-SERVICE = 'service'
+SERVICE = "service"

-SCHEMA_OPTIONS = vol.Schema({
-    vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
-})
+SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): vol.Coerce(str)})


 class APIHost(CoreSysAttributes):
@@ -44,7 +51,8 @@ class APIHost(CoreSysAttributes):
         # hostname
         if ATTR_HOSTNAME in body:
             await asyncio.shield(
-                self.sys_host.control.set_hostname(body[ATTR_HOSTNAME]))
+                self.sys_host.control.set_hostname(body[ATTR_HOSTNAME])
+            )

     @api_process
     def reboot(self, request):
@@ -66,15 +74,15 @@ class APIHost(CoreSysAttributes):
         """Return list of available services."""
         services = []
         for unit in self.sys_host.services:
-            services.append({
-                ATTR_NAME: unit.name,
-                ATTR_DESCRIPTON: unit.description,
-                ATTR_STATE: unit.state,
-            })
+            services.append(
+                {
+                    ATTR_NAME: unit.name,
+                    ATTR_DESCRIPTON: unit.description,
+                    ATTR_STATE: unit.state,
+                }
+            )

-        return {
-            ATTR_SERVICES: services
-        }
+        return {ATTR_SERVICES: services}

     @api_process
     def service_start(self, request):
@@ -14,6 +14,7 @@ from ..const import (
     ATTR_MACHINE,
     ATTR_SUPERVISOR,
     ATTR_SUPPORTED_ARCH,
+    ATTR_TIMEZONE,
 )
 from ..coresys import CoreSysAttributes
 from .utils import api_process
@@ -37,4 +38,5 @@ class APIInfo(CoreSysAttributes):
             ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
             ATTR_CHANNEL: self.sys_updater.channel,
             ATTR_LOGGING: self.sys_config.logging,
+            ATTR_TIMEZONE: self.sys_timezone,
         }
@@ -158,7 +158,12 @@ class APIIngress(CoreSysAttributes):
         source_header = _init_header(request, addon)

         async with self.sys_websession.request(
-            request.method, url, headers=source_header, params=request.query, data=data
+            request.method,
+            url,
+            headers=source_header,
+            params=request.query,
+            allow_redirects=False,
+            data=data,
         ) as result:
             headers = _response_header(result)

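Adding allow_redirects=False above matters for a reverse proxy: without it aiohttp would follow a redirect issued by the add-on and hand back the final body, so the browser inside the ingress iframe would never see the Location header and relative redirects would break. A minimal sketch of the difference in client behaviour (the URL is only an example of a service that answers with a redirect):

import asyncio

import aiohttp


async def show_redirect_handling(url: str) -> None:
    async with aiohttp.ClientSession() as session:
        # Proxy-style request: pass the 3xx status and Location back to the caller.
        async with session.get(url, allow_redirects=False) as resp:
            print(resp.status, resp.headers.get("Location"))

        # Default behaviour: aiohttp follows the redirect chain internally.
        async with session.get(url) as resp:
            print(resp.status, resp.url)


asyncio.run(show_redirect_handling("https://httpbin.org/redirect/1"))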
Rebuilt frontend panel assets (hassio/api/panel/): the compiled chunk files, their .gz copies, and their .js.map source maps were regenerated. Chunks added in this compare include chunk.04bcaa18b59728e10be9, chunk.598ae99dfd641ab3a30c, chunk.5dd33a3a20657ed46a19, chunk.6685a7f98b13655ab808, chunk.6f4702eafe52425373ed, chunk.7c785f796f428abae18d, chunk.7f411ffa9df152cb8f05, chunk.7f8cce5798f837214ef8, chunk.87d3a6d0178fb26762cf, chunk.9d7374dae6137783dda4, chunk.af7784dbf07df8e24819, chunk.b15efbd4fb2c8cac0ad4 and chunk.f1156b978f6f3143a651; source maps from the previous build (chunk.510634470d399e194ace, chunk.564a2f7b1c38ddaa4ce0, chunk.659084fef4e3b7b66a76, chunk.6e9c87e51920a9c354e5, chunk.a7e5fb452cd1b3a5faef, chunk.f15d7f41c0d302cbbc7a) were removed. Most of these diffs are suppressed in the web view ("File diff suppressed because one or more lines are too long") or binary (the .gz files). The visible changes are license-banner comments being reordered inside chunk.598ae99dfd641ab3a30c.js and chunk.6685a7f98b13655ab808.js (including a new Fuse.js v3.4.4 banner), a regenerated markdown/XSS helper chunk (chunk.7f8cce5798f837214ef8.js replacing the old load_markdown chunk), and a new source map for chunk.5dd33a3a20657ed46a19.js that embeds the source of src/ingress-view/hassio-ingress-view.ts, the new ingress iframe view.