Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-08-13 19:19:21 +00:00
Compare commits
411 Commits
@@ -1,4 +1,4 @@
-FROM python:3.7
+FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.8
 
 WORKDIR /workspaces
 
@@ -13,7 +13,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
 && apt-get update && apt-get install -y --no-install-recommends \
 nodejs \
 yarn \
-&& curl -o - https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
+&& curl -o - https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash \
 && rm -rf /var/lib/apt/lists/*
 ENV NVM_DIR /root/.nvm
 
@@ -33,11 +33,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
 containerd.io \
 && rm -rf /var/lib/apt/lists/*
 
+# Install tools
+RUN apt-get update && apt-get install -y --no-install-recommends \
+jq \
+dbus \
+network-manager \
+libpulse0 \
+&& rm -rf /var/lib/apt/lists/*
+
 # Install Python dependencies from requirements.txt if it exists
 COPY requirements.txt requirements_tests.txt ./
-RUN pip3 install -r requirements.txt -r requirements_tests.txt \
+RUN pip3 install -U setuptools pip \
+&& pip3 install -r requirements.txt -r requirements_tests.txt \
 && pip3 install tox \
 && rm -f requirements.txt requirements_tests.txt
 
-# Set the default shell to bash instead of sh
-ENV SHELL /bin/bash
@@ -1,31 +1,32 @@
-// See https://aka.ms/vscode-remote/devcontainer.json for format details.
 {
-  "name": "Hass.io dev",
+  "name": "Supervisor dev",
   "context": "..",
   "dockerFile": "Dockerfile",
   "appPort": "9123:8123",
-  "runArgs": [
-    "-e",
-    "GIT_EDITOR=code --wait",
-    "--privileged"
-  ],
+  "postCreateCommand": "pre-commit install",
+  "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
   "extensions": [
     "ms-python.python",
+    "ms-python.vscode-pylance",
     "visualstudioexptteam.vscodeintellicode",
     "esbenp.prettier-vscode"
   ],
   "settings": {
-    "python.pythonPath": "/usr/local/bin/python",
-    "python.linting.pylintEnabled": true,
-    "python.linting.enabled": true,
-    "python.formatting.provider": "black",
-    "python.formatting.blackArgs": [
-      "--target-version",
-      "py37"
-    ],
+    "terminal.integrated.shell.linux": "/bin/bash",
     "editor.formatOnPaste": false,
     "editor.formatOnSave": true,
     "editor.formatOnType": true,
-    "files.trimTrailingWhitespace": true
+    "files.trimTrailingWhitespace": true,
+    "python.pythonPath": "/usr/local/bin/python3",
+    "python.linting.pylintEnabled": true,
+    "python.linting.enabled": true,
+    "python.formatting.provider": "black",
+    "python.formatting.blackArgs": ["--target-version", "py38"],
+    "python.formatting.blackPath": "/usr/local/bin/black",
+    "python.linting.banditPath": "/usr/local/bin/bandit",
+    "python.linting.flake8Path": "/usr/local/bin/flake8",
+    "python.linting.mypyPath": "/usr/local/bin/mypy",
+    "python.linting.pylintPath": "/usr/local/bin/pylint",
+    "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
   }
 }
@@ -14,10 +14,10 @@
 # virtualenv
 venv/
 
-# HA
-home-assistant-polymer/*
-misc/*
-script/*
+# Data
+home-assistant-polymer/
+script/
+tests/
 
 # Test ENV
 data/
13 .github/ISSUE_TEMPLATE.md vendored
@@ -1,15 +1,15 @@
 <!-- READ THIS FIRST:
 - If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
-- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
-- Do not report issues for components here, plaese refer to https://github.com/home-assistant/home-assistant/issues
+- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/core/releases
+- Do not report issues for integrations here, please refer to https://github.com/home-assistant/core/issues
 - This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
 - Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
-- If you have a problem with a Add-on, make a issue on there repository.
+- If you have a problem with an add-on, make an issue in its repository.
 -->
 
 **Home Assistant release with the issue:**
 <!--
-- Frontend -> Developer tools -> Info
+- Frontend -> Configuration -> Info
 - Or use this command: hass --version
 -->
 
@@ -20,10 +20,9 @@ Please provide details about your environment.
 
 **Supervisor logs:**
 <!--
-- Frontend -> Hass.io -> System
-- Or use this command: hassio su logs
+- Frontend -> Supervisor -> System
+- Or use this command: ha supervisor logs
 -->
 
-
 **Description of problem:**
 
14 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,14 @@
+version: 2
+updates:
+  - package-ecosystem: pip
+    directory: "/"
+    schedule:
+      interval: daily
+      time: "06:00"
+    open-pull-requests-limit: 10
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: daily
+      time: "06:00"
+    open-pull-requests-limit: 10
2 .github/lock.yml vendored
@@ -24,4 +24,4 @@ only: pulls
 
 # Optionally, specify configuration settings just for `issues` or `pulls`
 issues:
   daysUntilLock: 30
3 .github/stale.yml vendored
@@ -6,8 +6,9 @@ daysUntilClose: 7
 exemptLabels:
   - pinned
   - security
+  - rfc
 # Label to use when marking an issue as stale
-staleLabel: wontfix
+staleLabel: stale
 # Comment to post when marking an issue as stale. Set to `false` to disable
 markComment: >
   This issue has been automatically marked as stale because it has not had
432 .github/workflows/ci.yaml vendored Normal file
@@ -0,0 +1,432 @@
|
|||||||
|
name: CI
|
||||||
|
|
||||||
|
# yamllint disable-line rule:truthy
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- dev
|
||||||
|
- master
|
||||||
|
pull_request: ~
|
||||||
|
|
||||||
|
env:
|
||||||
|
DEFAULT_PYTHON: 3.8
|
||||||
|
PRE_COMMIT_HOME: ~/.cache/pre-commit
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Separate job to pre-populate the base dependency cache
|
||||||
|
# This prevent upcoming jobs to do the same individually
|
||||||
|
prepare:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: [3.8]
|
||||||
|
name: Prepare Python ${{ matrix.python-version }} dependencies
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
|
id: python
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-
|
||||||
|
- name: Create Python virtual environment
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
python -m venv venv
|
||||||
|
. venv/bin/activate
|
||||||
|
pip install -U pip setuptools
|
||||||
|
pip install -r requirements.txt -r requirements_tests.txt
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-pre-commit-
|
||||||
|
- name: Install pre-commit dependencies
|
||||||
|
if: steps.cache-precommit.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit install-hooks
|
||||||
|
|
||||||
|
lint-black:
|
||||||
|
name: Check black
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run black
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
black --target-version py38 --check supervisor tests setup.py
|
||||||
|
|
||||||
|
lint-dockerfile:
|
||||||
|
name: Check Dockerfile
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Register hadolint problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
|
||||||
|
- name: Check Dockerfile
|
||||||
|
uses: docker://hadolint/hadolint:v1.18.0
|
||||||
|
with:
|
||||||
|
args: hadolint Dockerfile
|
||||||
|
|
||||||
|
lint-executable-shebangs:
|
||||||
|
name: Check executables
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register check executables problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
|
||||||
|
- name: Run executables check
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
|
||||||
|
|
||||||
|
lint-flake8:
|
||||||
|
name: Check flake8
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register flake8 problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/flake8.json"
|
||||||
|
- name: Run flake8
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
flake8 supervisor tests
|
||||||
|
|
||||||
|
lint-isort:
|
||||||
|
name: Check isort
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run isort
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
|
||||||
|
|
||||||
|
lint-json:
|
||||||
|
name: Check JSON
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register check-json problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/check-json.json"
|
||||||
|
- name: Run check-json
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual check-json --all-files
|
||||||
|
|
||||||
|
lint-pylint:
|
||||||
|
name: Check pylint
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register pylint problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/pylint.json"
|
||||||
|
- name: Run pylint
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pylint supervisor tests
|
||||||
|
|
||||||
|
lint-pyupgrade:
|
||||||
|
name: Check pyupgrade
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run pyupgrade
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
|
||||||
|
|
||||||
|
pytest:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: [3.8]
|
||||||
|
name: Run tests Python ${{ matrix.python-version }}
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Install additional system dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y --no-install-recommends libpulse0 libudev1
|
||||||
|
- name: Register Python problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||||
|
- name: Install Pytest Annotation plugin
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
# Ideally this should be part of our dependencies
|
||||||
|
# However this plugin is fairly new and doesn't run correctly
|
||||||
|
# on a non-GitHub environment.
|
||||||
|
pip install pytest-github-actions-annotate-failures
|
||||||
|
- name: Run pytest
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pytest \
|
||||||
|
-qq \
|
||||||
|
--timeout=10 \
|
||||||
|
--durations=10 \
|
||||||
|
--cov supervisor \
|
||||||
|
-o console_output_style=count \
|
||||||
|
tests
|
||||||
|
- name: Upload coverage artifact
|
||||||
|
uses: actions/upload-artifact@v2.1.4
|
||||||
|
with:
|
||||||
|
name: coverage-${{ matrix.python-version }}
|
||||||
|
path: .coverage
|
||||||
|
|
||||||
|
coverage:
|
||||||
|
name: Process test coverage
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: pytest
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Download all coverage artifacts
|
||||||
|
uses: actions/download-artifact@v2
|
||||||
|
- name: Combine coverage results
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
coverage combine coverage*/.coverage*
|
||||||
|
coverage report
|
||||||
|
coverage xml
|
||||||
|
- name: Upload coverage to Codecov
|
||||||
|
uses: codecov/codecov-action@v1.0.13
|
14 .github/workflows/matchers/check-executables-have-shebangs.json vendored Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "check-executables-have-shebangs",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):\\s(.+)$",
|
||||||
|
"file": 1,
|
||||||
|
"message": 2
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
16 .github/workflows/matchers/check-json.json vendored Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "check-json",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):\\s(.+\\sline\\s(\\d+)\\scolumn\\s(\\d+).+)$",
|
||||||
|
"file": 1,
|
||||||
|
"message": 2,
|
||||||
|
"line": 3,
|
||||||
|
"column": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
30 .github/workflows/matchers/flake8.json vendored Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "flake8-error",
|
||||||
|
"severity": "error",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"message": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"owner": "flake8-warning",
|
||||||
|
"severity": "warning",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"message": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
16 .github/workflows/matchers/hadolint.json vendored Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "hadolint",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):(\\d+)\\s+((DL\\d{4}).+)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"message": 3,
|
||||||
|
"code": 4
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
32 .github/workflows/matchers/pylint.json vendored Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "pylint-error",
|
||||||
|
"severity": "error",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):(\\d+):(\\d+):\\s(([EF]\\d{4}):\\s.+)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"message": 4,
|
||||||
|
"code": 5
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"owner": "pylint-warning",
|
||||||
|
"severity": "warning",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^(.+):(\\d+):(\\d+):\\s(([CRW]\\d{4}):\\s.+)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2,
|
||||||
|
"column": 3,
|
||||||
|
"message": 4,
|
||||||
|
"code": 5
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
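These matcher files feed GitHub's problem-matcher mechanism: the workflow registers them with `::add-matcher::`, and the regular expressions then map raw linter output onto file/line/column annotations in the pull-request view. As a rough sketch of what the pylint error matcher above captures (the sample pylint line is invented for illustration):

```python
# Rough sketch of what the "pylint-error" problem matcher captures.
# The sample output line below is made up for illustration only.
import re

PYLINT_ERROR = re.compile(r"^(.+):(\d+):(\d+):\s(([EF]\d{4}):\s.+)$")

sample = "supervisor/core.py:42:0: E0602: Undefined variable 'foo' (undefined-variable)"
match = PYLINT_ERROR.match(sample)
assert match is not None

path, line, column, message, code = match.groups()
print(path, line, column, code)  # supervisor/core.py 42 0 E0602
print(message)                   # E0602: Undefined variable 'foo' (undefined-variable)
```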
18 .github/workflows/matchers/python.json vendored Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"problemMatcher": [
|
||||||
|
{
|
||||||
|
"owner": "python",
|
||||||
|
"pattern": [
|
||||||
|
{
|
||||||
|
"regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
|
||||||
|
"file": 1,
|
||||||
|
"line": 2
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
|
||||||
|
"message": 2
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
15 .github/workflows/release-drafter.yml vendored Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
name: Release Drafter
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
# branches to consider in the event; optional, defaults to all
|
||||||
|
branches:
|
||||||
|
- dev
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
update_release_draft:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: release-drafter/release-drafter@v5
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
21 .github/workflows/sentry.yaml vendored Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
name: Sentry Release
|
||||||
|
|
||||||
|
# yamllint disable-line rule:truthy
|
||||||
|
on:
|
||||||
|
release:
|
||||||
|
types: [published, prereleased]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
createSentryRelease:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Sentry Release
|
||||||
|
uses: getsentry/action-release@v1.0.1
|
||||||
|
env:
|
||||||
|
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||||
|
SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
|
||||||
|
SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
|
||||||
|
with:
|
||||||
|
environment: production
|
5 .gitignore vendored
@@ -95,3 +95,8 @@ ENV/
 .vscode/*
 !.vscode/cSpell.json
 !.vscode/tasks.json
+!.vscode/launch.json
+
+# mypy
+/.mypy_cache/*
+/.dmypy.json
34 .pre-commit-config.yaml Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
repos:
|
||||||
|
- repo: https://github.com/psf/black
|
||||||
|
rev: 20.8b1
|
||||||
|
hooks:
|
||||||
|
- id: black
|
||||||
|
args:
|
||||||
|
- --safe
|
||||||
|
- --quiet
|
||||||
|
- --target-version
|
||||||
|
- py38
|
||||||
|
files: ^((supervisor|tests)/.+)?[^/]+\.py$
|
||||||
|
- repo: https://gitlab.com/pycqa/flake8
|
||||||
|
rev: 3.8.3
|
||||||
|
hooks:
|
||||||
|
- id: flake8
|
||||||
|
additional_dependencies:
|
||||||
|
- flake8-docstrings==1.5.0
|
||||||
|
- pydocstyle==5.0.2
|
||||||
|
files: ^(supervisor|script|tests)/.+\.py$
|
||||||
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
|
rev: v3.1.0
|
||||||
|
hooks:
|
||||||
|
- id: check-executables-have-shebangs
|
||||||
|
stages: [manual]
|
||||||
|
- id: check-json
|
||||||
|
- repo: https://github.com/pre-commit/mirrors-isort
|
||||||
|
rev: v4.3.21
|
||||||
|
hooks:
|
||||||
|
- id: isort
|
||||||
|
- repo: https://github.com/asottile/pyupgrade
|
||||||
|
rev: v2.6.2
|
||||||
|
hooks:
|
||||||
|
- id: pyupgrade
|
||||||
|
args: [--py37-plus]
|
18 .vscode/launch.json vendored Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"version": "0.2.0",
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"name": "Supervisor remote debug",
|
||||||
|
"type": "python",
|
||||||
|
"request": "attach",
|
||||||
|
"port": 33333,
|
||||||
|
"host": "172.30.32.2",
|
||||||
|
"pathMappings": [
|
||||||
|
{
|
||||||
|
"localRoot": "${workspaceFolder}",
|
||||||
|
"remoteRoot": "/usr/src/supervisor"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
180 .vscode/tasks.json vendored
@@ -1,92 +1,90 @@
|
|||||||
{
|
{
|
||||||
"version": "2.0.0",
|
"version": "2.0.0",
|
||||||
"tasks": [
|
"tasks": [
|
||||||
{
|
{
|
||||||
"label": "Run Testenv",
|
"label": "Run Testenv",
|
||||||
"type": "shell",
|
"type": "shell",
|
||||||
"command": "./scripts/test_env.sh",
|
"command": "./scripts/test_env.sh",
|
||||||
"group": {
|
"group": {
|
||||||
"kind": "test",
|
"kind": "test",
|
||||||
"isDefault": true,
|
"isDefault": true
|
||||||
},
|
},
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "always",
|
"reveal": "always",
|
||||||
"panel": "new"
|
"panel": "new"
|
||||||
},
|
},
|
||||||
"problemMatcher": []
|
"problemMatcher": []
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"label": "Run Testenv CLI",
|
"label": "Run Testenv CLI",
|
||||||
"type": "shell",
|
"type": "shell",
|
||||||
"command": "docker run --rm -ti -v /etc/machine-id:/etc/machine-id --network=hassio --add-host hassio:172.30.32.2 homeassistant/amd64-hassio-cli:dev",
|
"command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
|
||||||
"group": {
|
"group": {
|
||||||
"kind": "test",
|
"kind": "test",
|
||||||
"isDefault": true,
|
"isDefault": true
|
||||||
},
|
},
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "always",
|
"reveal": "always",
|
||||||
"panel": "new"
|
"panel": "new"
|
||||||
},
|
},
|
||||||
"problemMatcher": []
|
"problemMatcher": []
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"label": "Update UI",
|
"label": "Update UI",
|
||||||
"type": "shell",
|
"type": "shell",
|
||||||
"command": "./scripts/update-frontend.sh",
|
"command": "./scripts/update-frontend.sh",
|
||||||
"group": {
|
"group": {
|
||||||
"kind": "build",
|
"kind": "build",
|
||||||
"isDefault": true
|
"isDefault": true
|
||||||
},
|
},
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "always",
|
"reveal": "always",
|
||||||
"panel": "new"
|
"panel": "new"
|
||||||
},
|
},
|
||||||
"problemMatcher": []
|
"problemMatcher": []
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"label": "Pytest",
|
"label": "Pytest",
|
||||||
"type": "shell",
|
"type": "shell",
|
||||||
"command": "pytest --timeout=10 tests",
|
"command": "pytest --timeout=10 tests",
|
||||||
"group": {
|
"group": {
|
||||||
"kind": "test",
|
"kind": "test",
|
||||||
"isDefault": true,
|
"isDefault": true
|
||||||
},
|
},
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "always",
|
"reveal": "always",
|
||||||
"panel": "new"
|
"panel": "new"
|
||||||
},
|
},
|
||||||
"problemMatcher": []
|
"problemMatcher": []
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"label": "Flake8",
|
"label": "Flake8",
|
||||||
"type": "shell",
|
"type": "shell",
|
||||||
"command": "flake8 hassio tests",
|
"command": "flake8 supervisor tests",
|
||||||
"group": {
|
"group": {
|
||||||
"kind": "test",
|
"kind": "test",
|
||||||
"isDefault": true,
|
"isDefault": true
|
||||||
},
|
},
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "always",
|
"reveal": "always",
|
||||||
"panel": "new"
|
"panel": "new"
|
||||||
},
|
},
|
||||||
"problemMatcher": []
|
"problemMatcher": []
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"label": "Pylint",
|
"label": "Pylint",
|
||||||
"type": "shell",
|
"type": "shell",
|
||||||
"command": "pylint hassio",
|
"command": "pylint supervisor",
|
||||||
"dependsOn": [
|
"dependsOn": ["Install all Requirements"],
|
||||||
"Install all Requirements"
|
"group": {
|
||||||
],
|
"kind": "test",
|
||||||
"group": {
|
"isDefault": true
|
||||||
"kind": "test",
|
},
|
||||||
"isDefault": true,
|
"presentation": {
|
||||||
},
|
"reveal": "always",
|
||||||
"presentation": {
|
"panel": "new"
|
||||||
"reveal": "always",
|
},
|
||||||
"panel": "new"
|
"problemMatcher": []
|
||||||
},
|
}
|
||||||
"problemMatcher": []
|
]
|
||||||
}
|
}
|
||||||
]
|
|
||||||
}
|
|
||||||
|
454 API.md
@@ -2,7 +2,8 @@
 
 ## Supervisor RESTful API
 
-Interface for Home Assistant to control things from supervisor.
+The RESTful API for Home Assistant allows you to control things around
+around the Supervisor and other components.
 
 On error / Code 400:
 
@@ -22,8 +23,10 @@ On success / Code 200:
 }
 ```
 
-For access to API you need use a authorization header with a `Bearer` token.
-They are available for Add-ons and the Home Assistant using the `SUPERVISOR_TOKEN` environment variable.
+To access the API you need use an authorization header with a `Bearer` token.
+
+The token is available for add-ons and Home Assistant using the
+`SUPERVISOR_TOKEN` environment variable.
 
 ### Supervisor
 
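As a minimal sketch of the authentication described in this hunk (not part of the original document): an add-on can read `SUPERVISOR_TOKEN` from its environment and send it as a `Bearer` token. The `http://supervisor` base URL and the `requests` dependency are assumptions for this example.

```python
# Minimal sketch: call the Supervisor API from inside an add-on.
# Assumes the usual http://supervisor base URL and that the `requests`
# package is available; adjust both for your environment.
import os
import requests

token = os.environ["SUPERVISOR_TOKEN"]
response = requests.get(
    "http://supervisor/supervisor/info",
    headers={"Authorization": f"Bearer {token}"},
    timeout=10,
)
response.raise_for_status()
print(response.json().get("data", {}))
```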
@@ -33,20 +36,23 @@ This API call don't need a token.
|
|||||||
|
|
||||||
- GET `/supervisor/info`
|
- GET `/supervisor/info`
|
||||||
|
|
||||||
The addons from `addons` are only installed one.
|
Shows the installed add-ons from `addons`.
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"version": "INSTALL_VERSION",
|
"version": "INSTALL_VERSION",
|
||||||
"last_version": "LAST_VERSION",
|
"version_latest": "version_latest",
|
||||||
"arch": "armhf|aarch64|i386|amd64",
|
"arch": "armhf|aarch64|i386|amd64",
|
||||||
"channel": "stable|beta|dev",
|
"channel": "stable|beta|dev",
|
||||||
"timezone": "TIMEZONE",
|
"timezone": "TIMEZONE",
|
||||||
|
"healthy": "bool",
|
||||||
|
"supported": "bool",
|
||||||
"logging": "debug|info|warning|error|critical",
|
"logging": "debug|info|warning|error|critical",
|
||||||
"ip_address": "ip address",
|
"ip_address": "ip address",
|
||||||
"wait_boot": "int",
|
"wait_boot": "int",
|
||||||
"debug": "bool",
|
"debug": "bool",
|
||||||
"debug_block": "bool",
|
"debug_block": "bool",
|
||||||
|
"diagnostics": "None|bool",
|
||||||
"addons": [
|
"addons": [
|
||||||
{
|
{
|
||||||
"name": "xy bla",
|
"name": "xy bla",
|
||||||
@@ -90,11 +96,11 @@ Optional:
|
|||||||
|
|
||||||
- POST `/supervisor/reload`
|
- POST `/supervisor/reload`
|
||||||
|
|
||||||
Reload addons/version.
|
Reload the add-ons/version.
|
||||||
|
|
||||||
- GET `/supervisor/logs`
|
- GET `/supervisor/logs`
|
||||||
|
|
||||||
Output is the raw docker log.
|
Output is the raw Docker log.
|
||||||
|
|
||||||
- GET `/supervisor/stats`
|
- GET `/supervisor/stats`
|
||||||
|
|
||||||
@@ -113,7 +119,7 @@ Output is the raw docker log.
|
|||||||
|
|
||||||
- GET `/supervisor/repair`
|
- GET `/supervisor/repair`
|
||||||
|
|
||||||
Repair overlayfs issue and restore lost images
|
Repair overlayfs issue and restore lost images.
|
||||||
|
|
||||||
### Snapshot
|
### Snapshot
|
||||||
|
|
||||||
@@ -134,7 +140,6 @@ Repair overlayfs issue and restore lost images
|
|||||||
```
|
```
|
||||||
|
|
||||||
- POST `/snapshots/reload`
|
- POST `/snapshots/reload`
|
||||||
|
|
||||||
- POST `/snapshots/new/upload`
|
- POST `/snapshots/new/upload`
|
||||||
|
|
||||||
return:
|
return:
|
||||||
@@ -208,9 +213,7 @@ return:
|
|||||||
```
|
```
|
||||||
|
|
||||||
- POST `/snapshots/{slug}/remove`
|
- POST `/snapshots/{slug}/remove`
|
||||||
|
|
||||||
- GET `/snapshots/{slug}/download`
|
- GET `/snapshots/{slug}/download`
|
||||||
|
|
||||||
- POST `/snapshots/{slug}/restore/full`
|
- POST `/snapshots/{slug}/restore/full`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
@@ -233,25 +236,29 @@ return:
|
|||||||
### Host
|
### Host
|
||||||
|
|
||||||
- POST `/host/reload`
|
- POST `/host/reload`
|
||||||
|
|
||||||
- POST `/host/shutdown`
|
- POST `/host/shutdown`
|
||||||
|
|
||||||
- POST `/host/reboot`
|
- POST `/host/reboot`
|
||||||
|
|
||||||
- GET `/host/info`
|
- GET `/host/info`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"hostname": "hostname|null",
|
|
||||||
"features": ["shutdown", "reboot", "hostname", "services", "hassos"],
|
|
||||||
"operating_system": "HassOS XY|Ubuntu 16.4|null",
|
|
||||||
"kernel": "4.15.7|null",
|
|
||||||
"chassis": "specific|null",
|
"chassis": "specific|null",
|
||||||
|
"cpe": "xy|null",
|
||||||
"deployment": "stable|beta|dev|null",
|
"deployment": "stable|beta|dev|null",
|
||||||
"cpe": "xy|null"
|
"disk_total": 32.0,
|
||||||
|
"disk_used": 30.0,
|
||||||
|
"disk_free": 2.0,
|
||||||
|
"features": ["shutdown", "reboot", "hostname", "services", "hassos"],
|
||||||
|
"hostname": "hostname|null",
|
||||||
|
"kernel": "4.15.7|null",
|
||||||
|
"operating_system": "HassOS XY|Ubuntu 16.4|null"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
- GET `/host/logs`
|
||||||
|
|
||||||
|
Return the host log messages (dmesg).
|
||||||
|
|
||||||
- POST `/host/options`
|
- POST `/host/options`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
@@ -291,9 +298,7 @@ return:
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"version": "2.3",
|
"version": "2.3",
|
||||||
"version_cli": "7",
|
|
||||||
"version_latest": "2.4",
|
"version_latest": "2.4",
|
||||||
"version_cli_latest": "8",
|
|
||||||
"board": "ova|rpi",
|
"board": "ova|rpi",
|
||||||
"boot": "rauc boot slot"
|
"boot": "rauc boot slot"
|
||||||
}
|
}
|
||||||
@@ -307,17 +312,9 @@ return:
|
|||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
- POST `/os/update/cli`
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"version": "optional"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
- POST `/os/config/sync`
|
- POST `/os/config/sync`
|
||||||
|
|
||||||
Load host configs from a USB stick.
|
Load host configurations from an USB stick.
|
||||||
|
|
||||||
### Hardware
|
### Hardware
|
||||||
|
|
||||||
@@ -360,7 +357,7 @@ Load host configs from a USB stick.
|
|||||||
|
|
||||||
- POST `/hardware/trigger`
|
- POST `/hardware/trigger`
|
||||||
|
|
||||||
Trigger an udev reload
|
Trigger an UDEV reload.
|
||||||
|
|
||||||
### Home Assistant
|
### Home Assistant
|
||||||
|
|
||||||
@@ -369,17 +366,18 @@ Trigger an udev reload
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"version": "INSTALL_VERSION",
|
"version": "INSTALL_VERSION",
|
||||||
"last_version": "LAST_VERSION",
|
"version_latest": "version_latest",
|
||||||
"arch": "arch",
|
"arch": "arch",
|
||||||
"machine": "Image machine type",
|
"machine": "Image machine type",
|
||||||
"ip_address": "ip address",
|
"ip_address": "ip address",
|
||||||
"image": "str",
|
"image": "str",
|
||||||
"custom": "bool -> if custom image",
|
|
||||||
"boot": "bool",
|
"boot": "bool",
|
||||||
"port": 8123,
|
"port": 8123,
|
||||||
"ssl": "bool",
|
"ssl": "bool",
|
||||||
"watchdog": "bool",
|
"watchdog": "bool",
|
||||||
"wait_boot": 600
|
"wait_boot": 600,
|
||||||
|
"audio_input": "null|profile",
|
||||||
|
"audio_output": "null|profile"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -408,16 +406,18 @@ Output is the raw Docker log.
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"image": "Optional|null",
|
"image": "Optional|null",
|
||||||
"last_version": "Optional for custom image|null",
|
"version_latest": "Optional for custom image|null",
|
||||||
"port": "port for access core",
|
"port": "port for access core",
|
||||||
"ssl": "bool",
|
"ssl": "bool",
|
||||||
"refresh_token": "",
|
"refresh_token": "",
|
||||||
"watchdog": "bool",
|
"watchdog": "bool",
|
||||||
"wait_boot": 600
|
"wait_boot": 600,
|
||||||
|
"audio_input": "null|profile",
|
||||||
|
"audio_output": "null|profile"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
Image with `null` and last_version with `null` reset this options.
|
Image with `null` and `version_latest` with `null` reset this options.
|
||||||
|
|
||||||
- POST/GET `/core/api`
|
- POST/GET `/core/api`
|
||||||
|
|
||||||
@@ -442,6 +442,65 @@ Proxy to Home Assistant Core websocket.
 }
 ```
 
+### Network
+
+Network operations over the API
+
+#### GET `/network/info`
+
+Get network information
+
+```json
+{
+  "interfaces": {
+    "enp0s31f6": {
+      "ip_address": "192.168.2.148/24",
+      "gateway": "192.168.2.1",
+      "id": "Wired connection 1",
+      "type": "802-3-ethernet",
+      "nameservers": ["192.168.2.1"],
+      "method": "static",
+      "primary": true
+    }
+  }
+}
+```
+
+#### GET `/network/interface/{interface}/info`
+
+Get information for a single interface
+
+```json
+{
+  "ip_address": "192.168.2.148/24",
+  "gateway": "192.168.2.1",
+  "id": "Wired connection 1",
+  "type": "802-3-ethernet",
+  "nameservers": ["192.168.2.1"],
+  "method": "dhcp",
+  "primary": true
+}
+```
+
+#### POST `/network/interface/{interface}/update`
+
+Update information for a single interface
+
+**Options:**
+
+| Option | Description |
+| --------- | ---------------------------------------------------------------------- |
+| `address` | The new IP address for the interface in the X.X.X.X/XX format |
+| `dns` | List of DNS servers to use |
+| `gateway` | The gateway the interface should use |
+| `method` | Set if the interface should use DHCP or not, can be `dhcp` or `static` |
+
+_All options are optional._
+
+**NB!: If you change the `address` or `gateway` you may need to reconnect to the new address**
+
+The result will be a updated object.
+
 ### RESTful for API add-ons
 
 If an add-on will call itself, you can use `/addons/self/...`.
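A hedged example of the interface update call added in this hunk: all field values are illustrative, the `http://supervisor` base URL, the token handling, and the `requests` dependency are assumptions, and the payload only uses the documented `address`, `dns`, `gateway`, and `method` options.

```python
# Sketch: switch an interface to a static configuration via the update
# endpoint described above. Interface name, addresses, and the
# http://supervisor base URL are illustrative assumptions.
import os
import requests

token = os.environ["SUPERVISOR_TOKEN"]
interface = "enp0s31f6"

payload = {
    "method": "static",
    "address": "192.168.2.148/24",
    "gateway": "192.168.2.1",
    "dns": ["192.168.2.1"],
}

response = requests.post(
    f"http://supervisor/network/interface/{interface}/update",
    headers={"Authorization": f"Bearer {token}"},
    json=payload,
    timeout=10,
)
response.raise_for_status()
print(response.json())  # the updated interface object
```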
@@ -460,7 +519,7 @@ Get all available add-ons.
   "advanced": "bool",
   "stage": "stable|experimental|deprecated",
   "repository": "core|local|REP_ID",
-  "version": "LAST_VERSION",
+  "version": "version_latest",
   "installed": "none|INSTALL_VERSION",
   "detached": "bool",
   "available": "bool",
@@ -504,8 +563,9 @@ Get all available add-ons.
   "homeassistant": "null|min Home Assistant Core version",
   "repository": "12345678|null",
   "version": "null|VERSION_INSTALLED",
-  "last_version": "LAST_VERSION",
+  "version_latest": "version_latest",
   "state": "none|started|stopped",
+  "startup": "initialize|system|services|application|once",
   "boot": "auto|manual",
   "build": "bool",
   "options": "{}",
@@ -535,6 +595,7 @@ Get all available add-ons.
   "stdin": "bool",
   "webui": "null|http(s)://[HOST]:port/xy/zx",
   "gpio": "bool",
+  "usb": "[physical_path_to_usb_device]",
   "kernel_modules": "bool",
   "devicetree": "bool",
   "docker_api": "bool",
@@ -549,19 +610,17 @@ Get all available add-ons.
   "ingress_entry": "null|/api/hassio_ingress/slug",
   "ingress_url": "null|/api/hassio_ingress/slug/entry.html",
   "ingress_port": "null|int",
-  "ingress_panel": "null|bool"
+  "ingress_panel": "null|bool",
+  "watchdog": "null|bool"
 }
 ```

 - GET `/addons/{addon}/icon`

 - GET `/addons/{addon}/logo`

 - GET `/addons/{addon}/changelog`

 - GET `/addons/{addon}/documentation`

 - POST `/addons/{addon}/options`
+- POST `/addons/{addon}/options/validate`

 ```json
 {
@@ -573,11 +632,12 @@ Get all available add-ons.
   "options": {},
   "audio_output": "null|0,0",
   "audio_input": "null|0,0",
-  "ingress_panel": "bool"
+  "ingress_panel": "bool",
+  "watchdog": "bool"
 }
 ```

-Reset custom network/audio/options, set it `null`.
+Reset custom network, audio and options, set it to `null`.

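As an illustration of the newly documented `watchdog` flag, a minimal `POST /addons/{addon}/options` body could look like the sketch below; the values are examples only and not part of the changeset:

```json
{
  "watchdog": true,
  "ingress_panel": false
}
```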
 - POST `/addons/{addon}/security`

@@ -590,28 +650,22 @@ This function is not callable by itself.
 ```

 - POST `/addons/{addon}/start`

 - POST `/addons/{addon}/stop`

 - POST `/addons/{addon}/install`

 - POST `/addons/{addon}/uninstall`

 - POST `/addons/{addon}/update`

 - GET `/addons/{addon}/logs`

 Output is the raw Docker log.

 - POST `/addons/{addon}/restart`

 - POST `/addons/{addon}/rebuild`

-Only supported for local build addons
+Only supported for local build add-ons.

 - POST `/addons/{addon}/stdin`

-Write data to add-on stdin
+Write data to add-on stdin.

 - GET `/addons/{addon}/stats`

@@ -632,7 +686,7 @@ Write data to add-on stdin

 - POST `/ingress/session`

-Create a new Session for access to ingress service.
+Create a new session for access to the ingress service.

 ```json
 {
@@ -659,8 +713,8 @@ Return a list of enabled panels.

 - VIEW `/ingress/{token}`

-Ingress WebUI for this Add-on. The addon need support HASS Auth!
-Need ingress session as cookie.
+Ingress WebUI for this add-on. The add-on need support for the Home Assistant
+authentication system. Needs an ingress session as cookie.

 ### discovery

@@ -675,7 +729,10 @@ Need ingress session as cookie.
       "uuid": "uuid",
       "config": {}
     }
-  ]
+  ],
+  "services": {
+    "ozw": ["core_zwave"]
+  }
 }
 ```

@@ -792,10 +849,12 @@ return:
   "supervisor": "version",
   "homeassistant": "version",
   "hassos": "null|version",
+  "docker": "version",
   "hostname": "name",
   "machine": "type",
   "arch": "arch",
   "supported_arch": ["arch1", "arch2"],
+  "supported": "bool",
   "channel": "stable|beta|dev",
   "logging": "debug|info|warning|error|critical",
   "timezone": "Europe/Zurich"
@@ -810,7 +869,7 @@ return:
 {
   "host": "ip-address",
   "version": "1",
-  "latest_version": "2",
+  "version_latest": "2",
   "servers": ["dns://8.8.8.8"],
   "locals": ["dns://xy"]
 }
@@ -853,18 +912,281 @@ return:
 }
 ```

-### Auth / SSO API
-
-You can use the user system on homeassistant. We handle this auth system on
-supervisor.
-
-You can call post `/auth`
+### CLI
+
+- GET `/cli/info`
+
+```json
+{
+  "version": "1",
+  "version_latest": "2"
+}
+```
+
+- POST `/cli/update`
+
+```json
+{
+  "version": "VERSION"
+}
+```
+
+- GET `/cli/stats`
+
+```json
+{
+  "cpu_percent": 0.0,
+  "memory_usage": 283123,
+  "memory_limit": 329392,
+  "memory_percent": 1.4,
+  "network_tx": 0,
+  "network_rx": 0,
+  "blk_read": 0,
+  "blk_write": 0
+}
+```
+
+### Multicast
+
+- GET `/multicast/info`
+
+```json
+{
+  "version": "1",
+  "version_latest": "2"
+}
+```
+
+- POST `/multicast/update`
+
+```json
+{
+  "version": "VERSION"
+}
+```
+
+- POST `/multicast/restart`
+
+- GET `/multicast/logs`
+
+- GET `/multicast/stats`
+
+```json
+{
+  "cpu_percent": 0.0,
+  "memory_usage": 283123,
+  "memory_limit": 329392,
+  "memory_percent": 1.4,
+  "network_tx": 0,
+  "network_rx": 0,
+  "blk_read": 0,
+  "blk_write": 0
+}
+```
+
+### Audio
+
+- GET `/audio/info`
+
+```json
+{
+  "host": "ip-address",
+  "version": "1",
+  "latest_version": "2",
+  "audio": {
+    "card": [
+      {
+        "name": "...",
+        "index": 1,
+        "driver": "...",
+        "profiles": [
+          {
+            "name": "...",
+            "description": "...",
+            "active": false
+          }
+        ]
+      }
+    ],
+    "input": [
+      {
+        "name": "...",
+        "index": 0,
+        "description": "...",
+        "volume": 0.3,
+        "mute": false,
+        "default": false,
+        "card": "null|int",
+        "applications": [
+          {
+            "name": "...",
+            "index": 0,
+            "stream_index": 0,
+            "stream_type": "INPUT",
+            "volume": 0.3,
+            "mute": false,
+            "addon": ""
+          }
+        ]
+      }
+    ],
+    "output": [
+      {
+        "name": "...",
+        "index": 0,
+        "description": "...",
+        "volume": 0.3,
+        "mute": false,
+        "default": false,
+        "card": "null|int",
+        "applications": [
+          {
+            "name": "...",
+            "index": 0,
+            "stream_index": 0,
+            "stream_type": "OUTPUT",
+            "volume": 0.3,
+            "mute": false,
+            "addon": ""
+          }
+        ]
+      }
+    ],
+    "application": [
+      {
+        "name": "...",
+        "index": 0,
+        "stream_index": 0,
+        "stream_type": "OUTPUT",
+        "volume": 0.3,
+        "mute": false,
+        "addon": ""
+      }
+    ]
+  }
+}
+```
+
+- POST `/audio/update`
+
+```json
+{
+  "version": "VERSION"
+}
+```
+
+- POST `/audio/restart`
+
+- POST `/audio/reload`
+
+- GET `/audio/logs`
+
+- POST `/audio/volume/input`
+
+```json
+{
+  "index": "...",
+  "volume": 0.5
+}
+```
+
+- POST `/audio/volume/output`
+
+```json
+{
+  "index": "...",
+  "volume": 0.5
+}
+```
+
+- POST `/audio/volume/{output|input}/application`
+
+```json
+{
+  "index": "...",
+  "volume": 0.5
+}
+```
+
+- POST `/audio/mute/input`
+
+```json
+{
+  "index": "...",
+  "active": false
+}
+```
+
+- POST `/audio/mute/output`
+
+```json
+{
+  "index": "...",
+  "active": false
+}
+```
+
+- POST `/audio/mute/{output|input}/application`
+
+```json
+{
+  "index": "...",
+  "active": false
+}
+```
+
+- POST `/audio/default/input`
+
+```json
+{
+  "name": "..."
+}
+```
+
+- POST `/audio/default/output`
+
+```json
+{
+  "name": "..."
+}
+```
+
+- POST `/audio/profile`
+
+```json
+{
+  "card": "...",
+  "name": "..."
+}
+```
+
+- GET `/audio/stats`
+
+```json
+{
+  "cpu_percent": 0.0,
+  "memory_usage": 283123,
+  "memory_limit": 329392,
+  "memory_percent": 1.4,
+  "network_tx": 0,
+  "network_rx": 0,
+  "blk_read": 0,
+  "blk_write": 0
+}
+```
+
+### Authentication/SSO API
+
+You can use the user system from Home Assistant. The auth system can be handled
+with the Supervisor.
+
+`/auth` is accepting POST calls.
+
 We support:

-- Json `{ "user|name": "...", "password": "..." }`
-- application/x-www-form-urlencoded `user|name=...&password=...`
-- BasicAuth
+- JSON: `{ "user|name": "...", "password": "..." }`
+- `application/x-www-form-urlencoded`: `user|name=...&password=...`
+- Basic Authentication

 * POST `/auth/reset`

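For clarity, a minimal JSON body for `POST /auth` in the format described above; the credentials are placeholders, and the example assumes the `username` spelling of the `user|name` key:

```json
{
  "username": "homeassistant-user",
  "password": "example-password"
}
```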
Dockerfile
@@ -1,37 +1,40 @@
 ARG BUILD_FROM
 FROM $BUILD_FROM

+ENV \
+    S6_SERVICES_GRACETIME=10000 \
+    SUPERVISOR_API=http://localhost
+
 # Install base
-RUN apk add --no-cache \
-    openssl \
-    libffi \
-    musl \
-    git \
-    socat \
-    glib \
-    eudev \
-    eudev-libs
+RUN \
+    apk add --no-cache \
+    eudev \
+    eudev-libs \
+    git \
+    glib \
+    libffi \
+    libpulse \
+    musl \
+    openssl

 ARG BUILD_ARCH
 WORKDIR /usr/src

 # Install requirements
 COPY requirements.txt .
-RUN export MAKEFLAGS="-j$(nproc)" \
+RUN \
+    export MAKEFLAGS="-j$(nproc)" \
     && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
     "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
     -r ./requirements.txt \
     && rm -f requirements.txt

-# Install HassIO
-COPY . hassio
-RUN pip3 install --no-cache-dir -e ./hassio \
-    && python3 -m compileall ./hassio/hassio
+# Install Home Assistant Supervisor
+COPY . supervisor
+RUN \
+    pip3 install --no-cache-dir -e ./supervisor \
+    && python3 -m compileall ./supervisor/supervisor

-# Initialize udev daemon, handle CMD
-COPY entry.sh /bin/
-ENTRYPOINT ["/bin/entry.sh"]

 WORKDIR /
-CMD [ "python3", "-m", "hassio" ]
+COPY rootfs /
LICENSE
@@ -178,7 +178,7 @@
    APPENDIX: How to apply the Apache License to your work.

    To apply the Apache License to your work, attach the following
-   boilerplate notice, with the fields enclosed by brackets "{}"
+   boilerplate notice, with the fields enclosed by brackets "[]"
    replaced with your own identifying information. (Don't include
    the brackets!) The text should be enclosed in the appropriate
    comment syntax for the file format. We also recommend that a
@@ -186,7 +186,7 @@
    same "printed page" as the copyright notice for easier
    identification within third-party archives.

-   Copyright 2017 Pascal Vizeli
+   Copyright [yyyy] [name of copyright owner]

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
@@ -1,3 +1,3 @@
 include LICENSE.md
-graft hassio
+graft supervisor
 recursive-exclude * *.py[co]
README.md
@@ -1,30 +1,26 @@
-[](https://dev.azure.com/home-assistant/Hass.io/_build/latest?definitionId=2&branchName=dev)
+# Home Assistant Supervisor

-# Hass.io
-
 ## First private cloud solution for home automation

-Hass.io is a Docker-based system for managing your Home Assistant installation
-and related applications. The system is controlled via Home Assistant which
-communicates with the Supervisor. The Supervisor provides an API to manage the
-installation. This includes changing network settings or installing
-and updating software.
+Home Assistant (former Hass.io) is a container-based system for managing your
+Home Assistant Core installation and related applications. The system is
+controlled via Home Assistant which communicates with the Supervisor. The
+Supervisor provides an API to manage the installation. This includes changing
+network settings or installing and updating software.

-
-
 ## Installation

-Installation instructions can be found at <https://home-assistant.io/hassio>.
+Installation instructions can be found at https://home-assistant.io/hassio.

 ## Development

-The development of the supervisor is a bit tricky. Not difficult but tricky.
+The development of the Supervisor is not difficult but tricky.

-- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-builder
-- Go into a HassOS device or VM and pull your supervisor.
-- Set the developer modus with cli `hassio supervisor options --channel=dev`
+- You can use the builder to create your Supervisor: https://github.com/home-assistant/hassio-builder
+- Access a HassOS device or VM and pull your Supervisor.
+- Set the developer modus with the CLI tool: `ha supervisor options --channel=dev`
 - Tag it as `homeassistant/xy-hassio-supervisor:latest`
-- Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
+- Restart the service with `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
 - Test your changes

-Small Bugfix or improvements, make a PR. Significant change makes first an RFC.
+For small bugfixes or improvements, make a PR. For significant changes open a RFC first, please. Thanks.
@@ -17,10 +17,14 @@ jobs:
   pool:
     vmImage: "ubuntu-latest"
   steps:
+    - script: |
+        sudo apt-get update
+        sudo apt-get install -y libpulse0 libudev1
+      displayName: "Install Host library"
     - task: UsePythonVersion@0
-      displayName: "Use Python 3.7"
+      displayName: "Use Python 3.8"
       inputs:
-        versionSpec: "3.7"
+        versionSpec: "3.8"
     - script: pip install tox
       displayName: "Install Tox"
     - script: tox
@@ -10,10 +10,8 @@ trigger:
     - "*"
 pr: none
 variables:
-  - name: basePythonTag
-    value: "3.7-alpine3.11"
   - name: versionBuilder
-    value: "6.9"
+    value: "7.0"
   - group: docker

 jobs:
@@ -22,9 +20,9 @@ jobs:
     vmImage: "ubuntu-latest"
   steps:
     - task: UsePythonVersion@0
-      displayName: "Use Python 3.7"
+      displayName: "Use Python 3.8"
       inputs:
-        versionSpec: "3.7"
+        versionSpec: "3.8"
     - script: |
         setup_version="$(python setup.py -V)"
         branch_version="$(Build.SourceBranchName)"
@@ -51,6 +49,5 @@ jobs:
         -v ~/.docker:/root/.docker \
         -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
         homeassistant/amd64-builder:$(versionBuilder) \
-        --supervisor $(basePythonTag) --version $(Build.SourceBranchName) \
-        --all -t /data --docker-hub homeassistant
+        --generic $(Build.SourceBranchName) --all -t /data
       displayName: "Build Release"
@@ -8,53 +8,20 @@ trigger:
 pr: none
 variables:
   - name: versionWheels
-    value: "1.6-3.7-alpine3.11"
-  - group: wheels
+    value: '1.13.0-3.8-alpine3.12'
+resources:
+  repositories:
+    - repository: azure
+      type: github
+      name: 'home-assistant/ci-azure'
+      endpoint: 'home-assistant'

 jobs:
-  - job: "Wheels"
-    timeoutInMinutes: 360
-    pool:
-      vmImage: "ubuntu-latest"
-    strategy:
-      maxParallel: 5
-      matrix:
-        amd64:
-          buildArch: "amd64"
-        i386:
-          buildArch: "i386"
-        armhf:
-          buildArch: "armhf"
-        armv7:
-          buildArch: "armv7"
-        aarch64:
-          buildArch: "aarch64"
-    steps:
-      - script: |
-          sudo apt-get update
-          sudo apt-get install -y --no-install-recommends \
-            qemu-user-static \
-            binfmt-support \
-            curl
-
-          sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
-          sudo update-binfmts --enable qemu-arm
-          sudo update-binfmts --enable qemu-aarch64
-        displayName: "Initial cross build"
-      - script: |
-          mkdir -p .ssh
-          echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
-          ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
-          chmod 600 .ssh/*
-        displayName: "Install ssh key"
-      - script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
-        displayName: "Install wheels builder"
-      - script: |
-          sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
-            homeassistant/$(buildArch)-wheels:$(versionWheels) \
-            --apk "build-base;libffi-dev;openssl-dev" \
-            --index $(wheelsIndex) \
-            --requirement requirements.txt \
-            --upload rsync \
-            --remote wheels@$(wheelsHost):/opt/wheels
-        displayName: "Run wheels build"
+  - template: templates/azp-job-wheels.yaml@azure
+    parameters:
+      builderVersion: '$(versionWheels)'
+      builderApk: 'build-base;libffi-dev;openssl-dev'
+      builderPip: 'Cython'
+      skipBinary: 'aiohttp'
+      wheelsRequirement: 'requirements.txt'
build.json
@@ -0,0 +1,13 @@
+{
+  "image": "homeassistant/{arch}-hassio-supervisor",
+  "build_from": {
+    "aarch64": "homeassistant/aarch64-base-python:3.8-alpine3.12",
+    "armhf": "homeassistant/armhf-base-python:3.8-alpine3.12",
+    "armv7": "homeassistant/armv7-base-python:3.8-alpine3.12",
+    "amd64": "homeassistant/amd64-base-python:3.8-alpine3.12",
+    "i386": "homeassistant/i386-base-python:3.8-alpine3.12"
+  },
+  "labels": {
+    "io.hass.type": "supervisor"
+  }
+}
codecov.yaml
@@ -0,0 +1,11 @@
+codecov:
+  branch: dev
+coverage:
+  status:
+    project:
+      default:
+        target: 40
+        threshold: 0.09
+comment: false
+github_checks:
+  annotations: false
entry.sh
@@ -1,13 +0,0 @@
-#!/bin/bash
-set -e
-
-udevd --daemon
-udevadm trigger
-
-if CMD="$(command -v "$1")"; then
-  shift
-  exec "$CMD" "$@"
-else
-  echo "Command not found: $1"
-  exit 1
-fi
@@ -1 +0,0 @@
-"""Init file for Hass.io."""
@@ -1,51 +0,0 @@
|
|||||||
"""Init file for Hass.io hardware RESTful API."""
|
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
from typing import Any, Dict
|
|
||||||
|
|
||||||
from aiohttp import web
|
|
||||||
|
|
||||||
from .utils import api_process
|
|
||||||
from ..const import (
|
|
||||||
ATTR_SERIAL,
|
|
||||||
ATTR_DISK,
|
|
||||||
ATTR_GPIO,
|
|
||||||
ATTR_AUDIO,
|
|
||||||
ATTR_INPUT,
|
|
||||||
ATTR_OUTPUT,
|
|
||||||
)
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
|
|
||||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class APIHardware(CoreSysAttributes):
|
|
||||||
"""Handle RESTful API for hardware functions."""
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def info(self, request: web.Request) -> Dict[str, Any]:
|
|
||||||
"""Show hardware info."""
|
|
||||||
return {
|
|
||||||
ATTR_SERIAL: list(
|
|
||||||
self.sys_hardware.serial_devices | self.sys_hardware.serial_by_id
|
|
||||||
),
|
|
||||||
ATTR_INPUT: list(self.sys_hardware.input_devices),
|
|
||||||
ATTR_DISK: list(self.sys_hardware.disk_devices),
|
|
||||||
ATTR_GPIO: list(self.sys_hardware.gpio_devices),
|
|
||||||
ATTR_AUDIO: self.sys_hardware.audio_devices,
|
|
||||||
}
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def audio(self, request: web.Request) -> Dict[str, Any]:
|
|
||||||
"""Show ALSA audio devices."""
|
|
||||||
return {
|
|
||||||
ATTR_AUDIO: {
|
|
||||||
ATTR_INPUT: self.sys_host.alsa.input_devices,
|
|
||||||
ATTR_OUTPUT: self.sys_host.alsa.output_devices,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
def trigger(self, request: web.Request) -> None:
|
|
||||||
"""Trigger a udev device reload."""
|
|
||||||
return asyncio.shield(self.sys_hardware.udev_trigger())
|
|
@@ -1,59 +0,0 @@
|
|||||||
"""Init file for Hass.io HassOS RESTful API."""
|
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
from typing import Any, Awaitable, Dict
|
|
||||||
|
|
||||||
from aiohttp import web
|
|
||||||
import voluptuous as vol
|
|
||||||
|
|
||||||
from ..const import (
|
|
||||||
ATTR_BOARD,
|
|
||||||
ATTR_BOOT,
|
|
||||||
ATTR_VERSION,
|
|
||||||
ATTR_VERSION_CLI,
|
|
||||||
ATTR_VERSION_CLI_LATEST,
|
|
||||||
ATTR_VERSION_LATEST,
|
|
||||||
)
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
from .utils import api_process, api_validate
|
|
||||||
|
|
||||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
|
|
||||||
|
|
||||||
|
|
||||||
class APIHassOS(CoreSysAttributes):
|
|
||||||
"""Handle RESTful API for HassOS functions."""
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def info(self, request: web.Request) -> Dict[str, Any]:
|
|
||||||
"""Return HassOS information."""
|
|
||||||
return {
|
|
||||||
ATTR_VERSION: self.sys_hassos.version,
|
|
||||||
ATTR_VERSION_CLI: self.sys_hassos.version_cli,
|
|
||||||
ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
|
|
||||||
ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
|
|
||||||
ATTR_BOARD: self.sys_hassos.board,
|
|
||||||
ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
|
|
||||||
}
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def update(self, request: web.Request) -> None:
|
|
||||||
"""Update HassOS."""
|
|
||||||
body = await api_validate(SCHEMA_VERSION, request)
|
|
||||||
version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)
|
|
||||||
|
|
||||||
await asyncio.shield(self.sys_hassos.update(version))
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def update_cli(self, request: web.Request) -> None:
|
|
||||||
"""Update HassOS CLI."""
|
|
||||||
body = await api_validate(SCHEMA_VERSION, request)
|
|
||||||
version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)
|
|
||||||
|
|
||||||
await asyncio.shield(self.sys_hassos.update_cli(version))
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
def config_sync(self, request: web.Request) -> Awaitable[None]:
|
|
||||||
"""Trigger config reload on HassOS."""
|
|
||||||
return asyncio.shield(self.sys_hassos.config_sync())
|
|
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,16 +0,0 @@
|
|||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright 2018 Google Inc. All Rights Reserved.
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
you may not use this file except in compliance with the License.
|
|
||||||
You may obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
See the License for the specific language governing permissions and
|
|
||||||
limitations under the License.
|
|
||||||
*/
|
|
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
|
|||||||
(self.webpackJsonp=self.webpackJsonp||[]).push([[2],{176:function(e,r,n){"use strict";n.r(r),n.d(r,"codeMirror",function(){return c}),n.d(r,"codeMirrorCss",function(){return i});var a=n(54),o=n.n(a),s=n(169),t=(n(170),n(171),n(11));o.a.commands.save=function(e){Object(t.a)(e.getWrapperElement(),"editor-save")};var c=o.a,i=s.a}}]);
|
|
||||||
//# sourceMappingURL=chunk.92a11ac1b80e0d7839d2.js.map
|
|
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{"version":3,"sources":["webpack:///./src/resources/codemirror.ts"],"names":["__webpack_require__","r","__webpack_exports__","d","codeMirror","codeMirrorCss","codemirror__WEBPACK_IMPORTED_MODULE_0__","codemirror__WEBPACK_IMPORTED_MODULE_0___default","n","codemirror_lib_codemirror_css__WEBPACK_IMPORTED_MODULE_1__","_common_dom_fire_event__WEBPACK_IMPORTED_MODULE_4__","_CodeMirror","commands","save","cm","fireEvent","getWrapperElement","_codeMirrorCss"],"mappings":"sFAAAA,EAAAC,EAAAC,GAAAF,EAAAG,EAAAD,EAAA,+BAAAE,IAAAJ,EAAAG,EAAAD,EAAA,kCAAAG,IAAA,IAAAC,EAAAN,EAAA,IAAAO,EAAAP,EAAAQ,EAAAF,GAAAG,EAAAT,EAAA,KAAAU,GAAAV,EAAA,KAAAA,EAAA,KAAAA,EAAA,KAQAW,IAAYC,SAASC,KAAO,SAACC,GAC3BC,YAAUD,EAAGE,oBAAqB,gBAE7B,IAAMZ,EAAkBO,IAClBN,EAAqBY","file":"chunk.92a11ac1b80e0d7839d2.js","sourcesContent":["// @ts-ignore\nimport _CodeMirror, { Editor } from \"codemirror\";\n// @ts-ignore\nimport _codeMirrorCss from \"codemirror/lib/codemirror.css\";\nimport \"codemirror/mode/yaml/yaml\";\nimport \"codemirror/mode/jinja2/jinja2\";\nimport { fireEvent } from \"../common/dom/fire_event\";\n\n_CodeMirror.commands.save = (cm: Editor) => {\n fireEvent(cm.getWrapperElement(), \"editor-save\");\n};\nexport const codeMirror: any = _CodeMirror;\nexport const codeMirrorCss: any = _codeMirrorCss;\n"],"sourceRoot":""}
|
|
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{"version":3,"sources":["webpack:///./hassio/src/ingress-view/hassio-ingress-view.ts"],"names":["customElement","HassioIngressView","property","this","_addon","html","_templateObject2","name","ingress_url","_templateObject","changedProps","_get","_getPrototypeOf","prototype","call","has","addon","route","path","substr","oldRoute","get","oldAddon","undefined","_fetchData","addonSlug","_ref","_ref2","regeneratorRuntime","async","_context","prev","next","awrap","Promise","all","fetchHassioAddonInfo","hass","Error","createHassioSession","sent","_slicedToArray","ingress","t0","console","error","alert","message","history","back","stop","css","_templateObject3","LitElement"],"mappings":"6/RAmBCA,YAAc,0CACTC,smBACHC,kEACAA,mEACAA,4EAED,WACE,OAAKC,KAAKC,OAMHC,YAAPC,IAC0BH,KAAKC,OAAOG,KACpBJ,KAAKC,OAAOI,aAPrBH,YAAPI,0CAYJ,SAAkBC,GAGhB,GAFAC,EAAAC,EApBEX,EAoBFY,WAAA,eAAAV,MAAAW,KAAAX,KAAmBO,GAEdA,EAAaK,IAAI,SAAtB,CAIA,IAAMC,EAAQb,KAAKc,MAAMC,KAAKC,OAAO,GAE/BC,EAAWV,EAAaW,IAAI,SAC5BC,EAAWF,EAAWA,EAASF,KAAKC,OAAO,QAAKI,EAElDP,GAASA,IAAUM,GACrBnB,KAAKqB,WAAWR,4CAIpB,SAAyBS,GAAzB,IAAAC,EAAAC,EAAAX,EAAA,OAAAY,mBAAAC,MAAA,SAAAC,GAAA,cAAAA,EAAAC,KAAAD,EAAAE,MAAA,cAAAF,EAAAC,KAAA,EAAAD,EAAAE,KAAA,EAAAJ,mBAAAK,MAE0BC,QAAQC,IAAI,CAChCC,YAAqBjC,KAAKkC,KAAMZ,GAAhC,MAAiD,WAC/C,MAAM,IAAIa,MAAM,iCAElBC,YAAoBpC,KAAKkC,MAAzB,MAAqC,WACnC,MAAM,IAAIC,MAAM,4CAPxB,UAAAZ,EAAAI,EAAAU,KAAAb,EAAAc,EAAAf,EAAA,IAEWV,EAFXW,EAAA,IAWee,QAXf,CAAAZ,EAAAE,KAAA,cAYY,IAAIM,MAAM,wCAZtB,OAeInC,KAAKC,OAASY,EAflBc,EAAAE,KAAA,iBAAAF,EAAAC,KAAA,GAAAD,EAAAa,GAAAb,EAAA,SAkBIc,QAAQC,MAARf,EAAAa,IACAG,MAAMhB,EAAAa,GAAII,SAAW,mCACrBC,QAAQC,OApBZ,yBAAAnB,EAAAoB,SAAA,KAAA/C,KAAA,qDAwBA,WACE,OAAOgD,YAAPC,UA7D4BC","file":"chunk.990ee58006b248f55d23.js","sourcesContent":["import {\n LitElement,\n customElement,\n property,\n TemplateResult,\n html,\n PropertyValues,\n CSSResult,\n css,\n} from \"lit-element\";\nimport { HomeAssistant, Route } from \"../../../src/types\";\nimport { createHassioSession } from \"../../../src/data/hassio/supervisor\";\nimport {\n HassioAddonDetails,\n fetchHassioAddonInfo,\n} from \"../../../src/data/hassio/addon\";\nimport \"../../../src/layouts/hass-loading-screen\";\nimport \"../../../src/layouts/hass-subpage\";\n\n@customElement(\"hassio-ingress-view\")\nclass HassioIngressView extends LitElement {\n @property() public hass!: HomeAssistant;\n @property() public route!: Route;\n @property() private _addon?: HassioAddonDetails;\n\n protected render(): TemplateResult {\n if (!this._addon) {\n return html`\n <hass-loading-screen></hass-loading-screen>\n `;\n }\n\n return html`\n <hass-subpage .header=${this._addon.name} hassio>\n <iframe src=${this._addon.ingress_url}></iframe>\n </hass-subpage>\n `;\n }\n\n protected updated(changedProps: PropertyValues) {\n super.firstUpdated(changedProps);\n\n if (!changedProps.has(\"route\")) {\n return;\n }\n\n const addon = this.route.path.substr(1);\n\n const oldRoute = changedProps.get(\"route\") as this[\"route\"] | undefined;\n const oldAddon = oldRoute ? 
oldRoute.path.substr(1) : undefined;\n\n if (addon && addon !== oldAddon) {\n this._fetchData(addon);\n }\n }\n\n private async _fetchData(addonSlug: string) {\n try {\n const [addon] = await Promise.all([\n fetchHassioAddonInfo(this.hass, addonSlug).catch(() => {\n throw new Error(\"Failed to fetch add-on info\");\n }),\n createHassioSession(this.hass).catch(() => {\n throw new Error(\"Failed to create an ingress session\");\n }),\n ]);\n\n if (!addon.ingress) {\n throw new Error(\"This add-on does not support ingress\");\n }\n\n this._addon = addon;\n } catch (err) {\n // tslint:disable-next-line\n console.error(err);\n alert(err.message || \"Unknown error starting ingress.\");\n history.back();\n }\n }\n\n static get styles(): CSSResult {\n return css`\n iframe {\n display: block;\n width: 100%;\n height: 100%;\n border: 0;\n }\n paper-icon-button {\n color: var(--text-primary-color);\n }\n `;\n }\n}\n\ndeclare global {\n interface HTMLElementTagNameMap {\n \"hassio-ingress-view\": HassioIngressView;\n }\n}\n"],"sourceRoot":""}
|
|
File diff suppressed because one or more lines are too long
@@ -1,10 +0,0 @@
|
|||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at
|
|
||||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
|
||||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
|
||||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
|
||||||
part of the polymer project is also subject to an additional IP rights grant
|
|
||||||
found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
|
|||||||
(self.webpackJsonp=self.webpackJsonp||[]).push([[12],[,function(o,t){var n=document.createElement("template");n.setAttribute("style","display: none;"),n.innerHTML='<style>\n@font-face {\nfont-family: "Roboto";\nsrc:\n local("Roboto Thin"),\n local("Roboto-Thin"),\n url(/static/fonts/roboto/Roboto-Thin.woff2) format("woff2");\nfont-weight: 100;\nfont-style: normal;\n}\n@font-face {\nfont-family: "Roboto";\nsrc:\n local("Roboto Thin Italic"),\n local("Roboto-ThinItalic"),\n url(/static/fonts/roboto/Roboto-ThinItalic.woff2) format("woff2");\nfont-weight: 100;\nfont-style: italic;\n}\n@font-face {\nfont-family: "Roboto";\nsrc:\n local("Roboto Light"),\n local("Roboto-Light"),\n url(/static/fonts/roboto/Roboto-Light.woff2) format("woff2");\nfont-weight: 300;\nfont-style: normal;\n}\n@font-face {\nfont-family: "Roboto";\nsrc:\n local("Roboto Light Italic"),\n local("Roboto-LightItalic"),\n url(/static/fonts/roboto/Roboto-LightItalic.woff2) format("woff2");\nfont-weight: 300;\nfont-style: italic;\n}\n@font-face {\nfont-family: "Roboto";\nsrc:\n local("Roboto Regular"),\n local("Roboto-Regular"),\n url(/static/fonts/roboto/Roboto-Regular.woff2) format("woff2");\nfont-weight: 400;\nfont-style: normal;\n}\n@font-face {\nfont-family: "Roboto";\nsrc:\n local("Roboto Italic"),\n local("Roboto-Italic"),\n url(/static/fonts/roboto/Roboto-RegularItalic.woff2) format("woff2");\nfont-weight: 400;\nfont-style: italic;\n}\n@font-face {\nfont-family: "Roboto";\nsrc:\n local("Roboto Medium"),\n local("Roboto-Medium"),\n url(/static/fonts/roboto/Roboto-Medium.woff2) format("woff2");\nfont-weight: 500;\nfont-style: normal;\n}\n@font-face {\nfont-family: "Roboto";\nsrc:\n local("Roboto Medium Italic"),\n local("Roboto-MediumItalic"),\n url(/static/fonts/roboto/Roboto-MediumItalic.woff2) format("woff2");\nfont-weight: 500;\nfont-style: italic;\n}\n@font-face {\nfont-family: "Roboto";\nsrc:\n local("Roboto Bold"),\n local("Roboto-Bold"),\n url(/static/fonts/roboto/Roboto-Bold.woff2) format("woff2");\nfont-weight: 700;\nfont-style: normal;\n}\n@font-face {\nfont-family: "Roboto";\nsrc:\n local("Roboto Bold Italic"),\n local("Roboto-BoldItalic"),\n url(/static/fonts/roboto/Roboto-BoldItalic.woff2) format("woff2");\nfont-weight: 700;\nfont-style: italic;\n}\n@font-face {\nfont-family: "Roboto";\nsrc:\n local("Roboto Black"),\n local("Roboto-Black"),\n url(/static/fonts/roboto/Roboto-Black.woff2) format("woff2");\nfont-weight: 900;\nfont-style: normal;\n}\n@font-face {\nfont-family: "Roboto";\nsrc:\n local("Roboto Black Italic"),\n local("Roboto-BlackItalic"),\n url(/static/fonts/roboto/Roboto-BlackItalic.woff2) format("woff2");\nfont-weight: 900;\nfont-style: italic;\n}\n</style>',document.head.appendChild(n.content)}]]);
|
|
||||||
//# sourceMappingURL=chunk.b2dce600432c76a53d8c.js.map
|
|
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{"version":3,"sources":["webpack:///./src/resources/roboto.js"],"names":["documentContainer","document","createElement","setAttribute","innerHTML","head","appendChild","content"],"mappings":"qEAAA,IAAMA,EAAoBC,SAASC,cAAc,YACjDF,EAAkBG,aAAa,QAAS,kBAExCH,EAAkBI,UAAlB,6gFA+GAH,SAASI,KAAKC,YAAYN,EAAkBO","file":"chunk.b2dce600432c76a53d8c.js","sourcesContent":["const documentContainer = document.createElement(\"template\");\ndocumentContainer.setAttribute(\"style\", \"display: none;\");\n\ndocumentContainer.innerHTML = `<style>\n@font-face {\nfont-family: \"Roboto\";\nsrc:\n local(\"Roboto Thin\"),\n local(\"Roboto-Thin\"),\n url(/static/fonts/roboto/Roboto-Thin.woff2) format(\"woff2\");\nfont-weight: 100;\nfont-style: normal;\n}\n@font-face {\nfont-family: \"Roboto\";\nsrc:\n local(\"Roboto Thin Italic\"),\n local(\"Roboto-ThinItalic\"),\n url(/static/fonts/roboto/Roboto-ThinItalic.woff2) format(\"woff2\");\nfont-weight: 100;\nfont-style: italic;\n}\n@font-face {\nfont-family: \"Roboto\";\nsrc:\n local(\"Roboto Light\"),\n local(\"Roboto-Light\"),\n url(/static/fonts/roboto/Roboto-Light.woff2) format(\"woff2\");\nfont-weight: 300;\nfont-style: normal;\n}\n@font-face {\nfont-family: \"Roboto\";\nsrc:\n local(\"Roboto Light Italic\"),\n local(\"Roboto-LightItalic\"),\n url(/static/fonts/roboto/Roboto-LightItalic.woff2) format(\"woff2\");\nfont-weight: 300;\nfont-style: italic;\n}\n@font-face {\nfont-family: \"Roboto\";\nsrc:\n local(\"Roboto Regular\"),\n local(\"Roboto-Regular\"),\n url(/static/fonts/roboto/Roboto-Regular.woff2) format(\"woff2\");\nfont-weight: 400;\nfont-style: normal;\n}\n@font-face {\nfont-family: \"Roboto\";\nsrc:\n local(\"Roboto Italic\"),\n local(\"Roboto-Italic\"),\n url(/static/fonts/roboto/Roboto-RegularItalic.woff2) format(\"woff2\");\nfont-weight: 400;\nfont-style: italic;\n}\n@font-face {\nfont-family: \"Roboto\";\nsrc:\n local(\"Roboto Medium\"),\n local(\"Roboto-Medium\"),\n url(/static/fonts/roboto/Roboto-Medium.woff2) format(\"woff2\");\nfont-weight: 500;\nfont-style: normal;\n}\n@font-face {\nfont-family: \"Roboto\";\nsrc:\n local(\"Roboto Medium Italic\"),\n local(\"Roboto-MediumItalic\"),\n url(/static/fonts/roboto/Roboto-MediumItalic.woff2) format(\"woff2\");\nfont-weight: 500;\nfont-style: italic;\n}\n@font-face {\nfont-family: \"Roboto\";\nsrc:\n local(\"Roboto Bold\"),\n local(\"Roboto-Bold\"),\n url(/static/fonts/roboto/Roboto-Bold.woff2) format(\"woff2\");\nfont-weight: 700;\nfont-style: normal;\n}\n@font-face {\nfont-family: \"Roboto\";\nsrc:\n local(\"Roboto Bold Italic\"),\n local(\"Roboto-BoldItalic\"),\n url(/static/fonts/roboto/Roboto-BoldItalic.woff2) format(\"woff2\");\nfont-weight: 700;\nfont-style: italic;\n}\n@font-face {\nfont-family: \"Roboto\";\nsrc:\n local(\"Roboto Black\"),\n local(\"Roboto-Black\"),\n url(/static/fonts/roboto/Roboto-Black.woff2) format(\"woff2\");\nfont-weight: 900;\nfont-style: normal;\n}\n@font-face {\nfont-family: \"Roboto\";\nsrc:\n local(\"Roboto Black Italic\"),\n local(\"Roboto-BlackItalic\"),\n url(/static/fonts/roboto/Roboto-BlackItalic.woff2) format(\"woff2\");\nfont-weight: 900;\nfont-style: italic;\n}\n</style>`;\n\ndocument.head.appendChild(documentContainer.content);\n\n/**\n@license\nCopyright (c) 2015 The Polymer Project Authors. 
All rights reserved.\nThis code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt\nThe complete set of authors may be found at http://polymer.github.io/AUTHORS.txt\nThe complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt\nCode distributed by Google as part of the polymer project is also\nsubject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt\n*/\n/*\n FIXME(polymer-modulizer): the above comments were extracted\n from HTML and may be out of place here. Review them and\n then delete this comment!\n*/\n"],"sourceRoot":""}
|
|
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,20 +0,0 @@
|
|||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
|
||||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
|
||||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
|
||||||
Code distributed by Google as part of the polymer project is also
|
|
||||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at
|
|
||||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
|
||||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
|
||||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
|
||||||
part of the polymer project is also subject to an additional IP rights grant
|
|
||||||
found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,28 +0,0 @@
|
|||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at
|
|
||||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
|
||||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
|
||||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
|
||||||
part of the polymer project is also subject to an additional IP rights grant
|
|
||||||
found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
||||||
|
|
||||||
/*!
|
|
||||||
* The buffer module from node.js, for the browser.
|
|
||||||
*
|
|
||||||
* @author Feross Aboukhadijeh <feross@feross.org> <http://feross.org>
|
|
||||||
* @license MIT
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at
|
|
||||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
|
||||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
|
||||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
|
||||||
part of the polymer project is also subject to an additional IP rights grant
|
|
||||||
found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
|
|||||||
!function(e){function n(n){for(var t,o,a=n[0],i=n[1],c=0,f=[];c<a.length;c++)o=a[c],Object.prototype.hasOwnProperty.call(r,o)&&r[o]&&f.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(u&&u(n);f.length;)f.shift()()}var t={},r={3:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,c=document.createElement("script");c.charset="utf-8",c.timeout=120,o.nc&&c.setAttribute("nonce",o.nc),c.src=function(e){return o.p+"chunk."+{0:"50202a3f8d4670c9454d",1:"9cea224f33b375867edd",2:"c0a46a38d689ab648885",4:"b21a4609308c9b8ef180",5:"a1b6b616fc89c412f5b6",6:"900c5d3fab8b6ebdcbc6",7:"a4f9950b101883805252",8:"884d6e32c83f99e41040",9:"84aaaba4c4734f1c2e21",10:"12902324b918e12549ba"}[e]+".js"}(e);var u=new Error;i=function(n){c.onerror=c.onload=null,clearTimeout(f);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src;u.message="Loading chunk "+e+" failed.\n("+o+": "+a+")",u.name="ChunkLoadError",u.type=o,u.request=a,t[1](u)}r[e]=void 0}};var f=setTimeout(function(){i({type:"timeout",target:c})},12e4);c.onerror=c.onload=i,document.head.appendChild(c)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=self.webpackJsonp=self.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var c=0;c<a.length;c++)n(a[c]);var u=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(5)]).then(t.bind(null,2)),Promise.all([t.e(0),t.e(10),t.e(7)]).then(t.bind(null,1))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
|
|
||||||
//# sourceMappingURL=entrypoint.582baa2f.js.map
|
|
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
|
|||||||
!function(e){function n(n){for(var t,o,a=n[0],i=n[1],c=0,d=[];c<a.length;c++)o=a[c],Object.prototype.hasOwnProperty.call(r,o)&&r[o]&&d.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(u&&u(n);d.length;)d.shift()()}var t={},r={6:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,c=document.createElement("script");c.charset="utf-8",c.timeout=120,o.nc&&c.setAttribute("nonce",o.nc),c.src=function(e){return o.p+"chunk."+{0:"87b1d37fc9b8a6f7e2a6",1:"e46c606dd9100816af4e",2:"92a11ac1b80e0d7839d2",3:"429840c83fad61bc51a8",4:"715824f4764bdbe425b1",5:"9d371c8143226d4eaaee",7:"43e40fd69686ad51301d",8:"0b82745c7bdffe5c1404",9:"990ee58006b248f55d23",10:"4d45ee0a3d852768f97e",11:"b60200a57d6f63941b30",12:"b2dce600432c76a53d8c",13:"8527374a266cecf93aa9",14:"f49e500cf58ea310d452",15:"d4931d72592ad48ba2be"}[e]+".js"}(e);var u=new Error;i=function(n){c.onerror=c.onload=null,clearTimeout(d);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src;u.message="Loading chunk "+e+" failed.\n("+o+": "+a+")",u.name="ChunkLoadError",u.type=o,u.request=a,t[1](u)}r[e]=void 0}};var d=setTimeout(function(){i({type:"timeout",target:c})},12e4);c.onerror=c.onload=i,document.head.appendChild(c)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=self.webpackJsonp=self.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var c=0;c<a.length;c++)n(a[c]);var u=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){t.e(12).then(t.t.bind(null,1,7)),Promise.all([t.e(1),t.e(8)]).then(t.bind(null,3)),Promise.all([t.e(1),t.e(15),t.e(10)]).then(t.bind(null,2))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
|
|
||||||
//# sourceMappingURL=entrypoint.js.map
|
|
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,38 +0,0 @@
|
|||||||
<!doctype html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<title>Hass.io</title>
|
|
||||||
<meta name='viewport' content='width=device-width, user-scalable=no'>
|
|
||||||
<style>
|
|
||||||
body {
|
|
||||||
height: 100vh;
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
<script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<hassio-app></hassio-app>
|
|
||||||
<script>
|
|
||||||
function addScript(src) {
|
|
||||||
var e = document.createElement('script');
|
|
||||||
e.src = src;
|
|
||||||
document.write(e.outerHTML);
|
|
||||||
}
|
|
||||||
var webComponentsSupported = (
|
|
||||||
'customElements' in window &&
|
|
||||||
'import' in document.createElement('link') &&
|
|
||||||
'content' in document.createElement('template'));
|
|
||||||
if (!webComponentsSupported) {
|
|
||||||
addScript('/static/webcomponents-lite.js');
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
<!--
|
|
||||||
Disabled while we make Home Assistant able to serve the right files.
|
|
||||||
<script src="./app.js"></script>
|
|
||||||
-->
|
|
||||||
<link rel='import' href='./hassio-app.html'>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
Binary file not shown.
@@ -1,43 +0,0 @@
{
  "vendors~confirmation~hassio-addon-view.js": "/api/hassio/app/chunk.87b1d37fc9b8a6f7e2a6.js",
  "vendors~confirmation~hassio-addon-view.js.map": "/api/hassio/app/chunk.87b1d37fc9b8a6f7e2a6.js.map",
  "vendors~hassio-icons~hassio-main.js": "/api/hassio/app/chunk.e46c606dd9100816af4e.js",
  "vendors~hassio-icons~hassio-main.js.map": "/api/hassio/app/chunk.e46c606dd9100816af4e.js.map",
  "codemirror.js": "/api/hassio/app/chunk.92a11ac1b80e0d7839d2.js",
  "codemirror.js.map": "/api/hassio/app/chunk.92a11ac1b80e0d7839d2.js.map",
  "confirmation.js": "/api/hassio/app/chunk.429840c83fad61bc51a8.js",
  "confirmation.js.map": "/api/hassio/app/chunk.429840c83fad61bc51a8.js.map",
  "dialog-hassio-markdown.js": "/api/hassio/app/chunk.715824f4764bdbe425b1.js",
  "dialog-hassio-markdown.js.map": "/api/hassio/app/chunk.715824f4764bdbe425b1.js.map",
  "dialog-hassio-snapshot.js": "/api/hassio/app/chunk.9d371c8143226d4eaaee.js",
  "dialog-hassio-snapshot.js.map": "/api/hassio/app/chunk.9d371c8143226d4eaaee.js.map",
  "entrypoint.js": "/api/hassio/app/entrypoint.js",
  "entrypoint.js.map": "/api/hassio/app/entrypoint.js.map",
  "hassio-addon-view.js": "/api/hassio/app/chunk.43e40fd69686ad51301d.js",
  "hassio-addon-view.js.map": "/api/hassio/app/chunk.43e40fd69686ad51301d.js.map",
  "hassio-icons.js": "/api/hassio/app/chunk.0b82745c7bdffe5c1404.js",
  "hassio-icons.js.map": "/api/hassio/app/chunk.0b82745c7bdffe5c1404.js.map",
  "hassio-ingress-view.js": "/api/hassio/app/chunk.990ee58006b248f55d23.js",
  "hassio-ingress-view.js.map": "/api/hassio/app/chunk.990ee58006b248f55d23.js.map",
  "hassio-main.js": "/api/hassio/app/chunk.4d45ee0a3d852768f97e.js",
  "hassio-main.js.map": "/api/hassio/app/chunk.4d45ee0a3d852768f97e.js.map",
  "mdi-icons.js": "/api/hassio/app/chunk.b60200a57d6f63941b30.js",
  "mdi-icons.js.map": "/api/hassio/app/chunk.b60200a57d6f63941b30.js.map",
  "roboto.js": "/api/hassio/app/chunk.b2dce600432c76a53d8c.js",
  "roboto.js.map": "/api/hassio/app/chunk.b2dce600432c76a53d8c.js.map",
  "vendors~codemirror.js": "/api/hassio/app/chunk.8527374a266cecf93aa9.js",
  "vendors~codemirror.js.map": "/api/hassio/app/chunk.8527374a266cecf93aa9.js.map",
  "vendors~hassio-addon-view.js": "/api/hassio/app/chunk.f49e500cf58ea310d452.js",
  "vendors~hassio-addon-view.js.map": "/api/hassio/app/chunk.f49e500cf58ea310d452.js.map",
  "vendors~hassio-main.js": "/api/hassio/app/chunk.d4931d72592ad48ba2be.js",
  "vendors~hassio-main.js.map": "/api/hassio/app/chunk.d4931d72592ad48ba2be.js.map",
  "201359fd5a526afe13ef.worker.js": "/api/hassio/app/201359fd5a526afe13ef.worker.js",
  "201359fd5a526afe13ef.worker.js.map": "/api/hassio/app/201359fd5a526afe13ef.worker.js.map",
  "chunk.429840c83fad61bc51a8.js.LICENSE": "/api/hassio/app/chunk.429840c83fad61bc51a8.js.LICENSE",
  "chunk.715824f4764bdbe425b1.js.LICENSE": "/api/hassio/app/chunk.715824f4764bdbe425b1.js.LICENSE",
  "chunk.87b1d37fc9b8a6f7e2a6.js.LICENSE": "/api/hassio/app/chunk.87b1d37fc9b8a6f7e2a6.js.LICENSE",
  "chunk.9d371c8143226d4eaaee.js.LICENSE": "/api/hassio/app/chunk.9d371c8143226d4eaaee.js.LICENSE",
  "chunk.d4931d72592ad48ba2be.js.LICENSE": "/api/hassio/app/chunk.d4931d72592ad48ba2be.js.LICENSE",
  "chunk.e46c606dd9100816af4e.js.LICENSE": "/api/hassio/app/chunk.e46c606dd9100816af4e.js.LICENSE",
  "chunk.f49e500cf58ea310d452.js.LICENSE": "/api/hassio/app/chunk.f49e500cf58ea310d452.js.LICENSE"
}
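The manifest above maps each logical bundle name to its content-hashed path under /api/hassio/app/. As a minimal lookup sketch (the helper name and manifest filename are illustrative, not part of this repository), resolving a logical name to the URL a client should fetch could look like:

import json

def resolve_chunk(manifest_path: str, logical_name: str) -> str:
    """Return the hashed URL for a logical bundle name such as 'hassio-main.js'."""
    with open(manifest_path, encoding="utf-8") as handle:
        manifest = json.load(handle)
    return manifest[logical_name]

# resolve_chunk("manifest.json", "hassio-main.js")
# -> "/api/hassio/app/chunk.4d45ee0a3d852768f97e.js"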
hassio/core.py
@@ -1,201 +0,0 @@
"""Main file for Hass.io."""
from contextlib import suppress
import asyncio
import logging

import async_timeout

from .coresys import CoreSysAttributes
from .const import (
    STARTUP_SYSTEM,
    STARTUP_SERVICES,
    STARTUP_APPLICATION,
    STARTUP_INITIALIZE,
)
from .exceptions import HassioError, HomeAssistantError, SupervisorUpdateError

_LOGGER: logging.Logger = logging.getLogger(__name__)


class HassIO(CoreSysAttributes):
    """Main object of Hass.io."""

    def __init__(self, coresys):
        """Initialize Hass.io object."""
        self.coresys = coresys

    async def connect(self):
        """Connect Supervisor container."""
        await self.sys_supervisor.load()

    async def setup(self):
        """Setup HassIO orchestration."""
        # Load DBus
        await self.sys_dbus.load()

        # Load Host
        await self.sys_host.load()

        # Load CoreDNS
        await self.sys_dns.load()

        # Load Home Assistant
        await self.sys_homeassistant.load()

        # Load CPU/Arch
        await self.sys_arch.load()

        # Load HassOS
        await self.sys_hassos.load()

        # Load Stores
        await self.sys_store.load()

        # Load Add-ons
        await self.sys_addons.load()

        # rest api views
        await self.sys_api.load()

        # load last available data
        await self.sys_updater.load()

        # load last available data
        await self.sys_snapshots.load()

        # load services
        await self.sys_services.load()

        # Load discovery
        await self.sys_discovery.load()

        # Load ingress
        await self.sys_ingress.load()

        # Load secrets
        await self.sys_secrets.load()

    async def start(self):
        """Start Hass.io orchestration."""
        await self.sys_api.start()

        # Mark booted partition as healthy
        if self.sys_hassos.available:
            await self.sys_hassos.mark_healthy()

        # On release channel, try update itself
        if self.sys_supervisor.need_update:
            try:
                if self.sys_dev:
                    _LOGGER.warning("Ignore Hass.io updates on dev!")
                else:
                    await self.sys_supervisor.update()
            except SupervisorUpdateError:
                _LOGGER.fatal(
                    "Can't update supervisor! This will break some Add-ons or affect "
                    "future version of Home Assistant!"
                )

        # Start addon mark as initialize
        await self.sys_addons.boot(STARTUP_INITIALIZE)

        try:
            # HomeAssistant is already running / supervisor have only reboot
            if self.sys_hardware.last_boot == self.sys_config.last_boot:
                _LOGGER.info("Hass.io reboot detected")
                return

            # reset register services / discovery
            self.sys_services.reset()

            # start addon mark as system
            await self.sys_addons.boot(STARTUP_SYSTEM)

            # start addon mark as services
            await self.sys_addons.boot(STARTUP_SERVICES)

            # run HomeAssistant
            if (
                self.sys_homeassistant.boot
                and not await self.sys_homeassistant.is_running()
            ):
                with suppress(HomeAssistantError):
                    await self.sys_homeassistant.start()
            else:
                _LOGGER.info("Skip start of Home Assistant")

            # start addon mark as application
            await self.sys_addons.boot(STARTUP_APPLICATION)

            # store new last boot
            self._update_last_boot()

        finally:
            # Add core tasks into scheduler
            await self.sys_tasks.load()

            # If landingpage / run upgrade in background
            if self.sys_homeassistant.version == "landingpage":
                self.sys_create_task(self.sys_homeassistant.install())

            _LOGGER.info("Hass.io is up and running")

    async def stop(self):
        """Stop a running orchestration."""
        # don't process scheduler anymore
        self.sys_scheduler.suspend = True

        # store new last boot / prevent time adjustments
        self._update_last_boot()

        # process async stop tasks
        try:
            with async_timeout.timeout(10):
                await asyncio.wait(
                    [
                        self.sys_api.stop(),
                        self.sys_websession.close(),
                        self.sys_websession_ssl.close(),
                        self.sys_ingress.unload(),
                        self.sys_dns.unload(),
                    ]
                )
        except asyncio.TimeoutError:
            _LOGGER.warning("Force Shutdown!")

        _LOGGER.info("Hass.io is down")

    async def shutdown(self):
        """Shutdown all running containers in correct order."""
        await self.sys_addons.shutdown(STARTUP_APPLICATION)

        # Close Home Assistant
        with suppress(HassioError):
            await self.sys_homeassistant.stop()

        await self.sys_addons.shutdown(STARTUP_SERVICES)
        await self.sys_addons.shutdown(STARTUP_SYSTEM)
        await self.sys_addons.shutdown(STARTUP_INITIALIZE)

    def _update_last_boot(self):
        """Update last boot time."""
        self.sys_config.last_boot = self.sys_hardware.last_boot
        self.sys_config.save_data()

    async def repair(self):
        """Repair system integrity."""
        _LOGGER.info("Start repairing of Hass.io Environment")
        await self.sys_run_in_executor(self.sys_docker.repair)

        # Restore core functionality
        await self.sys_dns.repair()
        await self.sys_addons.repair()
        await self.sys_homeassistant.repair()

        # Fix HassOS specific
        if self.sys_hassos.available:
            await self.sys_hassos.repair_cli()

        # Tag version for latest
        await self.sys_supervisor.repair()
        _LOGGER.info("Finished repairing of Hass.io Environment")
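The HassIO object above carries the whole orchestration lifecycle: connect(), setup(), start(), stop(), shutdown(). A hedged sketch of how a bootstrap routine could drive it, using only methods defined in this file (the coresys argument and the run-forever wait are placeholders, not the Supervisor's actual entry point):

import asyncio

async def run_supervisor(coresys):
    """Drive the HassIO lifecycle: connect, setup, start, and stop on exit."""
    core = HassIO(coresys)

    await core.connect()  # attach the Supervisor container
    await core.setup()    # load D-Bus, host, DNS, Home Assistant, add-ons, ...
    await core.start()    # boot add-ons by stage and start Home Assistant

    try:
        # Placeholder: block until an external stop request cancels this task.
        await asyncio.Event().wait()
    finally:
        await core.stop()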
@@ -1,17 +0,0 @@
pcm.!default {
    type asym
    capture.pcm "mic"
    playback.pcm "speaker"
}
pcm.mic {
    type plug
    slave {
        pcm "hw:$input"
    }
}
pcm.speaker {
    type plug
    slave {
        pcm "hw:$output"
    }
}
@@ -1,18 +0,0 @@
{
    "raspberrypi3": {
        "bcm2835 - bcm2835 ALSA": {
            "0,0": "Raspberry Jack",
            "0,1": "Raspberry HDMI"
        },
        "output": "0,0",
        "input": null
    },
    "raspberrypi2": {
        "output": "0,0",
        "input": null
    },
    "raspberrypi": {
        "output": "0,0",
        "input": null
    }
}
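The asound template earlier in this diff references $input and $output, and the JSON above supplies per-board defaults (for example "0,0" for the Raspberry Pi jack). A hedged sketch of how such a placeholder could be filled with string.Template; the Supervisor's actual substitution code may differ:

from string import Template

# Only the speaker block of the asound template is inlined here for brevity.
SPEAKER_TMPL = Template(
    'pcm.speaker {\n'
    '    type plug\n'
    '    slave {\n'
    '        pcm "hw:$output"\n'
    '    }\n'
    '}\n'
)

def render_speaker(audio_db: dict, board: str) -> str:
    """Substitute the board's default output device into the template."""
    return SPEAKER_TMPL.substitute(output=audio_db[board]["output"])

# render_speaker({"raspberrypi3": {"output": "0,0", "input": None}}, "raspberrypi3")
# produces a pcm.speaker block containing: pcm "hw:0,0"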
@@ -1,15 +0,0 @@
.:53 {
    log
    errors
    hosts /config/hosts {
        fallthrough
    }
    template ANY AAAA local.hass.io hassio {
        rcode NOERROR
    }
    forward . $servers {
        except local.hass.io
        policy sequential
        health_check 10s
    }
}
@@ -1,11 +0,0 @@
"""Discovery service for Home Panel."""
import voluptuous as vol

from hassio.validate import network_port

from ..const import ATTR_HOST, ATTR_PORT


SCHEMA = vol.Schema(
    {vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): network_port}
)
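As a usage sketch only (the "host"/"port" key names are assumed values of ATTR_HOST/ATTR_PORT, and network_port is assumed to reject ports outside 1-65535), a Home Panel discovery payload would be validated like this:

import voluptuous as vol

valid = SCHEMA({"host": "172.30.32.1", "port": 8234})
# -> {"host": "172.30.32.1", "port": 8234}

try:
    SCHEMA({"host": "172.30.32.1", "port": 99999})
except vol.Invalid as err:
    print(f"Rejected discovery payload: {err}")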
Some files were not shown because too many files have changed in this diff.