Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-08-11 10:09:21 +00:00
Compare commits
462 Commits
Commits: dd3ba93308 … 1e723cf0e3 (462 total).
.devcontainer/Dockerfile (modified)

@@ -1,4 +1,4 @@
-FROM python:3.7
+FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.8
 
 WORKDIR /workspaces
 
@@ -13,7 +13,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     && apt-get update && apt-get install -y --no-install-recommends \
         nodejs \
         yarn \
-    && curl -o - https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
+    && curl -o - https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash \
    && rm -rf /var/lib/apt/lists/*
 ENV NVM_DIR /root/.nvm
 
@@ -33,11 +33,16 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     containerd.io \
     && rm -rf /var/lib/apt/lists/*
 
 # Install tools
 RUN apt-get update && apt-get install -y --no-install-recommends \
     jq \
     dbus \
     network-manager \
     libpulse0 \
     && rm -rf /var/lib/apt/lists/*
 
 # Install Python dependencies from requirements.txt if it exists
 COPY requirements.txt requirements_tests.txt ./
 RUN pip3 install -r requirements.txt -r requirements_tests.txt \
     && pip3 install tox \
     && rm -f requirements.txt requirements_tests.txt
 
 # Set the default shell to bash instead of sh
 ENV SHELL /bin/bash
.devcontainer/devcontainer.json (modified)

@@ -1,31 +1,32 @@ Before:

// See https://aka.ms/vscode-remote/devcontainer.json for format details.
{
  "name": "Hass.io dev",
  "context": "..",
  "dockerFile": "Dockerfile",
  "appPort": "9123:8123",
  "runArgs": [
    "-e",
    "GIT_EDITOR=code --wait",
    "--privileged"
  ],
  "extensions": [
    "ms-python.python",
    "visualstudioexptteam.vscodeintellicode",
    "esbenp.prettier-vscode"
  ],
  "settings": {
    "python.pythonPath": "/usr/local/bin/python",
    "python.linting.pylintEnabled": true,
    "python.linting.enabled": true,
    "python.formatting.provider": "black",
    "python.formatting.blackArgs": [
      "--target-version",
      "py37"
    ],
    "editor.formatOnPaste": false,
    "editor.formatOnSave": true,
    "editor.formatOnType": true,
    "files.trimTrailingWhitespace": true
  }
}

After:

// See https://aka.ms/vscode-remote/devcontainer.json for format details.
{
  "name": "Supervisor dev",
  "context": "..",
  "dockerFile": "Dockerfile",
  "appPort": "9123:8123",
  "postCreateCommand": "pre-commit install",
  "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
  "extensions": [
    "ms-python.python",
    "ms-python.vscode-pylance",
    "visualstudioexptteam.vscodeintellicode",
    "esbenp.prettier-vscode"
  ],
  "settings": {
    "terminal.integrated.shell.linux": "/bin/bash",
    "editor.formatOnPaste": false,
    "editor.formatOnSave": true,
    "editor.formatOnType": true,
    "files.trimTrailingWhitespace": true,
    "python.pythonPath": "/usr/local/bin/python3",
    "python.linting.pylintEnabled": true,
    "python.linting.enabled": true,
    "python.formatting.provider": "black",
    "python.formatting.blackArgs": ["--target-version", "py38"],
    "python.formatting.blackPath": "/usr/local/bin/black",
    "python.linting.banditPath": "/usr/local/bin/bandit",
    "python.linting.flake8Path": "/usr/local/bin/flake8",
    "python.linting.mypyPath": "/usr/local/bin/mypy",
    "python.linting.pylintPath": "/usr/local/bin/pylint",
    "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
  }
}
.dockerignore (modified)

@@ -14,10 +14,10 @@
 # virtualenv
 venv/
 
-# HA
-home-assistant-polymer/*
-misc/*
-script/*
+# Data
+home-assistant-polymer/
+script/
+tests/
 
 # Test ENV
 data/
.github/ISSUE_TEMPLATE.md (modified, 13 changed lines)

@@ -1,15 +1,15 @@
 <!-- READ THIS FIRST:
 - If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
-- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
-- Do not report issues for components here, plaese refer to https://github.com/home-assistant/home-assistant/issues
+- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/core/releases
+- Do not report issues for integrations here, please refer to https://github.com/home-assistant/core/issues
 - This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
 - Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
-- If you have a problem with a Add-on, make a issue on there repository.
+- If you have a problem with an add-on, make an issue in its repository.
 -->
 
 **Home Assistant release with the issue:**
 <!--
-- Frontend -> Developer tools -> Info
+- Frontend -> Configuration -> Info
 - Or use this command: hass --version
 -->

@@ -20,10 +20,9 @@ Please provide details about your environment.
 
 **Supervisor logs:**
 <!--
-- Frontend -> Hass.io -> System
-- Or use this command: hassio su logs
+- Frontend -> Supervisor -> System
+- Or use this command: ha supervisor logs
 -->
 
-
 **Description of problem:**
 
.github/dependabot.yml (new file, 14 lines)

version: 2
updates:
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
.github/lock.yml (new file, 27 lines)

# Configuration for Lock Threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 1

# Skip issues and pull requests created before a given timestamp. Timestamp must
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
skipCreatedBefore: 2020-01-01

# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
exemptLabels: []

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: false

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: false

# Limit to only `issues` or `pulls`
only: pulls

# Optionally, specify configuration settings just for `issues` or `pulls`
issues:
  daysUntilLock: 30
.github/stale.yml (modified, 3 changed lines)

@@ -6,8 +6,9 @@ daysUntilClose: 7
 exemptLabels:
   - pinned
   - security
+  - rfc
 # Label to use when marking an issue as stale
-staleLabel: wontfix
+staleLabel: stale
 # Comment to post when marking an issue as stale. Set to `false` to disable
 markComment: >
   This issue has been automatically marked as stale because it has not had
.github/workflows/ci.yaml (new file, 432 lines)

name: CI

# yamllint disable-line rule:truthy
on:
  push:
    branches:
      - dev
      - master
  pull_request: ~

env:
  DEFAULT_PYTHON: 3.8
  PRE_COMMIT_HOME: ~/.cache/pre-commit

jobs:
  # Separate job to pre-populate the base dependency cache
  # This prevent upcoming jobs to do the same individually
  prepare:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.8]
    name: Prepare Python ${{ matrix.python-version }} dependencies
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: actions/setup-python@v2.1.2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
          restore-keys: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-
      - name: Create Python virtual environment
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          python -m venv venv
          . venv/bin/activate
          pip install -U pip setuptools
          pip install -r requirements.txt -r requirements_tests.txt
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pre-commit-
      - name: Install pre-commit dependencies
        if: steps.cache-precommit.outputs.cache-hit != 'true'
        run: |
          . venv/bin/activate
          pre-commit install-hooks

  lint-black:
    name: Check black
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run black
        run: |
          . venv/bin/activate
          black --target-version py38 --check supervisor tests setup.py

  lint-dockerfile:
    name: Check Dockerfile
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Register hadolint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/hadolint.json"
      - name: Check Dockerfile
        uses: docker://hadolint/hadolint:v1.18.0
        with:
          args: hadolint Dockerfile

  lint-executable-shebangs:
    name: Check executables
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register check executables problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
      - name: Run executables check
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-executables-have-shebangs --all-files

  lint-flake8:
    name: Check flake8
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register flake8 problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/flake8.json"
      - name: Run flake8
        run: |
          . venv/bin/activate
          flake8 supervisor tests

  lint-isort:
    name: Check isort
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run isort
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure

  lint-json:
    name: Check JSON
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register check-json problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-json.json"
      - name: Run check-json
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-json --all-files

  lint-pylint:
    name: Check pylint
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register pylint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/pylint.json"
      - name: Run pylint
        run: |
          . venv/bin/activate
          pylint supervisor tests

  lint-pyupgrade:
    name: Check pyupgrade
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run pyupgrade
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure

  pytest:
    runs-on: ubuntu-latest
    needs: prepare
    strategy:
      matrix:
        python-version: [3.8]
    name: Run tests Python ${{ matrix.python-version }}
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ matrix.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Install additional system dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends libpulse0 libudev1
      - name: Register Python problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/python.json"
      - name: Install Pytest Annotation plugin
        run: |
          . venv/bin/activate
          # Ideally this should be part of our dependencies
          # However this plugin is fairly new and doesn't run correctly
          # on a non-GitHub environment.
          pip install pytest-github-actions-annotate-failures
      - name: Run pytest
        run: |
          . venv/bin/activate
          pytest \
            -qq \
            --timeout=10 \
            --durations=10 \
            --cov supervisor \
            -o console_output_style=count \
            tests
      - name: Upload coverage artifact
        uses: actions/upload-artifact@v2.1.4
        with:
          name: coverage-${{ matrix.python-version }}
          path: .coverage

  coverage:
    name: Process test coverage
    runs-on: ubuntu-latest
    needs: pytest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Download all coverage artifacts
        uses: actions/download-artifact@v2
      - name: Combine coverage results
        run: |
          . venv/bin/activate
          coverage combine coverage*/.coverage*
          coverage report
          coverage xml
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v1.0.13
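The venv cache key above concatenates the runner OS, the exact Python version, and a hash of each requirements file, so any dependency change invalidates the cache while `restore-keys` still allows a partial fallback. A minimal Python sketch of the same idea (an analogy only, not GitHub's actual `hashFiles()` implementation; it assumes the two requirements files from the workflow sit in the working directory):

```python
# Sketch of the workflow's cache-key scheme: the key changes whenever
# either requirements file changes, forcing a virtualenv rebuild.
import hashlib
import platform


def hash_file(path: str) -> str:
    """Rough stand-in for the hashFiles() expression in the workflow."""
    with open(path, "rb") as handle:
        return hashlib.sha256(handle.read()).hexdigest()[:16]


python_version = platform.python_version()  # e.g. "3.8.5"
key = (
    f"Linux-venv-{python_version}"
    f"-{hash_file('requirements.txt')}"
    f"-{hash_file('requirements_tests.txt')}"
)
print(key)  # e.g. Linux-venv-3.8.5-1a2b3c4d5e6f7a8b-9c0d1e2f3a4b5c6d
```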
.github/workflows/matchers/check-executables-have-shebangs.json (new file, 14 lines)

{
  "problemMatcher": [
    {
      "owner": "check-executables-have-shebangs",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+)$",
          "file": 1,
          "message": 2
        }
      ]
    }
  ]
}
.github/workflows/matchers/check-json.json (new file, 16 lines)

{
  "problemMatcher": [
    {
      "owner": "check-json",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+\\sline\\s(\\d+)\\scolumn\\s(\\d+).+)$",
          "file": 1,
          "message": 2,
          "line": 3,
          "column": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/flake8.json (new file, 30 lines)

{
  "problemMatcher": [
    {
      "owner": "flake8-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    },
    {
      "owner": "flake8-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    }
  ]
}
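The two matchers differ only in the code prefix they accept: `E` codes become error annotations, while `C`, `D`, `F`, `N`, and `W` codes become warnings. A quick self-contained check (not part of the repo; the sample lint line is made up) that the error regex captures the file, line, column, and message fields GitHub annotates:

```python
# Verify the flake8 problem-matcher regexes against a sample output line.
import re

ERROR = re.compile(r"^(.*):(\d+):(\d+):\s(E\d{3}\s.*)$")
WARNING = re.compile(r"^(.*):(\d+):(\d+):\s([CDFNW]\d{3}\s.*)$")

line = "supervisor/core.py:12:1: E302 expected 2 blank lines, found 1"
match = ERROR.match(line)
assert match is not None and WARNING.match(line) is None
print(match.groups())
# ('supervisor/core.py', '12', '1', 'E302 expected 2 blank lines, found 1')
```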
.github/workflows/matchers/hadolint.json (new file, 16 lines)

{
  "problemMatcher": [
    {
      "owner": "hadolint",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+)\\s+((DL\\d{4}).+)$",
          "file": 1,
          "line": 2,
          "message": 3,
          "code": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/pylint.json (new file, 32 lines)

{
  "problemMatcher": [
    {
      "owner": "pylint-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([EF]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    },
    {
      "owner": "pylint-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([CRW]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    }
  ]
}
.github/workflows/matchers/python.json (new file, 18 lines)

{
  "problemMatcher": [
    {
      "owner": "python",
      "pattern": [
        {
          "regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
          "file": 1,
          "line": 2
        },
        {
          "regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
          "message": 2
        }
      ]
    }
  ]
}
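Unlike the single-line lint matchers, this one uses a two-element pattern: the first regex anchors on a traceback's `File "...", line N` entry, and the second pulls the message out of a following `raise X('...')` line. A small illustration (not from the repo; the traceback text is hypothetical):

```python
# Show what each of the two python.json patterns captures from a traceback.
import re

file_line = re.compile(r"^\s*File\s\"(.*)\",\sline\s(\d+),\sin\s(.*)$")
raise_line = re.compile(r"^\s*raise\s(.*)\(\'(.*)\'\)$")

trace = [
    '  File "supervisor/core.py", line 10, in setup',
    "    raise RuntimeError('Setup failed')",
]
print(file_line.match(trace[0]).groups())   # ('supervisor/core.py', '10', 'setup')
print(raise_line.match(trace[1]).groups())  # ('RuntimeError', 'Setup failed')
```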
.github/workflows/release-drafter.yml (new file, 15 lines)

name: Release Drafter

on:
  push:
    # branches to consider in the event; optional, defaults to all
    branches:
      - dev

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    steps:
      - uses: release-drafter/release-drafter@v5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/sentry.yaml (new file, 21 lines)

name: Sentry Release

# yamllint disable-line rule:truthy
on:
  release:
    types: [published, prereleased]

jobs:
  createSentryRelease:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Sentry Release
        uses: getsentry/action-release@v1.0.1
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
          SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
        with:
          environment: production
.gitignore (modified, 5 changed lines)

@@ -95,3 +95,8 @@ ENV/
 .vscode/*
 !.vscode/cSpell.json
 !.vscode/tasks.json
+!.vscode/launch.json
+
+# mypy
+/.mypy_cache/*
+/.dmypy.json
.pre-commit-config.yaml (new file, 32 lines)

repos:
  - repo: https://github.com/psf/black
    rev: 20.8b1
    hooks:
      - id: black
        args:
          - --safe
          - --quiet
        files: ^((supervisor|tests)/.+)?[^/]+\.py$
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.8.3
    hooks:
      - id: flake8
        additional_dependencies:
          - flake8-docstrings==1.5.0
          - pydocstyle==5.0.2
        files: ^(supervisor|script|tests)/.+\.py$
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.1.0
    hooks:
      - id: check-executables-have-shebangs
        stages: [manual]
      - id: check-json
  - repo: https://github.com/pre-commit/mirrors-isort
    rev: v4.3.21
    hooks:
      - id: isort
  - repo: https://github.com/asottile/pyupgrade
    rev: v2.6.2
    hooks:
      - id: pyupgrade
        args: [--py37-plus]
.vscode/launch.json (new file, 18 lines)

{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Supervisor remote debug",
      "type": "python",
      "request": "attach",
      "port": 33333,
      "host": "172.30.32.2",
      "pathMappings": [
        {
          "localRoot": "${workspaceFolder}",
          "remoteRoot": "/usr/src/supervisor"
        }
      ]
    }
  ]
}
.vscode/tasks.json (modified, 180 changed lines)

@@ -1,92 +1,90 @@ Before:

{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Run Testenv",
      "type": "shell",
      "command": "./scripts/test_env.sh",
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Run Testenv CLI",
      "type": "shell",
      "command": "docker run --rm -ti -v /etc/machine-id:/etc/machine-id --network=hassio --add-host hassio:172.30.32.2 homeassistant/amd64-hassio-cli:dev",
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Update UI",
      "type": "shell",
      "command": "./scripts/update-frontend.sh",
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pytest",
      "type": "shell",
      "command": "pytest --timeout=10 tests",
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Flake8",
      "type": "shell",
      "command": "flake8 hassio tests",
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pylint",
      "type": "shell",
      "command": "pylint hassio",
      "dependsOn": [
        "Install all Requirements"
      ],
      "group": {
        "kind": "test",
        "isDefault": true,
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    }
  ]
}

After:

{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Run Testenv",
      "type": "shell",
      "command": "./scripts/test_env.sh",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Run Testenv CLI",
      "type": "shell",
      "command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Update UI",
      "type": "shell",
      "command": "./scripts/update-frontend.sh",
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pytest",
      "type": "shell",
      "command": "pytest --timeout=10 tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Flake8",
      "type": "shell",
      "command": "flake8 supervisor tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pylint",
      "type": "shell",
      "command": "pylint supervisor",
      "dependsOn": ["Install all Requirements"],
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    }
  ]
}
Dockerfile (modified, 40 changed lines)

@@ -1,37 +1,39 @@
 ARG BUILD_FROM
 FROM $BUILD_FROM
 
+ENV \
+    S6_SERVICES_GRACETIME=10000
+
 # Install base
-RUN apk add --no-cache \
-    openssl \
-    libffi \
-    musl \
-    git \
-    socat \
-    glib \
-    eudev \
-    eudev-libs
+RUN \
+    apk add --no-cache \
+    eudev \
+    eudev-libs \
+    git \
+    glib \
+    libffi \
+    libpulse \
+    musl \
+    openssl
 
 ARG BUILD_ARCH
 WORKDIR /usr/src
 
 # Install requirements
 COPY requirements.txt .
-RUN export MAKEFLAGS="-j$(nproc)" \
+RUN \
+    export MAKEFLAGS="-j$(nproc)" \
     && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
         "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
         -r ./requirements.txt \
     && rm -f requirements.txt
 
-# Install HassIO
-COPY . hassio
-RUN pip3 install --no-cache-dir -e ./hassio \
-    && python3 -m compileall ./hassio/hassio
+# Install Home Assistant Supervisor
+COPY . supervisor
+RUN \
+    pip3 install --no-cache-dir -e ./supervisor \
+    && python3 -m compileall ./supervisor/supervisor
 
 
-# Initialize udev daemon, handle CMD
-COPY entry.sh /bin/
-ENTRYPOINT ["/bin/entry.sh"]
-
-WORKDIR /
-CMD [ "python3", "-m", "hassio" ]
+COPY rootfs /
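The `--find-links` URL in the new pip step is assembled from the Alpine major.minor version on the base image and the `BUILD_ARCH` build argument. A small sketch of that composition (the example values are hypothetical; the real ones come from /etc/alpine-release and the Docker build argument):

```python
# Mirror of what the shell expression in the RUN step computes.
alpine_release = "3.12.0"  # hypothetical contents of /etc/alpine-release
build_arch = "amd64"       # hypothetical BUILD_ARCH build argument

# `cut -d '.' -f 1-2` keeps only the major.minor part of the version.
alpine_minor = ".".join(alpine_release.split(".")[:2])

index = f"https://wheels.home-assistant.io/alpine-{alpine_minor}/{build_arch}/"
print(index)  # https://wheels.home-assistant.io/alpine-3.12/amd64/
```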
LICENSE (modified, 4 changed lines)

@@ -178,7 +178,7 @@
 APPENDIX: How to apply the Apache License to your work.
 
 To apply the Apache License to your work, attach the following
-boilerplate notice, with the fields enclosed by brackets "{}"
+boilerplate notice, with the fields enclosed by brackets "[]"
 replaced with your own identifying information. (Don't include
 the brackets!) The text should be enclosed in the appropriate
 comment syntax for the file format. We also recommend that a

@@ -186,7 +186,7 @@
 same "printed page" as the copyright notice for easier
 identification within third-party archives.
 
-Copyright 2017 Pascal Vizeli
+Copyright [yyyy] [name of copyright owner]
 
 Licensed under the Apache License, Version 2.0 (the "License");
 you may not use this file except in compliance with the License.
MANIFEST.in (modified)

@@ -1,3 +1,3 @@
 include LICENSE.md
-graft hassio
+graft supervisor
 recursive-exclude * *.py[co]
README.md (modified, 30 changed lines)

@@ -1,30 +1,26 @@
-[](https://dev.azure.com/home-assistant/Hass.io/_build/latest?definitionId=2&branchName=dev)
-
-# Hass.io
+# Home Assistant Supervisor
 
 ## First private cloud solution for home automation
 
-Hass.io is a Docker-based system for managing your Home Assistant installation
-and related applications. The system is controlled via Home Assistant which
-communicates with the Supervisor. The Supervisor provides an API to manage the
-installation. This includes changing network settings or installing
-and updating software.
-
-
+Home Assistant (former Hass.io) is a container-based system for managing your
+Home Assistant Core installation and related applications. The system is
+controlled via Home Assistant which communicates with the Supervisor. The
+Supervisor provides an API to manage the installation. This includes changing
+network settings or installing and updating software.
 
 ## Installation
 
-Installation instructions can be found at <https://home-assistant.io/hassio>.
+Installation instructions can be found at https://home-assistant.io/hassio.
 
 ## Development
 
-The development of the supervisor is a bit tricky. Not difficult but tricky.
+The development of the Supervisor is not difficult but tricky.
 
-- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-builder
-- Go into a HassOS device or VM and pull your supervisor.
-- Set the developer modus with cli `hassio supervisor options --channel=dev`
+- You can use the builder to create your Supervisor: https://github.com/home-assistant/hassio-builder
+- Access a HassOS device or VM and pull your Supervisor.
+- Set the developer modus with the CLI tool: `ha supervisor options --channel=dev`
 - Tag it as `homeassistant/xy-hassio-supervisor:latest`
-- Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
+- Restart the service with `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
 - Test your changes
 
-Small Bugfix or improvements, make a PR. Significant change makes first an RFC.
+For small bugfixes or improvements, make a PR. For significant changes open a RFC first, please. Thanks.
azure-pipelines-ci.yml (new file, 52 lines)

# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - master
      - dev
pr:
  - dev
variables:
  - name: versionHadolint
    value: "v1.16.3"

jobs:
  - job: "Tox"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - script: |
          sudo apt-get update
          sudo apt-get install -y libpulse0 libudev1
        displayName: "Install Host library"
      - task: UsePythonVersion@0
        displayName: "Use Python 3.8"
        inputs:
          versionSpec: "3.8"
      - script: pip install tox
        displayName: "Install Tox"
      - script: tox
        displayName: "Run Tox"
  - job: "JQ"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - script: sudo apt-get install -y jq
        displayName: "Install JQ"
      - bash: |
          shopt -s globstar
          cat **/*.json | jq '.'
        displayName: "Run JQ"
  - job: "Hadolint"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - script: sudo docker pull hadolint/hadolint:$(versionHadolint)
        displayName: "Install Hadolint"
      - script: |
          sudo docker run --rm -i \
            -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
            hadolint/hadolint:$(versionHadolint) < Dockerfile
        displayName: "Run Hadolint"
azure-pipelines-release.yml (new file, 53 lines)

# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - dev
  tags:
    include:
      - "*"
pr: none
variables:
  - name: versionBuilder
    value: "7.0"
  - group: docker

jobs:
  - job: "VersionValidate"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - task: UsePythonVersion@0
        displayName: "Use Python 3.8"
        inputs:
          versionSpec: "3.8"
      - script: |
          setup_version="$(python setup.py -V)"
          branch_version="$(Build.SourceBranchName)"

          if [ "${branch_version}" == "dev" ]; then
            exit 0
          elif [ "${setup_version}" != "${branch_version}" ]; then
            echo "Version of tag ${branch_version} don't match with ${setup_version}!"
            exit 1
          fi
        displayName: "Check version of branch/tag"
  - job: "Release"
    dependsOn:
      - "VersionValidate"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
        displayName: "Docker hub login"
      - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
        displayName: "Install Builder"
      - script: |
          sudo docker run --rm --privileged \
            -v ~/.docker:/root/.docker \
            -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
            homeassistant/amd64-builder:$(versionBuilder) \
            --generic $(Build.SourceBranchName) --all -t /data
        displayName: "Build Release"
azure-pipelines-wheels.yml (new file, 27 lines)

# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - dev
pr: none
variables:
  - name: versionWheels
    value: '1.13.0-3.8-alpine3.12'
resources:
  repositories:
    - repository: azure
      type: github
      name: 'home-assistant/ci-azure'
      endpoint: 'home-assistant'


jobs:
  - template: templates/azp-job-wheels.yaml@azure
    parameters:
      builderVersion: '$(versionWheels)'
      builderApk: 'build-base;libffi-dev;openssl-dev'
      builderPip: 'Cython'
      skipBinary: 'aiohttp'
      wheelsRequirement: 'requirements.txt'
azure-pipelines.yml (deleted, 156 lines)

# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - master
      - dev
  tags:
    include:
      - '*'
    exclude:
      - untagged*
pr:
  - dev
variables:
  - name: basePythonTag
    value: '3.7-alpine3.10'
  - name: versionHadolint
    value: 'v1.16.3'
  - name: versionBuilder
    value: '4.4'
  - name: versionWheels
    value: '1.0-3.7-alpine3.10'
  - group: docker
  - group: wheels


stages:

  - stage: 'Test'
    jobs:
      - job: 'Tox'
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - task: UsePythonVersion@0
            displayName: 'Use Python 3.7'
            inputs:
              versionSpec: '3.7'
          - script: pip install tox
            displayName: 'Install Tox'
          - script: tox
            displayName: 'Run Tox'
      - job: 'JQ'
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - script: sudo apt-get install -y jq
            displayName: 'Install JQ'
          - bash: |
              shopt -s globstar
              cat **/*.json | jq '.'
            displayName: 'Run JQ'
      - job: 'Hadolint'
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - script: sudo docker pull hadolint/hadolint:$(versionHadolint)
            displayName: 'Install Hadolint'
          - script: |
              sudo docker run --rm -i \
                -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
                hadolint/hadolint:$(versionHadolint) < Dockerfile
            displayName: 'Run Hadolint'

  - stage: 'Wheels'
    jobs:
      - job: 'Wheels'
        condition: eq(variables['Build.SourceBranchName'], 'dev')
        timeoutInMinutes: 360
        pool:
          vmImage: 'ubuntu-latest'
        strategy:
          maxParallel: 5
          matrix:
            amd64:
              buildArch: 'amd64'
            i386:
              buildArch: 'i386'
            armhf:
              buildArch: 'armhf'
            armv7:
              buildArch: 'armv7'
            aarch64:
              buildArch: 'aarch64'
        steps:
          - script: |
              sudo apt-get update
              sudo apt-get install -y --no-install-recommends \
                qemu-user-static \
                binfmt-support \
                curl

              sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
              sudo update-binfmts --enable qemu-arm
              sudo update-binfmts --enable qemu-aarch64
            displayName: 'Initial cross build'
          - script: |
              mkdir -p .ssh
              echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
              ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
              chmod 600 .ssh/*
            displayName: 'Install ssh key'
          - script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
            displayName: 'Install wheels builder'
          - script: |
              sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
                homeassistant/$(buildArch)-wheels:$(versionWheels) \
                --apk "build-base;libffi-dev;openssl-dev" \
                --index $(wheelsIndex) \
                --requirement requirements.txt \
                --upload rsync \
                --remote wheels@$(wheelsHost):/opt/wheels
            displayName: 'Run wheels build'

  - stage: 'Deploy'
    jobs:
      - job: 'VersionValidate'
        condition: or(startsWith(variables['Build.SourceBranch'], 'refs/tags'), eq(variables['Build.SourceBranchName'], 'dev'))
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - task: UsePythonVersion@0
            displayName: 'Use Python 3.7'
            inputs:
              versionSpec: '3.7'
          - script: |
              setup_version="$(python setup.py -V)"
              branch_version="$(Build.SourceBranchName)"

              if [ "${branch_version}" == "dev" ]; then
                exit 0
              elif [ "${setup_version}" != "${branch_version}" ]; then
                echo "Version of tag ${branch_version} don't match with ${setup_version}!"
                exit 1
              fi
            displayName: 'Check version of branch/tag'
      - job: 'Release'
        dependsOn:
          - 'VersionValidate'
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
            displayName: 'Docker hub login'
          - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
            displayName: 'Install Builder'
          - script: |
              sudo docker run --rm --privileged \
                -v ~/.docker:/root/.docker \
                -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
                homeassistant/amd64-builder:$(versionBuilder) \
                --supervisor $(basePythonTag) --version $(Build.SourceBranchName) \
                --all -t /data --docker-hub homeassistant
            displayName: 'Build Release'
13
build.json
Normal file
@@ -0,0 +1,13 @@
{
  "image": "homeassistant/{arch}-hassio-supervisor",
  "build_from": {
    "aarch64": "homeassistant/aarch64-base-python:3.8-alpine3.12",
    "armhf": "homeassistant/armhf-base-python:3.8-alpine3.12",
    "armv7": "homeassistant/armv7-base-python:3.8-alpine3.12",
    "amd64": "homeassistant/amd64-base-python:3.8-alpine3.12",
    "i386": "homeassistant/i386-base-python:3.8-alpine3.12"
  },
  "labels": {
    "io.hass.type": "supervisor"
  }
}
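The {arch} placeholder in build.json is expanded once per target architecture by the builder. A minimal Python sketch of that substitution (illustrative only, not code from this repository):

    image_tmpl = "homeassistant/{arch}-hassio-supervisor"
    for arch in ("aarch64", "armhf", "armv7", "amd64", "i386"):
        # e.g. homeassistant/amd64-hassio-supervisor
        print(image_tmpl.format(arch=arch))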
9
codecov.yaml
Normal file
@@ -0,0 +1,9 @@
codecov:
  branch: dev
coverage:
  status:
    project:
      default:
        target: 40
        threshold: 0.09
comment: false
13
entry.sh
@@ -1,13 +0,0 @@
#!/bin/bash
set -e

udevd --daemon
udevadm trigger

if CMD="$(command -v "$1")"; then
  shift
  exec "$CMD" "$@"
else
  echo "Command not found: $1"
  exit 1
fi
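For comparison, a minimal Python sketch of the same lookup-and-exec logic entry.sh implements (hypothetical, assuming the first argument names the command to run):

    import os
    import shutil
    import sys

    cmd = shutil.which(sys.argv[1]) if len(sys.argv) > 1 else None
    if cmd:
        # Replace the current process, like `exec` in the shell script
        os.execv(cmd, [cmd] + sys.argv[2:])
    print(f"Command not found: {sys.argv[1] if len(sys.argv) > 1 else '<none>'}")
    sys.exit(1)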
@@ -1 +0,0 @@
"""Init file for Hass.io."""
@@ -1,51 +0,0 @@
"""Init file for Hass.io hardware RESTful API."""
import asyncio
import logging
from typing import Any, Dict

from aiohttp import web

from .utils import api_process
from ..const import (
    ATTR_SERIAL,
    ATTR_DISK,
    ATTR_GPIO,
    ATTR_AUDIO,
    ATTR_INPUT,
    ATTR_OUTPUT,
)
from ..coresys import CoreSysAttributes

_LOGGER: logging.Logger = logging.getLogger(__name__)


class APIHardware(CoreSysAttributes):
    """Handle RESTful API for hardware functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Show hardware info."""
        return {
            ATTR_SERIAL: list(
                self.sys_hardware.serial_devices | self.sys_hardware.serial_by_id
            ),
            ATTR_INPUT: list(self.sys_hardware.input_devices),
            ATTR_DISK: list(self.sys_hardware.disk_devices),
            ATTR_GPIO: list(self.sys_hardware.gpio_devices),
            ATTR_AUDIO: self.sys_hardware.audio_devices,
        }

    @api_process
    async def audio(self, request: web.Request) -> Dict[str, Any]:
        """Show ALSA audio devices."""
        return {
            ATTR_AUDIO: {
                ATTR_INPUT: self.sys_host.alsa.input_devices,
                ATTR_OUTPUT: self.sys_host.alsa.output_devices,
            }
        }

    @api_process
    def trigger(self, request: web.Request) -> None:
        """Trigger a udev device reload."""
        return asyncio.shield(self.sys_hardware.udev_trigger())
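A hedged client-side sketch of calling this handler. The route path (/hardware/info), the internal http://hassio host, and the X-Hassio-Key auth header are assumptions, since the route wiring is not part of this diff:

    import asyncio
    import os

    import aiohttp

    async def fetch_hardware_info() -> dict:
        # Assumed auth header and endpoint for the local Supervisor API
        headers = {"X-Hassio-Key": os.environ.get("HASSIO_TOKEN", "")}
        async with aiohttp.ClientSession(headers=headers) as session:
            async with session.get("http://hassio/hardware/info") as resp:
                return await resp.json()

    print(asyncio.run(fetch_hardware_info()))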
@@ -1,57 +0,0 @@
"""Init file for Hass.io HassOS RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict

import voluptuous as vol
from aiohttp import web

from ..const import (
    ATTR_BOARD,
    ATTR_VERSION,
    ATTR_VERSION_CLI,
    ATTR_VERSION_CLI_LATEST,
    ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})


class APIHassOS(CoreSysAttributes):
    """Handle RESTful API for HassOS functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return HassOS information."""
        return {
            ATTR_VERSION: self.sys_hassos.version,
            ATTR_VERSION_CLI: self.sys_hassos.version_cli,
            ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
            ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
            ATTR_BOARD: self.sys_hassos.board,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update HassOS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)

        await asyncio.shield(self.sys_hassos.update(version))

    @api_process
    async def update_cli(self, request: web.Request) -> None:
        """Update HassOS CLI."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)

        await asyncio.shield(self.sys_hassos.update_cli(version))

    @api_process
    def config_sync(self, request: web.Request) -> Awaitable[None]:
        """Trigger config reload on HassOS."""
        return asyncio.shield(self.sys_hassos.config_sync())
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,20 +0,0 @@
/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"sources":["webpack:///./src/ingress-view/hassio-ingress-view.ts"],"names":["customElement","HassioIngressView","property","this","_addon","html","_templateObject2","name","ingress_url","_templateObject","changedProps","_get","_getPrototypeOf","prototype","call","has","addon","route","path","substr","oldRoute","get","oldAddon","undefined","_fetchData","_callee","addonSlug","_ref","_ref2","regeneratorRuntime","wrap","_context","prev","next","Promise","all","fetchHassioAddonInfo","hass","catch","Error","createHassioSession","sent","_slicedToArray","ingress","t0","console","error","alert","message","history","back","stop","css","_templateObject3","LitElement"],"mappings":"4gSAmBCA,YAAc,0CACTC,smBACHC,kEACAA,mEACAA,4EAED,WACE,OAAKC,KAAKC,OAMHC,YAAPC,IAC0BH,KAAKC,OAAOG,KACpBJ,KAAKC,OAAOI,aAPrBH,YAAPI,0CAYJ,SAAkBC,GAGhB,GAFAC,EAAAC,EApBEX,EAoBFY,WAAA,eAAAV,MAAAW,KAAAX,KAAmBO,GAEdA,EAAaK,IAAI,SAAtB,CAIA,IAAMC,EAAQb,KAAKc,MAAMC,KAAKC,OAAO,GAE/BC,EAAWV,EAAaW,IAAI,SAC5BC,EAAWF,EAAWA,EAASF,KAAKC,OAAO,QAAKI,EAElDP,GAASA,IAAUM,GACrBnB,KAAKqB,WAAWR,0FAIpB,SAAAS,EAAyBC,GAAzB,IAAAC,EAAAC,EAAAZ,EAAA,OAAAa,mBAAAC,KAAA,SAAAC,GAAA,cAAAA,EAAAC,KAAAD,EAAAE,MAAA,cAAAF,EAAAC,KAAA,EAAAD,EAAAE,KAAA,EAE0BC,QAAQC,IAAI,CAChCC,YAAqBjC,KAAKkC,KAAMX,GAAWY,MAAM,WAC/C,MAAM,IAAIC,MAAM,iCAElBC,YAAoBrC,KAAKkC,MAAMC,MAAM,WACnC,MAAM,IAAIC,MAAM,2CAPxB,UAAAZ,EAAAI,EAAAU,KAAAb,EAAAc,EAAAf,EAAA,IAEWX,EAFXY,EAAA,IAWee,QAXf,CAAAZ,EAAAE,KAAA,cAYY,IAAIM,MAAM,wCAZtB,OAeIpC,KAAKC,OAASY,EAflBe,EAAAE,KAAA,iBAAAF,EAAAC,KAAA,GAAAD,EAAAa,GAAAb,EAAA,SAkBIc,QAAQC,MAARf,EAAAa,IACAG,MAAMhB,EAAAa,GAAII,SAAW,mCACrBC,QAAQC,OApBZ,yBAAAnB,EAAAoB,SAAA1B,EAAAtB,KAAA,yRAwBA,WACE,OAAOiD,YAAPC,UA7D4BC","file":"chunk.5dd33a3a20657ed46a19.js","sourcesContent":["import {\n LitElement,\n customElement,\n property,\n TemplateResult,\n html,\n PropertyValues,\n CSSResult,\n css,\n} from \"lit-element\";\nimport { HomeAssistant, Route } from \"../../../src/types\";\nimport {\n createHassioSession,\n HassioAddonDetails,\n fetchHassioAddonInfo,\n} from \"../../../src/data/hassio\";\nimport \"../../../src/layouts/hass-loading-screen\";\nimport \"../../../src/layouts/hass-subpage\";\n\n@customElement(\"hassio-ingress-view\")\nclass HassioIngressView extends LitElement {\n @property() public hass!: HomeAssistant;\n @property() public route!: Route;\n @property() private _addon?: HassioAddonDetails;\n\n protected render(): TemplateResult | void {\n if (!this._addon) {\n return html`\n <hass-loading-screen></hass-loading-screen>\n `;\n }\n\n return html`\n <hass-subpage .header=${this._addon.name} hassio>\n <iframe src=${this._addon.ingress_url}></iframe>\n </hass-subpage>\n `;\n }\n\n protected updated(changedProps: PropertyValues) {\n super.firstUpdated(changedProps);\n\n if (!changedProps.has(\"route\")) {\n return;\n }\n\n const addon = this.route.path.substr(1);\n\n const oldRoute = changedProps.get(\"route\") as this[\"route\"] | undefined;\n const oldAddon = oldRoute ? 
oldRoute.path.substr(1) : undefined;\n\n if (addon && addon !== oldAddon) {\n this._fetchData(addon);\n }\n }\n\n private async _fetchData(addonSlug: string) {\n try {\n const [addon] = await Promise.all([\n fetchHassioAddonInfo(this.hass, addonSlug).catch(() => {\n throw new Error(\"Failed to fetch add-on info\");\n }),\n createHassioSession(this.hass).catch(() => {\n throw new Error(\"Failed to create an ingress session\");\n }),\n ]);\n\n if (!addon.ingress) {\n throw new Error(\"This add-on does not support ingress\");\n }\n\n this._addon = addon;\n } catch (err) {\n // tslint:disable-next-line\n console.error(err);\n alert(err.message || \"Unknown error starting ingress.\");\n history.back();\n }\n }\n\n static get styles(): CSSResult {\n return css`\n iframe {\n display: block;\n width: 100%;\n height: 100%;\n border: 0;\n }\n paper-icon-button {\n color: var(--text-primary-color);\n }\n `;\n }\n}\n\ndeclare global {\n interface HTMLElementTagNameMap {\n \"hassio-ingress-view\": HassioIngressView;\n }\n}\n"],"sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,21 +0,0 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
(window.webpackJsonp=window.webpackJsonp||[]).push([[9],{101:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(124),i=t.n(e),o=t(126),u=t.n(o),a=i.a,c=u.a}}]);
//# sourceMappingURL=chunk.7f8cce5798f837214ef8.js.map
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"sources":["webpack:///../src/resources/load_markdown.js"],"names":["__webpack_require__","r","__webpack_exports__","d","marked","filterXSS","marked__WEBPACK_IMPORTED_MODULE_0__","marked__WEBPACK_IMPORTED_MODULE_0___default","n","xss__WEBPACK_IMPORTED_MODULE_1__","xss__WEBPACK_IMPORTED_MODULE_1___default","marked_","filterXSS_"],"mappings":"0FAAAA,EAAAC,EAAAC,GAAAF,EAAAG,EAAAD,EAAA,2BAAAE,IAAAJ,EAAAG,EAAAD,EAAA,8BAAAG,IAAA,IAAAC,EAAAN,EAAA,KAAAO,EAAAP,EAAAQ,EAAAF,GAAAG,EAAAT,EAAA,KAAAU,EAAAV,EAAAQ,EAAAC,GAGaL,EAASO,IACTN,EAAYO","file":"chunk.7f8cce5798f837214ef8.js","sourcesContent":["import marked_ from \"marked\";\nimport filterXSS_ from \"xss\";\n\nexport const marked = marked_;\nexport const filterXSS = filterXSS_;\n"],"sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,31 +0,0 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
!function(e){function n(n){for(var t,o,a=n[0],i=n[1],f=0,c=[];f<a.length;f++)o=a[f],r[o]&&c.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(u&&u(n);c.length;)c.shift()()}var t={},r={4:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,f=document.createElement("script");f.charset="utf-8",f.timeout=120,o.nc&&f.setAttribute("nonce",o.nc),f.src=function(e){return o.p+"chunk."+{0:"7f411ffa9df152cb8f05",1:"598ae99dfd641ab3a30c",2:"af7784dbf07df8e24819",3:"b15efbd4fb2c8cac0ad4",5:"87d3a6d0178fb26762cf",6:"6f4702eafe52425373ed",7:"5dd33a3a20657ed46a19",8:"7c785f796f428abae18d",9:"7f8cce5798f837214ef8",10:"04bcaa18b59728e10be9",11:"9d7374dae6137783dda4",12:"6685a7f98b13655ab808",13:"f1156b978f6f3143a651"}[e]+".js"}(e),i=function(n){f.onerror=f.onload=null,clearTimeout(u);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src,i=new Error("Loading chunk "+e+" failed.\n("+o+": "+a+")");i.type=o,i.request=a,t[1](i)}r[e]=void 0}};var u=setTimeout(function(){i({type:"timeout",target:f})},12e4);f.onerror=f.onload=i,document.head.appendChild(f)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=window.webpackJsonp=window.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var f=0;f<a.length;f++)n(a[f]);var u=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(1),t.e(6)]).then(t.bind(null,2)),Promise.all([t.e(1),t.e(12),t.e(8)]).then(t.bind(null,1))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
//# sourceMappingURL=entrypoint.js.map
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,38 +0,0 @@
<!doctype html>
<html>
<head>
  <meta charset="utf-8">
  <title>Hass.io</title>
  <meta name='viewport' content='width=device-width, user-scalable=no'>
  <style>
    body {
      height: 100vh;
      margin: 0;
      padding: 0;
    }
  </style>
  <script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
</head>
<body>
  <hassio-app></hassio-app>
  <script>
    function addScript(src) {
      var e = document.createElement('script');
      e.src = src;
      document.write(e.outerHTML);
    }
    var webComponentsSupported = (
      'customElements' in window &&
      'import' in document.createElement('link') &&
      'content' in document.createElement('template'));
    if (!webComponentsSupported) {
      addScript('/static/webcomponents-lite.js');
    }
  </script>
  <!--
    Disabled while we make Home Assistant able to serve the right files.
    <script src="./app.js"></script>
  -->
  <link rel='import' href='./hassio-app.html'>
</body>
</html>
Binary file not shown.
192
hassio/core.py
@@ -1,192 +0,0 @@
"""Main file for Hass.io."""
from contextlib import suppress
import asyncio
import logging

import async_timeout

from .coresys import CoreSysAttributes
from .const import (
    STARTUP_SYSTEM,
    STARTUP_SERVICES,
    STARTUP_APPLICATION,
    STARTUP_INITIALIZE,
)
from .exceptions import HassioError, HomeAssistantError, SupervisorUpdateError

_LOGGER: logging.Logger = logging.getLogger(__name__)


class HassIO(CoreSysAttributes):
    """Main object of Hass.io."""

    def __init__(self, coresys):
        """Initialize Hass.io object."""
        self.coresys = coresys

    async def connect(self):
        """Connect Supervisor container."""
        await self.sys_supervisor.load()

    async def setup(self):
        """Set up HassIO orchestration."""
        # Load DBus
        await self.sys_dbus.load()

        # Load Host
        await self.sys_host.load()

        # Load CoreDNS
        await self.sys_dns.load()

        # Load Home Assistant
        await self.sys_homeassistant.load()

        # Load CPU/Arch
        await self.sys_arch.load()

        # Load HassOS
        await self.sys_hassos.load()

        # Load Stores
        await self.sys_store.load()

        # Load Add-ons
        await self.sys_addons.load()

        # REST API views
        await self.sys_api.load()

        # Load last available update data
        await self.sys_updater.load()

        # Load last available snapshot data
        await self.sys_snapshots.load()

        # Load services
        await self.sys_services.load()

        # Load discovery
        await self.sys_discovery.load()

        # Load ingress
        await self.sys_ingress.load()

        # Load secrets
        await self.sys_secrets.load()

    async def start(self):
        """Start Hass.io orchestration."""
        await self.sys_api.start()

        # On the release channel, try to update itself
        if self.sys_supervisor.need_update:
            try:
                if self.sys_dev:
                    _LOGGER.warning("Ignore Hass.io updates on dev!")
                else:
                    await self.sys_supervisor.update()
            except SupervisorUpdateError:
                _LOGGER.fatal(
                    "Can't update supervisor! This will break some Add-ons or affect "
                    "future versions of Home Assistant!"
                )

        # Start add-ons marked as initialize
        await self.sys_addons.boot(STARTUP_INITIALIZE)

        try:
            # Home Assistant is already running / only the Supervisor rebooted
            if self.sys_hardware.last_boot == self.sys_config.last_boot:
                _LOGGER.info("Hass.io reboot detected")
                return

            # Reset registered services / discovery
            self.sys_services.reset()

            # Start add-ons marked as system
            await self.sys_addons.boot(STARTUP_SYSTEM)

            # Start add-ons marked as services
            await self.sys_addons.boot(STARTUP_SERVICES)

            # Run Home Assistant
            if self.sys_homeassistant.boot:
                with suppress(HomeAssistantError):
                    await self.sys_homeassistant.start()

            # Start add-ons marked as application
            await self.sys_addons.boot(STARTUP_APPLICATION)

            # Store new last boot
            self._update_last_boot()

        finally:
            # Add core tasks into scheduler
            await self.sys_tasks.load()

            # If landingpage, run the upgrade in the background
            if self.sys_homeassistant.version == "landingpage":
                self.sys_create_task(self.sys_homeassistant.install())

            _LOGGER.info("Hass.io is up and running")

    async def stop(self):
        """Stop a running orchestration."""
        # Don't process the scheduler anymore
        self.sys_scheduler.suspend = True

        # Store new last boot / prevent time adjustments
        self._update_last_boot()

        # Process async stop tasks
        try:
            with async_timeout.timeout(10):
                await asyncio.wait(
                    [
                        self.sys_api.stop(),
                        self.sys_websession.close(),
                        self.sys_websession_ssl.close(),
                        self.sys_ingress.unload(),
                        self.sys_dns.unload(),
                    ]
                )
        except asyncio.TimeoutError:
            _LOGGER.warning("Force Shutdown!")

        _LOGGER.info("Hass.io is down")

    async def shutdown(self):
        """Shutdown all running containers in correct order."""
        await self.sys_addons.shutdown(STARTUP_APPLICATION)

        # Close Home Assistant
        with suppress(HassioError):
            await self.sys_homeassistant.stop()

        await self.sys_addons.shutdown(STARTUP_SERVICES)
        await self.sys_addons.shutdown(STARTUP_SYSTEM)
        await self.sys_addons.shutdown(STARTUP_INITIALIZE)

    def _update_last_boot(self):
        """Update last boot time."""
        self.sys_config.last_boot = self.sys_hardware.last_boot
        self.sys_config.save_data()

    async def repair(self):
        """Repair system integrity."""
        _LOGGER.info("Start repairing of Hass.io Environment")
        await self.sys_run_in_executor(self.sys_docker.repair)

        # Restore core functionality
        await self.sys_dns.repair()
        await self.sys_addons.repair()
        await self.sys_homeassistant.repair()

        # Fix HassOS specific
        if self.sys_hassos.available:
            await self.sys_hassos.repair_cli()

        # Tag latest version
        await self.sys_supervisor.repair()
        _LOGGER.info("Finished repairing of Hass.io Environment")
@@ -1,58 +0,0 @@
{
  "raspberrypi": [
    "armhf"
  ],
  "raspberrypi2": [
    "armv7",
    "armhf"
  ],
  "raspberrypi3": [
    "armv7",
    "armhf"
  ],
  "raspberrypi3-64": [
    "aarch64",
    "armv7",
    "armhf"
  ],
  "raspberrypi4": [
    "armv7",
    "armhf"
  ],
  "raspberrypi4-64": [
    "aarch64",
    "armv7",
    "armhf"
  ],
  "tinker": [
    "armv7",
    "armhf"
  ],
  "odroid-c2": [
    "aarch64"
  ],
  "odroid-xu": [
    "armv7",
    "armhf"
  ],
  "orangepi-prime": [
    "aarch64"
  ],
  "qemux86": [
    "i386"
  ],
  "qemux86-64": [
    "amd64",
    "i386"
  ],
  "qemuarm": [
    "armhf"
  ],
  "qemuarm-64": [
    "aarch64"
  ],
  "intel-nuc": [
    "amd64",
    "i386"
  ]
}
@@ -1,17 +0,0 @@
pcm.!default {
  type asym
  capture.pcm "mic"
  playback.pcm "speaker"
}
pcm.mic {
  type plug
  slave {
    pcm "hw:$input"
  }
}
pcm.speaker {
  type plug
  slave {
    pcm "hw:$output"
  }
}
@@ -1,18 +0,0 @@
{
  "raspberrypi3": {
    "bcm2835 - bcm2835 ALSA": {
      "0,0": "Raspberry Jack",
      "0,1": "Raspberry HDMI"
    },
    "output": "0,0",
    "input": null
  },
  "raspberrypi2": {
    "output": "0,0",
    "input": null
  },
  "raspberrypi": {
    "output": "0,0",
    "input": null
  }
}
@@ -1,15 +0,0 @@
.:53 {
    log
    errors
    hosts /config/hosts {
        fallthrough
    }
    template ANY AAAA local.hass.io hassio {
        rcode NOERROR
    }
    forward . $servers {
        except local.hass.io
        policy sequential
        health_check 10s
    }
}
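The $servers placeholder uses string-template syntax; the asound.tmpl later in this diff is rendered with Python's string.Template, and a similar approach would render this Corefile. A minimal sketch (the file name and upstream list are hypothetical):

    from string import Template

    corefile = Template(open("Corefile.tmpl").read())
    rendered = corefile.safe_substitute(servers="dns://192.168.1.1 dns://1.1.1.1")
    print(rendered)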
@@ -1,57 +0,0 @@
"""D-Bus interface for rauc."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError, DBusInterfaceError
from ..utils.gdbus import DBus

_LOGGER: logging.Logger = logging.getLogger(__name__)

DBUS_NAME = "de.pengutronix.rauc"
DBUS_OBJECT = "/"


class Rauc(DBusInterface):
    """Handle D-Bus interface for rauc."""

    async def connect(self):
        """Connect to D-Bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to rauc")
        except DBusInterfaceError:
            _LOGGER.warning("Host has no rauc support. OTA updates have been disabled.")

    @dbus_connected
    def install(self, raucb_file):
        """Install rauc bundle file.

        Return a coroutine.
        """
        return self.dbus.Installer.Install(raucb_file)

    @dbus_connected
    def get_slot_status(self):
        """Get slot status.

        Return a coroutine.
        """
        return self.dbus.Installer.GetSlotStatus()

    @dbus_connected
    def get_properties(self):
        """Return rauc information.

        Return a coroutine.
        """
        return self.dbus.get_properties(f"{DBUS_NAME}.Installer")

    @dbus_connected
    def signal_completed(self):
        """Return a signal wrapper for the Completed signal.

        Return a coroutine.
        """
        return self.dbus.wait_signal(f"{DBUS_NAME}.Installer.Completed")
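A minimal usage sketch for this wrapper, assuming a running event loop and a host that exposes rauc on the system bus; the exact ordering in the real HassOS update flow may differ:

    async def ota_install(bundle_path: str) -> None:
        rauc = Rauc()
        await rauc.connect()             # binds de.pengutronix.rauc on the system bus
        await rauc.install(bundle_path)  # Installer.Install starts the update
        await rauc.signal_completed()    # resolves when the Completed signal fires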
@@ -1,38 +0,0 @@
"""HassOS Cli docker object."""
import logging

import docker

from ..coresys import CoreSysAttributes
from .interface import DockerInterface

_LOGGER: logging.Logger = logging.getLogger(__name__)


class DockerHassOSCli(DockerInterface, CoreSysAttributes):
    """Docker Hass.io wrapper for HassOS Cli."""

    @property
    def image(self):
        """Return name of HassOS CLI image."""
        return f"homeassistant/{self.sys_arch.supervisor}-hassio-cli"

    def _stop(self, remove_container=True):
        """No need to stop."""
        return True

    def _attach(self, tag: str):
        """Attach to a running Docker container.

        Needs to run inside executor.
        """
        try:
            image = self.sys_docker.images.get(f"{self.image}:{tag}")
        except docker.errors.DockerException:
            _LOGGER.warning("Can't find a HassOS CLI %s", self.image)
        else:
            self._meta = image.attrs
            _LOGGER.info(
                "Found HassOS CLI %s with version %s", self.image, self.version
            )
@@ -1,617 +0,0 @@
"""Home Assistant control object."""
import asyncio
from contextlib import asynccontextmanager, suppress
from datetime import datetime, timedelta
from ipaddress import IPv4Address
import logging
import os
from pathlib import Path
import re
import secrets
import time
from typing import Any, AsyncContextManager, Awaitable, Dict, Optional
from uuid import UUID

import aiohttp
from aiohttp import hdrs
import attr
from packaging import version as pkg_version

from .const import (
    ATTR_ACCESS_TOKEN,
    ATTR_BOOT,
    ATTR_IMAGE,
    ATTR_LAST_VERSION,
    ATTR_PASSWORD,
    ATTR_PORT,
    ATTR_REFRESH_TOKEN,
    ATTR_SSL,
    ATTR_UUID,
    ATTR_VERSION,
    ATTR_WAIT_BOOT,
    ATTR_WATCHDOG,
    FILE_HASSIO_HOMEASSISTANT,
    HEADER_HA_ACCESS,
)
from .coresys import CoreSys, CoreSysAttributes
from .docker.homeassistant import DockerHomeAssistant
from .docker.stats import DockerStats
from .exceptions import (
    DockerAPIError,
    HomeAssistantAPIError,
    HomeAssistantAuthError,
    HomeAssistantError,
    HomeAssistantUpdateError,
)
from .utils import check_port, convert_to_ascii, process_lock
from .utils.json import JsonConfig
from .validate import SCHEMA_HASS_CONFIG

_LOGGER: logging.Logger = logging.getLogger(__name__)

RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")


@attr.s(frozen=True)
class ConfigResult:
    """Return object from config check."""

    valid = attr.ib()
    log = attr.ib()


class HomeAssistant(JsonConfig, CoreSysAttributes):
    """Home Assistant core object for handling it."""

    def __init__(self, coresys: CoreSys):
        """Initialize Home Assistant object."""
        super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
        self.coresys: CoreSys = coresys
        self.instance: DockerHomeAssistant = DockerHomeAssistant(coresys)
        self.lock: asyncio.Lock = asyncio.Lock(loop=coresys.loop)
        self._error_state: bool = False

        # We don't persist access tokens. Instead we fetch new ones when needed
        self.access_token: Optional[str] = None
        self._access_token_expires: Optional[datetime] = None

    async def load(self) -> None:
        """Prepare Home Assistant object."""
        try:
            # Evaluate version if we lost this information
            if not self.version:
                self.version = await self.instance.get_latest_version(
                    key=pkg_version.parse
                )

            await self.instance.attach(tag=self.version)
        except DockerAPIError:
            _LOGGER.info("No Home Assistant Docker image %s found.", self.image)
            await self.install_landingpage()
        else:
            self.version = self.instance.version
            self.save_data()

    @property
    def machine(self) -> str:
        """Return the system machine."""
        return self.instance.machine

    @property
    def arch(self) -> str:
        """Return arch of running Home Assistant."""
        return self.instance.arch

    @property
    def error_state(self) -> bool:
        """Return True if system is in error."""
        return self._error_state

    @property
    def ip_address(self) -> IPv4Address:
        """Return IP of Home Assistant instance."""
        return self.instance.ip_address

    @property
    def api_port(self) -> int:
        """Return network port to Home Assistant instance."""
        return self._data[ATTR_PORT]

    @api_port.setter
    def api_port(self, value: int) -> None:
        """Set network port for Home Assistant instance."""
        self._data[ATTR_PORT] = value

    @property
    def api_password(self) -> str:
        """Return password for Home Assistant instance."""
        return self._data.get(ATTR_PASSWORD)

    @api_password.setter
    def api_password(self, value: str):
        """Set password for Home Assistant instance."""
        self._data[ATTR_PASSWORD] = value

    @property
    def api_ssl(self) -> bool:
        """Return if we need SSL to Home Assistant instance."""
        return self._data[ATTR_SSL]

    @api_ssl.setter
    def api_ssl(self, value: bool):
        """Set SSL for Home Assistant instance."""
        self._data[ATTR_SSL] = value

    @property
    def api_url(self) -> str:
        """Return API URL to Home Assistant."""
        return "{}://{}:{}".format(
            "https" if self.api_ssl else "http", self.ip_address, self.api_port
        )

    @property
    def watchdog(self) -> bool:
        """Return True if the watchdog should protect Home Assistant."""
        return self._data[ATTR_WATCHDOG]

    @watchdog.setter
    def watchdog(self, value: bool):
        """Set whether the watchdog should protect Home Assistant."""
        self._data[ATTR_WATCHDOG] = value

    @property
    def wait_boot(self) -> int:
        """Return time to wait for Home Assistant startup."""
        return self._data[ATTR_WAIT_BOOT]

    @wait_boot.setter
    def wait_boot(self, value: int):
        """Set time to wait for Home Assistant startup."""
        self._data[ATTR_WAIT_BOOT] = value

    @property
    def latest_version(self) -> str:
        """Return last available version of Home Assistant."""
        if self.is_custom_image:
            return self._data.get(ATTR_LAST_VERSION)
        return self.sys_updater.version_homeassistant

    @latest_version.setter
    def latest_version(self, value: str):
        """Set last available version of Home Assistant."""
        if value:
            self._data[ATTR_LAST_VERSION] = value
        else:
            self._data.pop(ATTR_LAST_VERSION, None)

    @property
    def image(self) -> str:
        """Return image name of the Home Assistant container."""
        if self._data.get(ATTR_IMAGE):
            return self._data[ATTR_IMAGE]
        return os.environ["HOMEASSISTANT_REPOSITORY"]

    @image.setter
    def image(self, value: str):
        """Set image name of Home Assistant container."""
        if value:
            self._data[ATTR_IMAGE] = value
        else:
            self._data.pop(ATTR_IMAGE, None)

    @property
    def is_custom_image(self) -> bool:
        """Return True if a custom image is used."""
        return all(attr in self._data for attr in (ATTR_IMAGE, ATTR_LAST_VERSION))

    @property
    def version(self) -> Optional[str]:
        """Return version of the local installation."""
        return self._data.get(ATTR_VERSION)

    @version.setter
    def version(self, value: str) -> None:
        """Set installed version."""
        self._data[ATTR_VERSION] = value

    @property
    def boot(self) -> bool:
        """Return True if Home Assistant boot is enabled."""
        return self._data[ATTR_BOOT]

    @boot.setter
    def boot(self, value: bool):
        """Set Home Assistant boot options."""
        self._data[ATTR_BOOT] = value

    @property
    def uuid(self) -> UUID:
        """Return a UUID of this Home Assistant instance."""
        return self._data[ATTR_UUID]

    @property
    def hassio_token(self) -> str:
        """Return an access token for the Hass.io API."""
        return self._data.get(ATTR_ACCESS_TOKEN)

    @property
    def refresh_token(self) -> str:
        """Return the refresh token to authenticate with Home Assistant."""
        return self._data.get(ATTR_REFRESH_TOKEN)

    @refresh_token.setter
    def refresh_token(self, value: str):
        """Set Home Assistant refresh_token."""
        self._data[ATTR_REFRESH_TOKEN] = value

    @process_lock
    async def install_landingpage(self) -> None:
        """Install a landing page."""
        _LOGGER.info("Setup HomeAssistant landingpage")
        while True:
            try:
                await self.instance.install("landingpage")
            except DockerAPIError:
                _LOGGER.warning("Failed to install landingpage, retry after 30sec")
                await asyncio.sleep(30)
            else:
                break

        self.version = self.instance.version
        self.save_data()

    @process_lock
    async def install(self) -> None:
        """Install Home Assistant."""
        _LOGGER.info("Setup Home Assistant")
        while True:
            # Read the Home Assistant tag and install it
            if not self.latest_version:
                await self.sys_updater.reload()

            tag = self.latest_version
            if tag:
                with suppress(DockerAPIError):
                    await self.instance.update(tag)
                    break
            _LOGGER.warning("Error on installing Home Assistant. Retry in 30sec")
            await asyncio.sleep(30)

        _LOGGER.info("Home Assistant docker now installed")
        self.version = self.instance.version
        self.save_data()

        # Finishing
        try:
            _LOGGER.info("Start Home Assistant")
            await self._start()
        except HomeAssistantError:
            _LOGGER.error("Can't start Home Assistant!")

        # Cleanup
        with suppress(DockerAPIError):
            await self.instance.cleanup()

    @process_lock
    async def update(self, version: Optional[str] = None) -> None:
        """Update Home Assistant version."""
        version = version or self.latest_version
        rollback = self.version if not self.error_state else None
        running = await self.instance.is_running()
        exists = await self.instance.exists()

        if exists and version == self.instance.version:
            _LOGGER.warning("Version %s is already installed", version)
            return

        # Process an update
        async def _update(to_version: str) -> None:
            """Run Home Assistant update."""
            _LOGGER.info("Update Home Assistant to version %s", to_version)
            try:
                await self.instance.update(to_version)
            except DockerAPIError:
                _LOGGER.warning("Updating the Home Assistant image failed")
                raise HomeAssistantUpdateError() from None
            else:
                self.version = self.instance.version

            if running:
                await self._start()

            _LOGGER.info("Successfully started Home Assistant %s", to_version)
            self.save_data()
            with suppress(DockerAPIError):
                await self.instance.cleanup()

        # Update Home Assistant
        with suppress(HomeAssistantError):
            await _update(version)
            return

        # Update went wrong, revert it
        if self.error_state and rollback:
            _LOGGER.fatal("HomeAssistant update failed -> rollback!")
            await _update(rollback)
        else:
            raise HomeAssistantUpdateError()

    async def _start(self) -> None:
        """Start Home Assistant Docker & wait."""
        if await self.instance.is_running():
            _LOGGER.warning("Home Assistant is already running!")
            return

        # Create new API token
        self._data[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
        self.save_data()

        try:
            await self.instance.run()
        except DockerAPIError:
            raise HomeAssistantError() from None
        await self._block_till_run()

    @process_lock
    async def start(self) -> None:
        """Run Home Assistant docker."""
        try:
            if await self.instance.is_running():
                await self.instance.restart()
            elif await self.instance.is_initialize():
                await self.instance.start()
            else:
                await self._start()
                return

            await self._block_till_run()
        except DockerAPIError:
            raise HomeAssistantError() from None

    @process_lock
    async def stop(self) -> None:
        """Stop Home Assistant Docker.

        Return a coroutine.
        """
        try:
            return await self.instance.stop(remove_container=False)
        except DockerAPIError:
            raise HomeAssistantError() from None

    @process_lock
    async def restart(self) -> None:
        """Restart Home Assistant Docker."""
        try:
            await self.instance.restart()
        except DockerAPIError:
            raise HomeAssistantError() from None

        await self._block_till_run()

    @process_lock
    async def rebuild(self) -> None:
        """Rebuild Home Assistant Docker container."""
        with suppress(DockerAPIError):
            await self.instance.stop()
        await self._start()

    def logs(self) -> Awaitable[bytes]:
        """Get HomeAssistant docker logs.

        Return a coroutine.
        """
        return self.instance.logs()

    async def stats(self) -> DockerStats:
        """Return stats of Home Assistant."""
        try:
            return await self.instance.stats()
        except DockerAPIError:
            raise HomeAssistantError() from None

    def is_running(self) -> Awaitable[bool]:
        """Return True if the Docker container is running.

        Return a coroutine.
        """
        return self.instance.is_running()

    def is_fails(self) -> Awaitable[bool]:
        """Return True if the Docker container is in a failed state.

        Return a coroutine.
        """
        return self.instance.is_fails()

    @property
    def in_progress(self) -> bool:
        """Return True if a task is in progress."""
        return self.instance.in_progress or self.lock.locked()

    async def check_config(self) -> ConfigResult:
        """Run Home Assistant config check."""
        result = await self.instance.execute_command(
            "python3 -m homeassistant -c /config --script check_config"
        )

        # If not valid
        if result.exit_code is None:
            _LOGGER.error("Fatal error on config check!")
            raise HomeAssistantError()

        # Parse output
        log = convert_to_ascii(result.output)
        if result.exit_code != 0 or RE_YAML_ERROR.search(log):
            _LOGGER.error("Invalid Home Assistant config found!")
            return ConfigResult(False, log)

        _LOGGER.info("Home Assistant config is valid")
        return ConfigResult(True, log)

    async def ensure_access_token(self) -> None:
        """Ensure there is an access token."""
        if (
            self.access_token is not None
            and self._access_token_expires > datetime.utcnow()
        ):
            return

        with suppress(asyncio.TimeoutError, aiohttp.ClientError):
            async with self.sys_websession_ssl.post(
                f"{self.api_url}/auth/token",
                timeout=30,
                data={
                    "grant_type": "refresh_token",
                    "refresh_token": self.refresh_token,
                },
            ) as resp:
                if resp.status != 200:
                    _LOGGER.error("Can't update Home Assistant access token!")
                    raise HomeAssistantAuthError()

                _LOGGER.info("Updated Home Assistant API token")
                tokens = await resp.json()
                self.access_token = tokens["access_token"]
                self._access_token_expires = datetime.utcnow() + timedelta(
                    seconds=tokens["expires_in"]
                )

    @asynccontextmanager
    async def make_request(
        self,
        method: str,
        path: str,
        json: Optional[Dict[str, Any]] = None,
        content_type: Optional[str] = None,
        data: Any = None,
        timeout: int = 30,
        params: Optional[Dict[str, str]] = None,
        headers: Optional[Dict[str, str]] = None,
    ) -> AsyncContextManager[aiohttp.ClientResponse]:
        """Async context manager to make a request with the right auth."""
        url = f"{self.api_url}/{path}"
        headers = headers or {}

        # Passthrough content type
        if content_type is not None:
            headers[hdrs.CONTENT_TYPE] = content_type

        # Set old API password
        if not self.refresh_token and self.api_password:
            headers[HEADER_HA_ACCESS] = self.api_password

        for _ in (1, 2):
            # Prepare access token
            if self.refresh_token:
                await self.ensure_access_token()
                headers[hdrs.AUTHORIZATION] = f"Bearer {self.access_token}"

            try:
                async with getattr(self.sys_websession_ssl, method)(
                    url,
                    data=data,
                    timeout=timeout,
                    json=json,
                    headers=headers,
                    params=params,
                ) as resp:
                    # Access token expired
                    if resp.status == 401 and self.refresh_token:
                        self.access_token = None
                        continue
                    yield resp
                    return
            except (asyncio.TimeoutError, aiohttp.ClientError) as err:
                _LOGGER.error("Error on call %s: %s", url, err)
                break

        raise HomeAssistantAPIError()

    async def check_api_state(self) -> bool:
        """Return True if Home Assistant is up and running."""
        with suppress(HomeAssistantAPIError):
            async with self.make_request("get", "api/") as resp:
                if resp.status in (200, 201):
                    return True
                status = resp.status
            _LOGGER.warning("Home Assistant API config mismatch: %s", status)

        return False

    async def _block_till_run(self) -> None:
        """Block until Home Assistant is booting up or startup timeout."""
        start_time = time.monotonic()

        # Database migration
        migration_progress = False
        migration_file = Path(self.sys_config.path_homeassistant, ".migration_progress")

        # PIP installation
        pip_progress = False
        pip_file = Path(self.sys_config.path_homeassistant, ".pip_progress")

        while True:
            await asyncio.sleep(5)

            # 1: Check if the container is running
            if not await self.instance.is_running():
                _LOGGER.error("Home Assistant has crashed!")
                break

            # 2: Check if the API responds
            if await self.sys_run_in_executor(
                check_port, self.ip_address, self.api_port
            ):
                _LOGGER.info("Detected a running Home Assistant instance")
                self._error_state = False
                return

            # 3: Running DB migration
            if migration_file.exists():
                if not migration_progress:
                    migration_progress = True
                    _LOGGER.info("Home Assistant record migration in progress")
                continue
            if migration_progress:
                migration_progress = False  # Reset start time
                start_time = time.monotonic()
                _LOGGER.info("Home Assistant record migration done")

            # 4: Running PIP installation
            if pip_file.exists():
                if not pip_progress:
                    pip_progress = True
                    _LOGGER.info("Home Assistant pip installation in progress")
                continue
            if pip_progress:
                pip_progress = False  # Reset start time
                start_time = time.monotonic()
                _LOGGER.info("Home Assistant pip installation done")

            # 5: Timeout
            if time.monotonic() - start_time > self.wait_boot:
                _LOGGER.warning("Don't wait anymore for Home Assistant startup!")
                break

        self._error_state = True
        raise HomeAssistantError()

    async def repair(self):
        """Repair local Home Assistant data."""
        if await self.instance.exists():
            return

        _LOGGER.info("Repair Home Assistant %s", self.version)
        await self.sys_run_in_executor(
            self.sys_docker.network.stale_cleanup, self.instance.name
        )

        # Pull image
        try:
            await self.instance.install(self.version)
        except DockerAPIError:
            _LOGGER.error("Repairing of Home Assistant failed")
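A short usage sketch of make_request; "api/config" is a standard Home Assistant REST endpoint, and the surrounding coresys wiring is assumed:

    async def fetch_ha_config(homeassistant: HomeAssistant) -> dict:
        # make_request handles token refresh and retries on a 401 internally
        async with homeassistant.make_request("get", "api/config") as resp:
            resp.raise_for_status()
            return await resp.json()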
@@ -1,138 +0,0 @@
"""Host Audio support."""
import logging
import json
from pathlib import Path
from string import Template

import attr

from ..const import ATTR_INPUT, ATTR_OUTPUT, ATTR_DEVICES, ATTR_NAME, CHAN_ID, CHAN_TYPE
from ..coresys import CoreSysAttributes

_LOGGER: logging.Logger = logging.getLogger(__name__)


@attr.s()
class DefaultConfig:
    """Default config input/output ALSA channel."""

    input: str = attr.ib()
    output: str = attr.ib()


AUDIODB_JSON: Path = Path(__file__).parents[1].joinpath("data/audiodb.json")
ASOUND_TMPL: Path = Path(__file__).parents[1].joinpath("data/asound.tmpl")


class AlsaAudio(CoreSysAttributes):
    """Handle Audio ALSA host data."""

    def __init__(self, coresys):
        """Initialize ALSA audio system."""
        self.coresys = coresys
        self._data = {ATTR_INPUT: {}, ATTR_OUTPUT: {}}
        self._cache = 0
        self._default = None

    @property
    def input_devices(self):
        """Return list of ALSA input devices."""
        self._update_device()
        return self._data[ATTR_INPUT]

    @property
    def output_devices(self):
        """Return list of ALSA output devices."""
        self._update_device()
        return self._data[ATTR_OUTPUT]

    def _update_device(self):
        """Update internal device DB."""
        current_id = hash(frozenset(self.sys_hardware.audio_devices))

        # Need rebuild?
        if current_id == self._cache:
            return

        # Clean old stuff
        self._data[ATTR_INPUT].clear()
        self._data[ATTR_OUTPUT].clear()

        # Init database
        _LOGGER.info("Update ALSA device list")
        database = self._audio_database()

        # Process devices
        for dev_id, dev_data in self.sys_hardware.audio_devices.items():
            for chan_info in dev_data[ATTR_DEVICES]:
                chan_id = chan_info[CHAN_ID]
                chan_type = chan_info[CHAN_TYPE]
                alsa_id = f"{dev_id},{chan_id}"
                dev_name = dev_data[ATTR_NAME]

                # Lookup type
                if chan_type.endswith("playback"):
                    key = ATTR_OUTPUT
                elif chan_type.endswith("capture"):
                    key = ATTR_INPUT
                else:
                    _LOGGER.warning("Unknown channel type: %s", chan_type)
                    continue

                # Use name from DB or a generic name
                self._data[key][alsa_id] = (
                    database.get(self.sys_machine, {})
                    .get(dev_name, {})
                    .get(alsa_id, f"{dev_name}: {chan_id}")
                )

        self._cache = current_id

    @staticmethod
    def _audio_database():
        """Read local JSON audio data into a dict."""
        try:
            return json.loads(AUDIODB_JSON.read_text())
        except (ValueError, OSError) as err:
            _LOGGER.warning("Can't read audio DB: %s", err)

        return {}

    @property
    def default(self):
        """Generate ALSA default setting."""
        # Init defaults
        if self._default is None:
            database = self._audio_database()
            alsa_input = database.get(self.sys_machine, {}).get(ATTR_INPUT)
            alsa_output = database.get(self.sys_machine, {}).get(ATTR_OUTPUT)

            self._default = DefaultConfig(alsa_input, alsa_output)

        # Search existing/new output
        if self._default.output is None and self.output_devices:
            self._default.output = next(iter(self.output_devices))
            _LOGGER.info("Detect output device %s", self._default.output)

        # Search existing/new input
        if self._default.input is None and self.input_devices:
            self._default.input = next(iter(self.input_devices))
            _LOGGER.info("Detect input device %s", self._default.input)

        return self._default

    def asound(self, alsa_input=None, alsa_output=None):
        """Generate asound data."""
        alsa_input = alsa_input or self.default.input
        alsa_output = alsa_output or self.default.output

        # Read template
        try:
            asound_data = ASOUND_TMPL.read_text()
        except OSError as err:
            _LOGGER.error("Can't read asound.tmpl: %s", err)
            return ""

        # Process template
        asound_template = Template(asound_data)
        return asound_template.safe_substitute(input=alsa_input, output=alsa_output)
@@ -1 +0,0 @@
"""Special object and tools for Hass.io."""
@@ -1,49 +0,0 @@
"""Set up the internal DNS service for host applications."""
import asyncio
import logging
import shlex
from ipaddress import IPv4Address
from typing import Optional

import async_timeout

_LOGGER: logging.Logger = logging.getLogger(__name__)

COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:{!s}:53"


class DNSForward:
    """Manage DNS forwarding to the internal DNS."""

    def __init__(self):
        """Initialize DNS forwarding."""
        self.proc: Optional[asyncio.subprocess.Process] = None

    async def start(self, dns_server: IPv4Address) -> None:
        """Start DNS forwarding."""
        try:
            self.proc = await asyncio.create_subprocess_exec(
                *shlex.split(COMMAND.format(dns_server)),
                stdin=asyncio.subprocess.DEVNULL,
                stdout=asyncio.subprocess.DEVNULL,
                stderr=asyncio.subprocess.DEVNULL,
            )
        except OSError as err:
            _LOGGER.error("Can't start DNS forwarding: %s", err)
        else:
            _LOGGER.info("Start DNS port forwarding to %s", dns_server)

    async def stop(self) -> None:
        """Stop DNS forwarding."""
        if not self.proc:
            _LOGGER.warning("DNS forwarding is not running!")
            return

        self.proc.kill()
        try:
            async with async_timeout.timeout(5):
                await self.proc.wait()
        except asyncio.TimeoutError:
            _LOGGER.warning("Timeout while waiting for DNS shutdown")

        _LOGGER.info("Stop DNS forwarding")
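A minimal usage sketch for DNSForward, assuming socat is installed and the process is allowed to bind UDP port 53; the upstream address is an example value:

import asyncio
from ipaddress import IPv4Address


async def main() -> None:
    forwarder = DNSForward()
    await forwarder.start(IPv4Address("172.30.32.3"))  # example upstream DNS
    await asyncio.sleep(60)  # forward queries for a minute
    await forwarder.stop()


asyncio.run(main())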
@@ -1,75 +0,0 @@
"""Schedule for Hass.io."""
import asyncio
from datetime import date, datetime, time, timedelta
import logging

_LOGGER: logging.Logger = logging.getLogger(__name__)

INTERVAL = "interval"
REPEAT = "repeat"
CALL = "callback"
TASK = "task"


class Scheduler:
    """Schedule tasks inside Hass.io."""

    def __init__(self):
        """Initialize the task scheduler."""
        self.loop = asyncio.get_running_loop()
        self._data = {}
        self.suspend = False

    def register_task(self, coro_callback, interval, repeat=True):
        """Schedule a coroutine.

        The coroutine needs to be a callback that takes no arguments.
        """
        task_id = hash(coro_callback)

        # Generate data
        opts = {CALL: coro_callback, INTERVAL: interval, REPEAT: repeat}

        # Schedule task
        self._data[task_id] = opts
        self._schedule_task(interval, task_id)

        return task_id

    def _run_task(self, task_id):
        """Run a scheduled task."""
        data = self._data[task_id]

        if not self.suspend:
            self.loop.create_task(data[CALL]())

        if data[REPEAT]:
            self._schedule_task(data[INTERVAL], task_id)
        else:
            self._data.pop(task_id)

    def _schedule_task(self, interval, task_id):
        """Schedule a task on the loop."""
        if isinstance(interval, (int, float)):
            job = self.loop.call_later(interval, self._run_task, task_id)
        elif isinstance(interval, time):
            today = datetime.combine(date.today(), interval)
            tomorrow = datetime.combine(date.today() + timedelta(days=1), interval)

            # Check if we run it today or the next day
            if today > datetime.today():
                calc = today
            else:
                calc = tomorrow

            # call_at() expects the loop's monotonic clock, not a wall-clock
            # timestamp, so schedule via a relative delay instead.
            delay = (calc - datetime.today()).total_seconds()
            job = self.loop.call_later(delay, self._run_task, task_id)
        else:
            _LOGGER.fatal(
                "Unknown interval %s (type: %s) for scheduler %s",
                interval,
                type(interval),
                task_id,
            )
            return

        # Store job
        self._data[task_id][TASK] = job
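A minimal usage sketch for the Scheduler, created inside a running event loop; heartbeat is a hypothetical callback:

import asyncio


async def heartbeat():
    print("beat")  # hypothetical periodic work


async def main() -> None:
    scheduler = Scheduler()
    scheduler.register_task(heartbeat, 10)  # fire every 10 seconds
    await asyncio.sleep(35)  # let the task run a few times


asyncio.run(main())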
@@ -1,12 +0,0 @@
"""Validate services schema."""
import voluptuous as vol

from ..utils.validate import schema_or
from .const import SERVICE_MQTT
from .modules.mqtt import SCHEMA_CONFIG_MQTT


SCHEMA_SERVICES_CONFIG = vol.Schema(
    {vol.Optional(SERVICE_MQTT, default=dict): schema_or(SCHEMA_CONFIG_MQTT)},
    extra=vol.REMOVE_EXTRA,
)
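A minimal sketch of how this schema behaves, assuming SERVICE_MQTT == "mqtt" and that schema_or accepts an empty section:

# Unknown keys are dropped (extra=vol.REMOVE_EXTRA) and the MQTT section
# defaults to an empty dict when absent.
config = SCHEMA_SERVICES_CONFIG({"unexpected": 1})
print(config)  # {'mqtt': {}}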
236 hassio/tasks.py
@@ -1,236 +0,0 @@
"""A collection of tasks."""
import asyncio
import logging

from .coresys import CoreSysAttributes
from .exceptions import HomeAssistantError, CoreDNSError

_LOGGER: logging.Logger = logging.getLogger(__name__)

HASS_WATCHDOG_API = "HASS_WATCHDOG_API"

RUN_UPDATE_SUPERVISOR = 29100
RUN_UPDATE_ADDONS = 57600
RUN_UPDATE_HASSOSCLI = 28100
RUN_UPDATE_DNS = 30100

RUN_RELOAD_ADDONS = 10800
RUN_RELOAD_SNAPSHOTS = 72000
RUN_RELOAD_HOST = 7600
RUN_RELOAD_UPDATER = 7200
RUN_RELOAD_INGRESS = 930

RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
RUN_WATCHDOG_HOMEASSISTANT_API = 300

RUN_WATCHDOG_DNS_DOCKER = 20
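These interval constants are plain seconds; a small sketch translating the larger ones into human-readable durations:

from datetime import timedelta

for name, seconds in [
    ("RUN_UPDATE_SUPERVISOR", RUN_UPDATE_SUPERVISOR),
    ("RUN_UPDATE_ADDONS", RUN_UPDATE_ADDONS),
    ("RUN_RELOAD_SNAPSHOTS", RUN_RELOAD_SNAPSHOTS),
]:
    print(name, timedelta(seconds=seconds))
# e.g. RUN_UPDATE_ADDONS -> 16:00:00 (add-ons auto-update every 16 hours)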
class Tasks(CoreSysAttributes):
    """Handle tasks inside Hass.io."""

    def __init__(self, coresys):
        """Initialize Tasks."""
        self.coresys = coresys
        self.jobs = set()
        self._cache = {}

    async def load(self):
        """Add Tasks to the scheduler."""
        # Update
        self.jobs.add(
            self.sys_scheduler.register_task(self._update_addons, RUN_UPDATE_ADDONS)
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self._update_supervisor, RUN_UPDATE_SUPERVISOR
            )
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self._update_hassos_cli, RUN_UPDATE_HASSOSCLI
            )
        )
        self.jobs.add(
            self.sys_scheduler.register_task(self._update_dns, RUN_UPDATE_DNS)
        )

        # Reload
        self.jobs.add(
            self.sys_scheduler.register_task(self.sys_store.reload, RUN_RELOAD_ADDONS)
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self.sys_updater.reload, RUN_RELOAD_UPDATER
            )
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self.sys_snapshots.reload, RUN_RELOAD_SNAPSHOTS
            )
        )
        self.jobs.add(
            self.sys_scheduler.register_task(self.sys_host.reload, RUN_RELOAD_HOST)
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self.sys_ingress.reload, RUN_RELOAD_INGRESS
            )
        )

        # Watchdog
        self.jobs.add(
            self.sys_scheduler.register_task(
                self._watchdog_homeassistant_docker, RUN_WATCHDOG_HOMEASSISTANT_DOCKER
            )
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self._watchdog_homeassistant_api, RUN_WATCHDOG_HOMEASSISTANT_API
            )
        )
        self.jobs.add(
            self.sys_scheduler.register_task(
                self._watchdog_dns_docker, RUN_WATCHDOG_DNS_DOCKER
            )
        )

        _LOGGER.info("All core tasks are scheduled")

    async def _update_addons(self):
        """Check if an update is available for an add-on and update it."""
        tasks = []
        for addon in self.sys_addons.all:
            if not addon.is_installed or not addon.auto_update:
                continue

            if addon.version == addon.latest_version:
                continue

            if addon.test_update_schema():
                tasks.append(addon.update())
            else:
                _LOGGER.warning(
                    "Add-on %s will be ignored, schema test fails", addon.slug
                )

        if tasks:
            _LOGGER.info("Add-on auto update process %d tasks", len(tasks))
            await asyncio.wait(tasks)

    async def _update_supervisor(self):
        """Check for and run an update of the Hass.io Supervisor."""
        if not self.sys_supervisor.need_update:
            return

        # don't perform an update on the dev channel
        if self.sys_dev:
            _LOGGER.warning("Ignore Hass.io update on dev channel!")
            return

        _LOGGER.info("Found new Hass.io version")
        await self.sys_supervisor.update()

    async def _watchdog_homeassistant_docker(self):
        """Check the running state of the Docker container and start it if it is down."""
        # if Home Assistant is active
        if (
            not await self.sys_homeassistant.is_fails()
            or not self.sys_homeassistant.watchdog
            or self.sys_homeassistant.error_state
        ):
            return

        # if Home Assistant is running
        if (
            self.sys_homeassistant.in_progress
            or await self.sys_homeassistant.is_running()
        ):
            return

        _LOGGER.warning("Watchdog found a problem with Home Assistant Docker!")
        try:
            await self.sys_homeassistant.start()
        except HomeAssistantError:
            _LOGGER.error("Watchdog Home Assistant reanimation fails!")

    async def _watchdog_homeassistant_api(self):
        """Monitor the running state of the Home Assistant API.

        Try twice to call the API before restarting Home Assistant, in case
        the system was only briefly delayed.
        """
        # If Home Assistant is active
        if (
            not await self.sys_homeassistant.is_fails()
            or not self.sys_homeassistant.watchdog
            or self.sys_homeassistant.error_state
        ):
            return

        # Init cache data
        retry_scan = self._cache.get(HASS_WATCHDOG_API, 0)

        # If the Home Assistant API is up
        if (
            self.sys_homeassistant.in_progress
            or await self.sys_homeassistant.check_api_state()
        ):
            return

        # Looks like we ran into a problem
        retry_scan += 1
        if retry_scan == 1:
            self._cache[HASS_WATCHDOG_API] = retry_scan
            _LOGGER.warning("Watchdog missed an API response from Home Assistant")
            return

        _LOGGER.error("Watchdog found a problem with the Home Assistant API!")
        try:
            await self.sys_homeassistant.restart()
        except HomeAssistantError:
            _LOGGER.error("Watchdog Home Assistant reanimation fails!")
        finally:
            self._cache[HASS_WATCHDOG_API] = 0

    async def _update_hassos_cli(self):
        """Check for and run an update of the HassOS CLI."""
        if not self.sys_hassos.need_cli_update:
            return

        # don't perform an update on the dev channel
        if self.sys_dev:
            _LOGGER.warning("Ignore HassOS CLI update on dev channel!")
            return

        _LOGGER.info("Found new HassOS CLI version")
        await self.sys_hassos.update_cli()

    async def _update_dns(self):
        """Check for and run an update of the CoreDNS plugin."""
        if not self.sys_dns.need_update:
            return

        # don't perform an update on the dev channel
        if self.sys_dev:
            _LOGGER.warning("Ignore CoreDNS update on dev channel!")
            return

        _LOGGER.info("Found new CoreDNS plugin version")
        await self.sys_dns.update()

    async def _watchdog_dns_docker(self):
        """Check the running state of the CoreDNS container and start it if it is down."""
        # if CoreDNS is running
        if await self.sys_dns.is_running():
            return
        _LOGGER.warning("Watchdog found a problem with the CoreDNS plugin!")

        if await self.sys_dns.is_fails():
            _LOGGER.warning("CoreDNS plugin is in a failed state / resetting config")
            await self.sys_dns.reset()

        try:
            await self.sys_dns.start()
        except CoreDNSError:
            _LOGGER.error("Watchdog CoreDNS reanimation fails!")
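Stripped of Supervisor specifics, the API watchdog above is a simple two-strikes counter; a rough sketch of the pattern (check_api and restart are hypothetical callables, and the state dict carries the miss counter between scheduler ticks):

async def two_strikes_watchdog(state: dict, check_api, restart) -> None:
    if await check_api():
        state["misses"] = 0
        return

    state["misses"] = state.get("misses", 0) + 1
    if state["misses"] == 1:
        print("first miss - maybe just a delay, wait for the next tick")
        return

    print("second miss - restarting the service")
    await restart()
    state["misses"] = 0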
@@ -1,129 +0,0 @@
"""Fetch the latest versions from the webserver."""
import asyncio
from contextlib import suppress
from datetime import timedelta
import json
import logging
from typing import Optional

import aiohttp

from .const import (
    ATTR_CHANNEL,
    ATTR_DNS,
    ATTR_HASSIO,
    ATTR_HASSOS,
    ATTR_HASSOS_CLI,
    ATTR_HOMEASSISTANT,
    FILE_HASSIO_UPDATER,
    URL_HASSIO_VERSION,
)
from .coresys import CoreSysAttributes
from .exceptions import HassioUpdaterError
from .utils import AsyncThrottle
from .utils.json import JsonConfig
from .validate import SCHEMA_UPDATER_CONFIG

_LOGGER: logging.Logger = logging.getLogger(__name__)


class Updater(JsonConfig, CoreSysAttributes):
    """Fetch the latest versions from version.json."""

    def __init__(self, coresys):
        """Initialize the updater."""
        super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
        self.coresys = coresys

    async def load(self) -> None:
        """Update internal data."""
        with suppress(HassioUpdaterError):
            await self.fetch_data()

    async def reload(self) -> None:
        """Update internal data."""
        with suppress(HassioUpdaterError):
            await self.fetch_data()

    @property
    def version_homeassistant(self) -> Optional[str]:
        """Return the latest version of Home Assistant."""
        return self._data.get(ATTR_HOMEASSISTANT)

    @property
    def version_hassio(self) -> Optional[str]:
        """Return the latest version of Hass.io."""
        return self._data.get(ATTR_HASSIO)

    @property
    def version_hassos(self) -> Optional[str]:
        """Return the latest version of HassOS."""
        return self._data.get(ATTR_HASSOS)

    @property
    def version_hassos_cli(self) -> Optional[str]:
        """Return the latest version of the HassOS CLI."""
        return self._data.get(ATTR_HASSOS_CLI)

    @property
    def version_dns(self) -> Optional[str]:
        """Return the latest version of Hass.io DNS."""
        return self._data.get(ATTR_DNS)

    @property
    def channel(self) -> str:
        """Return the upstream channel of this Hass.io instance."""
        return self._data[ATTR_CHANNEL]

    @channel.setter
    def channel(self, value: str):
        """Set the upstream mode."""
        self._data[ATTR_CHANNEL] = value

    @AsyncThrottle(timedelta(seconds=60))
    async def fetch_data(self):
        """Fetch the current versions from GitHub.

        Is a coroutine.
        """
        url = URL_HASSIO_VERSION.format(channel=self.channel)
        machine = self.sys_machine or "default"
        board = self.sys_hassos.board

        try:
            _LOGGER.info("Fetch update data from %s", url)
            async with self.sys_websession.get(url, timeout=10) as request:
                data = await request.json(content_type=None)

        except (aiohttp.ClientError, asyncio.TimeoutError) as err:
            _LOGGER.warning("Can't fetch versions from %s: %s", url, err)
            raise HassioUpdaterError() from None

        except json.JSONDecodeError as err:
            _LOGGER.warning("Can't parse versions from %s: %s", url, err)
            raise HassioUpdaterError() from None

        # data valid?
        if not data or data.get(ATTR_CHANNEL) != self.channel:
            _LOGGER.warning("Invalid data from %s", url)
            raise HassioUpdaterError() from None

        try:
            # update supervisor version
            self._data[ATTR_HASSIO] = data["supervisor"]
            self._data[ATTR_DNS] = data["dns"]

            # update Home Assistant version
            self._data[ATTR_HOMEASSISTANT] = data["homeassistant"][machine]

            # update HassOS version
            if self.sys_hassos.available and board:
                self._data[ATTR_HASSOS] = data["hassos"][board]
                self._data[ATTR_HASSOS_CLI] = data["hassos-cli"]

        except KeyError as err:
            _LOGGER.warning("Can't process version data: %s", err)
            raise HassioUpdaterError() from None

        else:
            self.save_data()
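The AsyncThrottle decorator above suppresses calls that arrive inside the throttle window; a rough sketch of the idea (not the exact helper from hassio/utils):

from datetime import datetime, timedelta


class ThrottleSketch:
    """Skip coroutine calls that arrive within the throttle period."""

    def __init__(self, delta: timedelta):
        self.throttle_period = delta
        self.time_of_last_call = datetime.min

    def __call__(self, method):
        async def wrapper(*args, **kwargs):
            now = datetime.now()
            if now - self.time_of_last_call > self.throttle_period:
                self.time_of_last_call = now
                return await method(*args, **kwargs)
            # inside the window: drop the call silently

        return wrapper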
@@ -1,163 +0,0 @@
"""Validate functions."""
import re
import uuid

import voluptuous as vol

from .const import (
    ATTR_ACCESS_TOKEN,
    ATTR_ADDONS_CUSTOM_LIST,
    ATTR_BOOT,
    ATTR_CHANNEL,
    ATTR_DEBUG,
    ATTR_DEBUG_BLOCK,
    ATTR_DNS,
    ATTR_HASSIO,
    ATTR_HASSOS,
    ATTR_HASSOS_CLI,
    ATTR_HOMEASSISTANT,
    ATTR_IMAGE,
    ATTR_LAST_BOOT,
    ATTR_LAST_VERSION,
    ATTR_LOGGING,
    ATTR_PASSWORD,
    ATTR_PORT,
    ATTR_PORTS,
    ATTR_REFRESH_TOKEN,
    ATTR_SERVERS,
    ATTR_SESSION,
    ATTR_SSL,
    ATTR_TIMEZONE,
    ATTR_UUID,
    ATTR_VERSION,
    ATTR_WAIT_BOOT,
    ATTR_WATCHDOG,
    CHANNEL_BETA,
    CHANNEL_DEV,
    CHANNEL_STABLE,
    DNS_SERVERS,
)
from .utils.validate import validate_timezone

RE_REPOSITORY = re.compile(r"^(?P<url>[^#]+)(?:#(?P<branch>[\w\-]+))?$")

# pylint: disable=no-value-for-parameter
NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
WAIT_BOOT = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
DOCKER_IMAGE = vol.Match(r"^[\w{}]+/[\-\w{}]+$")
ALSA_DEVICE = vol.Maybe(vol.Match(r"\d+,\d+"))
CHANNELS = vol.In([CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV])
UUID_MATCH = vol.Match(r"^[0-9a-f]{32}$")
SHA256 = vol.Match(r"^[0-9a-f]{64}$")
TOKEN = vol.Match(r"^[0-9a-f]{32,256}$")
LOG_LEVEL = vol.In(["debug", "info", "warning", "error", "critical"])
DNS_URL = vol.Match(r"^dns://\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
DNS_SERVER_LIST = vol.All([DNS_URL], vol.Length(max=8))


def validate_repository(repository):
    """Validate an add-on repository string."""
    data = RE_REPOSITORY.match(repository)
    if not data:
        raise vol.Invalid("No valid repository format!")

    # Validate URL
    # pylint: disable=no-value-for-parameter
    vol.Url()(data.group("url"))

    return repository


# pylint: disable=no-value-for-parameter
REPOSITORIES = vol.All([validate_repository], vol.Unique())


DOCKER_PORTS = vol.Schema(
    {
        vol.All(vol.Coerce(str), vol.Match(r"^\d+(?:/tcp|/udp)?$")): vol.Maybe(
            NETWORK_PORT
        )
    }
)

DOCKER_PORTS_DESCRIPTION = vol.Schema(
    {vol.All(vol.Coerce(str), vol.Match(r"^\d+(?:/tcp|/udp)?$")): vol.Coerce(str)}
)
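A quick sketch of what the DOCKER_PORTS schema accepts; the mapping below is a made-up example:

# Keys are container ports (optionally with /tcp or /udp), values are host
# ports or None for "don't publish".
ports = DOCKER_PORTS({"8080/tcp": 8080, "1883": None})
print(ports)  # {'8080/tcp': 8080, '1883': None}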
# pylint: disable=no-value-for-parameter
SCHEMA_HASS_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
        vol.Optional(ATTR_VERSION): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
        vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
        vol.Inclusive(ATTR_IMAGE, "custom_hass"): DOCKER_IMAGE,
        vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Coerce(str),
        vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
        vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
        vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
        vol.Optional(ATTR_WAIT_BOOT, default=600): vol.All(
            vol.Coerce(int), vol.Range(min=60)
        ),
    },
    extra=vol.REMOVE_EXTRA,
)


SCHEMA_UPDATER_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_CHANNEL, default=CHANNEL_STABLE): CHANNELS,
        vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
        vol.Optional(ATTR_HASSIO): vol.Coerce(str),
        vol.Optional(ATTR_HASSOS): vol.Coerce(str),
        vol.Optional(ATTR_HASSOS_CLI): vol.Coerce(str),
        vol.Optional(ATTR_DNS): vol.Coerce(str),
    },
    extra=vol.REMOVE_EXTRA,
)


# pylint: disable=no-value-for-parameter
SCHEMA_HASSIO_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_TIMEZONE, default="UTC"): validate_timezone,
        vol.Optional(ATTR_LAST_BOOT): vol.Coerce(str),
        vol.Optional(
            ATTR_ADDONS_CUSTOM_LIST,
            default=["https://github.com/hassio-addons/repository"],
        ): REPOSITORIES,
        vol.Optional(ATTR_WAIT_BOOT, default=5): WAIT_BOOT,
        vol.Optional(ATTR_LOGGING, default="info"): LOG_LEVEL,
        vol.Optional(ATTR_DEBUG, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEBUG_BLOCK, default=False): vol.Boolean(),
    },
    extra=vol.REMOVE_EXTRA,
)


SCHEMA_AUTH_CONFIG = vol.Schema({SHA256: SHA256})


SCHEMA_INGRESS_CONFIG = vol.Schema(
    {
        vol.Required(ATTR_SESSION, default=dict): vol.Schema(
            {TOKEN: vol.Coerce(float)}
        ),
        vol.Required(ATTR_PORTS, default=dict): vol.Schema(
            {vol.Coerce(str): NETWORK_PORT}
        ),
    },
    extra=vol.REMOVE_EXTRA,
)
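A minimal usage sketch for these config schemas, assuming the ATTR_* constants map to the plain key names shown (e.g. ATTR_TIMEZONE == "timezone"):

config = SCHEMA_HASSIO_CONFIG({"timezone": "Europe/Berlin"})
print(config["timezone"], config["logging"])  # defaults fill missing keys

try:
    validate_repository("not a url")
except vol.Invalid as err:
    print("rejected:", err)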
Submodule home-assistant-polymer updated: 8238b700b0...dc5b92030f
BIN misc/hassio.png (binary file not shown; before: 37 KiB)
Some files were not shown because too many files have changed in this diff.