Compare commits

...

118 Commits
163 ... 178

Author SHA1 Message Date
Pascal Vizeli
2e53a48504 Merge pull request #1224 from home-assistant/dev
Release 178
2019-08-16 13:26:45 +02:00
Pascal Vizeli
8e4db0c3ec Stripe resolv (#1226) 2019-08-16 13:22:07 +02:00
Pascal Vizeli
4072b06faf Fix issue on installed add-ons (#1225) 2019-08-16 13:12:39 +02:00
Pascal Vizeli
a2cf7ece70 Change handling with host files (#1223) 2019-08-16 12:47:32 +02:00
Pascal Vizeli
734fe3afde Bump version 178 2019-08-16 00:15:05 +02:00
Pascal Vizeli
7f3bc91c1d Merge pull request #1222 from home-assistant/dev
Release 177
2019-08-16 00:13:51 +02:00
Pascal Vizeli
9c2c95757d Validate dns better (#1221) 2019-08-15 23:48:14 +02:00
Franck Nijhof
b5ed6c586a Cleanup ingress panel on add-on uninstall (#1220)
* Cleanup ingress panel on add-on uninstall

* Update __init__.py
2019-08-15 23:05:03 +02:00
Franck Nijhof
35033d1f76 Allow manager role to access DNS API (#1219) 2019-08-15 22:38:34 +02:00
Pascal Vizeli
9e41d0c5b0 Bump version 177 2019-08-15 14:51:28 +02:00
Pascal Vizeli
62e92fada9 Merge pull request #1218 from home-assistant/dev
Release 176
2019-08-15 14:50:55 +02:00
dependabot-preview[bot]
ae0a1a657f Bump gitpython from 3.0.0 to 3.0.1 (#1216)
Bumps [gitpython](https://github.com/gitpython-developers/GitPython) from 3.0.0 to 3.0.1.
- [Release notes](https://github.com/gitpython-developers/GitPython/releases)
- [Changelog](https://github.com/gitpython-developers/GitPython/blob/master/CHANGES)
- [Commits](https://github.com/gitpython-developers/GitPython/compare/3.0.0...3.0.1)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2019-08-15 14:46:16 +02:00
Pascal Vizeli
81e511ba8e Fix spell 2019-08-15 12:42:34 +00:00
Pascal Vizeli
d89cb91c8c Revert "Call update of resolv later (#1215)" (#1217)
This reverts commit dc31b6e6fe.
2019-08-15 14:42:05 +02:00
Pascal Vizeli
dc31b6e6fe Call update of resolv later (#1215) 2019-08-15 13:57:44 +02:00
Pascal Vizeli
930a32de1a Fix latest issue (#1214)
* Fix latest issue

* Use also update now

* Fix style
2019-08-15 12:42:21 +02:00
Pascal Vizeli
e40f2ed8e3 Bump version 176 2019-08-15 11:36:47 +02:00
Pascal Vizeli
abbd3d1078 Merge pull request #1213 from home-assistant/dev
Release 175
2019-08-15 11:36:06 +02:00
Pascal Vizeli
63c9948456 Add CoreDNS to update process (#1212) 2019-08-15 11:05:08 +02:00
Pascal Vizeli
b6c81d779a Use own coredns for supervisor 2019-08-15 08:51:42 +00:00
Pascal Vizeli
2480c83169 Fix socat command (#1211) 2019-08-15 10:17:41 +02:00
Pascal Vizeli
334cc66cf6 Bump Version 175 2019-08-14 15:39:44 +02:00
Pascal Vizeli
3cf189ad94 Merge pull request #1209 from home-assistant/dev
Release 174
2019-08-14 15:38:57 +02:00
dependabot-preview[bot]
6ffb94a0f5 Bump ptvsd from 4.3.1 to 4.3.2 (#1207)
Bumps [ptvsd](https://github.com/Microsoft/ptvsd) from 4.3.1 to 4.3.2.
- [Release notes](https://github.com/Microsoft/ptvsd/releases)
- [Commits](https://github.com/Microsoft/ptvsd/compare/v4.3.1...v4.3.2)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2019-08-14 14:35:43 +02:00
Pascal Vizeli
3593826441 Fix issue with windows dev env 2019-08-14 10:37:39 +00:00
Pascal Vizeli
0a0a62f238 Add-on provides its own udev support (#1206)
* Add-on provides its own udev support

* upgrade logger
2019-08-14 12:29:00 +02:00
Pascal Vizeli
41ce9913d2 Stats percent (#1205)
* Fix stats and add Memory percent

* Fix tasks

* round percent
2019-08-14 10:47:11 +02:00
Pascal Vizeli
b77c42384d Add DNS to add-on (#1204) 2019-08-14 09:53:03 +02:00
Pascal Vizeli
138bb12f98 Add debug output to gdbus (#1203) 2019-08-13 21:25:04 +02:00
Pascal Vizeli
4fe2859f4e Rename scripts folder (#1202)
* Rename script folder

* Rename scripts
2019-08-13 14:39:32 +02:00
dependabot-preview[bot]
0768b2b4bc Bump ptvsd from 4.3.0 to 4.3.1 (#1200)
Bumps [ptvsd](https://github.com/Microsoft/ptvsd) from 4.3.0 to 4.3.1.
- [Release notes](https://github.com/Microsoft/ptvsd/releases)
- [Commits](https://github.com/Microsoft/ptvsd/compare/v4.3.0...v4.3.1)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2019-08-13 14:35:41 +02:00
dependabot-preview[bot]
e6f1772a93 Bump gitpython from 2.1.13 to 3.0.0 (#1199)
Bumps [gitpython](https://github.com/gitpython-developers/GitPython) from 2.1.13 to 3.0.0.
- [Release notes](https://github.com/gitpython-developers/GitPython/releases)
- [Changelog](https://github.com/gitpython-developers/GitPython/blob/master/CHANGES)
- [Commits](https://github.com/gitpython-developers/GitPython/compare/2.1.13...3.0.0)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2019-08-13 14:35:15 +02:00
dependabot-preview[bot]
5374b2b3b9 Bump voluptuous from 0.11.5 to 0.11.7 (#1201)
Bumps [voluptuous](https://github.com/alecthomas/voluptuous) from 0.11.5 to 0.11.7.
- [Release notes](https://github.com/alecthomas/voluptuous/releases)
- [Changelog](https://github.com/alecthomas/voluptuous/blob/master/CHANGELOG.md)
- [Commits](https://github.com/alecthomas/voluptuous/commits)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2019-08-13 14:29:33 +02:00
Pascal Vizeli
1196788856 Add CoreDNS as DNS backend (#1195)
* Add CoreDNS / DNS configuration

* Support get version

* add version

* add coresys

* Add more logic

* move forwarder into dns

* Setup docker inside

* add docker to env

* Add more function

* more interface

* Update hosts template

* Add DNS folder

* Fix issues

* Add more logic

* Add handling for hosts

* Fix setting

* fix lint

* Fix some issues

* Fix issue

* Run with no cache

* Fix issue on validate

* Fix bug

* Allow to jump into dev mode

* Fix permission

* Fix issue

* Add dns search

* Add watchdog

* Fix set issues

* add API description

* Add API endpoint

* Add CLI support

* Fix logs + add hostname

* Add/remove DNS entry

* Fix attribute

* Fix style

* Better shutdown

* Remove ha from network mapping

* Add more options

* Fix env shutdown

* Add support for new repair function

* Start coreDNS faster after restart

* remove options

* Fix ha fix
2019-08-13 14:20:42 +02:00
Pascal Vizeli
9f3f47eb80 Bump version 174 2019-08-11 09:59:48 +02:00
Pascal Vizeli
1a90a478f2 Merge pull request #1197 from home-assistant/dev
Release 173
2019-08-11 09:39:17 +02:00
Pascal Vizeli
ee773f3b63 Fix hanging landingpage (#1196) 2019-08-11 09:05:47 +02:00
Pascal Vizeli
5ffc27f60c Bump version 173 2019-08-08 23:24:11 +02:00
Pascal Vizeli
4c13dfb43c Merge pull request #1194 from home-assistant/dev
Release 172
2019-08-08 23:21:26 +02:00
Pascal Vizeli
bc099f0d81 Fix version detection with existing container (#1193) 2019-08-08 23:20:26 +02:00
Pascal Vizeli
b26dd0af19 Add better log output for repair (#1191) 2019-08-08 10:14:13 +02:00
Pascal Vizeli
0dee5bd763 Fix black formatting args 2019-08-08 10:13:44 +02:00
Pascal Vizeli
0765387ad8 Bump version 172 2019-08-07 18:18:09 +02:00
Pascal Vizeli
a07517bd3c Merge pull request #1190 from home-assistant/dev
Release 171
2019-08-07 18:17:30 +02:00
Pascal Vizeli
e5f0d80d96 Start API server before it performs a self-update (#1189) 2019-08-07 18:03:56 +02:00
Pascal Vizeli
2fc5e3b7d9 Repair / fixup docker overlayfs issues (#1170)
* Add a repair modus

* Add repair to add-ons

* repair to cli

* Add API call

* fix sync call

* Clean all images

* Fix repair

* Fix supervisor

* Add new function to core

* fix tagging

* better style

* use retag

* new retag function

* Fix lint

* Fix import export
2019-08-07 17:26:32 +02:00
Pascal Vizeli
778bc46848 Don't rely on latest with HA/Add-ons (#1175)
* Don't rely on latest with HA/Add-ons

* Fix latest on install

* Revert some options

* Fix attach

* migrate to new version handling

* Fix thread

* Fix is running

* Allow wait

* debug code

* Fix debug value

* Fix list

* Fix regex

* Some better log output

* Fix logic

* Improve cleanup handling

* Fix bug

* Cleanup old code

* Improve version handling

* Fix the way to attach
2019-08-07 09:51:27 +02:00
Pascal Vizeli
882586b246 Fix time adjustments on last boot (#1187)
* Fix time adjustments on last boot

* Fix spell
2019-08-06 09:24:22 +02:00
dependabot-preview[bot]
b7c07a2555 Bump pytz from 2019.1 to 2019.2 (#1184)
Bumps [pytz](https://github.com/stub42/pytz) from 2019.1 to 2019.2.
- [Release notes](https://github.com/stub42/pytz/releases)
- [Commits](https://github.com/stub42/pytz/compare/release_2019.1...release_2019.2)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2019-08-02 10:32:04 +02:00
dependabot-preview[bot]
814b504fa9 Bump ptvsd from 4.2.10 to 4.3.0 (#1179)
Bumps [ptvsd](https://github.com/Microsoft/ptvsd) from 4.2.10 to 4.3.0.
- [Release notes](https://github.com/Microsoft/ptvsd/releases)
- [Commits](https://github.com/Microsoft/ptvsd/compare/v4.2.10...v4.3.0)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2019-07-29 17:01:28 +02:00
dependabot-preview[bot]
7ae430e7a8 Bump gitpython from 2.1.12 to 2.1.13 (#1178)
Bumps [gitpython](https://github.com/gitpython-developers/GitPython) from 2.1.12 to 2.1.13.
- [Release notes](https://github.com/gitpython-developers/GitPython/releases)
- [Changelog](https://github.com/gitpython-developers/GitPython/blob/master/CHANGES)
- [Commits](https://github.com/gitpython-developers/GitPython/compare/2.1.12...2.1.13)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2019-07-29 14:53:54 +02:00
dependabot-preview[bot]
0e7e95ba20 Bump gitpython from 2.1.11 to 2.1.12 (#1171)
Bumps [gitpython](https://github.com/gitpython-developers/GitPython) from 2.1.11 to 2.1.12.
- [Release notes](https://github.com/gitpython-developers/GitPython/releases)
- [Changelog](https://github.com/gitpython-developers/GitPython/blob/master/CHANGES)
- [Commits](https://github.com/gitpython-developers/GitPython/compare/2.1.11...2.1.12)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2019-07-22 14:18:11 +02:00
Pascal Vizeli
e577d8acb2 Bump version 171 2019-07-19 11:49:00 +02:00
Pascal Vizeli
0a76ab5054 Merge pull request #1168 from home-assistant/dev
Release 170
2019-07-19 11:48:28 +02:00
Pascal Vizeli
03c5596e04 Fix machine version (#1167) 2019-07-19 11:47:55 +02:00
Pascal Vizeli
3af4e14e83 Bump version 170 2019-07-16 12:36:05 +02:00
Pascal Vizeli
7c8cf57820 Merge pull request #1164 from home-assistant/dev
Release 169
2019-07-16 12:35:10 +02:00
Pascal Vizeli
8d84a8a62e Update panel & support panel on devcontainer (#1163)
* Update panel & support panel on devcontainer

* small cleanups

* small size
2019-07-16 12:23:03 +02:00
Pascal Vizeli
08c45060bd Add support for RPi4 (#1162) 2019-07-16 10:33:56 +02:00
Pascal Vizeli
7ca8d2811b Update URL for version file (#1161) 2019-07-16 10:26:59 +02:00
Pascal Vizeli
bb6898b032 Update azure-pipelines.yml for Azure Pipelines 2019-07-12 09:57:55 +02:00
Pascal Vizeli
cd86c6814e Update azure-pipelines.yml for Azure Pipelines 2019-07-12 09:42:55 +02:00
Pascal Vizeli
b67e116650 Update azure-pipelines.yml 2019-07-12 09:41:40 +02:00
Pascal Vizeli
57ce411fb6 Update azure-pipelines.yml 2019-07-11 22:11:37 +02:00
Pascal Vizeli
85ed4d9e8d Update Dockerfile 2019-07-11 19:25:07 +02:00
dependabot-preview[bot]
ccb39da569 Bump flake8 from 3.7.7 to 3.7.8 (#1154)
Bumps [flake8](https://gitlab.com/pycqa/flake8) from 3.7.7 to 3.7.8.
- [Release notes](https://gitlab.com/pycqa/flake8/tags)
- [Commits](https://gitlab.com/pycqa/flake8/compare/3.7.7...3.7.8)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2019-07-09 14:05:43 +02:00
Pascal Vizeli
dd7ba64d32 Bump version 169 2019-07-08 16:03:59 +02:00
Pascal Vizeli
de3edb1654 Merge pull request #1153 from home-assistant/dev
Release 168
2019-07-08 16:02:39 +02:00
dependabot-preview[bot]
d262151727 Bump pytest from 4.6.3 to 5.0.1 (#1148)
* Bump pytest from 4.6.3 to 5.0.1

Bumps [pytest](https://github.com/pytest-dev/pytest) from 4.6.3 to 5.0.1.
- [Release notes](https://github.com/pytest-dev/pytest/releases)
- [Changelog](https://github.com/pytest-dev/pytest/blob/master/CHANGELOG.rst)
- [Commits](https://github.com/pytest-dev/pytest/compare/4.6.3...5.0.1)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>

* Update tox.ini
2019-07-06 18:07:15 +02:00
Pascal Vizeli
a37c90af96 Forward Params (#1150) 2019-07-06 18:06:39 +02:00
Pascal Vizeli
0a3a752b4c Add timezone to info call (#1146) 2019-07-04 18:20:46 +02:00
Pascal Vizeli
0a34f427f8 Fix error on save special permission (#1145) 2019-07-04 17:30:42 +02:00
Pascal Vizeli
157740e374 Update devcontainer.json 2019-07-04 17:28:50 +02:00
Pascal Vizeli
b0e994f3f5 Bump version 168 2019-06-25 17:38:57 +02:00
Pascal Vizeli
f374852801 Merge pull request #1139 from home-assistant/dev
Release 167
2019-06-25 17:34:42 +02:00
Pascal Vizeli
709f034f2e New TimeZone handling (#1138)
* Remove function to get TZ from config file

* Re-add old timezone handling

* Fix tests
2019-06-25 17:09:14 +02:00
Pascal Vizeli
6d6deb8c66 Cleanup udev handling (#1137)
* Cleanup udev handling

* Update hardware.py
2019-06-25 16:15:02 +02:00
Pascal Vizeli
5771b417bc sort import 2019-06-25 12:54:45 +00:00
Pascal Vizeli
51efcefdab Compile only hassio 2019-06-24 23:21:15 +00:00
Pascal Vizeli
d31ab5139d compile all 2019-06-24 23:09:08 +00:00
Pascal Vizeli
ce18183daa Allow update discovery messages (#1136)
* Allow update discovery messages

* Update __init__.py

* Update __init__.py

* Update __init__.py

* fix lint

* Fix style
2019-06-24 23:29:42 +02:00
Pascal Vizeli
b8b73cf880 remove diff wheels build 2019-06-24 19:16:26 +02:00
Pascal Vizeli
5291e6c1f3 Use multistage 2019-06-24 19:04:52 +02:00
Pascal Vizeli
626a9f06c4 Update to alpine 3.10 (#1135) 2019-06-24 18:49:43 +02:00
Pascal Vizeli
72338eb5b8 Add devcontainer support (#1134) 2019-06-24 14:48:10 +02:00
Jakub
7bd77c6e99 Append devlinks to serial dev_list (#1131)
* append devlinks to dev_list

* replace eudev-libs with eudev

* include only devlinks starting with /dev/serial/by-id

* add missing package, move udev init to entry.sh

* fix mode on entry.sh

* Update homeassistant.py

* Update homeassistant.py
2019-06-24 09:53:54 +02:00
dependabot-preview[bot]
69151b962a Bump pytest from 4.6.2 to 4.6.3 (#1125)
Bumps [pytest](https://github.com/pytest-dev/pytest) from 4.6.2 to 4.6.3.
- [Release notes](https://github.com/pytest-dev/pytest/releases)
- [Changelog](https://github.com/pytest-dev/pytest/blob/master/CHANGELOG.rst)
- [Commits](https://github.com/pytest-dev/pytest/compare/4.6.2...4.6.3)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2019-06-21 09:50:42 +02:00
dependabot-preview[bot]
86305d4fe4 Bump docker from 4.0.1 to 4.0.2 (#1133)
Bumps [docker](https://github.com/docker/docker-py) from 4.0.1 to 4.0.2.
- [Release notes](https://github.com/docker/docker-py/releases)
- [Commits](https://github.com/docker/docker-py/compare/4.0.1...4.0.2)

Signed-off-by: dependabot-preview[bot] <support@dependabot.com>
2019-06-21 09:50:28 +02:00
Pascal Vizeli
d5c3850a3f Don't break on supervisor update (#1118)
* Don't break on supervisor update

* Update core.py

* Fix lint
2019-06-06 10:57:36 +02:00
Pascal Vizeli
3e645b6175 Fix timeout on check port (#1116) 2019-06-05 17:49:05 +02:00
Pascal Vizeli
89dc78bc05 Bump version 167 2019-06-05 17:10:44 +02:00
Pascal Vizeli
164c403d05 Merge pull request #1115 from home-assistant/dev
Release 166
2019-06-05 17:10:14 +02:00
Pascal Vizeli
5e8007453f detect native arch (#1114) 2019-06-05 16:59:43 +02:00
Pascal Vizeli
0a0d97b084 Support pip progress for HA 0.94 (#1113)
* Support pip progress for HA 0.94

* fix black

* add tests

* add test for adguard

* Fix lint
2019-06-05 14:46:03 +02:00
dependabot-preview[bot]
eb604ed92d Bump pytest from 4.6.1 to 4.6.2 (#1112)
Bumps [pytest](https://github.com/pytest-dev/pytest) from 4.6.1 to 4.6.2.
- [Release notes](https://github.com/pytest-dev/pytest/releases)
- [Changelog](https://github.com/pytest-dev/pytest/blob/master/CHANGELOG.rst)
- [Commits](https://github.com/pytest-dev/pytest/compare/4.6.1...4.6.2)
2019-06-04 23:47:35 +02:00
dependabot-preview[bot]
c47828dbaa Bump pytest from 4.5.0 to 4.6.1 (#1110)
Bumps [pytest](https://github.com/pytest-dev/pytest) from 4.5.0 to 4.6.1.
- [Release notes](https://github.com/pytest-dev/pytest/releases)
- [Changelog](https://github.com/pytest-dev/pytest/blob/master/CHANGELOG.rst)
- [Commits](https://github.com/pytest-dev/pytest/compare/4.5.0...4.6.1)
2019-06-04 14:43:20 +02:00
Pascal Vizeli
ea437dc745 Update azure-pipelines.yml for Azure Pipelines 2019-06-02 14:12:45 +02:00
Franck Nijhof
c16a208b39 Support for AdGuard Home discovery (#1107)
* Support for AdGuard Home discovery

* 👕 Darkened the sky
2019-06-02 07:47:12 +02:00
dependabot-preview[bot]
55d803b2a0 Bump cryptography from 2.6.1 to 2.7 (#1108)
Bumps [cryptography](https://github.com/pyca/cryptography) from 2.6.1 to 2.7.
- [Release notes](https://github.com/pyca/cryptography/releases)
- [Changelog](https://github.com/pyca/cryptography/blob/master/CHANGELOG.rst)
- [Commits](https://github.com/pyca/cryptography/compare/2.6.1...2.7)
2019-05-31 17:55:16 +02:00
Pascal Vizeli
611f6f2829 Don't follow requests itself (#1106)
* Don't follow requests itself

* Fix black lint
2019-05-31 13:53:46 +02:00
Pascal Vizeli
b94df76731 Update azure-pipelines.yml for Azure Pipelines 2019-05-31 12:42:37 +02:00
Pascal Vizeli
218619e7f0 Bump version 166 2019-05-31 12:36:06 +02:00
Pascal Vizeli
273eed901a Merge pull request #1105 from home-assistant/dev
Release 165
2019-05-31 12:35:34 +02:00
Pascal Vizeli
8ea712a937 Fix error on comparison (#1104) 2019-05-31 11:46:28 +02:00
Pascal Vizeli
658449a7a0 Update azure-pipelines.yml for Azure Pipelines 2019-05-30 17:43:30 +02:00
Pascal Vizeli
968c471591 Update azure-pipelines.yml for Azure Pipelines 2019-05-30 17:25:47 +02:00
Pascal Vizeli
b4665f3907 Add black support (#1101) 2019-05-27 12:35:06 +02:00
dependabot[bot]
496cee1ec4 Bump ptvsd from 4.2.9 to 4.2.10 (#1096)
Bumps [ptvsd](https://github.com/Microsoft/ptvsd) from 4.2.9 to 4.2.10.
- [Release notes](https://github.com/Microsoft/ptvsd/releases)
- [Commits](https://github.com/Microsoft/ptvsd/compare/v4.2.9...v4.2.10)

Signed-off-by: dependabot[bot] <support@dependabot.com>
2019-05-22 14:29:57 +02:00
dependabot[bot]
0f8c80f3ba Bump docker from 3.7.2 to 4.0.1 (#1093)
Bumps [docker](https://github.com/docker/docker-py) from 3.7.2 to 4.0.1.
- [Release notes](https://github.com/docker/docker-py/releases)
- [Commits](https://github.com/docker/docker-py/compare/3.7.2...4.0.1)

Signed-off-by: dependabot[bot] <support@dependabot.com>
2019-05-21 12:07:01 +02:00
Pascal Vizeli
6c28f82239 Bump version 165 2019-05-18 22:39:43 +02:00
Pascal Vizeli
def32abb57 Merge pull request #1090 from home-assistant/dev
Release 164
2019-05-18 22:37:47 +02:00
Pascal Vizeli
f57a241b9e Panel search function (#1089) 2019-05-18 22:31:07 +02:00
Pascal Vizeli
11a7e8b15d Fix error with repository without repository.json (#1088) 2019-05-18 22:17:32 +02:00
Pascal Vizeli
fa4f7697b7 Update azure-pipelines.yml for Azure Pipelines 2019-05-16 09:42:55 +02:00
dependabot[bot]
6098b7de8e Bump pytest from 4.4.2 to 4.5.0 (#1083)
Bumps [pytest](https://github.com/pytest-dev/pytest) from 4.4.2 to 4.5.0.
- [Release notes](https://github.com/pytest-dev/pytest/releases)
- [Changelog](https://github.com/pytest-dev/pytest/blob/master/CHANGELOG.rst)
- [Commits](https://github.com/pytest-dev/pytest/compare/4.4.2...4.5.0)

Signed-off-by: dependabot[bot] <support@dependabot.com>
2019-05-13 23:03:55 +02:00
dependabot[bot]
0a382ce54d Bump ptvsd from 4.2.8 to 4.2.9 (#1081)
Bumps [ptvsd](https://github.com/Microsoft/ptvsd) from 4.2.8 to 4.2.9.
- [Release notes](https://github.com/Microsoft/ptvsd/releases)
- [Commits](https://github.com/Microsoft/ptvsd/compare/v4.2.8...v4.2.9)

Signed-off-by: dependabot[bot] <support@dependabot.com>
2019-05-10 15:32:29 +02:00
dependabot[bot]
dd53aaa30c Bump pytest from 4.4.1 to 4.4.2 (#1080)
Bumps [pytest](https://github.com/pytest-dev/pytest) from 4.4.1 to 4.4.2.
- [Release notes](https://github.com/pytest-dev/pytest/releases)
- [Changelog](https://github.com/pytest-dev/pytest/blob/master/CHANGELOG.rst)
- [Commits](https://github.com/pytest-dev/pytest/compare/4.4.1...4.4.2)

Signed-off-by: dependabot[bot] <support@dependabot.com>
2019-05-09 14:11:58 +02:00
Pascal Vizeli
31e175a15a Bump version 164 2019-05-09 12:07:57 +02:00
178 changed files with 3140 additions and 1401 deletions

43
.devcontainer/Dockerfile Normal file

@@ -0,0 +1,43 @@
FROM python:3.7
WORKDIR /workspaces
# Install Node/Yarn for Frontend
RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
git \
apt-utils \
apt-transport-https \
&& curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
&& echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
&& apt-get update && apt-get install -y --no-install-recommends \
nodejs \
yarn \
&& curl -o - https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
&& rm -rf /var/lib/apt/lists/*
ENV NVM_DIR /root/.nvm
# Install docker
# https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
RUN apt-get update && apt-get install -y --no-install-recommends \
apt-transport-https \
ca-certificates \
curl \
software-properties-common \
gpg-agent \
&& curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
&& add-apt-repository "deb https://download.docker.com/linux/debian $(lsb_release -cs) stable" \
&& apt-get update && apt-get install -y --no-install-recommends \
docker-ce \
docker-ce-cli \
containerd.io \
&& rm -rf /var/lib/apt/lists/*
# Install Python dependencies from requirements.txt if it exists
COPY requirements.txt requirements_tests.txt /workspaces/
RUN pip install -r requirements.txt \
&& pip3 install -r requirements_tests.txt \
&& pip install black tox
# Set the default shell to bash instead of sh
ENV SHELL /bin/bash

.devcontainer/devcontainer.json Normal file

@@ -0,0 +1,29 @@
// See https://aka.ms/vscode-remote/devcontainer.json for format details.
{
"name": "Hass.io dev",
"context": "..",
"dockerFile": "Dockerfile",
"appPort": "9123:8123",
"runArgs": [
"-e",
"GIT_EDITOR='code --wait'",
"--privileged"
],
"extensions": [
"ms-python.python"
],
"settings": {
"python.pythonPath": "/usr/local/bin/python",
"python.linting.pylintEnabled": true,
"python.linting.enabled": true,
"python.formatting.provider": "black",
"python.formatting.blackArgs": [
"--target-version",
"py37"
],
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,
"files.trimTrailingWhitespace": true
}
}

.dockerignore

@@ -1,13 +1,23 @@
# General files
.git
.github
.devcontainer
.vscode
# Test related files
.tox
# Temporary files
**/__pycache__
.pytest_cache
# virtualenv
venv/
ENV/
# HA
home-assistant-polymer/*
misc/*
script/*
# Test ENV
data/

4
.gitignore vendored

@@ -92,4 +92,6 @@ ENV/
.pylint.d/
# VS Code
.vscode/
.vscode/*
!.vscode/cSpell.json
!.vscode/tasks.json

92
.vscode/tasks.json vendored Normal file

@@ -0,0 +1,92 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "Run Testenv",
"type": "shell",
"command": "./scripts/test_env.sh",
"group": {
"kind": "test",
"isDefault": true,
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Run Testenv CLI",
"type": "shell",
"command": "docker run --rm -ti -v /etc/machine-id:/etc/machine-id --network=hassio --add-host hassio:172.30.32.2 homeassistant/amd64-hassio-cli:dev",
"group": {
"kind": "test",
"isDefault": true,
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Update UI",
"type": "shell",
"command": "./scripts/update-frontend.sh",
"group": {
"kind": "build",
"isDefault": true
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Pytest",
"type": "shell",
"command": "pytest --timeout=10 tests",
"group": {
"kind": "test",
"isDefault": true,
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Flake8",
"type": "shell",
"command": "flake8 hassio tests",
"group": {
"kind": "test",
"isDefault": true,
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
},
{
"label": "Pylint",
"type": "shell",
"command": "pylint hassio",
"dependsOn": [
"Install all Requirements"
],
"group": {
"kind": "test",
"isDefault": true,
},
"presentation": {
"reveal": "always",
"panel": "new"
},
"problemMatcher": []
}
]
}

55
API.md

@@ -105,6 +105,7 @@ Output is the raw docker log.
"cpu_percent": 0.0,
"memory_usage": 283123,
"memory_limit": 329392,
"memory_percent": 1.4,
"network_tx": 0,
"network_rx": 0,
"blk_read": 0,
@@ -112,6 +113,10 @@ Output is the raw docker log.
}
```
- GET `/supervisor/repair`
Repair overlayfs issues and restore lost images
### Snapshot
- GET `/snapshots`
@@ -417,6 +422,7 @@ Proxy to real websocket instance.
"cpu_percent": 0.0,
"memory_usage": 283123,
"memory_limit": 329392,
"memory_percent": 1.4,
"network_tx": 0,
"network_rx": 0,
"blk_read": 0,
@@ -469,6 +475,8 @@ Get all available addons.
{
"name": "xy bla",
"slug": "xdssd_xybla",
"hostname": "xdssd-xybla",
"dns": [],
"description": "description",
"long_description": "null|markdown",
"auto_update": "bool",
@@ -494,6 +502,7 @@ Get all available addons.
"privileged": ["NET_ADMIN", "SYS_ADMIN"],
"apparmor": "disable|default|profile",
"devices": ["/dev/xy"],
"udev": "bool",
"auto_uart": "bool",
"icon": "bool",
"logo": "bool",
@@ -589,6 +598,7 @@ Write data to add-on stdin
"cpu_percent": 0.0,
"memory_usage": 283123,
"memory_limit": 329392,
"memory_percent": 1.4,
"network_tx": 0,
"network_rx": 0,
"blk_read": 0,
@@ -730,7 +740,50 @@ return:
"arch": "arch",
"supported_arch": ["arch1", "arch2"],
"channel": "stable|beta|dev",
"logging": "debug|info|warning|error|critical"
"logging": "debug|info|warning|error|critical",
"timezone": "Europe/Zurich"
}
```
### DNS
- GET `/dns/info`
```json
{
"host": "ip-address",
"version": "1",
"latest_version": "2",
"servers": ["dns://8.8.8.8"]
}
```
- POST `/dns/options`
```json
{
"servers": ["dns://8.8.8.8"]
}
```
- POST `/dns/update`
```json
{
"version": "VERSION"
}
```
- GET `/dns/logs`
- GET `/dns/stats`
```json
{
"cpu_percent": 0.0,
"memory_usage": 283123,
"memory_limit": 329392,
"memory_percent": 1.4,
"network_tx": 0,
"network_rx": 0,
"blk_read": 0,
"blk_write": 0
}
```
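
For orientation, a minimal sketch of driving the new DNS endpoints from an add-on; this is not part of the diff. It assumes the internal `http://hassio` API host, the `X-Hassio-Key` header fed from the injected `HASSIO_TOKEN`, and the API's usual `{"result": ..., "data": ...}` response envelope.

```python
# Minimal sketch (not from this diff): inspect and reconfigure the new
# CoreDNS service through the Supervisor API from inside an add-on.
import os

import requests

API = "http://hassio"  # internal API host available to add-ons
HEADERS = {"X-Hassio-Key": os.environ["HASSIO_TOKEN"]}  # injected token

# Read current DNS state (version, upstream servers)
info = requests.get(f"{API}/dns/info", headers=HEADERS).json()["data"]
print(info["version"], info["servers"])

# Point the forwarder at a custom upstream, then update CoreDNS
requests.post(
    f"{API}/dns/options", headers=HEADERS, json={"servers": ["dns://8.8.8.8"]}
).raise_for_status()
requests.post(
    f"{API}/dns/update", headers=HEADERS, json={"version": info["latest_version"]}
).raise_for_status()
```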

Dockerfile

@@ -1,8 +1,6 @@
ARG BUILD_FROM
FROM $BUILD_FROM
ARG BUILD_ARCH
# Install base
RUN apk add --no-cache \
openssl \
@@ -12,17 +10,29 @@ RUN apk add --no-cache \
socat \
glib \
libstdc++ \
eudev \
eudev-libs
ARG BUILD_ARCH
WORKDIR /usr/src
# Install requirements
COPY requirements.txt /usr/src/
COPY requirements.txt .
RUN export MAKEFLAGS="-j$(nproc)" \
&& pip3 install --no-cache-dir --find-links https://wheels.hass.io/alpine-3.9/${BUILD_ARCH}/ \
-r /usr/src/requirements.txt \
&& rm -f /usr/src/requirements.txt
&& pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
"https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
-r ./requirements.txt \
&& rm -f requirements.txt
# Install HassIO
COPY . /usr/src/hassio
RUN pip3 install --no-cache-dir -e /usr/src/hassio
COPY . hassio
RUN pip3 install --no-cache-dir -e ./hassio \
&& python3 -m compileall ./hassio/hassio
# Initialize udev daemon, handle CMD
COPY entry.sh /bin/
ENTRYPOINT ["/bin/entry.sh"]
WORKDIR /
CMD [ "python3", "-m", "hassio" ]

azure-pipelines.yml

@@ -1,6 +1,7 @@
# https://dev.azure.com/home-assistant
trigger:
batch: true
branches:
include:
- master
@@ -11,146 +12,157 @@ trigger:
exclude:
- untagged*
pr:
- dev
- dev
variables:
- name: basePythonTag
value: '3.7-alpine3.10'
- name: versionHadolint
value: 'v1.16.3'
- name: versionBuilder
value: '3.2'
value: '4.4'
- name: versionWheels
value: '0.3'
value: '1.0-3.7-alpine3.10'
- group: docker
- group: wheels
jobs:
- job: 'Tox'
pool:
vmImage: 'ubuntu-16.04'
steps:
- task: UsePythonVersion@0
displayName: 'Use Python $(python.version)'
inputs:
versionSpec: '3.7'
- script: pip install tox
displayName: 'Install Tox'
- script: tox
displayName: 'Run Tox'
stages:
- stage: 'Test'
jobs:
- job: 'Tox'
pool:
vmImage: 'ubuntu-latest'
steps:
- task: UsePythonVersion@0
displayName: 'Use Python 3.7'
inputs:
versionSpec: '3.7'
- script: pip install tox
displayName: 'Install Tox'
- script: tox
displayName: 'Run Tox'
- job: 'Black'
pool:
vmImage: 'ubuntu-latest'
steps:
- task: UsePythonVersion@0
displayName: 'Use Python $(python.version)'
inputs:
versionSpec: '3.7'
- script: pip install black
displayName: 'Install black'
- script: black --target-version py37 --check hassio tests
displayName: 'Run Black'
- job: 'JQ'
pool:
vmImage: 'ubuntu-latest'
steps:
- script: sudo apt-get install -y jq
displayName: 'Install JQ'
- bash: |
shopt -s globstar
cat **/*.json | jq '.'
displayName: 'Run JQ'
- job: 'Hadolint'
pool:
vmImage: 'ubuntu-latest'
steps:
- script: sudo docker pull hadolint/hadolint:$(versionHadolint)
displayName: 'Install Hadolint'
- script: |
sudo docker run --rm -i \
-v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
hadolint/hadolint:$(versionHadolint) < Dockerfile
displayName: 'Run Hadolint'
- job: 'JQ'
pool:
vmImage: 'ubuntu-16.04'
steps:
- script: sudo apt-get install -y jq
displayName: 'Install JQ'
- bash: |
shopt -s globstar
cat **/*.json | jq '.'
displayName: 'Run JQ'
- stage: 'Wheels'
jobs:
- job: 'Wheels'
condition: eq(variables['Build.SourceBranchName'], 'dev')
timeoutInMinutes: 360
pool:
vmImage: 'ubuntu-latest'
strategy:
maxParallel: 5
matrix:
amd64:
buildArch: 'amd64'
i386:
buildArch: 'i386'
armhf:
buildArch: 'armhf'
armv7:
buildArch: 'armv7'
aarch64:
buildArch: 'aarch64'
steps:
- script: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends \
qemu-user-static \
binfmt-support \
curl
sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
sudo update-binfmts --enable qemu-arm
sudo update-binfmts --enable qemu-aarch64
displayName: 'Initial cross build'
- script: |
mkdir -p .ssh
echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
chmod 600 .ssh/*
displayName: 'Install ssh key'
- script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
displayName: 'Install wheels builder'
- script: |
sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
homeassistant/$(buildArch)-wheels:$(versionWheels) \
--apk "build-base;libffi-dev;openssl-dev" \
--index $(wheelsIndex) \
--requirement requirements.txt \
--upload rsync \
--remote wheels@$(wheelsHost):/opt/wheels
displayName: 'Run wheels build'
- job: 'Hadolint'
pool:
vmImage: 'ubuntu-16.04'
steps:
- script: sudo docker pull hadolint/hadolint:$(versionHadolint)
displayName: 'Install Hadolint'
- script: |
sudo docker run --rm -i \
-v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
hadolint/hadolint:$(versionHadolint) < Dockerfile
displayName: 'Run Hadolint'
- stage: 'Deploy'
jobs:
- job: 'VersionValidate'
condition: or(startsWith(variables['Build.SourceBranch'], 'refs/tags'), eq(variables['Build.SourceBranchName'], 'dev'))
pool:
vmImage: 'ubuntu-latest'
steps:
- task: UsePythonVersion@0
displayName: 'Use Python 3.7'
inputs:
versionSpec: '3.7'
- script: |
setup_version="$(python setup.py -V)"
branch_version="$(Build.SourceBranchName)"
- job: 'Wheels'
condition: eq(variables['Build.SourceBranchName'], 'dev')
timeoutInMinutes: 360
pool:
vmImage: 'ubuntu-16.04'
strategy:
maxParallel: 3
matrix:
amd64:
buildArch: 'amd64'
i386:
buildArch: 'i386'
armhf:
buildArch: 'armhf'
armv7:
buildArch: 'armv7'
aarch64:
buildArch: 'aarch64'
steps:
- script: |
sudo apt-get install -y --no-install-recommends \
qemu-user-static \
binfmt-support
sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
sudo update-binfmts --enable qemu-arm
sudo update-binfmts --enable qemu-aarch64
displayName: 'Initial cross build'
- script: |
mkdir -p .ssh
echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
chmod 600 .ssh/*
displayName: 'Install ssh key'
- script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
displayName: 'Install wheels builder'
- script: |
sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
homeassistant/$(buildArch)-wheels:$(versionWheels) \
--apk "build-base;libffi-dev;openssl-dev" \
--index https://wheels.hass.io \
--requirement requirements.txt \
--upload rsync \
--remote wheels@$(wheelsHost):/opt/wheels
displayName: 'Run wheels build'
- job: 'ReleaseDEV'
condition: and(eq(variables['Build.SourceBranchName'], 'dev'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'), succeeded('Wheels'))
dependsOn:
- 'JQ'
- 'Tox'
- 'Hadolint'
- 'Wheels'
pool:
vmImage: 'ubuntu-16.04'
steps:
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
displayName: 'Docker hub login'
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
displayName: 'Install Builder'
- script: |
sudo docker run --rm --privileged \
-v ~/.docker:/root/.docker \
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
homeassistant/amd64-builder:$(versionBuilder) \
--supervisor --all -t /data --version dev --docker-hub homeassistant
displayName: 'Build DEV'
- job: 'Release'
condition: and(startsWith(variables['Build.SourceBranch'], 'refs/tags'), succeeded('JQ'), succeeded('Tox'), succeeded('Hadolint'))
dependsOn:
- 'JQ'
- 'Tox'
- 'Hadolint'
pool:
vmImage: 'ubuntu-16.04'
steps:
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
displayName: 'Docker hub login'
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
displayName: 'Install Builder'
- script: |
sudo docker run --rm --privileged \
-v ~/.docker:/root/.docker \
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
homeassistant/amd64-builder:$(versionBuilder) \
--supervisor --all -t /data --docker-hub homeassistant
displayName: 'Build Release'
if [ "${branch_version}" == "dev" ]; then
exit 0
elif [ "${setup_version}" != "${branch_version}" ]; then
echo "Version of tag ${branch_version} don't match with ${setup_version}!"
exit 1
fi
displayName: 'Check version of branch/tag'
- job: 'Release'
dependsOn:
- 'VersionValidate'
pool:
vmImage: 'ubuntu-latest'
steps:
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
displayName: 'Docker hub login'
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
displayName: 'Install Builder'
- script: |
sudo docker run --rm --privileged \
-v ~/.docker:/root/.docker \
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
homeassistant/amd64-builder:$(versionBuilder) \
--supervisor $(basePythonTag) --version $(Build.SourceBranchName) \
--all -t /data --docker-hub homeassistant
displayName: 'Build Release'

13
entry.sh Executable file

@@ -0,0 +1,13 @@
#!/bin/bash
set -e
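# Start the udev daemon, then replay kernel device events so /dev is
# fully populated before the main command runs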
udevd --daemon
udevadm trigger
if CMD="$(command -v "$1")"; then
shift
exec "$CMD" "$@"
else
echo "Command not found: $1"
exit 1
fi

hassio/__main__.py

@@ -38,9 +38,10 @@ if __name__ == "__main__":
_LOGGER.info("Initialize Hass.io setup")
coresys = loop.run_until_complete(bootstrap.initialize_coresys())
loop.run_until_complete(coresys.core.connect())
bootstrap.migrate_system_env(coresys)
bootstrap.supervisor_debugger(coresys)
bootstrap.migrate_system_env(coresys)
_LOGGER.info("Setup HassIO")
loop.run_until_complete(coresys.core.setup())

hassio/addons/__init__.py

@@ -10,7 +10,9 @@ from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
AddonsError,
AddonsNotSupportedError,
CoreDNSError,
DockerAPIError,
HomeAssistantAPIError,
HostAppArmorError,
)
from ..store.addon import AddonStore
@@ -73,6 +75,9 @@ class AddonManager(CoreSysAttributes):
if tasks:
await asyncio.wait(tasks)
# Sync DNS
await self.sync_dns()
async def boot(self, stage: str) -> None:
"""Boot add-ons with mode auto."""
tasks = []
@@ -110,16 +115,14 @@ class AddonManager(CoreSysAttributes):
raise AddonsError()
if not store.available:
_LOGGER.error(
"Add-on %s not supported on that platform", slug)
_LOGGER.error("Add-on %s not supported on that platform", slug)
raise AddonsNotSupportedError()
self.data.install(store)
addon = Addon(self.coresys, slug)
if not addon.path_data.is_dir():
_LOGGER.info(
"Create Home Assistant add-on data folder %s", addon.path_data)
_LOGGER.info("Create Home Assistant add-on data folder %s", addon.path_data)
addon.path_data.mkdir()
# Setup/Fix AppArmor profile
@@ -132,6 +135,7 @@ class AddonManager(CoreSysAttributes):
raise AddonsError() from None
else:
self.local[slug] = addon
_LOGGER.info("Add-on '%s' successfully installed", slug)
async def uninstall(self, slug: str) -> None:
"""Remove an add-on."""
@@ -156,11 +160,20 @@ class AddonManager(CoreSysAttributes):
with suppress(HostAppArmorError):
await addon.uninstall_apparmor()
# Cleanup Ingress panel from sidebar
if addon.ingress_panel:
addon.ingress_panel = False
with suppress(HomeAssistantAPIError):
await self.sys_ingress.update_hass_panel(addon)
# Cleanup internal data
addon.remove_discovery()
self.data.uninstall(addon)
self.local.pop(slug)
_LOGGER.info("Add-on '%s' successfully removed", slug)
async def update(self, slug: str) -> None:
"""Update add-on."""
if slug not in self.local:
@@ -179,17 +192,22 @@ class AddonManager(CoreSysAttributes):
# Check if available, Maybe something have changed
if not store.available:
_LOGGER.error(
"Add-on %s not supported on that platform", slug)
_LOGGER.error("Add-on %s not supported on that platform", slug)
raise AddonsNotSupportedError()
# Update instance
last_state = await addon.state()
try:
await addon.instance.update(store.version, store.image)
# Cleanup
with suppress(DockerAPIError):
await addon.instance.cleanup()
except DockerAPIError:
raise AddonsError() from None
self.data.update(store)
else:
self.data.update(store)
_LOGGER.info("Add-on '%s' successfully updated", slug)
# Setup/Fix AppArmor profile
await addon.install_apparmor()
@@ -227,6 +245,7 @@ class AddonManager(CoreSysAttributes):
raise AddonsError() from None
else:
self.data.update(store)
_LOGGER.info("Add-on '%s' successfully rebuilded", slug)
# restore state
if last_state == STATE_STARTED:
@@ -249,3 +268,52 @@ class AddonManager(CoreSysAttributes):
_LOGGER.info("Detect new Add-on after restore %s", slug)
self.local[slug] = addon
async def repair(self) -> None:
"""Repair local add-ons."""
needs_repair: List[Addon] = []
# Evaluate Add-ons to repair
for addon in self.installed:
if await addon.instance.exists():
continue
needs_repair.append(addon)
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
if not needs_repair:
return
for addon in needs_repair:
_LOGGER.info("Start repair for add-on: %s", addon.slug)
with suppress(DockerAPIError, KeyError):
# Need to pull the image again
if not addon.need_build:
await addon.instance.install(addon.version, addon.image)
continue
# Need local lookup
elif addon.need_build and not addon.is_detached:
store = self.store[addon.slug]
# If this add-on is available for rebuild
if addon.version == store.version:
await addon.instance.install(addon.version, addon.image)
continue
_LOGGER.error("Can't repair %s", addon.slug)
with suppress(AddonsError):
await self.uninstall(addon.slug)
async def sync_dns(self) -> None:
"""Sync add-ons DNS names."""
# Update hosts
for addon in self.installed:
if not await addon.instance.is_running():
continue
self.sys_dns.add_host(
ipv4=addon.ip_address, names=[addon.hostname], write=False
)
# Write hosts files
with suppress(CoreDNSError):
self.sys_dns.write_hosts()
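
The hosts file written above maps each running add-on's IP to the DNS names defined by the new `hostname`/`dns` properties further down; a toy sketch of that naming scheme, assuming `DNS_SUFFIX` is `local.hass.io` as in `hassio/const.py` at the time:

```python
# Toy sketch of the add-on DNS naming used above. DNS_SUFFIX is an
# assumption here ("local.hass.io", per hassio/const.py at the time).
from typing import List

DNS_SUFFIX = "local.hass.io"


def addon_dns_names(slug: str) -> List[str]:
    """Derive the DNS names CoreDNS serves for an add-on slug."""
    hostname = slug.replace("_", "-")  # underscores are not valid in hostnames
    return [f"{hostname}.{DNS_SUFFIX}"]


print(addon_dns_names("xdssd_xybla"))  # ['xdssd-xybla.local.hass.io']
```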

hassio/addons/addon.py

@@ -1,7 +1,7 @@
"""Init file for Hass.io add-ons."""
from contextlib import suppress
from copy import deepcopy
from ipaddress import IPv4Address, ip_address
from ipaddress import IPv4Address
import logging
from pathlib import Path, PurePath
import re
@@ -9,7 +9,7 @@ import secrets
import shutil
import tarfile
from tempfile import TemporaryDirectory
from typing import Any, Awaitable, Dict, Optional
from typing import Any, Awaitable, Dict, List, Optional
import voluptuous as vol
from voluptuous.humanize import humanize_error
@@ -35,7 +35,7 @@ from ..const import (
ATTR_USER,
ATTR_UUID,
ATTR_VERSION,
STATE_NONE,
DNS_SUFFIX,
STATE_STARTED,
STATE_STOPPED,
)
@@ -59,7 +59,8 @@ _LOGGER = logging.getLogger(__name__)
RE_WEBUI = re.compile(
r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")
r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$"
)
class Addon(AddonModel):
@@ -74,13 +75,11 @@ class Addon(AddonModel):
async def load(self) -> None:
"""Async initialize of object."""
with suppress(DockerAPIError):
await self.instance.attach()
await self.instance.attach(tag=self.version)
@property
def ip_address(self) -> IPv4Address:
"""Return IP of Add-on instance."""
if not self.is_installed:
return ip_address("0.0.0.0")
return self.instance.ip_address
@property
@@ -118,13 +117,15 @@ class Addon(AddonModel):
"""Return installed version."""
return self.persist[ATTR_VERSION]
@property
def dns(self) -> List[str]:
"""Return list of DNS name for that add-on."""
return [f"{self.hostname}.{DNS_SUFFIX}"]
@property
def options(self) -> Dict[str, Any]:
"""Return options with local changes."""
return {
**self.data[ATTR_OPTIONS],
**self.persist[ATTR_OPTIONS]
}
return {**self.data[ATTR_OPTIONS], **self.persist[ATTR_OPTIONS]}
@options.setter
def options(self, value: Optional[Dict[str, Any]]):
@@ -231,10 +232,10 @@ class Addon(AddonModel):
webui = RE_WEBUI.match(url)
# extract arguments
t_port = webui.group('t_port')
t_proto = webui.group('t_proto')
s_prefix = webui.group('s_prefix') or ""
s_suffix = webui.group('s_suffix') or ""
t_port = webui.group("t_port")
t_proto = webui.group("t_proto")
s_prefix = webui.group("s_prefix") or ""
s_suffix = webui.group("s_suffix") or ""
# search host port for this docker port
if self.ports is None:
@@ -248,7 +249,7 @@ class Addon(AddonModel):
# lookup the correct protocol from config
if t_proto:
proto = 'https' if self.options[t_proto] else 'http'
proto = "https" if self.options[t_proto] else "http"
else:
proto = s_prefix
@@ -353,8 +354,11 @@ class Addon(AddonModel):
schema(options)
write_json_file(self.path_options, options)
except vol.Invalid as ex:
_LOGGER.error("Add-on %s have wrong options: %s", self.slug,
humanize_error(options, ex))
_LOGGER.error(
"Add-on %s have wrong options: %s",
self.slug,
humanize_error(options, ex),
)
except JsonFileError:
_LOGGER.error("Add-on %s can't write options", self.slug)
else:
@@ -381,10 +385,11 @@ class Addon(AddonModel):
def write_asound(self):
"""Write asound config to file and return True on success."""
asound_config = self.sys_host.alsa.asound(
alsa_input=self.audio_input, alsa_output=self.audio_output)
alsa_input=self.audio_input, alsa_output=self.audio_output
)
try:
with self.path_asound.open('w') as config_file:
with self.path_asound.open("w") as config_file:
config_file.write(asound_config)
except OSError as err:
_LOGGER.error("Add-on %s can't write asound: %s", self.slug, err)
@@ -408,7 +413,7 @@ class Addon(AddonModel):
# Need install/update
with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_folder:
profile_file = Path(tmp_folder, 'apparmor.txt')
profile_file = Path(tmp_folder, "apparmor.txt")
adjust_profile(self.slug, self.path_apparmor, profile_file)
await self.sys_host.apparmor.load_profile(self.slug, profile_file)
@@ -430,14 +435,10 @@ class Addon(AddonModel):
return True
# merge options
options = {
**self.persist[ATTR_OPTIONS],
**default_options,
}
options = {**self.persist[ATTR_OPTIONS], **default_options}
# create voluptuous
new_schema = \
vol.Schema(vol.All(dict, validate_options(new_raw_schema)))
new_schema = vol.Schema(vol.All(dict, validate_options(new_raw_schema)))
# validate
try:
@@ -449,9 +450,6 @@ class Addon(AddonModel):
async def state(self) -> str:
"""Return running state of add-on."""
if not self.is_installed:
return STATE_NONE
if await self.instance.is_running():
return STATE_STARTED
return STATE_STOPPED
@@ -525,7 +523,7 @@ class Addon(AddonModel):
# store local image
if self.need_build:
try:
await self.instance.export_image(Path(temp, 'image.tar'))
await self.instance.export_image(Path(temp, "image.tar"))
except DockerAPIError:
raise AddonsError() from None
@@ -538,14 +536,14 @@ class Addon(AddonModel):
# Store local configs/state
try:
write_json_file(Path(temp, 'addon.json'), data)
write_json_file(Path(temp, "addon.json"), data)
except JsonFileError:
_LOGGER.error("Can't save meta for %s", self.slug)
raise AddonsError() from None
# Store AppArmor Profile
if self.sys_host.apparmor.exists(self.slug):
profile = Path(temp, 'apparmor.txt')
profile = Path(temp, "apparmor.txt")
try:
self.sys_host.apparmor.backup_profile(self.slug, profile)
except HostAppArmorError:
@@ -585,7 +583,7 @@ class Addon(AddonModel):
# Read snapshot data
try:
data = read_json_file(Path(temp, 'addon.json'))
data = read_json_file(Path(temp, "addon.json"))
except JsonFileError:
raise AddonsError() from None
@@ -593,8 +591,11 @@ class Addon(AddonModel):
try:
data = SCHEMA_ADDON_SNAPSHOT(data)
except vol.Invalid as err:
_LOGGER.error("Can't validate %s, snapshot data: %s",
self.slug, humanize_error(data, err))
_LOGGER.error(
"Can't validate %s, snapshot data: %s",
self.slug,
humanize_error(data, err),
)
raise AddonsError() from None
# If available
@@ -605,17 +606,19 @@ class Addon(AddonModel):
# Restore local add-on information
_LOGGER.info("Restore config for addon %s", self.slug)
restore_image = self._image(data[ATTR_SYSTEM])
self.sys_addons.data.restore(self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image)
self.sys_addons.data.restore(
self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image
)
# Check version / restore image
version = data[ATTR_VERSION]
if not await self.instance.exists():
_LOGGER.info("Restore/Install image for addon %s", self.slug)
image_file = Path(temp, 'image.tar')
image_file = Path(temp, "image.tar")
if image_file.is_file():
with suppress(DockerAPIError):
await self.instance.import_image(image_file, version)
await self.instance.import_image(image_file)
else:
with suppress(DockerAPIError):
await self.instance.install(version, restore_image)
@@ -643,11 +646,10 @@ class Addon(AddonModel):
raise AddonsError() from None
# Restore AppArmor
profile_file = Path(temp, 'apparmor.txt')
profile_file = Path(temp, "apparmor.txt")
if profile_file.exists():
try:
await self.sys_host.apparmor.load_profile(
self.slug, profile_file)
await self.sys_host.apparmor.load_profile(self.slug, profile_file)
except HostAppArmorError:
_LOGGER.error("Can't restore AppArmor profile")
raise AddonsError() from None

hassio/addons/build.py

@@ -21,7 +21,8 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
self.addon = addon
super().__init__(
Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
Path(self.addon.path_location, "build.json"), SCHEMA_BUILD_CONFIG
)
def save_data(self):
"""Ignore save function."""
@@ -31,8 +32,8 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
def base_image(self) -> str:
"""Base images for this add-on."""
return self._data[ATTR_BUILD_FROM].get(
self.sys_arch.default,
f"homeassistant/{self.sys_arch.default}-base:latest")
self.sys_arch.default, f"homeassistant/{self.sys_arch.default}-base:latest"
)
@property
def squash(self) -> bool:
@@ -47,28 +48,28 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
def get_docker_args(self, version):
"""Create a dict with Docker build arguments."""
args = {
'path': str(self.addon.path_location),
'tag': f"{self.addon.image}:{version}",
'pull': True,
'forcerm': True,
'squash': self.squash,
'labels': {
'io.hass.version': version,
'io.hass.arch': self.sys_arch.default,
'io.hass.type': META_ADDON,
'io.hass.name': self._fix_label('name'),
'io.hass.description': self._fix_label('description'),
"path": str(self.addon.path_location),
"tag": f"{self.addon.image}:{version}",
"pull": True,
"forcerm": True,
"squash": self.squash,
"labels": {
"io.hass.version": version,
"io.hass.arch": self.sys_arch.default,
"io.hass.type": META_ADDON,
"io.hass.name": self._fix_label("name"),
"io.hass.description": self._fix_label("description"),
},
'buildargs': {
'BUILD_FROM': self.base_image,
'BUILD_VERSION': version,
'BUILD_ARCH': self.sys_arch.default,
"buildargs": {
"BUILD_FROM": self.base_image,
"BUILD_VERSION": version,
"BUILD_ARCH": self.sys_arch.default,
**self.additional_args,
}
},
}
if self.addon.url:
args['labels']['io.hass.url'] = self.addon.url
args["labels"]["io.hass.url"] = self.addon.url
return args

hassio/addons/data.py

@@ -59,10 +59,9 @@ class AddonsData(JsonConfig, CoreSysAttributes):
def update(self, addon: AddonStore) -> None:
"""Update version of add-on."""
self.system[addon.slug] = deepcopy(addon.data)
self.user[addon.slug].update({
ATTR_VERSION: addon.version,
ATTR_IMAGE: addon.image,
})
self.user[addon.slug].update(
{ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
)
self.save_data()
def restore(self, slug: str, user: Config, system: Config, image: str) -> None:

hassio/addons/model.py

@@ -51,6 +51,7 @@ from ..const import (
ATTR_STDIN,
ATTR_TIMEOUT,
ATTR_TMPFS,
ATTR_UDEV,
ATTR_URL,
ATTR_VERSION,
ATTR_WEBUI,
@@ -59,7 +60,7 @@ from ..const import (
SECURITY_PROFILE,
)
from ..coresys import CoreSysAttributes
from .validate import MACHINE_ALL, RE_SERVICE, RE_VOLUME, validate_options
from .validate import RE_SERVICE, RE_VOLUME, validate_options
Data = Dict[str, Any]
@@ -109,6 +110,16 @@ class AddonModel(CoreSysAttributes):
"""Return name of add-on."""
return self.data[ATTR_NAME]
@property
def hostname(self) -> str:
"""Return slug/id of add-on."""
return self.slug.replace("_", "-")
@property
def dns(self) -> List[str]:
"""Return list of DNS name for that add-on."""
return []
@property
def timeout(self) -> int:
"""Return timeout of addon for docker stop."""
@@ -142,14 +153,14 @@ class AddonModel(CoreSysAttributes):
@property
def long_description(self) -> Optional[str]:
"""Return README.md as long_description."""
readme = Path(self.path_location, 'README.md')
readme = Path(self.path_location, "README.md")
# If readme not exists
if not readme.exists():
return None
# Return data
with readme.open('r') as readme_file:
with readme.open("r") as readme_file:
return readme_file.read()
@property
@@ -185,7 +196,7 @@ class AddonModel(CoreSysAttributes):
services = {}
for data in services_list:
service = RE_SERVICE.match(data)
services[service.group('service')] = service.group('rights')
services[service.group("service")] = service.group("rights")
return services
@@ -333,6 +344,11 @@ class AddonModel(CoreSysAttributes):
"""Return True if the add-on access to GPIO interface."""
return self.data[ATTR_GPIO]
@property
def with_udev(self) -> bool:
"""Return True if the add-on have his own udev."""
return self.data[ATTR_UDEV]
@property
def with_kernel_modules(self) -> bool:
"""Return True if the add-on access to kernel modules."""
@@ -391,7 +407,7 @@ class AddonModel(CoreSysAttributes):
@property
def supported_machine(self) -> List[str]:
"""Return list of supported machine."""
return self.data.get(ATTR_MACHINE, MACHINE_ALL)
return self.data.get(ATTR_MACHINE, [])
@property
def image(self) -> str:
@@ -409,7 +425,7 @@ class AddonModel(CoreSysAttributes):
volumes = {}
for volume in self.data[ATTR_MAP]:
result = RE_VOLUME.match(volume)
volumes[result.group(1)] = result.group(2) or 'ro'
volumes[result.group(1)] = result.group(2) or "ro"
return volumes
@@ -421,22 +437,22 @@ class AddonModel(CoreSysAttributes):
@property
def path_icon(self) -> Path:
"""Return path to add-on icon."""
return Path(self.path_location, 'icon.png')
return Path(self.path_location, "icon.png")
@property
def path_logo(self) -> Path:
"""Return path to add-on logo."""
return Path(self.path_location, 'logo.png')
return Path(self.path_location, "logo.png")
@property
def path_changelog(self) -> Path:
"""Return path to add-on changelog."""
return Path(self.path_location, 'CHANGELOG.md')
return Path(self.path_location, "CHANGELOG.md")
@property
def path_apparmor(self) -> Path:
"""Return path to custom AppArmor profile."""
return Path(self.path_location, 'apparmor.txt')
return Path(self.path_location, "apparmor.txt")
@property
def schema(self) -> vol.Schema:
@@ -449,9 +465,9 @@ class AddonModel(CoreSysAttributes):
def __eq__(self, other):
"""Compaired add-on objects."""
if self.slug == other.slug:
return True
return False
if not isinstance(other, AddonModel):
return False
return self.slug == other.slug
def _available(self, config) -> bool:
"""Return True if this add-on is available on this platform."""
@@ -460,8 +476,8 @@ class AddonModel(CoreSysAttributes):
return False
# Machine / Hardware
machine = config.get(ATTR_MACHINE) or MACHINE_ALL
if self.sys_machine not in machine:
machine = config.get(ATTR_MACHINE)
if machine and self.sys_machine not in machine:
return False
# Home Assistant

hassio/addons/validate.py

@@ -39,12 +39,9 @@ from ..const import (
ATTR_IMAGE,
ATTR_INGRESS,
ATTR_INGRESS_ENTRY,
ATTR_INGRESS_PANEL,
ATTR_INGRESS_PORT,
ATTR_INGRESS_TOKEN,
ATTR_INGRESS_PANEL,
ATTR_PANEL_ADMIN,
ATTR_PANEL_ICON,
ATTR_PANEL_TITLE,
ATTR_KERNEL_MODULES,
ATTR_LEGACY,
ATTR_LOCATON,
@@ -53,6 +50,9 @@ from ..const import (
ATTR_NAME,
ATTR_NETWORK,
ATTR_OPTIONS,
ATTR_PANEL_ADMIN,
ATTR_PANEL_ICON,
ATTR_PANEL_TITLE,
ATTR_PORTS,
ATTR_PORTS_DESCRIPTION,
ATTR_PRIVILEGED,
@@ -68,6 +68,7 @@ from ..const import (
ATTR_SYSTEM,
ATTR_TIMEOUT,
ATTR_TMPFS,
ATTR_UDEV,
ATTR_URL,
ATTR_USER,
ATTR_UUID,
@@ -100,14 +101,14 @@ _LOGGER = logging.getLogger(__name__)
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt):(?P<rights>provide|want|need)$")
V_STR = 'str'
V_INT = 'int'
V_FLOAT = 'float'
V_BOOL = 'bool'
V_EMAIL = 'email'
V_URL = 'url'
V_PORT = 'port'
V_MATCH = 'match'
V_STR = "str"
V_INT = "int"
V_FLOAT = "float"
V_BOOL = "bool"
V_EMAIL = "email"
V_URL = "url"
V_PORT = "port"
V_MATCH = "match"
RE_SCHEMA_ELEMENT = re.compile(
r"^(?:"
@@ -118,18 +119,30 @@ RE_SCHEMA_ELEMENT = re.compile(
r")\??$"
)
RE_DOCKER_IMAGE = re.compile(
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
RE_DOCKER_IMAGE = re.compile(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
RE_DOCKER_IMAGE_BUILD = re.compile(
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$")
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$"
)
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
MACHINE_ALL = [
'intel-nuc', 'odroid-c2', 'odroid-xu', 'orangepi-prime', 'qemux86',
'qemux86-64', 'qemuarm', 'qemuarm-64', 'raspberrypi', 'raspberrypi2',
'raspberrypi3', 'raspberrypi3-64', 'tinker',
"intel-nuc",
"odroid-c2",
"odroid-xu",
"orangepi-prime",
"qemux86",
"qemux86-64",
"qemuarm",
"qemuarm-64",
"raspberrypi",
"raspberrypi2",
"raspberrypi3",
"raspberrypi3-64",
"raspberrypi4",
"raspberrypi4-64",
"tinker",
]
@@ -143,130 +156,158 @@ def _simple_startup(value):
# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema(
    {
        vol.Required(ATTR_NAME): vol.Coerce(str),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Required(ATTR_SLUG): vol.Coerce(str),
        vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
        vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
        vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
        vol.Optional(ATTR_URL): vol.Url(),
        vol.Required(ATTR_STARTUP): vol.All(_simple_startup, vol.In(STARTUP_ALL)),
        vol.Required(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
        vol.Optional(ATTR_PORTS): DOCKER_PORTS,
        vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
        vol.Optional(ATTR_WEBUI): vol.Match(
            r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"
        ),
        vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
        vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(
            NETWORK_PORT, vol.Equal(0)
        ),
        vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
        vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
        vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
        vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
        vol.Optional(ATTR_TMPFS): vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
        vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
        vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
        vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
        vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
        vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
        vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
        vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
        vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
        vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
        vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
        vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
        vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
        vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
        vol.Required(ATTR_OPTIONS): dict,
        vol.Required(ATTR_SCHEMA): vol.Any(
            vol.Schema(
                {
                    vol.Coerce(str): vol.Any(
                        SCHEMA_ELEMENT,
                        [
                            vol.Any(
                                SCHEMA_ELEMENT,
                                {
                                    vol.Coerce(str): vol.Any(
                                        SCHEMA_ELEMENT, [SCHEMA_ELEMENT]
                                    )
                                },
                            )
                        ],
                        vol.Schema(
                            {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
                        ),
                    )
                }
            ),
            False,
        ),
        vol.Optional(ATTR_IMAGE): vol.Match(RE_DOCKER_IMAGE),
        vol.Optional(ATTR_TIMEOUT, default=10): vol.All(
            vol.Coerce(int), vol.Range(min=10, max=120)
        ),
    },
    extra=vol.REMOVE_EXTRA,
)

# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema(
    {
        vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema(
            {vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}
        ),
        vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
        vol.Optional(ATTR_ARGS, default=dict): vol.Schema(
            {vol.Coerce(str): vol.Coerce(str)}
        ),
    },
    extra=vol.REMOVE_EXTRA,
)

# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema(
    {
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
        vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
        vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(
            str
        ),
        vol.Optional(ATTR_OPTIONS, default=dict): dict,
        vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
        vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
        vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
        vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
        vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
        vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
        vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
    },
    extra=vol.REMOVE_EXTRA,
)

SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend(
    {
        vol.Required(ATTR_LOCATON): vol.Coerce(str),
        vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
    }
)

SCHEMA_ADDONS_FILE = vol.Schema(
    {
        vol.Optional(ATTR_USER, default=dict): {vol.Coerce(str): SCHEMA_ADDON_USER},
        vol.Optional(ATTR_SYSTEM, default=dict): {vol.Coerce(str): SCHEMA_ADDON_SYSTEM},
    }
)

SCHEMA_ADDON_SNAPSHOT = vol.Schema(
    {
        vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
        vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
        vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
    },
    extra=vol.REMOVE_EXTRA,
)
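
As a quick orientation (not part of the diff), a minimal config that passes the schema above; the literal keys and the "amd64"/"application"/"auto" values are assumptions about the ATTR_* constants and the ARCH/STARTUP/BOOT values, not taken from this hunk.

# Hypothetical minimal add-on config validated against SCHEMA_ADDON_CONFIG.
example = {
    "name": "Example Add-on",
    "version": "1.0.0",
    "slug": "example_addon",
    "description": "Demonstration add-on",
    "arch": ["amd64"],
    "startup": "application",
    "boot": "auto",
    "options": {"ssl": False},
    "schema": {"ssl": "bool"},
}
example = SCHEMA_ADDON_CONFIG(example)  # raises vol.Invalid if malformed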
def validate_options(raw_schema):
"""Validate schema."""
def validate(struct):
"""Create schema validator for add-ons options."""
options = {}
@@ -292,7 +333,7 @@ def validate_options(raw_schema):
except (IndexError, KeyError):
raise vol.Invalid(f"Type error for {key}") from None
_check_missing_options(raw_schema, options, "root")
return options
return validate
@@ -311,7 +352,7 @@ def _single_validate(typ, value, key):
# prepare range
range_args = {}
for group_name in ("i_min", "i_max", "f_min", "f_max"):
group_value = match.group(group_name)
if group_value:
range_args[group_name[2:]] = float(group_value)
@@ -331,7 +372,7 @@ def _single_validate(typ, value, key):
elif typ.startswith(V_PORT):
return NETWORK_PORT(value)
elif typ.startswith(V_MATCH):
return vol.Match(match.group("match"))(str(value))
raise vol.Invalid(f"Fatal error for {key} type {typ}")
@@ -363,8 +404,7 @@ def _nested_validate_dict(typ, data_dict, key):
# Nested?
if isinstance(typ[c_key], list):
options[c_key] = _nested_validate_list(typ[c_key][0], c_value, c_key)
else:
options[c_key] = _single_validate(typ[c_key], c_value, c_key)
@@ -376,7 +416,6 @@ def _check_missing_options(origin, exists, root):
"""Check if all options are exists."""
missing = set(origin) - set(exists)
for miss_opt in missing:
if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
continue
raise vol.Invalid(f"Missing option {miss_opt} in {root}")

hassio/api/__init__.py

@@ -9,6 +9,7 @@ from ..coresys import CoreSys, CoreSysAttributes
from .addons import APIAddons
from .auth import APIAuth
from .discovery import APIDiscovery
from .dns import APICoreDNS
from .hardware import APIHardware
from .hassos import APIHassOS
from .homeassistant import APIHomeAssistant
@@ -32,7 +33,8 @@ class RestAPI(CoreSysAttributes):
self.coresys: CoreSys = coresys
self.security: SecurityMiddleware = SecurityMiddleware(coresys)
self.webapp: web.Application = web.Application(
    middlewares=[self.security.token_validation]
)
# service stuff
self._runner: web.AppRunner = web.AppRunner(self.webapp)
@@ -54,211 +56,247 @@ class RestAPI(CoreSysAttributes):
self._register_services()
self._register_info()
self._register_auth()
self._register_dns()
def _register_host(self) -> None:
"""Register hostcontrol functions."""
api_host = APIHost()
api_host.coresys = self.coresys
self.webapp.add_routes(
    [
        web.get("/host/info", api_host.info),
        web.post("/host/reboot", api_host.reboot),
        web.post("/host/shutdown", api_host.shutdown),
        web.post("/host/reload", api_host.reload),
        web.post("/host/options", api_host.options),
        web.get("/host/services", api_host.services),
        web.post("/host/services/{service}/stop", api_host.service_stop),
        web.post("/host/services/{service}/start", api_host.service_start),
        web.post("/host/services/{service}/restart", api_host.service_restart),
        web.post("/host/services/{service}/reload", api_host.service_reload),
    ]
)
def _register_hassos(self) -> None:
"""Register HassOS functions."""
api_hassos = APIHassOS()
api_hassos.coresys = self.coresys
self.webapp.add_routes(
    [
        web.get("/hassos/info", api_hassos.info),
        web.post("/hassos/update", api_hassos.update),
        web.post("/hassos/update/cli", api_hassos.update_cli),
        web.post("/hassos/config/sync", api_hassos.config_sync),
    ]
)
def _register_hardware(self) -> None:
"""Register hardware functions."""
api_hardware = APIHardware()
api_hardware.coresys = self.coresys
self.webapp.add_routes(
    [
        web.get("/hardware/info", api_hardware.info),
        web.get("/hardware/audio", api_hardware.audio),
    ]
)
def _register_info(self) -> None:
"""Register info functions."""
api_info = APIInfo()
api_info.coresys = self.coresys
self.webapp.add_routes([web.get("/info", api_info.info)])
def _register_auth(self) -> None:
"""Register auth functions."""
api_auth = APIAuth()
api_auth.coresys = self.coresys
self.webapp.add_routes([web.post("/auth", api_auth.auth)])
def _register_supervisor(self) -> None:
"""Register Supervisor functions."""
api_supervisor = APISupervisor()
api_supervisor.coresys = self.coresys
self.webapp.add_routes(
    [
        web.get("/supervisor/ping", api_supervisor.ping),
        web.get("/supervisor/info", api_supervisor.info),
        web.get("/supervisor/stats", api_supervisor.stats),
        web.get("/supervisor/logs", api_supervisor.logs),
        web.post("/supervisor/update", api_supervisor.update),
        web.post("/supervisor/reload", api_supervisor.reload),
        web.post("/supervisor/options", api_supervisor.options),
        web.post("/supervisor/repair", api_supervisor.repair),
    ]
)
def _register_homeassistant(self) -> None:
"""Register Home Assistant functions."""
api_hass = APIHomeAssistant()
api_hass.coresys = self.coresys
self.webapp.add_routes(
    [
        web.get("/homeassistant/info", api_hass.info),
        web.get("/homeassistant/logs", api_hass.logs),
        web.get("/homeassistant/stats", api_hass.stats),
        web.post("/homeassistant/options", api_hass.options),
        web.post("/homeassistant/update", api_hass.update),
        web.post("/homeassistant/restart", api_hass.restart),
        web.post("/homeassistant/stop", api_hass.stop),
        web.post("/homeassistant/start", api_hass.start),
        web.post("/homeassistant/check", api_hass.check),
        web.post("/homeassistant/rebuild", api_hass.rebuild),
    ]
)
def _register_proxy(self) -> None:
"""Register Home Assistant API Proxy."""
api_proxy = APIProxy()
api_proxy.coresys = self.coresys
self.webapp.add_routes(
    [
        web.get("/homeassistant/api/websocket", api_proxy.websocket),
        web.get("/homeassistant/websocket", api_proxy.websocket),
        web.get("/homeassistant/api/stream", api_proxy.stream),
        web.post("/homeassistant/api/{path:.+}", api_proxy.api),
        web.get("/homeassistant/api/{path:.+}", api_proxy.api),
        web.get("/homeassistant/api/", api_proxy.api),
    ]
)
def _register_addons(self) -> None:
"""Register Add-on functions."""
api_addons = APIAddons()
api_addons.coresys = self.coresys
self.webapp.add_routes(
    [
        web.get("/addons", api_addons.list),
        web.post("/addons/reload", api_addons.reload),
        web.get("/addons/{addon}/info", api_addons.info),
        web.post("/addons/{addon}/install", api_addons.install),
        web.post("/addons/{addon}/uninstall", api_addons.uninstall),
        web.post("/addons/{addon}/start", api_addons.start),
        web.post("/addons/{addon}/stop", api_addons.stop),
        web.post("/addons/{addon}/restart", api_addons.restart),
        web.post("/addons/{addon}/update", api_addons.update),
        web.post("/addons/{addon}/options", api_addons.options),
        web.post("/addons/{addon}/rebuild", api_addons.rebuild),
        web.get("/addons/{addon}/logs", api_addons.logs),
        web.get("/addons/{addon}/icon", api_addons.icon),
        web.get("/addons/{addon}/logo", api_addons.logo),
        web.get("/addons/{addon}/changelog", api_addons.changelog),
        web.post("/addons/{addon}/stdin", api_addons.stdin),
        web.post("/addons/{addon}/security", api_addons.security),
        web.get("/addons/{addon}/stats", api_addons.stats),
    ]
)
def _register_ingress(self) -> None:
"""Register Ingress functions."""
api_ingress = APIIngress()
api_ingress.coresys = self.coresys
self.webapp.add_routes(
    [
        web.post("/ingress/session", api_ingress.create_session),
        web.get("/ingress/panels", api_ingress.panels),
        web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
    ]
)
def _register_snapshots(self) -> None:
"""Register snapshots functions."""
api_snapshots = APISnapshots()
api_snapshots.coresys = self.coresys
self.webapp.add_routes(
    [
        web.get("/snapshots", api_snapshots.list),
        web.post("/snapshots/reload", api_snapshots.reload),
        web.post("/snapshots/new/full", api_snapshots.snapshot_full),
        web.post("/snapshots/new/partial", api_snapshots.snapshot_partial),
        web.post("/snapshots/new/upload", api_snapshots.upload),
        web.get("/snapshots/{snapshot}/info", api_snapshots.info),
        web.post("/snapshots/{snapshot}/remove", api_snapshots.remove),
        web.post(
            "/snapshots/{snapshot}/restore/full", api_snapshots.restore_full
        ),
        web.post(
            "/snapshots/{snapshot}/restore/partial",
            api_snapshots.restore_partial,
        ),
        web.get("/snapshots/{snapshot}/download", api_snapshots.download),
    ]
)
def _register_services(self) -> None:
"""Register services functions."""
api_services = APIServices()
api_services.coresys = self.coresys
self.webapp.add_routes(
    [
        web.get("/services", api_services.list),
        web.get("/services/{service}", api_services.get_service),
        web.post("/services/{service}", api_services.set_service),
        web.delete("/services/{service}", api_services.del_service),
    ]
)
def _register_discovery(self) -> None:
"""Register discovery functions."""
api_discovery = APIDiscovery()
api_discovery.coresys = self.coresys
self.webapp.add_routes(
    [
        web.get("/discovery", api_discovery.list),
        web.get("/discovery/{uuid}", api_discovery.get_discovery),
        web.delete("/discovery/{uuid}", api_discovery.del_discovery),
        web.post("/discovery", api_discovery.set_discovery),
    ]
)
def _register_dns(self) -> None:
"""Register DNS functions."""
api_dns = APICoreDNS()
api_dns.coresys = self.coresys
self.webapp.add_routes(
    [
        web.get("/dns/info", api_dns.info),
        web.get("/dns/stats", api_dns.stats),
        web.get("/dns/logs", api_dns.logs),
        web.post("/dns/update", api_dns.update),
        web.post("/dns/options", api_dns.options),
    ]
)
def _register_panel(self) -> None:
"""Register panel for Home Assistant."""
panel_dir = Path(__file__).parent.joinpath("panel")
self.webapp.add_routes([web.static("/app", panel_dir)])
async def start(self) -> None:
"""Run RESTful API webserver."""
await self._runner.setup()
self._site = web.TCPSite(
    self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
)
try:
await self._site.start()
except OSError as err:
_LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
else:
_LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)

hassio/api/addons.py

@@ -8,6 +8,7 @@ import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..addons import AnyAddon
from ..docker.stats import DockerStats
from ..addons.utils import rating_security
from ..const import (
ATTR_ADDONS,
@@ -30,6 +31,7 @@ from ..const import (
ATTR_DEVICES,
ATTR_DEVICETREE,
ATTR_DISCOVERY,
ATTR_DNS,
ATTR_DOCKER_API,
ATTR_FULL_ACCESS,
ATTR_GPIO,
@@ -41,6 +43,7 @@ from ..const import (
ATTR_HOST_IPC,
ATTR_HOST_NETWORK,
ATTR_HOST_PID,
ATTR_HOSTNAME,
ATTR_ICON,
ATTR_INGRESS,
ATTR_INGRESS_ENTRY,
@@ -56,6 +59,7 @@ from ..const import (
ATTR_MACHINE,
ATTR_MAINTAINER,
ATTR_MEMORY_LIMIT,
ATTR_MEMORY_PERCENT,
ATTR_MEMORY_USAGE,
ATTR_NAME,
ATTR_NETWORK,
@@ -73,6 +77,7 @@ from ..const import (
ATTR_SOURCE,
ATTR_STATE,
ATTR_STDIN,
ATTR_UDEV,
ATTR_URL,
ATTR_VERSION,
ATTR_WEBUI,
@@ -91,35 +96,35 @@ from .utils import api_process, api_process_raw, api_validate
_LOGGER = logging.getLogger(__name__)
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
        vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
        vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
        vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
        vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
        vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
    }
)
# pylint: disable=no-value-for-parameter
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
class APIAddons(CoreSysAttributes):
"""Handle RESTful API for add-on functions."""
def _extract_addon(
    self, request: web.Request, check_installed: bool = True
) -> AnyAddon:
    """Return add-on, throw an exception if it doesn't exist."""
    addon_slug: str = request.match_info.get("addon")

    # Lookup itself
    if addon_slug == "self":
        return request.get(REQUEST_FROM)

    addon = self.sys_addons.get(addon_slug)
@@ -136,35 +141,36 @@ class APIAddons(CoreSysAttributes):
"""Return all add-ons or repositories."""
data_addons = []
for addon in self.sys_addons.all:
data_addons.append({
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
ATTR_DESCRIPTON: addon.description,
ATTR_VERSION: addon.latest_version,
ATTR_INSTALLED: addon.version if addon.is_installed else None,
ATTR_AVAILABLE: addon.available,
ATTR_DETACHED: addon.is_detached,
ATTR_REPOSITORY: addon.repository,
ATTR_BUILD: addon.need_build,
ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
})
data_addons.append(
{
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
ATTR_DESCRIPTON: addon.description,
ATTR_VERSION: addon.latest_version,
ATTR_INSTALLED: addon.version if addon.is_installed else None,
ATTR_AVAILABLE: addon.available,
ATTR_DETACHED: addon.is_detached,
ATTR_REPOSITORY: addon.repository,
ATTR_BUILD: addon.need_build,
ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
}
)
data_repositories = []
for repository in self.sys_store.all:
data_repositories.append({
ATTR_SLUG: repository.slug,
ATTR_NAME: repository.name,
ATTR_SOURCE: repository.source,
ATTR_URL: repository.url,
ATTR_MAINTAINER: repository.maintainer,
})
data_repositories.append(
{
ATTR_SLUG: repository.slug,
ATTR_NAME: repository.name,
ATTR_SOURCE: repository.source,
ATTR_URL: repository.url,
ATTR_MAINTAINER: repository.maintainer,
}
)
return {
ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: data_repositories,
}
return {ATTR_ADDONS: data_addons, ATTR_REPOSITORIES: data_repositories}
@api_process
async def reload(self, request: web.Request) -> None:
@@ -174,11 +180,13 @@ class APIAddons(CoreSysAttributes):
@api_process
async def info(self, request: web.Request) -> Dict[str, Any]:
    """Return add-on information."""
    addon: AnyAddon = self._extract_addon(request, check_installed=False)

    data = {
        ATTR_NAME: addon.name,
        ATTR_SLUG: addon.slug,
        ATTR_HOSTNAME: addon.hostname,
        ATTR_DNS: addon.dns,
        ATTR_DESCRIPTON: addon.description,
        ATTR_LONG_DESCRIPTION: addon.long_description,
        ATTR_AUTO_UPDATE: None,
@@ -219,6 +227,7 @@ class APIAddons(CoreSysAttributes):
ATTR_GPIO: addon.with_gpio,
ATTR_KERNEL_MODULES: addon.with_kernel_modules,
ATTR_DEVICETREE: addon.with_devicetree,
ATTR_UDEV: addon.with_udev,
ATTR_DOCKER_API: addon.access_docker_api,
ATTR_AUDIO: addon.with_audio,
ATTR_AUDIO_INPUT: None,
@@ -234,31 +243,33 @@ class APIAddons(CoreSysAttributes):
}

if addon.is_installed:
    data.update(
        {
            ATTR_STATE: await addon.state(),
            ATTR_WEBUI: addon.webui,
            ATTR_INGRESS_ENTRY: addon.ingress_entry,
            ATTR_INGRESS_URL: addon.ingress_url,
            ATTR_INGRESS_PORT: addon.ingress_port,
            ATTR_INGRESS_PANEL: addon.ingress_panel,
            ATTR_AUDIO_INPUT: addon.audio_input,
            ATTR_AUDIO_OUTPUT: addon.audio_output,
            ATTR_AUTO_UPDATE: addon.auto_update,
            ATTR_IP_ADDRESS: str(addon.ip_address),
            ATTR_VERSION: addon.version,
        }
    )

return data
@api_process
async def options(self, request: web.Request) -> None:
    """Store user options for add-on."""
    addon: AnyAddon = self._extract_addon(request)

    addon_schema = SCHEMA_OPTIONS.extend(
        {vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema)}
    )
    body: Dict[str, Any] = await api_validate(addon_schema, request)

    if ATTR_OPTIONS in body:
        addon.options = body[ATTR_OPTIONS]
@@ -281,25 +292,26 @@ class APIAddons(CoreSysAttributes):
@api_process
async def security(self, request: web.Request) -> None:
    """Store security options for add-on."""
    addon: AnyAddon = self._extract_addon(request)
    body: Dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

    if ATTR_PROTECTED in body:
        _LOGGER.warning("Protected flag changing for %s!", addon.slug)
        addon.protected = body[ATTR_PROTECTED]

    addon.save_persist()
@api_process
async def stats(self, request: web.Request) -> Dict[str, Any]:
    """Return resource information."""
    addon: AnyAddon = self._extract_addon(request)

    stats: DockerStats = await addon.stats()

    return {
        ATTR_CPU_PERCENT: stats.cpu_percent,
        ATTR_MEMORY_USAGE: stats.memory_usage,
        ATTR_MEMORY_LIMIT: stats.memory_limit,
        ATTR_MEMORY_PERCENT: stats.memory_percent,
        ATTR_NETWORK_RX: stats.network_rx,
        ATTR_NETWORK_TX: stats.network_tx,
        ATTR_BLK_READ: stats.blk_read,
@@ -309,19 +321,19 @@ class APIAddons(CoreSysAttributes):
@api_process
def install(self, request: web.Request) -> Awaitable[None]:
    """Install add-on."""
    addon: AnyAddon = self._extract_addon(request, check_installed=False)
    return asyncio.shield(addon.install())

@api_process
def uninstall(self, request: web.Request) -> Awaitable[None]:
    """Uninstall add-on."""
    addon: AnyAddon = self._extract_addon(request)
    return asyncio.shield(addon.uninstall())

@api_process
def start(self, request: web.Request) -> Awaitable[None]:
    """Start add-on."""
    addon: AnyAddon = self._extract_addon(request)

    # check options
    options = addon.options
@@ -335,13 +347,13 @@ class APIAddons(CoreSysAttributes):
@api_process
def stop(self, request: web.Request) -> Awaitable[None]:
    """Stop add-on."""
    addon: AnyAddon = self._extract_addon(request)
    return asyncio.shield(addon.stop())

@api_process
def update(self, request: web.Request) -> Awaitable[None]:
    """Update add-on."""
    addon: AnyAddon = self._extract_addon(request)

    if addon.latest_version == addon.version:
        raise APIError("No update available!")
@@ -351,13 +363,13 @@ class APIAddons(CoreSysAttributes):
@api_process
def restart(self, request: web.Request) -> Awaitable[None]:
    """Restart add-on."""
    addon: AnyAddon = self._extract_addon(request)
    return asyncio.shield(addon.restart())

@api_process
def rebuild(self, request: web.Request) -> Awaitable[None]:
    """Rebuild local build add-on."""
    addon: AnyAddon = self._extract_addon(request)
    if not addon.need_build:
        raise APIError("Only local build addons are supported")
@@ -366,43 +378,43 @@ class APIAddons(CoreSysAttributes):
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
    """Return logs from add-on."""
    addon: AnyAddon = self._extract_addon(request)
    return addon.logs()

@api_process_raw(CONTENT_TYPE_PNG)
async def icon(self, request: web.Request) -> bytes:
    """Return icon from add-on."""
    addon: AnyAddon = self._extract_addon(request, check_installed=False)
    if not addon.with_icon:
        raise APIError("No icon found!")

    with addon.path_icon.open("rb") as png:
        return png.read()

@api_process_raw(CONTENT_TYPE_PNG)
async def logo(self, request: web.Request) -> bytes:
    """Return logo from add-on."""
    addon: AnyAddon = self._extract_addon(request, check_installed=False)
    if not addon.with_logo:
        raise APIError("No logo found!")

    with addon.path_logo.open("rb") as png:
        return png.read()

@api_process_raw(CONTENT_TYPE_TEXT)
async def changelog(self, request: web.Request) -> str:
    """Return changelog from add-on."""
    addon: AnyAddon = self._extract_addon(request, check_installed=False)
    if not addon.with_changelog:
        raise APIError("No changelog found!")

    with addon.path_changelog.open("r") as changelog:
        return changelog.read()

@api_process
async def stdin(self, request: web.Request) -> None:
    """Write to stdin of add-on."""
    addon: AnyAddon = self._extract_addon(request)
    if not addon.with_stdin:
        raise APIError("STDIN not supported by add-on")
@@ -415,7 +427,7 @@ def _pretty_devices(addon: AnyAddon) -> List[str]:
dev_list = addon.devices
if not dev_list:
    return None

return [row.split(":")[0] for row in dev_list]
def _pretty_services(addon: AnyAddon) -> List[str]:

hassio/api/auth.py

@@ -29,8 +29,8 @@ class APIAuth(CoreSysAttributes):
Return a coroutine.
"""
username = data.get("username") or data.get("user")
password = data.get("password")
return self.sys_auth.check_login(addon, username, password)
@@ -56,6 +56,6 @@ class APIAuth(CoreSysAttributes):
data = await request.post()
return await self._process_dict(request, addon, data)
raise HTTPUnauthorized(
    headers={WWW_AUTHENTICATE: 'Basic realm="Hass.io Authentication"'}
)

hassio/api/dns.py (new file, 89 lines)

@@ -0,0 +1,89 @@
"""Init file for Hass.io DNS RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict
from aiohttp import web
import voluptuous as vol
from ..const import (
ATTR_BLK_READ,
ATTR_BLK_WRITE,
ATTR_CPU_PERCENT,
ATTR_HOST,
ATTR_LATEST_VERSION,
ATTR_MEMORY_LIMIT,
ATTR_MEMORY_USAGE,
ATTR_MEMORY_PERCENT,
ATTR_NETWORK_RX,
ATTR_NETWORK_TX,
ATTR_SERVERS,
ATTR_VERSION,
CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import DNS_SERVER_LIST
from .utils import api_process, api_process_raw, api_validate
_LOGGER = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): DNS_SERVER_LIST})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
class APICoreDNS(CoreSysAttributes):
"""Handle RESTful API for DNS functions."""
@api_process
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return DNS information."""
return {
ATTR_VERSION: self.sys_dns.version,
ATTR_LATEST_VERSION: self.sys_dns.latest_version,
ATTR_HOST: str(self.sys_docker.network.dns),
ATTR_SERVERS: self.sys_dns.servers,
}
@api_process
async def options(self, request: web.Request) -> None:
"""Set DNS options."""
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_SERVERS in body:
self.sys_dns.servers = body[ATTR_SERVERS]
self.sys_dns.save_data()
@api_process
async def stats(self, request: web.Request) -> Dict[str, Any]:
"""Return resource information."""
stats = await self.sys_dns.stats()
return {
ATTR_CPU_PERCENT: stats.cpu_percent,
ATTR_MEMORY_USAGE: stats.memory_usage,
ATTR_MEMORY_LIMIT: stats.memory_limit,
ATTR_MEMORY_PERCENT: stats.memory_percent,
ATTR_NETWORK_RX: stats.network_rx,
ATTR_NETWORK_TX: stats.network_tx,
ATTR_BLK_READ: stats.blk_read,
ATTR_BLK_WRITE: stats.blk_write,
}
@api_process
async def update(self, request: web.Request) -> None:
"""Update DNS plugin."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.sys_dns.latest_version)
if version == self.sys_dns.version:
raise APIError("Version {} is already in use".format(version))
await asyncio.shield(self.sys_dns.update(version))
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request: web.Request) -> Awaitable[bytes]:
"""Return DNS Docker logs."""
return self.sys_dns.logs()
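
For orientation (not part of the diff): an add-on with API access could exercise these new endpoints roughly as below. The http://hassio base URL, the X-Hassio-Key header, and the HASSIO_TOKEN variable follow the conventional add-on entrypoint and are assumptions here, not taken from this commit.

import os

import aiohttp


async def fetch_dns_info() -> dict:
    """Sketch: read the new /dns/info endpoint from inside an add-on."""
    headers = {"X-Hassio-Key": os.environ["HASSIO_TOKEN"]}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.get("http://hassio/dns/info") as resp:
            return await resp.json()  # {"version": ..., "servers": [...], ...}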

hassio/api/hardware.py

@@ -3,7 +3,13 @@ import logging
from .utils import api_process
from ..const import (
    ATTR_SERIAL,
    ATTR_DISK,
    ATTR_GPIO,
    ATTR_AUDIO,
    ATTR_INPUT,
    ATTR_OUTPUT,
)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
@@ -16,7 +22,9 @@ class APIHardware(CoreSysAttributes):
async def info(self, request):
"""Show hardware info."""
return {
    ATTR_SERIAL: list(
        self.sys_hardware.serial_devices | self.sys_hardware.serial_by_id
    ),
    ATTR_INPUT: list(self.sys_hardware.input_devices),
    ATTR_DISK: list(self.sys_hardware.disk_devices),
    ATTR_GPIO: list(self.sys_hardware.gpio_devices),

hassio/api/homeassistant.py

@@ -18,6 +18,7 @@ from ..const import (
ATTR_MACHINE,
ATTR_MEMORY_LIMIT,
ATTR_MEMORY_USAGE,
ATTR_MEMORY_PERCENT,
ATTR_NETWORK_RX,
ATTR_NETWORK_TX,
ATTR_PASSWORD,
@@ -121,6 +122,7 @@ class APIHomeAssistant(CoreSysAttributes):
ATTR_CPU_PERCENT: stats.cpu_percent,
ATTR_MEMORY_USAGE: stats.memory_usage,
ATTR_MEMORY_LIMIT: stats.memory_limit,
ATTR_MEMORY_PERCENT: stats.memory_percent,
ATTR_NETWORK_RX: stats.network_rx,
ATTR_NETWORK_TX: stats.network_tx,
ATTR_BLK_READ: stats.blk_read,

hassio/api/host.py

@@ -6,18 +6,25 @@ import voluptuous as vol
from .utils import api_process, api_validate
from ..const import (
    ATTR_HOSTNAME,
    ATTR_FEATURES,
    ATTR_KERNEL,
    ATTR_OPERATING_SYSTEM,
    ATTR_CHASSIS,
    ATTR_DEPLOYMENT,
    ATTR_STATE,
    ATTR_NAME,
    ATTR_DESCRIPTON,
    ATTR_SERVICES,
    ATTR_CPE,
)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
SERVICE = "service"

SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): vol.Coerce(str)})
class APIHost(CoreSysAttributes):
@@ -44,7 +51,8 @@ class APIHost(CoreSysAttributes):
# hostname
if ATTR_HOSTNAME in body:
await asyncio.shield(
    self.sys_host.control.set_hostname(body[ATTR_HOSTNAME])
)
@api_process
def reboot(self, request):
@@ -66,15 +74,15 @@ class APIHost(CoreSysAttributes):
"""Return list of available services."""
services = []
for unit in self.sys_host.services:
    services.append(
        {
            ATTR_NAME: unit.name,
            ATTR_DESCRIPTON: unit.description,
            ATTR_STATE: unit.state,
        }
    )

return {ATTR_SERVICES: services}
@api_process
def service_start(self, request):

hassio/api/info.py

@@ -14,6 +14,7 @@ from ..const import (
ATTR_MACHINE,
ATTR_SUPERVISOR,
ATTR_SUPPORTED_ARCH,
ATTR_TIMEZONE,
)
from ..coresys import CoreSysAttributes
from .utils import api_process
@@ -37,4 +38,5 @@ class APIInfo(CoreSysAttributes):
ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
ATTR_CHANNEL: self.sys_updater.channel,
ATTR_LOGGING: self.sys_config.logging,
ATTR_TIMEZONE: self.sys_timezone,
}

hassio/api/ingress.py

@@ -158,7 +158,12 @@ class APIIngress(CoreSysAttributes):
source_header = _init_header(request, addon)
async with self.sys_websession.request(
    request.method,
    url,
    headers=source_header,
    params=request.query,
    allow_redirects=False,
    data=data,
) as result:
headers = _response_header(result)
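
Note the newly added allow_redirects=False: presumably this stops the ingress proxy from following 3xx responses server-side, so redirects issued by an add-on travel back to the browser and resolve against the ingress path instead.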

The remaining file diffs are rebuilt frontend panel assets under hassio/api/panel/: regenerated minified webpack chunks, their source maps, and gzipped copies, shown by GitHub as suppressed, binary, or too long. The only human-readable change buried in them is a new hassio-ingress-view panel component, whose TypeScript source is visible inside one of the source maps.