Compare commits


851 Commits (0.56 ... 151)

Author SHA1 Message Date
Pascal Vizeli
d054b6dbb7 Merge pull request #979 from home-assistant/dev
Release 151
2019-03-28 15:01:23 +01:00
Pascal Vizeli
3093165325 Update cryptography (#981) 2019-03-28 14:37:06 +01:00
Pascal Vizeli
fd9c5bd412 Make arch required (#978) 2019-03-28 14:23:46 +01:00
Pascal Vizeli
9a8850fecd Remove unused pylint 2019-03-28 14:13:36 +01:00
Pascal Vizeli
b12175ab9a Support for deconz discovery & cleanup (#974)
* Support for deconz discovery & cleanup

* Split discovery

* Fix lint

* Fix lint / import
2019-03-28 14:11:18 +01:00
Pascal Vizeli
b52f90187b Make homeassistant container constant (#808)
* Make homeassistant container constant

* Update homeassistant.py

* Update homeassistant.py

* Update interface.py

* Update homeassistant.py

* Fix handling

* add start function

* Add typing

* Fix lint

* Add API call

* Update logs

* Fix some issue with watchdog

* Fix lint
2019-03-27 17:20:05 +01:00
Pascal Vizeli
4eb02f474d Bump version 151 2019-03-20 22:09:42 +01:00
Pascal Vizeli
dfdcddfd0b Merge pull request #968 from home-assistant/dev
Release 150
2019-03-20 22:08:17 +01:00
Pascal Vizeli
0391277bad Fix panel for 0.90.0 (#967) 2019-03-20 22:03:31 +01:00
Pascal Vizeli
73643b9bfe Bump version 150 2019-03-19 21:29:47 +01:00
Pascal Vizeli
93a52b8382 Merge pull request #965 from home-assistant/dev
Release 149
2019-03-19 21:26:38 +01:00
Pascal Vizeli
7a91bb1f6c Update panel for 0.90.0 v6 (#963) 2019-03-19 19:01:52 +01:00
Pascal Vizeli
26efa998a1 Revert dev link (#956) 2019-03-18 09:42:31 +01:00
Pascal Vizeli
fc9f3fee0a Fix 2019-03-18 09:20:48 +01:00
cadwal
ec19bd570b Include serial device node links in container device mapping to allow for persistent names in the HA serial config (#944) 2019-03-18 09:05:04 +01:00
David McNett
3335bad9e1 Correct typo: 'ignore' -> 'ignored' (#947) 2019-03-18 09:02:29 +01:00
Pascal Vizeli
71ae334e24 Update pylint (#945) 2019-03-11 14:03:28 +01:00
Pascal Vizeli
0807651fbd Bump version 149 2019-03-08 11:59:41 +01:00
Pascal Vizeli
7026d42d77 Merge pull request #942 from home-assistant/dev
Release 148
2019-03-08 11:41:58 +01:00
Pascal Vizeli
31047b9ec2 Down- or upgrade existing image on restore (#941) 2019-03-08 11:36:36 +01:00
Pascal Vizeli
714791de8f Bump version 148 2019-03-07 21:12:54 +01:00
Pascal Vizeli
c544fff2b2 Merge pull request #939 from home-assistant/dev
Release 147
2019-03-07 21:12:20 +01:00
Pascal Vizeli
fc45670686 Fix bug with update (#938) 2019-03-07 21:09:43 +01:00
Pascal Vizeli
5cefa0a2ee Bump version 147 2019-03-07 16:28:39 +01:00
Pascal Vizeli
a1910d4135 Merge pull request #937 from home-assistant/dev
Release 146
2019-03-07 16:28:09 +01:00
Pascal Vizeli
f1fecdde3a Enable Armv7 for Add-ons (#936)
* Enable Armv7 for Add-ons

* Cleanups

* fix tests
2019-03-07 16:00:41 +01:00
Pascal Vizeli
9ba4ea7d18 Check json files too 2019-03-07 10:03:07 +01:00
Pascal Vizeli
58a455d639 Fix lint 2019-03-04 10:09:34 +01:00
Pascal Vizeli
3ea85f6a28 Delete .travis.yml 2019-03-04 10:04:19 +01:00
Pascal Vizeli
4e1469ada4 Replace travis 2019-03-04 10:03:54 +01:00
Curtis Gibby
5778f78f28 Fix misspelling on "environment" (#934) 2019-03-04 10:00:41 +01:00
Pascal Vizeli
227125cc0b Change json error handling (#930)
* Change json error handling

* Typing + modern way to read file

* fix lint
2019-02-26 00:19:05 +01:00
Pascal Vizeli
b36e178c45 Bump version to 146 2019-02-21 17:24:01 +01:00
Pascal Vizeli
32c9198fb2 Merge pull request #929 from home-assistant/dev
Release 145
2019-02-21 17:21:43 +01:00
Pascal Vizeli
6983dcc267 Fix image arch version on restore/update (#928) 2019-02-21 16:40:49 +01:00
Pascal Vizeli
813fcc41f0 Bump version 145 2019-02-20 17:04:41 +01:00
Pascal Vizeli
f4e9dd0f1c Merge pull request #927 from home-assistant/dev
Release 144
2019-02-20 17:04:15 +01:00
Pascal Vizeli
7f074142bf Replace pycryptodome with cryptography (#923)
* Replace pycryptodome with cryptography

* Fix typing

* fix typing

* Fix lints

* Fix build

* Add musl libc

* Fix lint

* fix lint

* Fix algo

* Add more typing fix crypto imports v2

* Fix padding
2019-02-20 10:30:22 +01:00
Pascal Vizeli
b6df37628d Merge pull request #924 from home-assistant/feat-wait-time
Increase wait time for home assistant startup
2019-02-18 16:24:21 +01:00
Pascal Vizeli
7867eded50 Increase wait time for home assistant startup 2019-02-18 09:51:21 +01:00
Pascal Vizeli
311abb8a90 Bump version 144 2019-02-02 11:48:29 +01:00
Pascal Vizeli
21303f4b05 Merge pull request #913 from home-assistant/dev
Release 143
2019-02-02 11:47:13 +01:00
Pascal Vizeli
da3270af67 Fix that need_build works like image (#912) 2019-01-31 22:08:10 +01:00
Pascal Vizeli
35aae69f23 Support armv7 and allow support of multiple arch types per CPU (#892)
* Support armv7 and first abstraction

* Change layout

* Add more type hints

* Fix imports

* Update

* move forward

* add tests

* fix type

* fix lint & tests

* fix tests

* Fix unittests

* Fix create folder

* cleanup

* Fix import order

* cleanup loop parameter

* cleanup init function

* Allow changeable image name

* fix setup

* Fix load of arch

* Fix lint

* Add typing

* fix init

* fix hassos cli problem & stick on supervisor arch

* address comments

* cleanup

* Fix image selfheal

* Add comment

* update uvloop

* remove uvloop

* fix tagging

* Fix install name

* Fix validate build config

* Abstract image_name from system cache
2019-01-31 18:47:44 +01:00
Franck Nijhof
118a2e1951 Revert "Delete move.yml" (#901)
This reverts commit 07c4058a8c.
2019-01-22 12:19:38 +01:00
Pascal Vizeli
9053341581 Fix wrong UTF-8 config files (#895)
* Fix wrong UTF-8 config files

* Fix lint

* Update data.py
2019-01-18 18:57:54 +01:00
Pascal Vizeli
27532a8a00 Update aioHttp 3.5.4 (#894) 2019-01-17 21:40:52 +01:00
Pascal Vizeli
7fdfa630b5 Bump version 143 2019-01-15 12:11:56 +01:00
Pascal Vizeli
3974d5859f Merge pull request #890 from home-assistant/dev
Release 142
2019-01-15 12:10:58 +01:00
Pascal Vizeli
aa1c765c4b Add support for SYS_MODULE (#889)
* Add support for SYS_MODULE

* Update flake stuff

* Fix lint

* Fix lint

* Fix lint

* Fix lint
2019-01-15 00:56:07 +01:00
Pascal Vizeli
e78385e7ea Support to map kernel modules ro into container (#888) 2019-01-14 23:20:30 +01:00
Pascal Vizeli
9d59b56c94 Fix lint 2019-01-14 23:20:07 +01:00
Pascal Vizeli
9d72dcabfc Support to map kernel modules ro into container 2019-01-14 21:57:14 +01:00
Pascal Vizeli
a0b5d0b67e Fix error on first run because the landing page is already running (#886)
* Fix error on first run because the landing page is already running

* Update homeassistant.py
2019-01-14 21:25:17 +01:00
Pascal Vizeli
2b5520405f Fix log info about update on dev (#885) 2019-01-14 20:05:03 +01:00
Pascal Vizeli
ca376b3fcd Update docker-py to 3.7.0 (#882)
* Update docker-py to 3.7.0

* Update __init__.py

* Update addon.py
2019-01-14 20:04:27 +01:00
Pascal Vizeli
11e3c0c547 Update aioHttp to 3.5.2 (#881) 2019-01-13 12:22:01 +01:00
Pascal Vizeli
9da136e037 Fix API descriptions 2019-01-02 23:31:35 +01:00
Pascal Vizeli
9b3e59d876 Merge pull request #861 from casperklein/patch-1
Duplicate entry removed.
2018-12-20 16:18:29 +01:00
Casper
7a592795b5 Duplicate entry removed. 2018-12-20 13:45:04 +01:00
Pascal Vizeli
5b92137699 Bump version 142 2018-12-11 23:46:01 +01:00
Pascal Vizeli
7520cdfeb4 Merge pull request #853 from home-assistant/dev
Release 141
2018-12-11 23:45:29 +01:00
Pascal Vizeli
0ada791e3a Update Panel for Home Assistant 0.84.0 (#852) 2018-12-11 20:54:30 +01:00
Pascal Vizeli
73afced4dc Bugfix stack trace on remove (#842) 2018-11-30 00:09:33 +01:00
Pascal Vizeli
633a2e93bf Create ISSUE_TEMPLATE.md 2018-11-22 14:53:49 +01:00
Pascal Vizeli
07c4058a8c Delete move.yml 2018-11-22 14:46:58 +01:00
Alastair D'Silva
b6f3938b14 Add support for the Orange Pi Prime (#829)
Signed-off-by: Alastair D'Silva <alastair@d-silva.org>
2018-11-21 17:03:25 +01:00
Pascal Vizeli
57534fac96 Bump version 141 2018-11-20 17:39:39 +01:00
Pascal Vizeli
4a03e72983 Merge pull request #827 from home-assistant/dev
Release 140
2018-11-20 17:39:12 +01:00
Pascal Vizeli
ddb29ea9b1 Speedup build 2018-11-20 17:17:04 +01:00
Pascal Vizeli
95179c30f7 Update Panel with new security functions (#826) 2018-11-20 17:13:55 +01:00
Pascal Vizeli
f49970ce2c Update .gitmodules 2018-11-20 12:25:45 +01:00
Pascal Vizeli
790818d1aa Update README.md 2018-11-20 10:56:19 +01:00
Pascal Vizeli
62f675e613 Fix documentation 2018-11-19 22:37:46 +01:00
Pascal Vizeli
f33434fb01 Downgrade discovery duplicate logging (#824) 2018-11-19 21:05:51 +01:00
Pascal Vizeli
254d6aee32 Small code cleanups (#822)
* Small code cleanups

* Update homeassistant.py
2018-11-19 16:44:21 +01:00
Pascal Vizeli
a5ecd597ed Add tests for add-ons map (#821) 2018-11-19 16:43:24 +01:00
Pascal Vizeli
0fab3e940a Merge pull request #820 from home-assistant/master
Master
2018-11-19 14:52:45 +01:00
Pascal Vizeli
60fbebc16b Rate add-ons better when they implement hass auth (#819)
* Rate add-ons better when they implement hass auth

* Update utils.py
2018-11-19 14:51:03 +01:00
Christian
ec366d8112 Provide options for legacy add-ons (#814)
* Provide options for legacy add-ons

* Remove whitespace from blank line

* Only provide primitive data types as Docker environment variable

* Fix linting issues

* Update addon.py
2018-11-19 12:05:12 +01:00
Christian
b8818788c9 Bugfix Add-on validate correct image url (#810)
* Bugfix Add-on validate correct image path

* Add tests for different add-on image urls
2018-11-18 19:29:23 +01:00
Pascal Vizeli
e23f6f6998 Update uvloop to version 0.11.3 (#818) 2018-11-18 12:08:59 +01:00
Pascal Vizeli
05b58d76b9 Add tests for hass.io (#817)
* Add tests for hass.io

* Fix folder

* Fix test command
2018-11-18 12:08:46 +01:00
Pascal Vizeli
644d13e3fa Bugfix Add-on validate on RO (#803) 2018-11-09 23:53:41 +01:00
Pascal Vizeli
9de71472d4 Remove links they are not needed 2018-11-09 10:26:01 +01:00
Pascal Vizeli
bf28227b91 Add developer guide 2018-11-09 10:25:29 +01:00
Pascal Vizeli
4c1ee49068 Bump version 140 2018-11-05 16:20:01 +01:00
Pascal Vizeli
6e7cf5e4c9 Merge pull request #796 from home-assistant/dev
Release 139
2018-11-05 16:19:17 +01:00
Pascal Vizeli
11f8c97347 Fix discovery update (#795)
* Update discovery.py

* Update discovery.py

* Update discovery.py

* Update discovery.py

* Update discovery.py

* Update discovery.py

* Update discovery.py
2018-11-05 14:59:57 +01:00
Pascal Vizeli
a1461fd518 Update requirements.txt 2018-11-05 13:53:16 +01:00
Pascal Vizeli
fa5c2e37d3 Discovery default config (#793) 2018-11-05 07:45:28 +01:00
luca-simonetti
1f091b20ad fix: use a different convention to handle multiple devices on same card (#767)
* fix: use a different convention to handle multiple devices on same card

* fix: use a different convention to handle multiple devices on same card

* Update alsa.py

* Update alsa.py
2018-11-02 10:47:25 +01:00
Pascal Vizeli
d3b4a03851 Catch exception on watchdog for pretty log (#778)
* Catch exception on watchdog for pretty log

* Update tasks.py
2018-10-29 16:40:19 +01:00
Jorim Tielemans
fb12fee59b Expand add-on installation error message (#783)
* Expand error message

Since an add-on is only available for certain machine and architecture combinations, we should log both.

* Update addon.py
2018-10-27 15:24:56 +02:00
Pascal Vizeli
7a87d2334a flake8 update to 3.6.0 (#777)
* flake8 update to 3.6.0

* fix lint
2018-10-27 15:23:26 +02:00
Pascal Vizeli
9591e71138 Update auth.py (#771) 2018-10-24 14:02:16 +02:00
Ville Skyttä
cecad526a2 Grammar and spelling fixes (#772) 2018-10-24 14:01:28 +02:00
Pascal Vizeli
53dab4ee45 Bump version 139 2018-10-16 12:52:19 +02:00
Pascal Vizeli
8abbba46c7 Merge pull request #766 from home-assistant/dev
Release 138
2018-10-16 12:51:47 +02:00
Pascal Vizeli
0f01ac1b59 Fix syntax 2018-10-16 12:45:06 +02:00
Pascal Vizeli
aa8ab593c0 Rename login_backend to auth_api (#764)
* Update const.py

* Update validate.py

* Update addon.py

* Update auth.py

* Update addons.py

* Update API.md
2018-10-16 12:33:40 +02:00
Pascal Vizeli
84f791220e Don't clean cache on fake auth (#765)
* Don't clean cache on fake auth

* Update auth.py
2018-10-16 12:30:24 +02:00
Pascal Vizeli
cee2c5469f Bump version 138 2018-10-15 15:25:29 +02:00
Pascal Vizeli
6e75964a8b Merge pull request #761 from home-assistant/dev
Release 137
2018-10-15 15:25:05 +02:00
Pascal Vizeli
5ab5036504 Fix proxy handling with failing connection (#760)
* Fix proxy handling with failing connection

* fix lint

* Fix exception handling

* cleanup error handling

* Fix type error

* Fix event stream

* Fix stream handling

* Fix

* Fix lint

* Handle

* Update proxy.py

* fix lint
2018-10-15 13:01:52 +02:00
Pascal Vizeli
000a3c1f7e Bump to 137 2018-10-12 14:39:47 +02:00
Pascal Vizeli
8ea123eb94 Merge pull request #754 from home-assistant/dev
Release 136
2018-10-12 14:39:18 +02:00
Pascal Vizeli
571c42ef7d Create role for backup add-ons (#755)
* Create role for backup add-ons

* Update validate.py

* Update security.py
2018-10-12 12:48:12 +02:00
Pascal Vizeli
8443da0b9f Add-on SSO support with Home Assistant auth system (#752)
* Create auth.py

* Finish auth cache

* Add documentation

* Add valid schema

* Update auth.py

* Update auth.py

* Update security.py

* Create auth.py

* Update coresys.py

* Update bootstrap.py

* Update const.py

* Update validate.py

* Update const.py

* Update addon.py

* Update auth.py

* Update __init__.py

* Update auth.py

* Update auth.py

* Update auth.py

* Update const.py

* Update auth.py

* Update auth.py

* Update auth.py

* Update validate.py

* Update coresys.py

* Update auth.py

* Update auth.py

* more security

* Update API.md

* Update auth.py

* Update auth.py

* Update auth.py

* Update auth.py

* Update auth.py

* Update homeassistant.py

* Update homeassistant.py
2018-10-12 12:21:48 +02:00
Pascal Vizeli
7dbbcf24c8 Check existing hardware for audio/gpio devices (#753)
* Update hardware.py

* Update addon.py

* Update hardware.py

* Update addon.py
2018-10-12 10:22:58 +02:00
Pascal Vizeli
468cb0c36b Rename info (#750)
* Rename version to info

* fix security
2018-10-10 16:46:34 +02:00
Pascal Vizeli
78e093df96 Bump version 136 2018-10-09 17:10:25 +02:00
Pascal Vizeli
ec4d7dab21 Merge pull request #749 from home-assistant/dev
Release 135
2018-10-09 17:08:19 +02:00
Pascal Vizeli
d00ee0adea Add hostname into version API call (#748) 2018-10-09 15:40:44 +02:00
Pascal Vizeli
55d5ee4ed4 Merge pull request #747 from mbo18/patch-1
Add missing tinker board
2018-10-09 14:00:44 +02:00
mbo18
0e51d74265 Add missing tinker board 2018-10-09 09:29:38 +02:00
Pascal Vizeli
916f3caedd Bump version 135 2018-10-08 00:21:59 +02:00
Pascal Vizeli
ff80ccce64 Merge pull request #745 from home-assistant/dev
Release 134
2018-10-08 00:20:20 +02:00
Pascal Vizeli
23f28b38e9 small code cleanups (#740)
* small code cleanups

* Update __init__.py

* Update homeassistant.py

* Update __init__.py

* Update homeassistant.py

* Update homeassistant.py

* Update __init__.py

* fix list

* Fix api call
2018-10-07 23:50:18 +02:00
Franck Nijhof
da425a0530 Adds support for privilege DAC_READ_SEARCH (#743)
* Adds support for privilege DAC_READ_SEARCH

* 🚑 Fixes security rating regarding privileges
2018-10-07 19:17:06 +02:00
Jorim Tielemans
79dca1608e Fix machine 'odroid-c2' (#744)
Odroid-cu2 does not exist AFAIK, it needs to be c2.
2018-10-07 19:16:29 +02:00
Pascal Vizeli
33b615e40d Fix manager access to /addons (#738) 2018-10-05 13:48:29 +02:00
Pascal Vizeli
c825c40c4d Bump version 134 2018-10-01 19:07:48 +02:00
Pascal Vizeli
8beb723cc2 Merge pull request #736 from home-assistant/dev
Release 133
2018-10-01 19:07:17 +02:00
Pascal Vizeli
94fd24c251 Bugfix message handling (#735) 2018-10-01 18:57:31 +02:00
Pascal Vizeli
bf75a8a439 Cleanup discovery data (#734)
* Cleanup discovery data

* Update API.md

* Update validate.py

* Update discovery.py

* Update const.py
2018-10-01 16:17:46 +02:00
Pascal Vizeli
36cdb05387 Don't allow add-on to update itself (#733) 2018-10-01 15:22:26 +02:00
Pascal Vizeli
dccc652d42 Bump version 133 2018-09-30 20:16:42 +02:00
Pascal Vizeli
74e03a9a2e Merge pull request #728 from home-assistant/dev
Release 132
2018-09-30 20:16:08 +02:00
Pascal Vizeli
2f6df3a946 Fix discovery on add-on uninstall (#731)
* Fix discovery on add-on uninstall

* Update discovery.py

* Update discovery.py
2018-09-30 18:24:10 +02:00
Pascal Vizeli
2872be6385 Update Panel (#730) 2018-09-30 17:58:26 +02:00
Pascal Vizeli
af19e95c81 Make discovery persistent (#727)
* Make discovery persistent

* fix file handling

* fix detection

* Smooth

* Fix ring import

* Fix handling

* fix schema

* fix validate

* fix discovery cleanup
2018-09-30 15:33:16 +02:00
Pascal Vizeli
e5451973bd Overwork Services/Discovery (#725)
* Update homeassistant.py

* Update validate.py

* Update exceptions.py

* Update services.py

* Update discovery.py

* fix gitignore

* Fix handling for discovery

* use object in ref

* lock down discovery API

* fix api

* Design

* Fix API

* fix lint

* fix

* Fix security layer

* add provide layer

* fix access

* change rating

* fix rights

* Fix API error handling

* raise error

* fix rights

* api

* fix handling

* fix

* debug

* debug json

* Fix validator

* fix error

* new url

* fix schema
2018-09-29 19:49:08 +02:00
Pascal Vizeli
4ef8c9d633 Change API for new UI & Add machine support (#720)
* Change API for new UI

* Update API.md

* Update validate.py

* Update addon.py

* Update API.md

* Update addons.py

* fix lint

* Update security.py

* Update version.py

* Update security.py

* Update security.py
2018-09-28 14:34:43 +02:00
Pascal Vizeli
4a9dcb540e Add support for long live token (#719)
* Add support for long live token

* Update proxy.py
2018-09-27 14:35:40 +02:00
Pascal Vizeli
61eefea358 Add version endpoint (#718)
* Add version endpoint

* Update API.md

* Update const.py

* Create version.py

* Update __init__.py

* Update security.py

* Update version.py
2018-09-26 11:39:45 +02:00
Pascal Vizeli
f2a5512bbf Fix non-existing label bug (#717) 2018-09-25 13:46:48 +02:00
Pascal Vizeli
2f4e114f25 Fix wrong regex 2018-09-25 12:51:47 +02:00
Pascal Vizeli
c91bac2527 Add log to blacklist / reduce free calls (#713) 2018-09-24 17:03:21 +02:00
Pascal Vizeli
52da7605f5 Enable Security API (#710)
* Enable Security API

* Update addons.py

* Update proxy.py

* Update __init__.py

* Update security.py

* Fix lint
2018-09-24 15:11:33 +02:00
Fabian Affolter
267791833e Update docstrings, comments and log messages (#707) 2018-09-18 23:47:47 +02:00
Pascal Vizeli
67dcf1563b Bump version to 132 2018-09-18 21:20:10 +02:00
Pascal Vizeli
ccff0f5b9e Merge pull request #706 from home-assistant/dev
Release 131
2018-09-18 21:19:33 +02:00
Pascal Vizeli
9f8ad05471 Add API role system (#703)
* Add API role system

* Finish

* Simplify

* Fix lint

* Fix rights

* Fix lint

* Fix spell

* Fix log
2018-09-18 20:39:58 +02:00
Fabian Affolter
c2299ef8da Fix typos (#704) 2018-09-18 18:17:20 +02:00
Franck Nijhof
f5845564db 👕 Fixes a typo in method name (#702) 2018-09-17 23:11:53 +02:00
Franck Nijhof
17904d70d8 🚀 Adds venv to .dockerignore (#701) 2018-09-17 21:03:14 +02:00
Franck Nijhof
622e99e04c Adds host PID mode support for add-ons (#700)
*  Adds host PID mode support for add-ons.

* 🔒 Disables host PID mode when in protected mode

* 🚦 Adds more negative rating weight to host PID mode
2018-09-17 21:02:28 +02:00
Pascal Vizeli
061420f279 Make Label handling more robust (#696)
* Make Label handling more robust

* Update interface.py

* Update interface.py

* Update interface.py
2018-09-15 22:07:05 +02:00
Franck Nijhof
3d459f1b8b Adds support for SYS_PTRACE add-on privileges (#697) 2018-09-15 22:05:50 +02:00
Pascal Vizeli
5f3dd6190a Bump version 130 2018-09-10 00:02:27 +02:00
Pascal Vizeli
ac824d3af6 Merge pull request #691 from home-assistant/dev
Release 130
2018-09-10 00:00:56 +02:00
Pascal Vizeli
dd25c29544 Bugfix Proxy with new token (#690)
* Update proxy.py

* Update security.py
2018-09-09 23:47:35 +02:00
Pascal Vizeli
5cbdbffbb2 Bump version to 130 2018-09-08 00:17:05 +02:00
Pascal Vizeli
bb81f14c2c Merge pull request #688 from home-assistant/dev
Release 129
2018-09-08 00:16:17 +02:00
Pascal Vizeli
cecefd6972 Change access to API (#686)
* Update API.md

* Update API.md

* Update API.md

* Update addons.py

* Update addons.py

* Update addons.py

* Update addons.py

* Update __init__.py

* Update security.py

* Update security.py

* Update const.py

* Update validate.py

* Update __init__.py

* Update validate.py

* Update homeassistant.py

* Update homeassistant.py

* Update homeassistant.py

* Update addon.py

* Update addon.py

* Update homeassistant.py

* Fix lint

* Fix lint

* Backward compatibility

* Make token more robust

* Fix bug

* Logic error

* Fix access

* fix valid
2018-09-07 22:59:31 +02:00
Pascal Vizeli
ff7f6a0b4c Bump version 129 2018-08-29 10:16:04 +02:00
Pascal Vizeli
1dc9f35e12 Merge pull request #674 from home-assistant/dev
Release 128
2018-08-29 10:13:57 +02:00
Pascal Vizeli
051b63c7cc Fix access token property (#673)
* Fix access token property

* revert
2018-08-28 17:04:39 +02:00
Pascal Vizeli
aac4b9b24a Snapshot/Restore Home-Assistant token (#672)
* Snapshot/Restore Home-Assistant token

* Encrypt token & check api

* fix lint
2018-08-28 16:32:17 +02:00
Paulus Schoutsen
1a208a20b6 Handle access token expiration (#671) 2018-08-28 12:14:40 +02:00
Pascal Vizeli
b1e8722ead Update: pycryptodome to 3.6.6 (#670) 2018-08-28 12:04:32 +02:00
Pascal Vizeli
a66af6e903 Update aiohttp to 3.4.0 (#668)
Update: aiohttp to 3.4.0
2018-08-28 01:18:38 +02:00
Pascal Vizeli
0c345fc615 Bump version 128 2018-08-19 22:05:42 +02:00
Pascal Vizeli
087b082a6b Merge pull request #660 from home-assistant/dev
Release 127
2018-08-19 22:03:49 +02:00
Pascal Vizeli
0b85209eae Detect running record migration (#659)
* Detect running record migration

* Fix order

* Change order second one
2018-08-19 21:58:19 +02:00
Pascal Vizeli
d81bc7de46 Change rating 1-6 (#658) 2018-08-19 18:17:14 +02:00
Pascal Vizeli
e3a99b9f89 Fix /share inside whitelist (#657) 2018-08-18 15:05:18 +02:00
Pascal Vizeli
5d319b37ea Bump version 127 2018-08-16 23:38:57 +02:00
Pascal Vizeli
9f25606986 Merge pull request #653 from home-assistant/dev
Release 126
2018-08-16 23:38:24 +02:00
Pascal Vizeli
ecd12732ee New generation of security and access (#652)
* New generation of security and access

* Update const.py

* Update validate.py

* Update addon.py

* Update validate.py

* Fix name

* Allow access

* Fix

* add logs

* change message

* add rating

* fix lint

* fix lint

* fix

* Fix
2018-08-16 22:49:08 +02:00
Pascal Vizeli
85fbde8e36 Fix Dockerfile 2018-08-16 01:42:56 +02:00
Pascal Vizeli
6e6c2c3efb Change timezone handling (#641)
* Change timezone handling

* Update dt.py

* Update homeassistant.py

* fix

* Use new timezone

* fix handling

* fix regex

* fix regex

* Rename old config

* fix lint

* simplify

* fix regex

* fix

* cleanup

* cleanup

* fix

* fix find

* mm
2018-08-16 01:40:20 +02:00
Pascal Vizeli
0d4a808449 Improve docker build cache for supervisor (#651) 2018-08-15 23:52:52 +02:00
Pascal Vizeli
087f746647 update docker API to 3.5.0 (#650) 2018-08-15 22:05:13 +02:00
Pascal Vizeli
640d66ad1a Update uvloop 0.11.2 (#648) 2018-08-15 21:38:57 +02:00
Pascal Vizeli
f5f5ed83af Bump version 126 2018-08-09 14:38:34 +02:00
Pascal Vizeli
95f01a1161 Merge pull request #640 from home-assistant/dev
Release 125
2018-08-09 14:37:56 +02:00
Pascal Vizeli
b84e7e7d94 Allow to reset token (#639)
* Allow to reset token

* Update homeassistant.py
2018-08-09 14:37:00 +02:00
Pascal Vizeli
5d7018f3f0 Bump version 125 2018-08-09 01:05:21 +02:00
Pascal Vizeli
d87a85ceb5 Merge pull request #636 from home-assistant/dev
Release 124
2018-08-09 01:03:47 +02:00
Pascal Vizeli
9ab6e80b6f Cleanup logging (#637)
* Cleanup logging

* simplify
2018-08-09 01:03:00 +02:00
Pascal Vizeli
78e91e859e Add add-on support for docker sock ro (#635)
* Add add-on support for docker sock ro

* fix
2018-08-09 00:42:33 +02:00
Pascal Vizeli
9eee8eade6 Fix gpio mapping on amd64 systems (#634) 2018-08-09 00:29:20 +02:00
Pascal Vizeli
124ce0b8b7 Update voluptuous 0.11.5 (#622) 2018-08-09 00:06:49 +02:00
Pascal Vizeli
00e7d96472 Fix new auth system (#633)
* Fix new auth system

* Update exceptions.py

* Update exceptions.py

* Update homeassistant.py

* Update homeassistant.py

* Update homeassistant.py

* Fix some API Errors

* fix lint
2018-08-09 00:05:08 +02:00
Pascal Vizeli
398815efd8 Bump version to 124 2018-08-08 19:22:12 +02:00
Pascal Vizeli
bdc2bdcf56 Merge pull request #631 from ndarilek/dev
Add SYS_RESOURCE to list of valid privileges
2018-08-07 17:06:15 +02:00
Nolan Darilek
68eafb0a7d Add SYS_RESOURCE to list of valid privileges 2018-08-07 03:21:21 +00:00
Pascal Vizeli
7ca2fd7193 Merge pull request #618 from home-assistant/dev
Release 123
2018-08-04 01:25:55 +02:00
Pascal Vizeli
ec823edd8f Cleanup docker image (#617) 2018-08-04 00:41:14 +02:00
Pascal Vizeli
858c7a1fa7 Bump version 123 2018-08-02 23:40:52 +02:00
Pascal Vizeli
6ac45a24fc Merge pull request #615 from home-assistant/dev
Release 122
2018-08-02 23:39:43 +02:00
Pascal Vizeli
9430b39042 Update uvloop version 0.11.1 (#614) 2018-08-02 23:18:40 +02:00
Pascal Vizeli
ae7466ccfe Fix UnicodeDecodeError when reading json file (#613)
* Update json.py

* Update data.py
2018-08-02 21:48:50 +02:00
Simon Holzmayer
2c17fe5da8 Adapt regex validation to allow docker images from other registries (#608)
* Adapt regex validation to allow images from registries other than Docker Hub

Issue #564

* Update validate.py
2018-07-30 12:34:42 +02:00
Pascal Vizeli
a0fb91af29 Use requirements.txt (#607)
* Create requirements.txt

* Update setup.py

* Update Dockerfile

* Update Dockerfile

* Update requirements.txt

* Update requirements.txt

* Update Dockerfile

* Update tox.ini
2018-07-27 16:34:47 +02:00
Pascal Vizeli
f626e31fd3 Bump version 122 2018-07-25 01:52:24 +02:00
Pascal Vizeli
0151a149fd Merge pull request #604 from home-assistant/dev
Release 121
2018-07-25 01:47:36 +02:00
Pascal Vizeli
9dea93142b Timeout shutdown (#603)
* Don't wait too long for shutdown

* Update log message

* Fix timeout

* Fast shutdown
2018-07-25 01:46:54 +02:00
Pascal Vizeli
7f878bfac0 Bump version to 121 2018-07-25 01:39:33 +02:00
Pascal Vizeli
ebe9ae2341 Merge pull request #600 from home-assistant/dev
Release 120
2018-07-24 22:11:10 +02:00
Pascal Vizeli
e777bbd024 Fix bug with proxy (#599) 2018-07-24 22:00:46 +02:00
Pascal Vizeli
2116d56124 Bump version 120 2018-07-24 16:32:46 +02:00
Pascal Vizeli
0b6a82b018 Merge pull request #598 from home-assistant/dev
Release 119
2018-07-24 16:21:06 +02:00
Pascal Vizeli
b4ea28af4e Update uvloop to 0.11.0 (#597) 2018-07-24 16:16:26 +02:00
Pascal Vizeli
22f59712df Bump version 119 2018-07-23 13:01:51 +02:00
Pascal Vizeli
efe95f7bab Merge pull request #593 from home-assistant/dev
Release 118
2018-07-23 12:59:15 +02:00
Pascal Vizeli
200c68f67f Fix proxy data passthrough (#592)
* Fix proxy data passthrough

* Update homeassistant.py
2018-07-23 12:53:32 +02:00
Pascal Vizeli
dcefec7b99 Cleanup old stuff (#589) 2018-07-22 01:51:45 +02:00
Pascal Vizeli
5db798bcf8 Fix API for home-assistant (#588)
* Fix API for home-assistant

* Update API.md
2018-07-22 00:42:45 +02:00
Pascal Vizeli
70005296cc Bump version 118 2018-07-21 20:25:12 +02:00
Pascal Vizeli
f2bf8dea93 Merge pull request #585 from home-assistant/dev
Release 117
2018-07-21 20:21:18 +02:00
Pascal Vizeli
fee858c956 Fix exception if HomeAssistant is already running (#587) 2018-07-21 20:13:13 +02:00
Pascal Vizeli
e3ae48c8ff Remove geo ip (#586)
* Remove geo ip

* Update core.py

* Update dt.py
2018-07-21 19:45:11 +02:00
Pascal Vizeli
fa9e20385e Bugfix password (#584) 2018-07-21 19:07:22 +02:00
Pascal Vizeli
f51c9704e0 Fix timeout on freegeoip (#581)
* Fix timeout on freegeoip

* Update updater.py

* Update supervisor.py

* Update dt.py

* Update hassos.py

* Update core.py

* Update hassos.py

* Update supervisor.py

* Update updater.py
2018-07-21 19:01:20 +02:00
Pascal Vizeli
57c58d81c0 Bump version 117 2018-07-21 00:06:34 +02:00
Pascal Vizeli
1ec1082068 Merge pull request #580 from home-assistant/dev
Release 116
2018-07-21 00:05:56 +02:00
Pascal Vizeli
35b7c2269c Support control of hassos-cli (#555)
* Support control of hassos-cli

* Update const.py

* Update validate.py

* Update supervisor.py

* Create hassos_cli.py

* Update hassos_cli.py

* Update hassos_cli.py

* Update hassos.py

* Update tasks.py

* Update hassos.py

* Update API.md

* Update API.md

* Update const.py

* Update hassos.py

* Update __init__.py

* Fix lint

* fix

* Fix logging

* change order

* Fix download
2018-07-20 23:45:36 +02:00
Pascal Vizeli
cc3e6ec6fd Fix stream error with aiohttp >= 3 (#579)
* Fix stream error with aiohttp >= 3

* Update proxy.py

* Update proxy.py

* Update proxy.py

* Update proxy.py

* Update proxy.py

* Update proxy.py
2018-07-20 22:28:56 +02:00
Paulus Schoutsen
4df42e054d Leverage access and refresh tokens if available (#575)
* Leverage access and refresh tokens if available

* Update homeassistant.py

* Update homeassistant.py

* Update proxy.py

* Migrate HomeAssistant to new exception layout

* Fix build for 3.7

* Cleanups

* Fix style

* fix log strings

* Fix new style

* Fix travis build

* python 3.7

* next try

* fix

* fix lint

* Fix lint p2

* Add logging

* Fix logging

* fix access

* Fix spell

* fix return

* Fix runtime

* Add to hass config
2018-07-20 16:55:48 +02:00
Pascal Vizeli
1b481e0b37 Fix small bugs (python37) (#577)
* Fix small bugs (python37)

* Update utils.py

* Update utils.py

* Update utils.py

* Update utils.py

* Update utils.py
2018-07-19 21:22:26 +02:00
Pascal Vizeli
3aa4cdf540 Fix remove data inside executor (#576) 2018-07-19 20:25:58 +02:00
Pascal Vizeli
029f277945 Reset readonly on remove data (#569)
* Reset readonly on remove data

* Update addon.py

* Update utils.py

* Fix lint

* Update utils.py

* Update utils.py

* Update utils.py

* Update utils.py

* Update addon.py
2018-07-19 12:44:16 +02:00
Pascal Vizeli
e7e0b9adda Fix-python7 compatibility (#573) 2018-07-19 01:18:43 +02:00
Pascal Vizeli
5fbff75da8 Support new base images (#571)
* Support new base images

* Update Dockerfile

* Update setup.py
2018-07-17 23:32:50 +02:00
Paulus Schoutsen
58299a0389 Add release drafter 2018-07-10 10:38:45 +02:00
Pascal Vizeli
1151d7e17b Bump version to 116 2018-07-06 13:10:16 +02:00
Pascal Vizeli
b56ed547e3 Merge pull request #559 from home-assistant/dev
Release 115
2018-07-06 13:09:32 +02:00
Pascal Vizeli
a71ebba940 Bugfix rollback if the hass instance is completely corrupt (#558) 2018-07-06 13:08:57 +02:00
Pascal Vizeli
4fcb516c75 Bump version to 115 2018-07-06 01:38:18 +02:00
Pascal Vizeli
22142d32d2 Merge remote-tracking branch 'origin/dev'
Release 114
2018-07-06 01:37:10 +02:00
Pascal Vizeli
21194f1411 Add hostname to UI (#557)
* Add hostname to UI

* Fix dbus call

* support boolean

* support types

* revert

* test

* test

* log

* fixup

* fix bug
2018-07-06 01:36:28 +02:00
Pascal Vizeli
09df046fa8 Fix problem with Repositories (#552)
* Fix problem with Repositories

* Update git.py

* Update git.py

* Update git.py

* Update git.py

* Update git.py

* Update git.py

* Update git.py

* Update git.py

* Update git.py

* Update git.py

* Update git.py

* fix lint

* fix

* reset origin

* Git cleanup
2018-07-05 23:21:54 +02:00
Pascal Vizeli
63d3889d5c Fix problem with options / hostname (#554) 2018-07-05 13:01:48 +02:00
Pascal Vizeli
0ffc0559e2 Map devicetree 2018-07-04 01:12:58 +02:00
Pascal Vizeli
78118a502c Map devicetree 2018-07-04 01:12:25 +02:00
Pascal Vizeli
946cc3d618 Bump version to 114 2018-07-04 00:53:19 +02:00
Pascal Vizeli
c40a3f18e9 Merge remote-tracking branch 'origin/dev'
Release 113
2018-07-04 00:51:36 +02:00
Pascal Vizeli
f01945bf8c Update addon.py (#550) 2018-07-04 00:51:01 +02:00
Pascal Vizeli
0f72db45f9 Bump version to 113 2018-07-03 23:02:21 +02:00
Pascal Vizeli
83510341b6 Merge remote-tracking branch 'origin/dev'
Release 112
2018-07-03 22:43:05 +02:00
Pascal Vizeli
70dd6593e4 Rollback homeassistant on failover (#549)
* Rollback homeassistant on failover

* Check running system
2018-07-03 22:41:50 +02:00
Pascal Vizeli
60ba2db561 Bump version to 112 2018-07-03 20:44:06 +02:00
Pascal Vizeli
5820d16419 Fix wrong mount options for devicetree (#548) 2018-07-03 18:44:45 +02:00
Pascal Vizeli
9f9ff0d1ad Merge remote-tracking branch 'origin/dev'
Release 111
2018-07-03 00:14:28 +02:00
Pascal Vizeli
806161e3ac Use machine-id from filesystem (#546)
* Use machine-id from filesystem

* Update security.py

* Update security.py

* fix lint
2018-07-01 22:28:32 +02:00
Pascal Vizeli
44ae9c7b63 Don't try to shut down an API that is not running (#543) 2018-06-30 22:06:18 +02:00
Pascal Vizeli
75d24ba534 Bump version to 111 2018-06-30 22:03:21 +02:00
Pascal Vizeli
13243cd02c Merge remote-tracking branch 'origin/dev'
Release 110
2018-06-30 02:10:48 +02:00
Pascal Vizeli
411fad8a45 Fix scroll bugs (#542) 2018-06-30 01:59:33 +02:00
Pascal Vizeli
5fe9d63c79 Add HassOS OTA support on Hass.io (#536)
* Add HassOS OTA support on Hass.io

* Update dt.py

* Update updater.py

* add rauc dbus / initial dbus signal handling

* Update gdbus.py

* Update hassos.py

* Update const.py

* Update hassos.py

* Update exceptions.py

* Update hassos.py

* Update rauc.py

* Update rauc.py

* Update rauc.py

* Update hassos.py

* Update hassos.py

* Update hassos.py

* Update hassos.py

* Update hassos.py

* Update hassos.py

* Update hassos.py

* Update __init__.py

* Update hassos.py

* Update hassos.py

* Update updater.py

* Update updater.py

* Update exceptions.py

* Update exceptions.py

* Update hassos.py

* Update dt.py

* fix lint

* Fix update

* fix property

* tmp disabled

* fix path

* fix rauc

* info

* More details

* cleanup signal handling

* fix

* Fix lint
2018-06-30 01:48:58 +02:00
Pascal Vizeli
33095f8792 Bump version to 110 2018-06-29 22:23:00 +02:00
Pascal Vizeli
0253722369 Remove update config with already installed one (#533)
* Remove update config with already installed one

* fix lint
2018-06-28 12:22:27 +02:00
Pascal Vizeli
495c45564a Update libuv (#529) 2018-06-26 00:21:49 +02:00
Pascal Vizeli
8517b43e85 Merge pull request #526 from home-assistant/dev
Release 109
2018-06-23 00:58:29 +02:00
Pascal Vizeli
033ea4e7dc Panel with HassOS support (#525) 2018-06-23 00:52:19 +02:00
Pascal Vizeli
a0c9e5ad26 HassOS support (#522)
* Add support for hassos

* Name command

* Update host.py

* Create hassos.py

* Update const.py

* Update host.py

* Update API.md

* Update const.py

* Update __init__.py

* Update hassos.py

* Update hassos.py

* Update hassos.py

* Update hassos.py

* Update const.py

* Update API.md

* Update hassos.py

* Update hassos.py

* Update API.md

* Update const.py

* Update hassos.py

* Update __init__.py

* fix lint

* Fix lint v2

* remove old function

* fix attribute error

* initialize hassos

* Fix link

* fix error handling

* Fix handling
2018-06-22 22:54:03 +02:00
Pascal Vizeli
408d6eafcc Bump version to 109 2018-06-21 12:21:59 +02:00
Pascal Vizeli
054e357483 Add support to map devicetree into add-on (#519)
* Add support to map devicetree into add-on

* Update const.py

* Update validate.py

* Update addon.py

* Update addons.py

* Update API.md
2018-06-21 12:19:14 +02:00
Pascal Vizeli
cb520bff23 Merge pull request #518 from home-assistant/fix-device-configs
Bugfix home-assistant config with devices
2018-06-21 12:18:20 +02:00
Pascal Vizeli
024ebe0026 Update homeassistant.py 2018-06-21 12:00:13 +02:00
Pascal Vizeli
7b62e2f07b Bugfix home-assistant config with devices 2018-06-21 11:51:04 +02:00
Pascal Vizeli
7d52b3ba01 Merge pull request #517 from home-assistant/dev
Release 108
2018-06-21 11:33:09 +02:00
Pascal Vizeli
46caa23319 Update version.json 2018-06-21 11:27:19 +02:00
Pascal Vizeli
9aa5eda2c8 Some bugfix (#516)
* Some bugfix

* Update apparmor.py

* Update apparmor.py

* Update apparmor.py

* Update apparmor.py
2018-06-20 23:25:08 +02:00
Pascal Vizeli
f48182a69c Add lost panel files (#515)
* Update Panel for 0.72

* Add new panel files
2018-06-20 21:07:33 +02:00
Pascal Vizeli
788f883490 Update Panel for 0.72 (#514) 2018-06-20 20:37:26 +02:00
Pascal Vizeli
e84e82d018 Fix aiohttp handling (#512) 2018-06-20 15:32:55 +02:00
Pascal Vizeli
20e73796b8 Change aiohttp handling for server (#511) 2018-06-20 12:17:33 +02:00
Pascal Vizeli
7769d6fff1 Cleanup and fixup Apparmor implementation (#509)
* Cleanup and fixup Apparmor implementation

* Update addon.py

* Update validate.py

* Create apparmor.py

* Update exceptions.py

* Update apparmor.py

* Create apparmor.py

* Update const.py

* Update bootstrap.py

* Update const.py

* Update config.py

* Update addons.py

* Update apparmor.py

* Add support for host AppArmor

* Update apparmor.py

* Update apparmor.py

* Update apparmor.py

* Update apparmor.py

* Update apparmor.py

* Update addon.py

* Update apparmor.py

* Update addon.py

* Update addon.py

* Update addon.py

* Update addon.py

* Update const.py

* Update supervisor.py

* Update supervisor.py

* Update supervisor.py

* Add snapshot support

* some cleanup

* Cleanup v2

* Update aiohttp

* fix lint

* fix bugs

* Add info logs
2018-06-20 00:09:18 +02:00
Pascal Vizeli
561e80c2be Extend Systemd Support / Apparmor (#506)
* Update systemd.py

* Update control.py

* Update control.py

* Create service.py

* Update info.py

* Rename hassio/host/asound.tmpl to hassio/host/data/asound.tmpl

* Rename hassio/host/audiodb.json to hassio/host/data/audiodb.json

* Update alsa.py

* Update alsa.py

* Update control.py

* Fix

* Enable call

* fix

* fix args

* Fix gdbus

* parse service data

* Change handling

* Fix states

* Fix parser for tuples

* Fix parser v2

* Fix tuple handling

* Fix regex string handling

* Faster tuple finder

* fix empty detector

* wrong order

* Finish

* fix lint

* better filtering

* fix match

* Fix mode string
2018-06-17 02:07:12 +02:00
Pascal Vizeli
96f47a4c32 Bump version to 108 2018-06-16 01:20:10 +02:00
Pascal Vizeli
7482d6dd45 Update to latest panel version (#508) 2018-06-15 22:55:57 +02:00
Pascal Vizeli
aea31ee6dd Support host tokens (#507)
* Update coresys.py

* Update bootstrap.py

* Update security.py

* fix lint

* Update bootstrap.py
2018-06-15 22:10:51 +02:00
Pascal Vizeli
de43965ecb Merge pull request #503 from home-assistant/dev
Release 107
2018-06-09 00:01:11 +02:00
Pascal Vizeli
baa61c6aa0 Bump version to 107 2018-06-08 23:48:43 +02:00
Pascal Vizeli
cb22dafb3c Fix bug with 0.70 (#502)
Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch>
2018-06-08 23:47:59 +02:00
Pascal Vizeli
ea26784c3e Merge pull request #501 from home-assistant/dev
Release 106
2018-06-08 22:14:05 +02:00
Pascal Vizeli
72332ed40f New panel for 0.71 and 0.72 (#500)
Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch>
2018-06-08 21:52:06 +02:00
Pascal Vizeli
46f2bf16a8 Bump version to 106 2018-06-08 21:51:08 +02:00
Ville Skyttä
e2725f8033 Spelling and grammar fixes (#499) 2018-06-08 21:32:06 +02:00
Pascal Vizeli
9084ac119f Fix version conflict 2018-05-29 19:40:16 +02:00
Pascal Vizeli
41943ba61a Delete .gitattributes 2018-05-29 19:38:00 +02:00
Pascal Vizeli
33794669a1 Last version.json update 2018-05-29 19:36:01 +02:00
Pascal Vizeli
fe155a4ff0 Read version from AWS (#488)
* Read version from AWS

* Update const.py

* Update updater.py

* Update updater.py

* Update updater.py

* Update updater.py

* Update updater.py

* Update const.py

* Update updater.py
2018-05-29 19:14:09 +02:00
Pascal Vizeli
124e487ef7 Support new panel generation (#487)
* Support new panel generation

* fix lint
2018-05-29 17:53:09 +02:00
Pascal Vizeli
f361916a60 Update docker timeout to 900sec (#486) 2018-05-29 17:37:20 +02:00
Pascal Vizeli
20afa1544b Bump version to 105 2018-05-29 00:22:12 +02:00
Pascal Vizeli
c08d5af4db Fix version conflicts 2018-05-29 00:21:24 +02:00
Pascal Vizeli
dc341c8af8 Fix version conflicts 2018-05-29 00:18:08 +02:00
Pascal Vizeli
2507b52adb Update Home Assistant to 0.70.0 2018-05-28 23:59:12 +02:00
Pascal Vizeli
1302708135 Update Home Assistant to 0.70.0 2018-05-28 23:58:45 +02:00
Pascal Vizeli
1314812f92 Update Home Assistant to 0.70.0 2018-05-28 23:53:28 +02:00
Pascal Vizeli
f739e3ed11 Update Hass.io to 104 2018-05-28 23:30:48 +02:00
Pascal Vizeli
abb526fc0f Update Panel / fix icons (#483) 2018-05-28 23:29:34 +02:00
Pascal Vizeli
efb1a24b8f Expose panel update (#482)
* Update __init__.py

* Update setup.py

* Update security.py

* Update setup.py

* Update __init__.py

* Update setup.py

* Update __init__.py
2018-05-28 23:16:03 +02:00
Pascal Vizeli
bc0835963d Bump version to 104 2018-05-28 21:28:19 +02:00
Pascal Vizeli
316190dff8 Fix new panel build for 0.70.0 (#481)
Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch>
2018-05-28 21:24:17 +02:00
Pascal Vizeli
029ead0c7c Home Assistant 0.70.0b7 2018-05-27 10:52:10 +02:00
Paulus Schoutsen
a85172f30b Update to b7 2018-05-26 22:03:24 -04:00
Pascal Vizeli
dfe2532813 0.70.0b5 2018-05-26 22:28:47 +02:00
Pascal Vizeli
cf3bb23629 Home Assistant 0.70.0b5 2018-05-26 22:28:31 +02:00
Pascal Vizeli
2132042aca Update Home Assistant to version 0.70.0b3 2018-05-25 19:27:49 +02:00
Pascal Vizeli
19e448fc54 Update Home Assistant to version 0.70.0b3 2018-05-25 19:27:33 +02:00
c727
a4e0fb8e99 Update HA beta to 0.70.0b2 2018-05-22 15:03:18 +02:00
Paulus Schoutsen
5b72e2887e Update Hass.io to 0.70.0b2 2018-05-21 21:14:41 -04:00
Pascal Vizeli
d2b6ec1b7e Update Home Assistant to version 0.70.0b1 2018-05-21 15:38:04 +02:00
Paulus Schoutsen
4b541a23c4 Update Hass.io to 0.70.0b1 2018-05-21 09:27:11 -04:00
Pascal Vizeli
99869449ae Update Home Assistant to 0.70.0b0 2018-05-19 10:21:23 +02:00
Pascal Vizeli
eab73f3895 Update Home Assistant to 0.70.0b0 2018-05-19 10:20:55 +02:00
Pascal Vizeli
9e96615ffa Update Home Assistant to version 0.69.1 2018-05-13 10:20:56 +02:00
Pascal Vizeli
350010feb5 Update Home Assistant to version 0.69.1 2018-05-13 10:20:38 +02:00
Pascal Vizeli
7395e4620b Update Home Assistant to version 0.69.1 2018-05-13 10:20:18 +02:00
Pascal Vizeli
7d91ae4513 Update Home Assistant to 0.69.0 2018-05-11 22:32:38 +02:00
Pascal Vizeli
343f759983 Update Home Assistant to 0.69.0 2018-05-11 22:32:01 +02:00
Pascal Vizeli
24ee3f8cc0 Update Home Assistant to 0.69.0 2018-05-11 22:31:41 +02:00
Pascal Vizeli
c143eadb62 Update Home-Assistant 2018-05-09 20:31:22 +02:00
Pascal Vizeli
e7df38f4d1 Merge pull request #467 from home-assistant/rc
Hass.io 103
2018-05-09 15:47:18 +02:00
Pascal Vizeli
3e42318ac8 Merge branch 'master' into rc 2018-05-09 15:18:53 +02:00
Pascal Vizeli
c6e5d2932e Update Hass.io 2018-05-09 13:15:54 +02:00
Pascal Vizeli
1aaf21a350 Update Hass.io and Home Assistant 2018-05-09 13:15:35 +02:00
Pascal Vizeli
f185eece8a Update Hass.io and Home Assistant 2018-05-09 13:15:17 +02:00
Pascal Vizeli
9d951280ef Update const.py 2018-05-09 11:07:07 +02:00
Pascal Vizeli
3f598bafc0 Bugfix panel loading (#464) 2018-05-09 11:06:19 +02:00
Franck Nijhof
cddd859f56 🔈 Improves of audio devices handling (#463) 2018-05-08 14:27:17 +02:00
Pascal Vizeli
e7adf50ec1 Update Home Assistant 0.69.0b2 2018-05-07 23:54:11 +02:00
Pascal Vizeli
ac437f809a Update Home Assistant 0.69.0b2 2018-05-07 23:53:54 +02:00
Pascal Vizeli
f13dee9b9d Update Hass.io 2018-05-06 09:53:56 +02:00
Pascal Vizeli
00855c0909 Update Home Assistant and Hass.io 2018-05-06 09:53:28 +02:00
Pascal Vizeli
1fafed5a07 Update Home Assistant and Hass.io 2018-05-06 09:52:55 +02:00
Pascal Vizeli
7adb81b350 Update const.py 2018-05-06 09:45:46 +02:00
Pascal Vizeli
4647035b00 Bugfix Websession 2018-05-06 09:44:58 +02:00
Pascal Vizeli
8ad7344e02 Update Hass.io to version 103.1 2018-05-05 23:29:58 +02:00
Pascal Vizeli
f1c46b3385 Update Hass.io to version 103.1 2018-05-05 23:29:41 +02:00
Pascal Vizeli
7f84073b12 Update Hass.io to version 103.1 2018-05-05 23:29:24 +02:00
Pascal Vizeli
e383a11bb7 Pump version to fix 2018-05-05 23:19:56 +02:00
Pascal Vizeli
cc113e2251 Update Hass.io to version 103 2018-05-05 19:05:30 +02:00
Pascal Vizeli
c5a3830c7d Merge remote-tracking branch 'origin/dev' into rc 2018-05-04 21:53:40 +02:00
Pascal Vizeli
a2abadc970 Update hass.io to version 103 2018-05-04 21:39:12 +02:00
Pascal Vizeli
db444b89d3 Update gdbus.py (#460)
* Update gdbus.py

* Update gdbus.py
2018-05-04 20:58:23 +02:00
Pascal Vizeli
77881e8a58 Update Panel (#459)
* Update Panel

* Update core.py
2018-05-04 20:04:34 +02:00
Pascal Vizeli
0b15f88da3 Bugfixes (#457)
* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py
2018-05-03 23:22:48 +02:00
Pascal Vizeli
7c6bf96f6f shield host functions 2018-05-03 01:00:13 +02:00
Pascal Vizeli
dc77e2d8d9 Update gdbus.py 2018-05-03 00:52:57 +02:00
Pascal Vizeli
68824fab4f fix bug 2018-05-02 23:31:23 +02:00
Pascal Vizeli
d6b3a36714 Update Dockerfile 2018-05-02 22:52:08 +02:00
Pascal Vizeli
8ab1f703c7 Update Home Assistant to version 0.68.1 2018-05-01 07:24:34 +02:00
Pascal Vizeli
95a4e292aa Update Home Assistant to version 0.68.1 2018-05-01 07:24:03 +02:00
Pascal Vizeli
3b9252558f Update Home Assistant to version 0.68.1 2018-05-01 07:23:46 +02:00
Pascal Vizeli
4a324dccc6 Pump version to 103 2018-04-30 14:49:34 +02:00
Pascal Vizeli
8fffb0f8b5 Fix version 2018-04-30 14:46:59 +02:00
Pascal Vizeli
87adfce211 Update Hass.io to version 0.102 2018-04-30 14:45:31 +02:00
Tod Schmidt
297813f6e6 fix for asound.tmpl (#454) 2018-04-30 14:09:20 +02:00
Pascal Vizeli
362315852a Pump version to 0.102 2018-04-30 14:08:18 +02:00
Tod Schmidt
d221f36cf8 fix for asound.tmpl (#454) 2018-04-30 07:39:44 +02:00
Pascal Vizeli
9e18589b6b Update info.py 2018-04-28 10:51:25 +02:00
Pascal Vizeli
c4d09210e1 Update Home-Assistant to version 0.68.0 2018-04-28 10:16:57 +02:00
Pascal Vizeli
43797c5eb5 Update Home-Assistant to version 0.68.0 2018-04-28 10:16:28 +02:00
Pascal Vizeli
fe38fe94dc Update Home-Assistant to version 0.68.0 2018-04-28 10:16:06 +02:00
Pascal Vizeli
f185291eca Update control.py 2018-04-27 22:33:41 +02:00
Pascal Vizeli
7541ae6476 Update hostname.py 2018-04-27 22:31:37 +02:00
Pascal Vizeli
d94715be2b Merge pull request #451 from home-assistant/cleanups
Cleanups & restructs
2018-04-26 21:47:06 +02:00
Pascal Vizeli
99cc5972c8 Update __init__.py 2018-04-26 21:44:06 +02:00
Pascal Vizeli
3d101a24a1 Update API.md 2018-04-26 21:42:45 +02:00
Pascal Vizeli
2ed3ddf05b fix lint 2018-04-26 21:23:43 +02:00
Pascal Vizeli
10b3658bd7 Revert last changes 2018-04-26 20:51:29 +02:00
Pascal Vizeli
9f5903089e Cleanup API 2018-04-26 19:49:12 +02:00
Pascal Vizeli
0593885ed4 revert error 2018-04-26 19:25:10 +02:00
Pascal Vizeli
3efbe11d49 Cleanup 2018-04-26 19:23:52 +02:00
Pascal Vizeli
1c2e0e5749 Update host.py 2018-04-26 11:36:51 +02:00
Pascal Vizeli
f64da6a547 Update exceptions.py 2018-04-26 11:33:43 +02:00
Pascal Vizeli
94fba7e175 Update info.py 2018-04-26 11:32:15 +02:00
Pascal Vizeli
a59245e6bb Update __init__.py 2018-04-26 11:29:12 +02:00
Pascal Vizeli
217c1acc62 Update and rename power.py to control.py 2018-04-26 11:27:02 +02:00
Pascal Vizeli
2c0a68bd8f Update and rename local.py to info.py 2018-04-26 09:44:49 +02:00
Pascal Vizeli
e37ffd6107 Merge pull request #450 from home-assistant/dbus
Initial Dbus support
2018-04-26 00:17:45 +02:00
Pascal Vizeli
3bde598fa7 fix host 2018-04-25 23:49:45 +02:00
Pascal Vizeli
53f42ff934 fix attr 2018-04-25 23:36:44 +02:00
Pascal Vizeli
9041eb9e9a Fix attributes 2018-04-25 23:24:55 +02:00
Pascal Vizeli
70ac395232 fix bugs 2018-04-25 22:47:17 +02:00
Pascal Vizeli
82f68b4a7b fix dbus 2018-04-25 22:27:57 +02:00
Pascal Vizeli
2b2f3214e9 fix selecter 2018-04-25 22:12:27 +02:00
Pascal Vizeli
1c0d63a02e fix sys 2018-04-25 21:49:28 +02:00
Pascal Vizeli
de77215630 Update Home-Assistant to version 0.68.0b1 2018-04-25 21:43:06 +02:00
Pascal Vizeli
f300b843c1 Update Home-Assistant to version 0.68.0b1 2018-04-25 21:42:35 +02:00
Pascal Vizeli
0bb81136bb Add hostname function 2018-04-24 23:38:40 +02:00
Pascal Vizeli
2a81ced817 Update gdbus.py 2018-04-24 15:52:18 +02:00
Pascal Vizeli
7363951a9a Update gdbus.py 2018-04-24 15:40:14 +02:00
Pascal Vizeli
6f770b78af add interface dbus class 2018-04-23 23:30:21 +02:00
Pascal Vizeli
10219a348f fix lint 2018-04-23 21:56:54 +02:00
Pascal Vizeli
23d1013cfa Follow the correct shutdown flow 2018-04-23 21:45:06 +02:00
Pascal Vizeli
05980d4147 some cleanup more 2018-04-23 21:22:29 +02:00
Pascal Vizeli
e5e25c895f Fix error handling 2018-04-23 21:10:48 +02:00
Pascal Vizeli
b486883ff6 Cleanups 2018-04-23 15:32:23 +02:00
Pascal Vizeli
42dd4d9557 Update coresys.py 2018-04-23 09:05:52 +02:00
Pascal Vizeli
7dff9e09a7 Update bootstrap.py 2018-04-23 09:05:08 +02:00
Pascal Vizeli
c315b026a3 Update __init__.py 2018-04-23 08:58:06 +02:00
Pascal Vizeli
a4ba4c80e8 Update __init__.py 2018-04-23 08:57:39 +02:00
Pascal Vizeli
ccd48b63a2 Create __init__.py 2018-04-23 08:51:47 +02:00
Pascal Vizeli
6d5f70ced6 Rename hassio/misc/dbus/rauc.py to hassio/dbus/rauc.py 2018-04-23 08:51:15 +02:00
Pascal Vizeli
ccffb4b786 Rename hassio/misc/rauc.py to hassio/misc/dbus/rauc.py 2018-04-23 08:50:51 +02:00
Pascal Vizeli
68dbbe212c Rename hassio/misc/networkmanager.py to hassio/dbus/networkmanager.py 2018-04-23 08:50:18 +02:00
Pascal Vizeli
5df869e08a Rename hassio/misc/systemd.py to hassio/dbus/systemd.py 2018-04-23 08:49:56 +02:00
Pascal Vizeli
63b9e023b4 add hostmanager 2018-04-22 17:59:41 +02:00
Pascal Vizeli
8f357739ec code cleanups 2018-04-22 17:44:03 +02:00
Pascal Vizeli
808fc0f8b6 Log internal exceptions on API level 2018-04-22 10:16:24 +02:00
Pascal Vizeli
1a6f6085e6 Add API support for new handling 2018-04-22 10:15:07 +02:00
Pascal Vizeli
0de3e9a233 update handling 2018-04-22 09:59:43 +02:00
Pascal Vizeli
f1237f124f slim down the footprint 2018-04-22 09:35:36 +02:00
Pascal Vizeli
69142b6fb0 Add systemd dbus 2018-04-21 23:56:36 +02:00
Pascal Vizeli
28f295a1e2 Cleanup 2018-04-21 22:33:06 +02:00
Pascal Vizeli
55c2127baa Cleanup Loop handling 2018-04-21 16:30:31 +02:00
Pascal Vizeli
265c36b345 Claim exceptions 2018-04-21 15:39:08 +02:00
Pascal Vizeli
9f081fe32f Update Home-Assistant to version 0.68.0b0 2018-04-21 00:04:15 +02:00
Pascal Vizeli
e4fb6ad727 Update Home-Assistant to version 0.68.0b0 2018-04-21 00:03:52 +02:00
Pascal Vizeli
1040a1624a fix lint 2018-04-20 23:40:58 +02:00
Pascal Vizeli
a2ee2852a0 Update gdbus.py 2018-04-20 16:10:59 +02:00
Pascal Vizeli
b2e3b726d9 Update gdbus.py 2018-04-20 16:01:43 +02:00
Pascal Vizeli
0f4e557552 Update gdbus.py 2018-04-20 15:59:04 +02:00
Pascal Vizeli
2efa9f9483 Update gdbus.py 2018-04-20 15:48:36 +02:00
Pascal Vizeli
43e6ca8f4a Update gdbus.py 2018-04-20 10:56:47 +02:00
Pascal Vizeli
34d67a7bcd Update gdbus.py 2018-04-20 10:15:29 +02:00
Pascal Vizeli
5a6051f9a1 Update gdbus.py 2018-04-20 09:58:49 +02:00
Pascal Vizeli
157e48f946 Initial Dbus support 2018-04-19 23:27:20 +02:00
Pascal Vizeli
9469a258ff Update Home-Assistant to version 0.67.1 2018-04-18 12:30:15 +02:00
Pascal Vizeli
fd0aeb5341 Update Home-Assistant to version 0.67.1 2018-04-18 12:29:52 +02:00
Pascal Vizeli
4d4a4ce043 Update Home-Assistant to version 0.67.1 2018-04-18 12:29:38 +02:00
Pascal Vizeli
678f77cc05 Pump version to 0.102 2018-04-14 10:50:42 +02:00
Pascal Vizeli
6c30248389 Update Home-Assistant to version 0.67.0 2018-04-14 10:04:43 +02:00
Pascal Vizeli
fda7c1cf11 Update Home-Assistant to version 0.67.0 2018-04-14 10:04:19 +02:00
Pascal Vizeli
364e5ec0b8 Update Home-Assistant to version 0.67.0 2018-04-14 10:03:51 +02:00
Pascal Vizeli
947bf7799c Fix version conflict 2018-04-14 01:28:44 +02:00
Pascal Vizeli
e22836d706 Fix version conflict 2018-04-14 01:22:39 +02:00
Pascal Vizeli
6c8fcbfb80 Update Hass.io to version 0.101 2018-04-14 01:18:00 +02:00
Pascal Vizeli
f1fe1877fe Merge pull request #442 from home-assistant/ui-101
Update panel audio
2018-04-14 00:59:37 +02:00
Pascal Vizeli
3c0831c8eb Update panel audio 2018-04-14 00:52:11 +02:00
Pascal Vizeli
35b3f364c9 Merge pull request #441 from home-assistant/new_audio_system
Extend Audio support
2018-04-14 00:44:37 +02:00
Pascal Vizeli
c4299b51cd Clear device on changes 2018-04-14 00:30:28 +02:00
Pascal Vizeli
31caed20fa Fix device name 2018-04-14 00:27:31 +02:00
Pascal Vizeli
41fed656c1 Use now attr 2018-04-14 00:19:29 +02:00
Pascal Vizeli
c5ee2ebc49 fix v2 2018-04-13 23:58:46 +02:00
Pascal Vizeli
743a218219 fix bug 2018-04-13 23:45:03 +02:00
Pascal Vizeli
093ef17fb7 find the error 2018-04-13 23:31:40 +02:00
Pascal Vizeli
a41912be0a fix db 2018-04-13 23:21:42 +02:00
Pascal Vizeli
5becd51b50 test 4 2018-04-13 23:11:20 +02:00
Pascal Vizeli
ef7a375396 test 2 2018-04-13 22:56:36 +02:00
Pascal Vizeli
19879e3287 test 2018-04-13 22:42:27 +02:00
Pascal Vizeli
d1c4f342fc Fix bootstrap 2018-04-13 22:16:55 +02:00
Pascal Vizeli
2f62b7046c cleanup 2018-04-13 22:04:01 +02:00
Pascal Vizeli
0cca8f522b rename audio object 2018-04-13 21:19:57 +02:00
Pascal Vizeli
39decec001 Update Home-Assistant to version 0.67.0b1 2018-04-13 06:36:42 +02:00
Pascal Vizeli
3489db2768 Update Home-Assistant to version 0.67.0b1 2018-04-13 06:36:21 +02:00
Pascal Vizeli
3382688669 Fix name 2018-04-13 00:03:03 +02:00
Pascal Vizeli
cf00ce7d78 fix lint 2018-04-12 23:50:58 +02:00
Pascal Vizeli
2c714aa003 fix 1 2018-04-12 23:11:38 +02:00
Pascal Vizeli
1e7858bf06 fix url 2018-04-12 23:00:42 +02:00
Pascal Vizeli
4e428c2e41 Fix options 2018-04-12 22:39:26 +02:00
Pascal Vizeli
b95ab3e95a Stage API 2018-04-12 22:15:08 +02:00
Pascal Vizeli
0dd7f8fbaa Fix some comments 2018-04-12 22:07:41 +02:00
Pascal Vizeli
a2789ac540 Extend Audio support 2018-04-11 23:53:30 +02:00
Pascal Vizeli
a785e10a3f Merge pull request #440 from home-assistant/addon-slug
Add slug to add-on info
2018-04-11 20:01:15 +02:00
Pascal Vizeli
10dad5a209 Update API.md 2018-04-11 18:53:01 +02:00
Pascal Vizeli
9327b24d44 Add slug to add-on info 2018-04-11 18:51:03 +02:00
Pascal Vizeli
7d02bb2fe9 Pump version to 0.101 2018-04-10 21:21:15 +02:00
Pascal Vizeli
a2d3ee0d67 Fix version conflict 2018-04-10 21:18:47 +02:00
Pascal Vizeli
d29fab69e8 Merge remote-tracking branch 'origin/dev' into rc 2018-04-09 23:46:52 +02:00
Pascal Vizeli
6205f40298 Pump version to 0.100 2018-04-09 23:45:33 +02:00
Pascal Vizeli
6b169f3f17 Fix version 2018-04-09 23:44:52 +02:00
Pascal Vizeli
0d4a5a7ffb Fix version conflict 2018-04-09 23:41:00 +02:00
Pascal Vizeli
dac90d29dd Update Hass.io to version 1.0 2018-04-09 23:39:19 +02:00
Pascal Vizeli
7e815633e7 Merge pull request #437 from home-assistant/fix_restart
Abstract restart logic
2018-04-09 23:31:55 +02:00
Pascal Vizeli
f062f31ca2 Fix logic 2018-04-09 23:10:12 +02:00
Pascal Vizeli
1374f90433 cleanup version 2018-04-09 22:52:16 +02:00
Pascal Vizeli
b692b19a4d Fix log bug 2018-04-09 22:50:21 +02:00
Pascal Vizeli
92d5b14cf5 Abstract restart logic 2018-04-09 22:13:16 +02:00
Pascal Vizeli
6a84829c16 Merge pull request #436 from home-assistant/aiohttp_update
Update aioHttp 3.1.2 & Handling
2018-04-09 21:29:21 +02:00
Pascal Vizeli
7036ecbd0a Update aioHttp 3.1.2 & Handling 2018-04-09 21:01:06 +02:00
Pascal Vizeli
19b5059972 Pump version 1.0 2018-04-09 20:23:04 +02:00
Pascal Vizeli
cebc377fa7 Merge pull request #435 from home-assistant/fix_docker_char
Bugfix, remove unsupported characters
2018-04-09 20:22:03 +02:00
Pascal Vizeli
d36c3919d7 Update docker 2018-04-09 20:01:48 +02:00
Pascal Vizeli
0684427373 Bugfix, remove unsupported characters 2018-04-08 23:30:42 +02:00
Pascal Vizeli
8ff79e85bf Merge pull request #433 from home-assistant/secure
Add support for Seccomp/AppArmor profiles
2018-04-08 23:07:33 +02:00
Pascal Vizeli
ee4b28a490 Fix's & cleanup 2018-04-08 22:27:58 +02:00
Pascal Vizeli
fddd5b8860 Fix lint 2018-04-07 00:32:54 +02:00
Pascal Vizeli
72279072ac Add support for Seccomp/AppArmor profiles 2018-04-07 00:24:23 +02:00
Pascal Vizeli
0b70448273 Update Home-Assistant 0.67.0b0 2018-04-06 23:20:02 +02:00
Pascal Vizeli
4eb24fcbc5 Update Home-Assistant 0.67.0b0 2018-04-06 23:19:31 +02:00
Pascal Vizeli
06edf59d14 Update Home-Assistant to version 0.66.1 2018-04-02 09:47:39 +02:00
Pascal Vizeli
36ca851bc2 Update Home-Assistant to version 0.66.1 2018-04-02 09:47:21 +02:00
Pascal Vizeli
a4e453bf83 Update Home-Assistant to version 0.66.1 2018-04-02 09:47:05 +02:00
Pascal Vizeli
d211eec66f Update Home-Assistant to version 0.66.1b0 2018-04-01 11:08:34 +02:00
Pascal Vizeli
db8540d4ab Update Home-Assistant to version 0.66.1b0 2018-04-01 11:07:55 +02:00
Pascal Vizeli
30e270e7c0 Update Home-Assistant to version 0.66.0 2018-03-31 09:45:53 +02:00
Pascal Vizeli
9734307551 Update Home-Assistant to version 0.66.0 2018-03-31 09:45:32 +02:00
Pascal Vizeli
c650f8d1e1 Update Home-Assistant to version 0.66.0 2018-03-31 09:45:15 +02:00
Pascal Vizeli
10005898f8 Fix tag name 2018-03-30 15:10:35 +02:00
Pascal Vizeli
716389e0c1 Fix tag format 2018-03-30 15:10:02 +02:00
Pascal Vizeli
658729feb5 Update Home-Assistant to version 0.66.0.b3 2018-03-30 10:23:10 +02:00
Pascal Vizeli
ae7808eb2a Update Home-Assistant to version 0.66.0.b3 2018-03-30 10:22:41 +02:00
Pascal Vizeli
d8e0e9e0b0 Update Home-Assistant to version 0.66.0.b2 2018-03-27 09:24:59 +02:00
Pascal Vizeli
a860a3c122 Update Home-Assistant to version 0.66.0.b2 2018-03-27 09:24:38 +02:00
Pascal Vizeli
fe60d526b9 Revert home-assistant version 2018-03-24 23:34:22 +01:00
Pascal Vizeli
769904778f Merge remote-tracking branch 'origin/rc' 2018-03-24 23:32:37 +01:00
Pascal Vizeli
a3a40c79d6 Fix merge conflicts 2018-03-24 23:31:26 +01:00
Pascal Vizeli
b44f613136 Update hass.io to version 0.99 2018-03-24 23:09:32 +01:00
Pascal Vizeli
801be9c60b Create .gitattributes 2018-03-24 23:05:58 +01:00
Pascal Vizeli
b6db6a1287 Create .gitattributes 2018-03-24 23:04:37 +01:00
Pascal Vizeli
4181174bcc Create .gitattributes 2018-03-24 23:03:15 +01:00
Pascal Vizeli
3be46e6011 Update Home-Assistant to version 0.66.0.beta0 2018-03-24 22:58:52 +01:00
Pascal Vizeli
98b93efc5c Merge pull request #423 from home-assistant/beta
Change Upstream handling
2018-03-24 22:56:04 +01:00
Pascal Vizeli
6156019c2f Merge pull request #424 from home-assistant/ui-99
Update pannel for 0.99
2018-03-24 22:50:33 +01:00
Pascal Vizeli
80d60148a9 Update pannel for 0.99 2018-03-24 22:48:45 +01:00
Pascal Vizeli
8baf59a608 fix lint 2018-03-24 22:06:56 +01:00
Pascal Vizeli
b546365aaa Fix 2018-03-24 22:05:01 +01:00
Pascal Vizeli
0a68698912 rename mode to channel 2018-03-24 22:01:13 +01:00
Pascal Vizeli
45288a2491 Change Upstream handling 2018-03-24 21:44:44 +01:00
Pascal Vizeli
f34a175e4f Update Home-Assistant to version 0.66.0.beta0 2018-03-24 10:34:53 +01:00
Pascal Vizeli
6e7e145822 Update Home-Assistant to version 0.65.6 2018-03-21 22:19:38 +01:00
Pascal Vizeli
9abebe2d5d Update Home-Assistant to version 0.65.6 2018-03-21 22:19:08 +01:00
Pascal Vizeli
b0c5884c3f Update Home-Assistant to version 0.65.5 2018-03-15 12:11:01 +01:00
Pascal Vizeli
a79e6a8eea Update Home-Assistant to version 0.65.5 2018-03-15 12:10:16 +01:00
Pascal Vizeli
c1f1aed9ca Pump version to 0.99 2018-03-14 22:25:58 +01:00
Pascal Vizeli
65b0e17b5b Merge pull request #414 from home-assistant/dev
Release 0.98
2018-03-14 22:25:14 +01:00
Pascal Vizeli
6947131b47 Update Hass.io to version 0.98 2018-03-14 22:10:13 +01:00
Pascal Vizeli
914dd53da0 Merge pull request #411 from home-assistant/fix_watchdog
Use lock on homeassistant level
2018-03-14 21:52:20 +01:00
Pascal Vizeli
58616ef686 bugfix aiohttp 2018-03-14 21:12:08 +01:00
Pascal Vizeli
563e0c1e0e fix wrong startup blocking 2018-03-14 19:08:03 +01:00
Pascal Vizeli
437070fd7a Merge pull request #412 from home-assistant/fix-geoip
Fix URL for freegeoip
2018-03-13 23:34:06 +01:00
Pascal Vizeli
baa9cf451c Fix URL for freegeoip 2018-03-13 23:28:38 +01:00
Pascal Vizeli
c2918d4519 Use lock on homeassistant level 2018-03-13 23:09:53 +01:00
Pascal Vizeli
1efdcd4691 Merge remote-tracking branch 'origin/master' into dev 2018-03-13 21:31:56 +01:00
Pascal Vizeli
2a43087ed7 Pump version to 0.98 2018-03-13 16:06:44 +01:00
Pascal Vizeli
5716324934 Merge pull request #410 from home-assistant/dev
Release 0.97
2018-03-13 16:05:31 +01:00
Pascal Vizeli
ae267e0380 Merge branch 'master' into dev 2018-03-13 14:09:13 +01:00
Pascal Vizeli
3918a2a228 Update Home-Assistant version 0.65.4 2018-03-13 14:07:21 +01:00
Pascal Vizeli
e375fc36d3 Update Hass.io to version 0.97 2018-03-13 00:09:57 +01:00
Pascal Vizeli
f5e29b4651 Update panel to last (#408) 2018-03-12 23:51:09 +01:00
Pascal Vizeli
524d875516 Update aioHttp3 (#403)
* Update aioHttp3

* fix line ending

* fix close session
2018-03-12 23:40:06 +01:00
Pascal Vizeli
60bdc00ce9 Update Home-Assistant to version 0.65.3 2018-03-12 07:13:47 +01:00
Pascal Vizeli
073166190f Update Home-Assistant to version 0.65.3 2018-03-12 07:13:27 +01:00
Pascal Vizeli
b80e4d7d70 Update Home-Assistant to version 0.65.2 2018-03-11 23:58:16 +01:00
Pascal Vizeli
cc434e27cf Update Home-Assistant to version 0.65.2 2018-03-11 23:57:57 +01:00
Pascal Vizeli
8377e04b62 Update Home-Assistant to version 0.65.1 2018-03-11 20:32:43 +01:00
Pascal Vizeli
0a47fb9c83 Update Home-Assistant to version 0.65.1 2018-03-11 20:32:25 +01:00
Pascal Vizeli
a5d3c850e9 Update Home-Assistant to version 0.65.0 2018-03-09 23:32:47 +01:00
Pascal Vizeli
d6391f62be Update Home-Assistant to version 0.65.0 2018-03-09 23:10:27 +01:00
Pascal Vizeli
c6f302e448 Update ResinOS to version 1.3 2018-03-05 22:51:44 +01:00
Pascal Vizeli
9706022c21 Update ResinOS to version 1.3 2018-03-05 22:51:08 +01:00
Pascal Vizeli
1d858f4920 Update ResinOS to version 1.2 2018-03-04 00:43:24 +01:00
Pascal Vizeli
e09ba30d46 Update ResinOS to version 1.2 2018-03-04 00:43:00 +01:00
mark9white
38ec3d14ed Allow addons that require IPC_LOCK capability (#397) 2018-03-03 23:06:42 +01:00
Pascal Vizeli
8ee9380cc7 Pump version to 0.97 2018-03-03 11:15:39 +01:00
Pascal Vizeli
6e74e4c008 Fix version conflicts 2018-03-03 11:12:59 +01:00
Pascal Vizeli
5ebc58851b Update Hass.io to version 0.96 2018-03-03 11:08:00 +01:00
Pascal Vizeli
16b09bbfc5 Allow to use branch on repositories (#395)
* Allow to use branch on repositories

* Fix argument extraction

* fix lint
2018-03-03 11:00:58 +01:00
Pascal Vizeli
d4b5fc79f4 Update Home-Assistant to version 0.64.3 2018-03-03 00:07:04 +01:00
Pascal Vizeli
e51c044ccd Update Home-Assistant to version 0.64.3 2018-03-02 23:56:48 +01:00
Pascal Vizeli
d3b1ba81f7 Update panel for encrypted backups (#394)
* Update panel for encrypted backups

* fix lint
2018-03-02 23:23:40 +01:00
Pascal Vizeli
26f55f02c0 Update Home-Assistant to version 0.64.2 2018-03-02 07:01:42 +01:00
Pascal Vizeli
8050707ff9 Update Home-Assistant to version 0.64.2 2018-03-02 06:54:32 +01:00
c727
46252030cf Improve names for built-in repos (#391) 2018-03-01 19:00:21 +01:00
Pascal Vizeli
681fa835ef Update Home-Assistant to version 0.64.1 2018-02-28 08:16:18 +01:00
Pascal Vizeli
d6560eb976 Update Home-Assistant to version 0.64.1 2018-02-28 07:48:54 +01:00
Pascal Vizeli
3770b307af Pump version to 0.96 2018-02-26 22:55:53 +01:00
Pascal Vizeli
0dacbb31be Fix version conflicts 2018-02-26 22:53:31 +01:00
Pascal Vizeli
bbdbd756a7 Update Hass.io to version 0.95 2018-02-26 22:42:29 +01:00
Pascal Vizeli
508e38e622 Fix snapshot partial API (#389) 2018-02-26 22:26:39 +01:00
Pascal Vizeli
ffe45d0d02 Bugfix if no data is given for encryption (#387)
* Bugfix if no data is given for encryption

* Update snapshot.py
2018-02-26 22:17:25 +01:00
Pascal Vizeli
9206d1acf8 Update Home-Assistant to version 0.64 2018-02-26 06:10:40 +01:00
Pascal Vizeli
da867ef8ef Update Home-Assistant to version 0.64 2018-02-26 06:03:24 +01:00
Pascal Vizeli
4826201e51 Pump version to 0.95 2018-02-25 12:57:53 +01:00
Pascal Vizeli
463c97f9e7 Update Hass.io to version 0.94 2018-02-25 12:49:39 +01:00
Pascal Vizeli
3983928c6c Bugfix snapshot dialog (#380) 2018-02-25 12:18:05 +01:00
Pascal Vizeli
15e626027f Pump version to 0.94 2018-02-24 08:50:21 +01:00
Pascal Vizeli
d46810752e Update Hass.io to version 0.93 2018-02-24 08:46:53 +01:00
Pascal Vizeli
3d10b502a0 Bugfix panel system (#379) 2018-02-24 08:38:59 +01:00
Pascal Vizeli
433c5cef3b Stop home-assistant only if they will be restored (#377) 2018-02-23 22:22:38 +01:00
Pascal Vizeli
697caf553a Pump version to 0.93 2018-02-23 11:38:04 +01:00
Pascal Vizeli
1e11359c71 Fix version conflicts 2018-02-23 11:35:43 +01:00
Pascal Vizeli
5285431825 New panel (#374) 2018-02-23 11:13:53 +01:00
Pascal Vizeli
7743a572a9 Update Hass.io to version 0.92 2018-02-23 11:01:51 +01:00
Pascal Vizeli
3b974920d3 Return snapshot slug for snapshot/import (#372)
* Update __init__.py

* Update snapshots.py

* Update API.md

* Update __init__.py

* Update __init__.py
2018-02-23 10:52:35 +01:00
Pascal Vizeli
6bc9792248 Update setup.py (#373) 2018-02-23 10:37:14 +01:00
Pascal Vizeli
da55f6fb10 Pump version to 0.92 2018-02-23 10:34:21 +01:00
Pascal Vizeli
ffa90a3407 Update Home-Assistant to version 0.63.3 2018-02-18 22:16:46 +01:00
Pascal Vizeli
0a13ea3743 Update Home-Assistant to version 0.63.3 2018-02-18 22:15:39 +01:00
Pascal Vizeli
0e2e588145 Update utils.py 2018-02-18 12:31:06 +01:00
Pascal Vizeli
b8c50fee36 Update validate.py 2018-02-18 12:30:41 +01:00
Pascal Vizeli
8cb0b7c498 Update validate.py 2018-02-18 12:23:46 +01:00
Pascal Vizeli
699fcdafba Fix pw2 (#369)
* fix rate password

* convert int
2018-02-18 12:18:11 +01:00
Pascal Vizeli
b4d5aeb5d0 Update Hass.io to version 0.91 2018-02-18 12:15:54 +01:00
Pascal Vizeli
d067dd643e Fix password hack (#368) 2018-02-18 11:51:11 +01:00
Pascal Vizeli
65a2bf2d18 Pump version to 0.91 2018-02-18 11:01:13 +01:00
Pascal Vizeli
e826e8184f Update Hass.io to version 0.90 2018-02-18 10:59:58 +01:00
Pascal Vizeli
dacbde7d77 Extend the security of snapshots (#367)
* extend security

* fix lint
2018-02-18 10:57:05 +01:00
Pascal Vizeli
5b0587b672 Pump version to 0.90 2018-02-17 17:28:19 +01:00
Pascal Vizeli
f0320c0f6d Fix version conflicts 2018-02-17 16:29:36 +01:00
Pascal Vizeli
e05c32df25 Update Hass.io to version 0.89 2018-02-17 16:28:50 +01:00
c727
9c40c32e95 Add timezone to snapshot timestamp (#360)
* Add timezone to snapshot timestamp

```
old: 2018-02-14T15:13:46.391829
new: 2018-02-14T15:13:46.391829+00:00
```

* Update __init__.py

* Move code to dt util

* Lint

* Lint 2

* Update dt.py

* Update __init__.py
2018-02-17 16:13:23 +01:00
Pascal Vizeli
ac60de0360 Update security.py (#365) 2018-02-17 16:09:10 +01:00
Pascal Vizeli
587047f9d6 Add support for encrypted snapshot files (#354)
* Add support for encrypted files

* Update tar.py

* Update tar.py

* Update tar.py

* Update addon.py

* Update API.md

* Update API.md

* Update tar.py

* cleanup snapshot

* Update API.md

* Update const.py

* Update const.py

* Update validate.py

* Update homeassistant.py

* Update homeassistant.py

* Update validate.py

* Update validate.py

* Update snapshot.py

* Update utils.py

* Update snapshot.py

* Update utils.py

* Update snapshot.py

* Update validate.py

* Update snapshot.py

* Update validate.py

* Update const.py

* fix lint

* Update snapshot.py

* Update __init__.py

* Update snapshot.py

* Update __init__.py

* Update __init__.py

* Finish snapshot object

* Fix struct

* cleanup snapshot flow

* fix some points

* Add API upload

* fix lint

* Update voluptuous

* fix docker

* Update snapshots.py

* fix versions

* fix schema

* fix schema

* fix api

* fix path

* Handle import better

* fix routing

* fix bugs

* fix bug

* cleanup gz

* fix some bugs

* fix stage

* Fix

* fix

* protect None password

* fix API

* handle exception better

* fix

* fix remove of addons

* fix bug

* clenaup code

* fix none tasks

* Encrypt Home-Assistant

* fix decrypt

* fix binary
2018-02-17 15:52:33 +01:00
Fabian Affolter
e815223047 Merge pull request #363 from home-assistant/probot
Enable probot move
2018-02-16 13:25:16 +01:00
Fabian Affolter
b6fb5ab950 Enable probot move 2018-02-16 13:18:13 +01:00
Pascal Vizeli
a0906937c4 Update Home-Assistant to version 0.63.2 2018-02-14 22:08:09 +01:00
Pascal Vizeli
07c47df369 Update Home-Assistant to version 0.63.2 2018-02-14 21:12:18 +01:00
Pascal Vizeli
85e9a949cc Update Home-Assistant to version 0.63.1 2018-02-13 06:38:45 +01:00
Pascal Vizeli
3933fb0664 Update Home-Assistant to version 0.63.1 2018-02-13 06:26:45 +01:00
Pascal Vizeli
a885fbdb41 Pump version to 0.89 2018-02-11 23:03:46 +01:00
Pascal Vizeli
210793eb34 Update Home-Assistant to version 0.63 2018-02-11 09:31:19 +01:00
Pascal Vizeli
0235c7bce0 Update Home-Assistant to version 0.63 2018-02-11 09:22:31 +01:00
Pascal Vizeli
4419c0fc6c Update Hass.io to version 0.88 2018-02-11 01:53:18 +01:00
Pascal Vizeli
2f3701693d Fix bugs with docker api 3.0.1 and fix the version (#353)
* Fix version

* fix snapshot
2018-02-11 01:42:53 +01:00
Pascal Vizeli
3bf446cbdb Improve security layer (#352)
* Improve security layer

* Update logger

* Fix access

* Validate token

* fix

* fix some bugs

* fix lint
2018-02-11 00:05:20 +01:00
Pascal Vizeli
0c67cc13a1 Pump version to 0.88 2018-02-10 00:23:37 +01:00
Pascal Vizeli
0b80d7b6f4 Update Hass.io to version 0.87 2018-02-10 00:17:13 +01:00
Pascal Vizeli
23c35d4c80 Bugfix Check Config for Home-Assistant (#350)
* add logger

* Bugfix config check
2018-02-10 00:10:30 +01:00
Pascal Vizeli
e939c29efa Pump version to 0.87 2018-02-09 10:45:15 +01:00
Pascal Vizeli
ea0655b4e5 Fix version conflict 2018-02-09 10:43:44 +01:00
Pascal Vizeli
4117ce2e86 Update Hass.io to version 0.86 2018-02-09 01:36:25 +01:00
Pascal Vizeli
dec04386bf Add support for home-assistant bootup (#349)
* Add support for home-assistant bootup

* fix bug

* fix

* fix ip bug

* bugfix
2018-02-09 01:27:45 +01:00
Pascal Vizeli
b50756785e Add support to expose internal services (#339)
* Init services discovery

* extend it

* Add mqtt provider

* Service support

* More protocol stuff

* Update validate.py

* Update validate.py

* Update API.md

* Update API.md

* update api

* add API for services

* fix lint

* add security middleware

* Add discovery layout

* update

* Finish discovery

* improve discovery

* fix

* Update API

* Update api

* fix

* Fix lint

* Update API.md

* Update __init__.py

* Update API.md

* Update interface.py

* Update mqtt.py

* Update discovery.py

* Update const.py

* Update validate.py

* Update validate.py

* Update mqtt.py

* Update mqtt.py

* Update discovery.py

* Update discovery.py

* Update discovery.py

* Update interface.py

* Update mqtt.py

* Update mqtt.py

* Update services.py

* Update discovery.py

* Update discovery.py

* Update mqtt.py

* Update discovery.py

* Update services.py

* Update discovery.py

* Update discovery.py

* Update mqtt.py

* Update discovery.py

* fix aiohttp

* test

* Update const.py

* Update addon.py

* Update homeassistant.py

* Update const.py

* Update addon.py

* Update homeassistant.py

* Update addon.py

* Update security.py

* Update const.py

* Update validate.py

* Update const.py

* Update addon.py

* Update API.md

* Update addons.py

* Update addon.py

* Update validate.py

* Update security.py

* Update security.py

* Update const.py

* Update services.py

* Update discovery.py

* Update API.md

* Update services.py

* Update API.md

* Update services.py

* Update discovery.py

* Update discovery.py

* Update mqtt.py

* Update discovery.py

* Update discovery.py

* Update __init__.py

* Update mqtt.py

* Update security.py

* fix lint

* Update core.py

* Update API.md

* Update services.py
2018-02-08 17:19:47 +01:00
Pascal Vizeli
b9538bdc67 Change timeout to 300 (#348) 2018-02-08 12:34:30 +01:00
Pascal Vizeli
a928281bbe Update Home-Assistant to version 0.62.1 2018-01-31 13:00:28 +01:00
Pascal Vizeli
4533d17e27 Update Home-Assistant to version 0.62.1 2018-01-31 12:49:52 +01:00
Pascal Vizeli
546df6d001 Pump version to 0.86 2018-01-29 23:45:01 +01:00
Pascal Vizeli
f14eef62ae Fix version conflicts 2018-01-29 23:42:58 +01:00
Pascal Vizeli
ee86770570 Fix API URL 2018-01-29 23:27:31 +01:00
Pascal Vizeli
385a4e9f6f Update hass.io to version 0.85 2018-01-29 22:45:03 +01:00
Pascal Vizeli
142cdcffca Better error handling for proxy (#334) 2018-01-29 12:36:58 +01:00
Pascal Vizeli
eb6c753514 Add support for undocument ha version inside wesocket (#333) 2018-01-29 10:17:53 +01:00
Pascal Vizeli
c3b62c80fb Update HomeAssistant to version 0.62.0 2018-01-28 12:03:01 +01:00
Pascal Vizeli
f77e176a6e Update HomeAssistant to version 0.62.0 2018-01-28 09:02:23 +01:00
Pascal Vizeli
3f99dec858 Pump version to 0.85 2018-01-26 15:21:23 +01:00
Pascal Vizeli
81b0cf55b0 Update Hass.io to version 0.84 2018-01-26 14:37:31 +01:00
Pascal Vizeli
1d5d2dc731 Update new panel system (#330) 2018-01-26 14:07:22 +01:00
Franck Nijhof
04f5ee0a80 Adds support for add-on icons (#328)
* Adds support for add-on icons

* Update addons.py
2018-01-25 00:02:15 +01:00
Pascal Vizeli
7a02777cfb New panel (#326)
* Update __init__.py

* Update __init__.py

* Update __init__.py

* Update __init__.py
2018-01-23 13:54:11 +01:00
Pascal Vizeli
7257c44d27 Pump version to 0.84 2018-01-18 23:50:13 +01:00
Pascal Vizeli
cb15602814 Merge remote-tracking branch 'origin/dev' 2018-01-18 23:39:57 +01:00
Pascal Vizeli
0f2c333484 Update Hass.io to version 0.83 2018-01-18 23:36:46 +01:00
Pascal Vizeli
6f2cf2ef85 Robust json file handling with default reset on runtime (#321)
* Update json.py

* Update validate.py

* Update validate.py

* Update snapshots.py

* Update validate.py

* Update homeassistant.py

* Update validate.py

* Update snapshot.py

* Update snapshot.py

* Update snapshot.py

* Update json.py

* Update json.py

* Update json.py

* Update validate.py

* Update snapshots.py

* Update validate.py

* Update validate.py

* improve config updates

* fix lint

* update build

* fix schema

* fix validate

* fix lint

* fix some styles

* fix

* fix snapshot

* fix errors

* Update API
2018-01-18 23:33:05 +01:00
Pascal Vizeli
70a721a47d Reset default config with None (#320)
* Update addons.py

* Update addon.py

* Update API.md
2018-01-18 10:21:16 +01:00
Pascal Vizeli
b32947af98 Update HomeAssistant to version 0.61.1 2018-01-17 12:59:20 +01:00
Pascal Vizeli
94b44ec7fe Update HomeAssistant to version 0.61.1 2018-01-17 12:27:28 +01:00
Pascal Vizeli
5c8aa71c31 Pump version to 0.83 2018-01-16 12:45:14 +01:00
pvizeli
a6c424b7c8 Fix merge conflicts 2018-01-16 12:42:58 +01:00
Pascal Vizeli
38e40c342d Update hass.io to version 0.82 2018-01-16 12:23:11 +01:00
Pascal Vizeli
26d390b66e Add GET param support (#314) 2018-01-16 12:20:04 +01:00
Pascal Vizeli
baddafa552 Update HomeAssistant to version 0.61 2018-01-15 23:03:26 +01:00
Pascal Vizeli
f443d3052b Update HomeAssistant to version 0.61 2018-01-15 22:53:18 +01:00
Franck Nijhof
8fc27ff28e ✏️ Small typo in error msg (#311) 2018-01-15 22:52:13 +01:00
Franck Nijhof
3784d759f5 📚 Fixes markdownlint & spelling issue in the README file (#312) 2018-01-15 22:51:32 +01:00
Pascal Vizeli
61037f3852 Update network.py 2018-01-11 11:16:46 +01:00
Pascal Vizeli
db8aaecdbe Remove old security layer (#306)
* Remove old security layer

* remove unneded libs

* Update diagram
2018-01-10 22:27:47 +01:00
Pascal Vizeli
15a4541595 HomeAssistant API token (#303)
* Add a uuid to home-assistant

* Add API_TOKEN to homeassistant

* Update homeassistant.py

* Update addon.py
2018-01-10 18:14:32 +01:00
Pascal Vizeli
50ae8e2335 Pump version to 0.82 2018-01-08 15:26:34 +01:00
Pascal Vizeli
279df17ba4 Update hass.io to version 0.81 2018-01-08 15:03:14 +01:00
Pascal Vizeli
f8e6362283 Improve supervisor update handling (#300)
* Improve supervisor update handling

* fix message position
2018-01-08 14:55:13 +01:00
Pascal Vizeli
0c44064926 Disable ipv6 / DNS Resolve troubles (#299)
* Disable ipv6

* Disable search domain

* Update network.py

* Update __init__.py

* add options
2018-01-08 13:57:59 +01:00
Pascal Vizeli
73c437574c Pump version to 0.81 2018-01-07 18:11:30 +01:00
Pascal Vizeli
69a2182c04 Fix version conflict 2018-01-07 18:10:03 +01:00
Pascal Vizeli
ce80e6cd32 Update hass.io to version 0.80 2018-01-07 18:04:31 +01:00
Pascal Vizeli
054def09f7 Update panel for 0.80 (#298)
* Update pannel

* fix lint
2018-01-07 17:47:15 +01:00
Pascal Vizeli
eebe90bd14 Add support for stats & code cleanup (#297)
* Add support for stats & code cleanup

* Add more stats

* Move code into own object

* Add to API

* Update API

* Add error handling

* fix lint

* fix block io
2018-01-07 15:53:54 +01:00
Pascal Vizeli
6ea280ce60 Update HomeAssistant to version 0.60.1 2018-01-07 13:45:15 +01:00
Pascal Vizeli
e992b70f92 Update HomeAssistant to version 0.60.1 2018-01-07 13:38:09 +01:00
Pascal Vizeli
0f58bb35ba Bugfix return value supervisor update (#296)
* Update supervisor.py

* Update addon.py
2018-01-06 22:35:58 +01:00
Pascal Vizeli
56abfb6adc Pump version to 0.80 2018-01-05 18:22:51 +01:00
Pascal Vizeli
8352d61f8d Div. Bugfixes for 0.79 (#294)
* Bugfix supervisor logs

* fix list

* Update addon.py

* Update snapshot.py
2018-01-05 18:07:41 +01:00
Pascal Vizeli
51d585f299 Add community add-ons to defaults (#295) 2018-01-04 23:02:53 +01:00
Pascal Vizeli
d017a52922 Update hass.io to version 0.79 2018-01-04 13:26:28 +01:00
Pascal Vizeli
78ec0d1314 Remove home-assistant devices options (#293)
* Remove home-assistant devices options

* fix version mix/max snapshot

* fix wrong path

* fix import

* fix restore

* fix

* make exists call robust

* Update addon.py

* remove old custom function

* Update homeassistant.py

* Update homeassistant.py

* Update homeassistant.py

* Update snapshot.py

* Update validate.py

* Update snapshot.py

* Update homeassistant.py

* fix lint 1

* fix lint

* fix lint

* Update snapshot.py

* Update homeassistant.py

* Update homeassistant.py

* Update homeassistant.py
2018-01-04 12:52:17 +01:00
Pascal Vizeli
c84151e9e8 fix save 2018-01-04 10:54:57 +01:00
Pascal Vizeli
e8e599cb8c Update updater.py 2018-01-04 10:51:41 +01:00
florianj1
232b9ea239 Allow additional docker privileges (#292)
In order use a DVB adapter the capabilities SYS_TIME and SYS_NICE need to be granted.
2018-01-03 14:08:22 +01:00
Pascal Vizeli
1c49351e66 Refactory code / object handling (#289)
* Refactory code / object handling

* Next step

* fix lint

* Step 2

* Cleanup API code

* cleanup addons code

* cleanup data handling

* Cleanup addons data handling

* Cleanup docker api

* clean docker api p2

* next cleanup round

* cleanup start on snapshots

* update format strings

* fix setup

* fix lint

* fix lint

* fix lint

* fix tox

* Fix wrong import of datetime module

* Fix bug with attributes

* fix extraction

* Update core

* Update logs

* Expand scheduler

* add support for time interval objects

* next updates on tasks

* Fix some things

* Cleanup code / supervisor

* fix lint

* Fix some code styles

* rename stuff

* cleanup api call reload

* fix lock replacment

* fix lint

* fix lint

* fix bug

* fix wrong config links

* fix bugs

* fix bug

* Update version on startup

* Fix some bugs

* fix bug

* Fix snapshot

* Add wait boot options

* fix lint

* fix default config

* fix snapshot

* fix snapshot

* load snapshots on startup

* add log message at the end

* Some cleanups

* fix bug

* add logger

* add logger for supervisor update

* Add more logger
2018-01-02 21:21:29 +01:00
Pascal Vizeli
34d1f4725d Pump version to 0.79 2017-12-26 12:23:34 +01:00
Pascal Vizeli
7cd81dcc95 Update Hass.io to version 0.78 2017-12-26 12:11:32 +01:00
Pascal Vizeli
1bdd3d88de Bugfix SSL settings on proxy (#288)
* Bugfix SSL settings on proxy

* fix lint
2017-12-26 12:10:24 +01:00
Pascal Vizeli
d105552fa9 Pump version to 0.78 2017-12-26 01:47:01 +01:00
Pascal Vizeli
b5af35bd6c Fix version conflicts 2017-12-26 01:43:24 +01:00
Pascal Vizeli
7d46487491 Update hass.io to version 0.77 2017-12-26 01:38:22 +01:00
Pascal Vizeli
38a599011e Add long_description from README.md (#287)
* Add readme to API

* update name
2017-12-26 01:31:24 +01:00
Pascal Vizeli
e59e2fc8d7 Update API.md 2017-12-26 00:54:39 +01:00
Pascal Vizeli
b9ce405ada Add websocket proxy support (#286)
* Add websocket proxy support

* forward

* update proxy code

* fix import

* fix import

* fix

* reorder

* fix setup

* fix code

* stage al

* fix lint

* convert it into object

* fix lint

* fix url

* fix routing

* update log output

* fix future

* add loop

* Update log messages & error handling

* fix error message

* Update logging

* improve handling

* better error handling

* Fix server read

* fix cancel reader
2017-12-26 00:51:07 +01:00
Pascal Vizeli
d7df423deb Allow event stream over api proxy (#285)
* Allow event stream over api proxy

* fix lint

* fix lint

* cleanup code

* fix bug

* fix prepare

* Fix stream bug

* fix api request
2017-12-24 15:04:16 +01:00
Pascal Vizeli
99eea99e93 Update home-assistant to 0.60 2017-12-18 14:56:20 +01:00
Pascal Vizeli
63d82ce03e better merge base image (#280)
* better merge base image

* fix lint

* fix lint

* Update build.py

* fix lint
2017-12-18 10:30:31 +01:00
Pascal Vizeli
13a2c1ecd9 Update home-assistant to 0.60 2017-12-18 10:28:25 +01:00
Franck Nijhof
627ab4ee81 💄 Re-labeling of "By our self" (#282)
Changes it to "you", this improves the displaying of the maintainer for
local add-ons.

Ref #243
2017-12-14 23:34:09 +01:00
Pascal Vizeli
54f45539be Pump version to 0.77 2017-12-13 00:18:30 +01:00
Pascal Vizeli
53297205c8 Merge remote-tracking branch 'origin/dev' 2017-12-12 23:50:52 +01:00
Pascal Vizeli
0f09fdfcce Update hass.io to 0.76 2017-12-12 23:48:57 +01:00
Pascal Vizeli
24db0fdb86 Merge remote-tracking branch 'origin/dev' 2017-12-12 23:46:37 +01:00
Pascal Vizeli
7349234638 Use uvloop & aiohttp C extension (#279)
* Update Dockerfile

* Update __main__.py

* Update Dockerfile

* Update Dockerfile

* Update Dockerfile

* Update Dockerfile

* Update Dockerfile

* Update Dockerfile

* Update Dockerfile

* Update Dockerfile

* Update Dockerfile
2017-12-12 23:38:33 +01:00
Pascal Vizeli
c691f2a559 Auto mapping UART devices from host (#276)
* Add hardware to docker api

* set hardware to docker

* add loop to dns

* Use loop for dns

* Update const.py

* Update API.md

* Update validate.py

* Update addon.py

* Update addon.py

* fix lint

* style

* Update hardware.py
2017-12-12 20:01:02 +01:00
Pascal Vizeli
110cd32dc3 Update hardware.py (#275) 2017-12-12 10:41:05 +01:00
Pascal Vizeli
26d8dc0ec6 Add support for host dbus system (#274) 2017-12-12 09:10:39 +01:00
Pascal Vizeli
fd41bda828 Cleanup some API stuff (#272)
* Update API.md

* Update addons.py
2017-12-11 22:40:02 +01:00
Pascal Vizeli
1e3868bb70 Add support for changelog (#271) 2017-12-10 23:45:30 +01:00
Pascal Vizeli
ece6c644cf IPC (#267)
* Update API.md

* Update const.py

* Update addon.py

* Update validate.py

* Update addon.py

* Update addons.py

* fix lint
2017-12-10 23:29:51 +01:00
Pascal Vizeli
6a5bd5a014 Disable AppArmor/SecComp (#266)
Disable AppArmor
2017-12-10 23:10:25 +01:00
Pascal Vizeli
664334f1ad Move setup to python 3.6 2017-12-10 22:16:22 +01:00
Pascal Vizeli
e5e28747d4 Cleanup dockerfile 2017-12-10 22:13:40 +01:00
Pascal Vizeli
c7956d95ae Update Home-Assistant to 0.59.2 2017-12-06 19:30:27 +01:00
Pascal Vizeli
5ce6abdbb6 Update Home-Assistant to 0.59.2 2017-12-06 16:30:38 +01:00
Pascal Vizeli
fad0185c26 Update Home-Assistant to 0.59.1 2017-12-05 08:08:28 +01:00
Pascal Vizeli
86faf32709 Update Home-Assistant to 0.59.1 2017-12-04 18:14:53 +01:00
Pascal Vizeli
19f413796d Update Home-Assistant to version 0.59 2017-12-04 10:49:15 +01:00
Pascal Vizeli
8f94b4d63f Print error on invalid json (#263) 2017-11-30 20:23:20 +01:00
Pascal Vizeli
db263f84af Update Home-Assistant to 0.58.1 2017-11-25 09:50:45 +01:00
Pascal Vizeli
747810b729 Update Home-Assistant to 0.58.1 2017-11-25 09:50:23 +01:00
Pascal Vizeli
d6768f15a1 Pump version to 0.76 2017-11-24 22:20:54 +01:00
Pascal Vizeli
6c75957578 Fix version merge conflict 2017-11-24 22:18:13 +01:00
Pascal Vizeli
3a8307acfe Fix panel (#258) 2017-11-24 21:54:56 +01:00
Pascal Vizeli
f20c7d42ee Update home-assistant to version 0.58.1 2017-11-24 14:13:16 +01:00
Pascal Vizeli
9419fbff94 Add new pannel (#257) 2017-11-24 14:01:17 +01:00
Pascal Vizeli
3ac6c03637 Update Hass.io to 0.75 2017-11-22 16:46:14 +01:00
Pascal Vizeli
a95274f1b3 Use tini for home-assistant docker (#255) 2017-11-21 15:55:10 +01:00
Markus
9d2fb87cec typo (#254) 2017-11-20 13:48:04 +01:00
Paulus Schoutsen
ce9c3565b6 Hassio panel split (#249)
* Allow serving two types of panels

* Remove old panel

* Make backwards compatible

* Add comment
2017-11-20 13:44:04 +01:00
Pascal Vizeli
b0ec58ed1b Update Home-Assistant to 0.57.3 2017-11-13 20:47:36 +01:00
Pascal Vizeli
893a5f8dd3 Update Home-Assistant to 0.57.3 2017-11-13 18:44:12 +01:00
Pascal Vizeli
98064f6a90 Update Home-Assistant to 0.57.2 2017-11-06 11:25:25 +01:00
Pascal Vizeli
5146f89354 Update Home-Assistant to 0.57.2 2017-11-06 07:24:04 +01:00
Pascal Vizeli
fb46592d48 Pump version to 0.75 2017-11-05 00:35:04 +01:00
Pascal Vizeli
b4fb5ac681 Fix merge conflict 2017-11-05 00:32:17 +01:00
Pascal Vizeli
4b7201dc59 Update Home-Assistant to 0.57.1 2017-11-05 00:13:11 +01:00
Pascal Vizeli
3a5a4e4c27 Update hass.io to 0.74 2017-11-05 00:12:49 +01:00
Pascal Vizeli
70104a9280 Set api token for access requirements (#238)
* Set api token for access requirements

* fix uuid

* make robust

* fix names
2017-11-05 00:07:49 +01:00
Pascal Vizeli
efbc7b17a1 Use init system for add-ons (#237)
* Use init system for add-ons

* Update const.py

* Update validate.py

* Update addon.py

* Update addon.py

* remove options

* remove options p2

* remove options p3

* Update addon.py
2017-11-04 21:52:41 +01:00
Pascal Vizeli
64c5e20fc4 Update Home-Assistant to 0.57 2017-11-04 11:43:04 +01:00
Pascal Vizeli
13498afa97 Update Home-Assistant to 0.57 2017-11-04 11:33:55 +01:00
Pascal Vizeli
f6375f1bd6 Pump version to 0.74 2017-10-25 12:12:13 +02:00
pvizeli
8fd1599173 Fix merge conflict version 2017-10-25 12:09:33 +02:00
Pascal Vizeli
63302b73b0 Update hass.io to 0.73 2017-10-25 12:01:27 +02:00
Pascal Vizeli
f591f67a2a Show hardware GPIO interface (#233)
* Update hardware.py

* Update host.py

* Update API.md

* Update API.md

* fix lint
2017-10-25 11:50:00 +02:00
Pascal Vizeli
cda3184a55 Add support for legacy mode (#232)
* Add support for legacy mode

* Update const.py

* add legacy mode

* Update addon.py

* Update addon.py

* Update addon.py

* Update addon.py
2017-10-24 17:23:33 +02:00
Pascal Vizeli
afc811e975 Update Home-Assistant to 0.56.2 2017-10-24 06:59:19 +02:00
Pascal Vizeli
2e169dcb42 Update Home-Assistant to 0.56.2 2017-10-24 00:08:57 +02:00
Pascal Vizeli
34e24e184f Update Home-Assistant to 0.56.1 2017-10-23 00:26:49 +02:00
Pascal Vizeli
2e4751ed7d Update version.json 2017-10-23 00:16:38 +02:00
Pascal Vizeli
8c82c467d4 Fix aiohttp 2.3.1 (#231) 2017-10-22 14:05:45 +02:00
Pascal Vizeli
f3f6771534 Add a static entry for hassio api (#230) 2017-10-22 13:53:41 +02:00
Pascal Vizeli
0a75a4dcbc Update Home-Assistant to 0.56 2017-10-22 11:16:51 +02:00
Pascal Vizeli
1a4542fc4e Update Home-Assistant to 0.56 2017-10-22 10:34:31 +02:00
Pascal Vizeli
7e0525749e Pump version to 0.73 2017-10-17 22:49:35 +02:00
Pascal Vizeli
b33b26018d fix version conflict 2017-10-17 22:47:20 +02:00
Pascal Vizeli
66c93e7176 Minimize downtime to 1 sec (#223) 2017-10-17 22:25:29 +02:00
Pascal Vizeli
5674d32bad Update hass.io version 0.72 2017-10-17 21:54:10 +02:00
Pascal Vizeli
7a84972770 Better close/loop handling (#221)
* Better close/loop handling

* Update bootstrap.py

* Update __main__.py

* Update core.py

* Update __main__.py

* Update __main__.py

* Update supervisor.py

* Update supervisor.py

* Update const.py

* fix lint
2017-10-17 16:04:37 +02:00
Pascal Vizeli
638f0f5371 Update Home-Assistant to 0.55.2 2017-10-17 00:07:42 +02:00
Pascal Vizeli
dca1b6f1d3 Update Home-Assistant to 0.55.2 2017-10-17 00:02:17 +02:00
Pascal Vizeli
2b0ee109d6 Rollback Home-Assistant 0.55.1 2017-10-16 19:06:17 +02:00
Pascal Vizeli
e7430d87d7 Update docker image validate (#220) 2017-10-16 17:15:09 +02:00
Pascal Vizeli
9751c1de79 Update Home-Assistant to 0.55.1 2017-10-16 15:50:48 +02:00
Pascal Vizeli
c497167b64 Update Home-Assistant to 0.55.1 2017-10-16 15:50:08 +02:00
Pascal Vizeli
7fb2aca88b Pump version to 0.72 2017-10-13 22:28:15 +02:00
Pascal Vizeli
0d544845b1 Update hass.io to version 0.71 2017-10-13 22:14:11 +02:00
Pascal Vizeli
602eb472f9 Allow to set a option als optional (#218)
* Update validate.py

* Update validate.py

* Update validate.py

* Update validate.py

* Update validate.py

* fix bug

* Extend schema

* Update validate.py

* fix lint

* Update validate.py

* Update validate.py

* Fix deepmerge

* Update setup.py

* Update validate.py
2017-10-13 22:02:41 +02:00
Pascal Vizeli
f22fa46bdb Pump version to 0.71 2017-10-10 07:24:35 +02:00
Pascal Vizeli
4171a28260 Update Hass.io to 0.70 2017-10-10 07:10:01 +02:00
Pascal Vizeli
55365a631a Bugfix weburl (#217) 2017-10-10 07:08:56 +02:00
Pascal Vizeli
547415b30b Pump version to 0.70 2017-10-09 15:38:49 +02:00
pvizeli
cbf79f1fab Fix version merge conflict 2017-10-09 15:25:30 +02:00
Pascal Vizeli
31cc1dce82 Update Hass.io to version 0.69 2017-10-09 15:15:30 +02:00
Pascal Vizeli
8a11e6c845 Check if a option is missing inside nested lists (#216)
* Update validate.py

* fix lint
2017-10-09 14:08:29 +02:00
Pascal Vizeli
2df4f80aa5 More log output (#214)
* Update snapshot.py

* Update __init__.py

* Update snapshot.py

* Update snapshot.py

* fix lint
2017-10-09 13:30:15 +02:00
Pascal Vizeli
68566ee9e1 Update homeassistant.py (#215) 2017-10-09 13:21:21 +02:00
Pascal Vizeli
fe04b7ec59 Remove dedicated API calls (#212)
* Update addons.py

* Update API.md

* Update addon.py

* Update addon.py

* Update addons.py
2017-10-09 10:48:17 +02:00
Pascal Vizeli
38f96d7ddd Remove unknown options from input (#213)
* Update validate.py

* Update validate.py

* Cleanup unneeded code
2017-10-09 10:09:43 +02:00
Pascal Vizeli
2b2edd6e98 Update Home-Assistant to version 0.55 2017-10-08 09:49:06 +02:00
Pascal Vizeli
361969aca2 Update Home-Assistant to version 0.55 2017-10-08 09:43:07 +02:00
Pascal Vizeli
e61e7f41f2 Pump version to 0.69 2017-10-03 17:37:48 +02:00
Pascal Vizeli
75150fd149 Update hass.io to 0.68 2017-10-03 17:12:40 +02:00
Pascal Vizeli
bd1c8be1e1 Support new API for add-on STDIN support (#207)
* Add function to write data to add-on stdin with API

* Update API Doc

* Add to api

* Update addon.py
2017-10-03 16:44:48 +02:00
Pascal Vizeli
f167197640 New config for homeassistant_api & upgrade snapshot (#206)
* New config for homeassistant_api & upgrade snapshot

* fix lint
2017-10-03 11:47:35 +02:00
Pascal Vizeli
f084ecc007 Pump version to 0.68 2017-10-03 00:59:12 +02:00
Pascal Vizeli
65becbd0ae Update hass.io to 0.67 2017-10-03 00:41:30 +02:00
Pascal Vizeli
f38e28a4d9 Add interface and home-assistant api proxy (#205)
* Add initial for hass interface

* For better compatibility, remove extra options for cleanup old stuff

* Add new functions to api

* Add api proxy to home-assistant

* use const

* fix lint

* fix lint

* Add check_api_state function

* Add api watchdog

* Fix lint

* update output

* fix url

* Fix API call

* fix API documentation

* remove password

* fix api call to hass api only

* fix problem with config missmatch

* test

* Detect wrong ssl settings

* disable watchdog & add options

* Update API
2017-10-03 00:31:14 +02:00
Pascal Vizeli
2998cd94ff Allow dynamic handling of proto part (#203)
* Allow dynamic handling of proto part

* Fix lint

* fix bug
2017-10-01 00:03:06 +02:00
Pascal Vizeli
79e2f3e8ab Pump version to 0.67 2017-09-30 12:09:22 +02:00
Pascal Vizeli
13291f52f2 Update hass.io to 0.66 2017-09-30 12:03:37 +02:00
Pascal Vizeli
4baa80c3de Map sysfs devices/platform/soc data 2017-09-30 11:04:27 +02:00
Pascal Vizeli
be28a6b012 fix spell 2017-09-29 23:31:07 +02:00
Pascal Vizeli
d94ada6216 Pump version to 0.65 2017-09-29 22:12:52 +02:00
Pascal Vizeli
b2d7743e06 Update hass.io to 0.65 2017-09-29 21:56:15 +02:00
Pascal Vizeli
40324beb72 Add support for kernel gpio interface (#202)
* Add support for kernel gpio interface

* Update addon.py

* fix git python module change

* Update git.py
2017-09-29 21:42:33 +02:00
Pascal Vizeli
c02f6913b3 Extend label schema (#200)
* Update build.py

* Update build.py

* fix lint
2017-09-29 16:29:23 +02:00
Pascal Vizeli
d56af22d5e Dockerfiles for new build system 2017-09-27 17:17:08 +02:00
Pascal Vizeli
1795103086 Pump version to 0.65 2017-09-26 09:10:56 +02:00
pvizeli
02e1689dd1 Fix version confict 2017-09-26 08:48:31 +02:00
Pascal Vizeli
ab4d96331f Update Home-Assistant to 0.54 2017-09-23 12:03:08 +02:00
Pascal Vizeli
cb881cba28 Update Home-Assistant to 0.54 2017-09-23 12:02:49 +02:00
Pascal Vizeli
44b247f397 Update hass.io to 0.64 2017-09-19 22:06:48 +02:00
Pascal Vizeli
8bb43daf91 Remove support for custom configs / configs for other hass.io versions (#197)
* Remove support for custom configs

* not need since supervisor is autoupdate
2017-09-19 21:52:18 +02:00
Pascal Vizeli
a7e65613d6 Update validate.py (#196) 2017-09-19 20:43:44 +02:00
Pascal Vizeli
3c04c71401 Update build system to origin docker (#191)
* Update build system to origin docker

* Rename build env

* fix lint p1

* fix bug & add more log info for snapshot/restore

* fix exception

* Log build info

* revert last change

* fix regex
2017-09-19 18:06:34 +02:00
Pascal Vizeli
1353d52bd1 Reset json file to default on schema error (#193) 2017-09-19 17:51:16 +02:00
Pascal Vizeli
7701457791 Update resinos to 1.1 2017-09-18 22:05:46 +02:00
Pascal Vizeli
b7820bc6a6 Update resinos to 1.1 2017-09-18 22:05:11 +02:00
Pascal Vizeli
df66102de0 Pump version to 0.64 2017-09-17 14:42:35 +02:00
Pascal Vizeli
4b308d0de1 Fix version conflict 2017-09-17 14:40:20 +02:00
Pascal Vizeli
4448ba886b Update hass.io to version 0.63 2017-09-17 14:25:02 +02:00
Pascal Vizeli
f39006be01 Change flow and start landingpage faster (#189)
* Change flow and start landingpage faster

* run homeassistant after install

* Update homeassistant.py
2017-09-16 14:45:36 +02:00
Pascal Vizeli
e5204eef8a Update Home-Assistant to 0.53.1 2017-09-14 01:28:01 +02:00
Pascal Vizeli
1f07d47fd6 Update Home-Assistant to 0.53.1 2017-09-14 01:27:36 +02:00
Pascal Vizeli
ba352abf0b Pump version to 0.63 2017-09-12 20:13:41 +02:00
Pascal Vizeli
2bf440a744 Update hass.io to version 0.62 2017-09-12 20:10:15 +02:00
Pascal Vizeli
3b26136636 More schema options (#187)
* Extend the addon schema options

* convert range to float

* convert match to string

* fix lint

* cleanup

* fix lint

* fix options name
2017-09-12 19:38:26 +02:00
Pascal Vizeli
8249f042c0 Pump version to 0.62 2017-09-11 14:42:23 +02:00
Pascal Vizeli
84bbaeee5f Fix merge conflicts with versions 2017-09-11 14:41:12 +02:00
Pascal Vizeli
b7620b7adf Update version.json 2017-09-11 14:24:48 +02:00
Pascal Vizeli
5a80be9fd4 Allow stop/start home-assistant & flow of startup (#182)
* Allow config boot

* Read boot settings

* Use internal boot time for detect reboot

* Check if Home-Assistant need to watch

* Make datetime string and parse_datetime

* Add api calls

* fix lint p1

* Use new datetime parser for sessions and make a real default boot time

* fix lint p2

* only start docker if they is running

* convert to int (timestamp)

* add boot flag
2017-09-11 14:14:26 +02:00
Pascal Vizeli
a733886803 Pump version to 0.61 2017-09-11 10:03:15 +02:00
Pascal Vizeli
834fd29fab Update HomeAssistant to 0.53 2017-09-10 16:33:37 +02:00
Pascal Vizeli
fd1caf8aa6 Update HomeAssistant to 0.53 2017-09-10 16:33:17 +02:00
Pascal Vizeli
975c9e8061 Update Home-Assistant 0.52.1 2017-08-28 23:38:43 +02:00
Pascal Vizeli
0b3c5885ec Update Home-Assistant 0.52.1 2017-08-28 23:38:17 +02:00
Pascal Vizeli
711b63e2d0 Update Home-Assistant to version 0.52 (#173) 2017-08-26 19:42:55 +02:00
Pascal Vizeli
c7b833b5eb Update Home-Assistant 0.52 2017-08-26 13:49:11 +02:00
Pascal Vizeli
fd472b3084 Update Hass.io to 0.60 2017-08-25 16:47:06 +02:00
Pascal Vizeli
dcbb6a2160 Fix socat spawn (#172)
* Update dns.py

* Update dns.py
2017-08-25 16:41:48 +02:00
Pascal Vizeli
56fa1550d2 Pump version to 0.60 2017-08-24 22:40:19 +02:00
Pascal Vizeli
e1f97860ee Update hass.io to 0.59 2017-08-24 22:27:42 +02:00
Pascal Vizeli
6ab3fe18d9 Allow to see log also if there some process (#170) 2017-08-24 22:23:24 +02:00
Pascal Vizeli
7969f3dfd7 Remove default bridge (#168)
* Remove default bridge

* rename bridge
2017-08-24 21:49:36 +02:00
Pascal Vizeli
6f05b90e4e Pump hass.io to 0.59 2017-08-24 17:17:34 +02:00
Pascal Vizeli
3aa53d99d7 Update hass.io to 0.58 2017-08-24 17:07:26 +02:00
Pascal Vizeli
3525f5a02f Cleanup network mode & fix port mapping (#166)
* Cleanup network mode & fix port mapping

* Fix lint
2017-08-24 16:39:06 +02:00
Pascal Vizeli
04514a9f5c WIP: Network docker hassio (#159)
* Create hassio network layer / allow linking

* rename docker

* fix lint

* fix lint p2

* Set network options

* First version of network code

* Finish network layer

* Remove old api_endpoint stuff

* Add DNS forwarding

* Fix DNS recorder

* Fix lint p1

* Fix lint p2

* Fix lint p3

* Fix spell

* Fix ipam struct

* Fix ip to str

* Fix ip to str v2

* Fix spell

* Fix hass on host

* Fix host attach to network

* Cleanup network code

* Fix lint & add debug

* fix link

* Remove log

* Fix network

* fix reattach of supervisor

* set options

* Fix containers

* Fix remapping & add a test

* Fix dict bug

* Fix prop

* Test with run container

* Fix problem
2017-08-24 14:57:13 +02:00
Pascal Vizeli
1c915ef4cd Pump hass.io version to 0.58 2017-08-22 16:29:10 +02:00
pvizeli
b03a2c5c5f Merge remote-tracking branch 'origin/dev' 2017-08-22 16:16:01 +02:00
Pascal Vizeli
64988b285e Update hass.io to version 0.57 2017-08-22 16:15:17 +02:00
Pascal Vizeli
5c69dca7b3 New panel with poly2 (#163) 2017-08-22 15:44:14 +02:00
Pascal Vizeli
dfda7dc748 Better cleanup for local build add-ons (#161)
* Better cleanup for local build add-ons

* fix lint
2017-08-20 23:02:58 +02:00
Pascal Vizeli
cb7710c23f Add a error message if that is not a local build addon (#162) 2017-08-20 22:33:27 +02:00
Pascal Vizeli
f9b12a2eb2 Allow rebuild for local build addons (#158)
* Allow rebuild for local build addons

* fix lint
2017-08-19 22:44:39 +02:00
Pascal Vizeli
6a7617faad Use deepmerge for options (#157)
Add an optional extended description…
2017-08-18 15:57:13 +02:00
pvizeli
a94e6c5303 Merge remote-tracking branch 'origin/dev' 2017-08-16 11:38:07 +02:00
Pascal Vizeli
a3209c4bde Merge remote-tracking branch 'origin/dev' 2017-08-16 02:17:57 +02:00
pvizeli
09bba96940 Merge remote-tracking branch 'origin/dev' 2017-08-15 09:07:47 +02:00
Pascal Vizeli
6cab017042 Fix version conflict 2017-08-15 00:27:47 +02:00
Pascal Vizeli
d08343d040 Update HomeAssistant 0.51.2 2017-08-14 13:15:45 +02:00
Pascal Vizeli
8ab0ed5047 Update HomeAssistant 0.51.1 2017-08-13 14:59:50 +02:00
Pascal Vizeli
1382a7b36e Update to HomeAssistant 0.51 2017-08-13 09:24:05 +02:00
pvizeli
47491ca55b Fix merge conflict with versions 2017-08-10 11:24:21 +02:00
Pascal Vizeli
261bda82db Fix version merge conflict 2017-08-08 21:12:32 +02:00
Pascal Vizeli
f751b0e6fc Update homeassistant to 0.50.2 2017-08-01 11:35:07 +02:00
169 changed files with 10775 additions and 3584 deletions

13
.dockerignore Normal file

@@ -0,0 +1,13 @@
# General files
.git
.github
# Test related files
.tox
# Temporary files
**/__pycache__
# virtualenv
venv/
ENV/

29
.github/ISSUE_TEMPLATE.md vendored Normal file

@@ -0,0 +1,29 @@
<!-- READ THIS FIRST:
- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
- Do not report issues for components here, please refer to https://github.com/home-assistant/home-assistant/issues
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
- If you have a problem with an Add-on, open an issue in its repository.
-->
**Home Assistant release with the issue:**
<!--
- Frontend -> Developer tools -> Info
- Or use this command: hass --version
-->
**Operating environment (HassOS/Generic):**
<!--
Please provide details about your environment.
-->
**Supervisor logs:**
<!--
- Frontend -> Hass.io -> System
- Or use this command: hassio su logs
-->
**Description of problem:**

16
.github/main.workflow vendored Normal file

@@ -0,0 +1,16 @@
workflow "tox" {
on = "push"
resolves = [
"Python 3.7",
"Json Files",
]
}
action "Python 3.7" {
uses = "home-assistant/actions/py37-tox@master"
}
action "Json Files" {
uses = "home-assistant/actions/jq@master"
args = "**/*.json"
}

13
.github/move.yml vendored Normal file

@@ -0,0 +1,13 @@
# Configuration for move-issues - https://github.com/dessant/move-issues
# Delete the command comment. Ignored when the comment also contains other content
deleteCommand: true
# Close the source issue after moving
closeSourceIssue: true
# Lock the source issue after moving
lockSourceIssue: false
# Set custom aliases for targets
# aliases:
# r: repo
# or: owner/repo

4
.github/release-drafter.yml vendored Normal file

@@ -0,0 +1,4 @@
template: |
  ## What's Changed

  $CHANGES

3
.gitignore vendored

@@ -90,3 +90,6 @@ ENV/
# pylint
.pylint.d/
# VS Code
.vscode/

1
.gitmodules vendored

@@ -1,3 +1,4 @@
[submodule "home-assistant-polymer"]
path = home-assistant-polymer
url = https://github.com/home-assistant/home-assistant-polymer
branch = dev

.travis.yml

@@ -1,12 +0,0 @@
sudo: false
matrix:
fast_finish: true
include:
- python: "3.6"
cache:
directories:
- $HOME/.cache/pip
install: pip install -U tox
language: python
script: tox

498
API.md

@@ -1,10 +1,10 @@
# Hass.io Server
# Hass.io
## Hass.io RESTful API
Interface for Home Assistant to control things from supervisor.
On error:
On error / Code 400:
```json
{
@@ -13,7 +13,7 @@ On error:
}
```
On success:
On success / Code 200:
```json
{
@@ -22,9 +22,14 @@ On success:
}
```
To access the API you need to set the `X-HASSIO-KEY` header; the token is available to Add-ons/HomeAssistant via the `HASSIO_TOKEN` environment variable.
### Hass.io
- GET `/supervisor/ping`
This API call doesn't need a token.
- GET `/supervisor/info`
The add-ons listed under `addons` are only the installed ones.
@@ -34,8 +39,9 @@ The addons from `addons` are only installed one.
"version": "INSTALL_VERSION",
"last_version": "LAST_VERSION",
"arch": "armhf|aarch64|i386|amd64",
"beta_channel": "true|false",
"channel": "stable|beta|dev",
"timezone": "TIMEZONE",
"wait_boot": "int",
"addons": [
{
"name": "xy bla",
@@ -44,6 +50,7 @@ The addons from `addons` are only installed one.
"repository": "12345678|null",
"version": "LAST_VERSION",
"installed": "INSTALL_VERSION",
"icon": "bool",
"logo": "bool",
"state": "started|stopped",
}
@@ -68,8 +75,9 @@ Optional:
```json
{
"beta_channel": "true|false",
"channel": "stable|beta|dev",
"timezone": "TIMEZONE",
"wait_boot": "int",
"addons_repositories": [
"REPO_URL"
]
@@ -84,44 +92,20 @@ Reload addons/version.
Output is the raw docker log.
### Security
- GET `/security/info`
- GET `/supervisor/stats`
```json
{
"initialize": "bool",
"totp": "bool"
"cpu_percent": 0.0,
"memory_usage": 283123,
"memory_limit": 329392,
"network_tx": 0,
"network_rx": 0,
"blk_read": 0,
"blk_write": 0
}
```
- POST `/security/options`
```json
{
"password": "xy"
}
```
- POST `/security/totp`
```json
{
"password": "xy"
}
```
Return QR-Code
- POST `/security/session`
```json
{
"password": "xy",
"totp": "null|123456"
}
```
### Backup/Snapshot
### Snapshot
- GET `/snapshots`
@@ -131,7 +115,9 @@ Return QR-Code
{
"slug": "SLUG",
"date": "ISO",
"name": "Custom name"
"name": "Custom name",
"type": "full|partial",
"protected": "bool"
}
]
}
@@ -139,11 +125,28 @@ Return QR-Code
- POST `/snapshots/reload`
- POST `/snapshots/new/upload`
return:
```json
{
"slug": ""
}
```
- POST `/snapshots/new/full`
```json
{
"name": "Optional"
"name": "Optional",
"password": "Optional"
}
```
return:
```json
{
"slug": ""
}
```
@@ -153,7 +156,15 @@ Return QR-Code
{
"name": "Optional",
"addons": ["ADDON_SLUG"],
"folders": ["FOLDER_NAME"]
"folders": ["FOLDER_NAME"],
"password": "Optional"
}
```
return:
```json
{
"slug": ""
}
```
@@ -168,15 +179,14 @@ Return QR-Code
"name": "custom snapshot name / description",
"date": "ISO",
"size": "SIZE_IN_MB",
"homeassistant": {
"version": "INSTALLED_HASS_VERSION",
"devices": []
},
"protected": "bool",
"homeassistant": "version",
"addons": [
{
"slug": "ADDON_SLUG",
"name": "NAME",
"version": "INSTALLED_VERSION"
"version": "INSTALLED_VERSION",
"size": "SIZE_IN_MB"
}
],
"repositories": ["URL"],
@@ -185,36 +195,47 @@ Return QR-Code
```
- POST `/snapshots/{slug}/remove`
- GET `/snapshots/{slug}/download`
- POST `/snapshots/{slug}/restore/full`
```json
{
"password": "Optional"
}
```
- POST `/snapshots/{slug}/restore/partial`
```json
{
"homeassistant": "bool",
"addons": ["ADDON_SLUG"],
"folders": ["FOLDER_NAME"]
"folders": ["FOLDER_NAME"],
"password": "Optional"
}
```
### Host
- POST `/host/reload`
- POST `/host/shutdown`
- POST `/host/reboot`
- GET `/host/info`
```json
{
"type": "",
"version": "",
"last_version": "",
"features": ["shutdown", "reboot", "update", "hostname", "network_info", "network_control"],
"hostname": "",
"os": "",
"audio": {
"input": "0,0",
"output": "0,0"
}
"hostname": "hostname|null",
"features": ["shutdown", "reboot", "hostname", "services", "hassos"],
"operating_system": "HassOS XY|Ubuntu 16.4|null",
"kernel": "4.15.7|null",
"chassis": "specific|null",
"deployment": "stable|beta|dev|null",
"cpe": "xy|null",
}
```
@@ -222,54 +243,98 @@ Return QR-Code
```json
{
"audio_input": "0,0",
"audio_output": "0,0"
"hostname": "",
}
```
- POST `/host/update`
- POST `/host/reload`
Optional:
#### Services
- GET `/host/services`
```json
{
"version": "VERSION"
"services": [
{
"name": "xy.service",
"description": "XY ...",
"state": "active|"
}
]
}
```
- GET `/host/hardware`
- POST `/host/service/{unit}/stop`
- POST `/host/service/{unit}/start`
- POST `/host/service/{unit}/reload`
### HassOS
- GET `/hassos/info`
```json
{
"version": "2.3",
"version_cli": "7",
"version_latest": "2.4",
"version_cli_latest": "8",
"board": "ova|rpi"
}
```
- POST `/hassos/update`
```json
{
"version": "optional"
}
```
- POST `/hassos/update/cli`
```json
{
"version": "optional"
}
```
- POST `/hassos/config/sync`
Load host configs from a USB stick.
### Hardware
- GET `/hardware/info`
```json
{
"serial": ["/dev/xy"],
"input": ["Input device name"],
"disk": ["/dev/sdax"],
"gpio": ["gpiochip0", "gpiochip100"],
"audio": {
"CARD_ID": {
"name": "xy",
"type": "microphone",
"devices": {
"DEV_ID": "type of device"
}
"devices": [
"chan_id": "channel ID",
"chan_type": "type of device"
]
}
}
}
```
### Network
- GET `/network/info`
- GET `/hardware/audio`
```json
{
"hostname": ""
}
```
- POST `/network/options`
```json
{
"hostname": "",
"audio": {
"input": {
"0,0": "Mic"
},
"output": {
"1,0": "Jack",
"1,1": "HDMI"
}
}
}
```
@@ -281,9 +346,15 @@ Optional:
{
"version": "INSTALL_VERSION",
"last_version": "LAST_VERSION",
"devices": [""],
"arch": "arch",
"machine": "Image machine type",
"image": "str",
"custom": "bool -> if custom image"
"custom": "bool -> if custom image",
"boot": "bool",
"port": 8123,
"ssl": "bool",
"watchdog": "bool",
"wait_boot": 600
}
```
@@ -302,21 +373,53 @@ Optional:
Output is the raw Docker log.
- POST `/homeassistant/restart`
- POST `/homeassistant/options`
- POST `/homeassistant/check`
- POST `/homeassistant/start`
- POST `/homeassistant/stop`
- POST `/homeassistant/rebuild`
- POST `/homeassistant/options`
```json
{
"devices": [],
"image": "Optional|null",
"last_version": "Optional for custom image|null"
"last_version": "Optional for custom image|null",
"port": "port for access hass",
"ssl": "bool",
"password": "",
"refresh_token": "",
"watchdog": "bool",
"wait_boot": 600
}
```
Image with `null` and last_version with `null` reset this options.
- POST/GET `/homeassistant/api`
Proxy to real home-assistant instance.
- GET `/homeassistant/websocket`
Proxy to real websocket instance.
- GET `/homeassistant/stats`
```json
{
"cpu_percent": 0.0,
"memory_usage": 283123,
"memory_limit": 329392,
"network_tx": 0,
"network_rx": 0,
"blk_read": 0,
"blk_write": 0
}
```
### RESTful for API addons
If an add-on wants to call itself, it can use `/addons/self/...`.
- GET `/addons`
Get all available addons.
@@ -328,18 +431,15 @@ Get all available addons.
"name": "xy bla",
"slug": "xy",
"description": "description",
"arch": ["armhf", "aarch64", "i386", "amd64"],
"repository": "core|local|REP_ID",
"version": "LAST_VERSION",
"installed": "none|INSTALL_VERSION",
"detached": "bool",
"available": "bool",
"build": "bool",
"privileged": ["NET_ADMIN", "SYS_ADMIN"],
"devices": ["/dev/xy"],
"url": "null|url",
"logo": "bool",
"audio": "bool",
"hassio_api": "bool"
"icon": "bool",
"logo": "bool"
}
],
"repositories": [
@@ -360,10 +460,15 @@ Get all available addons.
```json
{
"name": "xy bla",
"slug": "xdssd_xybla",
"description": "description",
"long_description": "null|markdown",
"auto_update": "bool",
"url": "null|url of addon",
"detached": "bool",
"available": "bool",
"arch": ["armhf", "aarch64", "i386", "amd64"],
"machine": "[raspberrypi2, tinker]",
"repository": "12345678|null",
"version": "null|VERSION_INSTALLED",
"last_version": "LAST_VERSION",
@@ -373,19 +478,43 @@ Get all available addons.
"options": "{}",
"network": "{}|null",
"host_network": "bool",
"host_pid": "bool",
"host_ipc": "bool",
"host_dbus": "bool",
"privileged": ["NET_ADMIN", "SYS_ADMIN"],
"apparmor": "disable|default|profile",
"devices": ["/dev/xy"],
"auto_uart": "bool",
"icon": "bool",
"logo": "bool",
"changelog": "bool",
"hassio_api": "bool",
"hassio_role": "default|homeassistant|manager|admin",
"homeassistant_api": "bool",
"auth_api": "bool",
"full_access": "bool",
"protected": "bool",
"rating": "1-6",
"stdin": "bool",
"webui": "null|http(s)://[HOST]:port/xy/zx",
"gpio": "bool",
"kernel_modules": "bool",
"devicetree": "bool",
"docker_api": "bool",
"audio": "bool",
"audio_input": "null|0,0",
"audio_output": "null|0,0"
"audio_output": "null|0,0",
"services_role": "['service:access']",
"discovery": "['service']"
}
```
- GET `/addons/{addon}/icon`
- GET `/addons/{addon}/logo`
- GET `/addons/{addon}/changelog`
- POST `/addons/{addon}/options`
```json
@@ -401,7 +530,17 @@ Get all available addons.
}
```
For reset custom network/audio settings, set it `null`.
Reset custom network/audio/options, set it `null`.
- POST `/addons/{addon}/security`
This function is not callable by itself.
```json
{
"protected": "bool",
}
```
- POST `/addons/{addon}/start`
@@ -409,71 +548,148 @@ For reset custom network/audio settings, set it `null`.
- POST `/addons/{addon}/install`
Optional:
```json
{
"version": "VERSION"
}
```
- POST `/addons/{addon}/uninstall`
- POST `/addons/{addon}/update`
Optional:
```json
{
"version": "VERSION"
}
```
- GET `/addons/{addon}/logs`
Output is the raw Docker log.
- POST `/addons/{addon}/restart`
## Host Control
- POST `/addons/{addon}/rebuild`
Communicate over UNIX socket with a host daemon.
Only supported for local build addons
- commands
- POST `/addons/{addon}/stdin`
```
# info
-> {'type', 'version', 'last_version', 'features', 'hostname'}
# reboot
# shutdown
# host-update [v]
Write data to add-on stdin
# hostname xy
# network info
-> {}
# network wlan ssd xy
# network wlan password xy
# network int ip xy
# network int netmask xy
# network int route xy
- GET `/addons/{addon}/stats`
```json
{
"cpu_percent": 0.0,
"memory_usage": 283123,
"memory_limit": 329392,
"network_tx": 0,
"network_rx": 0,
"blk_read": 0,
"blk_write": 0
}
```
Features:
### discovery
- shutdown
- reboot
- update
- hostname
- network_info
- network_control
Answer:
```
{}|OK|ERROR|WRONG
- GET `/discovery`
```json
{
"discovery": [
{
"addon": "slug",
"service": "name",
"uuid": "uuid",
"config": {}
}
]
}
```
- {}: json
- OK: call was successfully
- ERROR: error on call
- WRONG: not supported
- GET `/discovery/{UUID}`
```json
{
"addon": "slug",
"service": "name",
"uuid": "uuid",
"config": {}
}
```
- POST `/discovery`
```json
{
"service": "name",
"config": {}
}
```
return:
```json
{
"uuid": "uuid"
}
```
- DEL `/discovery/{UUID}`
### Services
- GET `/services`
```json
{
"services": [
{
"slug": "name",
"available": "bool",
"providers": "list"
}
]
}
```
#### MQTT
- GET `/services/mqtt`
```json
{
"addon": "name",
"host": "xy",
"port": "8883",
"ssl": "bool",
"username": "optional",
"password": "optional",
"protocol": "3.1.1"
}
```
- POST `/services/mqtt`
```json
{
"host": "xy",
"port": "8883",
"ssl": "bool|optional",
"username": "optional",
"password": "optional",
"protocol": "3.1.1"
}
```
- DEL `/services/mqtt`
### Misc
- GET `/info`
```json
{
"supervisor": "version",
"homeassistant": "version",
"hassos": "null|version",
"hostname": "name",
"machine": "type",
"arch": "arch",
"supported_arch": ["arch1", "arch2"],
"channel": "stable|beta|dev"
}
```
### Auth / SSO API
You can use the Home Assistant user system; the supervisor handles this auth system.
You can call POST `/auth`.
We support:
- Json `{ "user|name": "...", "password": "..." }`
- application/x-www-form-urlencoded `user|name=...&password=...`
- BasicAuth
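
For reference, a minimal client sketch against the endpoints documented above. It assumes an add-on reaches the supervisor at `http://hassio`, gets its token from the `HASSIO_TOKEN` environment variable, uses the `requests` library, and that responses arrive in the usual `{"result": ..., "data": ...}` envelope; names and values are illustrative only.

```python
# Minimal sketch: calling the Hass.io supervisor API from an add-on.
# Assumptions (illustrative, not taken from this page): base URL http://hassio,
# token in HASSIO_TOKEN, `requests` installed, responses wrapped in
# {"result": ..., "data": ...}.
import os

import requests

BASE = "http://hassio"
HEADERS = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}

# /supervisor/ping needs no token
print(requests.get(f"{BASE}/supervisor/ping").json())

# Authenticated call: supervisor info (version, channel, installed add-ons, ...)
info = requests.get(f"{BASE}/supervisor/info", headers=HEADERS).json()
print(info["data"]["version"], info["data"]["channel"])

# Create a password-protected full snapshot; the response carries the new slug
snapshot = requests.post(
    f"{BASE}/snapshots/new/full",
    headers=HEADERS,
    json={"name": "manual backup", "password": "example"},
).json()
print("snapshot slug:", snapshot["data"]["slug"])
```

From inside an add-on container the same token also works against the `/addons/self/...` endpoints listed above.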

33
Dockerfile Normal file

@@ -0,0 +1,33 @@
ARG BUILD_FROM
FROM $BUILD_FROM

# Install base
RUN apk add --no-cache \
    openssl \
    libffi \
    musl \
    git \
    socat \
    glib \
    libstdc++ \
    eudev-libs

# Install requirements
COPY requirements.txt /usr/src/
RUN apk add --no-cache --virtual .build-dependencies \
        make \
        g++ \
        openssl-dev \
        libffi-dev \
        musl-dev \
    && export MAKEFLAGS="-j$(nproc)" \
    && pip3 install --no-cache-dir -r /usr/src/requirements.txt \
    && apk del .build-dependencies \
    && rm -f /usr/src/requirements.txt

# Install HassIO
COPY . /usr/src/hassio
RUN pip3 install --no-cache-dir /usr/src/hassio \
    && rm -rf /usr/src/hassio

CMD [ "python3", "-m", "hassio" ]


@@ -1,14 +1,28 @@
# Hass.io
### First private cloud solution for home automation.
## First private cloud solution for home automation
Hass.io is a Docker based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software.
Hass.io is a Docker-based system for managing your Home Assistant installation
and related applications. The system is controlled via Home Assistant which
communicates with the Supervisor. The Supervisor provides an API to manage the
installation. This includes changing network settings or installing
and updating software.
![](misc/hassio.png?raw=true)
- [Hass.io Addons](https://github.com/home-assistant/hassio-addons)
- [Hass.io Build](https://github.com/home-assistant/hassio-build)
## Installation
Installation instructions can be found at [https://home-assistant.io/hassio](https://home-assistant.io/hassio).
Installation instructions can be found at <https://home-assistant.io/hassio>.
## Development
The development of the supervisor is a bit tricky. Not difficult but tricky.
- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-build/tree/master/builder
- Go into a HassOS device or VM and pull your supervisor.
- Set the developer mode in `updater.json`
- Tag it as `homeassistant/xy-hassio-supervisor:latest`
- Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
- Test your changes
For small bug fixes or improvements, make a PR. For significant changes, open an RFC first.


@@ -1 +1 @@
"""Init file for HassIO."""
"""Init file for Hass.io."""


@@ -1,45 +1,59 @@
"""Main file for HassIO."""
"""Main file for Hass.io."""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging
import sys
import hassio.bootstrap as bootstrap
import hassio.core as core
from hassio import bootstrap
_LOGGER = logging.getLogger(__name__)
def initialize_event_loop():
"""Attempt to use uvloop."""
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
pass
return asyncio.get_event_loop()
# pylint: disable=invalid-name
if __name__ == "__main__":
bootstrap.initialize_logging()
if not bootstrap.check_environment():
exit(1)
# Init async event loop
loop = initialize_event_loop()
loop = asyncio.get_event_loop()
# Check if all information are available to setup Hass.io
if not bootstrap.check_environment():
sys.exit(1)
# init executor pool
executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
loop.set_default_executor(executor)
_LOGGER.info("Initialize Hassio setup")
config = bootstrap.initialize_system_data()
hassio = core.HassIO(loop, config)
_LOGGER.info("Initialize Hass.io setup")
coresys = loop.run_until_complete(bootstrap.initialize_coresys())
bootstrap.migrate_system_env(config)
bootstrap.migrate_system_env(coresys)
_LOGGER.info("Run Hassio setup")
loop.run_until_complete(hassio.setup())
_LOGGER.info("Setup HassIO")
loop.run_until_complete(coresys.core.setup())
_LOGGER.info("Start Hassio")
loop.call_soon_threadsafe(loop.create_task, hassio.start())
loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)
loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
loop.call_soon_threadsafe(bootstrap.reg_signal, loop)
_LOGGER.info("Run Hassio loop")
loop.run_forever()
try:
_LOGGER.info("Run Hass.io")
loop.run_forever()
finally:
_LOGGER.info("Stopping Hass.io")
loop.run_until_complete(coresys.core.stop())
executor.shutdown(wait=False)
loop.close()
_LOGGER.info("Cleanup system")
executor.shutdown(wait=False)
loop.close()
_LOGGER.info("Close Hassio")
sys.exit(hassio.exit_code)
_LOGGER.info("Close Hass.io")
sys.exit(0)


@@ -1,60 +1,72 @@
"""Init file for HassIO addons."""
"""Init file for Hass.io add-ons."""
import asyncio
import logging
from .addon import Addon
from .repository import Repository
from .data import Data
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO
from .data import AddonsData
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
class AddonManager(object):
"""Manage addons inside HassIO."""
class AddonManager(CoreSysAttributes):
"""Manage add-ons inside Hass.io."""
def __init__(self, config, loop, dock):
"""Initialize docker base wrapper."""
self.loop = loop
self.config = config
self.dock = dock
self.data = Data(config)
self.addons = {}
self.repositories = {}
def __init__(self, coresys):
"""Initialize Docker base wrapper."""
self.coresys = coresys
self.data = AddonsData(coresys)
self.addons_obj = {}
self.repositories_obj = {}
@property
def list_addons(self):
"""Return a list of all addons."""
return list(self.addons.values())
"""Return a list of all add-ons."""
return list(self.addons_obj.values())
@property
def list_installed(self):
"""Return a list of installed add-ons."""
return [addon for addon in self.addons_obj.values()
if addon.is_installed]
@property
def list_repositories(self):
"""Return list of addon repositories."""
return list(self.repositories.values())
"""Return list of add-on repositories."""
return list(self.repositories_obj.values())
def get(self, addon_slug):
"""Return a adddon from slug."""
return self.addons.get(addon_slug)
"""Return an add-on from slug."""
return self.addons_obj.get(addon_slug)
async def prepare(self):
"""Startup addon management."""
def from_token(self, token):
"""Return an add-on from Hass.io token."""
for addon in self.list_addons:
if addon.is_installed and token == addon.hassio_token:
return addon
return None
async def load(self):
"""Start up add-on management."""
self.data.reload()
# init hassio built-in repositories
# Init Hass.io built-in repositories
repositories = \
set(self.config.addons_repositories) | BUILTIN_REPOSITORIES
set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES
# init custom repositories & load addons
# Init custom repositories and load add-ons
await self.load_repositories(repositories)
async def reload(self):
"""Update addons from repo and reload list."""
"""Update add-ons from repository and reload list."""
tasks = [repository.update() for repository in
self.repositories.values()]
self.repositories_obj.values()]
if tasks:
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks)
# read data from repositories
self.data.reload()
@@ -65,69 +77,82 @@ class AddonManager(object):
async def load_repositories(self, list_repositories):
"""Add a new custom repository."""
new_rep = set(list_repositories)
old_rep = set(self.repositories)
old_rep = set(self.repositories_obj)
# add new repository
async def _add_repository(url):
"""Helper function to async add repository."""
repository = Repository(self.config, self.loop, self.data, url)
repository = Repository(self.coresys, url)
if not await repository.load():
_LOGGER.error("Can't load from repository %s", url)
return
self.repositories[url] = repository
self.repositories_obj[url] = repository
# don't add built-in repository to config
if url not in BUILTIN_REPOSITORIES:
self.config.add_addon_repository(url)
self.sys_config.add_addon_repository(url)
tasks = [_add_repository(url) for url in new_rep - old_rep]
if tasks:
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks)
# del new repository
for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
self.repositories.pop(url).remove()
self.config.drop_addon_repository(url)
self.repositories_obj.pop(url).remove()
self.sys_config.drop_addon_repository(url)
# update data
self.data.reload()
await self.load_addons()
async def load_addons(self):
"""Update/add internal addon store."""
"""Update/add internal add-on store."""
all_addons = set(self.data.system) | set(self.data.cache)
# calc diff
add_addons = all_addons - set(self.addons)
del_addons = set(self.addons) - all_addons
add_addons = all_addons - set(self.addons_obj)
del_addons = set(self.addons_obj) - all_addons
_LOGGER.info("Load addons: %d all - %d new - %d remove",
_LOGGER.info("Load add-ons: %d all - %d new - %d remove",
len(all_addons), len(add_addons), len(del_addons))
# new addons
tasks = []
for addon_slug in add_addons:
addon = Addon(
self.config, self.loop, self.dock, self.data, addon_slug)
addon = Addon(self.coresys, addon_slug)
tasks.append(addon.load())
self.addons[addon_slug] = addon
self.addons_obj[addon_slug] = addon
if tasks:
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks)
# remove
for addon_slug in del_addons:
self.addons.pop(addon_slug)
self.addons_obj.pop(addon_slug)
async def auto_boot(self, stage):
"""Boot addons with mode auto."""
async def boot(self, stage):
"""Boot add-ons with mode auto."""
tasks = []
for addon in self.addons.values():
for addon in self.addons_obj.values():
if addon.is_installed and addon.boot == BOOT_AUTO and \
addon.startup == stage:
tasks.append(addon.start())
_LOGGER.info("Startup %s run %d addons", stage, len(tasks))
_LOGGER.info("Startup %s run %d add-ons", stage, len(tasks))
if tasks:
await asyncio.wait(tasks, loop=self.loop)
await asyncio.wait(tasks)
await asyncio.sleep(self.sys_config.wait_boot)
async def shutdown(self, stage):
"""Shutdown addons."""
tasks = []
for addon in self.addons_obj.values():
if addon.is_installed and \
await addon.state() == STATE_STARTED and \
addon.startup == stage:
tasks.append(addon.stop())
_LOGGER.info("Shutdown %s stop %d add-ons", stage, len(tasks))
if tasks:
await asyncio.wait(tasks)

File diff suppressed because it is too large.

hassio/addons/build.py (new file, 82 lines)

@@ -0,0 +1,82 @@
"""Hass.io add-on build environment."""
from __future__ import annotations
from pathlib import Path
from typing import TYPE_CHECKING, Dict
from ..const import ATTR_ARGS, ATTR_BUILD_FROM, ATTR_SQUASH, META_ADDON
from ..coresys import CoreSys, CoreSysAttributes
from ..utils.json import JsonConfig
from .validate import SCHEMA_BUILD_CONFIG
if TYPE_CHECKING:
from .addon import Addon
class AddonBuild(JsonConfig, CoreSysAttributes):
"""Handle build options for add-ons."""
def __init__(self, coresys: CoreSys, slug: str) -> None:
"""Initialize Hass.io add-on builder."""
self.coresys: CoreSys = coresys
self._id: str = slug
super().__init__(
Path(self.addon.path_location, 'build.json'), SCHEMA_BUILD_CONFIG)
def save_data(self):
"""Ignore save function."""
@property
def addon(self) -> Addon:
"""Return add-on of build data."""
return self.sys_addons.get(self._id)
@property
def base_image(self) -> str:
"""Base images for this add-on."""
return self._data[ATTR_BUILD_FROM].get(
self.sys_arch.default,
f"homeassistant/{self.sys_arch.default}-base:latest")
@property
def squash(self) -> bool:
"""Return True or False if squash is active."""
return self._data[ATTR_SQUASH]
@property
def additional_args(self) -> Dict[str, str]:
"""Return additional Docker build arguments."""
return self._data[ATTR_ARGS]
def get_docker_args(self, version):
"""Create a dict with Docker build arguments."""
args = {
'path': str(self.addon.path_location),
'tag': f"{self.addon.image}:{version}",
'pull': True,
'forcerm': True,
'squash': self.squash,
'labels': {
'io.hass.version': version,
'io.hass.arch': self.sys_arch.default,
'io.hass.type': META_ADDON,
'io.hass.name': self._fix_label('name'),
'io.hass.description': self._fix_label('description'),
},
'buildargs': {
'BUILD_FROM': self.base_image,
'BUILD_VERSION': version,
'BUILD_ARCH': self.sys_arch.default,
**self.additional_args,
}
}
if self.addon.url:
args['labels']['io.hass.url'] = self.addon.url
return args
def _fix_label(self, label_name: str) -> str:
"""Remove characters they are not supported."""
label = getattr(self.addon, label_name, "")
return label.replace("'", "")


@@ -1,12 +1,12 @@
{
"local": {
"name": "Local Add-Ons",
"name": "Local add-ons",
"url": "https://home-assistant.io/hassio",
"maintainer": "By our self"
"maintainer": "you"
},
"core": {
"name": "Built-in Add-Ons",
"name": "Official add-ons",
"url": "https://home-assistant.io/addons",
"maintainer": "Home Assistant authors"
"maintainer": "Home Assistant"
}
}


@@ -1,81 +1,78 @@
"""Init file for HassIO addons."""
import copy
"""Init file for Hass.io add-on data."""
import logging
import json
from pathlib import Path
import re
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .util import extract_hash_from_path
from .validate import (
SCHEMA_ADDON_CONFIG, SCHEMA_ADDON_FILE, SCHEMA_REPOSITORY_CONFIG,
MAP_VOLUME)
from ..const import (
FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
from ..tools import JsonConfig, read_json_file
ATTR_LOCATON,
ATTR_REPOSITORY,
ATTR_SLUG,
ATTR_SYSTEM,
ATTR_USER,
FILE_HASSIO_ADDONS,
REPOSITORY_CORE,
REPOSITORY_LOCAL,
)
from ..coresys import CoreSysAttributes
from ..exceptions import JsonFileError
from ..utils.json import JsonConfig, read_json_file
from .utils import extract_hash_from_path
from .validate import SCHEMA_ADDON_CONFIG, SCHEMA_ADDONS_FILE, SCHEMA_REPOSITORY_CONFIG
_LOGGER = logging.getLogger(__name__)
RE_VOLUME = re.compile(MAP_VOLUME)
class AddonsData(JsonConfig, CoreSysAttributes):
"""Hold data for Add-ons inside Hass.io."""
class Data(JsonConfig):
"""Hold data for addons inside HassIO."""
def __init__(self, config):
def __init__(self, coresys):
"""Initialize data holder."""
super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDON_FILE)
self.config = config
super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDONS_FILE)
self.coresys = coresys
self._repositories = {}
self._cache = {}
@property
def user(self):
"""Return local addon user data."""
"""Return local add-on user data."""
return self._data[ATTR_USER]
@property
def system(self):
"""Return local addon data."""
"""Return local add-on data."""
return self._data[ATTR_SYSTEM]
@property
def cache(self):
"""Return addon data from cache/repositories."""
"""Return add-on data from cache/repositories."""
return self._cache
@property
def repositories(self):
"""Return addon data from repositories."""
"""Return add-on data from repositories."""
return self._repositories
def reload(self):
"""Read data from addons repository."""
"""Read data from add-on repository."""
self._cache = {}
self._repositories = {}
# read core repository
self._read_addons_folder(
self.config.path_addons_core, REPOSITORY_CORE)
self._read_addons_folder(self.sys_config.path_addons_core, REPOSITORY_CORE)
# read local repository
self._read_addons_folder(
self.config.path_addons_local, REPOSITORY_LOCAL)
self._read_addons_folder(self.sys_config.path_addons_local, REPOSITORY_LOCAL)
# add built-in repositories information
self._set_builtin_repositories()
# read custom git repositories
for repository_element in self.config.path_addons_git.iterdir():
for repository_element in self.sys_config.path_addons_git.iterdir():
if repository_element.is_dir():
self._read_git_repository(repository_element)
# update local data
self._merge_config()
def _read_git_repository(self, path):
"""Process a custom repository folder."""
slug = extract_hash_from_path(path)
@@ -83,15 +80,12 @@ class Data(JsonConfig):
# exists repository json
repository_file = Path(path, "repository.json")
try:
repository_info = SCHEMA_REPOSITORY_CONFIG(
read_json_file(repository_file)
repository_info = SCHEMA_REPOSITORY_CONFIG(read_json_file(repository_file))
except JsonFileError:
_LOGGER.warning(
"Can't read repository information from %s", repository_file
)
except (OSError, json.JSONDecodeError):
_LOGGER.warning("Can't read repository information from %s",
repository_file)
return
except vol.Invalid:
_LOGGER.warning("Repository parse error %s", repository_file)
return
@@ -101,65 +95,42 @@ class Data(JsonConfig):
self._read_addons_folder(path, slug)
def _read_addons_folder(self, path, repository):
"""Read data from addons folder."""
"""Read data from add-ons folder."""
for addon in path.glob("**/config.json"):
try:
addon_config = read_json_file(addon)
except JsonFileError:
_LOGGER.warning("Can't read %s from repository %s", addon, repository)
continue
# validate
# validate
try:
addon_config = SCHEMA_ADDON_CONFIG(addon_config)
# Generate slug
addon_slug = "{}_{}".format(
repository, addon_config[ATTR_SLUG])
# store
addon_config[ATTR_REPOSITORY] = repository
addon_config[ATTR_LOCATON] = str(addon.parent)
self._cache[addon_slug] = addon_config
except (OSError, json.JSONDecodeError):
_LOGGER.warning("Can't read %s", addon)
except vol.Invalid as ex:
_LOGGER.warning("Can't read %s -> %s", addon,
humanize_error(addon_config, ex))
_LOGGER.warning(
"Can't read %s: %s", addon, humanize_error(addon_config, ex)
)
continue
# Generate slug
addon_slug = "{}_{}".format(repository, addon_config[ATTR_SLUG])
# store
addon_config[ATTR_REPOSITORY] = repository
addon_config[ATTR_LOCATON] = str(addon.parent)
self._cache[addon_slug] = addon_config
def _set_builtin_repositories(self):
"""Add local built-in repository into dataset."""
try:
builtin_file = Path(__file__).parent.joinpath('built-in.json')
builtin_file = Path(__file__).parent.joinpath("built-in.json")
builtin_data = read_json_file(builtin_file)
except (OSError, json.JSONDecodeError) as err:
_LOGGER.warning("Can't read built-in.json -> %s", err)
except JsonFileError:
_LOGGER.warning("Can't read built-in json")
return
# core repository
self._repositories[REPOSITORY_CORE] = \
builtin_data[REPOSITORY_CORE]
self._repositories[REPOSITORY_CORE] = builtin_data[REPOSITORY_CORE]
# local repository
self._repositories[REPOSITORY_LOCAL] = \
builtin_data[REPOSITORY_LOCAL]
def _merge_config(self):
"""Update local config if they have update.
It need to be the same version as the local version is for merge.
"""
have_change = False
for addon in set(self.system):
# detached
if addon not in self._cache:
continue
cache = self._cache[addon]
data = self.system[addon]
if data[ATTR_VERSION] == cache[ATTR_VERSION]:
if data != cache:
self.system[addon] = copy.deepcopy(cache)
have_change = True
if have_change:
self.save()
self._repositories[REPOSITORY_LOCAL] = builtin_data[REPOSITORY_LOCAL]


@@ -1,4 +1,4 @@
"""Init file for HassIO addons git."""
"""Init file for Hass.io add-on Git."""
import asyncio
import logging
import functools as ft
@@ -7,104 +7,149 @@ import shutil
import git
from .util import get_hash_from_repository
from ..const import URL_HASSIO_ADDONS
from .utils import get_hash_from_repository
from ..const import URL_HASSIO_ADDONS, ATTR_URL, ATTR_BRANCH
from ..coresys import CoreSysAttributes
from ..validate import RE_REPOSITORY
_LOGGER = logging.getLogger(__name__)
class GitRepo(object):
"""Manage addons git repo."""
class GitRepo(CoreSysAttributes):
"""Manage Add-on Git repository."""
def __init__(self, config, loop, path, url):
"""Initialize git base wrapper."""
self.config = config
self.loop = loop
def __init__(self, coresys, path, url):
"""Initialize Git base wrapper."""
self.coresys = coresys
self.repo = None
self.path = path
self.url = url
self._lock = asyncio.Lock(loop=loop)
self.lock = asyncio.Lock(loop=coresys.loop)
self._data = RE_REPOSITORY.match(url).groupdict()
@property
def url(self):
"""Return repository URL."""
return self._data[ATTR_URL]
@property
def branch(self):
"""Return repository branch."""
return self._data[ATTR_BRANCH]
async def load(self):
"""Init git addon repo."""
"""Init Git add-on repository."""
if not self.path.is_dir():
return await self.clone()
async with self._lock:
async with self.lock:
try:
_LOGGER.info("Load addon %s repository", self.path)
self.repo = await self.loop.run_in_executor(
None, git.Repo, str(self.path))
_LOGGER.info("Load add-on %s repository", self.path)
self.repo = await self.sys_run_in_executor(
git.Repo, str(self.path))
except (git.InvalidGitRepositoryError, git.NoSuchPathError,
git.GitCommandError) as err:
_LOGGER.error("Can't load %s repo: %s.", self.path, err)
self._remove()
return False
return True
async def clone(self):
"""Clone git addon repo."""
async with self._lock:
"""Clone git add-on repository."""
async with self.lock:
git_args = {
attribute: value
for attribute, value in (
('recursive', True),
('branch', self.branch),
('depth', 1),
('shallow-submodules', True)
) if value is not None
}
try:
_LOGGER.info("Clone addon %s repository", self.url)
self.repo = await self.loop.run_in_executor(
None, ft.partial(
git.Repo.clone_from, self.url, str(self.path),
recursive=True))
_LOGGER.info("Clone add-on %s repository", self.url)
self.repo = await self.sys_run_in_executor(ft.partial(
git.Repo.clone_from, self.url, str(self.path),
**git_args
))
except (git.InvalidGitRepositoryError, git.NoSuchPathError,
git.GitCommandError) as err:
_LOGGER.error("Can't clone %s repo: %s.", self.url, err)
_LOGGER.error("Can't clone %s repository: %s.", self.url, err)
self._remove()
return False
return True
async def pull(self):
"""Pull git addon repo."""
if self._lock.locked():
_LOGGER.warning("It is already a task in progress.")
"""Pull Git add-on repo."""
if self.lock.locked():
_LOGGER.warning("It is already a task in progress")
return False
async with self._lock:
async with self.lock:
_LOGGER.info("Update add-on %s repository", self.url)
branch = self.repo.active_branch.name
try:
_LOGGER.info("Pull addon %s repository", self.url)
await self.loop.run_in_executor(
None, self.repo.remotes.origin.pull)
# Download data
await self.sys_run_in_executor(ft.partial(
self.repo.remotes.origin.fetch, **{
'update-shallow': True,
'depth': 1,
}))
# Jump on top of that
await self.sys_run_in_executor(ft.partial(
self.repo.git.reset, f"origin/{branch}", hard=True))
# Cleanup old data
await self.sys_run_in_executor(ft.partial(
self.repo.git.clean, "-xdf"))
except (git.InvalidGitRepositoryError, git.NoSuchPathError,
git.exc.GitCommandError) as err:
_LOGGER.error("Can't pull %s repo: %s.", self.url, err)
git.GitCommandError) as err:
_LOGGER.error("Can't update %s repo: %s.", self.url, err)
return False
return True
def _remove(self):
"""Remove a repository."""
if not self.path.is_dir():
return
def log_err(funct, path, _):
"""Log error."""
_LOGGER.warning("Can't remove %s", path)
shutil.rmtree(str(self.path), onerror=log_err)
class GitRepoHassIO(GitRepo):
"""HassIO addons repository."""
"""Hass.io add-ons repository."""
def __init__(self, config, loop):
"""Initialize git hassio addon repository."""
def __init__(self, coresys):
"""Initialize Git Hass.io add-on repository."""
super().__init__(
config, loop, config.path_addons_core, URL_HASSIO_ADDONS)
coresys, coresys.config.path_addons_core, URL_HASSIO_ADDONS)
class GitRepoCustom(GitRepo):
"""Custom addons repository."""
"""Custom add-ons repository."""
def __init__(self, config, loop, url):
"""Initialize git hassio addon repository."""
path = Path(config.path_addons_git, get_hash_from_repository(url))
def __init__(self, coresys, url):
"""Initialize custom Git Hass.io addo-n repository."""
path = Path(
coresys.config.path_addons_git,
get_hash_from_repository(url))
super().__init__(config, loop, path, url)
super().__init__(coresys, path, url)
def remove(self):
"""Remove a custom addon."""
if self.path.is_dir():
_LOGGER.info("Remove custom addon repository %s", self.url)
def log_err(funct, path, _):
"""Log error."""
_LOGGER.warning("Can't remove %s", path)
shutil.rmtree(str(self.path), onerror=log_err)
"""Remove a custom repository."""
_LOGGER.info("Remove custom add-on repository %s", self.url)
self._remove()


@@ -1,18 +1,20 @@
"""Represent a HassIO repository."""
"""Represent a Hass.io repository."""
from .git import GitRepoHassIO, GitRepoCustom
from .util import get_hash_from_repository
from .utils import get_hash_from_repository
from ..const import (
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
UNKNOWN = 'unknown'
class Repository(object):
"""Repository in HassIO."""
class Repository(CoreSysAttributes):
"""Repository in Hass.io."""
def __init__(self, config, loop, data, repository):
def __init__(self, coresys, repository):
"""Initialize repository object."""
self.data = data
self.coresys = coresys
self.source = None
self.git = None
@@ -20,16 +22,16 @@ class Repository(object):
self._id = repository
elif repository == REPOSITORY_CORE:
self._id = repository
self.git = GitRepoHassIO(config, loop)
self.git = GitRepoHassIO(coresys)
else:
self._id = get_hash_from_repository(repository)
self.git = GitRepoCustom(config, loop, repository)
self.git = GitRepoCustom(coresys, repository)
self.source = repository
@property
def _mesh(self):
"""Return data struct repository."""
return self.data.repositories.get(self._id, {})
return self.sys_addons.data.repositories.get(self._id, {})
@property
def slug(self):
@@ -43,7 +45,7 @@ class Repository(object):
@property
def url(self):
"""Return url of repository."""
"""Return URL of repository."""
return self._mesh.get(ATTR_URL, self.source)
@property
@@ -58,14 +60,14 @@ class Repository(object):
return True
async def update(self):
"""Update addon repository."""
"""Update add-on repository."""
if self.git:
return await self.git.pull()
return True
def remove(self):
"""Remove addon repository."""
"""Remove add-on repository."""
if self._id in (REPOSITORY_CORE, REPOSITORY_LOCAL):
raise RuntimeError("Can't remove built-in repositories!")
raise APIError("Can't remove built-in repositories!")
self.git.remove()


@@ -1,35 +0,0 @@
"""Util addons functions."""
import hashlib
import logging
import re
RE_SHA1 = re.compile(r"[a-f0-9]{8}")
_LOGGER = logging.getLogger(__name__)
def get_hash_from_repository(name):
"""Generate a hash from repository."""
key = name.lower().encode()
return hashlib.sha1(key).hexdigest()[:8]
def extract_hash_from_path(path):
"""Extract repo id from path."""
repo_dir = path.parts[-1]
if not RE_SHA1.match(repo_dir):
return get_hash_from_repository(repo_dir)
return repo_dir
def check_installed(method):
"""Wrap function with check if addon is installed."""
async def wrap_check(addon, *args, **kwargs):
"""Return False if not installed or the function."""
if not addon.is_installed:
_LOGGER.error("Addon %s is not installed", addon.slug)
return False
return await method(addon, *args, **kwargs)
return wrap_check

hassio/addons/utils.py (new file, 130 lines)

@@ -0,0 +1,130 @@
"""Util add-ons functions."""
from __future__ import annotations
import asyncio
import hashlib
import logging
from pathlib import Path
import re
from typing import TYPE_CHECKING
from ..const import (
PRIVILEGED_DAC_READ_SEARCH,
PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN,
PRIVILEGED_SYS_MODULE,
PRIVILEGED_SYS_PTRACE,
PRIVILEGED_SYS_RAWIO,
ROLE_ADMIN,
ROLE_MANAGER,
SECURITY_DISABLE,
SECURITY_PROFILE,
)
if TYPE_CHECKING:
from .addon import Addon
RE_SHA1 = re.compile(r"[a-f0-9]{8}")
_LOGGER = logging.getLogger(__name__)
def rating_security(addon: Addon) -> int:
"""Return 1-6 for security rating.
1 = not secure
6 = highly secure
"""
rating = 5
# AppArmor
if addon.apparmor == SECURITY_DISABLE:
rating += -1
elif addon.apparmor == SECURITY_PROFILE:
rating += 1
# Home Assistant Login
if addon.access_auth_api:
rating += 1
# Privileged options
if any(
privilege in addon.privileged
for privilege in (
PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN,
PRIVILEGED_SYS_RAWIO,
PRIVILEGED_SYS_PTRACE,
PRIVILEGED_SYS_MODULE,
PRIVILEGED_DAC_READ_SEARCH,
)
):
rating += -1
# API Hass.io role
if addon.hassio_role == ROLE_MANAGER:
rating += -1
elif addon.hassio_role == ROLE_ADMIN:
rating += -2
# Not secure Networking
if addon.host_network:
rating += -1
# Insecure PID namespace
if addon.host_pid:
rating += -2
# Full Access
if addon.with_full_access:
rating += -2
# Docker Access
if addon.access_docker_api:
rating = 1
return max(min(6, rating), 1)
def get_hash_from_repository(name: str) -> str:
"""Generate a hash from repository."""
key = name.lower().encode()
return hashlib.sha1(key).hexdigest()[:8]
def extract_hash_from_path(path: Path) -> str:
"""Extract repo id from path."""
repository_dir = path.parts[-1]
if not RE_SHA1.match(repository_dir):
return get_hash_from_repository(repository_dir)
return repository_dir
def check_installed(method):
"""Wrap function with check if add-on is installed."""
async def wrap_check(addon, *args, **kwargs):
"""Return False if not installed or the function."""
if not addon.is_installed:
_LOGGER.error("Addon %s is not installed", addon.slug)
return False
return await method(addon, *args, **kwargs)
return wrap_check
async def remove_data(folder: Path) -> None:
"""Remove folder and reset privileged."""
try:
proc = await asyncio.create_subprocess_exec(
"rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
)
_, error_msg = await proc.communicate()
except OSError as err:
error_msg = str(err)
else:
if proc.returncode == 0:
return
_LOGGER.error("Can't remove Add-on Data: %s", error_msg)


@@ -1,21 +1,35 @@
"""Validate addons options schema."""
"""Validate add-ons options schema."""
import logging
import re
import uuid
import voluptuous as vol
from ..const import (
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE,
STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE,
BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER,
ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF,
ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK,
ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API)
from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL
ARCH_ALL, ATTR_ACCESS_TOKEN, ATTR_APPARMOR, ATTR_ARCH, ATTR_ARGS,
ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_AUTH_API,
ATTR_AUTO_UART, ATTR_AUTO_UPDATE, ATTR_BOOT, ATTR_BUILD_FROM,
ATTR_DESCRIPTON, ATTR_DEVICES, ATTR_DEVICETREE, ATTR_DISCOVERY,
ATTR_DOCKER_API, ATTR_ENVIRONMENT, ATTR_FULL_ACCESS, ATTR_GPIO,
ATTR_HASSIO_API, ATTR_HASSIO_ROLE, ATTR_HOMEASSISTANT_API, ATTR_HOST_DBUS,
ATTR_HOST_IPC, ATTR_HOST_NETWORK, ATTR_HOST_PID, ATTR_IMAGE,
ATTR_KERNEL_MODULES, ATTR_LEGACY, ATTR_LOCATON, ATTR_MACHINE,
ATTR_MAINTAINER, ATTR_MAP, ATTR_NAME, ATTR_NETWORK, ATTR_OPTIONS,
ATTR_PORTS, ATTR_PRIVILEGED, ATTR_PROTECTED, ATTR_REPOSITORY, ATTR_SCHEMA,
ATTR_SERVICES, ATTR_SLUG, ATTR_SQUASH, ATTR_STARTUP, ATTR_STATE,
ATTR_STDIN, ATTR_SYSTEM, ATTR_TIMEOUT, ATTR_TMPFS, ATTR_URL, ATTR_USER,
ATTR_UUID, ATTR_VERSION, ATTR_WEBUI, BOOT_AUTO, BOOT_MANUAL,
PRIVILEGED_ALL, ROLE_ALL, ROLE_DEFAULT, STARTUP_ALL, STARTUP_APPLICATION,
STARTUP_SERVICES, STATE_STARTED, STATE_STOPPED)
from ..discovery.validate import valid_discovery_service
from ..validate import (
ALSA_DEVICE, DOCKER_PORTS, NETWORK_PORT, SHA256, UUID_MATCH)
_LOGGER = logging.getLogger(__name__)
MAP_VOLUME = r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$"
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt):(?P<rights>provide|want|need)$")
V_STR = 'str'
V_INT = 'int'
@@ -24,22 +38,29 @@ V_BOOL = 'bool'
V_EMAIL = 'email'
V_URL = 'url'
V_PORT = 'port'
V_MATCH = 'match'
ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL, V_PORT])
RE_SCHEMA_ELEMENT = re.compile(
r"^(?:"
r"|str|bool|email|url|port"
r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
r"|match\((?P<match>.*)\)"
r")\??$"
)
ARCH_ALL = [
ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386
]
RE_DOCKER_IMAGE = re.compile(
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
RE_DOCKER_IMAGE_BUILD = re.compile(
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$")
STARTUP_ALL = [
STARTUP_ONCE, STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES,
STARTUP_APPLICATION
]
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
PRIVILEGED_ALL = [
"NET_ADMIN",
"SYS_ADMIN",
"SYS_RAWIO"
MACHINE_ALL = [
'intel-nuc', 'odroid-c2', 'odroid-xu', 'orangepi-prime', 'qemux86',
'qemux86-64', 'qemuarm', 'qemuarm-64', 'raspberrypi', 'raspberrypi2',
'raspberrypi3', 'raspberrypi3-64', 'tinker',
]
@@ -58,34 +79,58 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
vol.Required(ATTR_VERSION): vol.Coerce(str),
vol.Required(ATTR_SLUG): vol.Coerce(str),
vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
vol.Optional(ATTR_URL): vol.Url(),
vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
vol.Required(ATTR_STARTUP):
vol.All(_simple_startup, vol.In(STARTUP_ALL)),
vol.Required(ATTR_BOOT):
vol.In([BOOT_AUTO, BOOT_MANUAL]),
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
vol.Optional(ATTR_WEBUI):
vol.Match(r"^(?:https?):\/\/\[HOST\]:\[PORT:\d+\].*$"),
vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
vol.Optional(ATTR_TMPFS):
vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
vol.Optional(ATTR_MAP, default=[]): [vol.Match(MAP_VOLUME)],
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
vol.Required(ATTR_OPTIONS): dict,
vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
vol.Any(ADDON_ELEMENT, {vol.Coerce(str): ADDON_ELEMENT})
], vol.Schema({vol.Coerce(str): ADDON_ELEMENT}))
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
vol.Any(
SCHEMA_ELEMENT,
{vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
),
], vol.Schema({
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
}))
}), False),
vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
vol.Optional(ATTR_IMAGE):
vol.Match(RE_DOCKER_IMAGE),
vol.Optional(ATTR_TIMEOUT, default=10):
vol.All(vol.Coerce(int), vol.Range(min=10, max=120))
}, extra=vol.ALLOW_EXTRA)
vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
}, extra=vol.REMOVE_EXTRA)
# pylint: disable=no-value-for-parameter
@@ -93,20 +138,36 @@ SCHEMA_REPOSITORY_CONFIG = vol.Schema({
vol.Required(ATTR_NAME): vol.Coerce(str),
vol.Optional(ATTR_URL): vol.Url(),
vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
}, extra=vol.ALLOW_EXTRA)
}, extra=vol.REMOVE_EXTRA)
# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema({
vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema({
vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD),
}),
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
vol.Coerce(str): vol.Coerce(str)
}),
}, extra=vol.REMOVE_EXTRA)
# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema({
vol.Required(ATTR_VERSION): vol.Coerce(str),
vol.Optional(ATTR_OPTIONS, default={}): dict,
vol.Optional(ATTR_IMAGE): vol.Coerce(str),
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
vol.Optional(ATTR_ACCESS_TOKEN): SHA256,
vol.Optional(ATTR_OPTIONS, default=dict): dict,
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
vol.Optional(ATTR_BOOT):
vol.In([BOOT_AUTO, BOOT_MANUAL]),
vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
})
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
}, extra=vol.REMOVE_EXTRA)
SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
@@ -115,11 +176,11 @@ SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
})
SCHEMA_ADDON_FILE = vol.Schema({
vol.Optional(ATTR_USER, default={}): {
SCHEMA_ADDONS_FILE = vol.Schema({
vol.Optional(ATTR_USER, default=dict): {
vol.Coerce(str): SCHEMA_ADDON_USER,
},
vol.Optional(ATTR_SYSTEM, default={}): {
vol.Optional(ATTR_SYSTEM, default=dict): {
vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
}
})
@@ -130,19 +191,21 @@ SCHEMA_ADDON_SNAPSHOT = vol.Schema({
vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
vol.Required(ATTR_VERSION): vol.Coerce(str),
})
}, extra=vol.REMOVE_EXTRA)
def validate_options(raw_schema):
"""Validate schema."""
def validate(struct):
"""Create schema validator for addons options."""
"""Create schema validator for add-ons options."""
options = {}
# read options
for key, value in struct.items():
# Ignore unknown options / remove from list
if key not in raw_schema:
raise vol.Invalid("Unknown options {}.".format(key))
_LOGGER.warning("Unknown options %s", key)
continue
typ = raw_schema[key]
try:
@@ -156,41 +219,50 @@ def validate_options(raw_schema):
# normal value
options[key] = _single_validate(typ, value, key)
except (IndexError, KeyError):
raise vol.Invalid(
"Type error for {}.".format(key)) from None
raise vol.Invalid(f"Type error for {key}") from None
_check_missing_options(raw_schema, options, 'root')
return options
return validate
# pylint: disable=no-value-for-parameter
# pylint: disable=inconsistent-return-statements
def _single_validate(typ, value, key):
"""Validate a single element."""
try:
# if required argument
if value is None:
raise vol.Invalid("Missing required option '{}'.".format(key))
# if required argument
if value is None:
raise vol.Invalid(f"Missing required option '{key}'")
if typ == V_STR:
return str(value)
elif typ == V_INT:
return int(value)
elif typ == V_FLOAT:
return float(value)
elif typ == V_BOOL:
return vol.Boolean()(value)
elif typ == V_EMAIL:
return vol.Email()(value)
elif typ == V_URL:
return vol.Url()(value)
elif typ == V_PORT:
return NETWORK_PORT(value)
# parse extend data from type
match = RE_SCHEMA_ELEMENT.match(typ)
raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
except ValueError:
raise vol.Invalid(
"Type {} error for '{}' on {}.".format(typ, value, key)) from None
# prepare range
range_args = {}
for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
group_value = match.group(group_name)
if group_value:
range_args[group_name[2:]] = float(group_value)
if typ.startswith(V_STR):
return str(value)
elif typ.startswith(V_INT):
return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
elif typ.startswith(V_FLOAT):
return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
elif typ.startswith(V_BOOL):
return vol.Boolean()(value)
elif typ.startswith(V_EMAIL):
return vol.Email()(value)
elif typ.startswith(V_URL):
return vol.Url()(value)
elif typ.startswith(V_PORT):
return NETWORK_PORT(value)
elif typ.startswith(V_MATCH):
return vol.Match(match.group('match'))(str(value))
raise vol.Invalid(f"Fatal error for {key} type {typ}")
def _nested_validate_list(typ, data_list, key):
@@ -198,17 +270,10 @@ def _nested_validate_list(typ, data_list, key):
options = []
for element in data_list:
# dict list
# Nested?
if isinstance(typ, dict):
c_options = {}
for c_key, c_value in element.items():
if c_key not in typ:
raise vol.Invalid(
"Unknown nested options {}".format(c_key))
c_options[c_key] = _single_validate(typ[c_key], c_value, c_key)
c_options = _nested_validate_dict(typ, element, key)
options.append(c_options)
# normal list
else:
options.append(_single_validate(typ, element, key))
@@ -220,9 +285,27 @@ def _nested_validate_dict(typ, data_dict, key):
options = {}
for c_key, c_value in data_dict.items():
# Ignore unknown options / remove from list
if c_key not in typ:
raise vol.Invalid("Unknow nested dict options {}".format(c_key))
_LOGGER.warning("Unknown options %s", c_key)
continue
options[c_key] = _single_validate(typ[c_key], c_value, c_key)
# Nested?
if isinstance(typ[c_key], list):
options[c_key] = _nested_validate_list(typ[c_key][0],
c_value, c_key)
else:
options[c_key] = _single_validate(typ[c_key], c_value, c_key)
_check_missing_options(typ, options, key)
return options
def _check_missing_options(origin, exists, root):
"""Check if all options are exists."""
missing = set(origin) - set(exists)
for miss_opt in missing:
if isinstance(origin[miss_opt], str) and \
origin[miss_opt].endswith("?"):
continue
raise vol.Invalid(f"Missing option {miss_opt} in {root}")


@@ -1,161 +1,284 @@
"""Init file for HassIO rest api."""
"""Init file for Hass.io RESTful API."""
import logging
from pathlib import Path
from typing import Optional
from aiohttp import web
from ..coresys import CoreSys, CoreSysAttributes
from .addons import APIAddons
from .auth import APIAuth
from .discovery import APIDiscovery
from .hardware import APIHardware
from .hassos import APIHassOS
from .homeassistant import APIHomeAssistant
from .host import APIHost
from .network import APINetwork
from .supervisor import APISupervisor
from .security import APISecurity
from .info import APIInfo
from .proxy import APIProxy
from .security import SecurityMiddleware
from .services import APIServices
from .snapshots import APISnapshots
from .supervisor import APISupervisor
_LOGGER = logging.getLogger(__name__)
class RestAPI(object):
"""Handle rest api for hassio."""
class RestAPI(CoreSysAttributes):
"""Handle RESTful API for Hass.io."""
def __init__(self, config, loop):
"""Initialize docker base wrapper."""
self.config = config
self.loop = loop
self.webapp = web.Application(loop=self.loop)
def __init__(self, coresys: CoreSys):
"""Initialize Docker base wrapper."""
self.coresys: CoreSys = coresys
self.security: SecurityMiddleware = SecurityMiddleware(coresys)
self.webapp: web.Application = web.Application(
middlewares=[self.security.token_validation])
# service stuff
self._handler = None
self.server = None
self._runner: web.AppRunner = web.AppRunner(self.webapp)
self._site: Optional[web.TCPSite] = None
def register_host(self, host_control, hardware):
"""Register hostcontrol function."""
api_host = APIHost(self.config, self.loop, host_control, hardware)
async def load(self) -> None:
"""Register REST API Calls."""
self._register_supervisor()
self._register_host()
self._register_hassos()
self._register_hardware()
self._register_homeassistant()
self._register_proxy()
self._register_panel()
self._register_addons()
self._register_snapshots()
self._register_discovery()
self._register_services()
self._register_info()
self._register_auth()
self.webapp.router.add_get('/host/info', api_host.info)
self.webapp.router.add_get('/host/hardware', api_host.hardware)
self.webapp.router.add_post('/host/reboot', api_host.reboot)
self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
self.webapp.router.add_post('/host/update', api_host.update)
self.webapp.router.add_post('/host/options', api_host.options)
def _register_host(self) -> None:
"""Register hostcontrol functions."""
api_host = APIHost()
api_host.coresys = self.coresys
def register_network(self, host_control):
"""Register network function."""
api_net = APINetwork(self.config, self.loop, host_control)
self.webapp.add_routes([
web.get('/host/info', api_host.info),
web.post('/host/reboot', api_host.reboot),
web.post('/host/shutdown', api_host.shutdown),
web.post('/host/reload', api_host.reload),
web.post('/host/options', api_host.options),
web.get('/host/services', api_host.services),
web.post('/host/services/{service}/stop', api_host.service_stop),
web.post('/host/services/{service}/start', api_host.service_start),
web.post('/host/services/{service}/restart',
api_host.service_restart),
web.post('/host/services/{service}/reload',
api_host.service_reload),
])
self.webapp.router.add_get('/network/info', api_net.info)
self.webapp.router.add_post('/network/options', api_net.options)
def _register_hassos(self) -> None:
"""Register HassOS functions."""
api_hassos = APIHassOS()
api_hassos.coresys = self.coresys
def register_supervisor(self, supervisor, snapshots, addons, host_control,
updater):
"""Register supervisor function."""
api_supervisor = APISupervisor(
self.config, self.loop, supervisor, snapshots, addons,
host_control, updater)
self.webapp.add_routes([
web.get('/hassos/info', api_hassos.info),
web.post('/hassos/update', api_hassos.update),
web.post('/hassos/update/cli', api_hassos.update_cli),
web.post('/hassos/config/sync', api_hassos.config_sync),
])
self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
self.webapp.router.add_post(
'/supervisor/update', api_supervisor.update)
self.webapp.router.add_post(
'/supervisor/reload', api_supervisor.reload)
self.webapp.router.add_post(
'/supervisor/options', api_supervisor.options)
self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)
def _register_hardware(self) -> None:
"""Register hardware functions."""
api_hardware = APIHardware()
api_hardware.coresys = self.coresys
def register_homeassistant(self, dock_homeassistant):
"""Register homeassistant function."""
api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)
self.webapp.add_routes([
web.get('/hardware/info', api_hardware.info),
web.get('/hardware/audio', api_hardware.audio),
])
self.webapp.router.add_get('/homeassistant/info', api_hass.info)
self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
self.webapp.router.add_post('/homeassistant/options', api_hass.options)
self.webapp.router.add_post('/homeassistant/update', api_hass.update)
self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
self.webapp.router.add_post('/homeassistant/check', api_hass.check)
def _register_info(self) -> None:
"""Register info functions."""
api_info = APIInfo()
api_info.coresys = self.coresys
def register_addons(self, addons):
"""Register homeassistant function."""
api_addons = APIAddons(self.config, self.loop, addons)
self.webapp.add_routes([
web.get('/info', api_info.info),
])
self.webapp.router.add_get('/addons', api_addons.list)
self.webapp.router.add_post('/addons/reload', api_addons.reload)
def _register_auth(self) -> None:
"""Register auth functions."""
api_auth = APIAuth()
api_auth.coresys = self.coresys
self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
self.webapp.router.add_post(
'/addons/{addon}/install', api_addons.install)
self.webapp.router.add_post(
'/addons/{addon}/uninstall', api_addons.uninstall)
self.webapp.router.add_post('/addons/{addon}/start', api_addons.start)
self.webapp.router.add_post('/addons/{addon}/stop', api_addons.stop)
self.webapp.router.add_post(
'/addons/{addon}/restart', api_addons.restart)
self.webapp.router.add_post(
'/addons/{addon}/update', api_addons.update)
self.webapp.router.add_post(
'/addons/{addon}/options', api_addons.options)
self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
self.webapp.add_routes([
web.post('/auth', api_auth.auth),
])
def register_security(self):
"""Register security function."""
api_security = APISecurity(self.config, self.loop)
def _register_supervisor(self) -> None:
"""Register Supervisor functions."""
api_supervisor = APISupervisor()
api_supervisor.coresys = self.coresys
self.webapp.router.add_get('/security/info', api_security.info)
self.webapp.router.add_post('/security/options', api_security.options)
self.webapp.router.add_post('/security/totp', api_security.totp)
self.webapp.router.add_post('/security/session', api_security.session)
self.webapp.add_routes([
web.get('/supervisor/ping', api_supervisor.ping),
web.get('/supervisor/info', api_supervisor.info),
web.get('/supervisor/stats', api_supervisor.stats),
web.get('/supervisor/logs', api_supervisor.logs),
web.post('/supervisor/update', api_supervisor.update),
web.post('/supervisor/reload', api_supervisor.reload),
web.post('/supervisor/options', api_supervisor.options),
])
def register_snapshots(self, snapshots):
"""Register snapshots function."""
api_snapshots = APISnapshots(self.config, self.loop, snapshots)
def _register_homeassistant(self) -> None:
"""Register Home Assistant functions."""
api_hass = APIHomeAssistant()
api_hass.coresys = self.coresys
self.webapp.router.add_get('/snapshots', api_snapshots.list)
self.webapp.router.add_post('/snapshots/reload', api_snapshots.reload)
self.webapp.add_routes([
web.get('/homeassistant/info', api_hass.info),
web.get('/homeassistant/logs', api_hass.logs),
web.get('/homeassistant/stats', api_hass.stats),
web.post('/homeassistant/options', api_hass.options),
web.post('/homeassistant/update', api_hass.update),
web.post('/homeassistant/restart', api_hass.restart),
web.post('/homeassistant/stop', api_hass.stop),
web.post('/homeassistant/start', api_hass.start),
web.post('/homeassistant/check', api_hass.check),
web.post('/homeassistant/rebuild', api_hass.rebuild),
])
self.webapp.router.add_post(
'/snapshots/new/full', api_snapshots.snapshot_full)
self.webapp.router.add_post(
'/snapshots/new/partial', api_snapshots.snapshot_partial)
def _register_proxy(self) -> None:
"""Register Home Assistant API Proxy."""
api_proxy = APIProxy()
api_proxy.coresys = self.coresys
self.webapp.router.add_get(
'/snapshots/{snapshot}/info', api_snapshots.info)
self.webapp.router.add_post(
'/snapshots/{snapshot}/remove', api_snapshots.remove)
self.webapp.router.add_post(
'/snapshots/{snapshot}/restore/full', api_snapshots.restore_full)
self.webapp.router.add_post(
'/snapshots/{snapshot}/restore/partial',
api_snapshots.restore_partial)
self.webapp.add_routes([
web.get('/homeassistant/api/websocket', api_proxy.websocket),
web.get('/homeassistant/websocket', api_proxy.websocket),
web.get('/homeassistant/api/stream', api_proxy.stream),
web.post('/homeassistant/api/{path:.+}', api_proxy.api),
web.get('/homeassistant/api/{path:.+}', api_proxy.api),
web.get('/homeassistant/api/', api_proxy.api),
])
def register_panel(self):
"""Register panel for homeassistant."""
panel = Path(__file__).parents[1].joinpath('panel/hassio-main.html')
def _register_addons(self) -> None:
"""Register Add-on functions."""
api_addons = APIAddons()
api_addons.coresys = self.coresys
def get_panel(request):
"""Return file response with panel."""
return web.FileResponse(panel)
self.webapp.add_routes([
web.get('/addons', api_addons.list),
web.post('/addons/reload', api_addons.reload),
web.get('/addons/{addon}/info', api_addons.info),
web.post('/addons/{addon}/install', api_addons.install),
web.post('/addons/{addon}/uninstall', api_addons.uninstall),
web.post('/addons/{addon}/start', api_addons.start),
web.post('/addons/{addon}/stop', api_addons.stop),
web.post('/addons/{addon}/restart', api_addons.restart),
web.post('/addons/{addon}/update', api_addons.update),
web.post('/addons/{addon}/options', api_addons.options),
web.post('/addons/{addon}/rebuild', api_addons.rebuild),
web.get('/addons/{addon}/logs', api_addons.logs),
web.get('/addons/{addon}/icon', api_addons.icon),
web.get('/addons/{addon}/logo', api_addons.logo),
web.get('/addons/{addon}/changelog', api_addons.changelog),
web.post('/addons/{addon}/stdin', api_addons.stdin),
web.post('/addons/{addon}/security', api_addons.security),
web.get('/addons/{addon}/stats', api_addons.stats),
])
self.webapp.router.add_get('/panel', get_panel)
def _register_snapshots(self) -> None:
"""Register snapshots functions."""
api_snapshots = APISnapshots()
api_snapshots.coresys = self.coresys
async def start(self):
"""Run rest api webserver."""
self._handler = self.webapp.make_handler(loop=self.loop)
self.webapp.add_routes([
web.get('/snapshots', api_snapshots.list),
web.post('/snapshots/reload', api_snapshots.reload),
web.post('/snapshots/new/full', api_snapshots.snapshot_full),
web.post('/snapshots/new/partial', api_snapshots.snapshot_partial),
web.post('/snapshots/new/upload', api_snapshots.upload),
web.get('/snapshots/{snapshot}/info', api_snapshots.info),
web.post('/snapshots/{snapshot}/remove', api_snapshots.remove),
web.post('/snapshots/{snapshot}/restore/full',
api_snapshots.restore_full),
web.post('/snapshots/{snapshot}/restore/partial',
api_snapshots.restore_partial),
web.get('/snapshots/{snapshot}/download', api_snapshots.download),
])
def _register_services(self) -> None:
"""Register services functions."""
api_services = APIServices()
api_services.coresys = self.coresys
self.webapp.add_routes([
web.get('/services', api_services.list),
web.get('/services/{service}', api_services.get_service),
web.post('/services/{service}', api_services.set_service),
web.delete('/services/{service}', api_services.del_service),
])
def _register_discovery(self) -> None:
"""Register discovery functions."""
api_discovery = APIDiscovery()
api_discovery.coresys = self.coresys
self.webapp.add_routes([
web.get('/discovery', api_discovery.list),
web.get('/discovery/{uuid}', api_discovery.get_discovery),
web.delete('/discovery/{uuid}', api_discovery.del_discovery),
web.post('/discovery', api_discovery.set_discovery),
])
def _register_panel(self) -> None:
"""Register panel for Home Assistant."""
panel_dir = Path(__file__).parent.joinpath("panel")
def create_response(panel_file):
"""Create a function to generate a response."""
path = panel_dir.joinpath(f"{panel_file!s}.html")
return lambda request: web.FileResponse(path)
# This route is for backwards compatibility with HA < 0.58
self.webapp.add_routes(
[web.get('/panel', create_response('hassio-main-es5'))])
# This route is for backwards compatibility with HA 0.58 - 0.61
self.webapp.add_routes([
web.get('/panel_es5', create_response('hassio-main-es5')),
web.get('/panel_latest', create_response('hassio-main-latest')),
])
# This route is for backwards compatibility with HA 0.62 - 0.70
self.webapp.add_routes([
web.get('/app-es5/index.html', create_response('index')),
web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
])
# This route is for HA > 0.70
self.webapp.add_routes([web.static('/app', panel_dir)])
async def start(self) -> None:
"""Run RESTful API webserver."""
await self._runner.setup()
self._site = web.TCPSite(
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5)
try:
self.server = await self.loop.create_server(
self._handler, "0.0.0.0", "80")
await self._site.start()
except OSError as err:
_LOGGER.fatal(
"Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
_LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s",
err)
else:
_LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
async def stop(self):
"""Stop rest api webserver."""
if self.server:
self.server.close()
await self.server.wait_closed()
await self.webapp.shutdown()
async def stop(self) -> None:
"""Stop RESTful API webserver."""
if not self._site:
return
if self._handler:
await self._handler.finish_connections(60)
await self.webapp.cleanup()
# Shutdown running API
await self._site.stop()
await self._runner.cleanup()
_LOGGER.info("Stop API on %s", self.sys_docker.network.supervisor)


@@ -1,11 +1,12 @@
"""Init file for HassIO homeassistant rest api."""
"""Init file for Hass.io Home Assistant RESTful API."""
import asyncio
import logging
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .util import api_process, api_process_raw, api_validate
from .utils import api_process, api_process_raw, api_validate
from ..addons.utils import rating_security
from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
@@ -13,8 +14,18 @@ from ..const import (
ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
BOOT_AUTO, BOOT_MANUAL, CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY)
from ..validate import DOCKER_PORTS
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
ATTR_DISCOVERY, ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API,
ATTR_FULL_ACCESS, ATTR_PROTECTED, ATTR_RATING, ATTR_HOST_PID,
ATTR_HASSIO_ROLE, ATTR_MACHINE, ATTR_AVAILABLE, ATTR_AUTH_API,
ATTR_KERNEL_MODULES,
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT, REQUEST_FROM)
from ..coresys import CoreSysAttributes
from ..validate import DOCKER_PORTS, ALSA_DEVICE
from ..exceptions import APIError
_LOGGER = logging.getLogger(__name__)
@@ -27,62 +38,58 @@ SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
})
# pylint: disable=no-value-for-parameter
SCHEMA_SECURITY = vol.Schema({
vol.Optional(ATTR_PROTECTED): vol.Boolean(),
})
class APIAddons(object):
"""Handle rest api for addons functions."""
def __init__(self, config, loop, addons):
"""Initialize homeassistant rest api part."""
self.config = config
self.loop = loop
self.addons = addons
class APIAddons(CoreSysAttributes):
"""Handle RESTful API for add-on functions."""
def _extract_addon(self, request, check_installed=True):
"""Return addon and if not exists trow a exception."""
addon = self.addons.get(request.match_info.get('addon'))
"""Return addon, throw an exception it it doesn't exist."""
addon_slug = request.match_info.get('addon')
# Lookup itself
if addon_slug == 'self':
return request.get(REQUEST_FROM)
addon = self.sys_addons.get(addon_slug)
if not addon:
raise RuntimeError("Addon not exists")
raise APIError("Addon does not exist")
if check_installed and not addon.is_installed:
raise RuntimeError("Addon is not installed")
raise APIError("Addon is not installed")
return addon
@staticmethod
def _pretty_devices(addon):
"""Return a simplified device list."""
dev_list = addon.devices
if not dev_list:
return
return [row.split(':')[0] for row in dev_list]
@api_process
async def list(self, request):
"""Return all addons / repositories ."""
"""Return all add-ons or repositories."""
data_addons = []
for addon in self.addons.list_addons:
for addon in self.sys_addons.list_addons:
data_addons.append({
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
ATTR_DESCRIPTON: addon.description,
ATTR_VERSION: addon.last_version,
ATTR_INSTALLED: addon.version_installed,
ATTR_ARCH: addon.supported_arch,
ATTR_AVAILABLE: addon.available,
ATTR_DETACHED: addon.is_detached,
ATTR_REPOSITORY: addon.repository,
ATTR_BUILD: addon.need_build,
ATTR_PRIVILEGED: addon.privileged,
ATTR_DEVICES: self._pretty_devices(addon),
ATTR_URL: addon.url,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
ATTR_HASSIO_API: addon.use_hassio_api,
ATTR_AUDIO: addon.with_audio,
})
data_repositories = []
for repository in self.addons.list_repositories:
for repository in self.sys_addons.list_repositories:
data_repositories.append({
ATTR_SLUG: repository.slug,
ATTR_NAME: repository.name,
@@ -98,47 +105,71 @@ class APIAddons(object):
@api_process
async def reload(self, request):
"""Reload all addons data."""
await asyncio.shield(self.addons.reload(), loop=self.loop)
"""Reload all add-on data."""
await asyncio.shield(self.sys_addons.reload())
return True
@api_process
async def info(self, request):
"""Return addon information."""
"""Return add-on information."""
addon = self._extract_addon(request, check_installed=False)
return {
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
ATTR_DESCRIPTON: addon.description,
ATTR_LONG_DESCRIPTION: addon.long_description,
ATTR_VERSION: addon.version_installed,
ATTR_AUTO_UPDATE: addon.auto_update,
ATTR_REPOSITORY: addon.repository,
ATTR_LAST_VERSION: addon.last_version,
ATTR_STATE: await addon.state(),
ATTR_PROTECTED: addon.protected,
ATTR_RATING: rating_security(addon),
ATTR_BOOT: addon.boot,
ATTR_OPTIONS: addon.options,
ATTR_ARCH: addon.supported_arch,
ATTR_MACHINE: addon.supported_machine,
ATTR_URL: addon.url,
ATTR_DETACHED: addon.is_detached,
ATTR_AVAILABLE: addon.available,
ATTR_BUILD: addon.need_build,
ATTR_NETWORK: addon.ports,
ATTR_HOST_NETWORK: addon.network_mode == 'host',
ATTR_HOST_NETWORK: addon.host_network,
ATTR_HOST_PID: addon.host_pid,
ATTR_HOST_IPC: addon.host_ipc,
ATTR_HOST_DBUS: addon.host_dbus,
ATTR_PRIVILEGED: addon.privileged,
ATTR_DEVICES: self._pretty_devices(addon),
ATTR_FULL_ACCESS: addon.with_full_access,
ATTR_APPARMOR: addon.apparmor,
ATTR_DEVICES: _pretty_devices(addon),
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
ATTR_CHANGELOG: addon.with_changelog,
ATTR_WEBUI: addon.webui,
ATTR_HASSIO_API: addon.use_hassio_api,
ATTR_STDIN: addon.with_stdin,
ATTR_HASSIO_API: addon.access_hassio_api,
ATTR_HASSIO_ROLE: addon.hassio_role,
ATTR_AUTH_API: addon.access_auth_api,
ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
ATTR_GPIO: addon.with_gpio,
ATTR_KERNEL_MODULES: addon.with_kernel_modules,
ATTR_DEVICETREE: addon.with_devicetree,
ATTR_DOCKER_API: addon.access_docker_api,
ATTR_AUDIO: addon.with_audio,
ATTR_AUDIO_INPUT: addon.audio_input,
ATTR_AUDIO_OUTPUT: addon.audio_output,
ATTR_SERVICES: _pretty_services(addon),
ATTR_DISCOVERY: addon.discovery,
}
@api_process
async def options(self, request):
"""Store user options for addon."""
"""Store user options for add-on."""
addon = self._extract_addon(request)
addon_schema = SCHEMA_OPTIONS.extend({
vol.Optional(ATTR_OPTIONS): addon.schema,
vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
})
body = await api_validate(addon_schema, request)
@@ -156,27 +187,56 @@ class APIAddons(object):
if ATTR_AUDIO_OUTPUT in body:
addon.audio_output = body[ATTR_AUDIO_OUTPUT]
addon.save_data()
return True
@api_process
async def install(self, request):
"""Install addon."""
body = await api_validate(SCHEMA_VERSION, request)
addon = self._extract_addon(request, check_installed=False)
version = body.get(ATTR_VERSION, addon.last_version)
async def security(self, request):
"""Store security options for add-on."""
addon = self._extract_addon(request)
body = await api_validate(SCHEMA_SECURITY, request)
return await asyncio.shield(
addon.install(version=version), loop=self.loop)
if ATTR_PROTECTED in body:
_LOGGER.warning("Protected flag changing for %s!", addon.slug)
addon.protected = body[ATTR_PROTECTED]
addon.save_data()
return True
@api_process
async def stats(self, request):
"""Return resource information."""
addon = self._extract_addon(request)
stats = await addon.stats()
if not stats:
raise APIError("No stats available")
return {
ATTR_CPU_PERCENT: stats.cpu_percent,
ATTR_MEMORY_USAGE: stats.memory_usage,
ATTR_MEMORY_LIMIT: stats.memory_limit,
ATTR_NETWORK_RX: stats.network_rx,
ATTR_NETWORK_TX: stats.network_tx,
ATTR_BLK_READ: stats.blk_read,
ATTR_BLK_WRITE: stats.blk_write,
}
@api_process
def install(self, request):
"""Install add-on."""
addon = self._extract_addon(request, check_installed=False)
return asyncio.shield(addon.install())
@api_process
def uninstall(self, request):
"""Uninstall addon."""
"""Uninstall add-on."""
addon = self._extract_addon(request)
return asyncio.shield(addon.uninstall(), loop=self.loop)
return asyncio.shield(addon.uninstall())
@api_process
def start(self, request):
"""Start addon."""
"""Start add-on."""
addon = self._extract_addon(request)
# check options
@@ -184,47 +244,99 @@ class APIAddons(object):
try:
addon.schema(options)
except vol.Invalid as ex:
raise RuntimeError(humanize_error(options, ex)) from None
raise APIError(humanize_error(options, ex)) from None
return asyncio.shield(addon.start(), loop=self.loop)
return asyncio.shield(addon.start())
@api_process
def stop(self, request):
"""Stop addon."""
"""Stop add-on."""
addon = self._extract_addon(request)
return asyncio.shield(addon.stop(), loop=self.loop)
return asyncio.shield(addon.stop())
@api_process
async def update(self, request):
"""Update addon."""
body = await api_validate(SCHEMA_VERSION, request)
def update(self, request):
"""Update add-on."""
addon = self._extract_addon(request)
version = body.get(ATTR_VERSION, addon.last_version)
if version == addon.version_installed:
raise RuntimeError("Version %s is already in use", version)
if addon.last_version == addon.version_installed:
raise APIError("No update available!")
return await asyncio.shield(
addon.update(version=version), loop=self.loop)
return asyncio.shield(addon.update())
@api_process
def restart(self, request):
"""Restart addon."""
"""Restart add-on."""
addon = self._extract_addon(request)
return asyncio.shield(addon.restart(), loop=self.loop)
return asyncio.shield(addon.restart())
@api_process
def rebuild(self, request):
"""Rebuild local build add-on."""
addon = self._extract_addon(request)
if not addon.need_build:
raise APIError("Only local build addons are supported")
return asyncio.shield(addon.rebuild())
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
"""Return logs from addon."""
"""Return logs from add-on."""
addon = self._extract_addon(request)
return addon.logs()
@api_process_raw(CONTENT_TYPE_PNG)
async def icon(self, request):
"""Return icon from add-on."""
addon = self._extract_addon(request, check_installed=False)
if not addon.with_icon:
raise APIError("No icon found!")
with addon.path_icon.open('rb') as png:
return png.read()
@api_process_raw(CONTENT_TYPE_PNG)
async def logo(self, request):
"""Return logo from addon."""
"""Return logo from add-on."""
addon = self._extract_addon(request, check_installed=False)
if not addon.with_logo:
raise RuntimeError("No image found!")
raise APIError("No logo found!")
with addon.path_logo.open('rb') as png:
return png.read()
@api_process_raw(CONTENT_TYPE_TEXT)
async def changelog(self, request):
"""Return changelog from add-on."""
addon = self._extract_addon(request, check_installed=False)
if not addon.with_changelog:
raise APIError("No changelog found!")
with addon.path_changelog.open('r') as changelog:
return changelog.read()
@api_process
async def stdin(self, request):
"""Write to stdin of add-on."""
addon = self._extract_addon(request)
if not addon.with_stdin:
raise APIError("STDIN not supported by add-on")
data = await request.read()
return await asyncio.shield(addon.write_stdin(data))
def _pretty_devices(addon):
"""Return a simplified device list."""
dev_list = addon.devices
if not dev_list:
return None
return [row.split(':')[0] for row in dev_list]
def _pretty_services(addon):
"""Return a simplified services role list."""
services = []
for name, access in addon.services_role.items():
services.append(f"{name}:{access}")
return services

hassio/api/auth.py Normal file

@@ -0,0 +1,61 @@
"""Init file for Hass.io auth/SSO RESTful API."""
import logging
from aiohttp import BasicAuth
from aiohttp.web_exceptions import HTTPUnauthorized
from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION, WWW_AUTHENTICATE
from .utils import api_process
from ..const import REQUEST_FROM, CONTENT_TYPE_JSON, CONTENT_TYPE_URL
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
_LOGGER = logging.getLogger(__name__)
class APIAuth(CoreSysAttributes):
"""Handle RESTful API for auth functions."""
def _process_basic(self, request, addon):
"""Process login request with basic auth.
Return a coroutine.
"""
auth = BasicAuth.decode(request.headers[AUTHORIZATION])
return self.sys_auth.check_login(addon, auth.login, auth.password)
def _process_dict(self, request, addon, data):
"""Process login with dict data.
Return a coroutine.
"""
username = data.get('username') or data.get('user')
password = data.get('password')
return self.sys_auth.check_login(addon, username, password)
@api_process
async def auth(self, request):
"""Process login request."""
addon = request[REQUEST_FROM]
if not addon.access_auth_api:
raise APIForbidden("Can't use Home Assistant auth!")
# BasicAuth
if AUTHORIZATION in request.headers:
return await self._process_basic(request, addon)
# Json
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
data = await request.json()
return await self._process_dict(request, addon, data)
# URL encoded
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
data = await request.post()
return await self._process_dict(request, addon, data)
raise HTTPUnauthorized(headers={
WWW_AUTHENTICATE: "Basic realm=\"Hass.io Authentication\""
})
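
A hedged sketch of how an add-on might call this new /auth endpoint to validate Home Assistant credentials. It assumes the usual add-on conventions of the time (Supervisor reachable at http://hassio, add-on token in the HASSIO_TOKEN environment variable, auth_api enabled for the add-on); none of that is part of this diff.
import os
import aiohttp

async def check_credentials(username: str, password: str) -> bool:
    """Return True if Home Assistant accepts the given credentials (sketch)."""
    headers = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}
    auth = aiohttp.BasicAuth(username, password)
    async with aiohttp.ClientSession() as session:
        async with session.post("http://hassio/auth", headers=headers, auth=auth) as resp:
            return resp.status == 200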

hassio/api/discovery.py Normal file

@@ -0,0 +1,100 @@
"""Init file for Hass.io network RESTful API."""
import voluptuous as vol
from .utils import api_process, api_validate
from ..const import (
ATTR_ADDON,
ATTR_UUID,
ATTR_CONFIG,
ATTR_DISCOVERY,
ATTR_SERVICE,
REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden
from ..discovery.validate import valid_discovery_service
SCHEMA_DISCOVERY = vol.Schema(
{
vol.Required(ATTR_SERVICE): valid_discovery_service,
vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
}
)
class APIDiscovery(CoreSysAttributes):
"""Handle RESTful API for discovery functions."""
def _extract_message(self, request):
"""Extract discovery message from URL."""
message = self.sys_discovery.get(request.match_info.get("uuid"))
if not message:
raise APIError("Discovery message not found")
return message
def _check_permission_ha(self, request):
"""Check permission for API call / Home Assistant."""
if request[REQUEST_FROM] != self.sys_homeassistant:
raise APIForbidden("Only HomeAssistant can use this API!")
@api_process
async def list(self, request):
"""Show register services."""
self._check_permission_ha(request)
discovery = []
for message in self.sys_discovery.list_messages:
discovery.append(
{
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
}
)
return {ATTR_DISCOVERY: discovery}
@api_process
async def set_discovery(self, request):
"""Write data into a discovery pipeline."""
body = await api_validate(SCHEMA_DISCOVERY, request)
addon = request[REQUEST_FROM]
# Access?
if body[ATTR_SERVICE] not in addon.discovery:
raise APIForbidden(f"Can't use discovery!")
# Process discovery message
message = self.sys_discovery.send(addon, **body)
return {ATTR_UUID: message.uuid}
@api_process
async def get_discovery(self, request):
"""Read data into a discovery message."""
message = self._extract_message(request)
# HomeAssistant?
self._check_permission_ha(request)
return {
ATTR_ADDON: message.addon,
ATTR_SERVICE: message.service,
ATTR_UUID: message.uuid,
ATTR_CONFIG: message.config,
}
@api_process
async def del_discovery(self, request):
"""Delete data into a discovery message."""
message = self._extract_message(request)
addon = request[REQUEST_FROM]
# Permission
if message.addon != addon.slug:
raise APIForbidden(f"Can't remove discovery message")
self.sys_discovery.remove(message)
return True
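
As a usage illustration, an add-on that is allowed to announce a service could post a message matching SCHEMA_DISCOVERY above. The base URL, header and the "mqtt" payload are assumptions for this sketch, not taken from this diff.
import os
import aiohttp

async def announce_mqtt(host: str, port: int) -> str:
    """Send a discovery message and return its UUID (sketch)."""
    headers = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}
    payload = {"service": "mqtt", "config": {"host": host, "port": port}}
    async with aiohttp.ClientSession() as session:
        async with session.post("http://hassio/discovery", json=payload, headers=headers) as resp:
            data = await resp.json()
            # assumes the usual {"result": "ok", "data": {...}} envelope added by api_process
            return data["data"]["uuid"]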

hassio/api/hardware.py Normal file

@@ -0,0 +1,34 @@
"""Init file for Hass.io hardware RESTful API."""
import logging
from .utils import api_process
from ..const import (
ATTR_SERIAL, ATTR_DISK, ATTR_GPIO, ATTR_AUDIO, ATTR_INPUT, ATTR_OUTPUT)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
class APIHardware(CoreSysAttributes):
"""Handle RESTful API for hardware functions."""
@api_process
async def info(self, request):
"""Show hardware info."""
return {
ATTR_SERIAL: list(self.sys_hardware.serial_devices),
ATTR_INPUT: list(self.sys_hardware.input_devices),
ATTR_DISK: list(self.sys_hardware.disk_devices),
ATTR_GPIO: list(self.sys_hardware.gpio_devices),
ATTR_AUDIO: self.sys_hardware.audio_devices,
}
@api_process
async def audio(self, request):
"""Show ALSA audio devices."""
return {
ATTR_AUDIO: {
ATTR_INPUT: self.sys_host.alsa.input_devices,
ATTR_OUTPUT: self.sys_host.alsa.output_devices,
}
}

hassio/api/hassos.py Normal file

@@ -0,0 +1,53 @@
"""Init file for Hass.io HassOS RESTful API."""
import asyncio
import logging
import voluptuous as vol
from .utils import api_process, api_validate
from ..const import (
ATTR_VERSION, ATTR_BOARD, ATTR_VERSION_LATEST, ATTR_VERSION_CLI,
ATTR_VERSION_CLI_LATEST)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
SCHEMA_VERSION = vol.Schema({
vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
class APIHassOS(CoreSysAttributes):
"""Handle RESTful API for HassOS functions."""
@api_process
async def info(self, request):
"""Return HassOS information."""
return {
ATTR_VERSION: self.sys_hassos.version,
ATTR_VERSION_CLI: self.sys_hassos.version_cli,
ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
ATTR_BOARD: self.sys_hassos.board,
}
@api_process
async def update(self, request):
"""Update HassOS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)
await asyncio.shield(self.sys_hassos.update(version))
@api_process
async def update_cli(self, request):
"""Update HassOS CLI."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)
await asyncio.shield(self.sys_hassos.update_cli(version))
@api_process
def config_sync(self, request):
"""Trigger config reload on HassOS."""
return asyncio.shield(self.sys_hassos.config_sync())
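
For illustration, triggering the HassOS update endpoint with the optional version from SCHEMA_VERSION might look like the sketch below. The base URL and header are the same add-on conventions assumed earlier, and this path is limited to manager-level callers by the security middleware shown further down.
import os
from typing import Optional

import aiohttp

async def update_hassos(version: Optional[str] = None) -> None:
    """Ask the Supervisor to update HassOS; without a version it uses the latest (sketch)."""
    headers = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}
    payload = {"version": version} if version else {}
    async with aiohttp.ClientSession() as session:
        await session.post("http://hassio/hassos/update", json=payload, headers=headers)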


@@ -1,91 +1,165 @@
"""Init file for HassIO homeassistant rest api."""
"""Init file for Hass.io Home Assistant RESTful API."""
import asyncio
import logging
from typing import Coroutine, Dict, Any
import voluptuous as vol
from aiohttp import web
from .util import api_process, api_process_raw, api_validate
from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM,
CONTENT_TYPE_BINARY)
from ..validate import HASS_DEVICES
ATTR_ARCH,
ATTR_BLK_READ,
ATTR_BLK_WRITE,
ATTR_BOOT,
ATTR_CPU_PERCENT,
ATTR_CUSTOM,
ATTR_IMAGE,
ATTR_LAST_VERSION,
ATTR_MACHINE,
ATTR_MEMORY_LIMIT,
ATTR_MEMORY_USAGE,
ATTR_NETWORK_RX,
ATTR_NETWORK_TX,
ATTR_PASSWORD,
ATTR_PORT,
ATTR_REFRESH_TOKEN,
ATTR_SSL,
ATTR_VERSION,
ATTR_WAIT_BOOT,
ATTR_WATCHDOG,
CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import DOCKER_IMAGE, NETWORK_PORT
from .utils import api_process, api_process_raw, api_validate
_LOGGER = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema(
{
vol.Optional(ATTR_BOOT): vol.Boolean(),
vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(vol.Coerce(str)),
vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Any(None, DOCKER_IMAGE),
vol.Optional(ATTR_PORT): NETWORK_PORT,
vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
vol.Optional(ATTR_SSL): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
vol.Optional(ATTR_WAIT_BOOT): vol.All(vol.Coerce(int), vol.Range(min=60)),
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
}
)
SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_DEVICES): HASS_DEVICES,
vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Any(None, vol.Coerce(str)),
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
vol.Any(None, vol.Coerce(str)),
})
SCHEMA_VERSION = vol.Schema({
vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
class APIHomeAssistant(object):
"""Handle rest api for homeassistant functions."""
def __init__(self, config, loop, homeassistant):
"""Initialize homeassistant rest api part."""
self.config = config
self.loop = loop
self.homeassistant = homeassistant
class APIHomeAssistant(CoreSysAttributes):
"""Handle RESTful API for Home Assistant functions."""
@api_process
async def info(self, request):
async def info(self, request: web.Request) -> Dict[str, Any]:
"""Return host information."""
return {
ATTR_VERSION: self.homeassistant.version,
ATTR_LAST_VERSION: self.homeassistant.last_version,
ATTR_IMAGE: self.homeassistant.image,
ATTR_DEVICES: self.homeassistant.devices,
ATTR_CUSTOM: self.homeassistant.is_custom_image,
ATTR_VERSION: self.sys_homeassistant.version,
ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
ATTR_MACHINE: self.sys_homeassistant.machine,
ATTR_ARCH: self.sys_homeassistant.arch,
ATTR_IMAGE: self.sys_homeassistant.image,
ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
ATTR_BOOT: self.sys_homeassistant.boot,
ATTR_PORT: self.sys_homeassistant.api_port,
ATTR_SSL: self.sys_homeassistant.api_ssl,
ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
}
@api_process
async def options(self, request):
"""Set homeassistant options."""
async def options(self, request: web.Request) -> None:
"""Set Home Assistant options."""
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_DEVICES in body:
self.homeassistant.devices = body[ATTR_DEVICES]
if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
self.sys_homeassistant.image = body[ATTR_IMAGE]
self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION]
if ATTR_IMAGE in body:
self.homeassistant.set_custom(
body[ATTR_IMAGE], body[ATTR_LAST_VERSION])
if ATTR_BOOT in body:
self.sys_homeassistant.boot = body[ATTR_BOOT]
return True
if ATTR_PORT in body:
self.sys_homeassistant.api_port = body[ATTR_PORT]
if ATTR_PASSWORD in body:
self.sys_homeassistant.api_password = body[ATTR_PASSWORD]
self.sys_homeassistant.refresh_token = None
if ATTR_SSL in body:
self.sys_homeassistant.api_ssl = body[ATTR_SSL]
if ATTR_WATCHDOG in body:
self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]
if ATTR_WAIT_BOOT in body:
self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
if ATTR_REFRESH_TOKEN in body:
self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]
self.sys_homeassistant.save_data()
@api_process
async def update(self, request):
"""Update homeassistant."""
async def stats(self, request: web.Request) -> Dict[Any, str]:
"""Return resource information."""
stats = await self.sys_homeassistant.stats()
if not stats:
raise APIError("No stats available")
return {
ATTR_CPU_PERCENT: stats.cpu_percent,
ATTR_MEMORY_USAGE: stats.memory_usage,
ATTR_MEMORY_LIMIT: stats.memory_limit,
ATTR_NETWORK_RX: stats.network_rx,
ATTR_NETWORK_TX: stats.network_tx,
ATTR_BLK_READ: stats.blk_read,
ATTR_BLK_WRITE: stats.blk_write,
}
@api_process
async def update(self, request: web.Request) -> None:
"""Update Home Assistant."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.homeassistant.last_version)
version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version)
if version == self.homeassistant.version:
raise RuntimeError("Version {} is already in use".format(version))
return await asyncio.shield(
self.homeassistant.update(version), loop=self.loop)
await asyncio.shield(self.sys_homeassistant.update(version))
@api_process
def restart(self, request):
"""Restart homeassistant."""
return asyncio.shield(self.homeassistant.restart(), loop=self.loop)
def stop(self, request: web.Request) -> Coroutine:
"""Stop Home Assistant."""
return asyncio.shield(self.sys_homeassistant.stop())
@api_process
def start(self, request: web.Request) -> Coroutine:
"""Start Home Assistant."""
return asyncio.shield(self.sys_homeassistant.start())
@api_process
def restart(self, request: web.Request) -> Coroutine:
"""Restart Home Assistant."""
return asyncio.shield(self.sys_homeassistant.restart())
@api_process
def rebuild(self, request: web.Request) -> Coroutine:
"""Rebuild Home Assistant."""
return asyncio.shield(self.sys_homeassistant.rebuild())
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
"""Return homeassistant docker logs."""
return self.homeassistant.logs()
def logs(self, request: web.Request) -> Coroutine:
"""Return Home Assistant Docker logs."""
return self.sys_homeassistant.logs()
@api_process
async def check(self, request):
"""Check config of homeassistant."""
code, message = await self.homeassistant.check_config()
if not code:
raise RuntimeError(message)
return True
async def check(self, request: web.Request) -> None:
"""Check configuration of Home Assistant."""
result = await self.sys_homeassistant.check_config()
if not result.valid:
raise APIError(result.log)
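
A hedged example of exercising the extended options schema above (port, SSL, watchdog, wait_boot). The values and the add-on conventions are illustrative only.
import os
import aiohttp

async def set_homeassistant_options() -> None:
    """Set a few of the options accepted by SCHEMA_OPTIONS (sketch)."""
    headers = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}
    payload = {"port": 8123, "ssl": False, "watchdog": True, "wait_boot": 600}
    async with aiohttp.ClientSession() as session:
        await session.post("http://hassio/homeassistant/options", json=payload, headers=headers)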


@@ -1,90 +1,101 @@
"""Init file for HassIO host rest api."""
"""Init file for Hass.io host RESTful API."""
import asyncio
import logging
import voluptuous as vol
from .util import api_process_hostcontrol, api_process, api_validate
from .utils import api_process, api_validate
from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT)
from ..validate import ALSA_CHANNEL
ATTR_HOSTNAME, ATTR_FEATURES, ATTR_KERNEL, ATTR_OPERATING_SYSTEM,
ATTR_CHASSIS, ATTR_DEPLOYMENT, ATTR_STATE, ATTR_NAME, ATTR_DESCRIPTON,
ATTR_SERVICES, ATTR_CPE)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
SCHEMA_VERSION = vol.Schema({
vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
SERVICE = 'service'
SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
})
class APIHost(object):
"""Handle rest api for host functions."""
def __init__(self, config, loop, host_control, hardware):
"""Initialize host rest api part."""
self.config = config
self.loop = loop
self.host_control = host_control
self.local_hw = hardware
class APIHost(CoreSysAttributes):
"""Handle RESTful API for host functions."""
@api_process
async def info(self, request):
"""Return host information."""
return {
ATTR_TYPE: self.host_control.type,
ATTR_VERSION: self.host_control.version,
ATTR_LAST_VERSION: self.host_control.last_version,
ATTR_FEATURES: self.host_control.features,
ATTR_HOSTNAME: self.host_control.hostname,
ATTR_OS: self.host_control.os_info,
ATTR_CHASSIS: self.sys_host.info.chassis,
ATTR_CPE: self.sys_host.info.cpe,
ATTR_FEATURES: self.sys_host.supperted_features,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_KERNEL: self.sys_host.info.kernel,
}
@api_process
async def options(self, request):
"""Process host options."""
"""Edit host settings."""
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_AUDIO_OUTPUT in body:
self.config.audio_output = body[ATTR_AUDIO_OUTPUT]
if ATTR_AUDIO_INPUT in body:
self.config.audio_input = body[ATTR_AUDIO_INPUT]
return True
@api_process_hostcontrol
def reboot(self, request):
"""Reboot host."""
return self.host_control.reboot()
@api_process_hostcontrol
def shutdown(self, request):
"""Poweroff host."""
return self.host_control.shutdown()
@api_process_hostcontrol
async def update(self, request):
"""Update host OS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.host_control.last_version)
if version == self.host_control.version:
raise RuntimeError("Version {} is already in use".format(version))
return await asyncio.shield(
self.host_control.update(version=version), loop=self.loop)
# hostname
if ATTR_HOSTNAME in body:
await asyncio.shield(
self.sys_host.control.set_hostname(body[ATTR_HOSTNAME]))
@api_process
async def hardware(self, request):
"""Return local hardware infos."""
def reboot(self, request):
"""Reboot host."""
return asyncio.shield(self.sys_host.control.reboot())
@api_process
def shutdown(self, request):
"""Poweroff host."""
return asyncio.shield(self.sys_host.control.shutdown())
@api_process
def reload(self, request):
"""Reload host data."""
return asyncio.shield(self.sys_host.reload())
@api_process
async def services(self, request):
"""Return list of available services."""
services = []
for unit in self.sys_host.services:
services.append({
ATTR_NAME: unit.name,
ATTR_DESCRIPTON: unit.description,
ATTR_STATE: unit.state,
})
return {
ATTR_SERIAL: self.local_hw.serial_devices,
ATTR_INPUT: self.local_hw.input_devices,
ATTR_DISK: self.local_hw.disk_devices,
ATTR_AUDIO: self.local_hw.audio_devices,
ATTR_SERVICES: services
}
@api_process
def service_start(self, request):
"""Start a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.start(unit))
@api_process
def service_stop(self, request):
"""Stop a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.stop(unit))
@api_process
def service_reload(self, request):
"""Reload a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.reload(unit))
@api_process
def service_restart(self, request):
"""Restart a service."""
unit = request.match_info.get(SERVICE)
return asyncio.shield(self.sys_host.services.restart(unit))

hassio/api/info.py Normal file

@@ -0,0 +1,28 @@
"""Init file for Hass.io info RESTful API."""
import logging
from ..const import (ATTR_ARCH, ATTR_CHANNEL, ATTR_HASSOS, ATTR_HOMEASSISTANT,
ATTR_HOSTNAME, ATTR_MACHINE, ATTR_SUPERVISOR,
ATTR_SUPPORTED_ARCH)
from ..coresys import CoreSysAttributes
from .utils import api_process
_LOGGER = logging.getLogger(__name__)
class APIInfo(CoreSysAttributes):
"""Handle RESTful API for info functions."""
@api_process
async def info(self, request):
"""Show system info."""
return {
ATTR_SUPERVISOR: self.sys_supervisor.version,
ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
ATTR_HASSOS: self.sys_hassos.version,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_MACHINE: self.sys_machine,
ATTR_ARCH: self.sys_arch.default,
ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
ATTR_CHANNEL: self.sys_updater.channel,
}


@@ -1,43 +0,0 @@
"""Init file for HassIO network rest api."""
import logging
import voluptuous as vol
from .util import api_process, api_process_hostcontrol, api_validate
from ..const import ATTR_HOSTNAME
_LOGGER = logging.getLogger(__name__)
SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
})
class APINetwork(object):
"""Handle rest api for network functions."""
def __init__(self, config, loop, host_control):
"""Initialize network rest api part."""
self.config = config
self.loop = loop
self.host_control = host_control
@api_process
async def info(self, request):
"""Show network settings."""
return {
ATTR_HOSTNAME: self.host_control.hostname,
}
@api_process_hostcontrol
async def options(self, request):
"""Edit network settings."""
body = await api_validate(SCHEMA_OPTIONS, request)
# hostname
if ATTR_HOSTNAME in body:
if self.host_control.hostname != body[ATTR_HOSTNAME]:
await self.host_control.set_hostname(body[ATTR_HOSTNAME])
return True

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long


@@ -0,0 +1,142 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
* @license
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* The complete set of authors may be found at
* http://polymer.github.io/AUTHORS.txt
* The complete set of contributors may be found at
* http://polymer.github.io/CONTRIBUTORS.txt
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright 2018 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
* @license
* Copyright 2016 Google Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
/**
@license
Copyright (c) 2019 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
* @license
* Copyright (c) 2018 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt
* The complete set of authors may be found at
* http://polymer.github.io/AUTHORS.txt
* The complete set of contributors may be found at
* http://polymer.github.io/CONTRIBUTORS.txt
* Code distributed by Google as part of the polymer project is also
* subject to an additional IP rights grant found at
* http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2014 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

Binary file not shown.


@@ -0,0 +1 @@
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.2c1fb1dea4fa88f96920.js","sourceRoot":""}

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.


@@ -0,0 +1 @@
(window.webpackJsonp=window.webpackJsonp||[]).push([[4],{110:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(101),i=t.n(e),o=t(103),u=t.n(o),a=i.a,c=u.a}}]);

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long


@@ -0,0 +1,20 @@
/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

Binary file not shown.


@@ -0,0 +1 @@
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.e7d34dbf975fad4b7776.js","sourceRoot":""}


@@ -0,0 +1 @@
!function(e){function t(t){for(var n,o,i=t[0],u=t[1],a=0,f=[];a<i.length;a++)o=i[a],r[o]&&f.push(r[o][0]),r[o]=0;for(n in u)Object.prototype.hasOwnProperty.call(u,n)&&(e[n]=u[n]);for(c&&c(t);f.length;)f.shift()()}var n={},r={1:0};function o(t){if(n[t])return n[t].exports;var r=n[t]={i:t,l:!1,exports:{}};return e[t].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var t=[],n=r[e];if(0!==n)if(n)t.push(n[2]);else{var i=new Promise(function(t,o){n=r[e]=[t,o]});t.push(n[2]=i);var u,a=document.createElement("script");a.charset="utf-8",a.timeout=120,o.nc&&a.setAttribute("nonce",o.nc),a.src=function(e){return o.p+"chunk."+{0:"e7d34dbf975fad4b7776",2:"75766aa821239c9936dc",3:"6ff2deda34a647d6051c",4:"b74ddf4cacc7d5de8a55",5:"d33e783375f0db186ab5",6:"2c1fb1dea4fa88f96920",7:"088b1034e27d00ee9329"}[e]+".js"}(e),u=function(t){a.onerror=a.onload=null,clearTimeout(c);var n=r[e];if(0!==n){if(n){var o=t&&("load"===t.type?"missing":t.type),i=t&&t.target&&t.target.src,u=new Error("Loading chunk "+e+" failed.\n("+o+": "+i+")");u.type=o,u.request=i,n[1](u)}r[e]=void 0}};var c=setTimeout(function(){u({type:"timeout",target:a})},12e4);a.onerror=a.onload=u,document.head.appendChild(a)}return Promise.all(t)},o.m=e,o.c=n,o.d=function(e,t,n){o.o(e,t)||Object.defineProperty(e,t,{enumerable:!0,get:n})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,t){if(1&t&&(e=o(e)),8&t)return e;if(4&t&&"object"==typeof e&&e&&e.__esModule)return e;var n=Object.create(null);if(o.r(n),Object.defineProperty(n,"default",{enumerable:!0,value:e}),2&t&&"string"!=typeof e)for(var r in e)o.d(n,r,function(t){return e[t]}.bind(null,r));return n},o.n=function(e){var t=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(t,"a",t),t},o.o=function(e,t){return Object.prototype.hasOwnProperty.call(e,t)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var i=window.webpackJsonp=window.webpackJsonp||[],u=i.push.bind(i);i.push=t,i=i.slice();for(var a=0;a<i.length;a++)t(i[a]);var c=u;o(o.s=0)}([function(e,t,n){window.loadES5Adapter().then(function(){Promise.all([n.e(0),n.e(2)]).then(n.bind(null,2)),Promise.all([n.e(0),n.e(6),n.e(3)]).then(n.bind(null,1))}),document.body.style.height="100%"}]);

Binary file not shown.

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because one or more lines are too long

Binary file not shown.


@@ -0,0 +1,38 @@
<!doctype html>
<html>
<head>
<meta charset="utf-8">
<title>Hass.io</title>
<meta name='viewport' content='width=device-width, user-scalable=no'>
<style>
body {
height: 100vh;
margin: 0;
padding: 0;
}
</style>
<script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
</head>
<body>
<hassio-app></hassio-app>
<script>
function addScript(src) {
var e = document.createElement('script');
e.src = src;
document.write(e.outerHTML);
}
var webComponentsSupported = (
'customElements' in window &&
'import' in document.createElement('link') &&
'content' in document.createElement('template'));
if (!webComponentsSupported) {
addScript('/static/webcomponents-lite.js');
}
</script>
<!--
Disabled while we make Home Assistant able to serve the right files.
<script src="./app.js"></script>
-->
<link rel='import' href='./hassio-app.html'>
</body>
</html>

Binary file not shown.

hassio/api/proxy.py Normal file

@@ -0,0 +1,253 @@
"""Utils for Home Assistant Proxy."""
import asyncio
from contextlib import asynccontextmanager
import logging
import aiohttp
from aiohttp import web
from aiohttp.web_exceptions import HTTPBadGateway, HTTPUnauthorized
from aiohttp.client_exceptions import ClientConnectorError
from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION
import async_timeout
from ..const import HEADER_HA_ACCESS
from ..coresys import CoreSysAttributes
from ..exceptions import (
HomeAssistantAuthError, HomeAssistantAPIError, APIError)
_LOGGER = logging.getLogger(__name__)
class APIProxy(CoreSysAttributes):
"""API Proxy for Home Assistant."""
def _check_access(self, request):
"""Check the Hass.io token."""
if AUTHORIZATION in request.headers:
bearer = request.headers[AUTHORIZATION]
hassio_token = bearer.split(' ')[-1]
else:
hassio_token = request.headers.get(HEADER_HA_ACCESS)
addon = self.sys_addons.from_token(hassio_token)
if not addon:
_LOGGER.warning("Unknown Home Assistant API access!")
elif not addon.access_homeassistant_api:
_LOGGER.warning("Not permitted API access: %s", addon.slug)
else:
_LOGGER.info("%s access from %s", request.path, addon.slug)
return
raise HTTPUnauthorized()
@asynccontextmanager
async def _api_client(self, request, path, timeout=300):
"""Return a client request with proxy origin for Home Assistant."""
try:
# read data
with async_timeout.timeout(30):
data = await request.read()
if data:
content_type = request.content_type
else:
content_type = None
async with self.sys_homeassistant.make_request(
request.method.lower(), f'api/{path}',
content_type=content_type,
data=data,
timeout=timeout,
) as resp:
yield resp
return
except HomeAssistantAuthError:
_LOGGER.error("Authenticate error on API for request %s", path)
except HomeAssistantAPIError:
_LOGGER.error("Error on API for request %s", path)
except aiohttp.ClientError as err:
_LOGGER.error("Client error on API %s request %s", path, err)
except asyncio.TimeoutError:
_LOGGER.error("Client timeout error on API request %s", path)
raise HTTPBadGateway()
async def stream(self, request):
"""Proxy HomeAssistant EventStream Requests."""
self._check_access(request)
_LOGGER.info("Home Assistant EventStream start")
async with self._api_client(request, 'stream', timeout=None) as client:
response = web.StreamResponse()
response.content_type = request.headers.get(CONTENT_TYPE)
try:
await response.prepare(request)
async for data in client.content:
await response.write(data)
except (aiohttp.ClientError, aiohttp.ClientPayloadError):
pass
_LOGGER.info("Home Assistant EventStream close")
return response
async def api(self, request):
"""Proxy Home Assistant API Requests."""
self._check_access(request)
# Normal request
path = request.match_info.get('path', '')
async with self._api_client(request, path) as client:
data = await client.read()
return web.Response(
body=data,
status=client.status,
content_type=client.content_type
)
async def _websocket_client(self):
"""Initialize a WebSocket API connection."""
url = f"{self.sys_homeassistant.api_url}/api/websocket"
try:
client = await self.sys_websession_ssl.ws_connect(
url, heartbeat=30, verify_ssl=False)
# Handle authentication
data = await client.receive_json()
if data.get('type') == 'auth_ok':
return client
if data.get('type') != 'auth_required':
# Invalid protocol
_LOGGER.error(
"Got unexpected response from HA WebSocket: %s", data)
raise APIError()
if self.sys_homeassistant.refresh_token:
await self.sys_homeassistant.ensure_access_token()
await client.send_json({
'type': 'auth',
'access_token': self.sys_homeassistant.access_token,
})
else:
await client.send_json({
'type': 'auth',
'api_password': self.sys_homeassistant.api_password,
})
data = await client.receive_json()
if data.get('type') == 'auth_ok':
return client
# Renew the token if it is invalid
if data.get('type') == 'invalid_auth' and self.sys_homeassistant.refresh_token:
self.sys_homeassistant.access_token = None
return await self._websocket_client()
raise HomeAssistantAuthError()
except (RuntimeError, ValueError, ClientConnectorError) as err:
_LOGGER.error("Client error on WebSocket API %s.", err)
except HomeAssistantAuthError:
_LOGGER.error("Failed authentication to Home Assistant WebSocket")
raise APIError()
async def websocket(self, request):
"""Initialize a WebSocket API connection."""
_LOGGER.info("Home Assistant WebSocket API request initialize")
# init server
server = web.WebSocketResponse(heartbeat=30)
await server.prepare(request)
# handle authentication
try:
await server.send_json({
'type': 'auth_required',
'ha_version': self.sys_homeassistant.version,
})
# Check API access
response = await server.receive_json()
hassio_token = response.get('api_password') or response.get('access_token')
addon = self.sys_addons.from_token(hassio_token)
if not addon or not addon.access_homeassistant_api:
_LOGGER.warning("Unauthorized WebSocket access!")
await server.send_json({
'type': 'auth_invalid',
'message': 'Invalid access',
})
return server
_LOGGER.info("WebSocket access from %s", addon.slug)
await server.send_json({
'type': 'auth_ok',
'ha_version': self.sys_homeassistant.version,
})
except (RuntimeError, ValueError) as err:
_LOGGER.error("Can't initialize handshake: %s", err)
return server
# init connection to hass
try:
client = await self._websocket_client()
except APIError:
return server
_LOGGER.info("Home Assistant WebSocket API request running")
try:
client_read = None
server_read = None
while not server.closed and not client.closed:
if not client_read:
client_read = self.sys_create_task(
client.receive_str())
if not server_read:
server_read = self.sys_create_task(
server.receive_str())
# wait until data need to be processed
await asyncio.wait(
[client_read, server_read],
return_when=asyncio.FIRST_COMPLETED
)
# server
if server_read.done() and not client.closed:
server_read.exception()
await client.send_str(server_read.result())
server_read = None
# client
if client_read.done() and not server.closed:
client_read.exception()
await server.send_str(client_read.result())
client_read = None
except asyncio.CancelledError:
pass
except (RuntimeError, ConnectionError, TypeError) as err:
_LOGGER.info("Home Assistant WebSocket API error: %s", err)
finally:
if client_read:
client_read.cancel()
if server_read:
server_read.cancel()
# close connections
if not client.closed:
await client.close()
if not server.closed:
await server.close()
_LOGGER.info("Home Assistant WebSocket API connection is closed")
return server
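
The client side of this proxied handshake is: connect, wait for auth_required, reply with an access token or API password, and expect auth_ok. A minimal sketch from an add-on's perspective, assuming the endpoint is reached at http://hassio/homeassistant/websocket and the add-on token is accepted as access_token (both assumptions, not shown in this diff):
import os
import aiohttp

async def open_ha_websocket(session: aiohttp.ClientSession) -> aiohttp.ClientWebSocketResponse:
    """Perform the auth handshake against the proxied WebSocket API (sketch)."""
    ws = await session.ws_connect("http://hassio/homeassistant/websocket")
    msg = await ws.receive_json()                # expect {'type': 'auth_required', ...}
    if msg.get("type") != "auth_required":
        await ws.close()
        raise RuntimeError("Unexpected handshake message")
    await ws.send_json({
        "type": "auth",
        "access_token": os.environ.get("HASSIO_TOKEN", ""),
    })
    msg = await ws.receive_json()
    if msg.get("type") != "auth_ok":
        await ws.close()
        raise RuntimeError("WebSocket authentication failed")
    return ws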


@@ -1,102 +1,140 @@
"""Init file for HassIO security rest api."""
from datetime import datetime, timedelta
import io
"""Handle security part of this API."""
import logging
import hashlib
import os
import re
from aiohttp import web
import voluptuous as vol
import pyotp
import pyqrcode
from aiohttp.web import middleware
from aiohttp.web_exceptions import HTTPUnauthorized, HTTPForbidden
from .util import api_process, api_validate, hash_password
from ..const import ATTR_INITIALIZE, ATTR_PASSWORD, ATTR_TOTP, ATTR_SESSION
from ..const import (
HEADER_TOKEN, REQUEST_FROM, ROLE_ADMIN, ROLE_DEFAULT, ROLE_HOMEASSISTANT,
ROLE_MANAGER, ROLE_BACKUP)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
SCHEMA_PASSWORD = vol.Schema({
vol.Required(ATTR_PASSWORD): vol.Coerce(str),
})
SCHEMA_SESSION = SCHEMA_PASSWORD.extend({
vol.Optional(ATTR_TOTP, default=None): vol.Coerce(str),
})
# Block Anytime
BLACKLIST = re.compile(
r"^(?:"
r"|/homeassistant/api/hassio/.*"
r")$"
)
# Free to call or have own security concepts
NO_SECURITY_CHECK = re.compile(
r"^(?:"
r"|/homeassistant/api/.*"
r"|/homeassistant/websocket"
r"|/supervisor/ping"
r")$"
)
# Can called by every add-on
ADDONS_API_BYPASS = re.compile(
r"^(?:"
r"|/addons/self/(?!security|update)[^/]+"
r"|/info"
r"|/services.*"
r"|/discovery.*"
r"|/auth"
r")$"
)
# Policy role add-on API access
ADDONS_ROLE_ACCESS = {
ROLE_DEFAULT: re.compile(
r"^(?:"
r"|/[^/]+/info"
r"|/addons"
r")$"
),
ROLE_HOMEASSISTANT: re.compile(
r"^(?:"
r"|/homeassistant/.+"
r")$"
),
ROLE_BACKUP: re.compile(
r"^(?:"
r"|/snapshots.*"
r")$"
),
ROLE_MANAGER: re.compile(
r"^(?:"
r"|/homeassistant/.+"
r"|/host/.+"
r"|/hardware/.+"
r"|/hassos/.+"
r"|/supervisor/.+"
r"|/addons(?:/[^/]+/(?!security).+)?"
r"|/snapshots.*"
r")$"
),
ROLE_ADMIN: re.compile(
r".*"
),
}
class APISecurity(object):
"""Handle rest api for security functions."""
class SecurityMiddleware(CoreSysAttributes):
"""Security middleware functions."""
def __init__(self, config, loop):
"""Initialize security rest api part."""
self.config = config
self.loop = loop
def __init__(self, coresys):
"""Initialize security middleware."""
self.coresys = coresys
def _check_password(self, body):
"""Check if password is valid and security is initialize."""
if not self.config.security_initialize:
raise RuntimeError("First set a password")
@middleware
async def token_validation(self, request, handler):
"""Check security access of this layer."""
request_from = None
hassio_token = request.headers.get(HEADER_TOKEN)
password = hash_password(body[ATTR_PASSWORD])
if password != self.config.security_password:
raise RuntimeError("Wrong password")
# Blacklist
if BLACKLIST.match(request.path):
_LOGGER.warning("%s is blacklisted!", request.path)
raise HTTPForbidden()
@api_process
async def info(self, request):
"""Return host information."""
return {
ATTR_INITIALIZE: self.config.security_initialize,
ATTR_TOTP: self.config.security_totp is not None,
}
# Ignore security check
if NO_SECURITY_CHECK.match(request.path):
_LOGGER.debug("Passthrough %s", request.path)
return await handler(request)
@api_process
async def options(self, request):
"""Set options / password."""
body = await api_validate(SCHEMA_PASSWORD, request)
# No token
if not hassio_token:
_LOGGER.warning("No API token provided for %s", request.path)
raise HTTPUnauthorized()
if self.config.security_initialize:
raise RuntimeError("Password is already set!")
# Home-Assistant
# UUID check needs to be removed with 131
if hassio_token in (self.sys_homeassistant.uuid,
self.sys_homeassistant.hassio_token):
_LOGGER.debug("%s access from Home Assistant", request.path)
request_from = self.sys_homeassistant
self.config.security_password = hash_password(body[ATTR_PASSWORD])
self.config.security_initialize = True
return True
# Host
if hassio_token == self.sys_machine_id:
_LOGGER.debug("%s access from Host", request.path)
request_from = self.sys_host
@api_process
async def totp(self, request):
"""Set and initialze TOTP."""
body = await api_validate(SCHEMA_PASSWORD, request)
self._check_password(body)
# Add-on
addon = None
if hassio_token and not request_from:
addon = self.sys_addons.from_token(hassio_token)
# generate TOTP
totp_init_key = pyotp.random_base32()
totp = pyotp.TOTP(totp_init_key)
# Check Add-on API access
if addon and ADDONS_API_BYPASS.match(request.path):
_LOGGER.debug("Passthrough %s from %s", request.path, addon.slug)
request_from = addon
elif addon and addon.access_hassio_api:
# Check Role
if ADDONS_ROLE_ACCESS[addon.hassio_role].match(request.path):
_LOGGER.info("%s access from %s", request.path, addon.slug)
request_from = addon
else:
_LOGGER.warning("%s no role for %s", request.path, addon.slug)
# init qrcode
buff = io.BytesIO()
if request_from:
request[REQUEST_FROM] = request_from
return await handler(request)
qrcode = pyqrcode.create(totp.provisioning_uri("Hass.IO"))
qrcode.svg(buff)
# finish
self.config.security_totp = totp_init_key
return web.Response(body=buff.getvalue(), content_type='image/svg+xml')
@api_process
async def session(self, request):
"""Set and initialze session."""
body = await api_validate(SCHEMA_SESSION, request)
self._check_password(body)
# check TOTP
if self.config.security_totp:
totp = pyotp.TOTP(self.config.security_totp)
if body[ATTR_TOTP] != totp.now():
raise RuntimeError("Invalid TOTP token!")
# create session
valid_until = datetime.now() + timedelta(days=1)
session = hashlib.sha256(os.urandom(54)).hexdigest()
# store session
self.config.add_security_session(session, valid_until)
return {ATTR_SESSION: session}
_LOGGER.error("Invalid token for access %s", request.path)
raise HTTPForbidden()
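
For clarity, the role table above reduces add-on API authorization to a regex match per role. A small helper equivalent to the check inside token_validation (the helper itself is illustrative, not part of this diff):
def addon_may_call(role: str, path: str) -> bool:
    """Return True if the add-on role is allowed to call the given API path (sketch)."""
    pattern = ADDONS_ROLE_ACCESS.get(role)
    return bool(pattern and pattern.match(path))

# Examples, using the roles defined above:
#   addon_may_call(ROLE_MANAGER, "/snapshots/new/full")  -> True
#   addon_may_call(ROLE_DEFAULT, "/host/reboot")         -> False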

hassio/api/services.py Normal file

@@ -0,0 +1,75 @@
"""Init file for Hass.io network RESTful API."""
from .utils import api_process, api_validate
from ..const import (
ATTR_AVAILABLE, ATTR_PROVIDERS, ATTR_SLUG, ATTR_SERVICES, REQUEST_FROM,
PROVIDE_SERVICE)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden
class APIServices(CoreSysAttributes):
"""Handle RESTful API for services functions."""
def _extract_service(self, request):
"""Return service, throw an exception if it doesn't exist."""
service = self.sys_services.get(request.match_info.get('service'))
if not service:
raise APIError("Service does not exist")
return service
@api_process
async def list(self, request):
"""Show register services."""
services = []
for service in self.sys_services.list_services:
services.append({
ATTR_SLUG: service.slug,
ATTR_AVAILABLE: service.enabled,
ATTR_PROVIDERS: service.providers,
})
return {ATTR_SERVICES: services}
@api_process
async def set_service(self, request):
"""Write data into a service."""
service = self._extract_service(request)
body = await api_validate(service.schema, request)
addon = request[REQUEST_FROM]
_check_access(request, service.slug)
service.set_service_data(addon, body)
@api_process
async def get_service(self, request):
"""Read data into a service."""
service = self._extract_service(request)
# Access
_check_access(request, service.slug)
if not service.enabled:
raise APIError("Service not enabled")
return service.get_service_data()
@api_process
async def del_service(self, request):
"""Delete data into a service."""
service = self._extract_service(request)
addon = request[REQUEST_FROM]
# Access
_check_access(request, service.slug, True)
service.del_service_data(addon)
def _check_access(request, service, provide=False):
"""Raise error if the rights are wrong."""
addon = request[REQUEST_FROM]
if not addon.services_role.get(service):
raise APIForbidden(f"No access to {service} service!")
if provide and addon.services_role.get(service) != PROVIDE_SERVICE:
raise APIForbidden(f"No access to write {service} service!")
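The module-level helper above is the whole access model: any registered role for a service grants access, and calls made with provide=True are reserved for the add-on registered as the service's provider. A condensed, self-contained restatement of that rule with stand-in objects (the real Addon class and APIForbidden exception are not reproduced here; PermissionError stands in for illustration):

# Stand-in sketch of _check_access; only the role logic is mirrored,
# without the aiohttp request plumbing used in the handler above.
from types import SimpleNamespace

PROVIDE_SERVICE = "provide"  # same constant as in hassio/const.py

def check_access(addon, service, provide=False):
    """Mirror of the module-level helper above."""
    role = addon.services_role.get(service)
    if not role:
        raise PermissionError(f"No access to {service} service!")
    if provide and role != PROVIDE_SERVICE:
        raise PermissionError(f"No access to write {service} service!")

# Hypothetical add-ons: one provides mqtt, one only consumes it.
mqtt_broker = SimpleNamespace(services_role={"mqtt": "provide"})
mqtt_client = SimpleNamespace(services_role={"mqtt": "want"})

check_access(mqtt_broker, "mqtt", provide=True)   # allowed: broker provides mqtt
check_access(mqtt_client, "mqtt")                 # allowed: read-only access
# check_access(mqtt_client, "mqtt", provide=True) # would raise PermissionError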

hassio/api/snapshots.py
View File

@@ -1,61 +1,72 @@
"""Init file for HassIO snapshot rest api."""
"""Init file for Hass.io snapshot RESTful API."""
import asyncio
import logging
from pathlib import Path
from tempfile import TemporaryDirectory
from aiohttp import web
import voluptuous as vol
from .util import api_process, api_validate
from .utils import api_process, api_validate
from ..snapshots.validate import ALL_FOLDERS
from ..const import (
ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
ATTR_DEVICES, ATTR_SNAPSHOTS)
ATTR_SNAPSHOTS, ATTR_PASSWORD, ATTR_PROTECTED, CONTENT_TYPE_TAR)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
_LOGGER = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_PARTIAL = vol.Schema({
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
vol.Optional(ATTR_ADDONS):
vol.All([vol.Coerce(str)], vol.Unique()),
vol.Optional(ATTR_FOLDERS):
vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
})
SCHEMA_RESTORE_FULL = vol.Schema({
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
})
SCHEMA_SNAPSHOT_FULL = vol.Schema({
vol.Optional(ATTR_NAME): vol.Coerce(str),
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
})
SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
vol.Optional(ATTR_ADDONS):
vol.All([vol.Coerce(str)], vol.Unique()),
vol.Optional(ATTR_FOLDERS):
vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
})
class APISnapshots(object):
"""Handle rest api for snapshot functions."""
def __init__(self, config, loop, snapshots):
"""Initialize network rest api part."""
self.config = config
self.loop = loop
self.snapshots = snapshots
class APISnapshots(CoreSysAttributes):
"""Handle RESTful API for snapshot functions."""
def _extract_snapshot(self, request):
"""Return addon and if not exists trow a exception."""
snapshot = self.snapshots.get(request.match_info.get('snapshot'))
"""Return snapshot, throw an exception if it doesn't exist."""
snapshot = self.sys_snapshots.get(request.match_info.get('snapshot'))
if not snapshot:
raise RuntimeError("Snapshot not exists")
raise APIError("Snapshot does not exist")
return snapshot
@api_process
async def list(self, request):
"""Return snapshot list."""
data_snapshots = []
for snapshot in self.snapshots.list_snapshots:
for snapshot in self.sys_snapshots.list_snapshots:
data_snapshots.append({
ATTR_SLUG: snapshot.slug,
ATTR_NAME: snapshot.name,
ATTR_DATE: snapshot.date,
ATTR_TYPE: snapshot.sys_type,
ATTR_PROTECTED: snapshot.protected,
})
return {
@@ -65,7 +76,7 @@ class APISnapshots(object):
@api_process
async def reload(self, request):
"""Reload snapshot list."""
await asyncio.shield(self.snapshots.reload(), loop=self.loop)
await asyncio.shield(self.sys_snapshots.reload())
return True
@api_process
@@ -79,6 +90,7 @@ class APISnapshots(object):
ATTR_SLUG: addon_data[ATTR_SLUG],
ATTR_NAME: addon_data[ATTR_NAME],
ATTR_VERSION: addon_data[ATTR_VERSION],
ATTR_SIZE: addon_data[ATTR_SIZE],
})
return {
@@ -87,10 +99,8 @@ class APISnapshots(object):
ATTR_NAME: snapshot.name,
ATTR_DATE: snapshot.date,
ATTR_SIZE: snapshot.size,
ATTR_HOMEASSISTANT: {
ATTR_VERSION: snapshot.homeassistant_version,
ATTR_DEVICES: snapshot.homeassistant_devices,
},
ATTR_PROTECTED: snapshot.protected,
ATTR_HOMEASSISTANT: snapshot.homeassistant_version,
ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: snapshot.repositories,
ATTR_FOLDERS: snapshot.folders,
@@ -100,36 +110,78 @@ class APISnapshots(object):
async def snapshot_full(self, request):
"""Full-Snapshot a snapshot."""
body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
return await asyncio.shield(
self.snapshots.do_snapshot_full(**body), loop=self.loop)
snapshot = await asyncio.shield(
self.sys_snapshots.do_snapshot_full(**body))
if snapshot:
return {ATTR_SLUG: snapshot.slug}
return False
@api_process
async def snapshot_partial(self, request):
"""Partial-Snapshot a snapshot."""
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
return await asyncio.shield(
self.snapshots.do_snapshot_partial(**body), loop=self.loop)
snapshot = await asyncio.shield(
self.sys_snapshots.do_snapshot_partial(**body))
if snapshot:
return {ATTR_SLUG: snapshot.slug}
return False
@api_process
def restore_full(self, request):
async def restore_full(self, request):
"""Full-Restore a snapshot."""
snapshot = self._extract_snapshot(request)
return asyncio.shield(
self.snapshots.do_restore_full(snapshot), loop=self.loop)
body = await api_validate(SCHEMA_RESTORE_FULL, request)
return await asyncio.shield(
self.sys_snapshots.do_restore_full(snapshot, **body))
@api_process
async def restore_partial(self, request):
"""Partial-Restore a snapshot."""
snapshot = self._extract_snapshot(request)
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
return await asyncio.shield(
self.snapshots.do_restore_partial(snapshot, **body),
loop=self.loop
)
self.sys_snapshots.do_restore_partial(snapshot, **body))
@api_process
async def remove(self, request):
"""Remove a snapshot."""
snapshot = self._extract_snapshot(request)
return self.snapshots.remove(snapshot)
return self.sys_snapshots.remove(snapshot)
async def download(self, request):
"""Download a snapshot file."""
snapshot = self._extract_snapshot(request)
_LOGGER.info("Download snapshot %s", snapshot.slug)
response = web.FileResponse(snapshot.tarfile)
response.content_type = CONTENT_TYPE_TAR
return response
@api_process
async def upload(self, request):
"""Upload a snapshot file."""
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
tar_file = Path(temp_dir, "snapshot.tar")
try:
with tar_file.open('wb') as snapshot:
async for data in request.content.iter_any():
snapshot.write(data)
except OSError as err:
_LOGGER.error("Can't write new snapshot file: %s", err)
return False
except asyncio.CancelledError:
return False
snapshot = await asyncio.shield(
self.sys_snapshots.import_snapshot(tar_file))
if snapshot:
return {ATTR_SLUG: snapshot.slug}
return False
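The upload handler streams the raw request body straight into a temporary tar file before importing it, so a plain binary POST body is enough and no multipart envelope is needed. A rough client-side counterpart could look like the sketch below; the proxy host name "hassio", the /snapshots/new/upload route and the HASSIO_TOKEN variable are assumptions for illustration, not taken from this diff:

# Hypothetical upload of an existing snapshot tar to the Supervisor API.
import asyncio
import os

import aiohttp

async def upload_snapshot(path="my-snapshot.tar"):
    headers = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}
    async with aiohttp.ClientSession(headers=headers) as session:
        with open(path, "rb") as tar:
            # The handler reads request.content.iter_any(), so the raw
            # file object is streamed as the request body.
            async with session.post(
                "http://hassio/snapshots/new/upload", data=tar
            ) as resp:
                print(await resp.json())  # {"result": "ok", "data": {"slug": ...}}

asyncio.run(upload_snapshot())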

hassio/api/supervisor.py
View File

@@ -1,24 +1,29 @@
"""Init file for HassIO supervisor rest api."""
"""Init file for Hass.io Supervisor RESTful API."""
import asyncio
import logging
import voluptuous as vol
from .util import api_process, api_process_raw, api_validate
from .utils import api_process, api_process_raw, api_validate
from ..const import (
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH,
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_ARCH,
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
ATTR_STATE, CONTENT_TYPE_BINARY)
from ..validate import validate_timezone
ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
from ..coresys import CoreSysAttributes
from ..validate import WAIT_BOOT, REPOSITORIES, CHANNELS
from ..exceptions import APIError
from ..utils.validate import validate_timezone
_LOGGER = logging.getLogger(__name__)
SCHEMA_OPTIONS = vol.Schema({
# pylint: disable=no-value-for-parameter
vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
vol.Optional(ATTR_ADDONS_REPOSITORIES): [vol.Url()],
vol.Optional(ATTR_CHANNEL): CHANNELS,
vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
vol.Optional(ATTR_TIMEZONE): validate_timezone,
vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
})
SCHEMA_VERSION = vol.Schema({
@@ -26,30 +31,19 @@ SCHEMA_VERSION = vol.Schema({
})
class APISupervisor(object):
"""Handle rest api for supervisor functions."""
def __init__(self, config, loop, supervisor, snapshots, addons,
host_control, updater):
"""Initialize supervisor rest api part."""
self.config = config
self.loop = loop
self.supervisor = supervisor
self.addons = addons
self.snapshots = snapshots
self.host_control = host_control
self.updater = updater
class APISupervisor(CoreSysAttributes):
"""Handle RESTful API for Supervisor functions."""
@api_process
async def ping(self, request):
"""Return ok for signal that the api is ready."""
"""Return ok for signal that the API is ready."""
return True
@api_process
async def info(self, request):
"""Return host information."""
list_addons = []
for addon in self.addons.list_addons:
for addon in self.sys_addons.list_addons:
if addon.is_installed:
list_addons.append({
ATTR_NAME: addon.name,
@@ -59,67 +53,86 @@ class APISupervisor(object):
ATTR_VERSION: addon.last_version,
ATTR_INSTALLED: addon.version_installed,
ATTR_REPOSITORY: addon.repository,
ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo,
})
return {
ATTR_VERSION: HASSIO_VERSION,
ATTR_LAST_VERSION: self.updater.version_hassio,
ATTR_BETA_CHANNEL: self.updater.beta_channel,
ATTR_ARCH: self.config.arch,
ATTR_TIMEZONE: self.config.timezone,
ATTR_LAST_VERSION: self.sys_updater.version_hassio,
ATTR_CHANNEL: self.sys_updater.channel,
ATTR_ARCH: self.sys_supervisor.arch,
ATTR_WAIT_BOOT: self.sys_config.wait_boot,
ATTR_TIMEZONE: self.sys_config.timezone,
ATTR_ADDONS: list_addons,
ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories,
}
@api_process
async def options(self, request):
"""Set supervisor options."""
"""Set Supervisor options."""
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_BETA_CHANNEL in body:
self.updater.beta_channel = body[ATTR_BETA_CHANNEL]
if ATTR_CHANNEL in body:
self.sys_updater.channel = body[ATTR_CHANNEL]
if ATTR_TIMEZONE in body:
self.config.timezone = body[ATTR_TIMEZONE]
self.sys_config.timezone = body[ATTR_TIMEZONE]
if ATTR_WAIT_BOOT in body:
self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]
if ATTR_ADDONS_REPOSITORIES in body:
new = set(body[ATTR_ADDONS_REPOSITORIES])
await asyncio.shield(self.addons.load_repositories(new))
await asyncio.shield(self.sys_addons.load_repositories(new))
self.sys_updater.save_data()
self.sys_config.save_data()
return True
@api_process
async def stats(self, request):
"""Return resource information."""
stats = await self.sys_supervisor.stats()
if not stats:
raise APIError("No stats available")
return {
ATTR_CPU_PERCENT: stats.cpu_percent,
ATTR_MEMORY_USAGE: stats.memory_usage,
ATTR_MEMORY_LIMIT: stats.memory_limit,
ATTR_NETWORK_RX: stats.network_rx,
ATTR_NETWORK_TX: stats.network_tx,
ATTR_BLK_READ: stats.blk_read,
ATTR_BLK_WRITE: stats.blk_write,
}
@api_process
async def update(self, request):
"""Update supervisor OS."""
"""Update Supervisor OS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self.updater.version_hassio)
version = body.get(ATTR_VERSION, self.sys_updater.version_hassio)
if version == self.supervisor.version:
raise RuntimeError("Version {} is already in use".format(version))
if version == self.sys_supervisor.version:
raise APIError("Version {} is already in use".format(version))
return await asyncio.shield(
self.supervisor.update(version), loop=self.loop)
return await asyncio.shield(self.sys_supervisor.update(version))
@api_process
async def reload(self, request):
"""Reload addons, config ect."""
"""Reload add-ons, configuration, etc."""
tasks = [
self.addons.reload(),
self.snapshots.reload(),
self.updater.fetch_data(),
self.host_control.load()
self.sys_updater.reload(),
]
results, _ = await asyncio.shield(
asyncio.wait(tasks, loop=self.loop), loop=self.loop)
results, _ = await asyncio.shield(asyncio.wait(tasks))
for result in results:
if result.exception() is not None:
raise RuntimeError("Some reload task fails!")
raise APIError("Some reload task failed!")
return True
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
"""Return supervisor docker logs."""
return self.supervisor.logs()
"""Return supervisor Docker logs."""
return self.sys_supervisor.logs()
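The new stats handler simply repackages the container metrics the Supervisor collects from Docker into the usual JSON answer. A short, assumed client sketch for reading them through the same proxy conventions as above (host name, route and token handling are illustrative):

# Hypothetical query of the Supervisor stats endpoint added above.
import asyncio
import os

import aiohttp

async def show_supervisor_stats():
    headers = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.get("http://hassio/supervisor/stats") as resp:
            envelope = await resp.json()
    stats = envelope.get("data", {})
    # Keys mirror the ATTR_* constants used in the handler.
    print(stats.get("cpu_percent"), stats.get("memory_usage"), stats.get("memory_limit"))

asyncio.run(show_supervisor_stats())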

hassio/api/utils.py
View File

@@ -1,71 +1,51 @@
"""Init file for HassIO util for rest api."""
"""Init file for Hass.io util for RESTful API."""
import json
import hashlib
import logging
from aiohttp import web
from aiohttp.web_exceptions import HTTPServiceUnavailable
import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..const import (
JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
CONTENT_TYPE_BINARY)
from ..exceptions import HassioError, APIError, APIForbidden
_LOGGER = logging.getLogger(__name__)
def json_loads(data):
"""Extract json from string with support for '' and None."""
if not data:
return {}
try:
return json.loads(data)
except json.JSONDecodeError:
return {}
raise APIError("Invalid json")
def api_process(method):
"""Wrap function with true/false calls to rest api."""
async def wrap_api(api, *args, **kwargs):
"""Return api information."""
"""Return API information."""
try:
answer = await method(api, *args, **kwargs)
except RuntimeError as err:
except (APIError, APIForbidden) as err:
return api_return_error(message=str(err))
except HassioError:
return api_return_error(message="Unknown Error, see logs")
if isinstance(answer, dict):
return api_return_ok(data=answer)
if isinstance(answer, web.Response):
return answer
elif answer:
return api_return_ok()
return api_return_error()
elif isinstance(answer, bool) and not answer:
return api_return_error()
return api_return_ok()
return wrap_api
def api_process_hostcontrol(method):
"""Wrap HostControl calls to rest api."""
async def wrap_hostcontrol(api, *args, **kwargs):
"""Return host information."""
if not api.host_control.active:
raise HTTPServiceUnavailable()
try:
answer = await method(api, *args, **kwargs)
except RuntimeError as err:
return api_return_error(message=str(err))
if isinstance(answer, dict):
return api_return_ok(data=answer)
elif answer is None:
return api_return_error("Function is not supported")
elif answer:
return api_return_ok()
return api_return_error()
return wrap_hostcontrol
def api_process_raw(content):
"""Wrap content_type into function."""
def wrap_method(method):
@@ -75,9 +55,12 @@ def api_process_raw(content):
try:
msg_data = await method(api, *args, **kwargs)
msg_type = content
except RuntimeError as err:
except (APIError, APIForbidden) as err:
msg_data = str(err).encode()
msg_type = CONTENT_TYPE_BINARY
except HassioError:
msg_data = b''
msg_type = CONTENT_TYPE_BINARY
return web.Response(body=msg_data, content_type=msg_type)
@@ -86,7 +69,7 @@ def api_process_raw(content):
def api_return_error(message=None):
"""Return a API error message."""
"""Return an API error message."""
return web.json_response({
JSON_RESULT: RESULT_ERROR,
JSON_MESSAGE: message,
@@ -94,7 +77,7 @@ def api_return_error(message=None):
def api_return_ok(data=None):
"""Return a API ok answer."""
"""Return an API ok answer."""
return web.json_response({
JSON_RESULT: RESULT_OK,
JSON_DATA: data or {},
@@ -107,12 +90,6 @@ async def api_validate(schema, request):
try:
data = schema(data)
except vol.Invalid as ex:
raise RuntimeError(humanize_error(data, ex)) from None
raise APIError(humanize_error(data, ex)) from None
return data
def hash_password(password):
"""Hash and salt our passwords."""
key = ")*()*SALT_HASSIO2123{}6554547485HSKA!!*JSLAfdasda$".format(password)
return hashlib.sha256(key.encode()).hexdigest()
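Every handler wrapped by the api_process decorator above answers with one of two JSON envelopes. A minimal sketch of what a client can expect and how it might unwrap the answer (values are illustrative):

# Illustrative only: the envelopes produced by api_return_ok / api_return_error.
ok_envelope = {"result": "ok", "data": {"version": "151"}}
error_envelope = {"result": "error", "message": "Version 151 is already in use"}

def unwrap(envelope):
    """Client-side helper: raise on error answers, return the payload otherwise."""
    if envelope.get("result") != "ok":
        raise RuntimeError(envelope.get("message") or "Unknown error, see logs")
    return envelope.get("data", {})

print(unwrap(ok_envelope))   # {'version': '151'}
# unwrap(error_envelope)     # would raise RuntimeError("Version 151 is already in use")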

hassio/arch.json Normal file (49 lines)
View File

@@ -0,0 +1,49 @@
{
"raspberrypi": [
"armhf"
],
"raspberrypi2": [
"armv7",
"armhf"
],
"raspberrypi3": [
"armv7",
"armhf"
],
"raspberrypi3-64": [
"aarch64",
"armv7",
"armhf"
],
"tinker": [
"armv7",
"armhf"
],
"odroid-c2": [
"aarch64"
],
"odroid-xu": [
"armv7",
"armhf"
],
"orangepi-prime": [
"aarch64"
],
"qemux86": [
"i386"
],
"qemux86-64": [
"amd64",
"i386"
],
"qemuarm": [
"armhf"
],
"qemuarm-64": [
"aarch64"
],
"intel-nuc": [
"amd64",
"i386"
]
}

hassio/arch.py Normal file (65 lines)
View File

@@ -0,0 +1,65 @@
"""Handle Arch for the underlying machine/platform."""
import logging
from typing import List
from pathlib import Path
from .coresys import CoreSysAttributes, CoreSys
from .exceptions import HassioArchNotFound, JsonFileError
from .utils.json import read_json_file
_LOGGER = logging.getLogger(__name__)
class CpuArch(CoreSysAttributes):
"""Manage available architectures."""
def __init__(self, coresys: CoreSys) -> None:
"""Initialize CPU Architecture handler."""
self.coresys = coresys
self._supported_arch: List[str] = []
self._default_arch: str
@property
def default(self) -> str:
"""Return system default arch."""
return self._default_arch
@property
def supervisor(self) -> str:
"""Return supervisor arch."""
return self.sys_supervisor.arch
@property
def supported(self) -> List[str]:
"""Return supported architectures for this CPU/machine."""
return self._supported_arch
async def load(self) -> None:
"""Load data and initialize default arch."""
try:
arch_data = read_json_file(Path(__file__).parent.joinpath("arch.json"))
except JsonFileError:
_LOGGER.warning("Can't read arch json")
return
# Evaluate current CPU/Platform
if not self.sys_machine or self.sys_machine not in arch_data:
_LOGGER.warning("Can't detect underlying machine type!")
self._default_arch = self.sys_supervisor.arch
self._supported_arch.append(self.default)
return
# Use configs from arch.json
self._supported_arch.extend(arch_data[self.sys_machine])
self._default_arch = self.supported[0]
def is_supported(self, arch_list: List[str]) -> bool:
"""Return True if any arch in the list is supported on this platform."""
return not set(self.supported).isdisjoint(set(arch_list))
def match(self, arch_list: List[str]) -> str:
"""Return best match for this CPU/Platform."""
for self_arch in self.supported:
if self_arch in arch_list:
return self_arch
raise HassioArchNotFound()
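match() simply walks the platform's supported list in priority order and returns the first architecture the add-on also ships. A stripped-down sketch of that logic using the raspberrypi3-64 entry from arch.json above:

# Condensed restatement of CpuArch.match / is_supported for one machine.
from typing import List

supported: List[str] = ["aarch64", "armv7", "armhf"]  # raspberrypi3-64 in arch.json

def is_supported(arch_list: List[str]) -> bool:
    return not set(supported).isdisjoint(arch_list)

def match(arch_list: List[str]) -> str:
    for arch in supported:          # ordered: best native arch first
        if arch in arch_list:
            return arch
    raise LookupError("No supported arch found")

print(match(["armv7", "armhf"]))    # -> "armv7": add-on image without an aarch64 build
print(is_supported(["amd64"]))      # -> False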

hassio/auth.py Normal file (95 lines)
View File

@@ -0,0 +1,95 @@
"""Manage SSO for Add-ons with Home Assistant user."""
import logging
import hashlib
from .const import (
FILE_HASSIO_AUTH, ATTR_PASSWORD, ATTR_USERNAME, ATTR_ADDON)
from .coresys import CoreSysAttributes
from .utils.json import JsonConfig
from .validate import SCHEMA_AUTH_CONFIG
from .exceptions import AuthError, HomeAssistantAPIError
_LOGGER = logging.getLogger(__name__)
class Auth(JsonConfig, CoreSysAttributes):
"""Manage SSO for Add-ons with Home Assistant user."""
def __init__(self, coresys):
"""Initialize Hass.io authentication."""
super().__init__(FILE_HASSIO_AUTH, SCHEMA_AUTH_CONFIG)
self.coresys = coresys
def _check_cache(self, username, password):
"""Check password in cache."""
username_h = _rehash(username)
password_h = _rehash(password, username)
if self._data.get(username_h) == password_h:
_LOGGER.info("Cache hit for %s", username)
return True
_LOGGER.warning("No cache hit for %s", username)
return False
def _update_cache(self, username, password):
"""Cache a username, password."""
username_h = _rehash(username)
password_h = _rehash(password, username)
if self._data.get(username_h) == password_h:
return
self._data[username_h] = password_h
self.save_data()
def _dismatch_cache(self, username, password):
"""Remove user from cache."""
username_h = _rehash(username)
password_h = _rehash(password, username)
if self._data.get(username_h) != password_h:
return
self._data.pop(username_h, None)
self.save_data()
async def check_login(self, addon, username, password):
"""Check username login."""
if password is None:
_LOGGER.error("None as password is not supported!")
raise AuthError()
_LOGGER.info("Auth request from %s for %s", addon.slug, username)
# Check API state
if not await self.sys_homeassistant.check_api_state():
_LOGGER.info("Home Assistant not running, check cache")
return self._check_cache(username, password)
try:
async with self.sys_homeassistant.make_request(
'post', 'api/hassio_auth', json={
ATTR_USERNAME: username,
ATTR_PASSWORD: password,
ATTR_ADDON: addon.slug,
}) as req:
if req.status == 200:
_LOGGER.info("Success login from %s", username)
self._update_cache(username, password)
return True
_LOGGER.warning("Wrong login from %s", username)
self._dismatch_cache(username, password)
return False
except HomeAssistantAPIError:
_LOGGER.error("Can't request auth on Home Assistant!")
raise AuthError()
def _rehash(value, salt2=""):
"""Rehash a value."""
for idx in range(1, 20):
value = hashlib.sha256(f"{value}{idx}{salt2}".encode()).hexdigest()
return value
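The cache never stores raw credentials: both the lookup key and the stored value go through the iterated SHA-256 rehash. A small sketch of what ends up in auth.json (the credentials below are made up, purely for illustration):

import hashlib

def rehash(value, salt2=""):
    """Same iterated SHA-256 scheme as _rehash above."""
    for idx in range(1, 20):
        value = hashlib.sha256(f"{value}{idx}{salt2}".encode()).hexdigest()
    return value

username, password = "observer", "correct horse battery staple"
cache_key = rehash(username)
cache_value = rehash(password, username)
# auth.json only ever sees these two digests; check_login recomputes and compares them.
print(cache_key[:16], "->", cache_value[:16])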

hassio/bootstrap.py
View File

@@ -1,68 +1,127 @@
"""Bootstrap HassIO."""
"""Bootstrap Hass.io."""
import logging
import os
import signal
from pathlib import Path
import shutil
import signal
from colorlog import ColoredFormatter
from .addons import AddonManager
from .api import RestAPI
from .arch import CpuArch
from .auth import Auth
from .const import SOCKET_DOCKER
from .config import CoreConfig
from .core import HassIO
from .coresys import CoreSys
from .dbus import DBusManager
from .discovery import Discovery
from .hassos import HassOS
from .homeassistant import HomeAssistant
from .host import HostManager
from .services import ServiceManager
from .snapshots import SnapshotManager
from .supervisor import Supervisor
from .tasks import Tasks
from .updater import Updater
_LOGGER = logging.getLogger(__name__)
ENV_SHARE = "SUPERVISOR_SHARE"
ENV_NAME = "SUPERVISOR_NAME"
ENV_REPO = "HOMEASSISTANT_REPOSITORY"
def initialize_system_data():
"""Setup default config and create folders."""
config = CoreConfig()
MACHINE_ID = Path("/etc/machine-id")
# homeassistant config folder
if not config.path_config.is_dir():
_LOGGER.info(
"Create Home-Assistant config folder %s", config.path_config)
config.path_config.mkdir()
async def initialize_coresys():
"""Initialize HassIO coresys/objects."""
coresys = CoreSys()
# Initialize core objects
coresys.core = HassIO(coresys)
coresys.arch = CpuArch(coresys)
coresys.auth = Auth(coresys)
coresys.updater = Updater(coresys)
coresys.api = RestAPI(coresys)
coresys.supervisor = Supervisor(coresys)
coresys.homeassistant = HomeAssistant(coresys)
coresys.addons = AddonManager(coresys)
coresys.snapshots = SnapshotManager(coresys)
coresys.host = HostManager(coresys)
coresys.tasks = Tasks(coresys)
coresys.services = ServiceManager(coresys)
coresys.discovery = Discovery(coresys)
coresys.dbus = DBusManager(coresys)
coresys.hassos = HassOS(coresys)
# bootstrap config
initialize_system_data(coresys)
# Set Machine/Host ID
if MACHINE_ID.exists():
coresys.machine_id = MACHINE_ID.read_text().strip()
return coresys
def initialize_system_data(coresys):
"""Set up the default configuration and create folders."""
config = coresys.config
# Home Assistant configuration folder
if not config.path_homeassistant.is_dir():
_LOGGER.info("Create Home Assistant configuration folder %s",
config.path_homeassistant)
config.path_homeassistant.mkdir()
# hassio ssl folder
if not config.path_ssl.is_dir():
_LOGGER.info("Create hassio ssl folder %s", config.path_ssl)
_LOGGER.info("Create Hass.io SSL/TLS folder %s", config.path_ssl)
config.path_ssl.mkdir()
# hassio addon data folder
if not config.path_addons_data.is_dir():
_LOGGER.info(
"Create hassio addon data folder %s", config.path_addons_data)
_LOGGER.info("Create Hass.io Add-on data folder %s",
config.path_addons_data)
config.path_addons_data.mkdir(parents=True)
if not config.path_addons_local.is_dir():
_LOGGER.info("Create hassio addon local repository folder %s",
_LOGGER.info("Create Hass.io Add-on local repository folder %s",
config.path_addons_local)
config.path_addons_local.mkdir(parents=True)
if not config.path_addons_git.is_dir():
_LOGGER.info("Create hassio addon git repositories folder %s",
_LOGGER.info("Create Hass.io Add-on git repositories folder %s",
config.path_addons_git)
config.path_addons_git.mkdir(parents=True)
# hassio tmp folder
if not config.path_tmp.is_dir():
_LOGGER.info("Create hassio temp folder %s", config.path_tmp)
_LOGGER.info("Create Hass.io temp folder %s", config.path_tmp)
config.path_tmp.mkdir(parents=True)
# hassio backup folder
if not config.path_backup.is_dir():
_LOGGER.info("Create hassio backup folder %s", config.path_backup)
_LOGGER.info("Create Hass.io backup folder %s", config.path_backup)
config.path_backup.mkdir()
# share folder
if not config.path_share.is_dir():
_LOGGER.info("Create hassio share folder %s", config.path_share)
_LOGGER.info("Create Hass.io share folder %s", config.path_share)
config.path_share.mkdir()
# apparmor folder
if not config.path_apparmor.is_dir():
_LOGGER.info("Create Hass.io Apparmor folder %s", config.path_apparmor)
config.path_apparmor.mkdir()
return config
def migrate_system_env(config):
def migrate_system_env(coresys):
"""Cleanup some stuff after update."""
config = coresys.config
# hass.io 0.37 -> 0.38
old_build = Path(config.path_hassio, "addons/build")
@@ -70,67 +129,78 @@ def migrate_system_env(config):
try:
old_build.rmdir()
except OSError:
_LOGGER.warning("Can't cleanup old addons build dir.")
_LOGGER.warning("Can't cleanup old Add-on build directory")
def initialize_logging():
"""Setup the logging."""
logging.basicConfig(level=logging.INFO)
fmt = ("%(asctime)s %(levelname)s (%(threadName)s) "
"[%(name)s] %(message)s")
colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
datefmt = '%y-%m-%d %H:%M:%S'
fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
colorfmt = f"%(log_color)s{fmt}%(reset)s"
datefmt = "%y-%m-%d %H:%M:%S"
# suppress overly verbose logs from libraries that aren't helpful
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
colorfmt,
datefmt=datefmt,
reset=True,
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red',
}
))
logging.getLogger().handlers[0].setFormatter(
ColoredFormatter(
colorfmt,
datefmt=datefmt,
reset=True,
log_colors={
"DEBUG": "cyan",
"INFO": "green",
"WARNING": "yellow",
"ERROR": "red",
"CRITICAL": "red",
},
))
def check_environment():
"""Check if all needed environment variables exist."""
for key in ('SUPERVISOR_SHARE', 'SUPERVISOR_NAME',
'HOMEASSISTANT_REPOSITORY'):
# check environment variables
for key in (ENV_SHARE, ENV_NAME, ENV_REPO):
try:
os.environ[key]
except KeyError:
_LOGGER.fatal("Can't find %s in env!", key)
return False
# check docker socket
if not SOCKET_DOCKER.is_socket():
_LOGGER.fatal("Can't find docker socket!")
_LOGGER.fatal("Can't find Docker socket!")
return False
# check socat exec
if not shutil.which("socat"):
_LOGGER.fatal("Can't find socat!")
return False
# check gdbus exec
if not shutil.which("gdbus"):
_LOGGER.fatal("Can't find gdbus!")
return False
return True
def reg_signal(loop, hassio):
"""Register SIGTERM, SIGKILL to stop system."""
def reg_signal(loop):
"""Register SIGTERM, SIGHUP and SIGINT to stop system."""
try:
loop.add_signal_handler(
signal.SIGTERM, lambda: loop.create_task(hassio.stop()))
loop.add_signal_handler(signal.SIGTERM,
lambda: loop.call_soon(loop.stop))
except (ValueError, RuntimeError):
_LOGGER.warning("Could not bind to SIGTERM")
try:
loop.add_signal_handler(
signal.SIGHUP, lambda: loop.create_task(hassio.stop()))
loop.add_signal_handler(signal.SIGHUP,
lambda: loop.call_soon(loop.stop))
except (ValueError, RuntimeError):
_LOGGER.warning("Could not bind to SIGHUP")
try:
loop.add_signal_handler(
signal.SIGINT, lambda: loop.create_task(hassio.stop()))
loop.add_signal_handler(signal.SIGINT,
lambda: loop.call_soon(loop.stop))
except (ValueError, RuntimeError):
_LOGGER.warning("Could not bind to SIGINT")
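Taken together, these helpers describe the start-up path: build the CoreSys object graph, validate the environment, wire signal handlers to the loop and hand control to the core. A rough sketch of an entry point using them follows; the real hassio.__main__ module is not part of this diff, so treat the exact sequence as an assumption:

# Hypothetical entry point showing how the bootstrap helpers fit together.
import asyncio
import sys

from hassio import bootstrap

def main():
    bootstrap.initialize_logging()
    if not bootstrap.check_environment():
        sys.exit(1)

    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)

    # CoreSys reads the running loop, so it has to be created from within one.
    coresys = loop.run_until_complete(bootstrap.initialize_coresys())
    bootstrap.migrate_system_env(coresys)

    bootstrap.reg_signal(loop)                      # SIGTERM/SIGHUP/SIGINT stop the loop
    loop.run_until_complete(coresys.core.setup())
    loop.create_task(coresys.core.start())
    loop.run_forever()                              # serve until a signal arrives
    loop.run_until_complete(coresys.core.stop())
    loop.close()

if __name__ == "__main__":
    main()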

hassio/config.py
View File

@@ -1,21 +1,22 @@
"""Bootstrap HassIO."""
"""Bootstrap Hass.io."""
from datetime import datetime
import logging
import os
import re
from pathlib import Path, PurePath
import pytz
from .const import (
FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_SECURITY, ATTR_SESSIONS,
ATTR_PASSWORD, ATTR_TOTP, ATTR_TIMEZONE, ATTR_API_ENDPOINT,
ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT)
from .tools import JsonConfig
FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
from .utils.dt import parse_datetime
from .utils.json import JsonConfig
from .validate import SCHEMA_HASSIO_CONFIG
_LOGGER = logging.getLogger(__name__)
DATETIME_FORMAT = "%Y%m%d %H:%M:%S"
HOMEASSISTANT_CONFIG = PurePath("homeassistant")
HOMEASSISTANT_CONFIG = PurePath('homeassistant')
HASSIO_SSL = PurePath("ssl")
@@ -27,6 +28,11 @@ ADDONS_DATA = PurePath("addons/data")
BACKUP_DATA = PurePath("backup")
SHARE_DATA = PurePath("share")
TMP_DATA = PurePath("tmp")
APPARMOR_DATA = PurePath("apparmor")
DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()
RE_TIMEZONE = re.compile(r"time_zone: (?P<timezone>[\w/\-+]+)")
class CoreConfig(JsonConfig):
@@ -35,52 +41,79 @@ class CoreConfig(JsonConfig):
def __init__(self):
"""Initialize config object."""
super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)
self.arch = None
@property
def api_endpoint(self):
"""Return IP address of api endpoint."""
return self._data[ATTR_API_ENDPOINT]
@api_endpoint.setter
def api_endpoint(self, value):
"""Store IP address of api endpoint."""
self._data[ATTR_API_ENDPOINT] = value
@property
def timezone(self):
"""Return system timezone."""
return self._data[ATTR_TIMEZONE]
config_file = Path(self.path_homeassistant, 'configuration.yaml')
try:
assert config_file.exists()
configuration = config_file.read_text()
data = RE_TIMEZONE.search(configuration)
assert data
timezone = data.group('timezone')
pytz.timezone(timezone)
except (pytz.exceptions.UnknownTimeZoneError, OSError, AssertionError):
_LOGGER.debug("Can't parse Home Assistant timezone")
return self._data[ATTR_TIMEZONE]
return timezone
@timezone.setter
def timezone(self, value):
"""Set system timezone."""
self._data[ATTR_TIMEZONE] = value
self.save()
@property
def wait_boot(self):
"""Return wait time for auto boot stages."""
return self._data[ATTR_WAIT_BOOT]
@wait_boot.setter
def wait_boot(self, value):
"""Set wait boot time."""
self._data[ATTR_WAIT_BOOT] = value
@property
def last_boot(self):
"""Return last boot datetime."""
boot_str = self._data.get(ATTR_LAST_BOOT, DEFAULT_BOOT_TIME)
boot_time = parse_datetime(boot_str)
if not boot_time:
return datetime.utcfromtimestamp(1)
return boot_time
@last_boot.setter
def last_boot(self, value):
"""Set last boot datetime."""
self._data[ATTR_LAST_BOOT] = value.isoformat()
@property
def path_hassio(self):
"""Return hassio data path."""
"""Return Hass.io data path."""
return HASSIO_DATA
@property
def path_extern_hassio(self):
"""Return hassio data path extern for docker."""
"""Return Hass.io data path external for Docker."""
return PurePath(os.environ['SUPERVISOR_SHARE'])
@property
def path_extern_config(self):
"""Return config path extern for docker."""
def path_extern_homeassistant(self):
"""Return config path external for Docker."""
return str(PurePath(self.path_extern_hassio, HOMEASSISTANT_CONFIG))
@property
def path_config(self):
def path_homeassistant(self):
"""Return config path inside supervisor."""
return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)
@property
def path_extern_ssl(self):
"""Return SSL path extern for docker."""
"""Return SSL path external for Docker."""
return str(PurePath(self.path_extern_hassio, HASSIO_SSL))
@property
@@ -90,39 +123,44 @@ class CoreConfig(JsonConfig):
@property
def path_addons_core(self):
"""Return git path for core addons."""
"""Return git path for core Add-ons."""
return Path(HASSIO_DATA, ADDONS_CORE)
@property
def path_addons_git(self):
"""Return path for git addons."""
"""Return path for Git Add-on."""
return Path(HASSIO_DATA, ADDONS_GIT)
@property
def path_addons_local(self):
"""Return path for customs addons."""
"""Return path for custom Add-ons."""
return Path(HASSIO_DATA, ADDONS_LOCAL)
@property
def path_extern_addons_local(self):
"""Return path for customs addons."""
"""Return path for custom Add-ons."""
return PurePath(self.path_extern_hassio, ADDONS_LOCAL)
@property
def path_addons_data(self):
"""Return root addon data folder."""
"""Return root Add-on data folder."""
return Path(HASSIO_DATA, ADDONS_DATA)
@property
def path_extern_addons_data(self):
"""Return root addon data folder extern for docker."""
"""Return root add-on data folder external for Docker."""
return PurePath(self.path_extern_hassio, ADDONS_DATA)
@property
def path_tmp(self):
"""Return hass.io temp folder."""
"""Return Hass.io temp folder."""
return Path(HASSIO_DATA, TMP_DATA)
@property
def path_extern_tmp(self):
"""Return Hass.io temp folder for Docker."""
return PurePath(self.path_extern_hassio, TMP_DATA)
@property
def path_backup(self):
"""Return root backup data folder."""
@@ -130,7 +168,7 @@ class CoreConfig(JsonConfig):
@property
def path_extern_backup(self):
"""Return root backup data folder extern for docker."""
"""Return root backup data folder external for Docker."""
return PurePath(self.path_extern_hassio, BACKUP_DATA)
@property
@@ -138,14 +176,19 @@ class CoreConfig(JsonConfig):
"""Return root share data folder."""
return Path(HASSIO_DATA, SHARE_DATA)
@property
def path_apparmor(self):
"""Return root Apparmor profile folder."""
return Path(HASSIO_DATA, APPARMOR_DATA)
@property
def path_extern_share(self):
"""Return root share data folder extern for docker."""
"""Return root share data folder external for Docker."""
return PurePath(self.path_extern_hassio, SHARE_DATA)
@property
def addons_repositories(self):
"""Return list of addons custom repositories."""
"""Return list of custom Add-on repositories."""
return self._data[ATTR_ADDONS_CUSTOM_LIST]
def add_addon_repository(self, repo):
@@ -154,7 +197,6 @@ class CoreConfig(JsonConfig):
return
self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
self.save()
def drop_addon_repository(self, repo):
"""Remove a custom repository from list."""
@@ -162,79 +204,3 @@ class CoreConfig(JsonConfig):
return
self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
self.save()
@property
def security_initialize(self):
"""Return is security was initialize."""
return self._data[ATTR_SECURITY]
@security_initialize.setter
def security_initialize(self, value):
"""Set is security initialize."""
self._data[ATTR_SECURITY] = value
self.save()
@property
def security_totp(self):
"""Return the TOTP key."""
return self._data.get(ATTR_TOTP)
@security_totp.setter
def security_totp(self, value):
"""Set the TOTP key."""
self._data[ATTR_TOTP] = value
self.save()
@property
def security_password(self):
"""Return the password key."""
return self._data.get(ATTR_PASSWORD)
@security_password.setter
def security_password(self, value):
"""Set the password key."""
self._data[ATTR_PASSWORD] = value
self.save()
@property
def security_sessions(self):
"""Return api sessions."""
return {
session: datetime.strptime(until, DATETIME_FORMAT) for
session, until in self._data[ATTR_SESSIONS].items()
}
def add_security_session(self, session, valid):
"""Set the a new session."""
self._data[ATTR_SESSIONS].update(
{session: valid.strftime(DATETIME_FORMAT)}
)
self.save()
def drop_security_session(self, session):
"""Delete the a session."""
self._data[ATTR_SESSIONS].pop(session, None)
self.save()
@property
def audio_output(self):
"""Return ALSA audio output card,dev."""
return self._data.get(ATTR_AUDIO_OUTPUT)
@audio_output.setter
def audio_output(self, value):
"""Set ALSA audio output card,dev."""
self._data[ATTR_AUDIO_OUTPUT] = value
self.save()
@property
def audio_input(self):
"""Return ALSA audio input card,dev."""
return self._data.get(ATTR_AUDIO_INPUT)
@audio_input.setter
def audio_input(self, value):
"""Set ALSA audio input card,dev."""
self._data[ATTR_AUDIO_INPUT] = value
self.save()

hassio/const.py
View File

@@ -1,150 +1,290 @@
"""Const file for HassIO."""
"""Constants file for Hass.io."""
from pathlib import Path
from ipaddress import ip_network
HASSIO_VERSION = '0.57'
HASSIO_VERSION = "151"
URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
'hassio/{}/version.json')
URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"
URL_HASSIO_APPARMOR = "https://s3.amazonaws.com/hassio-version/apparmor.txt"
URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
URL_HASSOS_OTA = (
"https://github.com/home-assistant/hassos/releases/download/"
"{version}/hassos_{board}-{version}.raucb"
)
HASSIO_DATA = Path("/data")
RUN_UPDATE_INFO_TASKS = 28800
RUN_UPDATE_SUPERVISOR_TASKS = 29100
RUN_UPDATE_ADDONS_TASKS = 57600
RUN_RELOAD_ADDONS_TASKS = 28800
RUN_RELOAD_SNAPSHOTS_TASKS = 72000
RUN_WATCHDOG_HOMEASSISTANT = 15
RUN_CLEANUP_API_SESSIONS = 900
RESTART_EXIT_CODE = 100
FILE_HASSIO_AUTH = Path(HASSIO_DATA, "auth.json")
FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")
FILE_HASSIO_DISCOVERY = Path(HASSIO_DATA, "discovery.json")
SOCKET_DOCKER = Path("/var/run/docker.sock")
SOCKET_HC = Path("/var/run/hassio-hc.sock")
LABEL_VERSION = 'io.hass.version'
LABEL_ARCH = 'io.hass.arch'
LABEL_TYPE = 'io.hass.type'
DOCKER_NETWORK = "hassio"
DOCKER_NETWORK_MASK = ip_network("172.30.32.0/23")
DOCKER_NETWORK_RANGE = ip_network("172.30.33.0/24")
META_ADDON = 'addon'
META_SUPERVISOR = 'supervisor'
META_HOMEASSISTANT = 'homeassistant'
LABEL_VERSION = "io.hass.version"
LABEL_ARCH = "io.hass.arch"
LABEL_TYPE = "io.hass.type"
LABEL_MACHINE = "io.hass.machine"
JSON_RESULT = 'result'
JSON_DATA = 'data'
JSON_MESSAGE = 'message'
META_ADDON = "addon"
META_SUPERVISOR = "supervisor"
META_HOMEASSISTANT = "homeassistant"
RESULT_ERROR = 'error'
RESULT_OK = 'ok'
JSON_RESULT = "result"
JSON_DATA = "data"
JSON_MESSAGE = "message"
CONTENT_TYPE_BINARY = 'application/octet-stream'
CONTENT_TYPE_PNG = 'image/png'
RESULT_ERROR = "error"
RESULT_OK = "ok"
ATTR_DATE = 'date'
ATTR_ARCH = 'arch'
ATTR_HOSTNAME = 'hostname'
ATTR_TIMEZONE = 'timezone'
ATTR_OS = 'os'
ATTR_TYPE = 'type'
ATTR_SOURCE = 'source'
ATTR_FEATURES = 'features'
ATTR_ADDONS = 'addons'
ATTR_VERSION = 'version'
ATTR_LAST_VERSION = 'last_version'
ATTR_BETA_CHANNEL = 'beta_channel'
ATTR_NAME = 'name'
ATTR_SLUG = 'slug'
ATTR_DESCRIPTON = 'description'
ATTR_STARTUP = 'startup'
ATTR_BOOT = 'boot'
ATTR_PORTS = 'ports'
ATTR_MAP = 'map'
ATTR_WEBUI = 'webui'
ATTR_OPTIONS = 'options'
ATTR_INSTALLED = 'installed'
ATTR_DETACHED = 'detached'
ATTR_STATE = 'state'
ATTR_SCHEMA = 'schema'
ATTR_IMAGE = 'image'
ATTR_LOGO = 'logo'
ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
ATTR_REPOSITORY = 'repository'
ATTR_REPOSITORIES = 'repositories'
ATTR_URL = 'url'
ATTR_MAINTAINER = 'maintainer'
ATTR_PASSWORD = 'password'
ATTR_TOTP = 'totp'
ATTR_INITIALIZE = 'initialize'
ATTR_SESSION = 'session'
ATTR_SESSIONS = 'sessions'
ATTR_LOCATON = 'location'
ATTR_BUILD = 'build'
ATTR_DEVICES = 'devices'
ATTR_ENVIRONMENT = 'environment'
ATTR_HOST_NETWORK = 'host_network'
ATTR_NETWORK = 'network'
ATTR_TMPFS = 'tmpfs'
ATTR_PRIVILEGED = 'privileged'
ATTR_USER = 'user'
ATTR_SYSTEM = 'system'
ATTR_SNAPSHOTS = 'snapshots'
ATTR_HOMEASSISTANT = 'homeassistant'
ATTR_HASSIO = 'hassio'
ATTR_HASSIO_API = 'hassio_api'
ATTR_FOLDERS = 'folders'
ATTR_SIZE = 'size'
ATTR_TYPE = 'type'
ATTR_TIMEOUT = 'timeout'
ATTR_AUTO_UPDATE = 'auto_update'
ATTR_CUSTOM = 'custom'
ATTR_AUDIO = 'audio'
ATTR_AUDIO_INPUT = 'audio_input'
ATTR_AUDIO_OUTPUT = 'audio_output'
ATTR_INPUT = 'input'
ATTR_OUTPUT = 'output'
ATTR_DISK = 'disk'
ATTR_SERIAL = 'serial'
ATTR_SECURITY = 'security'
ATTR_API_ENDPOINT = 'api_endpoint'
ATTR_ADDONS_CUSTOM_LIST = 'addons_custom_list'
CONTENT_TYPE_BINARY = "application/octet-stream"
CONTENT_TYPE_PNG = "image/png"
CONTENT_TYPE_JSON = "application/json"
CONTENT_TYPE_TEXT = "text/plain"
CONTENT_TYPE_TAR = "application/tar"
CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
HEADER_HA_ACCESS = "x-ha-access"
HEADER_TOKEN = "x-hassio-key"
STARTUP_INITIALIZE = 'initialize'
STARTUP_SYSTEM = 'system'
STARTUP_SERVICES = 'services'
STARTUP_APPLICATION = 'application'
STARTUP_ONCE = 'once'
ENV_TOKEN = "HASSIO_TOKEN"
ENV_TIME = "TZ"
BOOT_AUTO = 'auto'
BOOT_MANUAL = 'manual'
REQUEST_FROM = "HASSIO_FROM"
STATE_STARTED = 'started'
STATE_STOPPED = 'stopped'
STATE_NONE = 'none'
ATTR_MACHINE = "machine"
ATTR_WAIT_BOOT = "wait_boot"
ATTR_DEPLOYMENT = "deployment"
ATTR_WATCHDOG = "watchdog"
ATTR_CHANGELOG = "changelog"
ATTR_DATE = "date"
ATTR_ARCH = "arch"
ATTR_LONG_DESCRIPTION = "long_description"
ATTR_HOSTNAME = "hostname"
ATTR_TIMEZONE = "timezone"
ATTR_ARGS = "args"
ATTR_OPERATING_SYSTEM = "operating_system"
ATTR_CHASSIS = "chassis"
ATTR_TYPE = "type"
ATTR_SOURCE = "source"
ATTR_FEATURES = "features"
ATTR_ADDONS = "addons"
ATTR_PROVIDERS = "providers"
ATTR_VERSION = "version"
ATTR_VERSION_LATEST = "version_latest"
ATTR_AUTO_UART = "auto_uart"
ATTR_LAST_BOOT = "last_boot"
ATTR_LAST_VERSION = "last_version"
ATTR_CHANNEL = "channel"
ATTR_NAME = "name"
ATTR_SLUG = "slug"
ATTR_DESCRIPTON = "description"
ATTR_STARTUP = "startup"
ATTR_BOOT = "boot"
ATTR_PORTS = "ports"
ATTR_PORT = "port"
ATTR_SSL = "ssl"
ATTR_MAP = "map"
ATTR_WEBUI = "webui"
ATTR_OPTIONS = "options"
ATTR_INSTALLED = "installed"
ATTR_DETACHED = "detached"
ATTR_STATE = "state"
ATTR_SCHEMA = "schema"
ATTR_IMAGE = "image"
ATTR_ICON = "icon"
ATTR_LOGO = "logo"
ATTR_STDIN = "stdin"
ATTR_ADDONS_REPOSITORIES = "addons_repositories"
ATTR_REPOSITORY = "repository"
ATTR_REPOSITORIES = "repositories"
ATTR_URL = "url"
ATTR_MAINTAINER = "maintainer"
ATTR_PASSWORD = "password"
ATTR_TOTP = "totp"
ATTR_INITIALIZE = "initialize"
ATTR_LOCATON = "location"
ATTR_BUILD = "build"
ATTR_DEVICES = "devices"
ATTR_ENVIRONMENT = "environment"
ATTR_HOST_NETWORK = "host_network"
ATTR_HOST_PID = "host_pid"
ATTR_HOST_IPC = "host_ipc"
ATTR_HOST_DBUS = "host_dbus"
ATTR_NETWORK = "network"
ATTR_TMPFS = "tmpfs"
ATTR_PRIVILEGED = "privileged"
ATTR_USER = "user"
ATTR_SYSTEM = "system"
ATTR_SNAPSHOTS = "snapshots"
ATTR_HOMEASSISTANT = "homeassistant"
ATTR_HASSIO = "hassio"
ATTR_HASSIO_API = "hassio_api"
ATTR_HOMEASSISTANT_API = "homeassistant_api"
ATTR_UUID = "uuid"
ATTR_FOLDERS = "folders"
ATTR_SIZE = "size"
ATTR_TYPE = "type"
ATTR_TIMEOUT = "timeout"
ATTR_AUTO_UPDATE = "auto_update"
ATTR_CUSTOM = "custom"
ATTR_AUDIO = "audio"
ATTR_AUDIO_INPUT = "audio_input"
ATTR_AUDIO_OUTPUT = "audio_output"
ATTR_INPUT = "input"
ATTR_OUTPUT = "output"
ATTR_DISK = "disk"
ATTR_SERIAL = "serial"
ATTR_SECURITY = "security"
ATTR_BUILD_FROM = "build_from"
ATTR_SQUASH = "squash"
ATTR_GPIO = "gpio"
ATTR_LEGACY = "legacy"
ATTR_ADDONS_CUSTOM_LIST = "addons_custom_list"
ATTR_CPU_PERCENT = "cpu_percent"
ATTR_NETWORK_RX = "network_rx"
ATTR_NETWORK_TX = "network_tx"
ATTR_MEMORY_LIMIT = "memory_limit"
ATTR_MEMORY_USAGE = "memory_usage"
ATTR_BLK_READ = "blk_read"
ATTR_BLK_WRITE = "blk_write"
ATTR_ADDON = "addon"
ATTR_AVAILABLE = "available"
ATTR_HOST = "host"
ATTR_USERNAME = "username"
ATTR_DISCOVERY = "discovery"
ATTR_CONFIG = "config"
ATTR_SERVICES = "services"
ATTR_SERVICE = "service"
ATTR_DISCOVERY = "discovery"
ATTR_PROTECTED = "protected"
ATTR_CRYPTO = "crypto"
ATTR_BRANCH = "branch"
ATTR_KERNEL = "kernel"
ATTR_APPARMOR = "apparmor"
ATTR_DEVICETREE = "devicetree"
ATTR_CPE = "cpe"
ATTR_BOARD = "board"
ATTR_HASSOS = "hassos"
ATTR_HASSOS_CLI = "hassos_cli"
ATTR_VERSION_CLI = "version_cli"
ATTR_VERSION_CLI_LATEST = "version_cli_latest"
ATTR_REFRESH_TOKEN = "refresh_token"
ATTR_ACCESS_TOKEN = "access_token"
ATTR_DOCKER_API = "docker_api"
ATTR_FULL_ACCESS = "full_access"
ATTR_PROTECTED = "protected"
ATTR_RATING = "rating"
ATTR_HASSIO_ROLE = "hassio_role"
ATTR_SUPERVISOR = "supervisor"
ATTR_AUTH_API = "auth_api"
ATTR_KERNEL_MODULES = "kernel_modules"
ATTR_SUPPORTED_ARCH = "supported_arch"
MAP_CONFIG = 'config'
MAP_SSL = 'ssl'
MAP_ADDONS = 'addons'
MAP_BACKUP = 'backup'
MAP_SHARE = 'share'
PROVIDE_SERVICE = "provide"
NEED_SERVICE = "need"
WANT_SERVICE = "want"
ARCH_ARMHF = 'armhf'
ARCH_AARCH64 = 'aarch64'
ARCH_AMD64 = 'amd64'
ARCH_I386 = 'i386'
STARTUP_INITIALIZE = "initialize"
STARTUP_SYSTEM = "system"
STARTUP_SERVICES = "services"
STARTUP_APPLICATION = "application"
STARTUP_ONCE = "once"
REPOSITORY_CORE = 'core'
REPOSITORY_LOCAL = 'local'
STARTUP_ALL = [
STARTUP_ONCE,
STARTUP_INITIALIZE,
STARTUP_SYSTEM,
STARTUP_SERVICES,
STARTUP_APPLICATION,
]
FOLDER_HOMEASSISTANT = 'homeassistant'
FOLDER_SHARE = 'share'
FOLDER_ADDONS = 'addons/local'
FOLDER_SSL = 'ssl'
BOOT_AUTO = "auto"
BOOT_MANUAL = "manual"
SNAPSHOT_FULL = 'full'
SNAPSHOT_PARTIAL = 'partial'
STATE_STARTED = "started"
STATE_STOPPED = "stopped"
STATE_NONE = "none"
MAP_CONFIG = "config"
MAP_SSL = "ssl"
MAP_ADDONS = "addons"
MAP_BACKUP = "backup"
MAP_SHARE = "share"
ARCH_ARMHF = "armhf"
ARCH_ARMV7 = "armv7"
ARCH_AARCH64 = "aarch64"
ARCH_AMD64 = "amd64"
ARCH_I386 = "i386"
ARCH_ALL = [ARCH_ARMHF, ARCH_ARMV7, ARCH_AARCH64, ARCH_AMD64, ARCH_I386]
CHANNEL_STABLE = "stable"
CHANNEL_BETA = "beta"
CHANNEL_DEV = "dev"
REPOSITORY_CORE = "core"
REPOSITORY_LOCAL = "local"
FOLDER_HOMEASSISTANT = "homeassistant"
FOLDER_SHARE = "share"
FOLDER_ADDONS = "addons/local"
FOLDER_SSL = "ssl"
SNAPSHOT_FULL = "full"
SNAPSHOT_PARTIAL = "partial"
CRYPTO_AES128 = "aes128"
SECURITY_PROFILE = "profile"
SECURITY_DEFAULT = "default"
SECURITY_DISABLE = "disable"
PRIVILEGED_NET_ADMIN = "NET_ADMIN"
PRIVILEGED_SYS_ADMIN = "SYS_ADMIN"
PRIVILEGED_SYS_RAWIO = "SYS_RAWIO"
PRIVILEGED_IPC_LOCK = "IPC_LOCK"
PRIVILEGED_SYS_TIME = "SYS_TIME"
PRIVILEGED_SYS_NICE = "SYS_NICE"
PRIVILEGED_SYS_MODULE = "SYS_MODULE"
PRIVILEGED_SYS_RESOURCE = "SYS_RESOURCE"
PRIVILEGED_SYS_PTRACE = "SYS_PTRACE"
PRIVILEGED_DAC_READ_SEARCH = "DAC_READ_SEARCH"
PRIVILEGED_ALL = [
PRIVILEGED_NET_ADMIN,
PRIVILEGED_SYS_ADMIN,
PRIVILEGED_SYS_RAWIO,
PRIVILEGED_IPC_LOCK,
PRIVILEGED_SYS_TIME,
PRIVILEGED_SYS_NICE,
PRIVILEGED_SYS_RESOURCE,
PRIVILEGED_SYS_PTRACE,
PRIVILEGED_SYS_MODULE,
PRIVILEGED_DAC_READ_SEARCH,
]
FEATURES_SHUTDOWN = "shutdown"
FEATURES_REBOOT = "reboot"
FEATURES_HASSOS = "hassos"
FEATURES_HOSTNAME = "hostname"
FEATURES_SERVICES = "services"
ROLE_DEFAULT = "default"
ROLE_HOMEASSISTANT = "homeassistant"
ROLE_BACKUP = "backup"
ROLE_MANAGER = "manager"
ROLE_ADMIN = "admin"
ROLE_ALL = [ROLE_DEFAULT, ROLE_HOMEASSISTANT, ROLE_BACKUP, ROLE_MANAGER, ROLE_ADMIN]
CHAN_ID = "chan_id"
CHAN_TYPE = "chan_type"

hassio/core.py
View File

@@ -1,179 +1,151 @@
"""Main file for HassIO."""
"""Main file for Hass.io."""
from contextlib import suppress
import asyncio
import logging
import aiohttp
import docker
import async_timeout
from .addons import AddonManager
from .api import RestAPI
from .host_control import HostControl
from .coresys import CoreSysAttributes
from .const import (
SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES,
STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS,
RUN_UPDATE_ADDONS_TASKS)
from .hardware import Hardware
from .homeassistant import HomeAssistant
from .scheduler import Scheduler
from .dock.supervisor import DockerSupervisor
from .snapshots import SnapshotsManager
from .updater import Updater
from .tasks import (
hassio_update, homeassistant_watchdog, api_sessions_cleanup, addons_update)
from .tools import get_local_ip, fetch_timezone
STARTUP_SYSTEM,
STARTUP_SERVICES,
STARTUP_APPLICATION,
STARTUP_INITIALIZE,
)
from .exceptions import HassioError, HomeAssistantError
_LOGGER = logging.getLogger(__name__)
class HassIO(object):
"""Main object of hassio."""
class HassIO(CoreSysAttributes):
"""Main object of Hass.io."""
def __init__(self, loop, config):
"""Initialize hassio object."""
self.exit_code = 0
self.loop = loop
self.config = config
self.websession = aiohttp.ClientSession(loop=loop)
self.updater = Updater(config, loop, self.websession)
self.scheduler = Scheduler(loop)
self.api = RestAPI(config, loop)
self.hardware = Hardware()
self.dock = docker.DockerClient(
base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
# init basic docker container
self.supervisor = DockerSupervisor(config, loop, self.dock, self.stop)
# init homeassistant
self.homeassistant = HomeAssistant(
config, loop, self.dock, self.updater)
# init HostControl
self.host_control = HostControl(loop)
# init addon system
self.addons = AddonManager(config, loop, self.dock)
# init snapshot system
self.snapshots = SnapshotsManager(
config, loop, self.scheduler, self.addons, self.homeassistant)
def __init__(self, coresys):
"""Initialize Hass.io object."""
self.coresys = coresys
async def setup(self):
"""Setup HassIO orchestration."""
# supervisor
if not await self.supervisor.attach():
_LOGGER.fatal("Can't attach to supervisor docker container!")
await self.supervisor.cleanup()
# Load Supervisor
await self.sys_supervisor.load()
# set running arch
self.config.arch = self.supervisor.arch
# Load DBus
await self.sys_dbus.load()
# set api endpoint
self.config.api_endpoint = await get_local_ip(self.loop)
# Load Host
await self.sys_host.load()
# update timezone
if self.config.timezone == 'UTC':
self.config.timezone = await fetch_timezone(self.websession)
# Load Home Assistant
await self.sys_homeassistant.load()
# hostcontrol
await self.host_control.load()
# Load CPU/Arch
await self.sys_arch.load()
# schedule update info tasks
self.scheduler.register_task(
self.host_control.load, RUN_UPDATE_INFO_TASKS)
# Load HassOS
await self.sys_hassos.load()
# Load Add-ons
await self.sys_addons.load()
# rest api views
self.api.register_host(self.host_control, self.hardware)
self.api.register_network(self.host_control)
self.api.register_supervisor(
self.supervisor, self.snapshots, self.addons, self.host_control,
self.updater)
self.api.register_homeassistant(self.homeassistant)
self.api.register_addons(self.addons)
self.api.register_security()
self.api.register_snapshots(self.snapshots)
self.api.register_panel()
await self.sys_api.load()
# schedule api session cleanup
self.scheduler.register_task(
api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
now=True)
# load last available data
await self.sys_updater.load()
# Load homeassistant
await self.homeassistant.prepare()
# load last available data
await self.sys_snapshots.load()
# Load addons
await self.addons.prepare()
# load services
await self.sys_services.load()
# schedule addon update task
self.scheduler.register_task(
self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)
self.scheduler.register_task(
addons_update(self.loop, self.addons), RUN_UPDATE_ADDONS_TASKS)
# Load discovery
await self.sys_discovery.load()
# schedule self update task
self.scheduler.register_task(
hassio_update(self.supervisor, self.updater),
RUN_UPDATE_SUPERVISOR_TASKS)
# schedule snapshot update tasks
self.scheduler.register_task(
self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)
# start addon mark as initialize
await self.addons.auto_boot(STARTUP_INITIALIZE)
# start dns forwarding
self.sys_create_task(self.sys_dns.start())
async def start(self):
"""Start HassIO orchestration."""
"""Start Hass.io orchestration."""
# on release channel, try update itself
# on beta channel, only read new versions
await asyncio.wait(
[hassio_update(self.supervisor, self.updater)()],
loop=self.loop
)
if self.sys_supervisor.need_update:
if self.sys_dev:
_LOGGER.warning("Ignore Hass.io updates on dev!")
elif await self.sys_supervisor.update():
return
# start api
await self.api.start()
_LOGGER.info("Start hassio api on %s", self.config.api_endpoint)
await self.sys_api.start()
# start addon mark as initialize
await self.sys_addons.boot(STARTUP_INITIALIZE)
try:
# HomeAssistant is already running / supervisor have only reboot
if await self.homeassistant.is_running():
_LOGGER.info("HassIO reboot detected")
if self.sys_hardware.last_boot == self.sys_config.last_boot:
_LOGGER.info("Hass.io reboot detected")
return
# reset register services / discovery
self.sys_services.reset()
# start addon mark as system
await self.addons.auto_boot(STARTUP_SYSTEM)
await self.sys_addons.boot(STARTUP_SYSTEM)
# start addon mark as services
await self.addons.auto_boot(STARTUP_SERVICES)
await self.sys_addons.boot(STARTUP_SERVICES)
# run HomeAssistant
await self.homeassistant.run()
if self.sys_homeassistant.boot:
with suppress(HomeAssistantError):
await self.sys_homeassistant.start()
# start addon mark as application
await self.addons.auto_boot(STARTUP_APPLICATION)
await self.sys_addons.boot(STARTUP_APPLICATION)
# store new last boot
self.sys_config.last_boot = self.sys_hardware.last_boot
self.sys_config.save_data()
finally:
# schedule homeassistant watchdog
self.scheduler.register_task(
homeassistant_watchdog(self.loop, self.homeassistant),
RUN_WATCHDOG_HOMEASSISTANT)
# Add core tasks into scheduler
await self.sys_tasks.load()
# If landingpage / run upgrade in background
if self.homeassistant.version == 'landingpage':
self.loop.create_task(self.homeassistant.install())
if self.sys_homeassistant.version == "landingpage":
self.sys_create_task(self.sys_homeassistant.install())
async def stop(self, exit_code=0):
_LOGGER.info("Hass.io is up and running")
async def stop(self):
"""Stop a running orchestration."""
# don't process scheduler anymore
self.scheduler.suspend = True
self.sys_scheduler.suspend = True
# process stop tasks
self.websession.close()
await self.api.stop()
# process async stop tasks
try:
with async_timeout.timeout(10):
await asyncio.wait(
[
self.sys_api.stop(),
self.sys_dns.stop(),
self.sys_websession.close(),
self.sys_websession_ssl.close(),
]
)
except asyncio.TimeoutError:
_LOGGER.warning("Force Shutdown!")
self.exit_code = exit_code
self.loop.stop()
_LOGGER.info("Hass.io is down")
async def shutdown(self):
"""Shutdown all running containers in correct order."""
await self.sys_addons.shutdown(STARTUP_APPLICATION)
# Close Home Assistant
with suppress(HassioError):
await self.sys_homeassistant.stop()
await self.sys_addons.shutdown(STARTUP_SERVICES)
await self.sys_addons.shutdown(STARTUP_SYSTEM)
await self.sys_addons.shutdown(STARTUP_INITIALIZE)

hassio/coresys.py Normal file (455 lines)
View File

@@ -0,0 +1,455 @@
"""Handle core shared data."""
from __future__ import annotations
import asyncio
from typing import TYPE_CHECKING
import aiohttp
from .config import CoreConfig
from .const import CHANNEL_DEV
from .docker import DockerAPI
from .misc.dns import DNSForward
from .misc.hardware import Hardware
from .misc.scheduler import Scheduler
if TYPE_CHECKING:
from .addons import AddonManager
from .api import RestAPI
from .arch import CpuArch
from .auth import Auth
from .core import HassIO
from .dbus import DBusManager
from .discovery import Discovery
from .hassos import HassOS
from .homeassistant import HomeAssistant
from .host import HostManager
from .services import ServiceManager
from .snapshots import SnapshotManager
from .supervisor import Supervisor
from .tasks import Tasks
from .updater import Updater
class CoreSys:
"""Class that handles all shared data."""
def __init__(self):
"""Initialize coresys."""
# Static attributes
self.machine_id: str = None
# External objects
self._loop: asyncio.BaseEventLoop = asyncio.get_running_loop()
self._websession: aiohttp.ClientSession = aiohttp.ClientSession()
self._websession_ssl: aiohttp.ClientSession = aiohttp.ClientSession(
connector=aiohttp.TCPConnector(ssl=False))
# Global objects
self._config: CoreConfig = CoreConfig()
self._hardware: Hardware = Hardware()
self._docker: DockerAPI = DockerAPI()
self._scheduler: Scheduler = Scheduler()
self._dns: DNSForward = DNSForward()
# Internal objects pointers
self._core: HassIO = None
self._arch: CpuArch = None
self._auth: Auth = None
self._homeassistant: HomeAssistant = None
self._supervisor: Supervisor = None
self._addons: AddonManager = None
self._api: RestAPI = None
self._updater: Updater = None
self._snapshots: SnapshotManager = None
self._tasks: Tasks = None
self._host: HostManager = None
self._dbus: DBusManager = None
self._hassos: HassOS = None
self._services: ServiceManager = None
self._discovery: Discovery = None
@property
def machine(self) -> str:
"""Return running machine type of the Hass.io system."""
if self._homeassistant:
return self._homeassistant.machine
return None
@property
def dev(self) -> bool:
"""Return True if we run dev mode."""
return self._updater.channel == CHANNEL_DEV
@property
def timezone(self) -> str:
"""Return timezone."""
return self._config.timezone
@property
def loop(self) -> asyncio.BaseEventLoop:
"""Return loop object."""
return self._loop
@property
def websession(self) -> aiohttp.ClientSession:
"""Return websession object."""
return self._websession
@property
def websession_ssl(self) -> aiohttp.ClientSession:
"""Return websession object with disabled SSL."""
return self._websession_ssl
@property
def config(self) -> CoreConfig:
"""Return CoreConfig object."""
return self._config
@property
def hardware(self) -> Hardware:
"""Return Hardware object."""
return self._hardware
@property
def docker(self) -> DockerAPI:
"""Return DockerAPI object."""
return self._docker
@property
def scheduler(self) -> Scheduler:
"""Return Scheduler object."""
return self._scheduler
@property
def dns(self) -> DNSForward:
"""Return DNSForward object."""
return self._dns
@property
def core(self) -> HassIO:
"""Return HassIO object."""
return self._core
@core.setter
def core(self, value: HassIO):
"""Set a Hass.io object."""
if self._core:
raise RuntimeError("Hass.io already set!")
self._core = value
@property
def arch(self) -> CpuArch:
"""Return CpuArch object."""
return self._arch
@arch.setter
def arch(self, value: CpuArch):
"""Set a CpuArch object."""
if self._arch:
raise RuntimeError("CpuArch already set!")
self._arch = value
@property
def auth(self) -> Auth:
"""Return Auth object."""
return self._auth
@auth.setter
def auth(self, value: Auth):
"""Set a Auth object."""
if self._auth:
raise RuntimeError("Auth already set!")
self._auth = value
@property
def homeassistant(self) -> HomeAssistant:
"""Return Home Assistant object."""
return self._homeassistant
@homeassistant.setter
def homeassistant(self, value: HomeAssistant):
"""Set a HomeAssistant object."""
if self._homeassistant:
raise RuntimeError("Home Assistant already set!")
self._homeassistant = value
@property
def supervisor(self) -> Supervisor:
"""Return Supervisor object."""
return self._supervisor
@supervisor.setter
def supervisor(self, value: Supervisor):
"""Set a Supervisor object."""
if self._supervisor:
raise RuntimeError("Supervisor already set!")
self._supervisor = value
@property
def api(self) -> RestAPI:
"""Return API object."""
return self._api
@api.setter
def api(self, value: RestAPI):
"""Set an API object."""
if self._api:
raise RuntimeError("API already set!")
self._api = value
@property
def updater(self) -> Updater:
"""Return Updater object."""
return self._updater
@updater.setter
def updater(self, value: Updater):
"""Set a Updater object."""
if self._updater:
raise RuntimeError("Updater already set!")
self._updater = value
@property
def addons(self) -> AddonManager:
"""Return AddonManager object."""
return self._addons
@addons.setter
def addons(self, value: AddonManager):
"""Set a AddonManager object."""
if self._addons:
raise RuntimeError("AddonManager already set!")
self._addons = value
@property
def snapshots(self) -> SnapshotManager:
"""Return SnapshotManager object."""
return self._snapshots
@snapshots.setter
def snapshots(self, value: SnapshotManager):
"""Set a SnapshotManager object."""
if self._snapshots:
raise RuntimeError("SnapshotsManager already set!")
self._snapshots = value
@property
def tasks(self) -> Tasks:
"""Return Tasks object."""
return self._tasks
@tasks.setter
def tasks(self, value: Tasks):
"""Set a Tasks object."""
if self._tasks:
raise RuntimeError("Tasks already set!")
self._tasks = value
@property
def services(self) -> ServiceManager:
"""Return ServiceManager object."""
return self._services
@services.setter
def services(self, value: ServiceManager):
"""Set a ServiceManager object."""
if self._services:
raise RuntimeError("Services already set!")
self._services = value
@property
def discovery(self) -> Discovery:
"""Return ServiceManager object."""
return self._discovery
@discovery.setter
def discovery(self, value: Discovery):
"""Set a Discovery object."""
if self._discovery:
raise RuntimeError("Discovery already set!")
self._discovery = value
@property
def dbus(self) -> DBusManager:
"""Return DBusManager object."""
return self._dbus
@dbus.setter
def dbus(self, value: DBusManager):
"""Set a DBusManager object."""
if self._dbus:
raise RuntimeError("DBusManager already set!")
self._dbus = value
@property
def host(self) -> HostManager:
"""Return HostManager object."""
return self._host
@host.setter
def host(self, value: HostManager):
"""Set a HostManager object."""
if self._host:
raise RuntimeError("HostManager already set!")
self._host = value
@property
def hassos(self) -> HassOS:
"""Return HassOS object."""
return self._hassos
@hassos.setter
def hassos(self, value: HassOS):
"""Set a HassOS object."""
if self._hassos:
raise RuntimeError("HassOS already set!")
self._hassos = value
class CoreSysAttributes:
"""Inheret basic CoreSysAttributes."""
coresys = None
@property
def sys_machine(self) -> str:
"""Return running machine type of the Hass.io system."""
return self.coresys.machine
@property
def sys_dev(self) -> bool:
"""Return True if we run dev mode."""
return self.coresys.dev
@property
def sys_timezone(self) -> str:
"""Return timezone."""
return self.coresys.timezone
@property
def sys_machine_id(self) -> str:
"""Return timezone."""
return self.coresys.machine_id
@property
def sys_loop(self) -> asyncio.BaseEventLoop:
"""Return loop object."""
return self.coresys.loop
@property
def sys_websession(self) -> aiohttp.ClientSession:
"""Return websession object."""
return self.coresys.websession
@property
def sys_websession_ssl(self) -> aiohttp.ClientSession:
"""Return websession object with disabled SSL."""
return self.coresys.websession_ssl
@property
def sys_config(self) -> CoreConfig:
"""Return CoreConfig object."""
return self.coresys.config
@property
def sys_hardware(self) -> Hardware:
"""Return Hardware object."""
return self.coresys.hardware
@property
def sys_docker(self) -> DockerAPI:
"""Return DockerAPI object."""
return self.coresys.docker
@property
def sys_scheduler(self) -> Scheduler:
"""Return Scheduler object."""
return self.coresys.scheduler
@property
def sys_dns(self) -> DNSForward:
"""Return DNSForward object."""
return self.coresys.dns
@property
def sys_core(self) -> HassIO:
"""Return HassIO object."""
return self.coresys.core
@property
def sys_arch(self) -> CpuArch:
"""Return CpuArch object."""
return self.coresys.arch
@property
def sys_auth(self) -> Auth:
"""Return Auth object."""
return self.coresys.auth
@property
def sys_homeassistant(self) -> HomeAssistant:
"""Return Home Assistant object."""
return self.coresys.homeassistant
@property
def sys_supervisor(self) -> Supervisor:
"""Return Supervisor object."""
return self.coresys.supervisor
@property
def sys_api(self) -> RestAPI:
"""Return API object."""
return self.coresys.api
@property
def sys_updater(self) -> Updater:
"""Return Updater object."""
return self.coresys.updater
@property
def sys_addons(self) -> AddonManager:
"""Return AddonManager object."""
return self.coresys.addons
@property
def sys_snapshots(self) -> SnapshotManager:
"""Return SnapshotManager object."""
return self.coresys.snapshots
@property
def sys_tasks(self) -> Tasks:
"""Return Tasks object."""
return self.coresys.tasks
@property
def sys_services(self) -> ServiceManager:
"""Return ServiceManager object."""
return self.coresys.services
@property
def sys_discovery(self) -> Discovery:
"""Return ServiceManager object."""
return self.coresys.discovery
@property
def sys_dbus(self) -> DBusManager:
"""Return DBusManager object."""
return self.coresys.dbus
@property
def sys_host(self) -> HostManager:
"""Return HostManager object."""
return self.coresys.host
@property
def sys_hassos(self) -> HassOS:
"""Return HassOS object."""
return self.coresys.hassos
def sys_run_in_executor(self, funct, *args) -> asyncio.Future:
"""Wrapper for executor pool."""
return self.sys_loop.run_in_executor(None, funct, *args)
def sys_create_task(self, coroutine) -> asyncio.Task:
"""Wrapper for async task."""
return self.sys_loop.create_task(coroutine)
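
For reference, components consume this shared state by storing the CoreSys instance on `self.coresys` and reading everything through the `sys_*` shortcuts. A minimal sketch, assuming a hypothetical `Example` component and an illustrative URL that are not part of this diff:

from hassio.coresys import CoreSys, CoreSysAttributes

class Example(CoreSysAttributes):
    """Hypothetical consumer of the shared core state."""

    def __init__(self, coresys: CoreSys):
        # Every sys_* property resolves through this single reference
        self.coresys = coresys

    async def ping_supervisor(self) -> int:
        # Reuse the shared aiohttp session; URL is illustrative only
        async with self.sys_websession.get("http://hassio/supervisor/ping") as resp:
            return resp.status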

39
hassio/dbus/__init__.py Normal file

@@ -0,0 +1,39 @@
"""D-Bus interface objects."""
from .systemd import Systemd
from .hostname import Hostname
from .rauc import Rauc
from ..coresys import CoreSysAttributes
class DBusManager(CoreSysAttributes):
"""A DBus Interface handler."""
def __init__(self, coresys):
"""Initialize D-Bus interface."""
self.coresys = coresys
self._systemd = Systemd()
self._hostname = Hostname()
self._rauc = Rauc()
@property
def systemd(self):
"""Return the systemd interface."""
return self._systemd
@property
def hostname(self):
"""Return the hostname interface."""
return self._hostname
@property
def rauc(self):
"""Return the rauc interface."""
return self._rauc
async def load(self):
"""Connect interfaces to D-Bus."""
await self.systemd.connect()
await self.hostname.connect()
await self.rauc.connect()

39
hassio/dbus/hostname.py Normal file

@@ -0,0 +1,39 @@
"""D-Bus interface for hostname."""
import logging
from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus
_LOGGER = logging.getLogger(__name__)
DBUS_NAME = 'org.freedesktop.hostname1'
DBUS_OBJECT = '/org/freedesktop/hostname1'
class Hostname(DBusInterface):
"""Handle D-Bus interface for hostname/system."""
async def connect(self):
"""Connect to system's D-Bus."""
try:
self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
except DBusError:
_LOGGER.warning("Can't connect to hostname")
@dbus_connected
def set_static_hostname(self, hostname):
"""Change local hostname.
Return a coroutine.
"""
return self.dbus.SetStaticHostname(hostname, False)
@dbus_connected
def get_properties(self):
"""Return local host informations.
Return a coroutine.
"""
return self.dbus.get_properties(DBUS_NAME)
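
Since both methods are guarded by `dbus_connected` and return coroutines, a caller awaits them through the D-Bus manager. A hedged sketch from inside a coroutine of a CoreSysAttributes-based component (the hostname value is illustrative):

from hassio.exceptions import DBusError, DBusNotConnectedError

async def rename_host(self):
    # Hypothetical helper on a CoreSysAttributes subclass
    try:
        await self.sys_dbus.hostname.set_static_hostname("hassio-node")
        return await self.sys_dbus.hostname.get_properties()
    except (DBusError, DBusNotConnectedError):
        return None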

18
hassio/dbus/interface.py Normal file

@@ -0,0 +1,18 @@
"""Interface class for D-Bus wrappers."""
class DBusInterface:
"""Handle D-Bus interface for hostname/system."""
def __init__(self):
"""Initialize systemd."""
self.dbus = None
@property
def is_connected(self):
"""Return True, if they is connected to D-Bus."""
return self.dbus is not None
async def connect(self):
"""Connect to D-Bus."""
raise NotImplementedError()

55
hassio/dbus/rauc.py Normal file

@@ -0,0 +1,55 @@
"""D-Bus interface for rauc."""
import logging
from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus
_LOGGER = logging.getLogger(__name__)
DBUS_NAME = 'de.pengutronix.rauc'
DBUS_OBJECT = '/'
class Rauc(DBusInterface):
"""Handle D-Bus interface for rauc."""
async def connect(self):
"""Connect to D-Bus."""
try:
self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
except DBusError:
_LOGGER.warning("Can't connect to rauc")
@dbus_connected
def install(self, raucb_file):
"""Install rauc bundle file.
Return a coroutine.
"""
return self.dbus.Installer.Install(raucb_file)
@dbus_connected
def get_slot_status(self):
"""Get slot status.
Return a coroutine.
"""
return self.dbus.Installer.GetSlotStatus()
@dbus_connected
def get_properties(self):
"""Return rauc informations.
Return a coroutine.
"""
return self.dbus.get_properties(f"{DBUS_NAME}.Installer")
@dbus_connected
def signal_completed(self):
"""Return a signal wrapper for completed signal.
Return a coroutine.
"""
return self.dbus.wait_signal(f"{DBUS_NAME}.Installer.Completed")
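
The wrapper exposes the raw Installer calls, so an OTA flow awaits `install`, then blocks on the `Completed` signal before checking slot status. A sketch under those assumptions (the bundle path is illustrative):

# Inside a coroutine of a CoreSysAttributes subclass (illustrative):
await self.sys_dbus.rauc.install("/data/ota/hassos-update.raucb")
result = await self.sys_dbus.rauc.signal_completed()  # waits for Installer.Completed
slots = await self.sys_dbus.rauc.get_slot_status()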

79
hassio/dbus/systemd.py Normal file

@@ -0,0 +1,79 @@
"""Interface to Systemd over D-Bus."""
import logging
from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus
_LOGGER = logging.getLogger(__name__)
DBUS_NAME = 'org.freedesktop.systemd1'
DBUS_OBJECT = '/org/freedesktop/systemd1'
class Systemd(DBusInterface):
"""Systemd function handler."""
async def connect(self):
"""Connect to D-Bus."""
try:
self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
except DBusError:
_LOGGER.warning("Can't connect to systemd")
@dbus_connected
def reboot(self):
"""Reboot host computer.
Return a coroutine.
"""
return self.dbus.Manager.Reboot()
@dbus_connected
def power_off(self):
"""Power off host computer.
Return a coroutine.
"""
return self.dbus.Manager.PowerOff()
@dbus_connected
def start_unit(self, unit, mode):
"""Start a systemd service unit.
Return a coroutine.
"""
return self.dbus.Manager.StartUnit(unit, mode)
@dbus_connected
def stop_unit(self, unit, mode):
"""Stop a systemd service unit.
Return a coroutine.
"""
return self.dbus.Manager.StopUnit(unit, mode)
@dbus_connected
def reload_unit(self, unit, mode):
"""Reload a systemd service unit.
Return a coroutine.
"""
return self.dbus.Manager.ReloadOrRestartUnit(unit, mode)
@dbus_connected
def restart_unit(self, unit, mode):
"""Restart a systemd service unit.
Return a coroutine.
"""
return self.dbus.Manager.RestartUnit(unit, mode)
@dbus_connected
def list_units(self):
"""Return a list of available systemd services.
Return a coroutine.
"""
return self.dbus.Manager.ListUnits()
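
Every unit call takes the unit name plus a systemd job mode ("replace" is the usual default), and each returns a coroutine because of `dbus_connected`. Illustrative usage; the unit name is an assumption, not part of this diff:

# Inside a coroutine of a CoreSysAttributes subclass (illustrative):
await self.sys_dbus.systemd.restart_unit("systemd-timesyncd.service", "replace")
units = await self.sys_dbus.systemd.list_units()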

14
hassio/dbus/utils.py Normal file

@@ -0,0 +1,14 @@
"""Utils for D-Bus."""
from ..exceptions import DBusNotConnectedError
def dbus_connected(method):
"""Wrapper for check if D-Bus is connected."""
def wrap_dbus(api, *args, **kwargs):
"""Check if D-Bus is connected before call a method."""
if api.dbus is None:
raise DBusNotConnectedError()
return method(api, *args, **kwargs)
return wrap_dbus
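
The decorator only guards the call site: if `connect()` never succeeded, the wrapped method raises before any D-Bus traffic is attempted. A small sketch of that failure path:

from hassio.dbus.hostname import Hostname
from hassio.exceptions import DBusNotConnectedError

hostname = Hostname()          # connect() not awaited, so hostname.dbus is None
try:
    hostname.get_properties()  # raises immediately, no D-Bus round trip
except DBusNotConnectedError:
    pass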


@@ -0,0 +1,132 @@
"""Handle discover message for Home Assistant."""
from __future__ import annotations
from contextlib import suppress
import logging
from typing import Any, Dict, List, Optional, TYPE_CHECKING
from uuid import uuid4, UUID
import attr
import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..const import ATTR_CONFIG, ATTR_DISCOVERY, FILE_HASSIO_DISCOVERY
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import DiscoveryError, HomeAssistantAPIError
from ..utils.json import JsonConfig
from .validate import SCHEMA_DISCOVERY_CONFIG, valid_discovery_config
if TYPE_CHECKING:
from ..addons.addon import Addon
_LOGGER = logging.getLogger(__name__)
CMD_NEW = "post"
CMD_DEL = "delete"
@attr.s
class Message:
"""Represent a single Discovery message."""
addon: str = attr.ib()
service: str = attr.ib()
config: Dict[str, Any] = attr.ib(cmp=False)
uuid: UUID = attr.ib(factory=lambda: uuid4().hex, cmp=False)
class Discovery(CoreSysAttributes, JsonConfig):
"""Home Assistant Discovery handler."""
def __init__(self, coresys: CoreSys):
"""Initialize discovery handler."""
super().__init__(FILE_HASSIO_DISCOVERY, SCHEMA_DISCOVERY_CONFIG)
self.coresys: CoreSys = coresys
self.message_obj: Dict[str, Message] = {}
async def load(self) -> None:
"""Load exists discovery message into storage."""
messages = {}
for message in self._data[ATTR_DISCOVERY]:
discovery = Message(**message)
messages[discovery.uuid] = discovery
_LOGGER.info("Load %d messages", len(messages))
self.message_obj = messages
def save(self) -> None:
"""Write discovery message into data file."""
messages: List[Dict[str, Any]] = []
for message in self.list_messages:
messages.append(attr.asdict(message))
self._data[ATTR_DISCOVERY].clear()
self._data[ATTR_DISCOVERY].extend(messages)
self.save_data()
def get(self, uuid: str) -> Optional[Message]:
"""Return discovery message."""
return self.message_obj.get(uuid)
@property
def list_messages(self) -> List[Message]:
"""Return list of available discovery messages."""
return list(self.message_obj.values())
def send(self, addon: Addon, service: str, config: Dict[str, Any]) -> Message:
"""Send a discovery message to Home Assistant."""
try:
config = valid_discovery_config(service, config)
except vol.Invalid as err:
_LOGGER.error("Invalid discovery %s config", humanize_error(config, err))
raise DiscoveryError() from None
# Create message
message = Message(addon.slug, service, config)
# Already exists?
for old_message in self.list_messages:
if old_message != message:
continue
_LOGGER.info("Duplicate discovery message from %s", addon.slug)
return old_message
_LOGGER.info("Send discovery to Home Assistant %s from %s", service, addon.slug)
self.message_obj[message.uuid] = message
self.save()
self.sys_create_task(self._push_discovery(message, CMD_NEW))
return message
def remove(self, message: Message) -> None:
"""Remove a discovery message from Home Assistant."""
self.message_obj.pop(message.uuid, None)
self.save()
_LOGGER.info(
"Delete discovery to Home Assistant %s from %s",
message.service,
message.addon,
)
self.sys_create_task(self._push_discovery(message, CMD_DEL))
async def _push_discovery(self, message: Message, command: str) -> None:
"""Send a discovery request."""
if not await self.sys_homeassistant.check_api_state():
_LOGGER.info("Discovery %s mesage ignore", message.uuid)
return
data = attr.asdict(message)
data.pop(ATTR_CONFIG)
with suppress(HomeAssistantAPIError):
async with self.sys_homeassistant.make_request(
command,
f"api/hassio_push/discovery/{message.uuid}",
json=data,
timeout=10,
):
_LOGGER.info("Discovery %s message send", message.uuid)
return
_LOGGER.warning("Discovery %s message fail", message.uuid)


@@ -0,0 +1,8 @@
"""Discovery static data."""
ATTR_HOST = "host"
ATTR_PASSWORD = "password"
ATTR_PORT = "port"
ATTR_PROTOCOL = "protocol"
ATTR_SSL = "ssl"
ATTR_USERNAME = "username"


@@ -0,0 +1 @@
"""Discovery service modules."""


@@ -0,0 +1,11 @@
"""Discovery service for MQTT."""
import voluptuous as vol
from hassio.validate import NETWORK_PORT
from ..const import ATTR_HOST, ATTR_PORT
SCHEMA = vol.Schema(
{vol.Required(ATTR_HOST): vol.Coerce(str), vol.Required(ATTR_PORT): NETWORK_PORT}
)


@@ -0,0 +1,27 @@
"""Discovery service for MQTT."""
import voluptuous as vol
from hassio.validate import NETWORK_PORT
from ..const import (
ATTR_HOST,
ATTR_PASSWORD,
ATTR_PORT,
ATTR_PROTOCOL,
ATTR_SSL,
ATTR_USERNAME,
)
# pylint: disable=no-value-for-parameter
SCHEMA = vol.Schema(
{
vol.Required(ATTR_HOST): vol.Coerce(str),
vol.Required(ATTR_PORT): NETWORK_PORT,
vol.Optional(ATTR_USERNAME): vol.Coerce(str),
vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
vol.Optional(ATTR_PROTOCOL, default="3.1.1"): vol.All(
vol.Coerce(str), vol.In(["3.1", "3.1.1"])
),
}
)
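
Because `ssl` and `protocol` carry defaults, a minimal broker payload is expanded on validation. A sketch, assuming this module lives at `hassio.discovery.services.mqtt` (the file path is not shown in this view) and using illustrative values:

from hassio.discovery.services.mqtt import SCHEMA

config = SCHEMA({"host": "172.30.32.1", "port": 1883})
# config == {"host": "172.30.32.1", "port": 1883, "ssl": False, "protocol": "3.1.1"}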


@@ -0,0 +1,47 @@
"""Validate services schema."""
from pathlib import Path
from importlib import import_module
import voluptuous as vol
from ..const import ATTR_ADDON, ATTR_CONFIG, ATTR_DISCOVERY, ATTR_SERVICE, ATTR_UUID
from ..utils.validate import schema_or
from ..validate import UUID_MATCH
def valid_discovery_service(service):
"""Validate service name."""
service_file = Path(__file__).parent.joinpath(f"services/{service}.py")
if not service_file.exists():
raise vol.Invalid(f"Service {service} not found")
return service
def valid_discovery_config(service, config):
"""Validate service name."""
try:
service_mod = import_module(f".services.{service}", "hassio.discovery")
except ImportError:
raise vol.Invalid(f"Service {service} not found")
return service_mod.SCHEMA(config)
SCHEMA_DISCOVERY = vol.Schema(
[
vol.Schema(
{
vol.Required(ATTR_UUID): UUID_MATCH,
vol.Required(ATTR_ADDON): vol.Coerce(str),
vol.Required(ATTR_SERVICE): valid_discovery_service,
vol.Required(ATTR_CONFIG): vol.Maybe(dict),
},
extra=vol.REMOVE_EXTRA,
)
]
)
SCHEMA_DISCOVERY_CONFIG = vol.Schema(
{vol.Optional(ATTR_DISCOVERY, default=list): schema_or(SCHEMA_DISCOVERY)},
extra=vol.REMOVE_EXTRA,
)
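
`valid_discovery_service` only checks that a matching module file exists on disk, while `valid_discovery_config` imports that module and applies its SCHEMA; both raise `vol.Invalid` on bad input. Illustrative round trip with an assumed payload:

import voluptuous as vol
from hassio.discovery.validate import valid_discovery_config, valid_discovery_service

try:
    valid_discovery_service("mqtt")
    config = valid_discovery_config("mqtt", {"host": "172.30.32.1", "port": 1883})
except vol.Invalid as err:
    print(f"Rejected discovery payload: {err}")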


@@ -1,324 +0,0 @@
"""Init file for HassIO docker object."""
import asyncio
from contextlib import suppress
import logging
import docker
from .util import docker_process
from ..const import LABEL_VERSION, LABEL_ARCH
_LOGGER = logging.getLogger(__name__)
class DockerBase(object):
"""Docker hassio wrapper."""
def __init__(self, config, loop, dock, image=None, timeout=30):
"""Initialize docker base wrapper."""
self.config = config
self.loop = loop
self.dock = dock
self.image = image
self.timeout = timeout
self.version = None
self.arch = None
self._lock = asyncio.Lock(loop=loop)
@property
def name(self):
"""Return name of docker container."""
return None
@property
def in_progress(self):
"""Return True if a task is in progress."""
return self._lock.locked()
def process_metadata(self, metadata, force=False):
"""Read metadata and set it to object."""
# read image
if not self.image:
self.image = metadata['Config']['Image']
# read version
need_version = force or not self.version
if need_version and LABEL_VERSION in metadata['Config']['Labels']:
self.version = metadata['Config']['Labels'][LABEL_VERSION]
elif need_version:
_LOGGER.warning("Can't read version from %s", self.name)
# read arch
need_arch = force or not self.arch
if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
self.arch = metadata['Config']['Labels'][LABEL_ARCH]
@docker_process
def install(self, tag):
"""Pull docker image."""
return self.loop.run_in_executor(None, self._install, tag)
def _install(self, tag):
"""Pull docker image.
Need run inside executor.
"""
try:
_LOGGER.info("Pull image %s tag %s.", self.image, tag)
image = self.dock.images.pull("{}:{}".format(self.image, tag))
image.tag(self.image, tag='latest')
self.process_metadata(image.attrs, force=True)
except docker.errors.APIError as err:
_LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
return False
_LOGGER.info("Tag image %s with version %s as latest", self.image, tag)
return True
def exists(self):
"""Return True if docker image exists in local repo."""
return self.loop.run_in_executor(None, self._exists)
def _exists(self):
"""Return True if docker image exists in local repo.
Need run inside executor.
"""
try:
self.dock.images.get(self.image)
except docker.errors.DockerException:
return False
return True
def is_running(self):
"""Return True if docker is Running.
Return a Future.
"""
return self.loop.run_in_executor(None, self._is_running)
def _is_running(self):
"""Return True if docker is Running.
Need run inside executor.
"""
try:
container = self.dock.containers.get(self.name)
image = self.dock.images.get(self.image)
except docker.errors.DockerException:
return False
# container is not running
if container.status != 'running':
return False
# we run on a old image, stop and start it
if container.image.id != image.id:
return False
return True
@docker_process
def attach(self):
"""Attach to running docker container."""
return self.loop.run_in_executor(None, self._attach)
def _attach(self):
"""Attach to running docker container.
Need run inside executor.
"""
try:
if self.image:
obj_data = self.dock.images.get(self.image).attrs
else:
obj_data = self.dock.containers.get(self.name).attrs
except docker.errors.DockerException:
return False
self.process_metadata(obj_data)
_LOGGER.info(
"Attach to image %s with version %s", self.image, self.version)
return True
@docker_process
def run(self):
"""Run docker image."""
return self.loop.run_in_executor(None, self._run)
def _run(self):
"""Run docker image.
Need run inside executor.
"""
raise NotImplementedError()
@docker_process
def stop(self):
"""Stop/remove docker container."""
return self.loop.run_in_executor(None, self._stop)
def _stop(self):
"""Stop/remove and remove docker container.
Need run inside executor.
"""
try:
container = self.dock.containers.get(self.name)
except docker.errors.DockerException:
return False
if container.status == 'running':
_LOGGER.info("Stop %s docker application", self.image)
with suppress(docker.errors.DockerException):
container.stop(timeout=self.timeout)
with suppress(docker.errors.DockerException):
_LOGGER.info("Clean %s docker application", self.image)
container.remove(force=True)
return True
@docker_process
def remove(self):
"""Remove docker images."""
return self.loop.run_in_executor(None, self._remove)
def _remove(self):
"""remove docker images.
Need run inside executor.
"""
# cleanup container
self._stop()
_LOGGER.info(
"Remove docker %s with latest and %s", self.image, self.version)
try:
with suppress(docker.errors.ImageNotFound):
self.dock.images.remove(
image="{}:latest".format(self.image), force=True)
with suppress(docker.errors.ImageNotFound):
self.dock.images.remove(
image="{}:{}".format(self.image, self.version), force=True)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't remove image %s -> %s", self.image, err)
return False
# clean metadata
self.version = None
self.arch = None
return True
@docker_process
def update(self, tag):
"""Update a docker image."""
return self.loop.run_in_executor(None, self._update, tag)
def _update(self, tag):
"""Update a docker image.
Need run inside executor.
"""
_LOGGER.info(
"Update docker %s with %s:%s", self.version, self.image, tag)
# update docker image
if not self._install(tag):
return False
# stop container & cleanup
self._stop()
self._cleanup()
return True
@docker_process
def logs(self):
"""Return docker logs of container."""
return self.loop.run_in_executor(None, self._logs)
def _logs(self):
"""Return docker logs of container.
Need run inside executor.
"""
try:
container = self.dock.containers.get(self.name)
except docker.errors.DockerException:
return b""
try:
return container.logs(tail=100, stdout=True, stderr=True)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't grap logs from %s -> %s", self.image, err)
@docker_process
def restart(self):
"""Restart docker container."""
return self.loop.run_in_executor(None, self._restart)
def _restart(self):
"""Restart docker container.
Need run inside executor.
"""
try:
container = self.dock.containers.get(self.name)
except docker.errors.DockerException:
return False
_LOGGER.info("Restart %s", self.image)
try:
container.restart(timeout=self.timeout)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't restart %s -> %s", self.image, err)
return False
return True
@docker_process
def cleanup(self):
"""Check if old version exists and cleanup."""
return self.loop.run_in_executor(None, self._cleanup)
def _cleanup(self):
"""Check if old version exists and cleanup.
Need run inside executor.
"""
try:
latest = self.dock.images.get(self.image)
except docker.errors.DockerException:
_LOGGER.warning("Can't find %s for cleanup", self.image)
return False
for image in self.dock.images.list(name=self.image):
if latest.id == image.id:
continue
with suppress(docker.errors.DockerException):
_LOGGER.info("Cleanup docker images: %s", image.tags)
self.dock.images.remove(image.id, force=True)
return True
@docker_process
def execute_command(self, command):
"""Create a temporary container and run command."""
return self.loop.run_in_executor(None, self._execute_command, command)
def _execute_command(self, command):
"""Create a temporary container and run command.
Need run inside executor.
"""
raise NotImplementedError()


@@ -1,280 +0,0 @@
"""Init file for HassIO addon docker object."""
import logging
from pathlib import Path
import shutil
import docker
import requests
from . import DockerBase
from .util import dockerfile_template, docker_process
from ..const import (
META_ADDON, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)
_LOGGER = logging.getLogger(__name__)
AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
class DockerAddon(DockerBase):
"""Docker hassio wrapper for HomeAssistant."""
def __init__(self, config, loop, dock, addon):
"""Initialize docker homeassistant wrapper."""
super().__init__(
config, loop, dock, image=addon.image, timeout=addon.timeout)
self.addon = addon
@property
def name(self):
"""Return name of docker container."""
return "addon_{}".format(self.addon.slug)
@property
def hostname(self):
"""Return slug/id of addon."""
return self.addon.slug.replace('_', '-')
@property
def environment(self):
"""Return environment for docker add-on."""
addon_env = self.addon.environment or {}
if self.addon.with_audio:
addon_env.update({
'ALSA_OUTPUT': self.addon.audio_output,
'ALSA_INPUT': self.addon.audio_input,
})
return {
**addon_env,
'TZ': self.config.timezone,
}
@property
def devices(self):
"""Return needed devices."""
devices = self.addon.devices or []
# use audio devices
if self.addon.with_audio and AUDIO_DEVICE not in devices:
devices.append(AUDIO_DEVICE)
# Return None if no devices is present
if devices:
return devices
return None
@property
def tmpfs(self):
"""Return tmpfs for docker add-on."""
options = self.addon.tmpfs
if options:
return {"/tmpfs": "{}".format(options)}
return None
@property
def mapping(self):
"""Return hosts mapping."""
if not self.addon.use_hassio_api:
return None
return {
'hassio': self.config.api_endpoint,
}
@property
def volumes(self):
"""Generate volumes for mappings."""
volumes = {
str(self.addon.path_extern_data): {
'bind': '/data', 'mode': 'rw'
}}
addon_mapping = self.addon.map_volumes
if MAP_CONFIG in addon_mapping:
volumes.update({
str(self.config.path_extern_config): {
'bind': '/config', 'mode': addon_mapping[MAP_CONFIG]
}})
if MAP_SSL in addon_mapping:
volumes.update({
str(self.config.path_extern_ssl): {
'bind': '/ssl', 'mode': addon_mapping[MAP_SSL]
}})
if MAP_ADDONS in addon_mapping:
volumes.update({
str(self.config.path_extern_addons_local): {
'bind': '/addons', 'mode': addon_mapping[MAP_ADDONS]
}})
if MAP_BACKUP in addon_mapping:
volumes.update({
str(self.config.path_extern_backup): {
'bind': '/backup', 'mode': addon_mapping[MAP_BACKUP]
}})
if MAP_SHARE in addon_mapping:
volumes.update({
str(self.config.path_extern_share): {
'bind': '/share', 'mode': addon_mapping[MAP_SHARE]
}})
return volumes
def _run(self):
"""Run docker image.
Need run inside executor.
"""
if self._is_running():
return True
# cleanup
self._stop()
# write config
if not self.addon.write_options():
return False
try:
self.dock.containers.run(
self.image,
name=self.name,
hostname=self.hostname,
detach=True,
network_mode=self.addon.network_mode,
ports=self.addon.ports,
extra_hosts=self.mapping,
devices=self.devices,
cap_add=self.addon.privileged,
environment=self.environment,
volumes=self.volumes,
tmpfs=self.tmpfs
)
except docker.errors.DockerException as err:
_LOGGER.error("Can't run %s -> %s", self.image, err)
return False
_LOGGER.info(
"Start docker addon %s with version %s", self.image, self.version)
return True
def _install(self, tag):
"""Pull docker image or build it.
Need run inside executor.
"""
if self.addon.need_build:
return self._build(tag)
return super()._install(tag)
def _build(self, tag):
"""Build a docker container.
Need run inside executor.
"""
build_dir = Path(self.config.path_tmp, self.addon.slug)
try:
# prepare temporary addon build folder
try:
source = self.addon.path_location
shutil.copytree(str(source), str(build_dir))
except shutil.Error as err:
_LOGGER.error("Can't copy %s to temporary build folder -> %s",
source, err)
return False
# prepare Dockerfile
try:
dockerfile_template(
Path(build_dir, 'Dockerfile'), self.config.arch,
tag, META_ADDON)
except OSError as err:
_LOGGER.error("Can't prepare dockerfile -> %s", err)
# run docker build
try:
build_tag = "{}:{}".format(self.image, tag)
_LOGGER.info("Start build %s on %s", build_tag, build_dir)
image = self.dock.images.build(
path=str(build_dir), tag=build_tag, pull=True)
image.tag(self.image, tag='latest')
self.process_metadata(image.attrs, force=True)
except (docker.errors.DockerException, TypeError) as err:
_LOGGER.error("Can't build %s -> %s", build_tag, err)
return False
_LOGGER.info("Build %s done", build_tag)
return True
finally:
shutil.rmtree(str(build_dir), ignore_errors=True)
@docker_process
def export_image(self, path):
"""Export current images into a tar file."""
return self.loop.run_in_executor(None, self._export_image, path)
def _export_image(self, tar_file):
"""Export current images into a tar file.
Need run inside executor.
"""
try:
image = self.dock.api.get_image(self.image)
except docker.errors.DockerException as err:
_LOGGER.error("Can't fetch image %s -> %s", self.image, err)
return False
try:
with tar_file.open("wb") as write_tar:
for chunk in image.stream():
write_tar.write(chunk)
except (OSError, requests.exceptions.ReadTimeout) as err:
_LOGGER.error("Can't write tar file %s -> %s", tar_file, err)
return False
_LOGGER.info("Export image %s to %s", self.image, tar_file)
return True
@docker_process
def import_image(self, path, tag):
"""Import a tar file as image."""
return self.loop.run_in_executor(None, self._import_image, path, tag)
def _import_image(self, tar_file, tag):
"""Import a tar file as image.
Need run inside executor.
"""
try:
with tar_file.open("rb") as read_tar:
self.dock.api.load_image(read_tar)
image = self.dock.images.get(self.image)
image.tag(self.image, tag=tag)
except (docker.errors.DockerException, OSError) as err:
_LOGGER.error("Can't import image %s -> %s", self.image, err)
return False
_LOGGER.info("Import image %s and tag %s", tar_file, tag)
self.process_metadata(image.attrs, force=True)
self._cleanup()
return True
def _restart(self):
"""Restart docker container.
Addons prepare some thing on start and that is normaly not repeatable.
Need run inside executor.
"""
self._stop()
return self._run()


@@ -1,117 +0,0 @@
"""Init file for HassIO docker object."""
from contextlib import suppress
import logging
import docker
from . import DockerBase
_LOGGER = logging.getLogger(__name__)
HASS_DOCKER_NAME = 'homeassistant'
class DockerHomeAssistant(DockerBase):
"""Docker hassio wrapper for HomeAssistant."""
def __init__(self, config, loop, dock, data):
"""Initialize docker homeassistant wrapper."""
super().__init__(config, loop, dock, image=data.image)
self.data = data
@property
def name(self):
"""Return name of docker container."""
return HASS_DOCKER_NAME
@property
def devices(self):
"""Create list of special device to map into docker."""
if not self.data.devices:
return
devices = []
for device in self.data.devices:
devices.append("/dev/{0}:/dev/{0}:rwm".format(device))
return devices
def _run(self):
"""Run docker image.
Need run inside executor.
"""
if self._is_running():
return
# cleanup
self._stop()
try:
self.dock.containers.run(
self.image,
name=self.name,
hostname=self.name,
detach=True,
privileged=True,
devices=self.devices,
network_mode='host',
environment={
'HASSIO': self.config.api_endpoint,
'TZ': self.config.timezone,
},
volumes={
str(self.config.path_extern_config):
{'bind': '/config', 'mode': 'rw'},
str(self.config.path_extern_ssl):
{'bind': '/ssl', 'mode': 'ro'},
str(self.config.path_extern_share):
{'bind': '/share', 'mode': 'rw'},
}
)
except docker.errors.DockerException as err:
_LOGGER.error("Can't run %s -> %s", self.image, err)
return False
_LOGGER.info(
"Start homeassistant %s with version %s", self.image, self.version)
return True
def _execute_command(self, command):
"""Create a temporary container and run command.
Need run inside executor.
"""
_LOGGER.info("Run command '%s' on %s", command, self.image)
try:
container = self.dock.containers.run(
self.image,
command=command,
detach=True,
stdout=True,
stderr=True,
environment={
'TZ': self.config.timezone,
},
volumes={
str(self.config.path_extern_config):
{'bind': '/config', 'mode': 'ro'},
str(self.config.path_extern_ssl):
{'bind': '/ssl', 'mode': 'ro'},
}
)
# wait until command is done
exit_code = container.wait()
output = container.logs()
except docker.errors.DockerException as err:
_LOGGER.error("Can't execute command -> %s", err)
return (None, b"")
# cleanup container
with suppress(docker.errors.DockerException):
container.remove(force=True)
return (exit_code, output)


@@ -1,54 +0,0 @@
"""Init file for HassIO docker object."""
import logging
import os
from . import DockerBase
from .util import docker_process
from ..const import RESTART_EXIT_CODE
_LOGGER = logging.getLogger(__name__)
class DockerSupervisor(DockerBase):
"""Docker hassio wrapper for HomeAssistant."""
def __init__(self, config, loop, dock, stop_callback, image=None):
"""Initialize docker base wrapper."""
super().__init__(config, loop, dock, image=image)
self.stop_callback = stop_callback
@property
def name(self):
"""Return name of docker container."""
return os.environ['SUPERVISOR_NAME']
@docker_process
async def update(self, tag):
"""Update a supervisor docker image."""
_LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)
if await self.loop.run_in_executor(None, self._install, tag):
self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
return True
return False
async def run(self):
"""Run docker image."""
raise RuntimeError("Not support on supervisor docker container!")
async def install(self, tag):
"""Pull docker image."""
raise RuntimeError("Not support on supervisor docker container!")
async def stop(self):
"""Stop/remove docker container."""
raise RuntimeError("Not support on supervisor docker container!")
async def remove(self):
"""Remove docker image."""
raise RuntimeError("Not support on supervisor docker container!")
async def restart(self):
"""Restart docker container."""
raise RuntimeError("Not support on supervisor docker container!")


@@ -1,60 +0,0 @@
"""HassIO docker utilitys."""
import logging
import re
from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64
_LOGGER = logging.getLogger(__name__)
HASSIO_BASE_IMAGE = {
ARCH_ARMHF: "homeassistant/armhf-base:latest",
ARCH_AARCH64: "homeassistant/aarch64-base:latest",
ARCH_I386: "homeassistant/i386-base:latest",
ARCH_AMD64: "homeassistant/amd64-base:latest",
}
TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")
def dockerfile_template(dockerfile, arch, version, meta_type):
"""Prepare a Hass.IO dockerfile."""
buff = []
hassio_image = HASSIO_BASE_IMAGE[arch]
custom_image = re.compile(r"^#{}:FROM".format(arch))
# read docker
with dockerfile.open('r') as dock_input:
for line in dock_input:
line = TMPL_IMAGE.sub(hassio_image, line)
line = custom_image.sub("FROM", line)
buff.append(line)
# add metadata
buff.append(create_metadata(version, arch, meta_type))
# write docker
with dockerfile.open('w') as dock_output:
dock_output.writelines(buff)
def create_metadata(version, arch, meta_type):
"""Generate docker label layer for hassio."""
return ('LABEL io.hass.version="{}" '
'io.hass.arch="{}" '
'io.hass.type="{}"').format(version, arch, meta_type)
# pylint: disable=protected-access
def docker_process(method):
"""Wrap function with only run once."""
async def wrap_api(api, *args, **kwargs):
"""Return api wrapper."""
if api._lock.locked():
_LOGGER.error(
"Can't excute %s while a task is in progress", method.__name__)
return False
async with api._lock:
return await method(api, *args, **kwargs)
return wrap_api

122
hassio/docker/__init__.py Normal file

@@ -0,0 +1,122 @@
"""Init file for Hass.io Docker object."""
from contextlib import suppress
import logging
import attr
import docker
from .network import DockerNetwork
from ..const import SOCKET_DOCKER
_LOGGER = logging.getLogger(__name__)
@attr.s(frozen=True)
class CommandReturn:
"""Return object from command run."""
exit_code = attr.ib()
output = attr.ib()
class DockerAPI:
"""Docker Hass.io wrapper.
This class is not AsyncIO safe!
"""
def __init__(self):
"""Initialize Docker base wrapper."""
self.docker = docker.DockerClient(
base_url="unix:/{}".format(str(SOCKET_DOCKER)),
version='auto', timeout=900)
self.network = DockerNetwork(self.docker)
@property
def images(self):
"""Return API images."""
return self.docker.images
@property
def containers(self):
"""Return API containers."""
return self.docker.containers
@property
def api(self):
"""Return API containers."""
return self.docker.api
def run(self, image, **kwargs):
""""Create a Docker container and run it.
Need run inside executor.
"""
name = kwargs.get('name', image)
network_mode = kwargs.get('network_mode')
hostname = kwargs.get('hostname')
# Setup network
kwargs['dns_search'] = ["."]
if network_mode:
kwargs['dns'] = [str(self.network.supervisor)]
kwargs['dns_opt'] = ["ndots:0"]
else:
kwargs['network'] = None
# Create container
try:
container = self.docker.containers.create(
image, use_config_proxy=False, **kwargs)
except docker.errors.DockerException as err:
_LOGGER.error("Can't create container from %s: %s", name, err)
return False
# attach network
if not network_mode:
alias = [hostname] if hostname else None
if self.network.attach_container(container, alias=alias):
self.network.detach_default_bridge(container)
else:
_LOGGER.warning("Can't attach %s to hassio-net!", name)
# run container
try:
container.start()
except docker.errors.DockerException as err:
_LOGGER.error("Can't start %s: %s", name, err)
return False
return True
def run_command(self, image, command=None, **kwargs):
"""Create a temporary container and run command.
Need run inside executor.
"""
stdout = kwargs.get('stdout', True)
stderr = kwargs.get('stderr', True)
_LOGGER.info("Run command '%s' on %s", command, image)
try:
container = self.docker.containers.run(
image,
command=command,
network=self.network.name,
use_config_proxy=False,
**kwargs
)
# wait until command is done
result = container.wait()
output = container.logs(stdout=stdout, stderr=stderr)
except docker.errors.DockerException as err:
_LOGGER.error("Can't execute command: %s", err)
return CommandReturn(None, b"")
finally:
# cleanup container
with suppress(docker.errors.DockerException):
container.remove(force=True)
return CommandReturn(result.get('StatusCode'), output)
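
Both helpers are blocking and must run inside the executor; `run_command` waits for the temporary container to exit and returns the frozen `CommandReturn`. A sketch under that assumption, where `docker_api` is an illustrative handle to a DockerAPI instance and the image/command are assumptions:

# Called from an executor thread, e.g. via sys_run_in_executor:
result = docker_api.run_command(
    "homeassistant/amd64-base:latest", command="uname -a")
if result.exit_code == 0:
    print(result.output.decode())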

447
hassio/docker/addon.py Normal file

@@ -0,0 +1,447 @@
"""Init file for Hass.io add-on Docker object."""
import logging
import os
import docker
import requests
from .interface import DockerInterface
from ..addons.build import AddonBuild
from ..const import (MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE,
ENV_TOKEN, ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
from ..utils import process_lock
_LOGGER = logging.getLogger(__name__)
AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
class DockerAddon(DockerInterface):
"""Docker Hass.io wrapper for Home Assistant."""
def __init__(self, coresys, slug):
"""Initialize Docker Home Assistant wrapper."""
super().__init__(coresys)
self._id = slug
@property
def addon(self):
"""Return add-on of Docker image."""
return self.sys_addons.get(self._id)
@property
def image(self):
"""Return name of Docker image."""
return self.addon.image
@property
def timeout(self):
"""Return timeout for Docker actions."""
return self.addon.timeout
@property
def version(self):
"""Return version of Docker image."""
if self.addon.legacy:
return self.addon.version_installed
return super().version
@property
def arch(self):
"""Return arch of Docker image."""
if self.addon.legacy:
return self.sys_arch.default
return super().arch
@property
def name(self):
"""Return name of Docker container."""
return "addon_{}".format(self.addon.slug)
@property
def ipc(self):
"""Return the IPC namespace."""
if self.addon.host_ipc:
return 'host'
return None
@property
def full_access(self):
"""Return True if full access is enabled."""
return not self.addon.protected and self.addon.with_full_access
@property
def hostname(self):
"""Return slug/id of add-on."""
return self.addon.slug.replace('_', '-')
@property
def environment(self):
"""Return environment for Docker add-on."""
addon_env = self.addon.environment or {}
# Provide options for legacy add-ons
if self.addon.legacy:
for key, value in self.addon.options.items():
if isinstance(value, (int, str)):
addon_env[key] = value
else:
_LOGGER.warning(
"Can not set nested option %s as Docker env", key)
return {
**addon_env,
ENV_TIME: self.sys_timezone,
ENV_TOKEN: self.addon.hassio_token,
}
@property
def devices(self):
"""Return needed devices."""
devices = self.addon.devices or []
# Use audio devices
if self.addon.with_audio and self.sys_hardware.support_audio:
devices.append(AUDIO_DEVICE)
# Auto mapping UART devices
if self.addon.auto_uart:
for device in self.sys_hardware.serial_devices:
devices.append(f"{device}:{device}:rwm")
# Return None if no devices are present
return devices or None
@property
def ports(self):
"""Filter None from add-on ports."""
if not self.addon.ports:
return None
return {
container_port: host_port
for container_port, host_port in self.addon.ports.items()
if host_port
}
@property
def security_opt(self):
"""Controlling security options."""
security = []
# AppArmor
apparmor = self.sys_host.apparmor.available
if not apparmor or self.addon.apparmor == SECURITY_DISABLE:
security.append("apparmor:unconfined")
elif self.addon.apparmor == SECURITY_PROFILE:
security.append(f"apparmor={self.addon.slug}")
# Disable Seccomp / We don't support it officially and it
# makes trouble on some kinds of host systems.
security.append("seccomp=unconfined")
return security
@property
def tmpfs(self):
"""Return tmpfs for Docker add-on."""
options = self.addon.tmpfs
if options:
return {"/tmpfs": f"{options}"}
return None
@property
def network_mapping(self):
"""Return hosts mapping."""
return {
'homeassistant': self.sys_docker.network.gateway,
'hassio': self.sys_docker.network.supervisor,
}
@property
def network_mode(self):
"""Return network mode for add-on."""
if self.addon.host_network:
return 'host'
return None
@property
def pid_mode(self):
"""Return PID mode for add-on."""
if not self.addon.protected and self.addon.host_pid:
return 'host'
return None
@property
def volumes(self):
"""Generate volumes for mappings."""
volumes = {
str(self.addon.path_extern_data): {
'bind': "/data",
'mode': 'rw'
}
}
addon_mapping = self.addon.map_volumes
# setup config mappings
if MAP_CONFIG in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_homeassistant): {
'bind': "/config",
'mode': addon_mapping[MAP_CONFIG]
}
})
if MAP_SSL in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_ssl): {
'bind': "/ssl",
'mode': addon_mapping[MAP_SSL]
}
})
if MAP_ADDONS in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_addons_local): {
'bind': "/addons",
'mode': addon_mapping[MAP_ADDONS]
}
})
if MAP_BACKUP in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_backup): {
'bind': "/backup",
'mode': addon_mapping[MAP_BACKUP]
}
})
if MAP_SHARE in addon_mapping:
volumes.update({
str(self.sys_config.path_extern_share): {
'bind': "/share",
'mode': addon_mapping[MAP_SHARE]
}
})
# Init other hardware mappings
# GPIO support
if self.addon.with_gpio and self.sys_hardware.support_gpio:
for gpio_path in ("/sys/class/gpio", "/sys/devices/platform/soc"):
volumes.update({
gpio_path: {
'bind': gpio_path,
'mode': 'rw'
},
})
# DeviceTree support
if self.addon.with_devicetree:
volumes.update({
"/sys/firmware/devicetree/base": {
'bind': "/device-tree",
'mode': 'ro'
},
})
# Kernel Modules support
if self.addon.with_kernel_modules:
volumes.update({
"/lib/modules": {
'bind': "/lib/modules",
'mode': 'ro'
},
})
# Docker API support
if not self.addon.protected and self.addon.access_docker_api:
volumes.update({
"/var/run/docker.sock": {
'bind': "/var/run/docker.sock",
'mode': 'ro'
},
})
# Host D-Bus system
if self.addon.host_dbus:
volumes.update({
"/var/run/dbus": {
'bind': "/var/run/dbus",
'mode': 'rw'
}
})
# ALSA configuration
if self.addon.with_audio:
volumes.update({
str(self.addon.path_extern_asound): {
'bind': "/etc/asound.conf",
'mode': 'ro'
}
})
return volumes
def _run(self):
"""Run Docker image.
Need run inside executor.
"""
if self._is_running():
return True
# Security check
if not self.addon.protected:
_LOGGER.warning("%s run with disabled protected mode!",
self.addon.name)
# cleanup
self._stop()
ret = self.sys_docker.run(
self.image,
name=self.name,
hostname=self.hostname,
detach=True,
init=True,
privileged=self.full_access,
ipc_mode=self.ipc,
stdin_open=self.addon.with_stdin,
network_mode=self.network_mode,
pid_mode=self.pid_mode,
ports=self.ports,
extra_hosts=self.network_mapping,
devices=self.devices,
cap_add=self.addon.privileged,
security_opt=self.security_opt,
environment=self.environment,
volumes=self.volumes,
tmpfs=self.tmpfs)
if ret:
_LOGGER.info("Start Docker add-on %s with version %s", self.image,
self.version)
return ret
def _install(self, tag, image=None):
"""Pull Docker image or build it.
Need run inside executor.
"""
if self.addon.need_build:
return self._build(tag)
return super()._install(tag, image)
def _build(self, tag):
"""Build a Docker container.
Need run inside executor.
"""
build_env = AddonBuild(self.coresys, self._id)
_LOGGER.info("Start build %s:%s", self.image, tag)
try:
image, log = self.sys_docker.images.build(
use_config_proxy=False, **build_env.get_docker_args(tag))
_LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
image.tag(self.image, tag='latest')
# Update meta data
self._meta = image.attrs
except docker.errors.DockerException as err:
_LOGGER.error("Can't build %s:%s: %s", self.image, tag, err)
return False
_LOGGER.info("Build %s:%s done", self.image, tag)
return True
@process_lock
def export_image(self, path):
"""Export current images into a tar file."""
return self.sys_run_in_executor(self._export_image, path)
def _export_image(self, tar_file):
"""Export current images into a tar file.
Need run inside executor.
"""
try:
image = self.sys_docker.api.get_image(self.image)
except docker.errors.DockerException as err:
_LOGGER.error("Can't fetch image %s: %s", self.image, err)
return False
_LOGGER.info("Export image %s to %s", self.image, tar_file)
try:
with tar_file.open("wb") as write_tar:
for chunk in image:
write_tar.write(chunk)
except (OSError, requests.exceptions.ReadTimeout) as err:
_LOGGER.error("Can't write tar file %s: %s", tar_file, err)
return False
_LOGGER.info("Export image %s done", self.image)
return True
@process_lock
def import_image(self, path, tag):
"""Import a tar file as image."""
return self.sys_run_in_executor(self._import_image, path, tag)
def _import_image(self, tar_file, tag):
"""Import a tar file as image.
Need run inside executor.
"""
try:
with tar_file.open("rb") as read_tar:
self.sys_docker.api.load_image(read_tar, quiet=True)
image = self.sys_docker.images.get(self.image)
image.tag(self.image, tag=tag)
except (docker.errors.DockerException, OSError) as err:
_LOGGER.error("Can't import image %s: %s", self.image, err)
return False
_LOGGER.info("Import image %s and tag %s", tar_file, tag)
self._meta = image.attrs
self._cleanup()
return True
@process_lock
def write_stdin(self, data):
"""Write to add-on stdin."""
return self.sys_run_in_executor(self._write_stdin, data)
def _write_stdin(self, data):
"""Write to add-on stdin.
Need run inside executor.
"""
if not self._is_running():
return False
try:
# Load needed docker objects
container = self.sys_docker.containers.get(self.name)
socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
except docker.errors.DockerException as err:
_LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
return False
try:
# Write to stdin
data += b"\n"
os.write(socket.fileno(), data)
socket.close()
except OSError as err:
_LOGGER.error("Can't write to %s stdin: %s", self.name, err)
return False
return True
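
`write_stdin` takes raw bytes, appends the newline itself, and runs behind the process lock in the executor, so callers simply await the returned future. Illustrative call site; the `addon_docker` handle and the payload are assumptions:

# addon_docker is an illustrative DockerAddon instance
success = await addon_docker.write_stdin(b'{"command": "reload"}')
# success is False if the container is not running or the socket write failed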


@@ -0,0 +1,38 @@
"""HassOS Cli docker object."""
import logging
import docker
from ..coresys import CoreSysAttributes
from .interface import DockerInterface
_LOGGER = logging.getLogger(__name__)
class DockerHassOSCli(DockerInterface, CoreSysAttributes):
"""Docker Hass.io wrapper for HassOS Cli."""
@property
def image(self):
"""Return name of HassOS CLI image."""
return f"homeassistant/{self.sys_arch.supervisor}-hassio-cli"
def _stop(self, remove_container=True):
"""Don't need stop."""
return True
def _attach(self):
"""Attach to running Docker container.
Need run inside executor.
"""
try:
image = self.sys_docker.images.get(self.image)
except docker.errors.DockerException:
_LOGGER.warning("Can't find a HassOS CLI %s", self.image)
else:
self._meta = image.attrs
_LOGGER.info(
"Found HassOS CLI %s with version %s", self.image, self.version
)


@@ -0,0 +1,134 @@
"""Init file for Hass.io Docker object."""
import logging
import docker
from .interface import DockerInterface
from ..const import ENV_TOKEN, ENV_TIME, LABEL_MACHINE
_LOGGER = logging.getLogger(__name__)
HASS_DOCKER_NAME = "homeassistant"
class DockerHomeAssistant(DockerInterface):
"""Docker Hass.io wrapper for Home Assistant."""
@property
def machine(self):
"""Return machine of Home Assistant Docker image."""
if self._meta and LABEL_MACHINE in self._meta["Config"]["Labels"]:
return self._meta["Config"]["Labels"][LABEL_MACHINE]
return None
@property
def image(self):
"""Return name of Docker image."""
return self.sys_homeassistant.image
@property
def name(self):
"""Return name of Docker container."""
return HASS_DOCKER_NAME
@property
def devices(self):
"""Create list of special device to map into Docker."""
devices = []
for device in self.sys_hardware.serial_devices:
devices.append(f"{device}:{device}:rwm")
return devices or None
def _run(self):
"""Run Docker image.
Need run inside executor.
"""
if self._is_running():
return False
# cleanup
self._stop()
ret = self.sys_docker.run(
self.image,
name=self.name,
hostname=self.name,
detach=True,
privileged=True,
init=True,
devices=self.devices,
network_mode="host",
environment={
"HASSIO": self.sys_docker.network.supervisor,
ENV_TIME: self.sys_timezone,
ENV_TOKEN: self.sys_homeassistant.hassio_token,
},
volumes={
str(self.sys_config.path_extern_homeassistant): {
"bind": "/config",
"mode": "rw",
},
str(self.sys_config.path_extern_ssl): {"bind": "/ssl", "mode": "ro"},
str(self.sys_config.path_extern_share): {
"bind": "/share",
"mode": "rw",
},
},
)
if ret:
_LOGGER.info(
"Start homeassistant %s with version %s", self.image, self.version
)
return ret
def _execute_command(self, command):
"""Create a temporary container and run command.
Need run inside executor.
"""
return self.sys_docker.run_command(
self.image,
command,
privileged=True,
init=True,
devices=self.devices,
detach=True,
stdout=True,
stderr=True,
environment={ENV_TIME: self.sys_timezone},
volumes={
str(self.sys_config.path_extern_homeassistant): {
"bind": "/config",
"mode": "rw",
},
str(self.sys_config.path_extern_ssl): {"bind": "/ssl", "mode": "ro"},
str(self.sys_config.path_extern_share): {
"bind": "/share",
"mode": "ro",
},
},
)
def is_initialize(self):
"""Return True if Docker container exists."""
return self.sys_run_in_executor(self._is_initialize)
def _is_initialize(self):
"""Return True if docker container exists.
Need run inside executor.
"""
try:
docker_container = self.sys_docker.containers.get(self.name)
docker_image = self.sys_docker.images.get(self.image)
except docker.errors.DockerException:
return False
# we run on an old image, stop and start it
if docker_container.image.id != docker_image.id:
return False
return True

408
hassio/docker/interface.py Normal file

@@ -0,0 +1,408 @@
"""Interface class for Hass.io Docker object."""
import asyncio
from contextlib import suppress
import logging
import docker
from ..const import LABEL_ARCH, LABEL_VERSION
from ..coresys import CoreSysAttributes
from ..utils import process_lock
from .stats import DockerStats
_LOGGER = logging.getLogger(__name__)
class DockerInterface(CoreSysAttributes):
"""Docker Hass.io interface."""
def __init__(self, coresys):
"""Initialize Docker base wrapper."""
self.coresys = coresys
self._meta = None
self.lock = asyncio.Lock(loop=coresys.loop)
@property
def timeout(self):
"""Return timeout for Docker actions."""
return 30
@property
def name(self):
"""Return name of Docker container."""
return None
@property
def meta_config(self):
"""Return meta data of configuration for container/image."""
if not self._meta:
return {}
return self._meta.get("Config", {})
@property
def meta_labels(self):
"""Return meta data of labels for container/image."""
return self.meta_config.get("Labels") or {}
@property
def image(self):
"""Return name of Docker image."""
return self.meta_config.get("Image")
@property
def version(self):
"""Return version of Docker image."""
return self.meta_labels.get(LABEL_VERSION)
@property
def arch(self):
"""Return arch of Docker image."""
return self.meta_labels.get(LABEL_ARCH)
@property
def in_progress(self):
"""Return True if a task is in progress."""
return self.lock.locked()
@process_lock
def install(self, tag, image=None):
"""Pull docker image."""
return self.sys_run_in_executor(self._install, tag, image)
def _install(self, tag, image=None):
"""Pull Docker image.
Need run inside executor.
"""
image = image or self.image
try:
_LOGGER.info("Pull image %s tag %s.", image, tag)
docker_image = self.sys_docker.images.pull(f"{image}:{tag}")
docker_image.tag(image, tag="latest")
self._meta = docker_image.attrs
except docker.errors.APIError as err:
_LOGGER.error("Can't install %s:%s -> %s.", image, tag, err)
return False
_LOGGER.info("Tag image %s with version %s as latest", image, tag)
return True
def exists(self):
"""Return True if Docker image exists in local repository."""
return self.sys_run_in_executor(self._exists)
def _exists(self):
"""Return True if Docker image exists in local repository.
Need run inside executor.
"""
try:
docker_image = self.sys_docker.images.get(self.image)
assert f"{self.image}:{self.version}" in docker_image.tags
except (docker.errors.DockerException, AssertionError):
return False
return True
def is_running(self):
"""Return True if Docker is running.
Return a Future.
"""
return self.sys_run_in_executor(self._is_running)
def _is_running(self):
"""Return True if Docker is running.
Need run inside executor.
"""
try:
docker_container = self.sys_docker.containers.get(self.name)
docker_image = self.sys_docker.images.get(self.image)
except docker.errors.DockerException:
return False
# container is not running
if docker_container.status != "running":
return False
# the container was created from an outdated image; report as not running so it gets recreated
if docker_container.image.id != docker_image.id:
return False
return True
@process_lock
def attach(self):
"""Attach to running Docker container."""
return self.sys_run_in_executor(self._attach)
def _attach(self):
"""Attach to running docker container.
Need run inside executor.
"""
try:
if self.image:
self._meta = self.sys_docker.images.get(self.image).attrs
else:
self._meta = self.sys_docker.containers.get(self.name).attrs
except docker.errors.DockerException:
return False
_LOGGER.info("Attach to image %s with version %s", self.image, self.version)
return True
@process_lock
def run(self):
"""Run Docker image."""
return self.sys_run_in_executor(self._run)
def _run(self):
"""Run Docker image.
Need run inside executor.
"""
raise NotImplementedError()
@process_lock
def stop(self, remove_container=True):
"""Stop/remove Docker container."""
return self.sys_run_in_executor(self._stop, remove_container)
def _stop(self, remove_container=True):
"""Stop/remove Docker container.
Need run inside executor.
"""
try:
docker_container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return False
if docker_container.status == "running":
_LOGGER.info("Stop %s Docker application", self.image)
with suppress(docker.errors.DockerException):
docker_container.stop(timeout=self.timeout)
if remove_container:
with suppress(docker.errors.DockerException):
_LOGGER.info("Clean %s Docker application", self.image)
docker_container.remove(force=True)
return True
@process_lock
def start(self):
"""Start Docker container."""
return self.sys_run_in_executor(self._start)
def _start(self):
"""Start docker container.
Need run inside executor.
"""
try:
docker_container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return False
_LOGGER.info("Start %s", self.image)
try:
docker_container.start()
except docker.errors.DockerException as err:
_LOGGER.error("Can't start %s: %s", self.image, err)
return False
return True
@process_lock
def remove(self):
"""Remove Docker images."""
return self.sys_run_in_executor(self._remove)
def _remove(self):
"""remove docker images.
Need run inside executor.
"""
# Cleanup container
self._stop()
_LOGGER.info("Remove Docker %s with latest and %s", self.image, self.version)
try:
with suppress(docker.errors.ImageNotFound):
self.sys_docker.images.remove(image=f"{self.image}:latest", force=True)
with suppress(docker.errors.ImageNotFound):
self.sys_docker.images.remove(
image=f"{self.image}:{self.version}", force=True
)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't remove image %s: %s", self.image, err)
return False
self._meta = None
return True
@process_lock
def update(self, tag, image=None):
"""Update a Docker image."""
return self.sys_run_in_executor(self._update, tag, image)
def _update(self, tag, image=None):
"""Update a docker image.
Need run inside executor.
"""
image = image or self.image
_LOGGER.info(
"Update Docker %s:%s to %s:%s", self.image, self.version, image, tag
)
# Update docker image
if not self._install(tag, image):
return False
# Stop container & cleanup
self._stop()
self._cleanup()
return True
def logs(self):
"""Return Docker logs of container.
Return a Future.
"""
return self.sys_run_in_executor(self._logs)
def _logs(self):
"""Return Docker logs of container.
Need run inside executor.
"""
try:
docker_container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return b""
try:
return docker_container.logs(tail=100, stdout=True, stderr=True)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't grep logs from %s: %s", self.image, err)
@process_lock
def cleanup(self):
"""Check if old version exists and cleanup."""
return self.sys_run_in_executor(self._cleanup)
def _cleanup(self):
"""Check if old version exists and cleanup.
Need run inside executor.
"""
try:
latest = self.sys_docker.images.get(self.image)
except docker.errors.DockerException:
_LOGGER.warning("Can't find %s for cleanup", self.image)
return False
for image in self.sys_docker.images.list(name=self.image):
if latest.id == image.id:
continue
with suppress(docker.errors.DockerException):
_LOGGER.info("Cleanup Docker images: %s", image.tags)
self.sys_docker.images.remove(image.id, force=True)
return True
@process_lock
def restart(self):
"""Restart docker container."""
return self.sys_loop.run_in_executor(None, self._restart)
def _restart(self):
"""Restart docker container.
Need run inside executor.
"""
try:
container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return False
_LOGGER.info("Restart %s", self.image)
try:
container.restart(timeout=self.timeout)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't restart %s: %s", self.image, err)
return False
return True
@process_lock
def execute_command(self, command):
"""Create a temporary container and run command."""
return self.sys_run_in_executor(self._execute_command, command)
def _execute_command(self, command):
"""Create a temporary container and run command.
Need run inside executor.
"""
raise NotImplementedError()
def stats(self):
"""Read and return stats from container."""
return self.sys_run_in_executor(self._stats)
def _stats(self):
"""Create a temporary container and run command.
Need run inside executor.
"""
try:
docker_container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return None
try:
stats = docker_container.stats(stream=False)
return DockerStats(stats)
except docker.errors.DockerException as err:
_LOGGER.error("Can't read stats from %s: %s", self.name, err)
return None
def is_fails(self):
"""Return True if Docker is failing state.
Return a Future.
"""
return self.sys_run_in_executor(self._is_fails)
def _is_fails(self):
"""Return True if Docker is failing state.
Need run inside executor.
"""
try:
docker_container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return False
# container is not running
if docker_container.status != "exited":
return False
# Check return value
if int(docker_container.attrs["State"]["ExitCode"]) != 0:
return True
return False
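The base class leaves _run and _execute_command abstract. A minimal sketch of a concrete wrapper, assuming sys_docker.containers behaves like the docker-py container collection used elsewhere in this file (all names below are illustrative, not taken from the diff):

class DockerExample(DockerInterface):
    """Illustrative wrapper: only the identity properties and _run are filled in."""

    @property
    def name(self):
        return "addon_example"

    @property
    def image(self):
        return "example/amd64-addon-example"

    def _run(self):
        """Run Docker image.

        Need run inside executor.
        """
        if self._is_running():
            return True

        # Remove any stale container before starting a fresh one.
        self._stop()

        docker_container = self.sys_docker.containers.run(
            f"{self.image}:latest",
            name=self.name,
            detach=True,
        )
        self._meta = docker_container.attrs
        return True

From the event loop only the public coroutines are used (await wrapper.run(), await wrapper.update(tag)); the process_lock decorator serializes them on the per-instance lock, which is what in_progress reports.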

93
hassio/docker/network.py Normal file

@@ -0,0 +1,93 @@
"""Internal network manager for Hass.io."""
import logging
import docker
from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE
_LOGGER = logging.getLogger(__name__)
class DockerNetwork:
"""Internal Hass.io Network.
This class is not AsyncIO safe!
"""
def __init__(self, dock):
"""Initialize internal Hass.io network."""
self.docker = dock
self.network = self._get_network()
@property
def name(self):
"""Return name of network."""
return DOCKER_NETWORK
@property
def containers(self):
"""Return of connected containers from network."""
return self.network.containers
@property
def gateway(self):
"""Return gateway of the network."""
return DOCKER_NETWORK_MASK[1]
@property
def supervisor(self):
"""Return supervisor of the network."""
return DOCKER_NETWORK_MASK[2]
def _get_network(self):
"""Get HassIO network."""
try:
return self.docker.networks.get(DOCKER_NETWORK)
except docker.errors.NotFound:
_LOGGER.info("Can't find Hass.io network, create new network")
ipam_pool = docker.types.IPAMPool(
subnet=str(DOCKER_NETWORK_MASK),
gateway=str(self.gateway),
iprange=str(DOCKER_NETWORK_RANGE)
)
ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])
return self.docker.networks.create(
DOCKER_NETWORK, driver='bridge', ipam=ipam_config,
enable_ipv6=False, options={
"com.docker.network.bridge.name": DOCKER_NETWORK,
})
def attach_container(self, container, alias=None, ipv4=None):
"""Attach container to Hass.io network.
Need run inside executor.
"""
ipv4 = str(ipv4) if ipv4 else None
try:
self.network.connect(container, aliases=alias, ipv4_address=ipv4)
except docker.errors.APIError as err:
_LOGGER.error("Can't link container to hassio-net: %s", err)
return False
self.network.reload()
return True
def detach_default_bridge(self, container):
"""Detach default Docker bridge.
Need run inside executor.
"""
try:
default_network = self.docker.networks.get('bridge')
default_network.disconnect(container)
except docker.errors.NotFound:
return
except docker.errors.APIError as err:
_LOGGER.warning(
"Can't disconnect container from default: %s", err)

90
hassio/docker/stats.py Normal file

@@ -0,0 +1,90 @@
"""Calc and represent docker stats data."""
from contextlib import suppress
class DockerStats:
"""Hold stats data from container inside."""
def __init__(self, stats):
"""Initialize Docker stats."""
self._cpu = 0.0
self._network_rx = 0
self._network_tx = 0
self._blk_read = 0
self._blk_write = 0
try:
self._memory_usage = stats['memory_stats']['usage']
self._memory_limit = stats['memory_stats']['limit']
except KeyError:
self._memory_usage = 0
self._memory_limit = 0
with suppress(KeyError):
self._calc_cpu_percent(stats)
with suppress(KeyError):
self._calc_network(stats['networks'])
with suppress(KeyError):
self._calc_block_io(stats['blkio_stats'])
def _calc_cpu_percent(self, stats):
"""Calculate CPU percent."""
cpu_delta = stats['cpu_stats']['cpu_usage']['total_usage'] - \
stats['precpu_stats']['cpu_usage']['total_usage']
system_delta = stats['cpu_stats']['system_cpu_usage'] - \
stats['precpu_stats']['system_cpu_usage']
if system_delta > 0.0 and cpu_delta > 0.0:
self._cpu = (cpu_delta / system_delta) * \
len(stats['cpu_stats']['cpu_usage']['percpu_usage']) * 100.0
def _calc_network(self, networks):
"""Calculate Network IO stats."""
for _, stats in networks.items():
self._network_rx += stats['rx_bytes']
self._network_tx += stats['tx_bytes']
def _calc_block_io(self, blkio):
"""Calculate block IO stats."""
for stats in blkio['io_service_bytes_recursive']:
if stats['op'] == 'Read':
self._blk_read += stats['value']
elif stats['op'] == 'Write':
self._blk_write += stats['value']
@property
def cpu_percent(self):
"""Return CPU percent."""
return self._cpu
@property
def memory_usage(self):
"""Return memory usage."""
return self._memory_usage
@property
def memory_limit(self):
"""Return memory limit."""
return self._memory_limit
@property
def network_rx(self):
"""Return network rx stats."""
return self._network_rx
@property
def network_tx(self):
"""Return network rx stats."""
return self._network_tx
@property
def blk_read(self):
"""Return block IO read stats."""
return self._blk_read
@property
def blk_write(self):
"""Return block IO write stats."""
return self._blk_write
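A minimal sketch of the CPU calculation with made-up numbers (a real payload comes from container.stats(stream=False)); the absent networks and blkio_stats keys are simply skipped by the suppress(KeyError) blocks:

fake_stats = {
    "memory_stats": {"usage": 50 * 1024**2, "limit": 512 * 1024**2},
    "cpu_stats": {
        "cpu_usage": {"total_usage": 105_000_000, "percpu_usage": [0, 0]},
        "system_cpu_usage": 4_160_000_000,
    },
    "precpu_stats": {
        "cpu_usage": {"total_usage": 100_000_000},
        "system_cpu_usage": 4_000_000_000,
    },
}

stats = DockerStats(fake_stats)

# cpu_delta = 5_000_000, system_delta = 160_000_000, 2 CPUs reported
# -> (5_000_000 / 160_000_000) * 2 * 100.0 = 6.25
assert stats.cpu_percent == 6.25
assert stats.memory_usage == 50 * 1024**2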

Some files were not shown because too many files have changed in this diff.