Compare commits

336 commits (comparing 0.86 ... 105)

Author SHA1 Message Date
Pascal Vizeli
9084ac119f Fix version conflict 2018-05-29 19:40:16 +02:00
Pascal Vizeli
41943ba61a Delete .gitattributes 2018-05-29 19:38:00 +02:00
Pascal Vizeli
33794669a1 Last version.json update 2018-05-29 19:36:01 +02:00
Pascal Vizeli
fe155a4ff0 Read version from AWS (#488)
* Read version from AWS

* Update const.py

* Update updater.py

* Update updater.py

* Update updater.py

* Update updater.py

* Update updater.py

* Update const.py

* Update updater.py
2018-05-29 19:14:09 +02:00
Pascal Vizeli
124e487ef7 Support new panel generation (#487)
* Support new panel generation

* fix lint
2018-05-29 17:53:09 +02:00
Pascal Vizeli
f361916a60 Update docker timeout to 900sec (#486) 2018-05-29 17:37:20 +02:00
Pascal Vizeli
20afa1544b Bump version to 105 2018-05-29 00:22:12 +02:00
Pascal Vizeli
c08d5af4db Fix version conflicts 2018-05-29 00:21:24 +02:00
Pascal Vizeli
dc341c8af8 Fix version conflicts 2018-05-29 00:18:08 +02:00
Pascal Vizeli
2507b52adb Update Home Assistant to 0.70.0 2018-05-28 23:59:12 +02:00
Pascal Vizeli
1302708135 Update Home Assistant to 0.70.0 2018-05-28 23:58:45 +02:00
Pascal Vizeli
1314812f92 Update Home Assistant to 0.70.0 2018-05-28 23:53:28 +02:00
Pascal Vizeli
f739e3ed11 Update Hass.io to 104 2018-05-28 23:30:48 +02:00
Pascal Vizeli
abb526fc0f Update Panel / fix icons (#483) 2018-05-28 23:29:34 +02:00
Pascal Vizeli
efb1a24b8f Expose panel update (#482)
* Update __init__.py

* Update setup.py

* Update security.py

* Update setup.py

* Update __init__.py

* Update setup.py

* Update __init__.py
2018-05-28 23:16:03 +02:00
Pascal Vizeli
bc0835963d Bump version to 104 2018-05-28 21:28:19 +02:00
Pascal Vizeli
316190dff8 Fix new panel build for 0.70.0 (#481)
Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch>
2018-05-28 21:24:17 +02:00
Pascal Vizeli
029ead0c7c Home Assistant 0.70.0b7 2018-05-27 10:52:10 +02:00
Paulus Schoutsen
a85172f30b Update to b7 2018-05-26 22:03:24 -04:00
Pascal Vizeli
dfe2532813 0.70.0b5 2018-05-26 22:28:47 +02:00
Pascal Vizeli
cf3bb23629 Home Assistant 0.70.0b5 2018-05-26 22:28:31 +02:00
Pascal Vizeli
2132042aca Update Home Assistant to version 0.70.0b3 2018-05-25 19:27:49 +02:00
Pascal Vizeli
19e448fc54 Update Home Assistant to version 0.70.0b3 2018-05-25 19:27:33 +02:00
c727
a4e0fb8e99 Update HA beta to 0.70.0b2 2018-05-22 15:03:18 +02:00
Paulus Schoutsen
5b72e2887e Update Hass.io to 0.70.0b2 2018-05-21 21:14:41 -04:00
Pascal Vizeli
d2b6ec1b7e Update Home Assistant to version 0.70.0b1 2018-05-21 15:38:04 +02:00
Paulus Schoutsen
4b541a23c4 Update Hass.io to 0.70.0b1 2018-05-21 09:27:11 -04:00
Pascal Vizeli
99869449ae Update Home Assistant to 0.70.0b0 2018-05-19 10:21:23 +02:00
Pascal Vizeli
eab73f3895 Update Home Assistant to 0.70.0b0 2018-05-19 10:20:55 +02:00
Pascal Vizeli
9e96615ffa Update Home Assistant to version 0.69.1 2018-05-13 10:20:56 +02:00
Pascal Vizeli
350010feb5 Update Home Assistant to version 0.69.1 2018-05-13 10:20:38 +02:00
Pascal Vizeli
7395e4620b Update Home Assistant to version 0.69.1 2018-05-13 10:20:18 +02:00
Pascal Vizeli
7d91ae4513 Update Home Assistant to 0.69.0 2018-05-11 22:32:38 +02:00
Pascal Vizeli
343f759983 Update Home Assistant to 0.69.0 2018-05-11 22:32:01 +02:00
Pascal Vizeli
24ee3f8cc0 Update Home Assistant to 0.69.0 2018-05-11 22:31:41 +02:00
Pascal Vizeli
c143eadb62 Update Home-Assistant 2018-05-09 20:31:22 +02:00
Pascal Vizeli
e7df38f4d1 Merge pull request #467 from home-assistant/rc
Hass.io 103
2018-05-09 15:47:18 +02:00
Pascal Vizeli
3e42318ac8 Merge branch 'master' into rc 2018-05-09 15:18:53 +02:00
Pascal Vizeli
c6e5d2932e Update Hass.io 2018-05-09 13:15:54 +02:00
Pascal Vizeli
1aaf21a350 Update Hass.io and Home Assistant 2018-05-09 13:15:35 +02:00
Pascal Vizeli
f185eece8a Update Hass.io and Home Assistant 2018-05-09 13:15:17 +02:00
Pascal Vizeli
9d951280ef Update const.py 2018-05-09 11:07:07 +02:00
Pascal Vizeli
3f598bafc0 Bugfix panel loading (#464) 2018-05-09 11:06:19 +02:00
Franck Nijhof
cddd859f56 🔈 Improves of audio devices handling (#463) 2018-05-08 14:27:17 +02:00
Pascal Vizeli
e7adf50ec1 Update Home Assistant 0.69.0b2 2018-05-07 23:54:11 +02:00
Pascal Vizeli
ac437f809a Update Home Assistant 0.69.0b2 2018-05-07 23:53:54 +02:00
Pascal Vizeli
f13dee9b9d Update Hass.io 2018-05-06 09:53:56 +02:00
Pascal Vizeli
00855c0909 Update Home Assistant and Hass.io 2018-05-06 09:53:28 +02:00
Pascal Vizeli
1fafed5a07 Update Home Assistant and Hass.io 2018-05-06 09:52:55 +02:00
Pascal Vizeli
7adb81b350 Update const.py 2018-05-06 09:45:46 +02:00
Pascal Vizeli
4647035b00 Bugfix Websession 2018-05-06 09:44:58 +02:00
Pascal Vizeli
8ad7344e02 Update Hass.io to version 103.1 2018-05-05 23:29:58 +02:00
Pascal Vizeli
f1c46b3385 Update Hass.io to version 103.1 2018-05-05 23:29:41 +02:00
Pascal Vizeli
7f84073b12 Update Hass.io to version 103.1 2018-05-05 23:29:24 +02:00
Pascal Vizeli
e383a11bb7 Pump version to fix 2018-05-05 23:19:56 +02:00
Pascal Vizeli
cc113e2251 Update Hass.io to version 103 2018-05-05 19:05:30 +02:00
Pascal Vizeli
c5a3830c7d Merge remote-tracking branch 'origin/dev' into rc 2018-05-04 21:53:40 +02:00
Pascal Vizeli
a2abadc970 Update hass.io to version 103 2018-05-04 21:39:12 +02:00
Pascal Vizeli
db444b89d3 Update gdbus.py (#460)
* Update gdbus.py

* Update gdbus.py
2018-05-04 20:58:23 +02:00
Pascal Vizeli
77881e8a58 Update Panel (#459)
* Update Panel

* Update core.py
2018-05-04 20:04:34 +02:00
Pascal Vizeli
0b15f88da3 Bugfixes (#457)
* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py
2018-05-03 23:22:48 +02:00
Pascal Vizeli
7c6bf96f6f shield host functions 2018-05-03 01:00:13 +02:00
Pascal Vizeli
dc77e2d8d9 Update gdbus.py 2018-05-03 00:52:57 +02:00
Pascal Vizeli
68824fab4f fix bug 2018-05-02 23:31:23 +02:00
Pascal Vizeli
d6b3a36714 Update Dockerfile 2018-05-02 22:52:08 +02:00
Pascal Vizeli
8ab1f703c7 Update Home Assistant to version 0.68.1 2018-05-01 07:24:34 +02:00
Pascal Vizeli
95a4e292aa Update Home Assistant to version 0.68.1 2018-05-01 07:24:03 +02:00
Pascal Vizeli
3b9252558f Update Home Assistant to version 0.68.1 2018-05-01 07:23:46 +02:00
Pascal Vizeli
4a324dccc6 Pump version to 103 2018-04-30 14:49:34 +02:00
Pascal Vizeli
8fffb0f8b5 Fix version 2018-04-30 14:46:59 +02:00
Pascal Vizeli
87adfce211 Update Hass.io to version 0.102 2018-04-30 14:45:31 +02:00
Tod Schmidt
297813f6e6 fix for asound.tmpl (#454) 2018-04-30 14:09:20 +02:00
Pascal Vizeli
362315852a Pump version to 0.102 2018-04-30 14:08:18 +02:00
Tod Schmidt
d221f36cf8 fix for asound.tmpl (#454) 2018-04-30 07:39:44 +02:00
Pascal Vizeli
9e18589b6b Update info.py 2018-04-28 10:51:25 +02:00
Pascal Vizeli
c4d09210e1 Update Home-Assistant to version 0.68.0 2018-04-28 10:16:57 +02:00
Pascal Vizeli
43797c5eb5 Update Home-Assistant to version 0.68.0 2018-04-28 10:16:28 +02:00
Pascal Vizeli
fe38fe94dc Update Home-Assistant to version 0.68.0 2018-04-28 10:16:06 +02:00
Pascal Vizeli
f185291eca Update control.py 2018-04-27 22:33:41 +02:00
Pascal Vizeli
7541ae6476 Update hostname.py 2018-04-27 22:31:37 +02:00
Pascal Vizeli
d94715be2b Merge pull request #451 from home-assistant/cleanups
Cleanups & restructs
2018-04-26 21:47:06 +02:00
Pascal Vizeli
99cc5972c8 Update __init__.py 2018-04-26 21:44:06 +02:00
Pascal Vizeli
3d101a24a1 Update API.md 2018-04-26 21:42:45 +02:00
Pascal Vizeli
2ed3ddf05b fix lint 2018-04-26 21:23:43 +02:00
Pascal Vizeli
10b3658bd7 Revert last changes 2018-04-26 20:51:29 +02:00
Pascal Vizeli
9f5903089e Cleanup API 2018-04-26 19:49:12 +02:00
Pascal Vizeli
0593885ed4 revert error 2018-04-26 19:25:10 +02:00
Pascal Vizeli
3efbe11d49 Cleanup 2018-04-26 19:23:52 +02:00
Pascal Vizeli
1c2e0e5749 Update host.py 2018-04-26 11:36:51 +02:00
Pascal Vizeli
f64da6a547 Update exceptions.py 2018-04-26 11:33:43 +02:00
Pascal Vizeli
94fba7e175 Update info.py 2018-04-26 11:32:15 +02:00
Pascal Vizeli
a59245e6bb Update __init__.py 2018-04-26 11:29:12 +02:00
Pascal Vizeli
217c1acc62 Update and rename power.py to control.py 2018-04-26 11:27:02 +02:00
Pascal Vizeli
2c0a68bd8f Update and rename local.py to info.py 2018-04-26 09:44:49 +02:00
Pascal Vizeli
e37ffd6107 Merge pull request #450 from home-assistant/dbus
Initial Dbus support
2018-04-26 00:17:45 +02:00
Pascal Vizeli
3bde598fa7 fix host 2018-04-25 23:49:45 +02:00
Pascal Vizeli
53f42ff934 fix attr 2018-04-25 23:36:44 +02:00
Pascal Vizeli
9041eb9e9a Fix attributes 2018-04-25 23:24:55 +02:00
Pascal Vizeli
70ac395232 fix bugs 2018-04-25 22:47:17 +02:00
Pascal Vizeli
82f68b4a7b fix dbus 2018-04-25 22:27:57 +02:00
Pascal Vizeli
2b2f3214e9 fix selecter 2018-04-25 22:12:27 +02:00
Pascal Vizeli
1c0d63a02e fix sys 2018-04-25 21:49:28 +02:00
Pascal Vizeli
de77215630 Update Home-Assistant to version 0.68.0b1 2018-04-25 21:43:06 +02:00
Pascal Vizeli
f300b843c1 Update Home-Assistant to version 0.68.0b1 2018-04-25 21:42:35 +02:00
Pascal Vizeli
0bb81136bb Add hostname function 2018-04-24 23:38:40 +02:00
Pascal Vizeli
2a81ced817 Update gdbus.py 2018-04-24 15:52:18 +02:00
Pascal Vizeli
7363951a9a Update gdbus.py 2018-04-24 15:40:14 +02:00
Pascal Vizeli
6f770b78af add interface dbus class 2018-04-23 23:30:21 +02:00
Pascal Vizeli
10219a348f fix lint 2018-04-23 21:56:54 +02:00
Pascal Vizeli
23d1013cfa Follow the correct shutdown flow 2018-04-23 21:45:06 +02:00
Pascal Vizeli
05980d4147 some cleanup more 2018-04-23 21:22:29 +02:00
Pascal Vizeli
e5e25c895f Fix error handling 2018-04-23 21:10:48 +02:00
Pascal Vizeli
b486883ff6 Cleanups 2018-04-23 15:32:23 +02:00
Pascal Vizeli
42dd4d9557 Update coresys.py 2018-04-23 09:05:52 +02:00
Pascal Vizeli
7dff9e09a7 Update bootstrap.py 2018-04-23 09:05:08 +02:00
Pascal Vizeli
c315b026a3 Update __init__.py 2018-04-23 08:58:06 +02:00
Pascal Vizeli
a4ba4c80e8 Update __init__.py 2018-04-23 08:57:39 +02:00
Pascal Vizeli
ccd48b63a2 Create __init__.py 2018-04-23 08:51:47 +02:00
Pascal Vizeli
6d5f70ced6 Rename hassio/misc/dbus/rauc.py to hassio/dbus/rauc.py 2018-04-23 08:51:15 +02:00
Pascal Vizeli
ccffb4b786 Rename hassio/misc/rauc.py to hassio/misc/dbus/rauc.py 2018-04-23 08:50:51 +02:00
Pascal Vizeli
68dbbe212c Rename hassio/misc/networkmanager.py to hassio/dbus/networkmanager.py 2018-04-23 08:50:18 +02:00
Pascal Vizeli
5df869e08a Rename hassio/misc/systemd.py to hassio/dbus/systemd.py 2018-04-23 08:49:56 +02:00
Pascal Vizeli
63b9e023b4 add hostmanager 2018-04-22 17:59:41 +02:00
Pascal Vizeli
8f357739ec code cleanups 2018-04-22 17:44:03 +02:00
Pascal Vizeli
808fc0f8b6 Log internal exceptions on API level 2018-04-22 10:16:24 +02:00
Pascal Vizeli
1a6f6085e6 Add API support for new handling 2018-04-22 10:15:07 +02:00
Pascal Vizeli
0de3e9a233 update handling 2018-04-22 09:59:43 +02:00
Pascal Vizeli
f1237f124f small down the footprint 2018-04-22 09:35:36 +02:00
Pascal Vizeli
69142b6fb0 Add systemd dbus 2018-04-21 23:56:36 +02:00
Pascal Vizeli
28f295a1e2 Cleanup 2018-04-21 22:33:06 +02:00
Pascal Vizeli
55c2127baa Cleanup Loop handling 2018-04-21 16:30:31 +02:00
Pascal Vizeli
265c36b345 Claim exceptions 2018-04-21 15:39:08 +02:00
Pascal Vizeli
9f081fe32f Update Home-Assistant to version 0.68.0b0 2018-04-21 00:04:15 +02:00
Pascal Vizeli
e4fb6ad727 Update Home-Assistant to version 0.68.0b0 2018-04-21 00:03:52 +02:00
Pascal Vizeli
1040a1624a fix lint 2018-04-20 23:40:58 +02:00
Pascal Vizeli
a2ee2852a0 Update gdbus.py 2018-04-20 16:10:59 +02:00
Pascal Vizeli
b2e3b726d9 Update gdbus.py 2018-04-20 16:01:43 +02:00
Pascal Vizeli
0f4e557552 Update gdbus.py 2018-04-20 15:59:04 +02:00
Pascal Vizeli
2efa9f9483 Update gdbus.py 2018-04-20 15:48:36 +02:00
Pascal Vizeli
43e6ca8f4a Update gdbus.py 2018-04-20 10:56:47 +02:00
Pascal Vizeli
34d67a7bcd Update gdbus.py 2018-04-20 10:15:29 +02:00
Pascal Vizeli
5a6051f9a1 Update gdbus.py 2018-04-20 09:58:49 +02:00
Pascal Vizeli
157e48f946 Initial Dbus support 2018-04-19 23:27:20 +02:00
Pascal Vizeli
9469a258ff Update Home-Assistant to version 0.67.1 2018-04-18 12:30:15 +02:00
Pascal Vizeli
fd0aeb5341 Update Home-Assistant to version 0.67.1 2018-04-18 12:29:52 +02:00
Pascal Vizeli
4d4a4ce043 Update Home-Assistant to version 0.67.1 2018-04-18 12:29:38 +02:00
Pascal Vizeli
678f77cc05 Pump version to 0.102 2018-04-14 10:50:42 +02:00
Pascal Vizeli
6c30248389 Update Home-Assistant to version 0.67.0 2018-04-14 10:04:43 +02:00
Pascal Vizeli
fda7c1cf11 Update Home-Assistant to version 0.67.0 2018-04-14 10:04:19 +02:00
Pascal Vizeli
364e5ec0b8 Update Home-Assistant to version 0.67.0 2018-04-14 10:03:51 +02:00
Pascal Vizeli
947bf7799c Fix version conflict 2018-04-14 01:28:44 +02:00
Pascal Vizeli
e22836d706 Fix version conflict 2018-04-14 01:22:39 +02:00
Pascal Vizeli
6c8fcbfb80 Update Hass.io to version 0.101 2018-04-14 01:18:00 +02:00
Pascal Vizeli
f1fe1877fe Merge pull request #442 from home-assistant/ui-101
Update panel audio
2018-04-14 00:59:37 +02:00
Pascal Vizeli
3c0831c8eb Update pannel audio 2018-04-14 00:52:11 +02:00
Pascal Vizeli
35b3f364c9 Merge pull request #441 from home-assistant/new_audio_system
Extend Audio support
2018-04-14 00:44:37 +02:00
Pascal Vizeli
c4299b51cd Clear device on changes 2018-04-14 00:30:28 +02:00
Pascal Vizeli
31caed20fa Fix device name 2018-04-14 00:27:31 +02:00
Pascal Vizeli
41fed656c1 Use now attr 2018-04-14 00:19:29 +02:00
Pascal Vizeli
c5ee2ebc49 fix v2 2018-04-13 23:58:46 +02:00
Pascal Vizeli
743a218219 fix bug 2018-04-13 23:45:03 +02:00
Pascal Vizeli
093ef17fb7 find the error 2018-04-13 23:31:40 +02:00
Pascal Vizeli
a41912be0a fix db 2018-04-13 23:21:42 +02:00
Pascal Vizeli
5becd51b50 test 4 2018-04-13 23:11:20 +02:00
Pascal Vizeli
ef7a375396 test 2 2018-04-13 22:56:36 +02:00
Pascal Vizeli
19879e3287 test 2018-04-13 22:42:27 +02:00
Pascal Vizeli
d1c4f342fc Fix bootstrap 2018-04-13 22:16:55 +02:00
Pascal Vizeli
2f62b7046c cleanup 2018-04-13 22:04:01 +02:00
Pascal Vizeli
0cca8f522b rename audio object 2018-04-13 21:19:57 +02:00
Pascal Vizeli
39decec001 Update Home-Assistant to version 0.67.0b1 2018-04-13 06:36:42 +02:00
Pascal Vizeli
3489db2768 Update Home-Assistant to version 0.67.0b1 2018-04-13 06:36:21 +02:00
Pascal Vizeli
3382688669 Fix name 2018-04-13 00:03:03 +02:00
Pascal Vizeli
cf00ce7d78 fix lint 2018-04-12 23:50:58 +02:00
Pascal Vizeli
2c714aa003 fix 1 2018-04-12 23:11:38 +02:00
Pascal Vizeli
1e7858bf06 fix url 2018-04-12 23:00:42 +02:00
Pascal Vizeli
4e428c2e41 Fix options 2018-04-12 22:39:26 +02:00
Pascal Vizeli
b95ab3e95a Stage API 2018-04-12 22:15:08 +02:00
Pascal Vizeli
0dd7f8fbaa Fix some comments 2018-04-12 22:07:41 +02:00
Pascal Vizeli
a2789ac540 Extend Audio support 2018-04-11 23:53:30 +02:00
Pascal Vizeli
a785e10a3f Merge pull request #440 from home-assistant/addon-slug
Add slug to add-on info
2018-04-11 20:01:15 +02:00
Pascal Vizeli
10dad5a209 Update API.md 2018-04-11 18:53:01 +02:00
Pascal Vizeli
9327b24d44 Add slug to add-on info 2018-04-11 18:51:03 +02:00
Pascal Vizeli
7d02bb2fe9 Pump version to 0.101 2018-04-10 21:21:15 +02:00
Pascal Vizeli
a2d3ee0d67 Fix version conflict 2018-04-10 21:18:47 +02:00
Pascal Vizeli
d29fab69e8 Merge remote-tracking branch 'origin/dev' into rc 2018-04-09 23:46:52 +02:00
Pascal Vizeli
6205f40298 Pump version to 0.100 2018-04-09 23:45:33 +02:00
Pascal Vizeli
6b169f3f17 Fix version 2018-04-09 23:44:52 +02:00
Pascal Vizeli
0d4a5a7ffb Fix version conflict 2018-04-09 23:41:00 +02:00
Pascal Vizeli
dac90d29dd Update Hass.io to version 1.0 2018-04-09 23:39:19 +02:00
Pascal Vizeli
7e815633e7 Merge pull request #437 from home-assistant/fix_restart
Abstract restart logic
2018-04-09 23:31:55 +02:00
Pascal Vizeli
f062f31ca2 Fix logic 2018-04-09 23:10:12 +02:00
Pascal Vizeli
1374f90433 cleanup version 2018-04-09 22:52:16 +02:00
Pascal Vizeli
b692b19a4d Fix log bug 2018-04-09 22:50:21 +02:00
Pascal Vizeli
92d5b14cf5 Abstract restart logic 2018-04-09 22:13:16 +02:00
Pascal Vizeli
6a84829c16 Merge pull request #436 from home-assistant/aiohttp_update
Update aioHttp 3.1.2 & Handling
2018-04-09 21:29:21 +02:00
Pascal Vizeli
7036ecbd0a Update aioHttp 3.1.2 & Handling 2018-04-09 21:01:06 +02:00
Pascal Vizeli
19b5059972 Pump version 1.0 2018-04-09 20:23:04 +02:00
Pascal Vizeli
cebc377fa7 Merge pull request #435 from home-assistant/fix_docker_char
Bugfix, remove unsupported characters
2018-04-09 20:22:03 +02:00
Pascal Vizeli
d36c3919d7 Update docker 2018-04-09 20:01:48 +02:00
Pascal Vizeli
0684427373 Bugfix, remove unsupported characters 2018-04-08 23:30:42 +02:00
Pascal Vizeli
8ff79e85bf Merge pull request #433 from home-assistant/secure
Add support for Seccomp/AppArmor profiles
2018-04-08 23:07:33 +02:00
Pascal Vizeli
ee4b28a490 Fix's & cleanup 2018-04-08 22:27:58 +02:00
Pascal Vizeli
fddd5b8860 Fix lint 2018-04-07 00:32:54 +02:00
Pascal Vizeli
72279072ac Add support for Seccomp/AppArmor profiles 2018-04-07 00:24:23 +02:00
Pascal Vizeli
0b70448273 Update Home-Assistant 0.67.0b0 2018-04-06 23:20:02 +02:00
Pascal Vizeli
4eb24fcbc5 Update Home-Assistant 0.67.0b0 2018-04-06 23:19:31 +02:00
Pascal Vizeli
06edf59d14 Update Home-Assistant to version 0.66.1 2018-04-02 09:47:39 +02:00
Pascal Vizeli
36ca851bc2 Update Home-Assistant to version 0.66.1 2018-04-02 09:47:21 +02:00
Pascal Vizeli
a4e453bf83 Update Home-Assistant to version 0.66.1 2018-04-02 09:47:05 +02:00
Pascal Vizeli
d211eec66f Update Home-Assistant to version 0.66.1b0 2018-04-01 11:08:34 +02:00
Pascal Vizeli
db8540d4ab Update Home-Assistant to version 0.66.1b0 2018-04-01 11:07:55 +02:00
Pascal Vizeli
30e270e7c0 Update Home-Assistant to version 0.66.0 2018-03-31 09:45:53 +02:00
Pascal Vizeli
9734307551 Update Home-Assistant to version 0.66.0 2018-03-31 09:45:32 +02:00
Pascal Vizeli
c650f8d1e1 Update Home-Assistant to version 0.66.0 2018-03-31 09:45:15 +02:00
Pascal Vizeli
10005898f8 Fix tag name 2018-03-30 15:10:35 +02:00
Pascal Vizeli
716389e0c1 Fix tag format 2018-03-30 15:10:02 +02:00
Pascal Vizeli
658729feb5 Update Home-Assistant to version 0.66.0.b3 2018-03-30 10:23:10 +02:00
Pascal Vizeli
ae7808eb2a Update Home-Assistant to version 0.66.0.b3 2018-03-30 10:22:41 +02:00
Pascal Vizeli
d8e0e9e0b0 Update Home-Assistant to version 0.66.0.b2 2018-03-27 09:24:59 +02:00
Pascal Vizeli
a860a3c122 Update Home-Assistant to version 0.66.0.b2 2018-03-27 09:24:38 +02:00
Pascal Vizeli
fe60d526b9 Revert home-assistant version 2018-03-24 23:34:22 +01:00
Pascal Vizeli
769904778f Merge remote-tracking branch 'origin/rc' 2018-03-24 23:32:37 +01:00
Pascal Vizeli
a3a40c79d6 Fix merge conflicts 2018-03-24 23:31:26 +01:00
Pascal Vizeli
b44f613136 Update hass.io to version 0.99 2018-03-24 23:09:32 +01:00
Pascal Vizeli
801be9c60b Create .gitattributes 2018-03-24 23:05:58 +01:00
Pascal Vizeli
b6db6a1287 Create .gitattributes 2018-03-24 23:04:37 +01:00
Pascal Vizeli
4181174bcc Create .gitattributes 2018-03-24 23:03:15 +01:00
Pascal Vizeli
3be46e6011 Update Home-Assistant to version 0.66.0.beta0 2018-03-24 22:58:52 +01:00
Pascal Vizeli
98b93efc5c Merge pull request #423 from home-assistant/beta
Change Upstream handling
2018-03-24 22:56:04 +01:00
Pascal Vizeli
6156019c2f Merge pull request #424 from home-assistant/ui-99
Update pannel for 0.99
2018-03-24 22:50:33 +01:00
Pascal Vizeli
80d60148a9 Update pannel for 0.99 2018-03-24 22:48:45 +01:00
Pascal Vizeli
8baf59a608 fix lint 2018-03-24 22:06:56 +01:00
Pascal Vizeli
b546365aaa Fix 2018-03-24 22:05:01 +01:00
Pascal Vizeli
0a68698912 rename mode to channel 2018-03-24 22:01:13 +01:00
Pascal Vizeli
45288a2491 Change Upstream handling 2018-03-24 21:44:44 +01:00
Pascal Vizeli
f34a175e4f Update Home-Assistant to version 0.66.0.beta0 2018-03-24 10:34:53 +01:00
Pascal Vizeli
6e7e145822 Update Home-Assistant to version 0.65.6 2018-03-21 22:19:38 +01:00
Pascal Vizeli
9abebe2d5d Update Home-Assistant to version 0.65.6 2018-03-21 22:19:08 +01:00
Pascal Vizeli
b0c5884c3f Update Home-Assistant to version 0.65.5 2018-03-15 12:11:01 +01:00
Pascal Vizeli
a79e6a8eea Update Home-Assistant to version 0.65.5 2018-03-15 12:10:16 +01:00
Pascal Vizeli
c1f1aed9ca Pump version to 0.99 2018-03-14 22:25:58 +01:00
Pascal Vizeli
65b0e17b5b Merge pull request #414 from home-assistant/dev
Release 0.98
2018-03-14 22:25:14 +01:00
Pascal Vizeli
6947131b47 Update Hass.io to version 0.98 2018-03-14 22:10:13 +01:00
Pascal Vizeli
914dd53da0 Merge pull request #411 from home-assistant/fix_watchdog
Use lock on homeassistant level
2018-03-14 21:52:20 +01:00
Pascal Vizeli
58616ef686 bugfix aiohttp 2018-03-14 21:12:08 +01:00
Pascal Vizeli
563e0c1e0e fix wrong startup blocking 2018-03-14 19:08:03 +01:00
Pascal Vizeli
437070fd7a Merge pull request #412 from home-assistant/fix-geoip
Fix URL for freegeoip
2018-03-13 23:34:06 +01:00
Pascal Vizeli
baa9cf451c Fix URL for freegeoip 2018-03-13 23:28:38 +01:00
Pascal Vizeli
c2918d4519 Use lock on homeassistant level 2018-03-13 23:09:53 +01:00
Pascal Vizeli
1efdcd4691 Merge remote-tracking branch 'origin/master' into dev 2018-03-13 21:31:56 +01:00
Pascal Vizeli
2a43087ed7 Pump version to 0.98 2018-03-13 16:06:44 +01:00
Pascal Vizeli
5716324934 Merge pull request #410 from home-assistant/dev
Release 0.97
2018-03-13 16:05:31 +01:00
Pascal Vizeli
ae267e0380 Merge branch 'master' into dev 2018-03-13 14:09:13 +01:00
Pascal Vizeli
3918a2a228 Update Home-Assistant version 0.65.4 2018-03-13 14:07:21 +01:00
Pascal Vizeli
e375fc36d3 Update Hass.io to version 0.97 2018-03-13 00:09:57 +01:00
Pascal Vizeli
f5e29b4651 Update panel to last (#408) 2018-03-12 23:51:09 +01:00
Pascal Vizeli
524d875516 Update aioHttp3 (#403)
* Update aioHttp3

* fix line ending

* fix close session
2018-03-12 23:40:06 +01:00
Pascal Vizeli
60bdc00ce9 Update Home-Assistant to version 0.65.3 2018-03-12 07:13:47 +01:00
Pascal Vizeli
073166190f Update Home-Assistant to version 0.65.3 2018-03-12 07:13:27 +01:00
Pascal Vizeli
b80e4d7d70 Update Home-Assistant to version 0.65.2 2018-03-11 23:58:16 +01:00
Pascal Vizeli
cc434e27cf Update Home-Assistant to version 0.65.2 2018-03-11 23:57:57 +01:00
Pascal Vizeli
8377e04b62 Update Home-Assistant to version 0.65.1 2018-03-11 20:32:43 +01:00
Pascal Vizeli
0a47fb9c83 Update Home-Assistant to version 0.65.1 2018-03-11 20:32:25 +01:00
Pascal Vizeli
a5d3c850e9 Update Home-Assistant to version 0.65.0 2018-03-09 23:32:47 +01:00
Pascal Vizeli
d6391f62be Update Home-Assistant to version 0.65.0 2018-03-09 23:10:27 +01:00
Pascal Vizeli
c6f302e448 Update ResinOS to version 1.3 2018-03-05 22:51:44 +01:00
Pascal Vizeli
9706022c21 Update ResinOS to version 1.3 2018-03-05 22:51:08 +01:00
Pascal Vizeli
1d858f4920 Update ResinOS to version 1.2 2018-03-04 00:43:24 +01:00
Pascal Vizeli
e09ba30d46 Update ResinOS to version 1.2 2018-03-04 00:43:00 +01:00
mark9white
38ec3d14ed Allow addons that require IPC_LOCK capability (#397) 2018-03-03 23:06:42 +01:00
Pascal Vizeli
8ee9380cc7 Pump version to 0.97 2018-03-03 11:15:39 +01:00
Pascal Vizeli
6e74e4c008 Fix version conflicts 2018-03-03 11:12:59 +01:00
Pascal Vizeli
5ebc58851b Update Hass.io to version 0.96 2018-03-03 11:08:00 +01:00
Pascal Vizeli
16b09bbfc5 Allow to use branch on repositories (#395)
* Allow to use branch on repositories

* Fix argument extraction

* fix lint
2018-03-03 11:00:58 +01:00
Pascal Vizeli
d4b5fc79f4 Update Home-Assistant to version 0.64.3 2018-03-03 00:07:04 +01:00
Pascal Vizeli
e51c044ccd Update Home-Assistant to version 0.64.3 2018-03-02 23:56:48 +01:00
Pascal Vizeli
d3b1ba81f7 Update panel for encrypted backups (#394)
* Update panel for encrypted backups

* fix lint
2018-03-02 23:23:40 +01:00
Pascal Vizeli
26f55f02c0 Update Home-Assistant to version 0.64.2 2018-03-02 07:01:42 +01:00
Pascal Vizeli
8050707ff9 Update Home-Assistant to version 0.64.2 2018-03-02 06:54:32 +01:00
c727
46252030cf Improve names for built-in repos (#391) 2018-03-01 19:00:21 +01:00
Pascal Vizeli
681fa835ef Update Home-Assistant to version 0.64.1 2018-02-28 08:16:18 +01:00
Pascal Vizeli
d6560eb976 Update Home-Assistant to version 0.64.1 2018-02-28 07:48:54 +01:00
Pascal Vizeli
3770b307af Pump version to 0.96 2018-02-26 22:55:53 +01:00
Pascal Vizeli
0dacbb31be Fix version conflicts 2018-02-26 22:53:31 +01:00
Pascal Vizeli
bbdbd756a7 Update Hass.io to version 0.95 2018-02-26 22:42:29 +01:00
Pascal Vizeli
508e38e622 Fix snapshot partial API (#389) 2018-02-26 22:26:39 +01:00
Pascal Vizeli
ffe45d0d02 Bugfix if no data is given for encryption (#387)
* Bugfix if no data is given for encryption

* Update snapshot.py
2018-02-26 22:17:25 +01:00
Pascal Vizeli
9206d1acf8 Update Home-Assistant to version 0.64 2018-02-26 06:10:40 +01:00
Pascal Vizeli
da867ef8ef Update Home-Assistant to version 0.64 2018-02-26 06:03:24 +01:00
Pascal Vizeli
4826201e51 Pump version to 0.95 2018-02-25 12:57:53 +01:00
Pascal Vizeli
463c97f9e7 Update Hass.io to version 0.94 2018-02-25 12:49:39 +01:00
Pascal Vizeli
3983928c6c Bugfix snapshot dialog (#380) 2018-02-25 12:18:05 +01:00
Pascal Vizeli
15e626027f Pump version to 0.94 2018-02-24 08:50:21 +01:00
Pascal Vizeli
d46810752e Update Hass.io to version 0.93 2018-02-24 08:46:53 +01:00
Pascal Vizeli
3d10b502a0 Bugfix panel system (#379) 2018-02-24 08:38:59 +01:00
Pascal Vizeli
433c5cef3b Stop home-assistant only if they will be restored (#377) 2018-02-23 22:22:38 +01:00
Pascal Vizeli
697caf553a Pump version to 0.93 2018-02-23 11:38:04 +01:00
Pascal Vizeli
1e11359c71 Fix version conflicts 2018-02-23 11:35:43 +01:00
Pascal Vizeli
5285431825 New panel (#374) 2018-02-23 11:13:53 +01:00
Pascal Vizeli
7743a572a9 Update Hass.io to version 0.92 2018-02-23 11:01:51 +01:00
Pascal Vizeli
3b974920d3 Return snapshot slug for snapshot/import (#372)
* Update __init__.py

* Update snapshots.py

* Update API.md

* Update __init__.py

* Update __init__.py
2018-02-23 10:52:35 +01:00
Pascal Vizeli
6bc9792248 Update setup.py (#373) 2018-02-23 10:37:14 +01:00
Pascal Vizeli
da55f6fb10 Pump version to 0.92 2018-02-23 10:34:21 +01:00
Pascal Vizeli
ffa90a3407 Update Home-Assistant to version 0.63.3 2018-02-18 22:16:46 +01:00
Pascal Vizeli
0a13ea3743 Update Home-Assistant to version 0.63.3 2018-02-18 22:15:39 +01:00
Pascal Vizeli
0e2e588145 Update utils.py 2018-02-18 12:31:06 +01:00
Pascal Vizeli
b8c50fee36 Update validate.py 2018-02-18 12:30:41 +01:00
Pascal Vizeli
8cb0b7c498 Update validate.py 2018-02-18 12:23:46 +01:00
Pascal Vizeli
699fcdafba Fix pw2 (#369)
* fix rate password

* convert int
2018-02-18 12:18:11 +01:00
Pascal Vizeli
b4d5aeb5d0 Update Hass.io to version 0.91 2018-02-18 12:15:54 +01:00
Pascal Vizeli
d067dd643e Fix password hack (#368) 2018-02-18 11:51:11 +01:00
Pascal Vizeli
65a2bf2d18 Pump version to 0.91 2018-02-18 11:01:13 +01:00
Pascal Vizeli
e826e8184f Update Hass.io to version 0.90 2018-02-18 10:59:58 +01:00
Pascal Vizeli
dacbde7d77 Extend the security of snapshots (#367)
* extend security

* fix lint
2018-02-18 10:57:05 +01:00
Pascal Vizeli
5b0587b672 Pump version to 0.90 2018-02-17 17:28:19 +01:00
Pascal Vizeli
f0320c0f6d Fix version conflicts 2018-02-17 16:29:36 +01:00
Pascal Vizeli
e05c32df25 Update Hass.io to version 0.89 2018-02-17 16:28:50 +01:00
c727
9c40c32e95 Add timezone to snapshot timestamp (#360)
* Add timezone to snapshot timestamp

```
old: 2018-02-14T15:13:46.391829
new: 2018-02-14T15:13:46.391829+00:00
```

* Update __init__.py

* Move code to dt util

* Lint

* Lint 2

* Update dt.py

* Update __init__.py
2018-02-17 16:13:23 +01:00
Pascal Vizeli
ac60de0360 Update security.py (#365) 2018-02-17 16:09:10 +01:00
Pascal Vizeli
587047f9d6 Add support for encrypted snapshot files (#354)
* Add support for encrypted files

* Update tar.py

* Update tar.py

* Update tar.py

* Update addon.py

* Update API.md

* Update API.md

* Update tar.py

* cleanup snapshot

* Update API.md

* Update const.py

* Update const.py

* Update validate.py

* Update homeassistant.py

* Update homeassistant.py

* Update validate.py

* Update validate.py

* Update snapshot.py

* Update utils.py

* Update snapshot.py

* Update utils.py

* Update snapshot.py

* Update validate.py

* Update snapshot.py

* Update validate.py

* Update const.py

* fix lint

* Update snapshot.py

* Update __init__.py

* Update snapshot.py

* Update __init__.py

* Update __init__.py

* Finish snapshot object

* Fix struct

* cleanup snapshot flow

* fix some points

* Add API upload

* fix lint

* Update voluptuous

* fix docker

* Update snapshots.py

* fix versions

* fix schema

* fix schema

* fix api

* fix path

* Handle import better

* fix routing

* fix bugs

* fix bug

* cleanup gz

* fix some bugs

* fix stage

* Fix

* fix

* protect None password

* fix API

* handle exception better

* fix

* fix remove of addons

* fix bug

* clenaup code

* fix none tasks

* Encrypt Home-Assistant

* fix decrypt

* fix binary
2018-02-17 15:52:33 +01:00
Fabian Affolter
e815223047 Merge pull request #363 from home-assistant/probot
Enable probot move
2018-02-16 13:25:16 +01:00
Fabian Affolter
b6fb5ab950 Enable probot move 2018-02-16 13:18:13 +01:00
Pascal Vizeli
a0906937c4 Update Home-Assistant to version 0.63.2 2018-02-14 22:08:09 +01:00
Pascal Vizeli
07c47df369 Update Home-Assistant to version 0.63.2 2018-02-14 21:12:18 +01:00
Pascal Vizeli
85e9a949cc Update Home-Assistant to version 0.63.1 2018-02-13 06:38:45 +01:00
Pascal Vizeli
3933fb0664 Update Home-Assistant to version 0.63.1 2018-02-13 06:26:45 +01:00
Pascal Vizeli
a885fbdb41 Pump version to 0.89 2018-02-11 23:03:46 +01:00
Pascal Vizeli
210793eb34 Update Home-Assistant to version 0.63 2018-02-11 09:31:19 +01:00
Pascal Vizeli
0235c7bce0 Update Home-Assistant to version 0.63 2018-02-11 09:22:31 +01:00
Pascal Vizeli
4419c0fc6c Update Hass.io to version 0.88 2018-02-11 01:53:18 +01:00
Pascal Vizeli
2f3701693d Fix bugs with docker api 3.0.1 and fix the version (#353)
* Fix version

* fix snapshot
2018-02-11 01:42:53 +01:00
Pascal Vizeli
3bf446cbdb Improve security layer (#352)
* Improve security layer

* Update logger

* Fix access

* Validate token

* fix

* fix some bugs

* fix lint
2018-02-11 00:05:20 +01:00
Pascal Vizeli
0c67cc13a1 Pump version to 0.88 2018-02-10 00:23:37 +01:00
Pascal Vizeli
0b80d7b6f4 Update Hass.io to version 0.87 2018-02-10 00:17:13 +01:00
Pascal Vizeli
23c35d4c80 Bugfix Check Config for Home-Assistant (#350)
* add logger

* Bugfix config check
2018-02-10 00:10:30 +01:00
Pascal Vizeli
e939c29efa Pump version to 0.87 2018-02-09 10:45:15 +01:00
79 changed files with 2424 additions and 1415 deletions

.github/move.yml (new file, vendored, 13 added lines)

@@ -0,0 +1,13 @@
+# Configuration for move-issues - https://github.com/dessant/move-issues
+# Delete the command comment. Ignored when the comment also contains other content
+deleteCommand: true
+# Close the source issue after moving
+closeSourceIssue: true
+# Lock the source issue after moving
+lockSourceIssue: false
+# Set custom aliases for targets
+# aliases:
+#   r: repo
+#   or: owner/repo

API.md (157 changed lines)

@@ -36,7 +36,7 @@ The addons from `addons` are only installed one.
     "version": "INSTALL_VERSION",
     "last_version": "LAST_VERSION",
     "arch": "armhf|aarch64|i386|amd64",
-    "beta_channel": "true|false",
+    "channel": "stable|beta|dev",
     "timezone": "TIMEZONE",
     "wait_boot": "int",
     "addons": [

@@ -72,7 +72,7 @@ Optional:
 ```json
 {
-    "beta_channel": "true|false",
+    "channel": "stable|beta|dev",
     "timezone": "TIMEZONE",
     "wait_boot": "int",
     "addons_repositories": [

@@ -113,7 +113,8 @@ Output is the raw docker log.
             "slug": "SLUG",
             "date": "ISO",
             "name": "Custom name",
-            "type": "full|partial"
+            "type": "full|partial",
+            "protected": "bool"
         }
     ]
 }

@@ -121,11 +122,28 @@
 - POST `/snapshots/reload`
+- POST `/snapshots/new/upload`
+return:
+```json
+{
+    "slug": ""
+}
+```
 - POST `/snapshots/new/full`
 ```json
 {
-    "name": "Optional"
+    "name": "Optional",
+    "password": "Optional"
+}
+```
+return:
+```json
+{
+    "slug": ""
 }
 ```

@@ -135,7 +153,15 @@
 {
     "name": "Optional",
     "addons": ["ADDON_SLUG"],
-    "folders": ["FOLDER_NAME"]
+    "folders": ["FOLDER_NAME"],
+    "password": "Optional"
+}
+```
+return:
+```json
+{
+    "slug": ""
 }
 ```

@@ -150,12 +176,14 @@
     "name": "custom snapshot name / description",
     "date": "ISO",
     "size": "SIZE_IN_MB",
+    "protected": "bool",
     "homeassistant": "version",
     "addons": [
         {
             "slug": "ADDON_SLUG",
             "name": "NAME",
-            "version": "INSTALLED_VERSION"
+            "version": "INSTALLED_VERSION",
+            "size": "SIZE_IN_MB"
         }
     ],
     "repositories": ["URL"],

@@ -164,36 +192,49 @@
 ```
 - POST `/snapshots/{slug}/remove`
+- GET `/snapshots/{slug}/download`
 - POST `/snapshots/{slug}/restore/full`
+```json
+{
+    "password": "Optional"
+}
+```
 - POST `/snapshots/{slug}/restore/partial`
 ```json
 {
     "homeassistant": "bool",
     "addons": ["ADDON_SLUG"],
-    "folders": ["FOLDER_NAME"]
+    "folders": ["FOLDER_NAME"],
+    "password": "Optional"
 }
 ```
 ### Host
 - POST `/host/reload`
 - POST `/host/shutdown`
 - POST `/host/reboot`
 - GET `/host/info`
 ```json
 {
-    "type": "",
-    "version": "",
-    "last_version": "",
-    "features": ["shutdown", "reboot", "update", "hostname", "network_info", "network_control"],
-    "hostname": "",
-    "os": "",
-    "audio": {
-        "input": "0,0",
-        "output": "0,0"
-    }
+    "hostname": "hostname|null",
+    "features": ["shutdown", "reboot", "update", "hostname"],
+    "operating_system": "Hass.io-OS XY|Ubuntu 16.4|null",
+    "kernel": "4.15.7|null",
+    "chassis": "specific|null",
+    "type": "Hass.io-OS Type|null",
+    "deployment": "stable|beta|dev|null",
+    "version": "xy|null",
+    "last_version": "xy|null",
 }
 ```

@@ -201,11 +242,11 @@
 ```json
 {
-    "audio_input": "0,0",
-    "audio_output": "0,0"
+    "hostname": "",
 }
 ```
 - POST `/host/update`
 Optional:

@@ -216,7 +257,11 @@
 }
 ```
-- GET `/host/hardware`
+- POST `/host/reload`
+### Hardware
+- GET `/hardware/info`
 ```json
 {
     "serial": ["/dev/xy"],

@@ -235,23 +280,18 @@
 }
 ```
-- POST `/host/reload`
-### Network
-- GET `/network/info`
+- GET `/hardware/audio`
 ```json
 {
-    "hostname": ""
-}
-```
-- POST `/network/options`
-```json
-{
-    "hostname": "",
+    "audio": {
+        "input": {
+            "0,0": "Mic"
+        },
+        "output": {
+            "1,0": "Jack",
+            "1,1": "HDMI"
+        }
+    }
 }
 ```

@@ -263,6 +303,7 @@ Optional:
 {
     "version": "INSTALL_VERSION",
     "last_version": "LAST_VERSION",
+    "machine": "Image machine type",
     "image": "str",
     "custom": "bool -> if custom image",
     "boot": "bool",

@@ -371,6 +412,7 @@ Get all available addons.
 ```json
 {
     "name": "xy bla",
+    "slug": "xdssd_xybla",
     "description": "description",
     "long_description": "null|markdown",
     "auto_update": "bool",

@@ -388,6 +430,8 @@
     "host_ipc": "bool",
     "host_dbus": "bool",
     "privileged": ["NET_ADMIN", "SYS_ADMIN"],
+    "seccomp": "disable|default|profile",
+    "apparmor": "disable|default|profile",
     "devices": ["/dev/xy"],
     "auto_uart": "bool",
     "icon": "bool",

@@ -563,46 +607,3 @@ This service perform a auto discovery to Home-Assistant.
 ```
 - DEL `/services/mqtt`
-## Host Control
-Communicate over UNIX socket with a host daemon.
-- commands
-```
-# info
--> {'type', 'version', 'last_version', 'features', 'hostname'}
-# reboot
-# shutdown
-# host-update [v]
-# hostname xy
-# network info
--> {}
-# network wlan ssd xy
-# network wlan password xy
-# network int ip xy
-# network int netmask xy
-# network int route xy
-```
-Features:
-- shutdown
-- reboot
-- update
-- hostname
-- network_info
-- network_control
-Answer:
-```
-{}|OK|ERROR|WRONG
-```
-- {}: json
-- OK: call was successfully
-- ERROR: error on call
-- WRONG: not supported

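The API.md changes above document the new password-protected snapshot endpoints and the `slug` value they now return. As a rough illustration of how a client might call them, the sketch below assumes the Supervisor API is reachable at `http://hassio` with an `X-HASSIO-KEY` token header (both assumptions about the deployment, not part of this diff); only the paths and JSON fields are taken from the updated API.md.

```python
import os

import requests  # any HTTP client works; requests is used here for brevity

# Assumed deployment details - adjust to however the Hass.io API is exposed.
BASE_URL = "http://hassio"
HEADERS = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}

# Create a password-protected full snapshot (fields per the updated API.md).
resp = requests.post(
    f"{BASE_URL}/snapshots/new/full",
    json={"name": "nightly", "password": "my-secret"},
    headers=HEADERS,
)
resp.raise_for_status()
# Per API.md the call returns the new snapshot slug; the live API may wrap
# this payload in a result envelope.
slug = resp.json().get("slug")

# Restore the same snapshot later, supplying the password again.
requests.post(
    f"{BASE_URL}/snapshots/{slug}/restore/full",
    json={"password": "my-secret"},
    headers=HEADERS,
).raise_for_status()
```
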
Dockerfile

@@ -9,14 +9,17 @@ RUN apk add --no-cache \
     python3 \
     git \
     socat \
+    glib \
     libstdc++ \
+    eudev-libs \
     && apk add --no-cache --virtual .build-dependencies \
     make \
     python3-dev \
     g++ \
     && pip3 install --no-cache-dir \
-    uvloop \
-    cchardet \
+    uvloop==0.9.1 \
+    cchardet==2.1.1 \
+    pycryptodome==3.4.11 \
     && apk del .build-dependencies
 # Install HassIO

hassio/__main__.py

@@ -5,7 +5,6 @@ import logging
 import sys
 import hassio.bootstrap as bootstrap
-import hassio.core as core
 _LOGGER = logging.getLogger(__name__)

@@ -34,14 +33,13 @@ if __name__ == "__main__":
    _LOGGER.info("Initialize Hassio setup")
    coresys = bootstrap.initialize_coresys(loop)
-    hassio = core.HassIO(coresys)
    bootstrap.migrate_system_env(coresys)
    _LOGGER.info("Setup HassIO")
-    loop.run_until_complete(hassio.setup())
-    loop.call_soon_threadsafe(loop.create_task, hassio.start())
+    loop.run_until_complete(coresys.core.setup())
+    loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop)
    try:

@@ -49,7 +47,7 @@ if __name__ == "__main__":
        loop.run_forever()
    finally:
        _LOGGER.info("Stopping HassIO")
-        loop.run_until_complete(hassio.stop())
+        loop.run_until_complete(coresys.core.stop())
        executor.shutdown(wait=False)
        loop.close()

hassio/addons/__init__.py

@@ -5,7 +5,7 @@ import logging
 from .addon import Addon
 from .repository import Repository
 from .data import AddonsData
-from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO
+from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED
 from ..coresys import CoreSysAttributes
 _LOGGER = logging.getLogger(__name__)

@@ -28,22 +28,35 @@ class AddonManager(CoreSysAttributes):
        """Return a list of all addons."""
        return list(self.addons_obj.values())
+    @property
+    def list_installed(self):
+        """Return a list of installed addons."""
+        return [addon for addon in self.addons_obj.values()
+                if addon.is_installed]
    @property
    def list_repositories(self):
        """Return list of addon repositories."""
        return list(self.repositories_obj.values())
    def get(self, addon_slug):
-        """Return a adddon from slug."""
+        """Return a add-on from slug."""
        return self.addons_obj.get(addon_slug)
+    def from_uuid(self, uuid):
+        """Return a add-on from uuid."""
+        for addon in self.list_addons:
+            if addon.is_installed and uuid == addon.uuid:
+                return addon
+        return None
    async def load(self):
        """Startup addon management."""
        self.data.reload()
        # init hassio built-in repositories
        repositories = \
-            set(self._config.addons_repositories) | BUILTIN_REPOSITORIES
+            set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES
        # init custom repositories & load addons
        await self.load_repositories(repositories)

@@ -53,7 +66,7 @@
        tasks = [repository.update() for repository in
                 self.repositories_obj.values()]
        if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
        # read data from repositories
        self.data.reload()

@@ -77,16 +90,16 @@
            # don't add built-in repository to config
            if url not in BUILTIN_REPOSITORIES:
-                self._config.add_addon_repository(url)
+                self.sys_config.add_addon_repository(url)
        tasks = [_add_repository(url) for url in new_rep - old_rep]
        if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
        # del new repository
        for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
            self.repositories_obj.pop(url).remove()
-            self._config.drop_addon_repository(url)
+            self.sys_config.drop_addon_repository(url)
        # update data
        self.data.reload()

@@ -112,13 +125,13 @@
            self.addons_obj[addon_slug] = addon
        if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
        # remove
        for addon_slug in del_addons:
            self.addons_obj.pop(addon_slug)
-    async def auto_boot(self, stage):
+    async def boot(self, stage):
        """Boot addons with mode auto."""
        tasks = []
        for addon in self.addons_obj.values():

@@ -128,5 +141,18 @@
        _LOGGER.info("Startup %s run %d addons", stage, len(tasks))
        if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
-            await asyncio.sleep(self._config.wait_boot, loop=self._loop)
+            await asyncio.wait(tasks)
+            await asyncio.sleep(self.sys_config.wait_boot)
+    async def shutdown(self, stage):
+        """Shutdown addons."""
+        tasks = []
+        for addon in self.addons_obj.values():
+            if addon.is_installed and \
+                    await addon.state() == STATE_STARTED and \
+                    addon.startup == stage:
+                tasks.append(addon.stop())
+        _LOGGER.info("Shutdown %s stop %d addons", stage, len(tasks))
+        if tasks:
+            await asyncio.wait(tasks)

hassio/addons/addon.py

@@ -1,4 +1,5 @@
 """Init file for HassIO addons."""
+from contextlib import suppress
 from copy import deepcopy
 import logging
 import json

@@ -23,7 +24,9 @@ from ..const import (
     ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
     ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
     ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
-    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_DISCOVERY, ATTR_SERVICES)
+    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_DISCOVERY, ATTR_SERVICES,
+    ATTR_SECCOMP, ATTR_APPARMOR, SECURITY_PROFILE, SECURITY_DISABLE,
+    SECURITY_DEFAULT)
 from ..coresys import CoreSysAttributes
 from ..docker.addon import DockerAddon
 from ..utils.json import write_json_file, read_json_file

@@ -63,7 +66,7 @@ class Addon(CoreSysAttributes):
    @property
    def _data(self):
        """Return addons data storage."""
-        return self._addons.data
+        return self.sys_addons.data
    @property
    def is_installed(self):

@@ -316,6 +319,24 @@
        """Return list of privilege."""
        return self._mesh.get(ATTR_PRIVILEGED)
+    @property
+    def seccomp(self):
+        """Return True if seccomp is enabled."""
+        if not self._mesh.get(ATTR_SECCOMP):
+            return SECURITY_DISABLE
+        elif self.path_seccomp.exists():
+            return SECURITY_PROFILE
+        return SECURITY_DEFAULT
+    @property
+    def apparmor(self):
+        """Return True if seccomp is enabled."""
+        if not self._mesh.get(ATTR_APPARMOR):
+            return SECURITY_DISABLE
+        elif self.path_apparmor.exists():
+            return SECURITY_PROFILE
+        return SECURITY_DEFAULT
    @property
    def legacy(self):
        """Return if the add-on don't support hass labels."""

@@ -352,15 +373,14 @@
        if not self.with_audio:
            return None
-        setting = self._config.audio_output
        if self.is_installed and \
                ATTR_AUDIO_OUTPUT in self._data.user[self._id]:
-            setting = self._data.user[self._id][ATTR_AUDIO_OUTPUT]
-        return setting
+            return self._data.user[self._id][ATTR_AUDIO_OUTPUT]
+        return self.sys_host.alsa.default.output
    @audio_output.setter
    def audio_output(self, value):
-        """Set/remove custom audio output settings."""
+        """Set/reset audio output settings."""
        if value is None:
            self._data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
        else:

@@ -372,14 +392,13 @@
        if not self.with_audio:
            return None
-        setting = self._config.audio_input
        if self.is_installed and ATTR_AUDIO_INPUT in self._data.user[self._id]:
-            setting = self._data.user[self._id][ATTR_AUDIO_INPUT]
-        return setting
+            return self._data.user[self._id][ATTR_AUDIO_INPUT]
+        return self.sys_host.alsa.default.input
    @audio_input.setter
    def audio_input(self, value):
-        """Set/remove custom audio input settings."""
+        """Set/reset audio input settings."""
        if value is None:
            self._data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
        else:

@@ -417,11 +436,11 @@
        # Repository with dockerhub images
        if ATTR_IMAGE in addon_data:
-            return addon_data[ATTR_IMAGE].format(arch=self._arch)
+            return addon_data[ATTR_IMAGE].format(arch=self.sys_arch)
        # local build
        return "{}/{}-addon-{}".format(
-            addon_data[ATTR_REPOSITORY], self._arch,
+            addon_data[ATTR_REPOSITORY], self.sys_arch,
            addon_data[ATTR_SLUG])
    @property

@@ -442,12 +461,12 @@
    @property
    def path_data(self):
        """Return addon data path inside supervisor."""
-        return Path(self._config.path_addons_data, self._id)
+        return Path(self.sys_config.path_addons_data, self._id)
    @property
    def path_extern_data(self):
        """Return addon data path external for docker."""
-        return PurePath(self._config.path_extern_addons_data, self._id)
+        return PurePath(self.sys_config.path_extern_addons_data, self._id)
    @property
    def path_options(self):

@@ -474,9 +493,29 @@
        """Return path to addon changelog."""
        return Path(self.path_location, 'CHANGELOG.md')
+    @property
+    def path_seccomp(self):
+        """Return path to custom seccomp profile."""
+        return Path(self.path_location, 'seccomp.json')
+    @property
+    def path_apparmor(self):
+        """Return path to custom AppArmor profile."""
+        return Path(self.path_location, 'apparmor')
+    @property
+    def path_asound(self):
+        """Return path to asound config."""
+        return Path(self.sys_config.path_tmp, f"{self.slug}_asound")
+    @property
+    def path_extern_asound(self):
+        """Return path to asound config for docker."""
+        return Path(self.sys_config.path_extern_tmp, f"{self.slug}_asound")
    def save_data(self):
        """Save data of addon."""
-        self._addons.data.save_data()
+        self.sys_addons.data.save_data()
    def write_options(self):
        """Return True if addon options is written to data."""

@@ -496,6 +535,20 @@
        return False
+    def write_asound(self):
+        """Write asound config to file and return True on success."""
+        asound_config = self.sys_host.alsa.asound(
+            alsa_input=self.audio_input, alsa_output=self.audio_output)
+        try:
+            with self.path_asound.open('w') as config_file:
+                config_file.write(asound_config)
+        except OSError as err:
+            _LOGGER.error("Addon %s can't write asound: %s", self._id, err)
+            return False
+        return True
    @property
    def schema(self):
        """Create a schema for addon options."""

@@ -537,9 +590,9 @@
    async def install(self):
        """Install a addon."""
-        if self._arch not in self.supported_arch:
+        if self.sys_arch not in self.supported_arch:
            _LOGGER.error(
-                "Addon %s not supported on %s", self._id, self._arch)
+                "Addon %s not supported on %s", self._id, self.sys_arch)
            return False
        if self.is_installed:

@@ -568,6 +621,11 @@
                "Remove Home-Assistant addon data folder %s", self.path_data)
            shutil.rmtree(str(self.path_data))
+        # Cleanup audio settings
+        if self.path_asound.exists():
+            with suppress(OSError):
+                self.path_asound.unlink()
        self._set_uninstall()
        return True

@@ -583,9 +641,14 @@
    @check_installed
    async def start(self):
        """Set options and start addon."""
+        # Options
        if not self.write_options():
            return False
+        # Sound
+        if self.with_audio and not self.write_asound():
+            return False
        return await self.instance.run()
    @check_installed

@@ -672,7 +735,7 @@
    @check_installed
    async def snapshot(self, tar_file):
        """Snapshot a state of a addon."""
-        with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
+        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
            # store local image
            if self.need_build and not await \
                    self.instance.export_image(Path(temp, "image.tar")):

@@ -693,16 +756,15 @@
                return False
            # write into tarfile
-            def _create_tar():
+            def _write_tarfile():
                """Write tar inside loop."""
-                with tarfile.open(tar_file, "w:gz",
-                                  compresslevel=1) as snapshot:
+                with tar_file as snapshot:
                    snapshot.add(temp, arcname=".")
                    snapshot.add(self.path_data, arcname="data")
            try:
                _LOGGER.info("Build snapshot for addon %s", self._id)
-                await self._loop.run_in_executor(None, _create_tar)
+                await self.sys_run_in_executor(_write_tarfile)
            except (tarfile.TarError, OSError) as err:
                _LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
                return False

@@ -712,15 +774,15 @@
    async def restore(self, tar_file):
        """Restore a state of a addon."""
-        with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
+        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
            # extract snapshot
-            def _extract_tar():
+            def _extract_tarfile():
                """Extract tar snapshot."""
-                with tarfile.open(tar_file, "r:gz") as snapshot:
+                with tar_file as snapshot:
                    snapshot.extractall(path=Path(temp))
            try:
-                await self._loop.run_in_executor(None, _extract_tar)
+                await self.sys_run_in_executor(_extract_tarfile)
            except tarfile.TarError as err:
                _LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
                return False

@@ -766,7 +828,7 @@
            try:
                _LOGGER.info("Restore data for addon %s", self._id)
-                await self._loop.run_in_executor(None, _restore_data)
+                await self.sys_run_in_executor(_restore_data)
            except shutil.Error as err:
                _LOGGER.error("Can't restore origin data: %s", err)
                return False

hassio/addons/build.py

@@ -25,13 +25,13 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
    @property
    def addon(self):
        """Return addon of build data."""
-        return self._addons.get(self._id)
+        return self.sys_addons.get(self._id)
    @property
    def base_image(self):
        """Base images for this addon."""
        return self._data[ATTR_BUILD_FROM].get(
-            self._arch, BASE_IMAGE[self._arch])
+            self.sys_arch, BASE_IMAGE[self.sys_arch])
    @property
    def squash(self):

@@ -53,15 +53,15 @@
            'squash': self.squash,
            'labels': {
                'io.hass.version': version,
-                'io.hass.arch': self._arch,
+                'io.hass.arch': self.sys_arch,
                'io.hass.type': META_ADDON,
-                'io.hass.name': self.addon.name,
-                'io.hass.description': self.addon.description,
+                'io.hass.name': self._fix_label('name'),
+                'io.hass.description': self._fix_label('description'),
            },
            'buildargs': {
                'BUILD_FROM': self.base_image,
                'BUILD_VERSION': version,
-                'BUILD_ARCH': self._arch,
+                'BUILD_ARCH': self.sys_arch,
                **self.additional_args,
            }
        }

@@ -70,3 +70,8 @@
            args['labels']['io.hass.url'] = self.addon.url
        return args
+    def _fix_label(self, label_name):
+        """Remove characters they are not supported."""
+        label = getattr(self.addon, label_name, "")
+        return label.replace("'", "")

hassio/addons/built-in.json

@@ -1,12 +1,12 @@
 {
     "local": {
-        "name": "Local Add-Ons",
+        "name": "Local add-ons",
         "url": "https://home-assistant.io/hassio",
         "maintainer": "you"
     },
     "core": {
-        "name": "Built-in Add-Ons",
+        "name": "Official add-ons",
         "url": "https://home-assistant.io/addons",
-        "maintainer": "Home Assistant authors"
+        "maintainer": "Home Assistant"
     }
 }

hassio/addons/data.py

@@ -56,17 +56,17 @@ class AddonsData(JsonConfig, CoreSysAttributes):
        # read core repository
        self._read_addons_folder(
-            self._config.path_addons_core, REPOSITORY_CORE)
+            self.sys_config.path_addons_core, REPOSITORY_CORE)
        # read local repository
        self._read_addons_folder(
-            self._config.path_addons_local, REPOSITORY_LOCAL)
+            self.sys_config.path_addons_local, REPOSITORY_LOCAL)
        # add built-in repositories information
        self._set_builtin_repositories()
        # read custom git repositories
-        for repository_element in self._config.path_addons_git.iterdir():
+        for repository_element in self.sys_config.path_addons_git.iterdir():
            if repository_element.is_dir():
                self._read_git_repository(repository_element)

View File

@@ -8,8 +8,9 @@ import shutil
import git import git
from .utils import get_hash_from_repository from .utils import get_hash_from_repository
from ..const import URL_HASSIO_ADDONS from ..const import URL_HASSIO_ADDONS, ATTR_URL, ATTR_BRANCH
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..validate import RE_REPOSITORY
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -22,9 +23,20 @@ class GitRepo(CoreSysAttributes):
self.coresys = coresys self.coresys = coresys
self.repo = None self.repo = None
self.path = path self.path = path
self.url = url
self.lock = asyncio.Lock(loop=coresys.loop) self.lock = asyncio.Lock(loop=coresys.loop)
self._data = RE_REPOSITORY.match(url).groupdict()
@property
def url(self):
"""Return repository URL."""
return self._data[ATTR_URL]
@property
def branch(self):
"""Return repository branch."""
return self._data[ATTR_BRANCH]
async def load(self): async def load(self):
"""Init git addon repo.""" """Init git addon repo."""
if not self.path.is_dir(): if not self.path.is_dir():
@@ -33,7 +45,7 @@ class GitRepo(CoreSysAttributes):
async with self.lock: async with self.lock:
try: try:
_LOGGER.info("Load addon %s repository", self.path) _LOGGER.info("Load addon %s repository", self.path)
self.repo = await self._loop.run_in_executor( self.repo = await self.sys_loop.run_in_executor(
None, git.Repo, str(self.path)) None, git.Repo, str(self.path))
except (git.InvalidGitRepositoryError, git.NoSuchPathError, except (git.InvalidGitRepositoryError, git.NoSuchPathError,
@@ -46,12 +58,20 @@ class GitRepo(CoreSysAttributes):
async def clone(self): async def clone(self):
"""Clone git addon repo.""" """Clone git addon repo."""
async with self.lock: async with self.lock:
git_args = {
attribute: value
for attribute, value in (
('recursive', True),
('branch', self.branch)
) if value is not None
}
try: try:
_LOGGER.info("Clone addon %s repository", self.url) _LOGGER.info("Clone addon %s repository", self.url)
self.repo = await self._loop.run_in_executor( self.repo = await self.sys_run_in_executor(ft.partial(
None, ft.partial( git.Repo.clone_from, self.url, str(self.path),
git.Repo.clone_from, self.url, str(self.path), **git_args
recursive=True)) ))
except (git.InvalidGitRepositoryError, git.NoSuchPathError, except (git.InvalidGitRepositoryError, git.NoSuchPathError,
git.GitCommandError) as err: git.GitCommandError) as err:
@@ -69,7 +89,7 @@ class GitRepo(CoreSysAttributes):
async with self.lock: async with self.lock:
try: try:
_LOGGER.info("Pull addon %s repository", self.url) _LOGGER.info("Pull addon %s repository", self.url)
await self._loop.run_in_executor( await self.sys_loop.run_in_executor(
None, self.repo.remotes.origin.pull) None, self.repo.remotes.origin.pull)
except (git.InvalidGitRepositoryError, git.NoSuchPathError, except (git.InvalidGitRepositoryError, git.NoSuchPathError,

View File

@@ -30,7 +30,7 @@ class Repository(CoreSysAttributes):
@property @property
def _mesh(self): def _mesh(self):
"""Return data struct repository.""" """Return data struct repository."""
return self._addons.data.repositories.get(self._id, {}) return self.sys_addons.data.repositories.get(self._id, {})
@property @property
def slug(self): def slug(self):

View File

@@ -17,8 +17,9 @@ from ..const import (
ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC, ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC,
ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY,
ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_SERVICES, ATTR_DISCOVERY) ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_SERVICES, ATTR_DISCOVERY,
from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL ATTR_SECCOMP, ATTR_APPARMOR)
from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_DEVICE
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -60,6 +61,7 @@ PRIVILEGED_ALL = [
"NET_ADMIN", "NET_ADMIN",
"SYS_ADMIN", "SYS_ADMIN",
"SYS_RAWIO", "SYS_RAWIO",
"IPC_LOCK",
"SYS_TIME", "SYS_TIME",
"SYS_NICE" "SYS_NICE"
] ]
@@ -106,6 +108,8 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)], vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)}, vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)], vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
vol.Optional(ATTR_SECCOMP, default=True): vol.Boolean(),
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(), vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
vol.Optional(ATTR_GPIO, default=False): vol.Boolean(), vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(), vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
@@ -161,8 +165,8 @@ SCHEMA_ADDON_USER = vol.Schema({
vol.Optional(ATTR_BOOT): vol.Optional(ATTR_BOOT):
vol.In([BOOT_AUTO, BOOT_MANUAL]), vol.In([BOOT_AUTO, BOOT_MANUAL]),
vol.Optional(ATTR_NETWORK): DOCKER_PORTS, vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL, vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL, vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
}, extra=vol.REMOVE_EXTRA) }, extra=vol.REMOVE_EXTRA)

View File

@@ -7,13 +7,13 @@ from aiohttp import web
from .addons import APIAddons from .addons import APIAddons
from .discovery import APIDiscovery from .discovery import APIDiscovery
from .homeassistant import APIHomeAssistant from .homeassistant import APIHomeAssistant
from .hardware import APIHardware
from .host import APIHost from .host import APIHost
from .network import APINetwork
from .proxy import APIProxy from .proxy import APIProxy
from .supervisor import APISupervisor from .supervisor import APISupervisor
from .snapshots import APISnapshots from .snapshots import APISnapshots
from .services import APIServices from .services import APIServices
from .security import security_layer from .security import SecurityMiddleware
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -25,26 +25,24 @@ class RestAPI(CoreSysAttributes):
def __init__(self, coresys): def __init__(self, coresys):
"""Initialize docker base wrapper.""" """Initialize docker base wrapper."""
self.coresys = coresys self.coresys = coresys
self.security = SecurityMiddleware(coresys)
self.webapp = web.Application( self.webapp = web.Application(
middlewares=[security_layer], loop=self._loop) middlewares=[self.security.token_validation], loop=coresys.loop)
# service stuff # service stuff
self._handler = None self._handler = None
self.server = None self.server = None
# middleware
self.webapp['coresys'] = coresys
async def load(self): async def load(self):
"""Register REST API Calls.""" """Register REST API Calls."""
self._register_supervisor() self._register_supervisor()
self._register_host() self._register_host()
self._register_hardware()
self._register_homeassistant() self._register_homeassistant()
self._register_proxy() self._register_proxy()
self._register_panel() self._register_panel()
self._register_addons() self._register_addons()
self._register_snapshots() self._register_snapshots()
self._register_network()
self._register_discovery() self._register_discovery()
self._register_services() self._register_services()
@@ -53,180 +51,172 @@ class RestAPI(CoreSysAttributes):
api_host = APIHost() api_host = APIHost()
api_host.coresys = self.coresys api_host.coresys = self.coresys
self.webapp.router.add_get('/host/info', api_host.info) self.webapp.add_routes([
self.webapp.router.add_get('/host/hardware', api_host.hardware) web.get('/host/info', api_host.info),
self.webapp.router.add_post('/host/reboot', api_host.reboot) web.post('/host/reboot', api_host.reboot),
self.webapp.router.add_post('/host/shutdown', api_host.shutdown) web.post('/host/shutdown', api_host.shutdown),
self.webapp.router.add_post('/host/update', api_host.update) web.post('/host/update', api_host.update),
self.webapp.router.add_post('/host/options', api_host.options) web.post('/host/reload', api_host.reload),
self.webapp.router.add_post('/host/reload', api_host.reload) ])
def _register_network(self): def _register_hardware(self):
"""Register network function.""" """Register hardware function."""
api_net = APINetwork() api_hardware = APIHardware()
api_net.coresys = self.coresys api_hardware.coresys = self.coresys
self.webapp.router.add_get('/network/info', api_net.info) self.webapp.add_routes([
self.webapp.router.add_post('/network/options', api_net.options) web.get('/hardware/info', api_hardware.info),
web.get('/hardware/audio', api_hardware.audio),
])
def _register_supervisor(self): def _register_supervisor(self):
"""Register supervisor function.""" """Register supervisor function."""
api_supervisor = APISupervisor() api_supervisor = APISupervisor()
api_supervisor.coresys = self.coresys api_supervisor.coresys = self.coresys
self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping) self.webapp.add_routes([
self.webapp.router.add_get('/supervisor/info', api_supervisor.info) web.get('/supervisor/ping', api_supervisor.ping),
self.webapp.router.add_get('/supervisor/stats', api_supervisor.stats) web.get('/supervisor/info', api_supervisor.info),
self.webapp.router.add_post( web.get('/supervisor/stats', api_supervisor.stats),
'/supervisor/update', api_supervisor.update) web.get('/supervisor/logs', api_supervisor.logs),
self.webapp.router.add_post( web.post('/supervisor/update', api_supervisor.update),
'/supervisor/reload', api_supervisor.reload) web.post('/supervisor/reload', api_supervisor.reload),
self.webapp.router.add_post( web.post('/supervisor/options', api_supervisor.options),
'/supervisor/options', api_supervisor.options) ])
self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)
def _register_homeassistant(self): def _register_homeassistant(self):
"""Register homeassistant function.""" """Register homeassistant function."""
api_hass = APIHomeAssistant() api_hass = APIHomeAssistant()
api_hass.coresys = self.coresys api_hass.coresys = self.coresys
self.webapp.router.add_get('/homeassistant/info', api_hass.info) self.webapp.add_routes([
self.webapp.router.add_get('/homeassistant/logs', api_hass.logs) web.get('/homeassistant/info', api_hass.info),
self.webapp.router.add_get('/homeassistant/stats', api_hass.stats) web.get('/homeassistant/logs', api_hass.logs),
self.webapp.router.add_post('/homeassistant/options', api_hass.options) web.get('/homeassistant/stats', api_hass.stats),
self.webapp.router.add_post('/homeassistant/update', api_hass.update) web.post('/homeassistant/options', api_hass.options),
self.webapp.router.add_post('/homeassistant/restart', api_hass.restart) web.post('/homeassistant/update', api_hass.update),
self.webapp.router.add_post('/homeassistant/stop', api_hass.stop) web.post('/homeassistant/restart', api_hass.restart),
self.webapp.router.add_post('/homeassistant/start', api_hass.start) web.post('/homeassistant/stop', api_hass.stop),
self.webapp.router.add_post('/homeassistant/check', api_hass.check) web.post('/homeassistant/start', api_hass.start),
web.post('/homeassistant/check', api_hass.check),
])
def _register_proxy(self): def _register_proxy(self):
"""Register HomeAssistant API Proxy.""" """Register HomeAssistant API Proxy."""
api_proxy = APIProxy() api_proxy = APIProxy()
api_proxy.coresys = self.coresys api_proxy.coresys = self.coresys
self.webapp.router.add_get( self.webapp.add_routes([
'/homeassistant/api/websocket', api_proxy.websocket) web.get('/homeassistant/api/websocket', api_proxy.websocket),
self.webapp.router.add_get( web.get('/homeassistant/websocket', api_proxy.websocket),
'/homeassistant/websocket', api_proxy.websocket) web.get('/homeassistant/api/stream', api_proxy.stream),
self.webapp.router.add_get( web.post('/homeassistant/api/{path:.+}', api_proxy.api),
'/homeassistant/api/stream', api_proxy.stream) web.get('/homeassistant/api/{path:.+}', api_proxy.api),
self.webapp.router.add_post( web.get('/homeassistant/api/', api_proxy.api),
'/homeassistant/api/{path:.+}', api_proxy.api) ])
self.webapp.router.add_get(
'/homeassistant/api/{path:.+}', api_proxy.api)
self.webapp.router.add_get(
'/homeassistant/api/', api_proxy.api)
def _register_addons(self): def _register_addons(self):
"""Register homeassistant function.""" """Register homeassistant function."""
api_addons = APIAddons() api_addons = APIAddons()
api_addons.coresys = self.coresys api_addons.coresys = self.coresys
self.webapp.router.add_get('/addons', api_addons.list) self.webapp.add_routes([
self.webapp.router.add_post('/addons/reload', api_addons.reload) web.get('/addons', api_addons.list),
self.webapp.router.add_get('/addons/{addon}/info', api_addons.info) web.post('/addons/reload', api_addons.reload),
self.webapp.router.add_post( web.get('/addons/{addon}/info', api_addons.info),
'/addons/{addon}/install', api_addons.install) web.post('/addons/{addon}/install', api_addons.install),
self.webapp.router.add_post( web.post('/addons/{addon}/uninstall', api_addons.uninstall),
'/addons/{addon}/uninstall', api_addons.uninstall) web.post('/addons/{addon}/start', api_addons.start),
self.webapp.router.add_post('/addons/{addon}/start', api_addons.start) web.post('/addons/{addon}/stop', api_addons.stop),
self.webapp.router.add_post('/addons/{addon}/stop', api_addons.stop) web.post('/addons/{addon}/restart', api_addons.restart),
self.webapp.router.add_post( web.post('/addons/{addon}/update', api_addons.update),
'/addons/{addon}/restart', api_addons.restart) web.post('/addons/{addon}/options', api_addons.options),
self.webapp.router.add_post( web.post('/addons/{addon}/rebuild', api_addons.rebuild),
'/addons/{addon}/update', api_addons.update) web.get('/addons/{addon}/logs', api_addons.logs),
self.webapp.router.add_post( web.get('/addons/{addon}/icon', api_addons.icon),
'/addons/{addon}/options', api_addons.options) web.get('/addons/{addon}/logo', api_addons.logo),
self.webapp.router.add_post( web.get('/addons/{addon}/changelog', api_addons.changelog),
'/addons/{addon}/rebuild', api_addons.rebuild) web.post('/addons/{addon}/stdin', api_addons.stdin),
self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs) web.get('/addons/{addon}/stats', api_addons.stats),
self.webapp.router.add_get('/addons/{addon}/icon', api_addons.icon) ])
self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
self.webapp.router.add_get(
'/addons/{addon}/changelog', api_addons.changelog)
self.webapp.router.add_post('/addons/{addon}/stdin', api_addons.stdin)
self.webapp.router.add_get('/addons/{addon}/stats', api_addons.stats)
def _register_snapshots(self): def _register_snapshots(self):
"""Register snapshots function.""" """Register snapshots function."""
api_snapshots = APISnapshots() api_snapshots = APISnapshots()
api_snapshots.coresys = self.coresys api_snapshots.coresys = self.coresys
self.webapp.router.add_get('/snapshots', api_snapshots.list) self.webapp.add_routes([
self.webapp.router.add_post('/snapshots/reload', api_snapshots.reload) web.get('/snapshots', api_snapshots.list),
web.post('/snapshots/reload', api_snapshots.reload),
self.webapp.router.add_post( web.post('/snapshots/new/full', api_snapshots.snapshot_full),
'/snapshots/new/full', api_snapshots.snapshot_full) web.post('/snapshots/new/partial', api_snapshots.snapshot_partial),
self.webapp.router.add_post( web.post('/snapshots/new/upload', api_snapshots.upload),
'/snapshots/new/partial', api_snapshots.snapshot_partial) web.get('/snapshots/{snapshot}/info', api_snapshots.info),
web.post('/snapshots/{snapshot}/remove', api_snapshots.remove),
self.webapp.router.add_get( web.post('/snapshots/{snapshot}/restore/full',
'/snapshots/{snapshot}/info', api_snapshots.info) api_snapshots.restore_full),
self.webapp.router.add_post( web.post('/snapshots/{snapshot}/restore/partial',
'/snapshots/{snapshot}/remove', api_snapshots.remove) api_snapshots.restore_partial),
self.webapp.router.add_post( web.get('/snapshots/{snapshot}/download', api_snapshots.download),
'/snapshots/{snapshot}/restore/full', api_snapshots.restore_full) ])
self.webapp.router.add_post(
'/snapshots/{snapshot}/restore/partial',
api_snapshots.restore_partial)
def _register_services(self): def _register_services(self):
api_services = APIServices() api_services = APIServices()
api_services.coresys = self.coresys api_services.coresys = self.coresys
self.webapp.router.add_get('/services', api_services.list) self.webapp.add_routes([
web.get('/services', api_services.list),
self.webapp.router.add_get( web.get('/services/{service}', api_services.get_service),
'/services/{service}', api_services.get_service) web.post('/services/{service}', api_services.set_service),
self.webapp.router.add_post( web.delete('/services/{service}', api_services.del_service),
'/services/{service}', api_services.set_service) ])
self.webapp.router.add_delete(
'/services/{service}', api_services.del_service)
def _register_discovery(self): def _register_discovery(self):
api_discovery = APIDiscovery() api_discovery = APIDiscovery()
api_discovery.coresys = self.coresys api_discovery.coresys = self.coresys
self.webapp.router.add_get( self.webapp.add_routes([
'/services/discovery', api_discovery.list) web.get('/services/discovery', api_discovery.list),
self.webapp.router.add_get( web.get('/services/discovery/{uuid}', api_discovery.get_discovery),
'/services/discovery/{uuid}', api_discovery.get_discovery) web.delete('/services/discovery/{uuid}',
self.webapp.router.add_delete( api_discovery.del_discovery),
'/services/discovery/{uuid}', api_discovery.del_discovery) web.post('/services/discovery', api_discovery.set_discovery),
self.webapp.router.add_post( ])
'/services/discovery', api_discovery.set_discovery)
def _register_panel(self): def _register_panel(self):
"""Register panel for homeassistant.""" """Register panel for homeassistant."""
def create_panel_response(build_type): panel_dir = Path(__file__).parent.joinpath("panel")
def create_response(panel_file):
"""Create a function to generate a response.""" """Create a function to generate a response."""
path = Path(__file__).parent.joinpath( path = panel_dir.joinpath(f"{panel_file!s}.html")
f"panel/{build_type}.html")
return lambda request: web.FileResponse(path) return lambda request: web.FileResponse(path)
# This route is for backwards compatibility with HA < 0.58 # This route is for backwards compatibility with HA < 0.58
self.webapp.router.add_get( self.webapp.add_routes([
'/panel', create_panel_response('hassio-main-es5')) web.get('/panel', create_response('hassio-main-es5'))])
# This route is for backwards compatibility with HA 0.58 - 0.61 # This route is for backwards compatibility with HA 0.58 - 0.61
self.webapp.router.add_get( self.webapp.add_routes([
'/panel_es5', create_panel_response('hassio-main-es5')) web.get('/panel_es5', create_response('hassio-main-es5')),
self.webapp.router.add_get( web.get('/panel_latest', create_response('hassio-main-latest')),
'/panel_latest', create_panel_response('hassio-main-latest')) ])
# This route is for HA > 0.61 # This route is for backwards compatibility with HA 0.62 - 0.70
self.webapp.router.add_get( self.webapp.add_routes([
'/app-es5/index.html', create_panel_response('index')) web.get('/app-es5/index.html', create_response('index')),
self.webapp.router.add_get( web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
'/app-es5/hassio-app.html', create_panel_response('hassio-app')) ])
# This route is for HA > 0.70
self.webapp.add_routes([web.static('/app', panel_dir)])
async def start(self): async def start(self):
"""Run rest api webserver.""" """Run rest api webserver."""
self._handler = self.webapp.make_handler(loop=self._loop) self._handler = self.webapp.make_handler()
try: try:
self.server = await self._loop.create_server( self.server = await self.sys_loop.create_server(
self._handler, "0.0.0.0", "80") self._handler, "0.0.0.0", "80")
except OSError as err: except OSError as err:
_LOGGER.fatal( _LOGGER.fatal(

View File

@@ -17,10 +17,10 @@ from ..const import (
ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION, ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX, ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES, ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
ATTR_DISCOVERY, ATTR_DISCOVERY, ATTR_SECCOMP, ATTR_APPARMOR,
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT) CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT)
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..validate import DOCKER_PORTS from ..validate import DOCKER_PORTS, ALSA_DEVICE
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -33,6 +33,8 @@ SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]), vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS), vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(), vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
}) })
@@ -41,7 +43,7 @@ class APIAddons(CoreSysAttributes):
def _extract_addon(self, request, check_installed=True): def _extract_addon(self, request, check_installed=True):
"""Return addon and if not exists trow a exception.""" """Return addon and if not exists trow a exception."""
addon = self._addons.get(request.match_info.get('addon')) addon = self.sys_addons.get(request.match_info.get('addon'))
if not addon: if not addon:
raise RuntimeError("Addon not exists") raise RuntimeError("Addon not exists")
@@ -62,7 +64,7 @@ class APIAddons(CoreSysAttributes):
async def list(self, request): async def list(self, request):
"""Return all addons / repositories .""" """Return all addons / repositories ."""
data_addons = [] data_addons = []
for addon in self._addons.list_addons: for addon in self.sys_addons.list_addons:
data_addons.append({ data_addons.append({
ATTR_NAME: addon.name, ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug, ATTR_SLUG: addon.slug,
@@ -79,7 +81,7 @@ class APIAddons(CoreSysAttributes):
}) })
data_repositories = [] data_repositories = []
for repository in self._addons.list_repositories: for repository in self.sys_addons.list_repositories:
data_repositories.append({ data_repositories.append({
ATTR_SLUG: repository.slug, ATTR_SLUG: repository.slug,
ATTR_NAME: repository.name, ATTR_NAME: repository.name,
@@ -96,7 +98,7 @@ class APIAddons(CoreSysAttributes):
@api_process @api_process
async def reload(self, request): async def reload(self, request):
"""Reload all addons data.""" """Reload all addons data."""
await asyncio.shield(self._addons.reload(), loop=self._loop) await asyncio.shield(self.sys_addons.reload())
return True return True
@api_process @api_process
@@ -106,6 +108,7 @@ class APIAddons(CoreSysAttributes):
return { return {
ATTR_NAME: addon.name, ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
ATTR_DESCRIPTON: addon.description, ATTR_DESCRIPTON: addon.description,
ATTR_LONG_DESCRIPTION: addon.long_description, ATTR_LONG_DESCRIPTION: addon.long_description,
ATTR_VERSION: addon.version_installed, ATTR_VERSION: addon.version_installed,
@@ -123,6 +126,8 @@ class APIAddons(CoreSysAttributes):
ATTR_HOST_IPC: addon.host_ipc, ATTR_HOST_IPC: addon.host_ipc,
ATTR_HOST_DBUS: addon.host_dbus, ATTR_HOST_DBUS: addon.host_dbus,
ATTR_PRIVILEGED: addon.privileged, ATTR_PRIVILEGED: addon.privileged,
ATTR_SECCOMP: addon.seccomp,
ATTR_APPARMOR: addon.apparmor,
ATTR_DEVICES: self._pretty_devices(addon), ATTR_DEVICES: self._pretty_devices(addon),
ATTR_ICON: addon.with_icon, ATTR_ICON: addon.with_icon,
ATTR_LOGO: addon.with_logo, ATTR_LOGO: addon.with_logo,
@@ -189,13 +194,13 @@ class APIAddons(CoreSysAttributes):
def install(self, request): def install(self, request):
"""Install addon.""" """Install addon."""
addon = self._extract_addon(request, check_installed=False) addon = self._extract_addon(request, check_installed=False)
return asyncio.shield(addon.install(), loop=self._loop) return asyncio.shield(addon.install())
@api_process @api_process
def uninstall(self, request): def uninstall(self, request):
"""Uninstall addon.""" """Uninstall addon."""
addon = self._extract_addon(request) addon = self._extract_addon(request)
return asyncio.shield(addon.uninstall(), loop=self._loop) return asyncio.shield(addon.uninstall())
@api_process @api_process
def start(self, request): def start(self, request):
@@ -209,13 +214,13 @@ class APIAddons(CoreSysAttributes):
except vol.Invalid as ex: except vol.Invalid as ex:
raise RuntimeError(humanize_error(options, ex)) from None raise RuntimeError(humanize_error(options, ex)) from None
return asyncio.shield(addon.start(), loop=self._loop) return asyncio.shield(addon.start())
@api_process @api_process
def stop(self, request): def stop(self, request):
"""Stop addon.""" """Stop addon."""
addon = self._extract_addon(request) addon = self._extract_addon(request)
return asyncio.shield(addon.stop(), loop=self._loop) return asyncio.shield(addon.stop())
@api_process @api_process
def update(self, request): def update(self, request):
@@ -225,13 +230,13 @@ class APIAddons(CoreSysAttributes):
if addon.last_version == addon.version_installed: if addon.last_version == addon.version_installed:
raise RuntimeError("No update available!") raise RuntimeError("No update available!")
return asyncio.shield(addon.update(), loop=self._loop) return asyncio.shield(addon.update())
@api_process @api_process
def restart(self, request): def restart(self, request):
"""Restart addon.""" """Restart addon."""
addon = self._extract_addon(request) addon = self._extract_addon(request)
return asyncio.shield(addon.restart(), loop=self._loop) return asyncio.shield(addon.restart())
@api_process @api_process
def rebuild(self, request): def rebuild(self, request):
@@ -240,7 +245,7 @@ class APIAddons(CoreSysAttributes):
if not addon.need_build: if not addon.need_build:
raise RuntimeError("Only local build addons are supported") raise RuntimeError("Only local build addons are supported")
return asyncio.shield(addon.rebuild(), loop=self._loop) return asyncio.shield(addon.rebuild())
@api_process_raw(CONTENT_TYPE_BINARY) @api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request): def logs(self, request):
@@ -286,4 +291,4 @@ class APIAddons(CoreSysAttributes):
raise RuntimeError("STDIN not supported by addon") raise RuntimeError("STDIN not supported by addon")
data = await request.read() data = await request.read()
return await asyncio.shield(addon.write_stdin(data), loop=self._loop) return await asyncio.shield(addon.write_stdin(data))

View File

@@ -21,7 +21,7 @@ class APIDiscovery(CoreSysAttributes):
def _extract_message(self, request): def _extract_message(self, request):
"""Extract discovery message from URL.""" """Extract discovery message from URL."""
message = self._services.discovery.get(request.match_info.get('uuid')) message = self.sys_discovery.get(request.match_info.get('uuid'))
if not message: if not message:
raise RuntimeError("Discovery message not found") raise RuntimeError("Discovery message not found")
return message return message
@@ -30,7 +30,7 @@ class APIDiscovery(CoreSysAttributes):
async def list(self, request): async def list(self, request):
"""Show register services.""" """Show register services."""
discovery = [] discovery = []
for message in self._services.discovery.list_messages: for message in self.sys_discovery.list_messages:
discovery.append({ discovery.append({
ATTR_PROVIDER: message.provider, ATTR_PROVIDER: message.provider,
ATTR_UUID: message.uuid, ATTR_UUID: message.uuid,
@@ -45,7 +45,7 @@ class APIDiscovery(CoreSysAttributes):
async def set_discovery(self, request): async def set_discovery(self, request):
"""Write data into a discovery pipeline.""" """Write data into a discovery pipeline."""
body = await api_validate(SCHEMA_DISCOVERY, request) body = await api_validate(SCHEMA_DISCOVERY, request)
message = self._services.discovery.send( message = self.sys_discovery.send(
provider=request[REQUEST_FROM], **body) provider=request[REQUEST_FROM], **body)
return {ATTR_UUID: message.uuid} return {ATTR_UUID: message.uuid}
@@ -68,5 +68,5 @@ class APIDiscovery(CoreSysAttributes):
"""Delete data into a discovery message.""" """Delete data into a discovery message."""
message = self._extract_message(request) message = self._extract_message(request)
self._services.discovery.remove(message) self.sys_discovery.remove(message)
return True return True

34
hassio/api/hardware.py Normal file
View File

@@ -0,0 +1,34 @@
"""Init file for HassIO hardware rest api."""
import logging
from .utils import api_process
from ..const import (
ATTR_SERIAL, ATTR_DISK, ATTR_GPIO, ATTR_AUDIO, ATTR_INPUT, ATTR_OUTPUT)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
class APIHardware(CoreSysAttributes):
"""Handle rest api for hardware functions."""
@api_process
async def info(self, request):
"""Show hardware info."""
return {
ATTR_SERIAL: list(self.sys_hardware.serial_devices),
ATTR_INPUT: list(self.sys_hardware.input_devices),
ATTR_DISK: list(self.sys_hardware.disk_devices),
ATTR_GPIO: list(self.sys_hardware.gpio_devices),
ATTR_AUDIO: self.sys_hardware.audio_devices,
}
@api_process
async def audio(self, request):
"""Show ALSA audio devices."""
return {
ATTR_AUDIO: {
ATTR_INPUT: self.sys_host.alsa.input_devices,
ATTR_OUTPUT: self.sys_host.alsa.output_devices,
}
}

View File

@@ -9,7 +9,8 @@ from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_IMAGE, ATTR_CUSTOM, ATTR_BOOT, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_IMAGE, ATTR_CUSTOM, ATTR_BOOT,
ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, ATTR_CPU_PERCENT, ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, ATTR_CPU_PERCENT,
ATTR_MEMORY_USAGE, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_MEMORY_USAGE, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX,
ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_STARTUP_TIME, CONTENT_TYPE_BINARY) ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_WAIT_BOOT, ATTR_MACHINE,
CONTENT_TYPE_BINARY)
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..validate import NETWORK_PORT, DOCKER_IMAGE from ..validate import NETWORK_PORT, DOCKER_IMAGE
@@ -27,7 +28,7 @@ SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)), vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
vol.Optional(ATTR_SSL): vol.Boolean(), vol.Optional(ATTR_SSL): vol.Boolean(),
vol.Optional(ATTR_WATCHDOG): vol.Boolean(), vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
vol.Optional(ATTR_STARTUP_TIME): vol.Optional(ATTR_WAIT_BOOT):
vol.All(vol.Coerce(int), vol.Range(min=60)), vol.All(vol.Coerce(int), vol.Range(min=60)),
}) })
@@ -43,15 +44,16 @@ class APIHomeAssistant(CoreSysAttributes):
async def info(self, request): async def info(self, request):
"""Return host information.""" """Return host information."""
return { return {
ATTR_VERSION: self._homeassistant.version, ATTR_VERSION: self.sys_homeassistant.version,
ATTR_LAST_VERSION: self._homeassistant.last_version, ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
ATTR_IMAGE: self._homeassistant.image, ATTR_MACHINE: self.sys_homeassistant.machine,
ATTR_CUSTOM: self._homeassistant.is_custom_image, ATTR_IMAGE: self.sys_homeassistant.image,
ATTR_BOOT: self._homeassistant.boot, ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
ATTR_PORT: self._homeassistant.api_port, ATTR_BOOT: self.sys_homeassistant.boot,
ATTR_SSL: self._homeassistant.api_ssl, ATTR_PORT: self.sys_homeassistant.api_port,
ATTR_WATCHDOG: self._homeassistant.watchdog, ATTR_SSL: self.sys_homeassistant.api_ssl,
ATTR_STARTUP_TIME: self._homeassistant.startup_time, ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
} }
@api_process @api_process
@@ -60,34 +62,34 @@ class APIHomeAssistant(CoreSysAttributes):
body = await api_validate(SCHEMA_OPTIONS, request) body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_IMAGE in body and ATTR_LAST_VERSION in body: if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
self._homeassistant.image = body[ATTR_IMAGE] self.sys_homeassistant.image = body[ATTR_IMAGE]
self._homeassistant.last_version = body[ATTR_LAST_VERSION] self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION]
if ATTR_BOOT in body: if ATTR_BOOT in body:
self._homeassistant.boot = body[ATTR_BOOT] self.sys_homeassistant.boot = body[ATTR_BOOT]
if ATTR_PORT in body: if ATTR_PORT in body:
self._homeassistant.api_port = body[ATTR_PORT] self.sys_homeassistant.api_port = body[ATTR_PORT]
if ATTR_PASSWORD in body: if ATTR_PASSWORD in body:
self._homeassistant.api_password = body[ATTR_PASSWORD] self.sys_homeassistant.api_password = body[ATTR_PASSWORD]
if ATTR_SSL in body: if ATTR_SSL in body:
self._homeassistant.api_ssl = body[ATTR_SSL] self.sys_homeassistant.api_ssl = body[ATTR_SSL]
if ATTR_WATCHDOG in body: if ATTR_WATCHDOG in body:
self._homeassistant.watchdog = body[ATTR_WATCHDOG] self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]
if ATTR_STARTUP_TIME in body: if ATTR_WAIT_BOOT in body:
self._homeassistant.startup_time = body[ATTR_STARTUP_TIME] self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
self._homeassistant.save_data() self.sys_homeassistant.save_data()
return True return True
@api_process @api_process
async def stats(self, request): async def stats(self, request):
"""Return resource information.""" """Return resource information."""
stats = await self._homeassistant.stats() stats = await self.sys_homeassistant.stats()
if not stats: if not stats:
raise RuntimeError("No stats available") raise RuntimeError("No stats available")
@@ -105,39 +107,39 @@ class APIHomeAssistant(CoreSysAttributes):
async def update(self, request): async def update(self, request):
"""Update homeassistant.""" """Update homeassistant."""
body = await api_validate(SCHEMA_VERSION, request) body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self._homeassistant.last_version) version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version)
if version == self._homeassistant.version: if version == self.sys_homeassistant.version:
raise RuntimeError("Version {} is already in use".format(version)) raise RuntimeError("Version {} is already in use".format(version))
return await asyncio.shield( return await asyncio.shield(
self._homeassistant.update(version), loop=self._loop) self.sys_homeassistant.update(version))
@api_process @api_process
def stop(self, request): def stop(self, request):
"""Stop homeassistant.""" """Stop homeassistant."""
return asyncio.shield(self._homeassistant.stop(), loop=self._loop) return asyncio.shield(self.sys_homeassistant.stop())
@api_process @api_process
def start(self, request): def start(self, request):
"""Start homeassistant.""" """Start homeassistant."""
return asyncio.shield(self._homeassistant.start(), loop=self._loop) return asyncio.shield(self.sys_homeassistant.start())
@api_process @api_process
def restart(self, request): def restart(self, request):
"""Restart homeassistant.""" """Restart homeassistant."""
return asyncio.shield(self._homeassistant.restart(), loop=self._loop) return asyncio.shield(self.sys_homeassistant.restart())
@api_process_raw(CONTENT_TYPE_BINARY) @api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request): def logs(self, request):
"""Return homeassistant docker logs.""" """Return homeassistant docker logs."""
return self._homeassistant.logs() return self.sys_homeassistant.logs()
@api_process @api_process
async def check(self, request): async def check(self, request):
"""Check config of homeassistant.""" """Check config of homeassistant."""
code, message = await self._homeassistant.check_config() result = await self.sys_homeassistant.check_config()
if not code: if not result.valid:
raise RuntimeError(message) raise RuntimeError(result.log)
return True return True

View File

@@ -4,13 +4,11 @@ import logging
import voluptuous as vol import voluptuous as vol
from .utils import api_process_hostcontrol, api_process, api_validate from .utils import api_process, api_validate
from ..const import ( from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_HOSTNAME, ATTR_FEATURES, ATTR_KERNEL,
ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_TYPE, ATTR_OPERATING_SYSTEM, ATTR_CHASSIS, ATTR_DEPLOYMENT)
ATTR_AUDIO_OUTPUT, ATTR_GPIO)
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..validate import ALSA_CHANNEL
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -19,8 +17,7 @@ SCHEMA_VERSION = vol.Schema({
}) })
SCHEMA_OPTIONS = vol.Schema({ SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL, vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
}) })
@@ -31,62 +28,45 @@ class APIHost(CoreSysAttributes):
async def info(self, request): async def info(self, request):
"""Return host information.""" """Return host information."""
return { return {
ATTR_TYPE: self._host_control.type, ATTR_CHASSIS: self.sys_host.info.chassis,
ATTR_VERSION: self._host_control.version, ATTR_VERSION: None,
ATTR_LAST_VERSION: self._host_control.last_version, ATTR_LAST_VERSION: None,
ATTR_FEATURES: self._host_control.features, ATTR_TYPE: None,
ATTR_HOSTNAME: self._host_control.hostname, ATTR_FEATURES: self.sys_host.supperted_features,
ATTR_OS: self._host_control.os_info, ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_KERNEL: self.sys_host.info.kernel,
} }
@api_process @api_process
async def options(self, request): async def options(self, request):
"""Process host options.""" """Edit host settings."""
body = await api_validate(SCHEMA_OPTIONS, request) body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_AUDIO_OUTPUT in body: # hostname
self._config.audio_output = body[ATTR_AUDIO_OUTPUT] if ATTR_HOSTNAME in body:
if ATTR_AUDIO_INPUT in body: await asyncio.shield(
self._config.audio_input = body[ATTR_AUDIO_INPUT] self.sys_host.control.set_hostname(body[ATTR_HOSTNAME]))
self._config.save_data()
return True
@api_process_hostcontrol
def reboot(self, request):
"""Reboot host."""
return self._host_control.reboot()
@api_process_hostcontrol
def shutdown(self, request):
"""Poweroff host."""
return self._host_control.shutdown()
@api_process_hostcontrol
async def reload(self, request):
"""Reload host data."""
await self._host_control.load()
return True
@api_process_hostcontrol
async def update(self, request):
"""Update host OS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self._host_control.last_version)
if version == self._host_control.version:
raise RuntimeError(f"Version {version} is already in use")
return await asyncio.shield(
self._host_control.update(version=version), loop=self._loop)
@api_process @api_process
async def hardware(self, request): def reboot(self, request):
"""Return local hardware infos.""" """Reboot host."""
return { return asyncio.shield(self.sys_host.control.reboot())
ATTR_SERIAL: list(self._hardware.serial_devices),
ATTR_INPUT: list(self._hardware.input_devices), @api_process
ATTR_DISK: list(self._hardware.disk_devices), def shutdown(self, request):
ATTR_GPIO: list(self._hardware.gpio_devices), """Poweroff host."""
ATTR_AUDIO: self._hardware.audio_devices, return asyncio.shield(self.sys_host.control.shutdown())
}
@api_process
def reload(self, request):
"""Reload host data."""
return asyncio.shield(self.sys_host.reload())
@api_process
async def update(self, request):
"""Update host OS."""
pass
# body = await api_validate(SCHEMA_VERSION, request)
# version = body.get(ATTR_VERSION, self.sys_host.last_version)

View File

@@ -1,38 +0,0 @@
"""Init file for HassIO network rest api."""
import logging
import voluptuous as vol
from .utils import api_process, api_process_hostcontrol, api_validate
from ..const import ATTR_HOSTNAME
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
})
class APINetwork(CoreSysAttributes):
"""Handle rest api for network functions."""
@api_process
async def info(self, request):
"""Show network settings."""
return {
ATTR_HOSTNAME: self._host_control.hostname,
}
@api_process_hostcontrol
async def options(self, request):
"""Edit network settings."""
body = await api_validate(SCHEMA_OPTIONS, request)
# hostname
if ATTR_HOSTNAME in body:
if self._host_control.hostname != body[ATTR_HOSTNAME]:
await self._host_control.set_hostname(body[ATTR_HOSTNAME])
return True

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@@ -11,27 +11,28 @@
padding: 0; padding: 0;
} }
</style> </style>
<script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
</head> </head>
<body> <body>
<hassio-app></hassio-app> <hassio-app></hassio-app>
<script> <script>
function addScript(src) { function addScript(src) {
var e = document.createElement('script'); var e = document.createElement('script');
e.src = src; e.src = src;
document.head.appendChild(e); document.write(e.outerHTML);
} }
if (!window.parent.HASS_DEV) { var webComponentsSupported = (
addScript('/frontend_es5/custom-elements-es5-adapter.js'); 'customElements' in window &&
} 'import' in document.createElement('link') &&
var webComponentsSupported = ( 'content' in document.createElement('template'));
'customElements' in window && if (!webComponentsSupported) {
'import' in document.createElement('link') && addScript('/static/webcomponents-bundle.js');
'content' in document.createElement('template')); }
if (!webComponentsSupported) {
addScript('/static/webcomponents-lite.js');
}
</script> </script>
<!--
Disabled while we make Home Assistant able to serve the right files.
<script src="./app.js"></script>
-->
<link rel='import' href='./hassio-app.html'> <link rel='import' href='./hassio-app.html'>
<link rel='import' href='/static/mdi.html' async>
</body> </body>
</html> </html>

Binary file not shown.

View File

@@ -17,26 +17,38 @@ _LOGGER = logging.getLogger(__name__)
class APIProxy(CoreSysAttributes): class APIProxy(CoreSysAttributes):
"""API Proxy for Home-Assistant.""" """API Proxy for Home-Assistant."""
def _check_access(self, request):
"""Check the Hass.io token."""
hassio_token = request.headers.get(HEADER_HA_ACCESS)
addon = self.sys_addons.from_uuid(hassio_token)
if not addon:
_LOGGER.warning("Unknown Home-Assistant API access!")
else:
_LOGGER.info("%s access from %s", request.path, addon.slug)
async def _api_client(self, request, path, timeout=300): async def _api_client(self, request, path, timeout=300):
"""Return a client request with proxy origin for Home-Assistant.""" """Return a client request with proxy origin for Home-Assistant."""
url = f"{self._homeassistant.api_url}/api/{path}" url = f"{self.sys_homeassistant.api_url}/api/{path}"
try: try:
data = None data = None
headers = {} headers = {}
method = getattr(self._websession_ssl, request.method.lower()) method = getattr(self.sys_websession_ssl, request.method.lower())
params = request.query or None params = request.query or None
# read data # read data
with async_timeout.timeout(30, loop=self._loop): with async_timeout.timeout(30):
data = await request.read() data = await request.read()
if data: if data:
headers.update({CONTENT_TYPE: request.content_type}) headers.update({CONTENT_TYPE: request.content_type})
# need api password? # need api password?
if self._homeassistant.api_password: if self.sys_homeassistant.api_password:
headers = {HEADER_HA_ACCESS: self._homeassistant.api_password} headers = {
HEADER_HA_ACCESS: self.sys_homeassistant.api_password,
}
# reset headers # reset headers
if not headers: if not headers:
@@ -59,6 +71,8 @@ class APIProxy(CoreSysAttributes):
async def stream(self, request): async def stream(self, request):
"""Proxy HomeAssistant EventStream Requests.""" """Proxy HomeAssistant EventStream Requests."""
self._check_access(request)
_LOGGER.info("Home-Assistant EventStream start") _LOGGER.info("Home-Assistant EventStream start")
client = await self._api_client(request, 'stream', timeout=None) client = await self._api_client(request, 'stream', timeout=None)
@@ -71,7 +85,7 @@ class APIProxy(CoreSysAttributes):
if not data: if not data:
await response.write_eof() await response.write_eof()
break break
response.write(data) await response.write(data)
except aiohttp.ClientError: except aiohttp.ClientError:
await response.write_eof() await response.write_eof()
@@ -83,12 +97,14 @@ class APIProxy(CoreSysAttributes):
client.close() client.close()
_LOGGER.info("Home-Assistant EventStream close") _LOGGER.info("Home-Assistant EventStream close")
return response
async def api(self, request): async def api(self, request):
"""Proxy HomeAssistant API Requests.""" """Proxy HomeAssistant API Requests."""
path = request.match_info.get('path', '') self._check_access(request)
# Normal request # Normal request
_LOGGER.info("Home-Assistant /api/%s request", path) path = request.match_info.get('path', '')
client = await self._api_client(request, path) client = await self._api_client(request, path)
data = await client.read() data = await client.read()
@@ -100,10 +116,10 @@ class APIProxy(CoreSysAttributes):
async def _websocket_client(self): async def _websocket_client(self):
"""Initialize a websocket api connection.""" """Initialize a websocket api connection."""
url = f"{self._homeassistant.api_url}/api/websocket" url = f"{self.sys_homeassistant.api_url}/api/websocket"
try: try:
client = await self._websession_ssl.ws_connect( client = await self.sys_websession_ssl.ws_connect(
url, heartbeat=60, verify_ssl=False) url, heartbeat=60, verify_ssl=False)
# handle authentication # handle authentication
@@ -114,7 +130,7 @@ class APIProxy(CoreSysAttributes):
elif data.get('type') == 'auth_required': elif data.get('type') == 'auth_required':
await client.send_json({ await client.send_json({
'type': 'auth', 'type': 'auth',
'api_password': self._homeassistant.api_password, 'api_password': self.sys_homeassistant.api_password,
}) })
_LOGGER.error("Authentication to Home-Assistant websocket") _LOGGER.error("Authentication to Home-Assistant websocket")
@@ -136,12 +152,22 @@ class APIProxy(CoreSysAttributes):
try: try:
await server.send_json({ await server.send_json({
'type': 'auth_required', 'type': 'auth_required',
'ha_version': self._homeassistant.version, 'ha_version': self.sys_homeassistant.version,
}) })
await server.receive_json() # get internal token
# Check API access
response = await server.receive_json()
hassio_token = response.get('api_password')
addon = self.sys_addons.from_uuid(hassio_token)
if not addon:
_LOGGER.warning("Unauthorized websocket access!")
else:
_LOGGER.info("Websocket access from %s", addon.slug)
await server.send_json({ await server.send_json({
'type': 'auth_ok', 'type': 'auth_ok',
'ha_version': self._homeassistant.version, 'ha_version': self.sys_homeassistant.version,
}) })
except (RuntimeError, ValueError) as err: except (RuntimeError, ValueError) as err:
_LOGGER.error("Can't initialize handshake: %s", err) _LOGGER.error("Can't initialize handshake: %s", err)
@@ -156,16 +182,16 @@ class APIProxy(CoreSysAttributes):
server_read = None server_read = None
while not server.closed and not client.closed: while not server.closed and not client.closed:
if not client_read: if not client_read:
client_read = asyncio.ensure_future( client_read = self.sys_create_task(
client.receive_str(), loop=self._loop) client.receive_str())
if not server_read: if not server_read:
server_read = asyncio.ensure_future( server_read = self.sys_create_task(
server.receive_str(), loop=self._loop) server.receive_str())
# wait until data need to be processed # wait until data need to be processed
await asyncio.wait( await asyncio.wait(
[client_read, server_read], [client_read, server_read],
loop=self._loop, return_when=asyncio.FIRST_COMPLETED return_when=asyncio.FIRST_COMPLETED
) )
# server # server

View File

@@ -1,34 +1,56 @@
"""Handle security part of this API.""" """Handle security part of this API."""
import logging import logging
import re
from aiohttp.web import middleware from aiohttp.web import middleware
from aiohttp.web_exceptions import HTTPUnauthorized
from ..const import HEADER_TOKEN, REQUEST_FROM from ..const import HEADER_TOKEN, REQUEST_FROM
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
NO_SECURITY_CHECK = set((
re.compile(r"^/homeassistant/api/.*$"),
re.compile(r"^/homeassistant/websocket$"),
re.compile(r"^/supervisor/ping$"),
))
@middleware
async def security_layer(request, handler):
"""Check security access of this layer."""
coresys = request.app['coresys']
hassio_token = request.headers.get(HEADER_TOKEN)
# Need to be removed later class SecurityMiddleware(CoreSysAttributes):
if not hassio_token: """Security middleware functions."""
_LOGGER.warning("No valid hassio token for API access!")
request[REQUEST_FROM] = 'UNKNOWN'
# From Home-Assistant def __init__(self, coresys):
elif hassio_token == coresys.homeassistant.uuid: """Initialize security middleware."""
request[REQUEST_FROM] = 'homeassistant' self.coresys = coresys
# From Add-on @middleware
else: async def token_validation(self, request, handler):
for addon in coresys.addons.list_addons: """Check security access of this layer."""
if hassio_token != addon.uuid: hassio_token = request.headers.get(HEADER_TOKEN)
continue
# Ignore security check
for rule in NO_SECURITY_CHECK:
if rule.match(request.path):
_LOGGER.debug("Passthrough %s", request.path)
return await handler(request)
# Unknown API access
if not hassio_token:
_LOGGER.warning("Invalid token for access %s", request.path)
raise HTTPUnauthorized()
# Home-Assistant
if hassio_token == self.sys_homeassistant.uuid:
_LOGGER.debug("%s access from Home-Assistant", request.path)
request[REQUEST_FROM] = 'homeassistant'
return await handler(request)
# Add-on
addon = self.sys_addons.from_uuid(hassio_token)
if addon:
_LOGGER.info("%s access from %s", request.path, addon.slug)
request[REQUEST_FROM] = addon.slug request[REQUEST_FROM] = addon.slug
break return await handler(request)
return await handler(request) raise HTTPUnauthorized()

View File

@@ -11,7 +11,7 @@ class APIServices(CoreSysAttributes):
def _extract_service(self, request): def _extract_service(self, request):
"""Return service and if not exists trow a exception.""" """Return service and if not exists trow a exception."""
service = self._services.get(request.match_info.get('service')) service = self.sys_services.get(request.match_info.get('service'))
if not service: if not service:
raise RuntimeError("Service not exists") raise RuntimeError("Service not exists")
@@ -21,7 +21,7 @@ class APIServices(CoreSysAttributes):
async def list(self, request): async def list(self, request):
"""Show register services.""" """Show register services."""
services = [] services = []
for service in self._services.list_services: for service in self.sys_services.list_services:
services.append({ services.append({
ATTR_SLUG: service.slug, ATTR_SLUG: service.slug,
ATTR_AVAILABLE: service.enabled, ATTR_AVAILABLE: service.enabled,

View File

@@ -1,7 +1,10 @@
"""Init file for HassIO snapshot rest api.""" """Init file for HassIO snapshot rest api."""
import asyncio import asyncio
import logging import logging
from pathlib import Path
from tempfile import TemporaryDirectory
from aiohttp import web
import voluptuous as vol import voluptuous as vol
from .utils import api_process, api_validate from .utils import api_process, api_validate
@@ -9,7 +12,7 @@ from ..snapshots.validate import ALL_FOLDERS
from ..const import ( from ..const import (
ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES, ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE, ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
ATTR_SNAPSHOTS) ATTR_SNAPSHOTS, ATTR_PASSWORD, ATTR_PROTECTED, CONTENT_TYPE_TAR)
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -17,6 +20,7 @@ _LOGGER = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter # pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_PARTIAL = vol.Schema({ SCHEMA_RESTORE_PARTIAL = vol.Schema({
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(), vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
vol.Optional(ATTR_ADDONS): vol.Optional(ATTR_ADDONS):
vol.All([vol.Coerce(str)], vol.Unique()), vol.All([vol.Coerce(str)], vol.Unique()),
@@ -24,8 +28,13 @@ SCHEMA_RESTORE_PARTIAL = vol.Schema({
vol.All([vol.In(ALL_FOLDERS)], vol.Unique()), vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
}) })
SCHEMA_RESTORE_FULL = vol.Schema({
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
})
SCHEMA_SNAPSHOT_FULL = vol.Schema({ SCHEMA_SNAPSHOT_FULL = vol.Schema({
vol.Optional(ATTR_NAME): vol.Coerce(str), vol.Optional(ATTR_NAME): vol.Coerce(str),
vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
}) })
SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({ SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
@@ -41,7 +50,7 @@ class APISnapshots(CoreSysAttributes):
def _extract_snapshot(self, request): def _extract_snapshot(self, request):
"""Return addon and if not exists trow a exception.""" """Return addon and if not exists trow a exception."""
snapshot = self._snapshots.get(request.match_info.get('snapshot')) snapshot = self.sys_snapshots.get(request.match_info.get('snapshot'))
if not snapshot: if not snapshot:
raise RuntimeError("Snapshot not exists") raise RuntimeError("Snapshot not exists")
return snapshot return snapshot
@@ -50,12 +59,13 @@ class APISnapshots(CoreSysAttributes):
async def list(self, request): async def list(self, request):
"""Return snapshot list.""" """Return snapshot list."""
data_snapshots = [] data_snapshots = []
for snapshot in self._snapshots.list_snapshots: for snapshot in self.sys_snapshots.list_snapshots:
data_snapshots.append({ data_snapshots.append({
ATTR_SLUG: snapshot.slug, ATTR_SLUG: snapshot.slug,
ATTR_NAME: snapshot.name, ATTR_NAME: snapshot.name,
ATTR_DATE: snapshot.date, ATTR_DATE: snapshot.date,
ATTR_TYPE: snapshot.sys_type, ATTR_TYPE: snapshot.sys_type,
ATTR_PROTECTED: snapshot.protected,
}) })
return { return {
@@ -65,7 +75,7 @@ class APISnapshots(CoreSysAttributes):
@api_process @api_process
async def reload(self, request): async def reload(self, request):
"""Reload snapshot list.""" """Reload snapshot list."""
await asyncio.shield(self._snapshots.reload(), loop=self._loop) await asyncio.shield(self.sys_snapshots.reload())
return True return True
@api_process @api_process
@@ -79,6 +89,7 @@ class APISnapshots(CoreSysAttributes):
ATTR_SLUG: addon_data[ATTR_SLUG], ATTR_SLUG: addon_data[ATTR_SLUG],
ATTR_NAME: addon_data[ATTR_NAME], ATTR_NAME: addon_data[ATTR_NAME],
ATTR_VERSION: addon_data[ATTR_VERSION], ATTR_VERSION: addon_data[ATTR_VERSION],
ATTR_SIZE: addon_data[ATTR_SIZE],
}) })
return { return {
@@ -87,6 +98,7 @@ class APISnapshots(CoreSysAttributes):
ATTR_NAME: snapshot.name, ATTR_NAME: snapshot.name,
ATTR_DATE: snapshot.date, ATTR_DATE: snapshot.date,
ATTR_SIZE: snapshot.size, ATTR_SIZE: snapshot.size,
ATTR_PROTECTED: snapshot.protected,
ATTR_HOMEASSISTANT: snapshot.homeassistant_version, ATTR_HOMEASSISTANT: snapshot.homeassistant_version,
ATTR_ADDONS: data_addons, ATTR_ADDONS: data_addons,
ATTR_REPOSITORIES: snapshot.repositories, ATTR_REPOSITORIES: snapshot.repositories,
@@ -97,36 +109,78 @@ class APISnapshots(CoreSysAttributes):
async def snapshot_full(self, request): async def snapshot_full(self, request):
"""Full-Snapshot a snapshot.""" """Full-Snapshot a snapshot."""
body = await api_validate(SCHEMA_SNAPSHOT_FULL, request) body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
return await asyncio.shield( snapshot = await asyncio.shield(
self._snapshots.do_snapshot_full(**body), loop=self._loop) self.sys_snapshots.do_snapshot_full(**body))
if snapshot:
return {ATTR_SLUG: snapshot.slug}
return False
@api_process @api_process
async def snapshot_partial(self, request): async def snapshot_partial(self, request):
"""Partial-Snapshot a snapshot.""" """Partial-Snapshot a snapshot."""
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request) body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
return await asyncio.shield( snapshot = await asyncio.shield(
self._snapshots.do_snapshot_partial(**body), loop=self._loop) self.sys_snapshots.do_snapshot_partial(**body))
if snapshot:
return {ATTR_SLUG: snapshot.slug}
return False
@api_process @api_process
def restore_full(self, request): async def restore_full(self, request):
"""Full-Restore a snapshot.""" """Full-Restore a snapshot."""
snapshot = self._extract_snapshot(request) snapshot = self._extract_snapshot(request)
return asyncio.shield( body = await api_validate(SCHEMA_RESTORE_FULL, request)
self._snapshots.do_restore_full(snapshot), loop=self._loop)
return await asyncio.shield(
self.sys_snapshots.do_restore_full(snapshot, **body))
@api_process @api_process
async def restore_partial(self, request): async def restore_partial(self, request):
"""Partial-Restore a snapshot.""" """Partial-Restore a snapshot."""
snapshot = self._extract_snapshot(request) snapshot = self._extract_snapshot(request)
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request) body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
return await asyncio.shield( return await asyncio.shield(
self._snapshots.do_restore_partial(snapshot, **body), self.sys_snapshots.do_restore_partial(snapshot, **body))
loop=self._loop
)
@api_process @api_process
async def remove(self, request): async def remove(self, request):
"""Remove a snapshot.""" """Remove a snapshot."""
snapshot = self._extract_snapshot(request) snapshot = self._extract_snapshot(request)
return self._snapshots.remove(snapshot) return self.sys_snapshots.remove(snapshot)
async def download(self, request):
"""Download a snapshot file."""
snapshot = self._extract_snapshot(request)
_LOGGER.info("Download snapshot %s", snapshot.slug)
response = web.FileResponse(snapshot.tarfile)
response.content_type = CONTENT_TYPE_TAR
return response
@api_process
async def upload(self, request):
"""Upload a snapshot file."""
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
tar_file = Path(temp_dir, f"snapshot.tar")
try:
with tar_file.open('wb') as snapshot:
async for data in request.content.iter_any():
snapshot.write(data)
except OSError as err:
_LOGGER.error("Can't write new snapshot file: %s", err)
return False
except asyncio.CancelledError:
return False
snapshot = await asyncio.shield(
self.sys_snapshots.import_snapshot(tar_file))
if snapshot:
return {ATTR_SLUG: snapshot.slug}
return False
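The new upload handler streams the request body chunk-by-chunk into a temporary tar file before handing it to import_snapshot. A minimal standalone sketch of the same aiohttp streaming pattern (hypothetical route and file names, not the actual Hass.io route registration):

"""Sketch: stream an uploaded tar file to disk with aiohttp (assumed route)."""
from pathlib import Path
from tempfile import TemporaryDirectory

from aiohttp import web


async def upload_tar(request):
    """Write the raw request body to a temporary tar file."""
    with TemporaryDirectory() as temp_dir:
        tar_file = Path(temp_dir, "upload.tar")
        with tar_file.open("wb") as outfile:
            # iter_any() yields chunks as they arrive, so large snapshots
            # never have to fit into memory at once.
            async for chunk in request.content.iter_any():
                outfile.write(chunk)
        size = tar_file.stat().st_size
    return web.json_response({"result": "ok", "size": size})


app = web.Application()
app.router.add_post("/upload", upload_tar)  # hypothetical path

if __name__ == "__main__":
    web.run_app(app, port=8080)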


@@ -6,20 +6,19 @@ import voluptuous as vol
from .utils import api_process, api_process_raw, api_validate from .utils import api_process, api_process_raw, api_validate
from ..const import ( from ..const import (
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH, ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_ARCH,
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY, HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE, ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE, ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON) ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..validate import validate_timezone, WAIT_BOOT, REPOSITORIES from ..validate import validate_timezone, WAIT_BOOT, REPOSITORIES, CHANNELS
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
SCHEMA_OPTIONS = vol.Schema({ SCHEMA_OPTIONS = vol.Schema({
# pylint: disable=no-value-for-parameter vol.Optional(ATTR_CHANNEL): CHANNELS,
vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES, vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
vol.Optional(ATTR_TIMEZONE): validate_timezone, vol.Optional(ATTR_TIMEZONE): validate_timezone,
vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT, vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
@@ -42,7 +41,7 @@ class APISupervisor(CoreSysAttributes):
async def info(self, request): async def info(self, request):
"""Return host information.""" """Return host information."""
list_addons = [] list_addons = []
for addon in self._addons.list_addons: for addon in self.sys_addons.list_addons:
if addon.is_installed: if addon.is_installed:
list_addons.append({ list_addons.append({
ATTR_NAME: addon.name, ATTR_NAME: addon.name,
@@ -58,13 +57,13 @@ class APISupervisor(CoreSysAttributes):
return { return {
ATTR_VERSION: HASSIO_VERSION, ATTR_VERSION: HASSIO_VERSION,
ATTR_LAST_VERSION: self._updater.version_hassio, ATTR_LAST_VERSION: self.sys_updater.version_hassio,
ATTR_BETA_CHANNEL: self._updater.beta_channel, ATTR_CHANNEL: self.sys_updater.channel,
ATTR_ARCH: self._arch, ATTR_ARCH: self.sys_arch,
ATTR_WAIT_BOOT: self._config.wait_boot, ATTR_WAIT_BOOT: self.sys_config.wait_boot,
ATTR_TIMEZONE: self._config.timezone, ATTR_TIMEZONE: self.sys_config.timezone,
ATTR_ADDONS: list_addons, ATTR_ADDONS: list_addons,
ATTR_ADDONS_REPOSITORIES: self._config.addons_repositories, ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories,
} }
@api_process @api_process
@@ -72,27 +71,27 @@ class APISupervisor(CoreSysAttributes):
"""Set supervisor options.""" """Set supervisor options."""
body = await api_validate(SCHEMA_OPTIONS, request) body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_BETA_CHANNEL in body: if ATTR_CHANNEL in body:
self._updater.beta_channel = body[ATTR_BETA_CHANNEL] self.sys_updater.channel = body[ATTR_CHANNEL]
if ATTR_TIMEZONE in body: if ATTR_TIMEZONE in body:
self._config.timezone = body[ATTR_TIMEZONE] self.sys_config.timezone = body[ATTR_TIMEZONE]
if ATTR_WAIT_BOOT in body: if ATTR_WAIT_BOOT in body:
self._config.wait_boot = body[ATTR_WAIT_BOOT] self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]
if ATTR_ADDONS_REPOSITORIES in body: if ATTR_ADDONS_REPOSITORIES in body:
new = set(body[ATTR_ADDONS_REPOSITORIES]) new = set(body[ATTR_ADDONS_REPOSITORIES])
await asyncio.shield(self._addons.load_repositories(new)) await asyncio.shield(self.sys_addons.load_repositories(new))
self._updater.save_data() self.sys_updater.save_data()
self._config.save_data() self.sys_config.save_data()
return True return True
@api_process @api_process
async def stats(self, request): async def stats(self, request):
"""Return resource information.""" """Return resource information."""
stats = await self._supervisor.stats() stats = await self.sys_supervisor.stats()
if not stats: if not stats:
raise RuntimeError("No stats available") raise RuntimeError("No stats available")
@@ -110,22 +109,22 @@ class APISupervisor(CoreSysAttributes):
async def update(self, request): async def update(self, request):
"""Update supervisor OS.""" """Update supervisor OS."""
body = await api_validate(SCHEMA_VERSION, request) body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self._updater.version_hassio) version = body.get(ATTR_VERSION, self.sys_updater.version_hassio)
if version == self._supervisor.version: if version == self.sys_supervisor.version:
raise RuntimeError("Version {} is already in use".format(version)) raise RuntimeError("Version {} is already in use".format(version))
return await asyncio.shield( return await asyncio.shield(
self._supervisor.update(version), loop=self._loop) self.sys_supervisor.update(version))
@api_process @api_process
async def reload(self, request): async def reload(self, request):
"""Reload addons, config ect.""" """Reload addons, config ect."""
tasks = [ tasks = [
self._updater.reload(), self.sys_updater.reload(),
] ]
results, _ = await asyncio.shield( results, _ = await asyncio.shield(
asyncio.wait(tasks, loop=self._loop), loop=self._loop) asyncio.wait(tasks))
for result in results: for result in results:
if result.exception() is not None: if result.exception() is not None:
@@ -136,4 +135,4 @@ class APISupervisor(CoreSysAttributes):
@api_process_raw(CONTENT_TYPE_BINARY) @api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request): def logs(self, request):
"""Return supervisor docker logs.""" """Return supervisor docker logs."""
return self._supervisor.logs() return self.sys_supervisor.logs()
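The supervisor options now take a channel string instead of the old beta_channel boolean. A small sketch of how such a validator and option payload could look with voluptuous; the exact CHANNELS definition lives in validate.py and is assumed here:

"""Sketch: voluptuous validator for the new update channel option."""
import voluptuous as vol

CHANNEL_STABLE = 'stable'
CHANNEL_BETA = 'beta'
CHANNEL_DEV = 'dev'

# Assumed shape of the CHANNELS helper imported from ..validate
CHANNELS = vol.In([CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV])

SCHEMA_OPTIONS = vol.Schema({
    vol.Optional('channel'): CHANNELS,
})

# A valid payload passes through unchanged
print(SCHEMA_OPTIONS({'channel': 'beta'}))      # {'channel': 'beta'}

# The old boolean style is rejected by the new schema
try:
    SCHEMA_OPTIONS({'channel': True})
except vol.Invalid as err:
    print(f"rejected: {err}")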


@@ -4,13 +4,13 @@ import hashlib
import logging import logging
from aiohttp import web from aiohttp import web
from aiohttp.web_exceptions import HTTPServiceUnavailable
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error from voluptuous.humanize import humanize_error
from ..const import ( from ..const import (
JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR, JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
CONTENT_TYPE_BINARY) CONTENT_TYPE_BINARY)
from ..exceptions import HassioError
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -33,42 +33,21 @@ def api_process(method):
answer = await method(api, *args, **kwargs) answer = await method(api, *args, **kwargs)
except RuntimeError as err: except RuntimeError as err:
return api_return_error(message=str(err)) return api_return_error(message=str(err))
except HassioError:
_LOGGER.exception("Hassio error")
return api_return_error()
if isinstance(answer, dict): if isinstance(answer, dict):
return api_return_ok(data=answer) return api_return_ok(data=answer)
if isinstance(answer, web.Response): if isinstance(answer, web.Response):
return answer return answer
elif answer: elif isinstance(answer, bool) and not answer:
return api_return_ok() return api_return_error()
return api_return_error() return api_return_ok()
return wrap_api return wrap_api
def api_process_hostcontrol(method):
"""Wrap HostControl calls to rest api."""
async def wrap_hostcontrol(api, *args, **kwargs):
"""Return host information."""
# pylint: disable=protected-access
if not api._host_control.active:
raise HTTPServiceUnavailable()
try:
answer = await method(api, *args, **kwargs)
except RuntimeError as err:
return api_return_error(message=str(err))
if isinstance(answer, dict):
return api_return_ok(data=answer)
elif answer is None:
return api_return_error("Function is not supported")
elif answer:
return api_return_ok()
return api_return_error()
return wrap_hostcontrol
def api_process_raw(content): def api_process_raw(content):
"""Wrap content_type into function.""" """Wrap content_type into function."""
def wrap_method(method): def wrap_method(method):
@@ -81,6 +60,9 @@ def api_process_raw(content):
except RuntimeError as err: except RuntimeError as err:
msg_data = str(err).encode() msg_data = str(err).encode()
msg_type = CONTENT_TYPE_BINARY msg_type = CONTENT_TYPE_BINARY
except HassioError:
msg_data = b''
msg_type = CONTENT_TYPE_BINARY
return web.Response(body=msg_data, content_type=msg_type) return web.Response(body=msg_data, content_type=msg_type)
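With api_process_hostcontrol removed, api_process is the single wrapper that turns handler return values into JSON answers. A condensed sketch of that mapping, assuming the usual ok/error envelope used by the API:

"""Sketch: how api_process maps handler results to a JSON envelope."""
from functools import wraps

from aiohttp import web


def api_return_ok(data=None):
    """Build a success envelope."""
    return web.json_response({'result': 'ok', 'data': data or {}})


def api_return_error(message=None):
    """Build an error envelope."""
    return web.json_response({'result': 'error', 'message': message})


def api_process(method):
    """Wrap an API handler coroutine (simplified)."""
    @wraps(method)
    async def wrap_api(api, *args, **kwargs):
        try:
            answer = await method(api, *args, **kwargs)
        except RuntimeError as err:
            return api_return_error(message=str(err))

        if isinstance(answer, dict):
            return api_return_ok(data=answer)      # payload answers
        if isinstance(answer, web.Response):
            return answer                          # raw responses pass through
        if isinstance(answer, bool) and not answer:
            return api_return_error()              # explicit False means failure
        return api_return_ok()                     # True/None count as success
    return wrap_api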


@@ -7,6 +7,7 @@ from pathlib import Path
from colorlog import ColoredFormatter from colorlog import ColoredFormatter
from .core import HassIO
from .addons import AddonManager from .addons import AddonManager
from .api import RestAPI from .api import RestAPI
from .const import SOCKET_DOCKER from .const import SOCKET_DOCKER
@@ -17,6 +18,9 @@ from .snapshots import SnapshotManager
from .tasks import Tasks from .tasks import Tasks
from .updater import Updater from .updater import Updater
from .services import ServiceManager from .services import ServiceManager
from .services import Discovery
from .host import HostManager
from .dbus import DBusManager
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -26,14 +30,18 @@ def initialize_coresys(loop):
coresys = CoreSys(loop) coresys = CoreSys(loop)
# Initialize core objects # Initialize core objects
coresys.core = HassIO(coresys)
coresys.updater = Updater(coresys) coresys.updater = Updater(coresys)
coresys.api = RestAPI(coresys) coresys.api = RestAPI(coresys)
coresys.supervisor = Supervisor(coresys) coresys.supervisor = Supervisor(coresys)
coresys.homeassistant = HomeAssistant(coresys) coresys.homeassistant = HomeAssistant(coresys)
coresys.addons = AddonManager(coresys) coresys.addons = AddonManager(coresys)
coresys.snapshots = SnapshotManager(coresys) coresys.snapshots = SnapshotManager(coresys)
coresys.host = HostManager(coresys)
coresys.tasks = Tasks(coresys) coresys.tasks = Tasks(coresys)
coresys.services = ServiceManager(coresys) coresys.services = ServiceManager(coresys)
coresys.discovery = Discovery(coresys)
coresys.dbus = DBusManager(coresys)
# bootstrap config # bootstrap config
initialize_system_data(coresys) initialize_system_data(coresys)
@@ -146,7 +154,12 @@ def check_environment():
# check socat exec # check socat exec
if not shutil.which('socat'): if not shutil.which('socat'):
_LOGGER.fatal("Can0t find socat program!") _LOGGER.fatal("Can't find socat program!")
return False
# check socat exec
if not shutil.which('gdbus'):
_LOGGER.fatal("Can't find gdbus program!")
return False return False
return True return True


@@ -6,7 +6,7 @@ from pathlib import Path, PurePath
from .const import ( from .const import (
FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST, FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_LAST_BOOT, ATTR_WAIT_BOOT) ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
from .utils.dt import parse_datetime from .utils.dt import parse_datetime
from .utils.json import JsonConfig from .utils.json import JsonConfig
from .validate import SCHEMA_HASSIO_CONFIG from .validate import SCHEMA_HASSIO_CONFIG
@@ -136,6 +136,11 @@ class CoreConfig(JsonConfig):
"""Return hass.io temp folder.""" """Return hass.io temp folder."""
return Path(HASSIO_DATA, TMP_DATA) return Path(HASSIO_DATA, TMP_DATA)
@property
def path_extern_tmp(self):
"""Return hass.io temp folder for docker."""
return PurePath(self.path_extern_hassio, TMP_DATA)
@property @property
def path_backup(self): def path_backup(self):
"""Return root backup data folder.""" """Return root backup data folder."""
@@ -174,23 +179,3 @@ class CoreConfig(JsonConfig):
return return
self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo) self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
@property
def audio_output(self):
"""Return ALSA audio output card,dev."""
return self._data.get(ATTR_AUDIO_OUTPUT)
@audio_output.setter
def audio_output(self, value):
"""Set ALSA audio output card,dev."""
self._data[ATTR_AUDIO_OUTPUT] = value
@property
def audio_input(self):
"""Return ALSA audio input card,dev."""
return self._data.get(ATTR_AUDIO_INPUT)
@audio_input.setter
def audio_input(self, value):
"""Set ALSA audio input card,dev."""
self._data[ATTR_AUDIO_INPUT] = value


@@ -2,12 +2,11 @@
from pathlib import Path from pathlib import Path
from ipaddress import ip_network from ipaddress import ip_network
HASSIO_VERSION = '0.86' HASSIO_VERSION = '105'
URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/' URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
'hassio/{}/version.json') URL_HASSIO_VERSION = \
"https://s3.amazonaws.com/hassio-version/{channel}.json"
URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
HASSIO_DATA = Path("/data") HASSIO_DATA = Path("/data")
@@ -18,7 +17,6 @@ FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json") FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")
SOCKET_DOCKER = Path("/var/run/docker.sock") SOCKET_DOCKER = Path("/var/run/docker.sock")
SOCKET_HC = Path("/var/run/hassio-hc.sock")
DOCKER_NETWORK = 'hassio' DOCKER_NETWORK = 'hassio'
DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23') DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23')
@@ -27,6 +25,7 @@ DOCKER_NETWORK_RANGE = ip_network('172.30.33.0/24')
LABEL_VERSION = 'io.hass.version' LABEL_VERSION = 'io.hass.version'
LABEL_ARCH = 'io.hass.arch' LABEL_ARCH = 'io.hass.arch'
LABEL_TYPE = 'io.hass.type' LABEL_TYPE = 'io.hass.type'
LABEL_MACHINE = 'io.hass.machine'
META_ADDON = 'addon' META_ADDON = 'addon'
META_SUPERVISOR = 'supervisor' META_SUPERVISOR = 'supervisor'
@@ -43,6 +42,7 @@ CONTENT_TYPE_BINARY = 'application/octet-stream'
CONTENT_TYPE_PNG = 'image/png' CONTENT_TYPE_PNG = 'image/png'
CONTENT_TYPE_JSON = 'application/json' CONTENT_TYPE_JSON = 'application/json'
CONTENT_TYPE_TEXT = 'text/plain' CONTENT_TYPE_TEXT = 'text/plain'
CONTENT_TYPE_TAR = 'application/tar'
HEADER_HA_ACCESS = 'x-ha-access' HEADER_HA_ACCESS = 'x-ha-access'
HEADER_TOKEN = 'X-HASSIO-KEY' HEADER_TOKEN = 'X-HASSIO-KEY'
@@ -51,7 +51,9 @@ ENV_TIME = 'TZ'
REQUEST_FROM = 'HASSIO_FROM' REQUEST_FROM = 'HASSIO_FROM'
ATTR_MACHINE = 'machine'
ATTR_WAIT_BOOT = 'wait_boot' ATTR_WAIT_BOOT = 'wait_boot'
ATTR_DEPLOYMENT = 'deployment'
ATTR_WATCHDOG = 'watchdog' ATTR_WATCHDOG = 'watchdog'
ATTR_CHANGELOG = 'changelog' ATTR_CHANGELOG = 'changelog'
ATTR_DATE = 'date' ATTR_DATE = 'date'
@@ -60,7 +62,8 @@ ATTR_LONG_DESCRIPTION = 'long_description'
ATTR_HOSTNAME = 'hostname' ATTR_HOSTNAME = 'hostname'
ATTR_TIMEZONE = 'timezone' ATTR_TIMEZONE = 'timezone'
ATTR_ARGS = 'args' ATTR_ARGS = 'args'
ATTR_OS = 'os' ATTR_OPERATING_SYSTEM = 'operating_system'
ATTR_CHASSIS = 'chassis'
ATTR_TYPE = 'type' ATTR_TYPE = 'type'
ATTR_SOURCE = 'source' ATTR_SOURCE = 'source'
ATTR_FEATURES = 'features' ATTR_FEATURES = 'features'
@@ -69,7 +72,7 @@ ATTR_VERSION = 'version'
ATTR_AUTO_UART = 'auto_uart' ATTR_AUTO_UART = 'auto_uart'
ATTR_LAST_BOOT = 'last_boot' ATTR_LAST_BOOT = 'last_boot'
ATTR_LAST_VERSION = 'last_version' ATTR_LAST_VERSION = 'last_version'
ATTR_BETA_CHANNEL = 'beta_channel' ATTR_CHANNEL = 'channel'
ATTR_NAME = 'name' ATTR_NAME = 'name'
ATTR_SLUG = 'slug' ATTR_SLUG = 'slug'
ATTR_DESCRIPTON = 'description' ATTR_DESCRIPTON = 'description'
@@ -155,7 +158,12 @@ ATTR_CONFIG = 'config'
ATTR_DISCOVERY_ID = 'discovery_id' ATTR_DISCOVERY_ID = 'discovery_id'
ATTR_SERVICES = 'services' ATTR_SERVICES = 'services'
ATTR_DISCOVERY = 'discovery' ATTR_DISCOVERY = 'discovery'
ATTR_STARTUP_TIME = 'startup_time' ATTR_PROTECTED = 'protected'
ATTR_CRYPTO = 'crypto'
ATTR_BRANCH = 'branch'
ATTR_KERNEL = 'kernel'
ATTR_SECCOMP = 'seccomp'
ATTR_APPARMOR = 'apparmor'
SERVICE_MQTT = 'mqtt' SERVICE_MQTT = 'mqtt'
@@ -183,6 +191,10 @@ ARCH_AARCH64 = 'aarch64'
ARCH_AMD64 = 'amd64' ARCH_AMD64 = 'amd64'
ARCH_I386 = 'i386' ARCH_I386 = 'i386'
CHANNEL_STABLE = 'stable'
CHANNEL_BETA = 'beta'
CHANNEL_DEV = 'dev'
REPOSITORY_CORE = 'core' REPOSITORY_CORE = 'core'
REPOSITORY_LOCAL = 'local' REPOSITORY_LOCAL = 'local'
@@ -193,3 +205,14 @@ FOLDER_SSL = 'ssl'
SNAPSHOT_FULL = 'full' SNAPSHOT_FULL = 'full'
SNAPSHOT_PARTIAL = 'partial' SNAPSHOT_PARTIAL = 'partial'
CRYPTO_AES128 = 'aes128'
SECURITY_PROFILE = 'profile'
SECURITY_DEFAULT = 'default'
SECURITY_DISABLE = 'disable'
FEATURES_SHUTDOWN = 'shutdown'
FEATURES_REBOOT = 'reboot'
FEATURES_UPDATE = 'update'
FEATURES_HOSTNAME = 'hostname'


@@ -1,10 +1,12 @@
"""Main file for HassIO.""" """Main file for HassIO."""
from contextlib import suppress
import asyncio import asyncio
import logging import logging
from .coresys import CoreSysAttributes from .coresys import CoreSysAttributes
from .const import ( from .const import (
STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE) STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE)
from .exceptions import HassioError
from .utils.dt import fetch_timezone from .utils.dt import fetch_timezone
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -20,98 +22,114 @@ class HassIO(CoreSysAttributes):
async def setup(self): async def setup(self):
"""Setup HassIO orchestration.""" """Setup HassIO orchestration."""
# update timezone # update timezone
if self._config.timezone == 'UTC': if self.sys_config.timezone == 'UTC':
self._config.timezone = await fetch_timezone(self._websession) self.sys_config.timezone = \
await fetch_timezone(self.sys_websession)
# supervisor # Load DBus
await self._supervisor.load() await self.sys_dbus.load()
# hostcontrol # Load Host
await self._host_control.load() await self.sys_host.load()
# Load homeassistant # Load Supervisor
await self._homeassistant.load() await self.sys_supervisor.load()
# Load addons # Load Home Assistant
await self._addons.load() await self.sys_homeassistant.load()
# Load Add-ons
await self.sys_addons.load()
# rest api views # rest api views
await self._api.load() await self.sys_api.load()
# load last available data # load last available data
await self._updater.load() await self.sys_updater.load()
# load last available data # load last available data
await self._snapshots.load() await self.sys_snapshots.load()
# load services # load services
await self._services.load() await self.sys_services.load()
# start dns forwarding # start dns forwarding
self._loop.create_task(self._dns.start()) self.sys_create_task(self.sys_dns.start())
# start addon mark as initialize
await self._addons.auto_boot(STARTUP_INITIALIZE)
async def start(self): async def start(self):
"""Start HassIO orchestration.""" """Start HassIO orchestration."""
# on release channel, try update itself # on release channel, try update itself
# on beta channel, only read new versions # on dev mode, only read new versions
if not self._updater.beta_channel and self._supervisor.need_update: if not self.sys_dev and self.sys_supervisor.need_update:
if await self._supervisor.update(): if await self.sys_supervisor.update():
return return
else: else:
_LOGGER.info("Ignore Hass.io auto updates on beta mode") _LOGGER.info("Ignore Hass.io auto updates on dev channel")
# start api # start api
await self._api.start() await self.sys_api.start()
_LOGGER.info("Start API on %s", self._docker.network.supervisor) _LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
# start addon mark as initialize
await self.sys_addons.boot(STARTUP_INITIALIZE)
try: try:
# HomeAssistant is already running / supervisor have only reboot # HomeAssistant is already running / supervisor have only reboot
if self._hardware.last_boot == self._config.last_boot: if self.sys_hardware.last_boot == self.sys_config.last_boot:
_LOGGER.info("Hass.io reboot detected") _LOGGER.info("Hass.io reboot detected")
return return
# reset register services / discovery # reset register services / discovery
self._services.reset() self.sys_services.reset()
# start addon mark as system # start addon mark as system
await self._addons.auto_boot(STARTUP_SYSTEM) await self.sys_addons.boot(STARTUP_SYSTEM)
# start addon mark as services # start addon mark as services
await self._addons.auto_boot(STARTUP_SERVICES) await self.sys_addons.boot(STARTUP_SERVICES)
# run HomeAssistant # run HomeAssistant
if self._homeassistant.boot: if self.sys_homeassistant.boot:
await self._homeassistant.start() await self.sys_homeassistant.start()
# start addon mark as application # start addon mark as application
await self._addons.auto_boot(STARTUP_APPLICATION) await self.sys_addons.boot(STARTUP_APPLICATION)
# store new last boot # store new last boot
self._config.last_boot = self._hardware.last_boot self.sys_config.last_boot = self.sys_hardware.last_boot
self._config.save_data() self.sys_config.save_data()
finally: finally:
# Add core tasks into scheduler # Add core tasks into scheduler
await self._tasks.load() await self.sys_tasks.load()
# If landingpage / run upgrade in background # If landingpage / run upgrade in background
if self._homeassistant.version == 'landingpage': if self.sys_homeassistant.version == 'landingpage':
self._loop.create_task(self._homeassistant.install()) self.sys_create_task(self.sys_homeassistant.install())
_LOGGER.info("Hass.io is up and running") _LOGGER.info("Hass.io is up and running")
async def stop(self): async def stop(self):
"""Stop a running orchestration.""" """Stop a running orchestration."""
# don't process scheduler anymore # don't process scheduler anymore
self._scheduler.suspend = True self.sys_scheduler.suspend = True
# process stop tasks
self._websession.close()
self._websession_ssl.close()
# process async stop tasks # process async stop tasks
await asyncio.wait( await asyncio.wait([
[self._api.stop(), self._dns.stop()], loop=self._loop) self.sys_api.stop(),
self.sys_dns.stop(),
self.sys_websession.close(),
self.sys_websession_ssl.close()
])
async def shutdown(self):
"""Shutdown all running containers in correct order."""
await self.sys_addons.shutdown(STARTUP_APPLICATION)
# Close Home Assistant
with suppress(HassioError):
await self.sys_homeassistant.stop()
await self.sys_addons.shutdown(STARTUP_SERVICES)
await self.sys_addons.shutdown(STARTUP_SYSTEM)
await self.sys_addons.shutdown(STARTUP_INITIALIZE)
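The new shutdown coroutine stops containers in reverse start order and swallows errors from Home Assistant so the later stages still run. A minimal sketch of that ordered-shutdown pattern, with placeholder coroutines standing in for the real managers:

"""Sketch: ordered async shutdown with error suppression."""
import asyncio
from contextlib import suppress


async def stop_stage(name):
    """Placeholder for stopping one startup stage."""
    print(f"stopping {name}")


async def stop_homeassistant():
    """Placeholder that may fail without aborting the sequence."""
    raise RuntimeError("simulated stop failure")


async def shutdown():
    """Stop application add-ons first, then Home Assistant, then the rest."""
    await stop_stage("application")

    # Errors here must not prevent the remaining stages from stopping.
    with suppress(RuntimeError):
        await stop_homeassistant()

    await stop_stage("services")
    await stop_stage("system")
    await stop_stage("initialize")


asyncio.get_event_loop().run_until_complete(shutdown())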


@@ -2,15 +2,15 @@
import aiohttp import aiohttp
from .const import CHANNEL_DEV
from .config import CoreConfig from .config import CoreConfig
from .docker import DockerAPI from .docker import DockerAPI
from .misc.dns import DNSForward from .misc.dns import DNSForward
from .misc.hardware import Hardware from .misc.hardware import Hardware
from .misc.host_control import HostControl
from .misc.scheduler import Scheduler from .misc.scheduler import Scheduler
class CoreSys(object): class CoreSys:
"""Class that handle all shared data.""" """Class that handle all shared data."""
def __init__(self, loop): def __init__(self, loop):
@@ -30,9 +30,9 @@ class CoreSys(object):
self._docker = DockerAPI() self._docker = DockerAPI()
self._scheduler = Scheduler(loop=loop) self._scheduler = Scheduler(loop=loop)
self._dns = DNSForward(loop=loop) self._dns = DNSForward(loop=loop)
self._host_control = HostControl(loop=loop)
# Internal objects pointers # Internal objects pointers
self._core = None
self._homeassistant = None self._homeassistant = None
self._supervisor = None self._supervisor = None
self._addons = None self._addons = None
@@ -40,7 +40,10 @@ class CoreSys(object):
self._updater = None self._updater = None
self._snapshots = None self._snapshots = None
self._tasks = None self._tasks = None
self._host = None
self._dbus = None
self._services = None self._services = None
self._discovery = None
@property @property
def arch(self): def arch(self):
@@ -49,6 +52,18 @@ class CoreSys(object):
return self._supervisor.arch return self._supervisor.arch
return None return None
@property
def machine(self):
"""Return running machine type of hass.io system."""
if self._homeassistant:
return self._homeassistant.machine
return None
@property
def dev(self):
"""Return True if we run dev modus."""
return self._updater.channel == CHANNEL_DEV
@property @property
def loop(self): def loop(self):
"""Return loop object.""" """Return loop object."""
@@ -90,9 +105,16 @@ class CoreSys(object):
return self._dns return self._dns
@property @property
def host_control(self): def core(self):
"""Return HostControl object.""" """Return HassIO object."""
return self._host_control return self._core
@core.setter
def core(self, value):
"""Set a HassIO object."""
if self._core:
raise RuntimeError("HassIO already set!")
self._core = value
@property @property
def homeassistant(self): def homeassistant(self):
@@ -190,14 +212,58 @@ class CoreSys(object):
raise RuntimeError("Services already set!") raise RuntimeError("Services already set!")
self._services = value self._services = value
@property
def discovery(self):
"""Return ServiceManager object."""
return self._discovery
class CoreSysAttributes(object): @discovery.setter
def discovery(self, value):
"""Set a Discovery object."""
if self._discovery:
raise RuntimeError("Discovery already set!")
self._discovery = value
@property
def dbus(self):
"""Return DBusManager object."""
return self._dbus
@dbus.setter
def dbus(self, value):
"""Set a DBusManager object."""
if self._dbus:
raise RuntimeError("DBusManager already set!")
self._dbus = value
@property
def host(self):
"""Return HostManager object."""
return self._host
@host.setter
def host(self, value):
"""Set a HostManager object."""
if self._host:
raise RuntimeError("HostManager already set!")
self._host = value
def run_in_executor(self, funct, *args):
"""Wrapper for executor pool."""
return self._loop.run_in_executor(None, funct, *args)
def create_task(self, coroutine):
"""Wrapper for async task."""
return self._loop.create_task(coroutine)
class CoreSysAttributes:
"""Inheret basic CoreSysAttributes.""" """Inheret basic CoreSysAttributes."""
coresys = None coresys = None
def __getattr__(self, name): def __getattr__(self, name):
"""Mapping to coresys.""" """Mapping to coresys."""
if hasattr(self.coresys, name[1:]): if name.startswith("sys_") and hasattr(self.coresys, name[4:]):
return getattr(self.coresys, name[1:]) return getattr(self.coresys, name[4:])
raise AttributeError(f"Can't find {name} on {self.__class__}") raise AttributeError()
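CoreSysAttributes now only forwards attributes that carry an explicit sys_ prefix, which makes the indirection visible at every call site. A tiny sketch of that prefix-based forwarding with a stand-in CoreSys object:

"""Sketch: forward only sys_-prefixed attribute lookups to a shared object."""


class FakeCoreSys:
    """Stand-in for CoreSys with one shared attribute."""

    def __init__(self):
        self.config = {"timezone": "UTC"}


class CoreSysAttributes:
    """Mixin that maps self.sys_<name> to self.coresys.<name>."""

    coresys = None

    def __getattr__(self, name):
        if name.startswith("sys_") and hasattr(self.coresys, name[4:]):
            return getattr(self.coresys, name[4:])
        raise AttributeError()


class Component(CoreSysAttributes):
    def __init__(self, coresys):
        self.coresys = coresys


component = Component(FakeCoreSys())
print(component.sys_config["timezone"])   # 'UTC', resolved via the prefix
print(hasattr(component, "_config"))      # False: old unprefixed names no longer resolve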

hassio/dbus/__init__.py (new file, 30 lines)

@@ -0,0 +1,30 @@
"""DBus interface objects."""
from .systemd import Systemd
from .hostname import Hostname
from ..coresys import CoreSysAttributes
class DBusManager(CoreSysAttributes):
"""DBus Interface handler."""
def __init__(self, coresys):
"""Initialize DBus Interface."""
self.coresys = coresys
self._systemd = Systemd()
self._hostname = Hostname()
@property
def systemd(self):
"""Return Systemd Interface."""
return self._systemd
@property
def hostname(self):
"""Return hostname Interface."""
return self._hostname
async def load(self):
"""Connect interfaces to dbus."""
await self.systemd.connect()
await self.hostname.connect()

hassio/dbus/hostname.py (new file, 39 lines)

@@ -0,0 +1,39 @@
"""DBus interface for hostname."""
import logging
from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus
_LOGGER = logging.getLogger(__name__)
DBUS_NAME = 'org.freedesktop.hostname1'
DBUS_OBJECT = '/org/freedesktop/hostname1'
class Hostname(DBusInterface):
"""Handle DBus interface for hostname/system."""
async def connect(self):
"""Connect do bus."""
try:
self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
except DBusError:
_LOGGER.warning("Can't connect to hostname")
@dbus_connected
def set_static_hostname(self, hostname):
"""Change local hostname.
Return a coroutine.
"""
return self.dbus.SetStaticHostname(hostname)
@dbus_connected
def get_properties(self):
"""Return local host informations.
Return a coroutine.
"""
return self.dbus.get_properties(DBUS_NAME)

hassio/dbus/interface.py (new file, 18 lines)

@@ -0,0 +1,18 @@
"""Interface class for dbus wrappers."""
class DBusInterface:
"""Handle DBus interface for hostname/system."""
def __init__(self):
"""Initialize systemd."""
self.dbus = None
@property
def is_connected(self):
"""Return True, if they is connected to dbus."""
return self.dbus is not None
async def connect(self):
"""Connect do bus."""
raise NotImplementedError()

hassio/dbus/systemd.py (new file, 39 lines)

@@ -0,0 +1,39 @@
"""Interface to Systemd over dbus."""
import logging
from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus
_LOGGER = logging.getLogger(__name__)
DBUS_NAME = 'org.freedesktop.systemd1'
DBUS_OBJECT = '/org/freedesktop/systemd1'
class Systemd(DBusInterface):
"""Systemd function handler."""
async def connect(self):
"""Connect do bus."""
try:
self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
except DBusError:
_LOGGER.warning("Can't connect to systemd")
@dbus_connected
def reboot(self):
"""Reboot host computer.
Return a coroutine.
"""
return self.dbus.Manager.Reboot()
@dbus_connected
def power_off(self):
"""Power off host computer.
Return a coroutine.
"""
return self.dbus.Manager.PowerOff()

hassio/dbus/utils.py (new file, 14 lines)

@@ -0,0 +1,14 @@
"""Utils for dbus."""
from ..exceptions import DBusNotConnectedError
def dbus_connected(method):
"""Wrapper for check if dbus is connected."""
def wrap_dbus(api, *args, **kwargs):
"""Check if dbus is connected before call a method."""
if api.dbus is None:
raise DBusNotConnectedError()
return method(api, *args, **kwargs)
return wrap_dbus
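dbus_connected guards every D-Bus call so an unconnected interface fails fast with DBusNotConnectedError instead of an obscure attribute error. A short usage sketch with a hypothetical interface object:

"""Sketch: guard method calls until a connection object is present."""


class DBusNotConnectedError(RuntimeError):
    """Raised when a D-Bus call happens before connect()."""


def dbus_connected(method):
    """Only run the wrapped method if api.dbus is set."""
    def wrap_dbus(api, *args, **kwargs):
        if api.dbus is None:
            raise DBusNotConnectedError()
        return method(api, *args, **kwargs)
    return wrap_dbus


class ExampleInterface:
    """Hypothetical interface using the guard."""

    def __init__(self):
        self.dbus = None

    @dbus_connected
    def reboot(self):
        return "reboot requested"


iface = ExampleInterface()
try:
    iface.reboot()
except DBusNotConnectedError:
    print("not connected yet")

iface.dbus = object()          # pretend connect() succeeded
print(iface.reboot())          # 'reboot requested'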


@@ -2,6 +2,7 @@
from contextlib import suppress from contextlib import suppress
import logging import logging
import attr
import docker import docker
from .network import DockerNetwork from .network import DockerNetwork
@@ -9,8 +10,11 @@ from ..const import SOCKET_DOCKER
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
# pylint: disable=invalid-name
CommandReturn = attr.make_class('CommandReturn', ['exit_code', 'output'])
class DockerAPI(object):
class DockerAPI:
"""Docker hassio wrapper. """Docker hassio wrapper.
This class is not AsyncIO safe! This class is not AsyncIO safe!
@@ -20,7 +24,7 @@ class DockerAPI(object):
"""Initialize docker base wrapper.""" """Initialize docker base wrapper."""
self.docker = docker.DockerClient( self.docker = docker.DockerClient(
base_url="unix:/{}".format(str(SOCKET_DOCKER)), base_url="unix:/{}".format(str(SOCKET_DOCKER)),
version='auto', timeout=300) version='auto', timeout=900)
self.network = DockerNetwork(self.docker) self.network = DockerNetwork(self.docker)
@property @property
@@ -97,15 +101,15 @@ class DockerAPI(object):
) )
# wait until command is done # wait until command is done
exit_code = container.wait() result = container.wait()
output = container.logs(stdout=stdout, stderr=stderr) output = container.logs(stdout=stdout, stderr=stderr)
except docker.errors.DockerException as err: except docker.errors.DockerException as err:
_LOGGER.error("Can't execute command: %s", err) _LOGGER.error("Can't execute command: %s", err)
return (None, b"") return CommandReturn(None, b"")
# cleanup container # cleanup container
with suppress(docker.errors.DockerException): with suppress(docker.errors.DockerException):
container.remove(force=True) container.remove(force=True)
return (exit_code, output) return CommandReturn(result.get('StatusCode'), output)
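run_command now returns a small attrs-generated class instead of a bare tuple, so callers read result.exit_code and result.output by name. A sketch of that pattern:

"""Sketch: named return values via attr.make_class instead of a tuple."""
import attr

# Same shape as the CommandReturn used by DockerAPI.run_command
CommandReturn = attr.make_class('CommandReturn', ['exit_code', 'output'])


def run_command_ok():
    """Pretend the container finished successfully."""
    return CommandReturn(0, b"hello from container\n")


def run_command_failed():
    """Pretend the Docker call itself failed."""
    return CommandReturn(None, b"")


result = run_command_ok()
if result.exit_code == 0:
    print(result.output.decode().strip())

if run_command_failed().exit_code is None:
    print("docker error, no exit code available")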


@@ -6,11 +6,11 @@ import docker
import requests import requests
from .interface import DockerInterface from .interface import DockerInterface
from .utils import docker_process
from ..addons.build import AddonBuild from ..addons.build import AddonBuild
from ..const import ( from ..const import (
MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE, ENV_TOKEN, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE, ENV_TOKEN,
ENV_TIME) ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
from ..utils import process_lock
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -28,7 +28,7 @@ class DockerAddon(DockerInterface):
@property @property
def addon(self): def addon(self):
"""Return addon of docker image.""" """Return addon of docker image."""
return self._addons.get(self._id) return self.sys_addons.get(self._id)
@property @property
def image(self): def image(self):
@@ -52,7 +52,7 @@ class DockerAddon(DockerInterface):
"""Return arch of docker image.""" """Return arch of docker image."""
if not self.addon.legacy: if not self.addon.legacy:
return super().arch return super().arch
return self._arch return self.sys_arch
@property @property
def name(self): def name(self):
@@ -85,7 +85,7 @@ class DockerAddon(DockerInterface):
return { return {
**addon_env, **addon_env,
ENV_TIME: self._config.timezone, ENV_TIME: self.sys_config.timezone,
ENV_TOKEN: self.addon.uuid, ENV_TOKEN: self.addon.uuid,
} }
@@ -100,7 +100,7 @@ class DockerAddon(DockerInterface):
# Auto mapping UART devices # Auto mapping UART devices
if self.addon.auto_uart: if self.addon.auto_uart:
for device in self._hardware.serial_devices: for device in self.sys_hardware.serial_devices:
devices.append(f"{device}:{device}:rwm") devices.append(f"{device}:{device}:rwm")
# Return None if no devices is present # Return None if no devices is present
@@ -121,14 +121,21 @@ class DockerAddon(DockerInterface):
@property @property
def security_opt(self): def security_opt(self):
"""Controlling security opt.""" """Controlling security opt."""
privileged = self.addon.privileged or [] security = []
# Disable AppArmor sinse it make troubles wit SYS_ADMIN # AppArmor
if 'SYS_ADMIN' in privileged: if self.addon.apparmor == SECURITY_DISABLE:
return [ security.append("apparmor:unconfined")
"apparmor:unconfined", elif self.addon.apparmor == SECURITY_PROFILE:
] security.append(f"apparmor={self.addon.slug}")
return None
# Seccomp
if self.addon.seccomp == SECURITY_DISABLE:
security.append("seccomp=unconfined")
elif self.addon.seccomp == SECURITY_PROFILE:
security.append(f"seccomp={self.addon.path_seccomp}")
return security or None
@property @property
def tmpfs(self): def tmpfs(self):
@@ -142,8 +149,8 @@ class DockerAddon(DockerInterface):
def network_mapping(self): def network_mapping(self):
"""Return hosts mapping.""" """Return hosts mapping."""
return { return {
'homeassistant': self._docker.network.gateway, 'homeassistant': self.sys_docker.network.gateway,
'hassio': self._docker.network.supervisor, 'hassio': self.sys_docker.network.supervisor,
} }
@property @property
@@ -166,35 +173,35 @@ class DockerAddon(DockerInterface):
# setup config mappings # setup config mappings
if MAP_CONFIG in addon_mapping: if MAP_CONFIG in addon_mapping:
volumes.update({ volumes.update({
str(self._config.path_extern_config): { str(self.sys_config.path_extern_config): {
'bind': "/config", 'mode': addon_mapping[MAP_CONFIG] 'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
}}) }})
if MAP_SSL in addon_mapping: if MAP_SSL in addon_mapping:
volumes.update({ volumes.update({
str(self._config.path_extern_ssl): { str(self.sys_config.path_extern_ssl): {
'bind': "/ssl", 'mode': addon_mapping[MAP_SSL] 'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
}}) }})
if MAP_ADDONS in addon_mapping: if MAP_ADDONS in addon_mapping:
volumes.update({ volumes.update({
str(self._config.path_extern_addons_local): { str(self.sys_config.path_extern_addons_local): {
'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS] 'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
}}) }})
if MAP_BACKUP in addon_mapping: if MAP_BACKUP in addon_mapping:
volumes.update({ volumes.update({
str(self._config.path_extern_backup): { str(self.sys_config.path_extern_backup): {
'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP] 'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
}}) }})
if MAP_SHARE in addon_mapping: if MAP_SHARE in addon_mapping:
volumes.update({ volumes.update({
str(self._config.path_extern_share): { str(self.sys_config.path_extern_share): {
'bind': "/share", 'mode': addon_mapping[MAP_SHARE] 'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
}}) }})
# init other hardware mappings # Init other hardware mappings
if self.addon.with_gpio: if self.addon.with_gpio:
volumes.update({ volumes.update({
"/sys/class/gpio": { "/sys/class/gpio": {
@@ -205,13 +212,20 @@ class DockerAddon(DockerInterface):
}, },
}) })
# host dbus system # Host dbus system
if self.addon.host_dbus: if self.addon.host_dbus:
volumes.update({ volumes.update({
"/var/run/dbus": { "/var/run/dbus": {
'bind': "/var/run/dbus", 'mode': 'rw' 'bind': "/var/run/dbus", 'mode': 'rw'
}}) }})
# ALSA configuration
if self.addon.with_audio:
volumes.update({
str(self.addon.path_extern_asound): {
'bind': "/etc/asound.conf", 'mode': 'ro'
}})
return volumes return volumes
def _run(self): def _run(self):
@@ -225,7 +239,7 @@ class DockerAddon(DockerInterface):
# cleanup # cleanup
self._stop() self._stop()
ret = self._docker.run( ret = self.sys_docker.run(
self.image, self.image,
name=self.name, name=self.name,
hostname=self.hostname, hostname=self.hostname,
@@ -269,9 +283,13 @@ class DockerAddon(DockerInterface):
_LOGGER.info("Start build %s:%s", self.image, tag) _LOGGER.info("Start build %s:%s", self.image, tag)
try: try:
image = self._docker.images.build(**build_env.get_docker_args(tag)) image, log = self.sys_docker.images.build(
**build_env.get_docker_args(tag))
_LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
image.tag(self.image, tag='latest') image.tag(self.image, tag='latest')
# Update meta data
self._meta = image.attrs self._meta = image.attrs
except (docker.errors.DockerException) as err: except (docker.errors.DockerException) as err:
@@ -281,10 +299,10 @@ class DockerAddon(DockerInterface):
_LOGGER.info("Build %s:%s done", self.image, tag) _LOGGER.info("Build %s:%s done", self.image, tag)
return True return True
@docker_process @process_lock
def export_image(self, path): def export_image(self, path):
"""Export current images into a tar file.""" """Export current images into a tar file."""
return self._loop.run_in_executor(None, self._export_image, path) return self.sys_run_in_executor(self._export_image, path)
def _export_image(self, tar_file): def _export_image(self, tar_file):
"""Export current images into a tar file. """Export current images into a tar file.
@@ -292,26 +310,27 @@ class DockerAddon(DockerInterface):
Need run inside executor. Need run inside executor.
""" """
try: try:
image = self._docker.api.get_image(self.image) image = self.sys_docker.api.get_image(self.image)
except docker.errors.DockerException as err: except docker.errors.DockerException as err:
_LOGGER.error("Can't fetch image %s: %s", self.image, err) _LOGGER.error("Can't fetch image %s: %s", self.image, err)
return False return False
_LOGGER.info("Export image %s to %s", self.image, tar_file)
try: try:
with tar_file.open("wb") as write_tar: with tar_file.open("wb") as write_tar:
for chunk in image.stream(): for chunk in image:
write_tar.write(chunk) write_tar.write(chunk)
except (OSError, requests.exceptions.ReadTimeout) as err: except (OSError, requests.exceptions.ReadTimeout) as err:
_LOGGER.error("Can't write tar file %s: %s", tar_file, err) _LOGGER.error("Can't write tar file %s: %s", tar_file, err)
return False return False
_LOGGER.info("Export image %s to %s", self.image, tar_file) _LOGGER.info("Export image %s done", self.image)
return True return True
@docker_process @process_lock
def import_image(self, path, tag): def import_image(self, path, tag):
"""Import a tar file as image.""" """Import a tar file as image."""
return self._loop.run_in_executor(None, self._import_image, path, tag) return self.sys_run_in_executor(self._import_image, path, tag)
def _import_image(self, tar_file, tag): def _import_image(self, tar_file, tag):
"""Import a tar file as image. """Import a tar file as image.
@@ -320,9 +339,9 @@ class DockerAddon(DockerInterface):
""" """
try: try:
with tar_file.open("rb") as read_tar: with tar_file.open("rb") as read_tar:
self._docker.api.load_image(read_tar, quiet=True) self.sys_docker.api.load_image(read_tar, quiet=True)
image = self._docker.images.get(self.image) image = self.sys_docker.images.get(self.image)
image.tag(self.image, tag=tag) image.tag(self.image, tag=tag)
except (docker.errors.DockerException, OSError) as err: except (docker.errors.DockerException, OSError) as err:
_LOGGER.error("Can't import image %s: %s", self.image, err) _LOGGER.error("Can't import image %s: %s", self.image, err)
@@ -333,10 +352,10 @@ class DockerAddon(DockerInterface):
self._cleanup() self._cleanup()
return True return True
@docker_process @process_lock
def write_stdin(self, data): def write_stdin(self, data):
"""Write to add-on stdin.""" """Write to add-on stdin."""
return self._loop.run_in_executor(None, self._write_stdin, data) return self.sys_run_in_executor(self._write_stdin, data)
def _write_stdin(self, data): def _write_stdin(self, data):
"""Write to add-on stdin. """Write to add-on stdin.
@@ -348,7 +367,7 @@ class DockerAddon(DockerInterface):
try: try:
# load needed docker objects # load needed docker objects
container = self._docker.containers.get(self.name) container = self.sys_docker.containers.get(self.name)
socket = container.attach_socket(params={'stdin': 1, 'stream': 1}) socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
except docker.errors.DockerException as err: except docker.errors.DockerException as err:
_LOGGER.error("Can't attach to %s stdin: %s", self.name, err) _LOGGER.error("Can't attach to %s stdin: %s", self.name, err)


@@ -4,7 +4,7 @@ import logging
import docker import docker
from .interface import DockerInterface from .interface import DockerInterface
from ..const import ENV_TOKEN, ENV_TIME from ..const import ENV_TOKEN, ENV_TIME, LABEL_MACHINE
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -14,10 +14,17 @@ HASS_DOCKER_NAME = 'homeassistant'
class DockerHomeAssistant(DockerInterface): class DockerHomeAssistant(DockerInterface):
"""Docker hassio wrapper for HomeAssistant.""" """Docker hassio wrapper for HomeAssistant."""
@property
def machine(self):
"""Return machine of Home-Assistant docker image."""
if self._meta and LABEL_MACHINE in self._meta['Config']['Labels']:
return self._meta['Config']['Labels'][LABEL_MACHINE]
return None
@property @property
def image(self): def image(self):
"""Return name of docker image.""" """Return name of docker image."""
return self._homeassistant.image return self.sys_homeassistant.image
@property @property
def name(self): def name(self):
@@ -28,7 +35,7 @@ class DockerHomeAssistant(DockerInterface):
def devices(self): def devices(self):
"""Create list of special device to map into docker.""" """Create list of special device to map into docker."""
devices = [] devices = []
for device in self._hardware.serial_devices: for device in self.sys_hardware.serial_devices:
devices.append(f"{device}:{device}:rwm") devices.append(f"{device}:{device}:rwm")
return devices or None return devices or None
@@ -43,7 +50,7 @@ class DockerHomeAssistant(DockerInterface):
# cleanup # cleanup
self._stop() self._stop()
ret = self._docker.run( ret = self.sys_docker.run(
self.image, self.image,
name=self.name, name=self.name,
hostname=self.name, hostname=self.name,
@@ -53,16 +60,16 @@ class DockerHomeAssistant(DockerInterface):
devices=self.devices, devices=self.devices,
network_mode='host', network_mode='host',
environment={ environment={
'HASSIO': self._docker.network.supervisor, 'HASSIO': self.sys_docker.network.supervisor,
ENV_TIME: self._config.timezone, ENV_TIME: self.sys_config.timezone,
ENV_TOKEN: self._homeassistant.uuid, ENV_TOKEN: self.sys_homeassistant.uuid,
}, },
volumes={ volumes={
str(self._config.path_extern_config): str(self.sys_config.path_extern_config):
{'bind': '/config', 'mode': 'rw'}, {'bind': '/config', 'mode': 'rw'},
str(self._config.path_extern_ssl): str(self.sys_config.path_extern_ssl):
{'bind': '/ssl', 'mode': 'ro'}, {'bind': '/ssl', 'mode': 'ro'},
str(self._config.path_extern_share): str(self.sys_config.path_extern_share):
{'bind': '/share', 'mode': 'rw'}, {'bind': '/share', 'mode': 'rw'},
} }
) )
@@ -78,26 +85,26 @@ class DockerHomeAssistant(DockerInterface):
Need run inside executor. Need run inside executor.
""" """
return self._docker.run_command( return self.sys_docker.run_command(
self.image, self.image,
command, command,
detach=True, detach=True,
stdout=True, stdout=True,
stderr=True, stderr=True,
environment={ environment={
'TZ': self._config.timezone, ENV_TIME: self.sys_config.timezone,
}, },
volumes={ volumes={
str(self._config.path_extern_config): str(self.sys_config.path_extern_config):
{'bind': '/config', 'mode': 'ro'}, {'bind': '/config', 'mode': 'ro'},
str(self._config.path_extern_ssl): str(self.sys_config.path_extern_ssl):
{'bind': '/ssl', 'mode': 'ro'}, {'bind': '/ssl', 'mode': 'ro'},
} }
) )
def is_initialize(self): def is_initialize(self):
"""Return True if docker container exists.""" """Return True if docker container exists."""
return self._loop.run_in_executor(None, self._is_initialize) return self.sys_run_in_executor(self._is_initialize)
def _is_initialize(self): def _is_initialize(self):
"""Return True if docker container exists. """Return True if docker container exists.
@@ -105,7 +112,7 @@ class DockerHomeAssistant(DockerInterface):
Need run inside executor. Need run inside executor.
""" """
try: try:
self._docker.containers.get(self.name) self.sys_docker.containers.get(self.name)
except docker.errors.DockerException: except docker.errors.DockerException:
return False return False
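The new machine property above reads the io.hass.machine label from the Home Assistant image metadata. A sketch of reading such a label with docker-py; the image name is a placeholder and a local Docker daemon is assumed:

"""Sketch: read an image label with docker-py."""
import docker

LABEL_MACHINE = 'io.hass.machine'


def get_machine_label(image_name):
    """Return the machine label of an image, or None if absent."""
    client = docker.from_env()
    try:
        image = client.images.get(image_name)
    except docker.errors.ImageNotFound:
        return None

    # image.attrs mirrors `docker inspect`; labels live under Config
    labels = image.attrs['Config'].get('Labels') or {}
    return labels.get(LABEL_MACHINE)


print(get_machine_label('homeassistant/qemux86-64-homeassistant'))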


@@ -5,10 +5,10 @@ import logging
import docker import docker
from .utils import docker_process
from .stats import DockerStats from .stats import DockerStats
from ..const import LABEL_VERSION, LABEL_ARCH from ..const import LABEL_VERSION, LABEL_ARCH
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..utils import process_lock
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -20,7 +20,7 @@ class DockerInterface(CoreSysAttributes):
"""Initialize docker base wrapper.""" """Initialize docker base wrapper."""
self.coresys = coresys self.coresys = coresys
self._meta = None self._meta = None
self.lock = asyncio.Lock(loop=self._loop) self.lock = asyncio.Lock(loop=coresys.loop)
@property @property
def timeout(self): def timeout(self):
@@ -58,10 +58,10 @@ class DockerInterface(CoreSysAttributes):
"""Return True if a task is in progress.""" """Return True if a task is in progress."""
return self.lock.locked() return self.lock.locked()
@docker_process @process_lock
def install(self, tag): def install(self, tag):
"""Pull docker image.""" """Pull docker image."""
return self._loop.run_in_executor(None, self._install, tag) return self.sys_run_in_executor(self._install, tag)
def _install(self, tag): def _install(self, tag):
"""Pull docker image. """Pull docker image.
@@ -70,7 +70,7 @@ class DockerInterface(CoreSysAttributes):
""" """
try: try:
_LOGGER.info("Pull image %s tag %s.", self.image, tag) _LOGGER.info("Pull image %s tag %s.", self.image, tag)
image = self._docker.images.pull(f"{self.image}:{tag}") image = self.sys_docker.images.pull(f"{self.image}:{tag}")
image.tag(self.image, tag='latest') image.tag(self.image, tag='latest')
self._meta = image.attrs self._meta = image.attrs
@@ -83,7 +83,7 @@ class DockerInterface(CoreSysAttributes):
def exists(self): def exists(self):
"""Return True if docker image exists in local repo.""" """Return True if docker image exists in local repo."""
return self._loop.run_in_executor(None, self._exists) return self.sys_run_in_executor(self._exists)
def _exists(self): def _exists(self):
"""Return True if docker image exists in local repo. """Return True if docker image exists in local repo.
@@ -91,7 +91,7 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor. Need run inside executor.
""" """
try: try:
image = self._docker.images.get(self.image) image = self.sys_docker.images.get(self.image)
assert f"{self.image}:{self.version}" in image.tags assert f"{self.image}:{self.version}" in image.tags
except (docker.errors.DockerException, AssertionError): except (docker.errors.DockerException, AssertionError):
return False return False
@@ -103,7 +103,7 @@ class DockerInterface(CoreSysAttributes):
Return a Future. Return a Future.
""" """
return self._loop.run_in_executor(None, self._is_running) return self.sys_run_in_executor(self._is_running)
def _is_running(self): def _is_running(self):
"""Return True if docker is Running. """Return True if docker is Running.
@@ -111,8 +111,8 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor. Need run inside executor.
""" """
try: try:
container = self._docker.containers.get(self.name) container = self.sys_docker.containers.get(self.name)
image = self._docker.images.get(self.image) image = self.sys_docker.images.get(self.image)
except docker.errors.DockerException: except docker.errors.DockerException:
return False return False
@@ -126,10 +126,10 @@ class DockerInterface(CoreSysAttributes):
return True return True
@docker_process @process_lock
def attach(self): def attach(self):
"""Attach to running docker container.""" """Attach to running docker container."""
return self._loop.run_in_executor(None, self._attach) return self.sys_run_in_executor(self._attach)
def _attach(self): def _attach(self):
"""Attach to running docker container. """Attach to running docker container.
@@ -138,9 +138,9 @@ class DockerInterface(CoreSysAttributes):
""" """
try: try:
if self.image: if self.image:
self._meta = self._docker.images.get(self.image).attrs self._meta = self.sys_docker.images.get(self.image).attrs
else: else:
self._meta = self._docker.containers.get(self.name).attrs self._meta = self.sys_docker.containers.get(self.name).attrs
except docker.errors.DockerException: except docker.errors.DockerException:
return False return False
@@ -149,10 +149,10 @@ class DockerInterface(CoreSysAttributes):
return True return True
@docker_process @process_lock
def run(self): def run(self):
"""Run docker image.""" """Run docker image."""
return self._loop.run_in_executor(None, self._run) return self.sys_run_in_executor(self._run)
def _run(self): def _run(self):
"""Run docker image. """Run docker image.
@@ -161,10 +161,10 @@ class DockerInterface(CoreSysAttributes):
""" """
raise NotImplementedError() raise NotImplementedError()
@docker_process @process_lock
def stop(self): def stop(self):
"""Stop/remove docker container.""" """Stop/remove docker container."""
return self._loop.run_in_executor(None, self._stop) return self.sys_run_in_executor(self._stop)
def _stop(self): def _stop(self):
"""Stop/remove and remove docker container. """Stop/remove and remove docker container.
@@ -172,7 +172,7 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor. Need run inside executor.
""" """
try: try:
container = self._docker.containers.get(self.name) container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException: except docker.errors.DockerException:
return False return False
@@ -187,10 +187,10 @@ class DockerInterface(CoreSysAttributes):
return True return True
@docker_process @process_lock
def remove(self): def remove(self):
"""Remove docker images.""" """Remove docker images."""
return self._loop.run_in_executor(None, self._remove) return self.sys_run_in_executor(self._remove)
def _remove(self): def _remove(self):
"""remove docker images. """remove docker images.
@@ -205,11 +205,11 @@ class DockerInterface(CoreSysAttributes):
try: try:
with suppress(docker.errors.ImageNotFound): with suppress(docker.errors.ImageNotFound):
self._docker.images.remove( self.sys_docker.images.remove(
image=f"{self.image}:latest", force=True) image=f"{self.image}:latest", force=True)
with suppress(docker.errors.ImageNotFound): with suppress(docker.errors.ImageNotFound):
self._docker.images.remove( self.sys_docker.images.remove(
image=f"{self.image}:{self.version}", force=True) image=f"{self.image}:{self.version}", force=True)
except docker.errors.DockerException as err: except docker.errors.DockerException as err:
@@ -219,10 +219,10 @@ class DockerInterface(CoreSysAttributes):
self._meta = None self._meta = None
return True return True
@docker_process @process_lock
def update(self, tag): def update(self, tag):
"""Update a docker image.""" """Update a docker image."""
return self._loop.run_in_executor(None, self._update, tag) return self.sys_run_in_executor(self._update, tag)
def _update(self, tag): def _update(self, tag):
"""Update a docker image. """Update a docker image.
@@ -247,7 +247,7 @@ class DockerInterface(CoreSysAttributes):
Return a Future. Return a Future.
""" """
return self._loop.run_in_executor(None, self._logs) return self.sys_run_in_executor(self._logs)
def _logs(self): def _logs(self):
"""Return docker logs of container. """Return docker logs of container.
@@ -255,7 +255,7 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor. Need run inside executor.
""" """
try: try:
container = self._docker.containers.get(self.name) container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException: except docker.errors.DockerException:
return b"" return b""
@@ -264,35 +264,10 @@ class DockerInterface(CoreSysAttributes):
except docker.errors.DockerException as err: except docker.errors.DockerException as err:
_LOGGER.warning("Can't grap logs from %s: %s", self.image, err) _LOGGER.warning("Can't grap logs from %s: %s", self.image, err)
@docker_process @process_lock
def restart(self):
"""Restart docker container."""
return self._loop.run_in_executor(None, self._restart)
def _restart(self):
"""Restart docker container.
Need run inside executor.
"""
try:
container = self._docker.containers.get(self.name)
except docker.errors.DockerException:
return False
_LOGGER.info("Restart %s", self.image)
try:
container.restart(timeout=self.timeout)
except docker.errors.DockerException as err:
_LOGGER.warning("Can't restart %s: %s", self.image, err)
return False
return True
@docker_process
def cleanup(self): def cleanup(self):
"""Check if old version exists and cleanup.""" """Check if old version exists and cleanup."""
return self._loop.run_in_executor(None, self._cleanup) return self.sys_run_in_executor(self._cleanup)
def _cleanup(self): def _cleanup(self):
"""Check if old version exists and cleanup. """Check if old version exists and cleanup.
@@ -300,25 +275,25 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor. Need run inside executor.
""" """
try: try:
latest = self._docker.images.get(self.image) latest = self.sys_docker.images.get(self.image)
except docker.errors.DockerException: except docker.errors.DockerException:
_LOGGER.warning("Can't find %s for cleanup", self.image) _LOGGER.warning("Can't find %s for cleanup", self.image)
return False return False
for image in self._docker.images.list(name=self.image): for image in self.sys_docker.images.list(name=self.image):
if latest.id == image.id: if latest.id == image.id:
continue continue
with suppress(docker.errors.DockerException): with suppress(docker.errors.DockerException):
_LOGGER.info("Cleanup docker images: %s", image.tags) _LOGGER.info("Cleanup docker images: %s", image.tags)
self._docker.images.remove(image.id, force=True) self.sys_docker.images.remove(image.id, force=True)
return True return True
@docker_process @process_lock
def execute_command(self, command): def execute_command(self, command):
"""Create a temporary container and run command.""" """Create a temporary container and run command."""
return self._loop.run_in_executor(None, self._execute_command, command) return self.sys_run_in_executor(self._execute_command, command)
def _execute_command(self, command): def _execute_command(self, command):
"""Create a temporary container and run command. """Create a temporary container and run command.
@@ -329,7 +304,7 @@ class DockerInterface(CoreSysAttributes):
def stats(self): def stats(self):
"""Read and return stats from container.""" """Read and return stats from container."""
return self._loop.run_in_executor(None, self._stats) return self.sys_run_in_executor(self._stats)
def _stats(self): def _stats(self):
"""Create a temporary container and run command. """Create a temporary container and run command.
@@ -337,7 +312,7 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor. Need run inside executor.
""" """
try: try:
container = self._docker.containers.get(self.name) container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException: except docker.errors.DockerException:
return None return None
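Throughout this file, direct access like self._docker and self._loop.run_in_executor(...) is replaced by sys_-prefixed helpers on CoreSysAttributes. The coresys.py change itself is not part of this excerpt, so the following is only a minimal sketch of what those helpers presumably look like, assuming a CoreSys container exposing docker and loop:

class CoreSysAttributes:
    """Mixin exposing shared CoreSys objects via sys_* helpers (sketch)."""

    coresys = None  # set by each subclass in __init__

    @property
    def sys_docker(self):
        """Return the shared Docker API wrapper."""
        return self.coresys.docker

    def sys_run_in_executor(self, funct, *args):
        """Run a blocking function in the default executor."""
        return self.coresys.loop.run_in_executor(None, funct, *args)

    def sys_create_task(self, coroutine):
        """Schedule a coroutine on the shared event loop."""
        return self.coresys.loop.create_task(coroutine)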


@@ -8,7 +8,7 @@ from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
class DockerNetwork(object): class DockerNetwork:
"""Internal HassIO Network. """Internal HassIO Network.
This class is not AsyncIO safe! This class is not AsyncIO safe!


@@ -2,7 +2,7 @@
from contextlib import suppress from contextlib import suppress
class DockerStats(object): class DockerStats:
"""Hold stats data from container inside.""" """Hold stats data from container inside."""
def __init__(self, stats): def __init__(self, stats):


@@ -24,7 +24,7 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes):
Need run inside executor. Need run inside executor.
""" """
try: try:
container = self._docker.containers.get(self.name) container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException: except docker.errors.DockerException:
return False return False
@@ -33,9 +33,10 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes):
self.image, self.version) self.image, self.version)
# if already attach # if already attach
if container in self._docker.network.containers: if container in self.sys_docker.network.containers:
return True return True
# attach to network # attach to network
return self._docker.network.attach_container( return self.sys_docker.network.attach_container(
container, alias=['hassio'], ipv4=self._docker.network.supervisor) container, alias=['hassio'],
ipv4=self.sys_docker.network.supervisor)


@@ -1,20 +0,0 @@
"""HassIO docker utilitys."""
import logging
_LOGGER = logging.getLogger(__name__)
# pylint: disable=protected-access
def docker_process(method):
"""Wrap function with only run once."""
async def wrap_api(api, *args, **kwargs):
"""Return api wrapper."""
if api.lock.locked():
_LOGGER.error(
"Can't excute %s while a task is in progress", method.__name__)
return False
async with api.lock:
return await method(api, *args, **kwargs)
return wrap_api
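The docker_process decorator above is deleted and its callers switch to process_lock, imported from ..utils elsewhere in this changeset. Its implementation is not shown in this diff; a plausible sketch, mirroring the removed wrapper and the self.lock attribute added to HomeAssistant below:

import logging
from functools import wraps

_LOGGER = logging.getLogger(__name__)


def process_lock(method):
    """Wrap a coroutine so only one locked task runs per object (sketch)."""
    @wraps(method)
    async def wrap_api(api, *args, **kwargs):
        """Return api wrapper."""
        if api.lock.locked():
            _LOGGER.error(
                "Can't execute %s while a task is in progress", method.__name__)
            return False
        async with api.lock:
            return await method(api, *args, **kwargs)
    return wrap_api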

hassio/exceptions.py (new file, 49 lines)

@@ -0,0 +1,49 @@
"""Core Exceptions."""
class HassioError(Exception):
"""Root exception."""
pass
class HassioInternalError(HassioError):
"""Internal Hass.io error they can't handle."""
pass
class HassioNotSupportedError(HassioError):
"""Function is not supported."""
pass
# Host
class HostError(HassioError):
"""Internal Host error."""
pass
class HostNotSupportedError(HassioNotSupportedError):
"""Host function is not supprted."""
pass
# utils/gdbus
class DBusError(HassioError):
"""DBus generic error."""
pass
class DBusNotConnectedError(HostNotSupportedError):
"""DBus is not connected and call a method."""
class DBusFatalError(DBusError):
"""DBus call going wrong."""
pass
class DBusParseError(DBusError):
"""DBus parse error."""
pass
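An illustrative way to use the new hierarchy: because DBusNotConnectedError derives from HostNotSupportedError, and everything derives from HassioError, callers can catch at whatever granularity they need. The coresys wiring in this snippet is assumed, not shown in the diff:

from hassio.exceptions import HassioError, HostNotSupportedError


async def try_reboot(coresys):
    """Reboot the host, downgrading 'not supported' to a log message."""
    try:
        await coresys.host.control.reboot()
    except HostNotSupportedError:
        # Also covers DBusNotConnectedError raised deeper in the dbus layer.
        print("Host reboot is not available on this system")
    except HassioError:
        print("Reboot failed")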


@@ -8,14 +8,15 @@ import time
import aiohttp import aiohttp
from aiohttp.hdrs import CONTENT_TYPE from aiohttp.hdrs import CONTENT_TYPE
import attr
from .const import ( from .const import (
FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_UUID, FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_UUID,
ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
ATTR_STARTUP_TIME, HEADER_HA_ACCESS, CONTENT_TYPE_JSON) ATTR_WAIT_BOOT, HEADER_HA_ACCESS, CONTENT_TYPE_JSON)
from .coresys import CoreSysAttributes from .coresys import CoreSysAttributes
from .docker.homeassistant import DockerHomeAssistant from .docker.homeassistant import DockerHomeAssistant
from .utils import convert_to_ascii from .utils import convert_to_ascii, process_lock
from .utils.json import JsonConfig from .utils.json import JsonConfig
from .validate import SCHEMA_HASS_CONFIG from .validate import SCHEMA_HASS_CONFIG
@@ -23,6 +24,9 @@ _LOGGER = logging.getLogger(__name__)
RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml") RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")
# pylint: disable=invalid-name
ConfigResult = attr.make_class('ConfigResult', ['valid', 'log'])
class HomeAssistant(JsonConfig, CoreSysAttributes): class HomeAssistant(JsonConfig, CoreSysAttributes):
"""Hass core object for handle it.""" """Hass core object for handle it."""
@@ -32,6 +36,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG) super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
self.coresys = coresys self.coresys = coresys
self.instance = DockerHomeAssistant(coresys) self.instance = DockerHomeAssistant(coresys)
self.lock = asyncio.Lock(loop=coresys.loop)
async def load(self): async def load(self):
"""Prepare HomeAssistant object.""" """Prepare HomeAssistant object."""
@@ -41,10 +46,15 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
_LOGGER.info("No HomeAssistant docker %s found.", self.image) _LOGGER.info("No HomeAssistant docker %s found.", self.image)
await self.install_landingpage() await self.install_landingpage()
@property
def machine(self):
"""Return System Machines."""
return self.instance.machine
@property @property
def api_ip(self): def api_ip(self):
"""Return IP of HomeAssistant instance.""" """Return IP of HomeAssistant instance."""
return self._docker.network.gateway return self.sys_docker.network.gateway
@property @property
def api_port(self): def api_port(self):
@@ -94,14 +104,14 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
self._data[ATTR_WATCHDOG] = value self._data[ATTR_WATCHDOG] = value
@property @property
def startup_time(self): def wait_boot(self):
"""Return time to wait for Home-Assistant startup.""" """Return time to wait for Home-Assistant startup."""
return self._data[ATTR_STARTUP_TIME] return self._data[ATTR_WAIT_BOOT]
@startup_time.setter @wait_boot.setter
def startup_time(self, value): def wait_boot(self, value):
"""Set time to wait for Home-Assistant startup.""" """Set time to wait for Home-Assistant startup."""
self._data[ATTR_STARTUP_TIME] = value self._data[ATTR_WAIT_BOOT] = value
@property @property
def version(self): def version(self):
@@ -113,7 +123,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
"""Return last available version of homeassistant.""" """Return last available version of homeassistant."""
if self.is_custom_image: if self.is_custom_image:
return self._data.get(ATTR_LAST_VERSION) return self._data.get(ATTR_LAST_VERSION)
return self._updater.version_homeassistant return self.sys_updater.version_homeassistant
@last_version.setter @last_version.setter
def last_version(self, value): def last_version(self, value):
@@ -159,6 +169,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
"""Return a UUID of this HomeAssistant.""" """Return a UUID of this HomeAssistant."""
return self._data[ATTR_UUID] return self._data[ATTR_UUID]
@process_lock
async def install_landingpage(self): async def install_landingpage(self):
"""Install a landingpage.""" """Install a landingpage."""
_LOGGER.info("Setup HomeAssistant landingpage") _LOGGER.info("Setup HomeAssistant landingpage")
@@ -166,31 +177,33 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
if await self.instance.install('landingpage'): if await self.instance.install('landingpage'):
break break
_LOGGER.warning("Fails install landingpage, retry after 60sec") _LOGGER.warning("Fails install landingpage, retry after 60sec")
await asyncio.sleep(60, loop=self._loop) await asyncio.sleep(60)
# Run landingpage after installation # Run landingpage after installation
await self.start() await self._start()
@process_lock
async def install(self): async def install(self):
"""Install a landingpage.""" """Install a landingpage."""
_LOGGER.info("Setup HomeAssistant") _LOGGER.info("Setup HomeAssistant")
while True: while True:
# read homeassistant tag and install it # read homeassistant tag and install it
if not self.last_version: if not self.last_version:
await self._updater.reload() await self.sys_updater.reload()
tag = self.last_version tag = self.last_version
if tag and await self.instance.install(tag): if tag and await self.instance.install(tag):
break break
_LOGGER.warning("Error on install HomeAssistant. Retry in 60sec") _LOGGER.warning("Error on install HomeAssistant. Retry in 60sec")
await asyncio.sleep(60, loop=self._loop) await asyncio.sleep(60)
# finishing # finishing
_LOGGER.info("HomeAssistant docker now installed") _LOGGER.info("HomeAssistant docker now installed")
if self.boot: if self.boot:
await self.start() await self._start()
await self.instance.cleanup() await self.instance.cleanup()
@process_lock
async def update(self, version=None): async def update(self, version=None):
"""Update HomeAssistant version.""" """Update HomeAssistant version."""
version = version or self.last_version version = version or self.last_version
@@ -205,15 +218,23 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
return await self.instance.update(version) return await self.instance.update(version)
finally: finally:
if running: if running:
await self.start() await self._start()
async def start(self): async def _start(self):
"""Run HomeAssistant docker.""" """Start HomeAssistant docker & wait."""
if not await self.instance.run(): if not await self.instance.run():
return False return False
return await self._block_till_run() return await self._block_till_run()
@process_lock
def start(self):
"""Run HomeAssistant docker.
Return a coroutine.
"""
return self._start()
@process_lock
def stop(self): def stop(self):
"""Stop HomeAssistant docker. """Stop HomeAssistant docker.
@@ -221,12 +242,11 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
""" """
return self.instance.stop() return self.instance.stop()
@process_lock
async def restart(self): async def restart(self):
"""Restart HomeAssistant docker.""" """Restart HomeAssistant docker."""
if not await self.instance.restart(): await self.instance.stop()
return False return await self._start()
return await self._block_till_run()
def logs(self): def logs(self):
"""Get HomeAssistant docker logs. """Get HomeAssistant docker logs.
@@ -259,23 +279,23 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
@property @property
def in_progress(self): def in_progress(self):
"""Return True if a task is in progress.""" """Return True if a task is in progress."""
return self.instance.in_progress return self.instance.in_progress or self.lock.locked()
async def check_config(self): async def check_config(self):
"""Run homeassistant config check.""" """Run homeassistant config check."""
exit_code, log = await self.instance.execute_command( result = await self.instance.execute_command(
"python3 -m homeassistant -c /config --script check_config" "python3 -m homeassistant -c /config --script check_config"
) )
# if not valid # if not valid
if exit_code is None: if result.exit_code is None:
return (False, "") return ConfigResult(False, "")
# parse output # parse output
log = convert_to_ascii(log) log = convert_to_ascii(result.output)
if exit_code != 0 or RE_YAML_ERROR.search(log): if result.exit_code != 0 or RE_YAML_ERROR.search(log):
return (False, log) return ConfigResult(False, log)
return (True, log) return ConfigResult(True, log)
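check_config() now returns a small attrs-generated ConfigResult(valid, log) object instead of a bare tuple, so callers read named fields. A hedged consumer sketch (the coresys handle is assumed):

async def ensure_valid_config(coresys):
    """Abort an update early when the config check fails (illustrative)."""
    result = await coresys.homeassistant.check_config()
    if not result.valid:
        raise RuntimeError(f"Invalid Home Assistant configuration:\n{result.log}")
    return result.log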
async def check_api_state(self): async def check_api_state(self):
"""Check if Home-Assistant up and running.""" """Check if Home-Assistant up and running."""
@@ -287,7 +307,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
try: try:
# pylint: disable=bad-continuation # pylint: disable=bad-continuation
async with self._websession_ssl.get( async with self.sys_websession_ssl.get(
url, headers=header, timeout=30) as request: url, headers=header, timeout=30) as request:
status = request.status status = request.status
@@ -308,7 +328,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
try: try:
# pylint: disable=bad-continuation # pylint: disable=bad-continuation
async with self._websession_ssl.post( async with self.sys_websession_ssl.post(
url, headers=header, timeout=30, url, headers=header, timeout=30,
json=event_data) as request: json=event_data) as request:
status = request.status status = request.status
@@ -340,11 +360,11 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
except OSError: except OSError:
pass pass
while time.monotonic() - start_time < self.startup_time: while time.monotonic() - start_time < self.wait_boot:
if await self._loop.run_in_executor(None, check_port): if await self.sys_run_in_executor(check_port):
_LOGGER.info("Detect a running Home-Assistant instance") _LOGGER.info("Detect a running Home-Assistant instance")
return True return True
await asyncio.sleep(10, loop=self._loop) await asyncio.sleep(10)
_LOGGER.warning("Don't wait anymore of Home-Assistant startup!") _LOGGER.warning("Don't wait anymore of Home-Assistant startup!")
return False return False

hassio/host/__init__.py (new file, 58 lines)

@@ -0,0 +1,58 @@
"""Host function like audio/dbus/systemd."""
from .alsa import AlsaAudio
from .control import SystemControl
from .info import InfoCenter
from ..const import FEATURES_REBOOT, FEATURES_SHUTDOWN, FEATURES_HOSTNAME
from ..coresys import CoreSysAttributes
class HostManager(CoreSysAttributes):
"""Manage supported function from host."""
def __init__(self, coresys):
"""Initialize Host manager."""
self.coresys = coresys
self._alsa = AlsaAudio(coresys)
self._control = SystemControl(coresys)
self._info = InfoCenter(coresys)
@property
def alsa(self):
"""Return host ALSA handler."""
return self._alsa
@property
def control(self):
"""Return host control handler."""
return self._control
@property
def info(self):
"""Return host info handler."""
return self._info
@property
def supperted_features(self):
"""Return a list of supported host features."""
features = []
if self.sys_dbus.systemd.is_connected:
features.extend([
FEATURES_REBOOT,
FEATURES_SHUTDOWN,
])
if self.sys_dbus.hostname.is_connected:
features.append(FEATURES_HOSTNAME)
return features
async def load(self):
"""Load host functions."""
if self.sys_dbus.hostname.is_connected:
await self.info.update()
def reload(self):
"""Reload host information."""
return self.load()
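A hedged sketch of how the new HostManager is consumed, e.g. by an API handler. It assumes coresys.host is an instance of this class; FEATURES_REBOOT and HostNotSupportedError come from the imports shown in this changeset:

from hassio.const import FEATURES_REBOOT
from hassio.exceptions import HostNotSupportedError


async def reboot_if_supported(coresys):
    """Reboot through HostManager only when systemd reports the capability."""
    # Note: the property is spelled 'supperted_features' in this changeset.
    if FEATURES_REBOOT not in coresys.host.supperted_features:
        raise HostNotSupportedError()
    await coresys.host.control.reboot()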

hassio/host/alsa.py (new file, 137 lines)

@@ -0,0 +1,137 @@
"""Host Audio-support."""
import logging
import json
from pathlib import Path
from string import Template
import attr
from ..const import ATTR_INPUT, ATTR_OUTPUT, ATTR_DEVICES, ATTR_NAME
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
# pylint: disable=invalid-name
DefaultConfig = attr.make_class('DefaultConfig', ['input', 'output'])
class AlsaAudio(CoreSysAttributes):
"""Handle Audio ALSA host data."""
def __init__(self, coresys):
"""Initialize Alsa audio system."""
self.coresys = coresys
self._data = {
ATTR_INPUT: {},
ATTR_OUTPUT: {},
}
self._cache = 0
self._default = None
@property
def input_devices(self):
"""Return list of ALSA input devices."""
self._update_device()
return self._data[ATTR_INPUT]
@property
def output_devices(self):
"""Return list of ALSA output devices."""
self._update_device()
return self._data[ATTR_OUTPUT]
def _update_device(self):
"""Update Internal device DB."""
current_id = hash(frozenset(self.sys_hardware.audio_devices))
# Need rebuild?
if current_id == self._cache:
return
# Clean old stuff
self._data[ATTR_INPUT].clear()
self._data[ATTR_OUTPUT].clear()
# Init database
_LOGGER.info("Update ALSA device list")
database = self._audio_database()
# Process devices
for dev_id, dev_data in self.sys_hardware.audio_devices.items():
for chan_id, chan_type in dev_data[ATTR_DEVICES].items():
alsa_id = f"{dev_id},{chan_id}"
dev_name = dev_data[ATTR_NAME]
# Lookup type
if chan_type.endswith('playback'):
key = ATTR_OUTPUT
elif chan_type.endswith('capture'):
key = ATTR_INPUT
else:
_LOGGER.warning("Unknown channel type: %s", chan_type)
continue
# Use name from DB or a generic name
self._data[key][alsa_id] = database.get(
self.sys_machine, {}).get(
dev_name, {}).get(alsa_id, f"{dev_name}: {chan_id}")
self._cache = current_id
@staticmethod
def _audio_database():
"""Read local json audio data into dict."""
json_file = Path(__file__).parent.joinpath('audiodb.json')
try:
# pylint: disable=no-member
with json_file.open('r') as database:
return json.loads(database.read())
except (ValueError, OSError) as err:
_LOGGER.warning("Can't read audio DB: %s", err)
return {}
@property
def default(self):
"""Generate ALSA default setting."""
# Init defaults
if self._default is None:
database = self._audio_database()
alsa_input = database.get(self.sys_machine, {}).get(ATTR_INPUT)
alsa_output = database.get(self.sys_machine, {}).get(ATTR_OUTPUT)
self._default = DefaultConfig(alsa_input, alsa_output)
# Search exists/new output
if self._default.output is None and self.output_devices:
self._default.output = next(iter(self.output_devices))
_LOGGER.info("Detect output device %s", self._default.output)
# Search exists/new input
if self._default.input is None and self.input_devices:
self._default.input = next(iter(self.input_devices))
_LOGGER.info("Detect input device %s", self._default.input)
return self._default
def asound(self, alsa_input=None, alsa_output=None):
"""Generate a asound data."""
alsa_input = alsa_input or self.default.input
alsa_output = alsa_output or self.default.output
# Read Template
asound_file = Path(__file__).parent.joinpath('asound.tmpl')
try:
# pylint: disable=no-member
with asound_file.open('r') as asound:
asound_data = asound.read()
except OSError as err:
_LOGGER.error("Can't read asound.tmpl: %s", err)
return ""
# Process Template
asound_template = Template(asound_data)
return asound_template.safe_substitute(
input=alsa_input, output=alsa_output
)

hassio/host/asound.tmpl (new file, 17 lines)

@@ -0,0 +1,17 @@
pcm.!default {
type asym
capture.pcm "mic"
playback.pcm "speaker"
}
pcm.mic {
type plug
slave {
pcm "hw:$input"
}
}
pcm.speaker {
type plug
slave {
pcm "hw:$output"
}
}

hassio/host/audiodb.json (new file, 18 lines)

@@ -0,0 +1,18 @@
{
"raspberrypi3": {
"bcm2835 - bcm2835 ALSA": {
"0,0": "Raspberry Jack",
"0,1": "Raspberry HDMI"
},
"output": "0,0",
"input": null
},
"raspberrypi2": {
"output": "0,0",
"input": null
},
"raspberrypi": {
"output": "0,0",
"input": null
}
}
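Putting alsa.py, asound.tmpl and audiodb.json together: the handler picks default input/output ids per machine, maps them to friendly names from the database, and renders an asound configuration by substituting $input/$output in the template. An illustrative caller, with the device id taken from the raspberrypi3 entry above and the coresys handle assumed:

def render_asound(coresys, hdmi=False):
    """Return asound.conf content, optionally forcing the Pi 3 HDMI output."""
    if hdmi:
        # "0,1" maps to "Raspberry HDMI" in audiodb.json above.
        return coresys.host.alsa.asound(alsa_output="0,1")
    return coresys.host.alsa.asound()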

hassio/host/control.py (new file, 51 lines)

@@ -0,0 +1,51 @@
"""Power control for host."""
import logging
from ..coresys import CoreSysAttributes
from ..exceptions import HostNotSupportedError
_LOGGER = logging.getLogger(__name__)
class SystemControl(CoreSysAttributes):
"""Handle host power controls."""
def __init__(self, coresys):
"""Initialize host power handling."""
self.coresys = coresys
def _check_systemd(self):
"""Check if systemd is connect or raise error."""
if not self.sys_dbus.systemd.is_connected:
_LOGGER.error("No systemd dbus connection available")
raise HostNotSupportedError()
async def reboot(self):
"""Reboot host system."""
self._check_systemd()
_LOGGER.info("Initialize host reboot over systemd")
try:
await self.sys_core.shutdown()
finally:
await self.sys_dbus.systemd.reboot()
async def shutdown(self):
"""Shutdown host system."""
self._check_systemd()
_LOGGER.info("Initialize host power off over systemd")
try:
await self.sys_core.shutdown()
finally:
await self.sys_dbus.systemd.power_off()
async def set_hostname(self, hostname):
"""Set local a new Hostname."""
if not self.sys_dbus.systemd.is_connected:
_LOGGER.error("No hostname dbus connection available")
raise HostNotSupportedError()
_LOGGER.info("Set Hostname %s", hostname)
await self.sys_dbus.hostname.set_static_hostname(hostname)
await self.sys_host.info.update()
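An illustrative flow for the new hostname control: the call goes out over DBus and set_hostname() finishes with info.update(), so the cached host properties reflect the change afterwards. The coresys wiring and the hostname value are assumptions:

async def rename_host(coresys, new_name="hassio-livingroom"):
    """Set a new static hostname and return the refreshed value."""
    await coresys.host.control.set_hostname(new_name)
    return coresys.host.info.hostname  # re-read from the updated property cache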

hassio/host/info.py (new file, 58 lines)

@@ -0,0 +1,58 @@
"""Power control for host."""
import logging
from ..coresys import CoreSysAttributes
from ..exceptions import HassioError, HostNotSupportedError
_LOGGER = logging.getLogger(__name__)
class InfoCenter(CoreSysAttributes):
"""Handle local system information controls."""
def __init__(self, coresys):
"""Initialize system center handling."""
self.coresys = coresys
self._data = {}
@property
def hostname(self):
"""Return local hostname."""
return self._data.get('StaticHostname') or None
@property
def chassis(self):
"""Return local chassis type."""
return self._data.get('Chassis') or None
@property
def deployment(self):
"""Return local deployment type."""
return self._data.get('Deployment') or None
@property
def kernel(self):
"""Return local kernel version."""
return self._data.get('KernelRelease') or None
@property
def operating_system(self):
"""Return local operating system."""
return self._data.get('OperatingSystemPrettyName') or None
@property
def cpe(self):
"""Return local CPE."""
return self._data.get('OperatingSystemCPEName') or None
async def update(self):
"""Update properties over dbus."""
if not self.sys_dbus.systemd.is_connected:
_LOGGER.error("No hostname dbus connection available")
raise HostNotSupportedError()
_LOGGER.info("Update local host information")
try:
self._data = await self.sys_dbus.hostname.get_properties()
except HassioError:
_LOGGER.warning("Can't update host system information!")


@@ -8,7 +8,7 @@ _LOGGER = logging.getLogger(__name__)
COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53" COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"
class DNSForward(object): class DNSForward:
"""Manage DNS forwarding to internal DNS.""" """Manage DNS forwarding to internal DNS."""
def __init__(self, loop): def __init__(self, loop):


@@ -23,7 +23,7 @@ GPIO_DEVICES = Path("/sys/class/gpio")
RE_TTY = re.compile(r"tty[A-Z]+") RE_TTY = re.compile(r"tty[A-Z]+")
class Hardware(object): class Hardware:
"""Represent a interface to procfs, sysfs and udev.""" """Represent a interface to procfs, sysfs and udev."""
def __init__(self): def __init__(self):
@@ -63,6 +63,10 @@ class Hardware(object):
@property @property
def audio_devices(self): def audio_devices(self):
"""Return all available audio interfaces.""" """Return all available audio interfaces."""
if not ASOUND_CARDS.exists():
_LOGGER.info("No audio devices found")
return {}
try: try:
with ASOUND_CARDS.open('r') as cards_file: with ASOUND_CARDS.open('r') as cards_file:
cards = cards_file.read() cards = cards_file.read()


@@ -1,124 +0,0 @@
"""Host control for HassIO."""
import asyncio
import json
import logging
import async_timeout
from ..const import (
SOCKET_HC, ATTR_LAST_VERSION, ATTR_VERSION, ATTR_TYPE, ATTR_FEATURES,
ATTR_HOSTNAME, ATTR_OS)
_LOGGER = logging.getLogger(__name__)
TIMEOUT = 15
UNKNOWN = 'unknown'
FEATURES_SHUTDOWN = 'shutdown'
FEATURES_REBOOT = 'reboot'
FEATURES_UPDATE = 'update'
FEATURES_HOSTNAME = 'hostname'
FEATURES_NETWORK_INFO = 'network_info'
FEATURES_NETWORK_CONTROL = 'network_control'
class HostControl(object):
"""Client for host control."""
def __init__(self, loop):
"""Initialize HostControl socket client."""
self.loop = loop
self.active = False
self.version = UNKNOWN
self.last_version = UNKNOWN
self.type = UNKNOWN
self.features = []
self.hostname = UNKNOWN
self.os_info = UNKNOWN
if SOCKET_HC.is_socket():
self.active = True
async def _send_command(self, command):
"""Send command to host.
Is a coroutine.
"""
if not self.active:
return
reader, writer = await asyncio.open_unix_connection(
str(SOCKET_HC), loop=self.loop)
try:
# send
_LOGGER.info("Send '%s' to HostControl.", command)
with async_timeout.timeout(TIMEOUT, loop=self.loop):
writer.write("{}\n".format(command).encode())
data = await reader.readline()
response = data.decode().rstrip()
_LOGGER.info("Receive from HostControl: %s.", response)
if response == "OK":
return True
elif response == "ERROR":
return False
elif response == "WRONG":
return None
else:
try:
return json.loads(response)
except json.JSONDecodeError:
_LOGGER.warning("Json parse error from HostControl '%s'.",
response)
except asyncio.TimeoutError:
_LOGGER.error("Timeout from HostControl!")
finally:
writer.close()
async def load(self):
"""Load Info from host.
Return a coroutine.
"""
info = await self._send_command("info")
if not info:
return
self.version = info.get(ATTR_VERSION, UNKNOWN)
self.last_version = info.get(ATTR_LAST_VERSION, UNKNOWN)
self.type = info.get(ATTR_TYPE, UNKNOWN)
self.features = info.get(ATTR_FEATURES, [])
self.hostname = info.get(ATTR_HOSTNAME, UNKNOWN)
self.os_info = info.get(ATTR_OS, UNKNOWN)
def reboot(self):
"""Reboot the host system.
Return a coroutine.
"""
return self._send_command("reboot")
def shutdown(self):
"""Shutdown the host system.
Return a coroutine.
"""
return self._send_command("shutdown")
def update(self, version=None):
"""Update the host system.
Return a coroutine.
"""
if version:
return self._send_command("update {}".format(version))
return self._send_command("update")
def set_hostname(self, hostname):
"""Update hostname on host."""
return self._send_command("hostname {}".format(hostname))


@@ -10,7 +10,7 @@ CALL = 'callback'
TASK = 'task' TASK = 'task'
class Scheduler(object): class Scheduler:
"""Schedule task inside HassIO.""" """Schedule task inside HassIO."""
def __init__(self, loop): def __init__(self, loop):


@@ -1,8 +1,8 @@
"""Handle internal services discovery.""" """Handle internal services discovery."""
from .discovery import Discovery # noqa
from .mqtt import MQTTService from .mqtt import MQTTService
from .data import ServicesData from .data import ServicesData
from .discovery import Discovery
from ..const import SERVICE_MQTT from ..const import SERVICE_MQTT
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
@@ -19,7 +19,6 @@ class ServiceManager(CoreSysAttributes):
"""Initialize Services handler.""" """Initialize Services handler."""
self.coresys = coresys self.coresys = coresys
self.data = ServicesData() self.data = ServicesData()
self.discovery = Discovery(coresys)
self.services_obj = {} self.services_obj = {}
@property @property
@@ -37,9 +36,9 @@ class ServiceManager(CoreSysAttributes):
self.services_obj[slug] = service(self.coresys) self.services_obj[slug] = service(self.coresys)
# Read exists discovery messages # Read exists discovery messages
self.discovery.load() self.sys_discovery.load()
def reset(self): def reset(self):
"""Reset available data.""" """Reset available data."""
self.data.reset_data() self.data.reset_data()
self.discovery.load() self.sys_discovery.load()


@@ -36,7 +36,7 @@ class Discovery(CoreSysAttributes):
self._data.clear() self._data.clear()
self._data.extend(messages) self._data.extend(messages)
self._services.data.save_data() self.sys_services.data.save_data()
def get(self, uuid): def get(self, uuid):
"""Return discovery message.""" """Return discovery message."""
@@ -45,7 +45,7 @@ class Discovery(CoreSysAttributes):
@property @property
def _data(self): def _data(self):
"""Return discovery data.""" """Return discovery data."""
return self._services.data.discovery return self.sys_services.data.discovery
@property @property
def list_messages(self): def list_messages(self):
@@ -69,7 +69,7 @@ class Discovery(CoreSysAttributes):
self.save() self.save()
# send event to Home-Assistant # send event to Home-Assistant
self._loop.create_task(self._homeassistant.send_event( self.sys_create_task(self.sys_homeassistant.send_event(
EVENT_DISCOVERY_ADD, {ATTR_UUID: message.uuid})) EVENT_DISCOVERY_ADD, {ATTR_UUID: message.uuid}))
return message return message
@@ -80,11 +80,11 @@ class Discovery(CoreSysAttributes):
self.save() self.save()
# send event to Home-Assistant # send event to Home-Assistant
self._loop.create_task(self._homeassistant.send_event( self.sys_create_task(self.sys_homeassistant.send_event(
EVENT_DISCOVERY_DEL, {ATTR_UUID: message.uuid})) EVENT_DISCOVERY_DEL, {ATTR_UUID: message.uuid}))
class Message(object): class Message:
"""Represent a single Discovery message.""" """Represent a single Discovery message."""
def __init__(self, provider, component, platform, config, uuid=None): def __init__(self, provider, component, platform, config, uuid=None):


@@ -37,7 +37,7 @@ class ServiceInterface(CoreSysAttributes):
def save(self): def save(self):
"""Save changes.""" """Save changes."""
self._services.data.save_data() self.sys_services.data.save_data()
def get_service_data(self): def get_service_data(self):
"""Return the requested service data.""" """Return the requested service data."""


@@ -21,7 +21,7 @@ class MQTTService(ServiceInterface):
@property @property
def _data(self): def _data(self):
"""Return data of this service.""" """Return data of this service."""
return self._services.data.mqtt return self.sys_services.data.mqtt
@property @property
def schema(self): def schema(self):
@@ -66,7 +66,7 @@ class MQTTService(ServiceInterface):
return True return True
# discover mqtt to homeassistant # discover mqtt to homeassistant
message = self._services.discovery.send( message = self.sys_discovery.send(
provider, SERVICE_MQTT, None, self.hass_config) provider, SERVICE_MQTT, None, self.hass_config)
self._data[ATTR_DISCOVERY_ID] = message.uuid self._data[ATTR_DISCOVERY_ID] = message.uuid
@@ -81,8 +81,8 @@ class MQTTService(ServiceInterface):
discovery_id = self._data.get(ATTR_DISCOVERY_ID) discovery_id = self._data.get(ATTR_DISCOVERY_ID)
if discovery_id: if discovery_id:
self._services.discovery.remove( self.sys_discovery.remove(
self._services.discovery.get(discovery_id)) self.sys_discovery.get(discovery_id))
self._data.clear() self._data.clear()
self.save() self.save()
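Services now reach discovery through sys_discovery, which implies the Discovery object moved onto the shared CoreSys container (the ServiceManager no longer owns it, per the services/__init__.py hunk above). A hedged sketch of the send/remove round trip; the positional signature mirrors the MQTT call above and the payload is made up:

def announce_and_forget(coresys):
    """Send a discovery message and immediately retract it (illustrative)."""
    message = coresys.discovery.send(
        "core_mosquitto", "mqtt", None,
        {"host": "172.30.32.1", "port": 1883})
    # send() schedules an EVENT_DISCOVERY_ADD push to Home Assistant.
    coresys.discovery.remove(coresys.discovery.get(message.uuid))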


@@ -1,15 +1,14 @@
"""Snapshot system control.""" """Snapshot system control."""
import asyncio import asyncio
from datetime import datetime
import logging import logging
from pathlib import Path from pathlib import Path
import tarfile
from .snapshot import Snapshot from .snapshot import Snapshot
from .utils import create_slug from .utils import create_slug
from ..const import ( from ..const import (
ATTR_SLUG, FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL) FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..utils.dt import utcnow
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -32,15 +31,15 @@ class SnapshotManager(CoreSysAttributes):
"""Return snapshot object.""" """Return snapshot object."""
return self.snapshots_obj.get(slug) return self.snapshots_obj.get(slug)
def _create_snapshot(self, name, sys_type): def _create_snapshot(self, name, sys_type, password):
"""Initialize a new snapshot object from name.""" """Initialize a new snapshot object from name."""
date_str = datetime.utcnow().isoformat() date_str = utcnow().isoformat()
slug = create_slug(name, date_str) slug = create_slug(name, date_str)
tar_file = Path(self._config.path_backup, "{}.tar".format(slug)) tar_file = Path(self.sys_config.path_backup, f"{slug}.tar")
# init object # init object
snapshot = Snapshot(self.coresys, tar_file) snapshot = Snapshot(self.coresys, tar_file)
snapshot.create(slug, name, date_str, sys_type) snapshot.new(slug, name, date_str, sys_type, password)
# set general data # set general data
snapshot.store_homeassistant() snapshot.store_homeassistant()
@@ -66,193 +65,201 @@ class SnapshotManager(CoreSysAttributes):
self.snapshots_obj[snapshot.slug] = snapshot self.snapshots_obj[snapshot.slug] = snapshot
tasks = [_load_snapshot(tar_file) for tar_file in tasks = [_load_snapshot(tar_file) for tar_file in
self._config.path_backup.glob("*.tar")] self.sys_config.path_backup.glob("*.tar")]
_LOGGER.info("Found %d snapshot files", len(tasks)) _LOGGER.info("Found %d snapshot files", len(tasks))
if tasks: if tasks:
await asyncio.wait(tasks, loop=self._loop) await asyncio.wait(tasks)
def remove(self, snapshot): def remove(self, snapshot):
"""Remove a snapshot.""" """Remove a snapshot."""
try: try:
snapshot.tar_file.unlink() snapshot.tarfile.unlink()
self.snapshots_obj.pop(snapshot.slug, None) self.snapshots_obj.pop(snapshot.slug, None)
_LOGGER.info("Removed snapshot file %s", snapshot.slug)
except OSError as err: except OSError as err:
_LOGGER.error("Can't remove snapshot %s: %s", snapshot.slug, err) _LOGGER.error("Can't remove snapshot %s: %s", snapshot.slug, err)
return False return False
return True return True
async def do_snapshot_full(self, name=""): async def import_snapshot(self, tar_file):
"""Check snapshot tarfile and import it."""
snapshot = Snapshot(self.coresys, tar_file)
# Read meta data
if not await snapshot.load():
return None
# Already exists?
if snapshot.slug in self.snapshots_obj:
_LOGGER.error("Snapshot %s already exists!", snapshot.slug)
return None
# Move snapshot to backup
tar_origin = Path(self.sys_config.path_backup, f"{snapshot.slug}.tar")
try:
snapshot.tarfile.rename(tar_origin)
except OSError as err:
_LOGGER.error("Can't move snapshot file to storage: %s", err)
return None
# Load new snapshot
snapshot = Snapshot(self.coresys, tar_origin)
if not await snapshot.load():
return None
_LOGGER.info("Success import %s", snapshot.slug)
self.snapshots_obj[snapshot.slug] = snapshot
return snapshot
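The new import_snapshot() validates an uploaded tar, moves it into the backup folder and registers it. A hedged caller sketch; the upload path is hypothetical and the coresys handle is assumed:

from pathlib import Path


async def import_upload(coresys, upload="/tmp/hassio-upload.tar"):
    """Register an uploaded snapshot tar, returning the Snapshot or None."""
    snapshot = await coresys.snapshots.import_snapshot(Path(upload))
    if snapshot:
        print(f"Imported snapshot {snapshot.slug} ({snapshot.name})")
    return snapshot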
async def do_snapshot_full(self, name="", password=None):
"""Create a full snapshot.""" """Create a full snapshot."""
if self.lock.locked(): if self.lock.locked():
_LOGGER.error("It is already a snapshot/restore process running") _LOGGER.error("It is already a snapshot/restore process running")
return False return None
snapshot = self._create_snapshot(name, SNAPSHOT_FULL) snapshot = self._create_snapshot(name, SNAPSHOT_FULL, password)
_LOGGER.info("Full-Snapshot %s start", snapshot.slug) _LOGGER.info("Full-Snapshot %s start", snapshot.slug)
try: try:
self._scheduler.suspend = True self.sys_scheduler.suspend = True
await self.lock.acquire() await self.lock.acquire()
async with snapshot: async with snapshot:
# snapshot addons # Snapshot add-ons
tasks = [] _LOGGER.info("Snapshot %s store Add-ons", snapshot.slug)
for addon in self._addons.list_addons: await snapshot.store_addons()
if not addon.is_installed:
continue
tasks.append(snapshot.import_addon(addon))
if tasks: # Snapshot folders
_LOGGER.info("Full-Snapshot %s run %d addons", _LOGGER.info("Snapshot %s store folders", snapshot.slug)
snapshot.slug, len(tasks))
await asyncio.wait(tasks, loop=self._loop)
# snapshot folders
_LOGGER.info("Full-Snapshot %s store folders", snapshot.slug)
await snapshot.store_folders() await snapshot.store_folders()
except (OSError, ValueError, tarfile.TarError) as err: except Exception: # pylint: disable=broad-except
_LOGGER.info("Full-Snapshot %s error: %s", snapshot.slug, err) _LOGGER.exception("Snapshot %s error", snapshot.slug)
return False return None
else: else:
_LOGGER.info("Full-Snapshot %s done", snapshot.slug) _LOGGER.info("Full-Snapshot %s done", snapshot.slug)
self.snapshots_obj[snapshot.slug] = snapshot self.snapshots_obj[snapshot.slug] = snapshot
return True return snapshot
finally: finally:
self._scheduler.suspend = False self.sys_scheduler.suspend = False
self.lock.release() self.lock.release()
async def do_snapshot_partial(self, name="", addons=None, folders=None): async def do_snapshot_partial(self, name="", addons=None, folders=None,
password=None):
"""Create a partial snapshot.""" """Create a partial snapshot."""
if self.lock.locked(): if self.lock.locked():
_LOGGER.error("It is already a snapshot/restore process running") _LOGGER.error("It is already a snapshot/restore process running")
return False return None
addons = addons or [] addons = addons or []
folders = folders or [] folders = folders or []
snapshot = self._create_snapshot(name, SNAPSHOT_PARTIAL) snapshot = self._create_snapshot(name, SNAPSHOT_PARTIAL, password)
_LOGGER.info("Partial-Snapshot %s start", snapshot.slug) _LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
try: try:
self._scheduler.suspend = True self.sys_scheduler.suspend = True
await self.lock.acquire() await self.lock.acquire()
async with snapshot: async with snapshot:
# snapshot addons # Snapshot add-ons
tasks = [] addon_list = []
for slug in addons: for addon_slug in addons:
addon = self._addons.get(slug) addon = self.sys_addons.get(addon_slug)
if addon.is_installed: if addon and addon.is_installed:
tasks.append(snapshot.import_addon(addon)) addon_list.append(addon)
continue
_LOGGER.warning(
"Add-on %s not found/installed", addon_slug)
if tasks: if addon_list:
_LOGGER.info("Partial-Snapshot %s run %d addons", _LOGGER.info("Snapshot %s store Add-ons", snapshot.slug)
snapshot.slug, len(tasks)) await snapshot.store_addons(addon_list)
await asyncio.wait(tasks, loop=self._loop)
# snapshot folders # Snapshot folders
_LOGGER.info("Partial-Snapshot %s store folders %s", if folders:
snapshot.slug, folders) _LOGGER.info("Snapshot %s store folders", snapshot.slug)
await snapshot.store_folders(folders) await snapshot.store_folders(folders)
except (OSError, ValueError, tarfile.TarError) as err: except Exception: # pylint: disable=broad-except
_LOGGER.info("Partial-Snapshot %s error: %s", snapshot.slug, err) _LOGGER.exception("Snapshot %s error", snapshot.slug)
return False return None
else: else:
_LOGGER.info("Partial-Snapshot %s done", snapshot.slug) _LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
self.snapshots_obj[snapshot.slug] = snapshot self.snapshots_obj[snapshot.slug] = snapshot
return True return snapshot
finally: finally:
self._scheduler.suspend = False self.sys_scheduler.suspend = False
self.lock.release() self.lock.release()
async def do_restore_full(self, snapshot): async def do_restore_full(self, snapshot, password=None):
"""Restore a snapshot.""" """Restore a snapshot."""
if self.lock.locked(): if self.lock.locked():
_LOGGER.error("It is already a snapshot/restore process running") _LOGGER.error("It is already a snapshot/restore process running")
return False return False
if snapshot.sys_type != SNAPSHOT_FULL: if snapshot.sys_type != SNAPSHOT_FULL:
_LOGGER.error( _LOGGER.error("Restore %s is only a partial snapshot!",
"Full-Restore %s is only a partial snapshot!", snapshot.slug) snapshot.slug)
return False
if snapshot.protected and not snapshot.set_password(password):
_LOGGER.error("Invalid password for snapshot %s", snapshot.slug)
return False return False
_LOGGER.info("Full-Restore %s start", snapshot.slug) _LOGGER.info("Full-Restore %s start", snapshot.slug)
try: try:
self._scheduler.suspend = True self.sys_scheduler.suspend = True
await self.lock.acquire() await self.lock.acquire()
async with snapshot: async with snapshot:
# stop system
tasks = [] tasks = []
tasks.append(self._homeassistant.stop())
for addon in self._addons.list_addons: # Stop Home-Assistant / Add-ons
if addon.is_installed: await self.sys_core.shutdown()
tasks.append(addon.stop())
await asyncio.wait(tasks, loop=self._loop) # Restore folders
_LOGGER.info("Restore %s run folders", snapshot.slug)
# restore folders
_LOGGER.info("Full-Restore %s restore folders", snapshot.slug)
await snapshot.restore_folders() await snapshot.restore_folders()
# start homeassistant restore # Start homeassistant restore
_LOGGER.info("Full-Restore %s restore Home-Assistant", _LOGGER.info("Restore %s run Home-Assistant", snapshot.slug)
snapshot.slug)
snapshot.restore_homeassistant() snapshot.restore_homeassistant()
task_hass = self._loop.create_task( task_hass = self.sys_create_task(self.sys_homeassistant.update(
self._homeassistant.update(snapshot.homeassistant_version)) snapshot.homeassistant_version))
# restore repositories # Restore repositories
_LOGGER.info("Full-Restore %s restore Repositories", _LOGGER.info("Restore %s run Repositories", snapshot.slug)
snapshot.slug)
await snapshot.restore_repositories() await snapshot.restore_repositories()
# restore addons # Delete delta add-ons
tasks = [] tasks.clear()
actual_addons = \ for addon in self.sys_addons.list_installed:
set(addon.slug for addon in self._addons.list_addons if addon.slug not in snapshot.addon_list:
if addon.is_installed)
restore_addons = \
set(data[ATTR_SLUG] for data in snapshot.addons)
remove_addons = actual_addons - restore_addons
_LOGGER.info("Full-Restore %s restore addons %s, remove %s",
snapshot.slug, restore_addons, remove_addons)
for slug in remove_addons:
addon = self._addons.get(slug)
if addon:
tasks.append(addon.uninstall()) tasks.append(addon.uninstall())
else:
_LOGGER.warning("Can't remove addon %s", snapshot.slug)
for slug in restore_addons:
addon = self._addons.get(slug)
if addon:
tasks.append(snapshot.export_addon(addon))
else:
_LOGGER.warning("Can't restore addon %s", slug)
if tasks: if tasks:
_LOGGER.info("Full-Restore %s restore addons tasks %d", _LOGGER.info("Restore %s remove add-ons", snapshot.slug)
snapshot.slug, len(tasks)) await asyncio.wait(tasks)
await asyncio.wait(tasks, loop=self._loop)
# Restore add-ons
_LOGGER.info("Restore %s old add-ons", snapshot.slug)
await snapshot.restore_addons()
# finish homeassistant task # finish homeassistant task
_LOGGER.info("Full-Restore %s wait until homeassistant ready", _LOGGER.info("Restore %s wait until homeassistant ready",
snapshot.slug) snapshot.slug)
await task_hass await task_hass
await self._homeassistant.start() await self.sys_homeassistant.start()
except (OSError, ValueError, tarfile.TarError) as err: except Exception: # pylint: disable=broad-except
_LOGGER.info("Full-Restore %s error: %s", snapshot.slug, err) _LOGGER.exception("Restore %s error", snapshot.slug)
return False return False
else: else:
@@ -260,60 +267,70 @@ class SnapshotManager(CoreSysAttributes):
return True return True
finally: finally:
self._scheduler.suspend = False self.sys_scheduler.suspend = False
self.lock.release() self.lock.release()
async def do_restore_partial(self, snapshot, homeassistant=False, async def do_restore_partial(self, snapshot, homeassistant=False,
addons=None, folders=None): addons=None, folders=None, password=None):
"""Restore a snapshot.""" """Restore a snapshot."""
if self.lock.locked(): if self.lock.locked():
_LOGGER.error("It is already a snapshot/restore process running") _LOGGER.error("It is already a snapshot/restore process running")
return False return False
if snapshot.protected and not snapshot.set_password(password):
_LOGGER.error("Invalid password for snapshot %s", snapshot.slug)
return False
addons = addons or [] addons = addons or []
folders = folders or [] folders = folders or []
_LOGGER.info("Partial-Restore %s start", snapshot.slug) _LOGGER.info("Partial-Restore %s start", snapshot.slug)
try: try:
self._scheduler.suspend = True self.sys_scheduler.suspend = True
await self.lock.acquire() await self.lock.acquire()
async with snapshot: async with snapshot:
tasks = [] # Stop Home-Assistant if they will be restored later
if homeassistant and FOLDER_HOMEASSISTANT in folders:
if FOLDER_HOMEASSISTANT in folders: await self.sys_homeassistant.stop()
await self._homeassistant.stop()
# Process folders
if folders: if folders:
_LOGGER.info("Partial-Restore %s restore folders %s", _LOGGER.info("Restore %s run folders", snapshot.slug)
snapshot.slug, folders)
await snapshot.restore_folders(folders) await snapshot.restore_folders(folders)
# Process Home-Assistant
task_hass = None
if homeassistant: if homeassistant:
_LOGGER.info("Partial-Restore %s restore Home-Assistant", _LOGGER.info("Restore %s run Home-Assistant",
snapshot.slug) snapshot.slug)
snapshot.restore_homeassistant() snapshot.restore_homeassistant()
tasks.append(self._homeassistant.update( task_hass = self.sys_create_task(
snapshot.homeassistant_version)) self.sys_homeassistant.update(
snapshot.homeassistant_version))
# Process Add-ons
addon_list = []
for slug in addons: for slug in addons:
addon = self._addons.get(slug) addon = self.sys_addons.get(slug)
if addon: if addon:
tasks.append(snapshot.export_addon(addon)) addon_list.append(addon)
else: continue
_LOGGER.warning("Can't restore addon %s", _LOGGER.warning("Can't restore addon %s", snapshot.slug)
snapshot.slug)
if tasks: if addon_list:
_LOGGER.info("Partial-Restore %s run %d tasks", _LOGGER.info("Restore %s old add-ons", snapshot.slug)
snapshot.slug, len(tasks)) await snapshot.restore_addons(addon_list)
await asyncio.wait(tasks, loop=self._loop)
# make sure homeassistant runs again # make sure homeassistant runs again
await self._homeassistant.start() if task_hass:
_LOGGER.info("Restore %s wait for Home-Assistant",
snapshot.slug)
await task_hass
await self.sys_homeassistant.start()
except (OSError, ValueError, tarfile.TarError) as err: except Exception: # pylint: disable=broad-except
_LOGGER.info("Partial-Restore %s error: %s", snapshot.slug, err) _LOGGER.exception("Restore %s error", snapshot.slug)
return False return False
else: else:
@@ -321,5 +338,5 @@ class SnapshotManager(CoreSysAttributes):
return True return True
finally: finally:
self._scheduler.suspend = False self.sys_scheduler.suspend = False
self.lock.release() self.lock.release()
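The manager methods now return the Snapshot object (or None) instead of a bare bool and accept an optional password that is threaded through to the encryption code in snapshot.py below. A hedged end-to-end sketch; the add-on slug, folder name and password are made up:

async def backup_and_restore_ssl(coresys, password="hunter2"):
    """Take an encrypted partial snapshot and restore it again (illustrative)."""
    snapshot = await coresys.snapshots.do_snapshot_partial(
        name="before-update", addons=["core_ssh"], folders=["ssl"],
        password=password)
    if snapshot is None:
        return False
    # Restoring a protected snapshot requires the same password.
    return await coresys.snapshots.do_restore_partial(
        snapshot, folders=["ssl"], password=password)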


@@ -1,23 +1,29 @@
"""Represent a snapshot file.""" """Represent a snapshot file."""
import asyncio import asyncio
from base64 import b64decode, b64encode
import json import json
import logging import logging
from pathlib import Path from pathlib import Path
import tarfile import tarfile
from tempfile import TemporaryDirectory from tempfile import TemporaryDirectory
from Crypto.Cipher import AES
from Crypto.Util import Padding
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error from voluptuous.humanize import humanize_error
from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
from .utils import remove_folder from .utils import (
remove_folder, password_to_key, password_for_validating, key_to_iv)
from ..const import ( from ..const import (
ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES, ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_IMAGE, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_IMAGE,
ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT, ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT, ATTR_CRYPTO,
ATTR_LAST_VERSION, ATTR_STARTUP_TIME) ATTR_LAST_VERSION, ATTR_PROTECTED, ATTR_WAIT_BOOT, ATTR_SIZE,
CRYPTO_AES128)
from ..coresys import CoreSysAttributes from ..coresys import CoreSysAttributes
from ..utils.json import write_json_file from ..utils.json import write_json_file
from ..utils.tar import SecureTarFile
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -28,9 +34,11 @@ class Snapshot(CoreSysAttributes):
def __init__(self, coresys, tar_file): def __init__(self, coresys, tar_file):
"""Initialize a snapshot.""" """Initialize a snapshot."""
self.coresys = coresys self.coresys = coresys
self.tar_file = tar_file self._tarfile = tar_file
self._data = {} self._data = {}
self._tmp = None self._tmp = None
self._key = None
self._aes = None
@property @property
def slug(self): def slug(self):
@@ -52,11 +60,21 @@ class Snapshot(CoreSysAttributes):
"""Return snapshot date.""" """Return snapshot date."""
return self._data[ATTR_DATE] return self._data[ATTR_DATE]
@property
def protected(self):
"""Return snapshot date."""
return self._data.get(ATTR_PROTECTED) is not None
@property @property
def addons(self): def addons(self):
"""Return snapshot date.""" """Return snapshot date."""
return self._data[ATTR_ADDONS] return self._data[ATTR_ADDONS]
@property
def addon_list(self):
"""Return a list of addons slugs."""
return [addon_data[ATTR_SLUG] for addon_data in self.addons]
@property @property
def folders(self): def folders(self):
"""Return list of saved folders.""" """Return list of saved folders."""
@@ -77,99 +95,29 @@ class Snapshot(CoreSysAttributes):
"""Return snapshot homeassistant version.""" """Return snapshot homeassistant version."""
return self._data[ATTR_HOMEASSISTANT].get(ATTR_VERSION) return self._data[ATTR_HOMEASSISTANT].get(ATTR_VERSION)
@homeassistant_version.setter
def homeassistant_version(self, value):
"""Set snapshot homeassistant version."""
self._data[ATTR_HOMEASSISTANT][ATTR_VERSION] = value
@property @property
def homeassistant_last_version(self): def homeassistant(self):
"""Return snapshot homeassistant last version (custom).""" """Return snapshot homeassistant data."""
return self._data[ATTR_HOMEASSISTANT].get(ATTR_LAST_VERSION) return self._data[ATTR_HOMEASSISTANT]
@homeassistant_last_version.setter
def homeassistant_last_version(self, value):
"""Set snapshot homeassistant last version (custom)."""
self._data[ATTR_HOMEASSISTANT][ATTR_LAST_VERSION] = value
@property
def homeassistant_image(self):
"""Return snapshot homeassistant custom image."""
return self._data[ATTR_HOMEASSISTANT].get(ATTR_IMAGE)
@homeassistant_image.setter
def homeassistant_image(self, value):
"""Set snapshot homeassistant custom image."""
self._data[ATTR_HOMEASSISTANT][ATTR_IMAGE] = value
@property
def homeassistant_ssl(self):
"""Return snapshot homeassistant api ssl."""
return self._data[ATTR_HOMEASSISTANT].get(ATTR_SSL)
@homeassistant_ssl.setter
def homeassistant_ssl(self, value):
"""Set snapshot homeassistant api ssl."""
self._data[ATTR_HOMEASSISTANT][ATTR_SSL] = value
@property
def homeassistant_port(self):
"""Return snapshot homeassistant api port."""
return self._data[ATTR_HOMEASSISTANT].get(ATTR_PORT)
@homeassistant_port.setter
def homeassistant_port(self, value):
"""Set snapshot homeassistant api port."""
self._data[ATTR_HOMEASSISTANT][ATTR_PORT] = value
@property
def homeassistant_password(self):
"""Return snapshot homeassistant api password."""
return self._data[ATTR_HOMEASSISTANT].get(ATTR_PASSWORD)
@homeassistant_password.setter
def homeassistant_password(self, value):
"""Set snapshot homeassistant api password."""
self._data[ATTR_HOMEASSISTANT][ATTR_PASSWORD] = value
@property
def homeassistant_watchdog(self):
"""Return snapshot homeassistant watchdog options."""
return self._data[ATTR_HOMEASSISTANT].get(ATTR_WATCHDOG)
@homeassistant_watchdog.setter
def homeassistant_watchdog(self, value):
"""Set snapshot homeassistant watchdog options."""
self._data[ATTR_HOMEASSISTANT][ATTR_WATCHDOG] = value
@property
def homeassistant_startup_time(self):
"""Return snapshot homeassistant startup time options."""
return self._data[ATTR_HOMEASSISTANT].get(ATTR_STARTUP_TIME)
@homeassistant_startup_time.setter
def homeassistant_startup_time(self, value):
"""Set snapshot homeassistant startup time options."""
self._data[ATTR_HOMEASSISTANT][ATTR_STARTUP_TIME] = value
@property
def homeassistant_boot(self):
"""Return snapshot homeassistant boot options."""
return self._data[ATTR_HOMEASSISTANT].get(ATTR_BOOT)
@homeassistant_boot.setter
def homeassistant_boot(self, value):
"""Set snapshot homeassistant boot options."""
self._data[ATTR_HOMEASSISTANT][ATTR_BOOT] = value
@property @property
def size(self): def size(self):
"""Return snapshot size.""" """Return snapshot size."""
if not self.tar_file.is_file(): if not self.tarfile.is_file():
return 0 return 0
return self.tar_file.stat().st_size / 1048576 # calc mbyte return round(self.tarfile.stat().st_size / 1048576, 2) # calc mbyte
def create(self, slug, name, date, sys_type): @property
def is_new(self):
"""Return True if there is new."""
return not self.tarfile.exists()
@property
def tarfile(self):
"""Return path to Snapshot tarfile."""
return self._tarfile
def new(self, slug, name, date, sys_type, password=None):
"""Initialize a new snapshot.""" """Initialize a new snapshot."""
# init metadata # init metadata
self._data[ATTR_SLUG] = slug self._data[ATTR_SLUG] = slug
@@ -180,38 +128,75 @@ class Snapshot(CoreSysAttributes):
# Add defaults # Add defaults
self._data = SCHEMA_SNAPSHOT(self._data) self._data = SCHEMA_SNAPSHOT(self._data)
# Set password
if password:
self._key = password_to_key(password)
self._aes = AES.new(
self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
self._data[ATTR_PROTECTED] = password_for_validating(password)
self._data[ATTR_CRYPTO] = CRYPTO_AES128
def set_password(self, password):
"""Set the password for a exists snapshot."""
if not password:
return False
validating = password_for_validating(password)
if validating != self._data[ATTR_PROTECTED]:
return False
self._key = password_to_key(password)
self._aes = AES.new(self._key, AES.MODE_CBC, iv=key_to_iv(self._key))
return True
def _encrypt_data(self, data):
"""Make data secure."""
if not self._key or data is None:
return data
return b64encode(
self._aes.encrypt(Padding.pad(data.encode(), 16))).decode()
def _decrypt_data(self, data):
"""Make data readable."""
if not self._key or data is None:
return data
return Padding.unpad(
self._aes.decrypt(b64decode(data)), 16).decode()
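Secrets stored in snapshot.json (e.g. the Home Assistant API password) are now AES-CBC encrypted and base64-encoded. password_to_key, key_to_iv and password_for_validating live in snapshots/utils and are not part of this excerpt, so the derivations below are only assumptions; the pad/encrypt/encode round trip itself mirrors the methods above (pycryptodome API):

from base64 import b64decode, b64encode
from hashlib import sha256

from Crypto.Cipher import AES
from Crypto.Util import Padding


def password_to_key(password):
    """Assumed derivation: 128-bit key from the snapshot password."""
    return sha256(password.encode()).digest()[:16]


def key_to_iv(key):
    """Assumed derivation: initialization vector from the key."""
    return sha256(key).digest()[:16]


key = password_to_key("hunter2")
cipher = AES.new(key, AES.MODE_CBC, iv=key_to_iv(key))
token = b64encode(cipher.encrypt(Padding.pad(b"api-password", 16))).decode()

# A fresh cipher object is needed for decryption (CBC state is not reusable).
cipher = AES.new(key, AES.MODE_CBC, iv=key_to_iv(key))
assert Padding.unpad(cipher.decrypt(b64decode(token)), 16) == b"api-password"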
     async def load(self):
         """Read snapshot.json from tar file."""
-        if not self.tar_file.is_file():
-            _LOGGER.error("No tarfile %s", self.tar_file)
+        if not self.tarfile.is_file():
+            _LOGGER.error("No tarfile %s", self.tarfile)
             return False
 
         def _load_file():
             """Read snapshot.json."""
-            with tarfile.open(self.tar_file, "r:") as snapshot:
+            with tarfile.open(self.tarfile, "r:") as snapshot:
                 json_file = snapshot.extractfile("./snapshot.json")
                 return json_file.read()
 
         # read snapshot.json
         try:
-            raw = await self._loop.run_in_executor(None, _load_file)
+            raw = await self.sys_run_in_executor(_load_file)
         except (tarfile.TarError, KeyError) as err:
             _LOGGER.error(
-                "Can't read snapshot tarfile %s: %s", self.tar_file, err)
+                "Can't read snapshot tarfile %s: %s", self.tarfile, err)
             return False
 
         # parse data
         try:
             raw_dict = json.loads(raw)
         except json.JSONDecodeError as err:
-            _LOGGER.error("Can't read data for %s: %s", self.tar_file, err)
+            _LOGGER.error("Can't read data for %s: %s", self.tarfile, err)
             return False
 
         # validate
         try:
             self._data = SCHEMA_SNAPSHOT(raw_dict)
         except vol.Invalid as err:
-            _LOGGER.error("Can't validate data for %s: %s", self.tar_file,
+            _LOGGER.error("Can't validate data for %s: %s", self.tarfile,
                           humanize_error(raw_dict, err))
             return False
@@ -219,24 +204,24 @@ class Snapshot(CoreSysAttributes):
     async def __aenter__(self):
         """Async context to open a snapshot."""
-        self._tmp = TemporaryDirectory(dir=str(self._config.path_tmp))
+        self._tmp = TemporaryDirectory(dir=str(self.sys_config.path_tmp))
 
         # create a snapshot
-        if not self.tar_file.is_file():
+        if not self.tarfile.is_file():
             return self
 
         # extract a exists snapshot
         def _extract_snapshot():
             """Extract a snapshot."""
-            with tarfile.open(self.tar_file, "r:") as tar:
+            with tarfile.open(self.tarfile, "r:") as tar:
                 tar.extractall(path=self._tmp.name)
 
-        await self._loop.run_in_executor(None, _extract_snapshot)
+        await self.sys_run_in_executor(_extract_snapshot)
 
     async def __aexit__(self, exception_type, exception_value, traceback):
         """Async context to close a snapshot."""
         # exists snapshot or exception on build
-        if self.tar_file.is_file() or exception_type is not None:
+        if self.tarfile.is_file() or exception_type is not None:
             self._tmp.cleanup()
             return
@@ -244,50 +229,81 @@ class Snapshot(CoreSysAttributes):
         try:
             self._data = SCHEMA_SNAPSHOT(self._data)
         except vol.Invalid as err:
-            _LOGGER.error("Invalid data for %s: %s", self.tar_file,
+            _LOGGER.error("Invalid data for %s: %s", self.tarfile,
                           humanize_error(self._data, err))
             raise ValueError("Invalid config") from None
 
         # new snapshot, build it
         def _create_snapshot():
             """Create a new snapshot."""
-            with tarfile.open(self.tar_file, "w:") as tar:
+            with tarfile.open(self.tarfile, "w:") as tar:
                 tar.add(self._tmp.name, arcname=".")
 
         try:
             write_json_file(Path(self._tmp.name, "snapshot.json"), self._data)
-            await self._loop.run_in_executor(None, _create_snapshot)
+            await self.sys_run_in_executor(_create_snapshot)
         except (OSError, json.JSONDecodeError) as err:
             _LOGGER.error("Can't write snapshot: %s", err)
         finally:
             self._tmp.cleanup()
-    async def import_addon(self, addon):
-        """Add a addon into snapshot."""
-        snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))
-
-        if not await addon.snapshot(snapshot_file):
-            _LOGGER.error("Can't make snapshot from %s", addon.slug)
-            return False
-
-        # store to config
-        self._data[ATTR_ADDONS].append({
-            ATTR_SLUG: addon.slug,
-            ATTR_NAME: addon.name,
-            ATTR_VERSION: addon.version_installed,
-        })
-
-        return True
-
-    async def export_addon(self, addon):
-        """Restore a addon from snapshot."""
-        snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))
-
-        if not await addon.restore(snapshot_file):
-            _LOGGER.error("Can't restore snapshot for %s", addon.slug)
-            return False
-
-        return True
+    async def store_addons(self, addon_list=None):
+        """Add a list of add-ons into snapshot."""
+        addon_list = addon_list or self.sys_addons.list_installed
+
+        async def _addon_save(addon):
+            """Task to store a add-on into snapshot."""
+            addon_file = SecureTarFile(
+                Path(self._tmp.name, f"{addon.slug}.tar.gz"),
+                'w', key=self._key)
+
+            # Take snapshot
+            if not await addon.snapshot(addon_file):
+                _LOGGER.error("Can't make snapshot from %s", addon.slug)
+                return
+
+            # Store to config
+            self._data[ATTR_ADDONS].append({
+                ATTR_SLUG: addon.slug,
+                ATTR_NAME: addon.name,
+                ATTR_VERSION: addon.version_installed,
+                ATTR_SIZE: addon_file.size,
+            })
+
+        # Run tasks
+        tasks = [_addon_save(addon) for addon in addon_list]
+        if tasks:
+            await asyncio.wait(tasks)
+
+    async def restore_addons(self, addon_list=None):
+        """Restore a list add-on from snapshot."""
+        if not addon_list:
+            addon_list = []
+            for addon_slug in self.addon_list:
+                addon = self.sys_addons.get(addon_slug)
+                if addon:
+                    addon_list.append(addon)
+
+        async def _addon_restore(addon):
+            """Task to restore a add-on into snapshot."""
+            addon_file = SecureTarFile(
+                Path(self._tmp.name, f"{addon.slug}.tar.gz"),
+                'r', key=self._key)
+
+            # If exists inside snapshot
+            if not addon_file.path.exists():
+                _LOGGER.error("Can't find snapshot for %s", addon.slug)
+                return
+
+            # Performe a restore
+            if not await addon.restore(addon_file):
+                _LOGGER.error("Can't restore snapshot for %s", addon.slug)
+                return
+
+        # Run tasks
+        tasks = [_addon_restore(addon) for addon in addon_list]
+        if tasks:
+            await asyncio.wait(tasks)
     async def store_folders(self, folder_list=None):
         """Backup hassio data into snapshot."""
@@ -296,13 +312,18 @@ class Snapshot(CoreSysAttributes):
         def _folder_save(name):
             """Intenal function to snapshot a folder."""
             slug_name = name.replace("/", "_")
-            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
-            origin_dir = Path(self._config.path_hassio, name)
+            tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
+            origin_dir = Path(self.sys_config.path_hassio, name)
 
+            # Check if exsits
+            if not origin_dir.is_dir():
+                _LOGGER.warning("Can't find snapshot folder %s", name)
+                return
+
+            # Take snapshot
             try:
                 _LOGGER.info("Snapshot folder %s", name)
-                with tarfile.open(snapshot_tar, "w:gz",
-                                  compresslevel=1) as tar_file:
+                with SecureTarFile(tar_name, 'w', key=self._key) as tar_file:
                     tar_file.add(origin_dir, arcname=".")
 
                 _LOGGER.info("Snapshot folder %s done", name)
@@ -310,11 +331,11 @@ class Snapshot(CoreSysAttributes):
             except (tarfile.TarError, OSError) as err:
                 _LOGGER.warning("Can't snapshot folder %s: %s", name, err)
 
-        # run tasks
-        tasks = [self._loop.run_in_executor(None, _folder_save, folder)
+        # Run tasks
+        tasks = [self.sys_run_in_executor(_folder_save, folder)
                  for folder in folder_list]
         if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
     async def restore_folders(self, folder_list=None):
         """Backup hassio data into snapshot."""
@@ -323,70 +344,80 @@ class Snapshot(CoreSysAttributes):
         def _folder_restore(name):
             """Intenal function to restore a folder."""
             slug_name = name.replace("/", "_")
-            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
-            origin_dir = Path(self._config.path_hassio, name)
+            tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
+            origin_dir = Path(self.sys_config.path_hassio, name)
 
-            # clean old stuff
+            # Check if exists inside snapshot
+            if not tar_name.exists():
+                _LOGGER.warning("Can't find restore folder %s", name)
+                return
+
+            # Clean old stuff
             if origin_dir.is_dir():
                 remove_folder(origin_dir)
 
+            # Performe a restore
             try:
                 _LOGGER.info("Restore folder %s", name)
-                with tarfile.open(snapshot_tar, "r:gz") as tar_file:
+                with SecureTarFile(tar_name, 'r', key=self._key) as tar_file:
                     tar_file.extractall(path=origin_dir)
 
                 _LOGGER.info("Restore folder %s done", name)
             except (tarfile.TarError, OSError) as err:
                 _LOGGER.warning("Can't restore folder %s: %s", name, err)
 
-        # run tasks
-        tasks = [self._loop.run_in_executor(None, _folder_restore, folder)
+        # Run tasks
+        tasks = [self.sys_run_in_executor(_folder_restore, folder)
                  for folder in folder_list]
         if tasks:
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
     def store_homeassistant(self):
         """Read all data from homeassistant object."""
-        self.homeassistant_version = self._homeassistant.version
-        self.homeassistant_watchdog = self._homeassistant.watchdog
-        self.homeassistant_boot = self._homeassistant.boot
-        self.homeassistant_startup_time = self._homeassistant.startup_time
+        self.homeassistant[ATTR_VERSION] = self.sys_homeassistant.version
+        self.homeassistant[ATTR_WATCHDOG] = self.sys_homeassistant.watchdog
+        self.homeassistant[ATTR_BOOT] = self.sys_homeassistant.boot
+        self.homeassistant[ATTR_WAIT_BOOT] = self.sys_homeassistant.wait_boot
 
-        # custom image
-        if self._homeassistant.is_custom_image:
-            self.homeassistant_image = self._homeassistant.image
-            self.homeassistant_last_version = self._homeassistant.last_version
+        # Custom image
+        if self.sys_homeassistant.is_custom_image:
+            self.homeassistant[ATTR_IMAGE] = self.sys_homeassistant.image
+            self.homeassistant[ATTR_LAST_VERSION] = \
+                self.sys_homeassistant.last_version
 
-        # api
-        self.homeassistant_port = self._homeassistant.api_port
-        self.homeassistant_ssl = self._homeassistant.api_ssl
-        self.homeassistant_password = self._homeassistant.api_password
+        # API/Proxy
+        self.homeassistant[ATTR_PORT] = self.sys_homeassistant.api_port
+        self.homeassistant[ATTR_SSL] = self.sys_homeassistant.api_ssl
+        self.homeassistant[ATTR_PASSWORD] = \
+            self._encrypt_data(self.sys_homeassistant.api_password)
 
     def restore_homeassistant(self):
         """Write all data to homeassistant object."""
-        self._homeassistant.watchdog = self.homeassistant_watchdog
-        self._homeassistant.boot = self.homeassistant_boot
-        self._homeassistant.startup_time = self.homeassistant_startup_time
+        self.sys_homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG]
+        self.sys_homeassistant.boot = self.homeassistant[ATTR_BOOT]
+        self.sys_homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT]
 
-        # custom image
-        if self.homeassistant_image:
-            self._homeassistant.image = self.homeassistant_image
-            self._homeassistant.last_version = self.homeassistant_last_version
+        # Custom image
+        if self.homeassistant.get(ATTR_IMAGE):
+            self.sys_homeassistant.image = self.homeassistant[ATTR_IMAGE]
+            self.sys_homeassistant.last_version = \
+                self.homeassistant[ATTR_LAST_VERSION]
 
-        # api
-        self._homeassistant.api_port = self.homeassistant_port
-        self._homeassistant.api_ssl = self.homeassistant_ssl
-        self._homeassistant.api_password = self.homeassistant_password
+        # API/Proxy
+        self.sys_homeassistant.api_port = self.homeassistant[ATTR_PORT]
+        self.sys_homeassistant.api_ssl = self.homeassistant[ATTR_SSL]
+        self.sys_homeassistant.api_password = \
+            self._decrypt_data(self.homeassistant[ATTR_PASSWORD])
 
         # save
-        self._homeassistant.save_data()
+        self.sys_homeassistant.save_data()
 
     def store_repositories(self):
         """Store repository list into snapshot."""
-        self.repositories = self._config.addons_repositories
+        self.repositories = self.sys_config.addons_repositories
 
     def restore_repositories(self):
         """Restore repositories from snapshot.
 
         Return a coroutine.
         """
-        return self._addons.load_repositories(self.repositories)
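For orientation, a hedged usage sketch of the new snapshot flow. The constructor signature, the tar path, the date string, the "full" type literal and the coresys object are assumptions for illustration only; new(), the async context manager and the store_* calls are the ones shown in the diff above.

    from pathlib import Path

    async def make_protected_snapshot(coresys, password):
        # Assumption: Snapshot(coresys, tar_path) is the constructor; it is not part of this diff.
        snapshot = Snapshot(coresys, Path("/data/backup/demo.tar"))
        snapshot.new("demo", "Demo snapshot", "2018-05-29T00:00:00", "full",
                     password=password)

        async with snapshot:
            # The tarfile is only written when the context exits without an exception.
            await snapshot.store_addons()
            await snapshot.store_folders(["ssl", "share"])
            snapshot.store_homeassistant()
            snapshot.store_repositories()
        return snapshot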


@@ -1,6 +1,34 @@
 """Util addons functions."""
 import hashlib
 import shutil
+import re
+
+RE_DIGITS = re.compile(r"\d+")
+
+
+def password_to_key(password):
+    """Generate a AES Key from password."""
+    password = password.encode()
+    for _ in range(100):
+        password = hashlib.sha256(password).digest()
+    return password[:16]
+
+
+def password_for_validating(password):
+    """Generate a SHA256 hash from password."""
+    for _ in range(100):
+        password = hashlib.sha256(password.encode()).hexdigest()
+    try:
+        return str(sum(map(int, RE_DIGITS.findall(password))))[0]
+    except (ValueError, IndexError):
+        return "0"
+
+
+def key_to_iv(key):
+    """Generate a iv from Key."""
+    for _ in range(100):
+        key = hashlib.sha256(key).digest()
+    return key[:16]
 
 
 def create_slug(name, date_str):
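A minimal sketch of how these helpers behave, mirroring _encrypt_data/_decrypt_data in the snapshot class above (assuming pycryptodome, which setup.py pins below, and the three helpers above in scope):

    from base64 import b64encode, b64decode
    from Crypto.Cipher import AES
    from Crypto.Util import Padding

    key = password_to_key("hunter2")          # 100x SHA-256, first 16 bytes
    iv = key_to_iv(key)                       # another 100x SHA-256, again 16 bytes
    assert len(key) == 16 and len(iv) == 16   # AES-128 key/IV sizes

    # Encrypt the way _encrypt_data does: pad to 16 bytes, AES-CBC, base64.
    aes = AES.new(key, AES.MODE_CBC, iv=iv)
    token = b64encode(aes.encrypt(Padding.pad(b"api-password", 16))).decode()

    # Decrypt the way _decrypt_data does (fresh cipher, same key/IV).
    aes = AES.new(key, AES.MODE_CBC, iv=iv)
    assert Padding.unpad(aes.decrypt(b64decode(token)), 16) == b"api-password"

    print(password_for_validating("hunter2"))  # single digit stored as ATTR_PROTECTED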


@@ -5,10 +5,10 @@ import voluptuous as vol
 from ..const import (
     ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
     ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_IMAGE,
-    ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT,
-    ATTR_LAST_VERSION, ATTR_STARTUP_TIME,
+    ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT, ATTR_SIZE,
+    ATTR_LAST_VERSION, ATTR_WAIT_BOOT, ATTR_PROTECTED, ATTR_CRYPTO,
     FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
-    SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
+    SNAPSHOT_FULL, SNAPSHOT_PARTIAL, CRYPTO_AES128)
 from ..validate import NETWORK_PORT, REPOSITORIES, DOCKER_IMAGE
 
 ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]
@@ -29,8 +29,11 @@ SCHEMA_SNAPSHOT = vol.Schema({
     vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
     vol.Required(ATTR_NAME): vol.Coerce(str),
     vol.Required(ATTR_DATE): vol.Coerce(str),
+    vol.Inclusive(ATTR_PROTECTED, 'encrypted'):
+        vol.All(vol.Coerce(str), vol.Length(min=1, max=1)),
+    vol.Inclusive(ATTR_CRYPTO, 'encrypted'): CRYPTO_AES128,
     vol.Optional(ATTR_HOMEASSISTANT, default=dict): vol.Schema({
-        vol.Required(ATTR_VERSION): vol.Coerce(str),
+        vol.Optional(ATTR_VERSION): vol.Coerce(str),
         vol.Inclusive(ATTR_IMAGE, 'custom_hass'): DOCKER_IMAGE,
         vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
         vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
@@ -38,7 +41,7 @@ SCHEMA_SNAPSHOT = vol.Schema({
         vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
         vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
         vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
-        vol.Optional(ATTR_STARTUP_TIME, default=600):
+        vol.Optional(ATTR_WAIT_BOOT, default=600):
             vol.All(vol.Coerce(int), vol.Range(min=60)),
     }, extra=vol.REMOVE_EXTRA),
     vol.Optional(ATTR_FOLDERS, default=list):
@@ -47,6 +50,7 @@ SCHEMA_SNAPSHOT = vol.Schema({
         vol.Required(ATTR_SLUG): vol.Coerce(str),
         vol.Required(ATTR_NAME): vol.Coerce(str),
         vol.Required(ATTR_VERSION): vol.Coerce(str),
+        vol.Optional(ATTR_SIZE, default=0): vol.Coerce(float),
     }, extra=vol.REMOVE_EXTRA)], unique_addons),
     vol.Optional(ATTR_REPOSITORIES, default=list): REPOSITORIES,
 }, extra=vol.ALLOW_EXTRA)
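For reference, vol.Inclusive(..., 'encrypted') ties ATTR_PROTECTED and ATTR_CRYPTO together: either both keys are present or neither. A small self-contained sketch of that behaviour (a simplified stand-in schema, not the full SCHEMA_SNAPSHOT; 'aes128' stands in for CRYPTO_AES128):

    import voluptuous as vol

    schema = vol.Schema({
        vol.Inclusive('protected', 'encrypted'):
            vol.All(vol.Coerce(str), vol.Length(min=1, max=1)),
        vol.Inclusive('crypto', 'encrypted'): vol.In(['aes128']),
    })

    schema({})                                      # ok: unencrypted snapshot
    schema({'protected': '7', 'crypto': 'aes128'})  # ok: both markers present
    try:
        schema({'protected': '7'})                  # missing its partner key
    except vol.Invalid as err:
        print("rejected:", err)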


@@ -34,7 +34,7 @@ class Supervisor(CoreSysAttributes):
     @property
     def last_version(self):
         """Return last available version of homeassistant."""
-        return self._updater.version_hassio
+        return self.sys_updater.version_hassio
 
     @property
     def image(self):
@@ -50,13 +50,13 @@ class Supervisor(CoreSysAttributes):
         """Update HomeAssistant version."""
         version = version or self.last_version
-        if version == self._supervisor.version:
+        if version == self.sys_supervisor.version:
             _LOGGER.warning("Version %s is already installed", version)
             return
 
         _LOGGER.info("Update supervisor to version %s", version)
         if await self.instance.install(version):
-            self._loop.call_later(1, self._loop.stop)
+            self.sys_loop.call_later(1, self.sys_loop.stop)
             return True
 
         _LOGGER.error("Update of hass.io fails!")


@@ -15,7 +15,7 @@ class Tasks(CoreSysAttributes):
     RUN_RELOAD_ADDONS = 21600
     RUN_RELOAD_SNAPSHOTS = 72000
-    RUN_RELOAD_HOST_CONTROL = 72000
+    RUN_RELOAD_HOST = 72000
     RUN_RELOAD_UPDATER = 21600
 
     RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
@@ -29,24 +29,24 @@ class Tasks(CoreSysAttributes):
     async def load(self):
         """Add Tasks to scheduler."""
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
             self._update_addons, self.RUN_UPDATE_ADDONS))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
             self._update_supervisor, self.RUN_UPDATE_SUPERVISOR))
-        self.jobs.add(self._scheduler.register_task(
-            self._addons.reload, self.RUN_RELOAD_ADDONS))
-        self.jobs.add(self._scheduler.register_task(
-            self._updater.reload, self.RUN_RELOAD_UPDATER))
-        self.jobs.add(self._scheduler.register_task(
-            self._snapshots.reload, self.RUN_RELOAD_SNAPSHOTS))
-        self.jobs.add(self._scheduler.register_task(
-            self._host_control.load, self.RUN_RELOAD_HOST_CONTROL))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_addons.reload, self.RUN_RELOAD_ADDONS))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_updater.reload, self.RUN_RELOAD_UPDATER))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_snapshots.reload, self.RUN_RELOAD_SNAPSHOTS))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_host.load, self.RUN_RELOAD_HOST))
+        self.jobs.add(self.sys_scheduler.register_task(
             self._watchdog_homeassistant_docker,
             self.RUN_WATCHDOG_HOMEASSISTANT_DOCKER))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
             self._watchdog_homeassistant_api,
             self.RUN_WATCHDOG_HOMEASSISTANT_API))
@@ -55,7 +55,7 @@ class Tasks(CoreSysAttributes):
     async def _update_addons(self):
         """Check if a update is available of a addon and update it."""
         tasks = []
-        for addon in self._addons.list_addons:
+        for addon in self.sys_addons.list_addons:
             if not addon.is_installed or not addon.auto_update:
                 continue
@@ -70,35 +70,35 @@ class Tasks(CoreSysAttributes):
         if tasks:
             _LOGGER.info("Addon auto update process %d tasks", len(tasks))
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)
 
     async def _update_supervisor(self):
         """Check and run update of supervisor hassio."""
-        if not self._supervisor.need_update:
+        if not self.sys_supervisor.need_update:
             return
 
         # don't perform a update on beta/dev channel
-        if self._updater.beta_channel:
-            _LOGGER.warning("Ignore Hass.io update on beta upstream!")
+        if self.sys_dev:
+            _LOGGER.warning("Ignore Hass.io update on dev channel!")
             return
 
         _LOGGER.info("Found new Hass.io version")
-        await self._supervisor.update()
+        await self.sys_supervisor.update()
 
     async def _watchdog_homeassistant_docker(self):
         """Check running state of docker and start if they is close."""
         # if Home-Assistant is active
-        if not await self._homeassistant.is_initialize() or \
-                not self._homeassistant.watchdog:
+        if not await self.sys_homeassistant.is_initialize() or \
+                not self.sys_homeassistant.watchdog:
             return
 
         # if Home-Assistant is running
-        if self._homeassistant.in_progress or \
-                await self._homeassistant.is_running():
+        if self.sys_homeassistant.in_progress or \
+                await self.sys_homeassistant.is_running():
             return
 
         _LOGGER.warning("Watchdog found a problem with Home-Assistant docker!")
-        await self._homeassistant.start()
+        await self.sys_homeassistant.start()
 
     async def _watchdog_homeassistant_api(self):
         """Create scheduler task for montoring running state of API.
@@ -109,13 +109,13 @@ class Tasks(CoreSysAttributes):
         retry_scan = self._data.get('HASS_WATCHDOG_API', 0)
 
         # If Home-Assistant is active
-        if not await self._homeassistant.is_initialize() or \
-                not self._homeassistant.watchdog:
+        if not await self.sys_homeassistant.is_initialize() or \
+                not self.sys_homeassistant.watchdog:
             return
 
         # If Home-Assistant API is up
-        if self._homeassistant.in_progress or \
-                await self._homeassistant.check_api_state():
+        if self.sys_homeassistant.in_progress or \
+                await self.sys_homeassistant.check_api_state():
             return
 
         # Look like we run into a problem
@@ -126,5 +126,5 @@ class Tasks(CoreSysAttributes):
             return
 
         _LOGGER.error("Watchdog found a problem with Home-Assistant API!")
-        await self._homeassistant.restart()
+        await self.sys_homeassistant.restart()
         self._data['HASS_WATCHDOG_API'] = 0
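The scheduler these tasks register with is not part of this compare; as a rough mental model, register_task(coro_callable, interval) behaves like a repeating asyncio job whose id is kept in self.jobs. A hedged, self-contained sketch of that pattern (illustrative only, not the actual Hass.io scheduler):

    import asyncio

    def register_task(loop, coro_callable, interval):
        """Call `coro_callable` every `interval` seconds (simplified model)."""
        async def _runner():
            while True:
                await asyncio.sleep(interval)
                await coro_callable()
        return loop.create_task(_runner())

    # e.g. register_task(loop, tasks._watchdog_homeassistant_docker, 15)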


@@ -1,15 +1,15 @@
 """Fetch last versions from webserver."""
 import asyncio
+from contextlib import suppress
 from datetime import timedelta
 import json
 import logging
 
 import aiohttp
-import async_timeout
 
 from .const import (
     URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
-    ATTR_BETA_CHANNEL)
+    ATTR_CHANNEL)
 from .coresys import CoreSysAttributes
 from .utils import AsyncThrottle
 from .utils.json import JsonConfig
@@ -44,21 +44,14 @@ class Updater(JsonConfig, CoreSysAttributes):
         return self._data.get(ATTR_HASSIO)
 
     @property
-    def upstream(self):
-        """Return Upstream branch for version."""
-        if self.beta_channel:
-            return 'dev'
-        return 'master'
+    def channel(self):
+        """Return upstream channel of hassio instance."""
+        return self._data[ATTR_CHANNEL]
 
-    @property
-    def beta_channel(self):
-        """Return True if we run in beta upstream."""
-        return self._data[ATTR_BETA_CHANNEL]
-
-    @beta_channel.setter
-    def beta_channel(self, value):
-        """Set beta upstream mode."""
-        self._data[ATTR_BETA_CHANNEL] = bool(value)
+    @channel.setter
+    def channel(self, value):
+        """Set upstream mode."""
+        self._data[ATTR_CHANNEL] = value
     @AsyncThrottle(timedelta(seconds=60))
     async def reload(self):
@@ -66,12 +59,11 @@ class Updater(JsonConfig, CoreSysAttributes):
         Is a coroutine.
         """
-        url = URL_HASSIO_VERSION.format(self.upstream)
+        url = URL_HASSIO_VERSION.format(channel=self.channel)
         try:
             _LOGGER.info("Fetch update data from %s", url)
-            with async_timeout.timeout(10, loop=self._loop):
-                async with self._websession.get(url) as request:
-                    data = await request.json(content_type=None)
+            async with self.sys_websession.get(url, timeout=10) as request:
+                data = await request.json(content_type=None)
 
         except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
             _LOGGER.warning("Can't fetch versions from %s: %s", url, err)
@@ -82,11 +74,18 @@ class Updater(JsonConfig, CoreSysAttributes):
             return
 
         # data valid?
-        if not data:
+        if not data or data.get(ATTR_CHANNEL) != self.channel:
             _LOGGER.warning("Invalid data from %s", url)
             return
 
-        # update versions
-        self._data[ATTR_HOMEASSISTANT] = data.get('homeassistant')
-        self._data[ATTR_HASSIO] = data.get('hassio')
+        # update supervisor versions
+        with suppress(KeyError):
+            self._data[ATTR_HASSIO] = data['supervisor']
+
+        # update Home Assistant version
+        machine = self.sys_machine or 'default'
+        with suppress(KeyError):
+            self._data[ATTR_HOMEASSISTANT] = \
+                data['homeassistant'][machine]
 
         self.save_data()
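The reshaped version feed is keyed by channel and, for Home Assistant, by machine type. A minimal sketch of the lookup the new reload() performs; the URL layout, the 'channel' key name and the sample machine are assumptions based on this diff, not the published file format:

    import aiohttp

    async def fetch_versions(channel='stable', machine='raspberrypi3'):
        """Sketch of the lookup reload() performs (URL layout is an assumption)."""
        url = f"https://s3.amazonaws.com/hassio-version/{channel}.json"
        async with aiohttp.ClientSession() as session:
            async with session.get(url, timeout=10) as request:
                data = await request.json(content_type=None)

        # Same checks as reload(): channel must match, then per-key lookups.
        if not data or data.get('channel') != channel:
            return None
        return data.get('supervisor'), data.get('homeassistant', {}).get(machine)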


@@ -1,7 +1,9 @@
 """Tools file for HassIO."""
 from datetime import datetime
+import logging
 import re
 
+_LOGGER = logging.getLogger(__name__)
+
 RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")
@@ -10,7 +12,22 @@ def convert_to_ascii(raw):
     return RE_STRING.sub("", raw.decode())
 
 
-class AsyncThrottle(object):
+def process_lock(method):
+    """Wrap function with only run once."""
+    async def wrap_api(api, *args, **kwargs):
+        """Return api wrapper."""
+        if api.lock.locked():
+            _LOGGER.error(
+                "Can't excute %s while a task is in progress", method.__name__)
+            return False
+
+        async with api.lock:
+            return await method(api, *args, **kwargs)
+
+    return wrap_api
+
+
+class AsyncThrottle:
     """
     Decorator that prevents a function from being called more than once every
     time period.
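process_lock expects the decorated object to expose an asyncio lock attribute named `lock`; concurrent calls are refused instead of queued while the lock is held. A small usage sketch (the DemoJob class is illustrative, not part of the codebase):

    import asyncio

    class DemoJob:
        """Toy object exposing the `lock` attribute process_lock relies on."""

        def __init__(self):
            self.lock = asyncio.Lock()

        @process_lock
        async def update(self):
            await asyncio.sleep(1)      # stands in for long-running work
            return True

    async def demo():
        job = DemoJob()
        # Overlapping calls: the second sees the held lock, logs and returns False.
        return await asyncio.gather(job.update(), job.update())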


@@ -8,9 +8,11 @@ import aiohttp
 import async_timeout
 import pytz
 
+UTC = pytz.utc
+
 _LOGGER = logging.getLogger(__name__)
 
-FREEGEOIP_URL = "https://freegeoip.io/json/"
+FREEGEOIP_URL = "https://freegeoip.net/json/"
 
 # Copyright (c) Django Software Foundation and individual contributors.
 # All rights reserved.
@@ -27,7 +29,7 @@ async def fetch_timezone(websession):
     """Read timezone from freegeoip."""
     data = {}
     try:
-        with async_timeout.timeout(10, loop=websession.loop):
+        with async_timeout.timeout(10):
             async with websession.get(FREEGEOIP_URL) as request:
                 data = await request.json()
@@ -61,7 +63,7 @@ def parse_datetime(dt_str):
     tzinfo = None  # type: Optional[dt.tzinfo]
     if tzinfo_str == 'Z':
-        tzinfo = pytz.utc
+        tzinfo = UTC
     elif tzinfo_str is not None:
         offset_mins = int(tzinfo_str[-2:]) if len(tzinfo_str) > 3 else 0
         offset_hours = int(tzinfo_str[1:3])
@@ -74,3 +76,8 @@ def parse_datetime(dt_str):
     kws = {k: int(v) for k, v in kws.items() if v is not None}
     kws['tzinfo'] = tzinfo
     return datetime(**kws)
+
+
+def utcnow():
+    """Returns current timestamp including timezone."""
+    return datetime.now(UTC)
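With UTC pinned at module level, utcnow() returns a timezone-aware datetime; a tiny sketch of what that looks like in practice (output value is illustrative):

    now = utcnow()                 # timezone-aware "now"
    print(now.tzinfo is UTC)       # True
    print(now.isoformat())         # e.g. 2018-05-29T17:00:00.123456+00:00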

hassio/utils/gdbus.py (new file, 174 lines)

@@ -0,0 +1,174 @@
"""DBus implementation with glib."""
import asyncio
import logging
import json
import shlex
import re
import xml.etree.ElementTree as ET
from ..exceptions import DBusFatalError, DBusParseError
_LOGGER = logging.getLogger(__name__)
# Use to convert GVariant into json
RE_GVARIANT_TYPE = re.compile(
r"(?:boolean|byte|int16|uint16|int32|uint32|handle|int64|uint64|double|"
r"string|objectpath|signature) ")
RE_GVARIANT_TULPE = re.compile(r"^\((.*),\)$")
RE_GVARIANT_VARIANT = re.compile(
r"(?<=(?: |{|\[))<((?:'|\").*?(?:'|\")|\d+(?:\.\d+)?)>(?=(?:|]|}|,))")
RE_GVARIANT_STRING = re.compile(r"(?<=(?: |{|\[))'(.*?)'(?=(?:|]|}|,))")
# Commands for dbus
INTROSPECT = ("gdbus introspect --system --dest {bus} "
"--object-path {object} --xml")
CALL = ("gdbus call --system --dest {bus} --object-path {object} "
"--method {method} {args}")
DBUS_METHOD_GETALL = 'org.freedesktop.DBus.Properties.GetAll'
class DBus:
"""DBus handler."""
def __init__(self, bus_name, object_path):
"""Initialize dbus object."""
self.bus_name = bus_name
self.object_path = object_path
self.methods = set()
@staticmethod
async def connect(bus_name, object_path):
"""Read object data."""
self = DBus(bus_name, object_path)
await self._init_proxy() # pylint: disable=protected-access
_LOGGER.info("Connect to dbus: %s - %s", bus_name, object_path)
return self
async def _init_proxy(self):
"""Read interface data."""
command = shlex.split(INTROSPECT.format(
bus=self.bus_name,
object=self.object_path
))
# Ask data
_LOGGER.info("Introspect %s on %s", self.bus_name, self.object_path)
data = await self._send(command)
# Parse XML
try:
xml = ET.fromstring(data)
except ET.ParseError as err:
_LOGGER.error("Can't parse introspect data: %s", err)
raise DBusParseError() from None
# Read available methods
_LOGGER.debug("data: %s", data)
for interface in xml.findall("./interface"):
interface_name = interface.get('name')
for method in interface.findall("./method"):
method_name = method.get('name')
self.methods.add(f"{interface_name}.{method_name}")
@staticmethod
def _gvariant(raw):
"""Parse GVariant input to python."""
raw = RE_GVARIANT_TYPE.sub("", raw)
raw = RE_GVARIANT_TULPE.sub(r"[\1]", raw)
raw = RE_GVARIANT_VARIANT.sub(r"\1", raw)
raw = RE_GVARIANT_STRING.sub(r'"\1"', raw)
# No data
if raw.startswith("()"):
return {}
try:
return json.loads(raw)
except json.JSONDecodeError as err:
_LOGGER.error("Can't parse '%s': %s", raw, err)
raise DBusParseError() from None
async def call_dbus(self, method, *args):
"""Call a dbus method."""
command = shlex.split(CALL.format(
bus=self.bus_name,
object=self.object_path,
method=method,
args=" ".join(map(str, args))
))
# Run command
_LOGGER.info("Call %s on %s", method, self.object_path)
data = await self._send(command)
# Parse and return data
return self._gvariant(data)
async def get_properties(self, interface):
"""Read all properties from interface."""
try:
return (await self.call_dbus(DBUS_METHOD_GETALL, interface))[0]
except IndexError:
_LOGGER.error("No attributes returned for %s", interface)
raise DBusFatalError from None
async def _send(self, command):
"""Send command over dbus."""
# Run command
_LOGGER.debug("Send dbus command: %s", command)
try:
proc = await asyncio.create_subprocess_exec(
*command,
stdin=asyncio.subprocess.DEVNULL,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE
)
data, error = await proc.communicate()
except OSError as err:
_LOGGER.error("DBus fatal error: %s", err)
raise DBusFatalError() from None
# Success?
if proc.returncode != 0:
_LOGGER.error("DBus return error: %s", error)
raise DBusFatalError()
# End
return data.decode()
def __getattr__(self, name):
"""Mapping to dbus method."""
return getattr(DBusCallWrapper(self, self.bus_name), name)
class DBusCallWrapper:
"""Wrapper a DBus interface for a call."""
def __init__(self, dbus, interface):
"""Initialize wrapper."""
self.dbus = dbus
self.interface = interface
def __call__(self):
"""Should never be called."""
_LOGGER.error("DBus method %s not exists!", self.interface)
raise DBusFatalError()
def __getattr__(self, name):
"""Mapping to dbus method."""
interface = f"{self.interface}.{name}"
if interface not in self.dbus.methods:
return DBusCallWrapper(self.dbus, interface)
def _method_wrapper(*args):
"""Wrap method.
Return a coroutine
"""
return self.dbus.call_dbus(interface, *args)
return _method_wrapper
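The regex pipeline above turns gdbus's GVariant text output into plain JSON. A quick illustration with a typical GetAll-style reply; the sample string is made up, and _gvariant is the static helper defined above:

    raw = "({'KernelVersion': <'4.9.80'>, 'Board': <'raspberrypi3'>},)"
    print(DBus._gvariant(raw))
    # -> [{'KernelVersion': '4.9.80', 'Board': 'raspberrypi3'}]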


@@ -21,7 +21,7 @@ def read_json_file(jsonfile):
         return json.loads(cfile.read())
 
 
-class JsonConfig(object):
+class JsonConfig:
     """Hass core object for handle it."""
 
     def __init__(self, json_file, schema):

hassio/utils/tar.py (new file, 88 lines)

@@ -0,0 +1,88 @@
"""Tarfile fileobject handler for encrypted files."""
import tarfile
import hashlib
from Crypto.Cipher import AES
from Crypto.Random import get_random_bytes
from Crypto.Util.Padding import pad
BLOCK_SIZE = 16
MOD_READ = 'r'
MOD_WRITE = 'w'
class SecureTarFile:
"""Handle encrypted files for tarfile library."""
def __init__(self, name, mode, key=None, gzip=True):
"""Initialize encryption handler."""
self._file = None
self._mode = mode
self._name = name
# Tarfile options
self._tar = None
self._tar_mode = f"{mode}|gz" if gzip else f"{mode}|"
# Encryption/Decription
self._aes = None
self._key = key
def __enter__(self):
"""Start context manager tarfile."""
if not self._key:
self._tar = tarfile.open(name=str(self._name), mode=self._tar_mode)
return self._tar
# Encrypted/Decryped Tarfile
self._file = self._name.open(f"{self._mode}b")
# Extract IV for CBC
if self._mode == MOD_READ:
cbc_rand = self._file.read(16)
else:
cbc_rand = get_random_bytes(16)
self._file.write(cbc_rand)
self._aes = AES.new(
self._key, AES.MODE_CBC, iv=_generate_iv(self._key, cbc_rand))
self._tar = tarfile.open(fileobj=self, mode=self._tar_mode)
return self._tar
def __exit__(self, exc_type, exc_value, traceback):
"""Close file."""
if self._tar:
self._tar.close()
if self._file:
self._file.close()
def write(self, data):
"""Write data."""
if len(data) % BLOCK_SIZE != 0:
data = pad(data, BLOCK_SIZE)
self._file.write(self._aes.encrypt(data))
def read(self, size=0):
"""Read data."""
return self._aes.decrypt(self._file.read(size))
@property
def path(self):
"""Return path object of tarfile."""
return self._name
@property
def size(self):
"""Return snapshot size."""
if not self._name.is_file():
return 0
return round(self._name.stat().st_size / 1048576, 2) # calc mbyte
def _generate_iv(key, salt):
"""Generate a iv from data."""
temp_iv = key + salt
for _ in range(100):
temp_iv = hashlib.sha256(temp_iv).digest()
return temp_iv[:16]
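A round-trip sketch for SecureTarFile; the paths are illustrative, and password_to_key is the snapshot helper shown earlier in this compare (any 16-byte key works):

    from pathlib import Path

    key = password_to_key("hunter2")              # 16 bytes -> AES-128
    src = Path("/tmp/demo-data")                  # directory to back up (illustrative)
    archive = Path("/tmp/demo.tar.gz")

    # Write: tarfile streams gzip data through SecureTarFile.write(), which pads
    # and AES-CBC encrypts it; a random 16-byte salt at the file head seeds the IV.
    with SecureTarFile(archive, 'w', key=key) as tar:
        tar.add(src, arcname=".")

    # Read: same key, the salt is read back from the file head to rebuild the IV.
    with SecureTarFile(archive, 'r', key=key) as tar:
        tar.extractall(path=Path("/tmp/demo-restore"))

    print(SecureTarFile(archive, 'r', key=key).size, "MB")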


@@ -1,24 +1,41 @@
 """Validate functions."""
 import uuid
+import re
 
 import voluptuous as vol
 import pytz
 
 from .const import (
-    ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
-    ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
-    ATTR_PASSWORD, ATTR_HOMEASSISTANT, ATTR_HASSIO, ATTR_BOOT, ATTR_LAST_BOOT,
-    ATTR_SSL, ATTR_PORT, ATTR_WATCHDOG, ATTR_WAIT_BOOT, ATTR_UUID,
-    ATTR_STARTUP_TIME)
+    ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_TIMEZONE,
+    ATTR_ADDONS_CUSTOM_LIST, ATTR_PASSWORD, ATTR_HOMEASSISTANT, ATTR_HASSIO,
+    ATTR_BOOT, ATTR_LAST_BOOT, ATTR_SSL, ATTR_PORT, ATTR_WATCHDOG,
+    ATTR_WAIT_BOOT, ATTR_UUID, CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV)
 
+RE_REPOSITORY = re.compile(r"^(?P<url>[^#]+)(?:#(?P<branch>[\w\-]+))?$")
 
 NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
-ALSA_CHANNEL = vol.Match(r"\d+,\d+")
 WAIT_BOOT = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
 DOCKER_IMAGE = vol.Match(r"^[\w{}]+/[\-\w{}]+$")
+ALSA_DEVICE = vol.Any(None, vol.Match(r"\d+,\d+"))
+CHANNELS = vol.In([CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV])
 
+
+def validate_repository(repository):
+    """Validate a valide repository."""
+    data = RE_REPOSITORY.match(repository)
+    if not data:
+        raise vol.Invalid("No valid repository format!")
+
+    # Validate URL
+    # pylint: disable=no-value-for-parameter
+    vol.Url()(data.group('url'))
+
+    return repository
+
+
 # pylint: disable=no-value-for-parameter
-REPOSITORIES = vol.All([vol.Url()], vol.Unique())
+REPOSITORIES = vol.All([validate_repository], vol.Unique())
 
 
 def validate_timezone(timezone):
@@ -73,14 +90,13 @@ SCHEMA_HASS_CONFIG = vol.Schema({
     vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
     vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
     vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
-    vol.Optional(ATTR_STARTUP_TIME, default=600):
+    vol.Optional(ATTR_WAIT_BOOT, default=600):
         vol.All(vol.Coerce(int), vol.Range(min=60)),
 }, extra=vol.REMOVE_EXTRA)
 
-# pylint: disable=no-value-for-parameter
 SCHEMA_UPDATER_CONFIG = vol.Schema({
-    vol.Optional(ATTR_BETA_CHANNEL, default=False): vol.Boolean(),
+    vol.Optional(ATTR_CHANNEL, default=CHANNEL_STABLE): CHANNELS,
     vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
     vol.Optional(ATTR_HASSIO): vol.Coerce(str),
 }, extra=vol.REMOVE_EXTRA)
@@ -93,7 +109,5 @@ SCHEMA_HASSIO_CONFIG = vol.Schema({
     vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[
         "https://github.com/hassio-addons/repository",
     ]): REPOSITORIES,
-    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
-    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
     vol.Optional(ATTR_WAIT_BOOT, default=5): WAIT_BOOT,
 }, extra=vol.REMOVE_EXTRA)
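validate_repository (and the REPOSITORIES list built on it) checks the url#branch format used for custom add-on repositories. A short illustration using the function defined above:

    import voluptuous as vol

    validate_repository("https://github.com/hassio-addons/repository")      # ok
    validate_repository("https://github.com/hassio-addons/repository#dev")  # ok, branch pinned
    try:
        validate_repository("not a repository")
    except vol.Invalid as err:
        print("rejected:", err)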


@@ -40,13 +40,15 @@ setup(
     ],
     include_package_data=True,
     install_requires=[
-        'async_timeout',
-        'aiohttp',
-        'docker',
-        'colorlog',
-        'voluptuous',
-        'gitpython',
-        'pytz',
-        'pyudev'
+        'attr==0.3.1',
+        'async_timeout==3.0.0',
+        'aiohttp==3.2.1',
+        'docker==3.3.0',
+        'colorlog==3.1.2',
+        'voluptuous==0.11.1',
+        'gitpython==2.1.10',
+        'pytz==2018.4',
+        'pyudev==0.21.0',
+        'pycryptodome==3.4.11'
     ]
 )


@@ -1,8 +1,4 @@
 {
-    "hassio": "0.86",
-    "homeassistant": "0.62.1",
-    "resinos": "1.1",
-    "resinhup": "0.3",
-    "generic": "0.3",
-    "cluster": "0.1"
+    "hassio": "105",
+    "homeassistant": "0.70.0"
 }