Compare commits


149 commits (0.101 ... 105)

Author SHA1 Message Date
Pascal Vizeli
9084ac119f Fix version conflict 2018-05-29 19:40:16 +02:00
Pascal Vizeli
41943ba61a Delete .gitattributes 2018-05-29 19:38:00 +02:00
Pascal Vizeli
33794669a1 Last version.json update 2018-05-29 19:36:01 +02:00
Pascal Vizeli
fe155a4ff0 Read version from AWS (#488)
* Read version from AWS

* Update const.py

* Update updater.py

* Update updater.py

* Update updater.py

* Update updater.py

* Update updater.py

* Update const.py

* Update updater.py
2018-05-29 19:14:09 +02:00
Pascal Vizeli
124e487ef7 Support new panel generation (#487)
* Support new panel generation

* fix lint
2018-05-29 17:53:09 +02:00
Pascal Vizeli
f361916a60 Update docker timeout to 900sec (#486) 2018-05-29 17:37:20 +02:00
Pascal Vizeli
20afa1544b Bump version to 105 2018-05-29 00:22:12 +02:00
Pascal Vizeli
c08d5af4db Fix version conflicts 2018-05-29 00:21:24 +02:00
Pascal Vizeli
dc341c8af8 Fix version conflicts 2018-05-29 00:18:08 +02:00
Pascal Vizeli
2507b52adb Update Home Assistant to 0.70.0 2018-05-28 23:59:12 +02:00
Pascal Vizeli
1302708135 Update Home Assistant to 0.70.0 2018-05-28 23:58:45 +02:00
Pascal Vizeli
1314812f92 Update Home Assistant to 0.70.0 2018-05-28 23:53:28 +02:00
Pascal Vizeli
f739e3ed11 Update Hass.io to 104 2018-05-28 23:30:48 +02:00
Pascal Vizeli
abb526fc0f Update Panel / fix icons (#483) 2018-05-28 23:29:34 +02:00
Pascal Vizeli
efb1a24b8f Expose panel update (#482)
* Update __init__.py

* Update setup.py

* Update security.py

* Update setup.py

* Update __init__.py

* Update setup.py

* Update __init__.py
2018-05-28 23:16:03 +02:00
Pascal Vizeli
bc0835963d Bump version to 104 2018-05-28 21:28:19 +02:00
Pascal Vizeli
316190dff8 Fix new panel build for 0.70.0 (#481)
Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch>
2018-05-28 21:24:17 +02:00
Pascal Vizeli
029ead0c7c Home Assistant 0.70.0b7 2018-05-27 10:52:10 +02:00
Paulus Schoutsen
a85172f30b Update to b7 2018-05-26 22:03:24 -04:00
Pascal Vizeli
dfe2532813 0.70.0b5 2018-05-26 22:28:47 +02:00
Pascal Vizeli
cf3bb23629 Home Assistant 0.70.0b5 2018-05-26 22:28:31 +02:00
Pascal Vizeli
2132042aca Update Home Assistant to version 0.70.0b3 2018-05-25 19:27:49 +02:00
Pascal Vizeli
19e448fc54 Update Home Assistant to version 0.70.0b3 2018-05-25 19:27:33 +02:00
c727
a4e0fb8e99 Update HA beta to 0.70.0b2 2018-05-22 15:03:18 +02:00
Paulus Schoutsen
5b72e2887e Update Hass.io to 0.70.0b2 2018-05-21 21:14:41 -04:00
Pascal Vizeli
d2b6ec1b7e Update Home Assistant to version 0.70.0b1 2018-05-21 15:38:04 +02:00
Paulus Schoutsen
4b541a23c4 Update Hass.io to 0.70.0b1 2018-05-21 09:27:11 -04:00
Pascal Vizeli
99869449ae Update Home Assistant to 0.70.0b0 2018-05-19 10:21:23 +02:00
Pascal Vizeli
eab73f3895 Update Home Assistant to 0.70.0b0 2018-05-19 10:20:55 +02:00
Pascal Vizeli
9e96615ffa Update Home Assistant to version 0.69.1 2018-05-13 10:20:56 +02:00
Pascal Vizeli
350010feb5 Update Home Assistant to version 0.69.1 2018-05-13 10:20:38 +02:00
Pascal Vizeli
7395e4620b Update Home Assistant to version 0.69.1 2018-05-13 10:20:18 +02:00
Pascal Vizeli
7d91ae4513 Update Home Assistant to 0.69.0 2018-05-11 22:32:38 +02:00
Pascal Vizeli
343f759983 Update Home Assistant to 0.69.0 2018-05-11 22:32:01 +02:00
Pascal Vizeli
24ee3f8cc0 Update Home Assistant to 0.69.0 2018-05-11 22:31:41 +02:00
Pascal Vizeli
c143eadb62 Update Home-Assistant 2018-05-09 20:31:22 +02:00
Pascal Vizeli
e7df38f4d1 Merge pull request #467 from home-assistant/rc
Hass.io 103
2018-05-09 15:47:18 +02:00
Pascal Vizeli
3e42318ac8 Merge branch 'master' into rc 2018-05-09 15:18:53 +02:00
Pascal Vizeli
c6e5d2932e Update Hass.io 2018-05-09 13:15:54 +02:00
Pascal Vizeli
1aaf21a350 Update Hass.io and Home Assistant 2018-05-09 13:15:35 +02:00
Pascal Vizeli
f185eece8a Update Hass.io and Home Assistant 2018-05-09 13:15:17 +02:00
Pascal Vizeli
9d951280ef Update const.py 2018-05-09 11:07:07 +02:00
Pascal Vizeli
3f598bafc0 Bugfix panel loading (#464) 2018-05-09 11:06:19 +02:00
Franck Nijhof
cddd859f56 🔈 Improves of audio devices handling (#463) 2018-05-08 14:27:17 +02:00
Pascal Vizeli
e7adf50ec1 Update Home Assistant 0.69.0b2 2018-05-07 23:54:11 +02:00
Pascal Vizeli
ac437f809a Update Home Assistant 0.69.0b2 2018-05-07 23:53:54 +02:00
Pascal Vizeli
f13dee9b9d Update Hass.io 2018-05-06 09:53:56 +02:00
Pascal Vizeli
00855c0909 Update Home Assistant and Hass.io 2018-05-06 09:53:28 +02:00
Pascal Vizeli
1fafed5a07 Update Home Assistant and Hass.io 2018-05-06 09:52:55 +02:00
Pascal Vizeli
7adb81b350 Update const.py 2018-05-06 09:45:46 +02:00
Pascal Vizeli
4647035b00 Bugfix Websession 2018-05-06 09:44:58 +02:00
Pascal Vizeli
8ad7344e02 Update Hass.io to version 103.1 2018-05-05 23:29:58 +02:00
Pascal Vizeli
f1c46b3385 Update Hass.io to version 103.1 2018-05-05 23:29:41 +02:00
Pascal Vizeli
7f84073b12 Update Hass.io to version 103.1 2018-05-05 23:29:24 +02:00
Pascal Vizeli
e383a11bb7 Pump version to fix 2018-05-05 23:19:56 +02:00
Pascal Vizeli
cc113e2251 Update Hass.io to version 103 2018-05-05 19:05:30 +02:00
Pascal Vizeli
c5a3830c7d Merge remote-tracking branch 'origin/dev' into rc 2018-05-04 21:53:40 +02:00
Pascal Vizeli
a2abadc970 Update hass.io to version 103 2018-05-04 21:39:12 +02:00
Pascal Vizeli
db444b89d3 Update gdbus.py (#460)
* Update gdbus.py

* Update gdbus.py
2018-05-04 20:58:23 +02:00
Pascal Vizeli
77881e8a58 Update Panel (#459)
* Update Panel

* Update core.py
2018-05-04 20:04:34 +02:00
Pascal Vizeli
0b15f88da3 Bugfixes (#457)
* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py

* Update gdbus.py
2018-05-03 23:22:48 +02:00
Pascal Vizeli
7c6bf96f6f shield host functions 2018-05-03 01:00:13 +02:00
Pascal Vizeli
dc77e2d8d9 Update gdbus.py 2018-05-03 00:52:57 +02:00
Pascal Vizeli
68824fab4f fix bug 2018-05-02 23:31:23 +02:00
Pascal Vizeli
d6b3a36714 Update Dockerfile 2018-05-02 22:52:08 +02:00
Pascal Vizeli
8ab1f703c7 Update Home Assistant to version 0.68.1 2018-05-01 07:24:34 +02:00
Pascal Vizeli
95a4e292aa Update Home Assistant to version 0.68.1 2018-05-01 07:24:03 +02:00
Pascal Vizeli
3b9252558f Update Home Assistant to version 0.68.1 2018-05-01 07:23:46 +02:00
Pascal Vizeli
4a324dccc6 Pump version to 103 2018-04-30 14:49:34 +02:00
Pascal Vizeli
8fffb0f8b5 Fix version 2018-04-30 14:46:59 +02:00
Pascal Vizeli
87adfce211 Update Hass.io to version 0.102 2018-04-30 14:45:31 +02:00
Tod Schmidt
297813f6e6 fix for asound.tmpl (#454) 2018-04-30 14:09:20 +02:00
Pascal Vizeli
362315852a Pump version to 0.102 2018-04-30 14:08:18 +02:00
Tod Schmidt
d221f36cf8 fix for asound.tmpl (#454) 2018-04-30 07:39:44 +02:00
Pascal Vizeli
9e18589b6b Update info.py 2018-04-28 10:51:25 +02:00
Pascal Vizeli
c4d09210e1 Update Home-Assistant to version 0.68.0 2018-04-28 10:16:57 +02:00
Pascal Vizeli
43797c5eb5 Update Home-Assistant to version 0.68.0 2018-04-28 10:16:28 +02:00
Pascal Vizeli
fe38fe94dc Update Home-Assistant to version 0.68.0 2018-04-28 10:16:06 +02:00
Pascal Vizeli
f185291eca Update control.py 2018-04-27 22:33:41 +02:00
Pascal Vizeli
7541ae6476 Update hostname.py 2018-04-27 22:31:37 +02:00
Pascal Vizeli
d94715be2b Merge pull request #451 from home-assistant/cleanups
Cleanups & restructs
2018-04-26 21:47:06 +02:00
Pascal Vizeli
99cc5972c8 Update __init__.py 2018-04-26 21:44:06 +02:00
Pascal Vizeli
3d101a24a1 Update API.md 2018-04-26 21:42:45 +02:00
Pascal Vizeli
2ed3ddf05b fix lint 2018-04-26 21:23:43 +02:00
Pascal Vizeli
10b3658bd7 Revert last changes 2018-04-26 20:51:29 +02:00
Pascal Vizeli
9f5903089e Cleanup API 2018-04-26 19:49:12 +02:00
Pascal Vizeli
0593885ed4 revert error 2018-04-26 19:25:10 +02:00
Pascal Vizeli
3efbe11d49 Cleanup 2018-04-26 19:23:52 +02:00
Pascal Vizeli
1c2e0e5749 Update host.py 2018-04-26 11:36:51 +02:00
Pascal Vizeli
f64da6a547 Update exceptions.py 2018-04-26 11:33:43 +02:00
Pascal Vizeli
94fba7e175 Update info.py 2018-04-26 11:32:15 +02:00
Pascal Vizeli
a59245e6bb Update __init__.py 2018-04-26 11:29:12 +02:00
Pascal Vizeli
217c1acc62 Update and rename power.py to control.py 2018-04-26 11:27:02 +02:00
Pascal Vizeli
2c0a68bd8f Update and rename local.py to info.py 2018-04-26 09:44:49 +02:00
Pascal Vizeli
e37ffd6107 Merge pull request #450 from home-assistant/dbus
Initial Dbus support
2018-04-26 00:17:45 +02:00
Pascal Vizeli
3bde598fa7 fix host 2018-04-25 23:49:45 +02:00
Pascal Vizeli
53f42ff934 fix attr 2018-04-25 23:36:44 +02:00
Pascal Vizeli
9041eb9e9a Fix attributes 2018-04-25 23:24:55 +02:00
Pascal Vizeli
70ac395232 fix bugs 2018-04-25 22:47:17 +02:00
Pascal Vizeli
82f68b4a7b fix dbus 2018-04-25 22:27:57 +02:00
Pascal Vizeli
2b2f3214e9 fix selecter 2018-04-25 22:12:27 +02:00
Pascal Vizeli
1c0d63a02e fix sys 2018-04-25 21:49:28 +02:00
Pascal Vizeli
de77215630 Update Home-Assistant to version 0.68.0b1 2018-04-25 21:43:06 +02:00
Pascal Vizeli
f300b843c1 Update Home-Assistant to version 0.68.0b1 2018-04-25 21:42:35 +02:00
Pascal Vizeli
0bb81136bb Add hostname function 2018-04-24 23:38:40 +02:00
Pascal Vizeli
2a81ced817 Update gdbus.py 2018-04-24 15:52:18 +02:00
Pascal Vizeli
7363951a9a Update gdbus.py 2018-04-24 15:40:14 +02:00
Pascal Vizeli
6f770b78af add interface dbus class 2018-04-23 23:30:21 +02:00
Pascal Vizeli
10219a348f fix lint 2018-04-23 21:56:54 +02:00
Pascal Vizeli
23d1013cfa Follow the correct shutdown flow 2018-04-23 21:45:06 +02:00
Pascal Vizeli
05980d4147 some cleanup more 2018-04-23 21:22:29 +02:00
Pascal Vizeli
e5e25c895f Fix error handling 2018-04-23 21:10:48 +02:00
Pascal Vizeli
b486883ff6 Cleanups 2018-04-23 15:32:23 +02:00
Pascal Vizeli
42dd4d9557 Update coresys.py 2018-04-23 09:05:52 +02:00
Pascal Vizeli
7dff9e09a7 Update bootstrap.py 2018-04-23 09:05:08 +02:00
Pascal Vizeli
c315b026a3 Update __init__.py 2018-04-23 08:58:06 +02:00
Pascal Vizeli
a4ba4c80e8 Update __init__.py 2018-04-23 08:57:39 +02:00
Pascal Vizeli
ccd48b63a2 Create __init__.py 2018-04-23 08:51:47 +02:00
Pascal Vizeli
6d5f70ced6 Rename hassio/misc/dbus/rauc.py to hassio/dbus/rauc.py 2018-04-23 08:51:15 +02:00
Pascal Vizeli
ccffb4b786 Rename hassio/misc/rauc.py to hassio/misc/dbus/rauc.py 2018-04-23 08:50:51 +02:00
Pascal Vizeli
68dbbe212c Rename hassio/misc/networkmanager.py to hassio/dbus/networkmanager.py 2018-04-23 08:50:18 +02:00
Pascal Vizeli
5df869e08a Rename hassio/misc/systemd.py to hassio/dbus/systemd.py 2018-04-23 08:49:56 +02:00
Pascal Vizeli
63b9e023b4 add hostmanager 2018-04-22 17:59:41 +02:00
Pascal Vizeli
8f357739ec code cleanups 2018-04-22 17:44:03 +02:00
Pascal Vizeli
808fc0f8b6 Log internal exceptions on API level 2018-04-22 10:16:24 +02:00
Pascal Vizeli
1a6f6085e6 Add API support for new handling 2018-04-22 10:15:07 +02:00
Pascal Vizeli
0de3e9a233 update handling 2018-04-22 09:59:43 +02:00
Pascal Vizeli
f1237f124f small down the footprint 2018-04-22 09:35:36 +02:00
Pascal Vizeli
69142b6fb0 Add systemd dbus 2018-04-21 23:56:36 +02:00
Pascal Vizeli
28f295a1e2 Cleanup 2018-04-21 22:33:06 +02:00
Pascal Vizeli
55c2127baa Cleanup Loop handling 2018-04-21 16:30:31 +02:00
Pascal Vizeli
265c36b345 Claim exceptions 2018-04-21 15:39:08 +02:00
Pascal Vizeli
9f081fe32f Update Home-Assistant to version 0.68.0b0 2018-04-21 00:04:15 +02:00
Pascal Vizeli
e4fb6ad727 Update Home-Assistant to version 0.68.0b0 2018-04-21 00:03:52 +02:00
Pascal Vizeli
1040a1624a fix lint 2018-04-20 23:40:58 +02:00
Pascal Vizeli
a2ee2852a0 Update gdbus.py 2018-04-20 16:10:59 +02:00
Pascal Vizeli
b2e3b726d9 Update gdbus.py 2018-04-20 16:01:43 +02:00
Pascal Vizeli
0f4e557552 Update gdbus.py 2018-04-20 15:59:04 +02:00
Pascal Vizeli
2efa9f9483 Update gdbus.py 2018-04-20 15:48:36 +02:00
Pascal Vizeli
43e6ca8f4a Update gdbus.py 2018-04-20 10:56:47 +02:00
Pascal Vizeli
34d67a7bcd Update gdbus.py 2018-04-20 10:15:29 +02:00
Pascal Vizeli
5a6051f9a1 Update gdbus.py 2018-04-20 09:58:49 +02:00
Pascal Vizeli
157e48f946 Initial Dbus support 2018-04-19 23:27:20 +02:00
Pascal Vizeli
9469a258ff Update Home-Assistant to version 0.67.1 2018-04-18 12:30:15 +02:00
Pascal Vizeli
fd0aeb5341 Update Home-Assistant to version 0.67.1 2018-04-18 12:29:52 +02:00
Pascal Vizeli
4d4a4ce043 Update Home-Assistant to version 0.67.1 2018-04-18 12:29:38 +02:00
Pascal Vizeli
678f77cc05 Pump version to 0.102 2018-04-14 10:50:42 +02:00
Pascal Vizeli
fda7c1cf11 Update Home-Assistant to version 0.67.0 2018-04-14 10:04:19 +02:00
Pascal Vizeli
364e5ec0b8 Update Home-Assistant to version 0.67.0 2018-04-14 10:03:51 +02:00
71 changed files with 1165 additions and 869 deletions

.gitattributes (vendored): 2 changed lines

@@ -1,2 +0,0 @@
# Ignore version on merge
version.json merge=ours

API.md: 93 changed lines

@@ -217,25 +217,36 @@ return:
### Host
- POST `/host/reload`
- POST `/host/shutdown`
- POST `/host/reboot`
- GET `/host/info`
```json
{
"type": "",
"version": "",
"last_version": "",
"features": ["shutdown", "reboot", "update", "hostname", "network_info", "network_control"],
"hostname": "",
"os": "",
"audio": {
"input": "0,0",
"output": "0,0"
}
"hostname": "hostname|null",
"features": ["shutdown", "reboot", "update", "hostname"],
"operating_system": "Hass.io-OS XY|Ubuntu 16.4|null",
"kernel": "4.15.7|null",
"chassis": "specific|null",
"type": "Hass.io-OS Type|null",
"deployment": "stable|beta|dev|null",
"version": "xy|null",
"last_version": "xy|null",
}
```
- POST `/host/options`
```json
{
"hostname": "",
}
```
- POST `/host/update`
Optional:
@@ -284,24 +295,6 @@ Optional:
}
```
### Network
- GET `/network/info`
```json
{
"hostname": ""
}
```
- POST `/network/options`
```json
{
"hostname": "",
}
```
### Home Assistant
- GET `/homeassistant/info`
@@ -310,6 +303,7 @@ Optional:
{
"version": "INSTALL_VERSION",
"last_version": "LAST_VERSION",
"machine": "Image machine type",
"image": "str",
"custom": "bool -> if custom image",
"boot": "bool",
@@ -613,46 +607,3 @@ This service perform a auto discovery to Home-Assistant.
```
- DEL `/services/mqtt`
## Host Control
Communicate over UNIX socket with a host daemon.
- commands
```
# info
-> {'type', 'version', 'last_version', 'features', 'hostname'}
# reboot
# shutdown
# host-update [v]
# hostname xy
# network info
-> {}
# network wlan ssd xy
# network wlan password xy
# network int ip xy
# network int netmask xy
# network int route xy
```
Features:
- shutdown
- reboot
- update
- hostname
- network_info
- network_control
Answer:
```
{}|OK|ERROR|WRONG
```
- {}: json
- OK: call was successfully
- ERROR: error on call
- WRONG: not supported
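
For orientation, here is a minimal client sketch against the reworked host endpoints. This is an editor's illustration, not part of the changeset: the `http://hassio/` base URL, the `HASSIO_TOKEN` environment variable and the `X-HASSIO-KEY` header are assumptions about how an add-on reaches the Supervisor, and `observer` is a made-up hostname. Note that hostname handling moves from the removed `/network/options` endpoint to `/host/options`.

```python
import asyncio
import os

import aiohttp


async def main():
    # Assumption: add-ons reach the Supervisor at http://hassio/ and
    # authenticate with their HASSIO_TOKEN via the X-HASSIO-KEY header.
    headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}
    async with aiohttp.ClientSession(headers=headers) as session:
        # New-style host information (hostname, operating_system, kernel, ...)
        async with session.get("http://hassio/host/info") as resp:
            print(await resp.json())

        # Hostname changes now go through /host/options
        async with session.post("http://hassio/host/options",
                                json={"hostname": "observer"}) as resp:
            print(await resp.json())


asyncio.get_event_loop().run_until_complete(main())
```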


@@ -9,7 +9,9 @@ RUN apk add --no-cache \
python3 \
git \
socat \
glib \
libstdc++ \
eudev-libs \
&& apk add --no-cache --virtual .build-dependencies \
make \
python3-dev \


@@ -5,7 +5,6 @@ import logging
import sys
import hassio.bootstrap as bootstrap
import hassio.core as core
_LOGGER = logging.getLogger(__name__)
@@ -34,14 +33,13 @@ if __name__ == "__main__":
_LOGGER.info("Initialize Hassio setup")
coresys = bootstrap.initialize_coresys(loop)
hassio = core.HassIO(coresys)
bootstrap.migrate_system_env(coresys)
_LOGGER.info("Setup HassIO")
loop.run_until_complete(hassio.setup())
loop.run_until_complete(coresys.core.setup())
loop.call_soon_threadsafe(loop.create_task, hassio.start())
loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
loop.call_soon_threadsafe(bootstrap.reg_signal, loop)
try:
@@ -49,7 +47,7 @@ if __name__ == "__main__":
loop.run_forever()
finally:
_LOGGER.info("Stopping HassIO")
loop.run_until_complete(hassio.stop())
loop.run_until_complete(coresys.core.stop())
executor.shutdown(wait=False)
loop.close()


@@ -5,7 +5,7 @@ import logging
from .addon import Addon
from .repository import Repository
from .data import AddonsData
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
@@ -56,7 +56,7 @@ class AddonManager(CoreSysAttributes):
# init hassio built-in repositories
repositories = \
set(self._config.addons_repositories) | BUILTIN_REPOSITORIES
set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES
# init custom repositories & load addons
await self.load_repositories(repositories)
@@ -66,7 +66,7 @@ class AddonManager(CoreSysAttributes):
tasks = [repository.update() for repository in
self.repositories_obj.values()]
if tasks:
await asyncio.wait(tasks, loop=self._loop)
await asyncio.wait(tasks)
# read data from repositories
self.data.reload()
@@ -90,16 +90,16 @@ class AddonManager(CoreSysAttributes):
# don't add built-in repository to config
if url not in BUILTIN_REPOSITORIES:
self._config.add_addon_repository(url)
self.sys_config.add_addon_repository(url)
tasks = [_add_repository(url) for url in new_rep - old_rep]
if tasks:
await asyncio.wait(tasks, loop=self._loop)
await asyncio.wait(tasks)
# del new repository
for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
self.repositories_obj.pop(url).remove()
self._config.drop_addon_repository(url)
self.sys_config.drop_addon_repository(url)
# update data
self.data.reload()
@@ -125,13 +125,13 @@ class AddonManager(CoreSysAttributes):
self.addons_obj[addon_slug] = addon
if tasks:
await asyncio.wait(tasks, loop=self._loop)
await asyncio.wait(tasks)
# remove
for addon_slug in del_addons:
self.addons_obj.pop(addon_slug)
async def auto_boot(self, stage):
async def boot(self, stage):
"""Boot addons with mode auto."""
tasks = []
for addon in self.addons_obj.values():
@@ -141,5 +141,18 @@ class AddonManager(CoreSysAttributes):
_LOGGER.info("Startup %s run %d addons", stage, len(tasks))
if tasks:
await asyncio.wait(tasks, loop=self._loop)
await asyncio.sleep(self._config.wait_boot, loop=self._loop)
await asyncio.wait(tasks)
await asyncio.sleep(self.sys_config.wait_boot)
async def shutdown(self, stage):
"""Shutdown addons."""
tasks = []
for addon in self.addons_obj.values():
if addon.is_installed and \
await addon.state() == STATE_STARTED and \
addon.startup == stage:
tasks.append(addon.stop())
_LOGGER.info("Shutdown %s stop %d addons", stage, len(tasks))
if tasks:
await asyncio.wait(tasks)
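
A note on how `boot()` and its new `shutdown()` counterpart are likely driven (an editor's sketch, not code from this diff): the startup stage constants from `const.py` suggest the core walks the stages in order at startup and, per the "Follow the correct shutdown flow" commit, in reverse at shutdown. A rough sketch under that assumption:

```python
from hassio.const import STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION

STAGES = (STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION)


async def start_all(addons):
    """Boot auto-start add-ons stage by stage: system first, applications last."""
    for stage in STAGES:
        await addons.boot(stage)


async def stop_all(addons):
    """Stop running add-ons in reverse order: applications first, system last."""
    for stage in reversed(STAGES):
        await addons.shutdown(stage)
```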


@@ -66,7 +66,7 @@ class Addon(CoreSysAttributes):
@property
def _data(self):
"""Return addons data storage."""
return self._addons.data
return self.sys_addons.data
@property
def is_installed(self):
@@ -376,7 +376,7 @@ class Addon(CoreSysAttributes):
if self.is_installed and \
ATTR_AUDIO_OUTPUT in self._data.user[self._id]:
return self._data.user[self._id][ATTR_AUDIO_OUTPUT]
return self._alsa.default.output
return self.sys_host.alsa.default.output
@audio_output.setter
def audio_output(self, value):
@@ -394,7 +394,7 @@ class Addon(CoreSysAttributes):
if self.is_installed and ATTR_AUDIO_INPUT in self._data.user[self._id]:
return self._data.user[self._id][ATTR_AUDIO_INPUT]
return self._alsa.default.input
return self.sys_host.alsa.default.input
@audio_input.setter
def audio_input(self, value):
@@ -436,11 +436,11 @@ class Addon(CoreSysAttributes):
# Repository with dockerhub images
if ATTR_IMAGE in addon_data:
return addon_data[ATTR_IMAGE].format(arch=self._arch)
return addon_data[ATTR_IMAGE].format(arch=self.sys_arch)
# local build
return "{}/{}-addon-{}".format(
addon_data[ATTR_REPOSITORY], self._arch,
addon_data[ATTR_REPOSITORY], self.sys_arch,
addon_data[ATTR_SLUG])
@property
@@ -461,12 +461,12 @@ class Addon(CoreSysAttributes):
@property
def path_data(self):
"""Return addon data path inside supervisor."""
return Path(self._config.path_addons_data, self._id)
return Path(self.sys_config.path_addons_data, self._id)
@property
def path_extern_data(self):
"""Return addon data path external for docker."""
return PurePath(self._config.path_extern_addons_data, self._id)
return PurePath(self.sys_config.path_extern_addons_data, self._id)
@property
def path_options(self):
@@ -506,16 +506,16 @@ class Addon(CoreSysAttributes):
@property
def path_asound(self):
"""Return path to asound config."""
return Path(self._config.path_tmp, f"{self.slug}_asound")
return Path(self.sys_config.path_tmp, f"{self.slug}_asound")
@property
def path_extern_asound(self):
"""Return path to asound config for docker."""
return Path(self._config.path_extern_tmp, f"{self.slug}_asound")
return Path(self.sys_config.path_extern_tmp, f"{self.slug}_asound")
def save_data(self):
"""Save data of addon."""
self._addons.data.save_data()
self.sys_addons.data.save_data()
def write_options(self):
"""Return True if addon options is written to data."""
@@ -537,7 +537,7 @@ class Addon(CoreSysAttributes):
def write_asound(self):
"""Write asound config to file and return True on success."""
asound_config = self._alsa.asound(
asound_config = self.sys_host.alsa.asound(
alsa_input=self.audio_input, alsa_output=self.audio_output)
try:
@@ -590,9 +590,9 @@ class Addon(CoreSysAttributes):
async def install(self):
"""Install a addon."""
if self._arch not in self.supported_arch:
if self.sys_arch not in self.supported_arch:
_LOGGER.error(
"Addon %s not supported on %s", self._id, self._arch)
"Addon %s not supported on %s", self._id, self.sys_arch)
return False
if self.is_installed:
@@ -735,7 +735,7 @@ class Addon(CoreSysAttributes):
@check_installed
async def snapshot(self, tar_file):
"""Snapshot a state of a addon."""
with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
# store local image
if self.need_build and not await \
self.instance.export_image(Path(temp, "image.tar")):
@@ -764,7 +764,7 @@ class Addon(CoreSysAttributes):
try:
_LOGGER.info("Build snapshot for addon %s", self._id)
await self._loop.run_in_executor(None, _write_tarfile)
await self.sys_run_in_executor(_write_tarfile)
except (tarfile.TarError, OSError) as err:
_LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
return False
@@ -774,7 +774,7 @@ class Addon(CoreSysAttributes):
async def restore(self, tar_file):
"""Restore a state of a addon."""
with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
# extract snapshot
def _extract_tarfile():
"""Extract tar snapshot."""
@@ -782,7 +782,7 @@ class Addon(CoreSysAttributes):
snapshot.extractall(path=Path(temp))
try:
await self._loop.run_in_executor(None, _extract_tarfile)
await self.sys_run_in_executor(_extract_tarfile)
except tarfile.TarError as err:
_LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
return False
@@ -828,7 +828,7 @@ class Addon(CoreSysAttributes):
try:
_LOGGER.info("Restore data for addon %s", self._id)
await self._loop.run_in_executor(None, _restore_data)
await self.sys_run_in_executor(_restore_data)
except shutil.Error as err:
_LOGGER.error("Can't restore origin data: %s", err)
return False


@@ -25,13 +25,13 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
@property
def addon(self):
"""Return addon of build data."""
return self._addons.get(self._id)
return self.sys_addons.get(self._id)
@property
def base_image(self):
"""Base images for this addon."""
return self._data[ATTR_BUILD_FROM].get(
self._arch, BASE_IMAGE[self._arch])
self.sys_arch, BASE_IMAGE[self.sys_arch])
@property
def squash(self):
@@ -53,7 +53,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
'squash': self.squash,
'labels': {
'io.hass.version': version,
'io.hass.arch': self._arch,
'io.hass.arch': self.sys_arch,
'io.hass.type': META_ADDON,
'io.hass.name': self._fix_label('name'),
'io.hass.description': self._fix_label('description'),
@@ -61,7 +61,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
'buildargs': {
'BUILD_FROM': self.base_image,
'BUILD_VERSION': version,
'BUILD_ARCH': self._arch,
'BUILD_ARCH': self.sys_arch,
**self.additional_args,
}
}


@@ -56,17 +56,17 @@ class AddonsData(JsonConfig, CoreSysAttributes):
# read core repository
self._read_addons_folder(
self._config.path_addons_core, REPOSITORY_CORE)
self.sys_config.path_addons_core, REPOSITORY_CORE)
# read local repository
self._read_addons_folder(
self._config.path_addons_local, REPOSITORY_LOCAL)
self.sys_config.path_addons_local, REPOSITORY_LOCAL)
# add built-in repositories information
self._set_builtin_repositories()
# read custom git repositories
for repository_element in self._config.path_addons_git.iterdir():
for repository_element in self.sys_config.path_addons_git.iterdir():
if repository_element.is_dir():
self._read_git_repository(repository_element)


@@ -45,7 +45,7 @@ class GitRepo(CoreSysAttributes):
async with self.lock:
try:
_LOGGER.info("Load addon %s repository", self.path)
self.repo = await self._loop.run_in_executor(
self.repo = await self.sys_loop.run_in_executor(
None, git.Repo, str(self.path))
except (git.InvalidGitRepositoryError, git.NoSuchPathError,
@@ -68,7 +68,7 @@ class GitRepo(CoreSysAttributes):
try:
_LOGGER.info("Clone addon %s repository", self.url)
self.repo = await self._loop.run_in_executor(None, ft.partial(
self.repo = await self.sys_run_in_executor(ft.partial(
git.Repo.clone_from, self.url, str(self.path),
**git_args
))
@@ -89,7 +89,7 @@ class GitRepo(CoreSysAttributes):
async with self.lock:
try:
_LOGGER.info("Pull addon %s repository", self.url)
await self._loop.run_in_executor(
await self.sys_loop.run_in_executor(
None, self.repo.remotes.origin.pull)
except (git.InvalidGitRepositoryError, git.NoSuchPathError,


@@ -30,7 +30,7 @@ class Repository(CoreSysAttributes):
@property
def _mesh(self):
"""Return data struct repository."""
return self._addons.data.repositories.get(self._id, {})
return self.sys_addons.data.repositories.get(self._id, {})
@property
def slug(self):


@@ -9,7 +9,6 @@ from .discovery import APIDiscovery
from .homeassistant import APIHomeAssistant
from .hardware import APIHardware
from .host import APIHost
from .network import APINetwork
from .proxy import APIProxy
from .supervisor import APISupervisor
from .snapshots import APISnapshots
@@ -28,7 +27,7 @@ class RestAPI(CoreSysAttributes):
self.coresys = coresys
self.security = SecurityMiddleware(coresys)
self.webapp = web.Application(
middlewares=[self.security.token_validation], loop=self._loop)
middlewares=[self.security.token_validation], loop=coresys.loop)
# service stuff
self._handler = None
@@ -44,7 +43,6 @@ class RestAPI(CoreSysAttributes):
self._register_panel()
self._register_addons()
self._register_snapshots()
self._register_network()
self._register_discovery()
self._register_services()
@@ -61,16 +59,6 @@ class RestAPI(CoreSysAttributes):
web.post('/host/reload', api_host.reload),
])
def _register_network(self):
"""Register network function."""
api_net = APINetwork()
api_net.coresys = self.coresys
self.webapp.add_routes([
web.get('/network/info', api_net.info),
web.post('/network/options', api_net.options),
])
def _register_hardware(self):
"""Register hardware function."""
api_hardware = APIHardware()
@@ -197,10 +185,11 @@ class RestAPI(CoreSysAttributes):
def _register_panel(self):
"""Register panel for homeassistant."""
def create_response(build_type):
panel_dir = Path(__file__).parent.joinpath("panel")
def create_response(panel_file):
"""Create a function to generate a response."""
path = Path(__file__).parent.joinpath(
f"panel/{build_type}.html")
path = panel_dir.joinpath(f"{panel_file!s}.html")
return lambda request: web.FileResponse(path)
# This route is for backwards compatibility with HA < 0.58
@@ -213,18 +202,21 @@ class RestAPI(CoreSysAttributes):
web.get('/panel_latest', create_response('hassio-main-latest')),
])
# This route is for HA > 0.61
# This route is for backwards compatibility with HA 0.62 - 0.70
self.webapp.add_routes([
web.get('/app-es5/index.html', create_response('index')),
web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
])
# This route is for HA > 0.70
self.webapp.add_routes([web.static('/app', panel_dir)])
async def start(self):
"""Run rest api webserver."""
self._handler = self.webapp.make_handler(loop=self._loop)
self._handler = self.webapp.make_handler()
try:
self.server = await self._loop.create_server(
self.server = await self.sys_loop.create_server(
self._handler, "0.0.0.0", "80")
except OSError as err:
_LOGGER.fatal(


@@ -43,7 +43,7 @@ class APIAddons(CoreSysAttributes):
def _extract_addon(self, request, check_installed=True):
"""Return addon and if not exists trow a exception."""
addon = self._addons.get(request.match_info.get('addon'))
addon = self.sys_addons.get(request.match_info.get('addon'))
if not addon:
raise RuntimeError("Addon not exists")
@@ -64,7 +64,7 @@ class APIAddons(CoreSysAttributes):
async def list(self, request):
"""Return all addons / repositories ."""
data_addons = []
for addon in self._addons.list_addons:
for addon in self.sys_addons.list_addons:
data_addons.append({
ATTR_NAME: addon.name,
ATTR_SLUG: addon.slug,
@@ -81,7 +81,7 @@ class APIAddons(CoreSysAttributes):
})
data_repositories = []
for repository in self._addons.list_repositories:
for repository in self.sys_addons.list_repositories:
data_repositories.append({
ATTR_SLUG: repository.slug,
ATTR_NAME: repository.name,
@@ -98,7 +98,7 @@ class APIAddons(CoreSysAttributes):
@api_process
async def reload(self, request):
"""Reload all addons data."""
await asyncio.shield(self._addons.reload(), loop=self._loop)
await asyncio.shield(self.sys_addons.reload())
return True
@api_process
@@ -194,13 +194,13 @@ class APIAddons(CoreSysAttributes):
def install(self, request):
"""Install addon."""
addon = self._extract_addon(request, check_installed=False)
return asyncio.shield(addon.install(), loop=self._loop)
return asyncio.shield(addon.install())
@api_process
def uninstall(self, request):
"""Uninstall addon."""
addon = self._extract_addon(request)
return asyncio.shield(addon.uninstall(), loop=self._loop)
return asyncio.shield(addon.uninstall())
@api_process
def start(self, request):
@@ -214,13 +214,13 @@ class APIAddons(CoreSysAttributes):
except vol.Invalid as ex:
raise RuntimeError(humanize_error(options, ex)) from None
return asyncio.shield(addon.start(), loop=self._loop)
return asyncio.shield(addon.start())
@api_process
def stop(self, request):
"""Stop addon."""
addon = self._extract_addon(request)
return asyncio.shield(addon.stop(), loop=self._loop)
return asyncio.shield(addon.stop())
@api_process
def update(self, request):
@@ -230,13 +230,13 @@ class APIAddons(CoreSysAttributes):
if addon.last_version == addon.version_installed:
raise RuntimeError("No update available!")
return asyncio.shield(addon.update(), loop=self._loop)
return asyncio.shield(addon.update())
@api_process
def restart(self, request):
"""Restart addon."""
addon = self._extract_addon(request)
return asyncio.shield(addon.restart(), loop=self._loop)
return asyncio.shield(addon.restart())
@api_process
def rebuild(self, request):
@@ -245,7 +245,7 @@ class APIAddons(CoreSysAttributes):
if not addon.need_build:
raise RuntimeError("Only local build addons are supported")
return asyncio.shield(addon.rebuild(), loop=self._loop)
return asyncio.shield(addon.rebuild())
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
@@ -291,4 +291,4 @@ class APIAddons(CoreSysAttributes):
raise RuntimeError("STDIN not supported by addon")
data = await request.read()
return await asyncio.shield(addon.write_stdin(data), loop=self._loop)
return await asyncio.shield(addon.write_stdin(data))


@@ -21,7 +21,7 @@ class APIDiscovery(CoreSysAttributes):
def _extract_message(self, request):
"""Extract discovery message from URL."""
message = self._services.discovery.get(request.match_info.get('uuid'))
message = self.sys_discovery.get(request.match_info.get('uuid'))
if not message:
raise RuntimeError("Discovery message not found")
return message
@@ -30,7 +30,7 @@ class APIDiscovery(CoreSysAttributes):
async def list(self, request):
"""Show register services."""
discovery = []
for message in self._services.discovery.list_messages:
for message in self.sys_discovery.list_messages:
discovery.append({
ATTR_PROVIDER: message.provider,
ATTR_UUID: message.uuid,
@@ -45,7 +45,7 @@ class APIDiscovery(CoreSysAttributes):
async def set_discovery(self, request):
"""Write data into a discovery pipeline."""
body = await api_validate(SCHEMA_DISCOVERY, request)
message = self._services.discovery.send(
message = self.sys_discovery.send(
provider=request[REQUEST_FROM], **body)
return {ATTR_UUID: message.uuid}
@@ -68,5 +68,5 @@ class APIDiscovery(CoreSysAttributes):
"""Delete data into a discovery message."""
message = self._extract_message(request)
self._services.discovery.remove(message)
self.sys_discovery.remove(message)
return True


@@ -16,11 +16,11 @@ class APIHardware(CoreSysAttributes):
async def info(self, request):
"""Show hardware info."""
return {
ATTR_SERIAL: list(self._hardware.serial_devices),
ATTR_INPUT: list(self._hardware.input_devices),
ATTR_DISK: list(self._hardware.disk_devices),
ATTR_GPIO: list(self._hardware.gpio_devices),
ATTR_AUDIO: self._hardware.audio_devices,
ATTR_SERIAL: list(self.sys_hardware.serial_devices),
ATTR_INPUT: list(self.sys_hardware.input_devices),
ATTR_DISK: list(self.sys_hardware.disk_devices),
ATTR_GPIO: list(self.sys_hardware.gpio_devices),
ATTR_AUDIO: self.sys_hardware.audio_devices,
}
@api_process
@@ -28,7 +28,7 @@ class APIHardware(CoreSysAttributes):
"""Show ALSA audio devices."""
return {
ATTR_AUDIO: {
ATTR_INPUT: self._alsa.input_devices,
ATTR_OUTPUT: self._alsa.output_devices,
ATTR_INPUT: self.sys_host.alsa.input_devices,
ATTR_OUTPUT: self.sys_host.alsa.output_devices,
}
}


@@ -9,7 +9,8 @@ from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_IMAGE, ATTR_CUSTOM, ATTR_BOOT,
ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG, ATTR_CPU_PERCENT,
ATTR_MEMORY_USAGE, ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX,
ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_WAIT_BOOT, CONTENT_TYPE_BINARY)
ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_WAIT_BOOT, ATTR_MACHINE,
CONTENT_TYPE_BINARY)
from ..coresys import CoreSysAttributes
from ..validate import NETWORK_PORT, DOCKER_IMAGE
@@ -43,15 +44,16 @@ class APIHomeAssistant(CoreSysAttributes):
async def info(self, request):
"""Return host information."""
return {
ATTR_VERSION: self._homeassistant.version,
ATTR_LAST_VERSION: self._homeassistant.last_version,
ATTR_IMAGE: self._homeassistant.image,
ATTR_CUSTOM: self._homeassistant.is_custom_image,
ATTR_BOOT: self._homeassistant.boot,
ATTR_PORT: self._homeassistant.api_port,
ATTR_SSL: self._homeassistant.api_ssl,
ATTR_WATCHDOG: self._homeassistant.watchdog,
ATTR_WAIT_BOOT: self._homeassistant.wait_boot,
ATTR_VERSION: self.sys_homeassistant.version,
ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
ATTR_MACHINE: self.sys_homeassistant.machine,
ATTR_IMAGE: self.sys_homeassistant.image,
ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
ATTR_BOOT: self.sys_homeassistant.boot,
ATTR_PORT: self.sys_homeassistant.api_port,
ATTR_SSL: self.sys_homeassistant.api_ssl,
ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
}
@api_process
@@ -60,34 +62,34 @@ class APIHomeAssistant(CoreSysAttributes):
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
self._homeassistant.image = body[ATTR_IMAGE]
self._homeassistant.last_version = body[ATTR_LAST_VERSION]
self.sys_homeassistant.image = body[ATTR_IMAGE]
self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION]
if ATTR_BOOT in body:
self._homeassistant.boot = body[ATTR_BOOT]
self.sys_homeassistant.boot = body[ATTR_BOOT]
if ATTR_PORT in body:
self._homeassistant.api_port = body[ATTR_PORT]
self.sys_homeassistant.api_port = body[ATTR_PORT]
if ATTR_PASSWORD in body:
self._homeassistant.api_password = body[ATTR_PASSWORD]
self.sys_homeassistant.api_password = body[ATTR_PASSWORD]
if ATTR_SSL in body:
self._homeassistant.api_ssl = body[ATTR_SSL]
self.sys_homeassistant.api_ssl = body[ATTR_SSL]
if ATTR_WATCHDOG in body:
self._homeassistant.watchdog = body[ATTR_WATCHDOG]
self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]
if ATTR_WAIT_BOOT in body:
self._homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
self._homeassistant.save_data()
self.sys_homeassistant.save_data()
return True
@api_process
async def stats(self, request):
"""Return resource information."""
stats = await self._homeassistant.stats()
stats = await self.sys_homeassistant.stats()
if not stats:
raise RuntimeError("No stats available")
@@ -105,38 +107,38 @@ class APIHomeAssistant(CoreSysAttributes):
async def update(self, request):
"""Update homeassistant."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self._homeassistant.last_version)
version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version)
if version == self._homeassistant.version:
if version == self.sys_homeassistant.version:
raise RuntimeError("Version {} is already in use".format(version))
return await asyncio.shield(
self._homeassistant.update(version), loop=self._loop)
self.sys_homeassistant.update(version))
@api_process
def stop(self, request):
"""Stop homeassistant."""
return asyncio.shield(self._homeassistant.stop(), loop=self._loop)
return asyncio.shield(self.sys_homeassistant.stop())
@api_process
def start(self, request):
"""Start homeassistant."""
return asyncio.shield(self._homeassistant.start(), loop=self._loop)
return asyncio.shield(self.sys_homeassistant.start())
@api_process
def restart(self, request):
"""Restart homeassistant."""
return asyncio.shield(self._homeassistant.restart(), loop=self._loop)
return asyncio.shield(self.sys_homeassistant.restart())
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
"""Return homeassistant docker logs."""
return self._homeassistant.logs()
return self.sys_homeassistant.logs()
@api_process
async def check(self, request):
"""Check config of homeassistant."""
result = await self._homeassistant.check_config()
result = await self.sys_homeassistant.check_config()
if not result.valid:
raise RuntimeError(result.log)


@@ -4,10 +4,10 @@ import logging
import voluptuous as vol
from .utils import api_process_hostcontrol, api_process, api_validate
from .utils import api_process, api_validate
from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
ATTR_OS)
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_HOSTNAME, ATTR_FEATURES, ATTR_KERNEL,
ATTR_TYPE, ATTR_OPERATING_SYSTEM, ATTR_CHASSIS, ATTR_DEPLOYMENT)
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
@@ -16,6 +16,10 @@ SCHEMA_VERSION = vol.Schema({
vol.Optional(ATTR_VERSION): vol.Coerce(str),
})
SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
})
class APIHost(CoreSysAttributes):
"""Handle rest api for host functions."""
@@ -24,38 +28,45 @@ class APIHost(CoreSysAttributes):
async def info(self, request):
"""Return host information."""
return {
ATTR_TYPE: self._host_control.type,
ATTR_VERSION: self._host_control.version,
ATTR_LAST_VERSION: self._host_control.last_version,
ATTR_FEATURES: self._host_control.features,
ATTR_HOSTNAME: self._host_control.hostname,
ATTR_OS: self._host_control.os_info,
ATTR_CHASSIS: self.sys_host.info.chassis,
ATTR_VERSION: None,
ATTR_LAST_VERSION: None,
ATTR_TYPE: None,
ATTR_FEATURES: self.sys_host.supperted_features,
ATTR_HOSTNAME: self.sys_host.info.hostname,
ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
ATTR_DEPLOYMENT: self.sys_host.info.deployment,
ATTR_KERNEL: self.sys_host.info.kernel,
}
@api_process_hostcontrol
@api_process
async def options(self, request):
"""Edit host settings."""
body = await api_validate(SCHEMA_OPTIONS, request)
# hostname
if ATTR_HOSTNAME in body:
await asyncio.shield(
self.sys_host.control.set_hostname(body[ATTR_HOSTNAME]))
@api_process
def reboot(self, request):
"""Reboot host."""
return self._host_control.reboot()
return asyncio.shield(self.sys_host.control.reboot())
@api_process_hostcontrol
@api_process
def shutdown(self, request):
"""Poweroff host."""
return self._host_control.shutdown()
return asyncio.shield(self.sys_host.control.shutdown())
@api_process_hostcontrol
async def reload(self, request):
@api_process
def reload(self, request):
"""Reload host data."""
await self._host_control.load()
return True
return asyncio.shield(self.sys_host.reload())
@api_process_hostcontrol
@api_process
async def update(self, request):
"""Update host OS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self._host_control.last_version)
if version == self._host_control.version:
raise RuntimeError(f"Version {version} is already in use")
return await asyncio.shield(
self._host_control.update(version=version), loop=self._loop)
pass
# body = await api_validate(SCHEMA_VERSION, request)
# version = body.get(ATTR_VERSION, self.sys_host.last_version)


@@ -1,38 +0,0 @@
"""Init file for HassIO network rest api."""
import logging
import voluptuous as vol
from .utils import api_process, api_process_hostcontrol, api_validate
from ..const import ATTR_HOSTNAME
from ..coresys import CoreSysAttributes
_LOGGER = logging.getLogger(__name__)
SCHEMA_OPTIONS = vol.Schema({
vol.Optional(ATTR_HOSTNAME): vol.Coerce(str),
})
class APINetwork(CoreSysAttributes):
"""Handle rest api for network functions."""
@api_process
async def info(self, request):
"""Show network settings."""
return {
ATTR_HOSTNAME: self._host_control.hostname,
}
@api_process_hostcontrol
async def options(self, request):
"""Edit network settings."""
body = await api_validate(SCHEMA_OPTIONS, request)
# hostname
if ATTR_HOSTNAME in body:
if self._host_control.hostname != body[ATTR_HOSTNAME]:
await self._host_control.set_hostname(body[ATTR_HOSTNAME])
return True

File diff suppressed because one or more lines are too long

Binary file not shown.


@@ -11,27 +11,28 @@
padding: 0;
}
</style>
<script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
</head>
<body>
<hassio-app></hassio-app>
<script>
function addScript(src) {
var e = document.createElement('script');
e.src = src;
document.head.appendChild(e);
}
if (!window.parent.HASS_DEV) {
addScript('/frontend_es5/custom-elements-es5-adapter.js');
}
var webComponentsSupported = (
'customElements' in window &&
'import' in document.createElement('link') &&
'content' in document.createElement('template'));
if (!webComponentsSupported) {
addScript('/static/webcomponents-lite.js');
}
function addScript(src) {
var e = document.createElement('script');
e.src = src;
document.write(e.outerHTML);
}
var webComponentsSupported = (
'customElements' in window &&
'import' in document.createElement('link') &&
'content' in document.createElement('template'));
if (!webComponentsSupported) {
addScript('/static/webcomponents-bundle.js');
}
</script>
<!--
Disabled while we make Home Assistant able to serve the right files.
<script src="./app.js"></script>
-->
<link rel='import' href='./hassio-app.html'>
<link rel='import' href='/static/mdi.html' async>
</body>
</html>

Binary file not shown.


@@ -20,7 +20,7 @@ class APIProxy(CoreSysAttributes):
def _check_access(self, request):
"""Check the Hass.io token."""
hassio_token = request.headers.get(HEADER_HA_ACCESS)
addon = self._addons.from_uuid(hassio_token)
addon = self.sys_addons.from_uuid(hassio_token)
if not addon:
_LOGGER.warning("Unknown Home-Assistant API access!")
@@ -29,24 +29,26 @@ class APIProxy(CoreSysAttributes):
async def _api_client(self, request, path, timeout=300):
"""Return a client request with proxy origin for Home-Assistant."""
url = f"{self._homeassistant.api_url}/api/{path}"
url = f"{self.sys_homeassistant.api_url}/api/{path}"
try:
data = None
headers = {}
method = getattr(self._websession_ssl, request.method.lower())
method = getattr(self.sys_websession_ssl, request.method.lower())
params = request.query or None
# read data
with async_timeout.timeout(30, loop=self._loop):
with async_timeout.timeout(30):
data = await request.read()
if data:
headers.update({CONTENT_TYPE: request.content_type})
# need api password?
if self._homeassistant.api_password:
headers = {HEADER_HA_ACCESS: self._homeassistant.api_password}
if self.sys_homeassistant.api_password:
headers = {
HEADER_HA_ACCESS: self.sys_homeassistant.api_password,
}
# reset headers
if not headers:
@@ -114,10 +116,10 @@ class APIProxy(CoreSysAttributes):
async def _websocket_client(self):
"""Initialize a websocket api connection."""
url = f"{self._homeassistant.api_url}/api/websocket"
url = f"{self.sys_homeassistant.api_url}/api/websocket"
try:
client = await self._websession_ssl.ws_connect(
client = await self.sys_websession_ssl.ws_connect(
url, heartbeat=60, verify_ssl=False)
# handle authentication
@@ -128,7 +130,7 @@ class APIProxy(CoreSysAttributes):
elif data.get('type') == 'auth_required':
await client.send_json({
'type': 'auth',
'api_password': self._homeassistant.api_password,
'api_password': self.sys_homeassistant.api_password,
})
_LOGGER.error("Authentication to Home-Assistant websocket")
@@ -150,13 +152,13 @@ class APIProxy(CoreSysAttributes):
try:
await server.send_json({
'type': 'auth_required',
'ha_version': self._homeassistant.version,
'ha_version': self.sys_homeassistant.version,
})
# Check API access
response = await server.receive_json()
hassio_token = response.get('api_password')
addon = self._addons.from_uuid(hassio_token)
addon = self.sys_addons.from_uuid(hassio_token)
if not addon:
_LOGGER.warning("Unauthorized websocket access!")
@@ -165,7 +167,7 @@ class APIProxy(CoreSysAttributes):
await server.send_json({
'type': 'auth_ok',
'ha_version': self._homeassistant.version,
'ha_version': self.sys_homeassistant.version,
})
except (RuntimeError, ValueError) as err:
_LOGGER.error("Can't initialize handshake: %s", err)
@@ -180,16 +182,16 @@ class APIProxy(CoreSysAttributes):
server_read = None
while not server.closed and not client.closed:
if not client_read:
client_read = asyncio.ensure_future(
client.receive_str(), loop=self._loop)
client_read = self.sys_create_task(
client.receive_str())
if not server_read:
server_read = asyncio.ensure_future(
server.receive_str(), loop=self._loop)
server_read = self.sys_create_task(
server.receive_str())
# wait until data need to be processed
await asyncio.wait(
[client_read, server_read],
loop=self._loop, return_when=asyncio.FIRST_COMPLETED
return_when=asyncio.FIRST_COMPLETED
)
# server


@@ -35,20 +35,19 @@ class SecurityMiddleware(CoreSysAttributes):
_LOGGER.debug("Passthrough %s", request.path)
return await handler(request)
# Need to be removed later
# Unknown API access
if not hassio_token:
_LOGGER.warning("Invalid token for access %s", request.path)
request[REQUEST_FROM] = 'UNKNOWN'
return await handler(request)
raise HTTPUnauthorized()
# Home-Assistant
if hassio_token == self._homeassistant.uuid:
if hassio_token == self.sys_homeassistant.uuid:
_LOGGER.debug("%s access from Home-Assistant", request.path)
request[REQUEST_FROM] = 'homeassistant'
return await handler(request)
# Add-on
addon = self._addons.from_uuid(hassio_token)
addon = self.sys_addons.from_uuid(hassio_token)
if addon:
_LOGGER.info("%s access from %s", request.path, addon.slug)
request[REQUEST_FROM] = addon.slug


@@ -11,7 +11,7 @@ class APIServices(CoreSysAttributes):
def _extract_service(self, request):
"""Return service and if not exists trow a exception."""
service = self._services.get(request.match_info.get('service'))
service = self.sys_services.get(request.match_info.get('service'))
if not service:
raise RuntimeError("Service not exists")
@@ -21,7 +21,7 @@ class APIServices(CoreSysAttributes):
async def list(self, request):
"""Show register services."""
services = []
for service in self._services.list_services:
for service in self.sys_services.list_services:
services.append({
ATTR_SLUG: service.slug,
ATTR_AVAILABLE: service.enabled,


@@ -50,7 +50,7 @@ class APISnapshots(CoreSysAttributes):
def _extract_snapshot(self, request):
"""Return addon and if not exists trow a exception."""
snapshot = self._snapshots.get(request.match_info.get('snapshot'))
snapshot = self.sys_snapshots.get(request.match_info.get('snapshot'))
if not snapshot:
raise RuntimeError("Snapshot not exists")
return snapshot
@@ -59,7 +59,7 @@ class APISnapshots(CoreSysAttributes):
async def list(self, request):
"""Return snapshot list."""
data_snapshots = []
for snapshot in self._snapshots.list_snapshots:
for snapshot in self.sys_snapshots.list_snapshots:
data_snapshots.append({
ATTR_SLUG: snapshot.slug,
ATTR_NAME: snapshot.name,
@@ -75,7 +75,7 @@ class APISnapshots(CoreSysAttributes):
@api_process
async def reload(self, request):
"""Reload snapshot list."""
await asyncio.shield(self._snapshots.reload(), loop=self._loop)
await asyncio.shield(self.sys_snapshots.reload())
return True
@api_process
@@ -110,7 +110,7 @@ class APISnapshots(CoreSysAttributes):
"""Full-Snapshot a snapshot."""
body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
snapshot = await asyncio.shield(
self._snapshots.do_snapshot_full(**body), loop=self._loop)
self.sys_snapshots.do_snapshot_full(**body))
if snapshot:
return {ATTR_SLUG: snapshot.slug}
@@ -121,7 +121,7 @@ class APISnapshots(CoreSysAttributes):
"""Partial-Snapshot a snapshot."""
body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
snapshot = await asyncio.shield(
self._snapshots.do_snapshot_partial(**body), loop=self._loop)
self.sys_snapshots.do_snapshot_partial(**body))
if snapshot:
return {ATTR_SLUG: snapshot.slug}
@@ -134,9 +134,7 @@ class APISnapshots(CoreSysAttributes):
body = await api_validate(SCHEMA_RESTORE_FULL, request)
return await asyncio.shield(
self._snapshots.do_restore_full(snapshot, **body),
loop=self._loop
)
self.sys_snapshots.do_restore_full(snapshot, **body))
@api_process
async def restore_partial(self, request):
@@ -145,15 +143,13 @@ class APISnapshots(CoreSysAttributes):
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
return await asyncio.shield(
self._snapshots.do_restore_partial(snapshot, **body),
loop=self._loop
)
self.sys_snapshots.do_restore_partial(snapshot, **body))
@api_process
async def remove(self, request):
"""Remove a snapshot."""
snapshot = self._extract_snapshot(request)
return self._snapshots.remove(snapshot)
return self.sys_snapshots.remove(snapshot)
async def download(self, request):
"""Download a snapshot file."""
@@ -167,7 +163,7 @@ class APISnapshots(CoreSysAttributes):
@api_process
async def upload(self, request):
"""Upload a snapshot file."""
with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp_dir:
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
tar_file = Path(temp_dir, f"snapshot.tar")
try:
@@ -183,7 +179,7 @@ class APISnapshots(CoreSysAttributes):
return False
snapshot = await asyncio.shield(
self._snapshots.import_snapshot(tar_file), loop=self._loop)
self.sys_snapshots.import_snapshot(tar_file))
if snapshot:
return {ATTR_SLUG: snapshot.slug}


@@ -41,7 +41,7 @@ class APISupervisor(CoreSysAttributes):
async def info(self, request):
"""Return host information."""
list_addons = []
for addon in self._addons.list_addons:
for addon in self.sys_addons.list_addons:
if addon.is_installed:
list_addons.append({
ATTR_NAME: addon.name,
@@ -57,13 +57,13 @@ class APISupervisor(CoreSysAttributes):
return {
ATTR_VERSION: HASSIO_VERSION,
ATTR_LAST_VERSION: self._updater.version_hassio,
ATTR_CHANNEL: self._updater.channel,
ATTR_ARCH: self._arch,
ATTR_WAIT_BOOT: self._config.wait_boot,
ATTR_TIMEZONE: self._config.timezone,
ATTR_LAST_VERSION: self.sys_updater.version_hassio,
ATTR_CHANNEL: self.sys_updater.channel,
ATTR_ARCH: self.sys_arch,
ATTR_WAIT_BOOT: self.sys_config.wait_boot,
ATTR_TIMEZONE: self.sys_config.timezone,
ATTR_ADDONS: list_addons,
ATTR_ADDONS_REPOSITORIES: self._config.addons_repositories,
ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories,
}
@api_process
@@ -72,26 +72,26 @@ class APISupervisor(CoreSysAttributes):
body = await api_validate(SCHEMA_OPTIONS, request)
if ATTR_CHANNEL in body:
self._updater.channel = body[ATTR_CHANNEL]
self.sys_updater.channel = body[ATTR_CHANNEL]
if ATTR_TIMEZONE in body:
self._config.timezone = body[ATTR_TIMEZONE]
self.sys_config.timezone = body[ATTR_TIMEZONE]
if ATTR_WAIT_BOOT in body:
self._config.wait_boot = body[ATTR_WAIT_BOOT]
self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]
if ATTR_ADDONS_REPOSITORIES in body:
new = set(body[ATTR_ADDONS_REPOSITORIES])
await asyncio.shield(self._addons.load_repositories(new))
await asyncio.shield(self.sys_addons.load_repositories(new))
self._updater.save_data()
self._config.save_data()
self.sys_updater.save_data()
self.sys_config.save_data()
return True
@api_process
async def stats(self, request):
"""Return resource information."""
stats = await self._supervisor.stats()
stats = await self.sys_supervisor.stats()
if not stats:
raise RuntimeError("No stats available")
@@ -109,22 +109,22 @@ class APISupervisor(CoreSysAttributes):
async def update(self, request):
"""Update supervisor OS."""
body = await api_validate(SCHEMA_VERSION, request)
version = body.get(ATTR_VERSION, self._updater.version_hassio)
version = body.get(ATTR_VERSION, self.sys_updater.version_hassio)
if version == self._supervisor.version:
if version == self.sys_supervisor.version:
raise RuntimeError("Version {} is already in use".format(version))
return await asyncio.shield(
self._supervisor.update(version), loop=self._loop)
self.sys_supervisor.update(version))
@api_process
async def reload(self, request):
"""Reload addons, config ect."""
tasks = [
self._updater.reload(),
self.sys_updater.reload(),
]
results, _ = await asyncio.shield(
asyncio.wait(tasks, loop=self._loop), loop=self._loop)
asyncio.wait(tasks))
for result in results:
if result.exception() is not None:
@@ -135,4 +135,4 @@ class APISupervisor(CoreSysAttributes):
@api_process_raw(CONTENT_TYPE_BINARY)
def logs(self, request):
"""Return supervisor docker logs."""
return self._supervisor.logs()
return self.sys_supervisor.logs()


@@ -4,13 +4,13 @@ import hashlib
import logging
from aiohttp import web
from aiohttp.web_exceptions import HTTPServiceUnavailable
import voluptuous as vol
from voluptuous.humanize import humanize_error
from ..const import (
JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
CONTENT_TYPE_BINARY)
from ..exceptions import HassioError
_LOGGER = logging.getLogger(__name__)
@@ -33,42 +33,21 @@ def api_process(method):
answer = await method(api, *args, **kwargs)
except RuntimeError as err:
return api_return_error(message=str(err))
except HassioError:
_LOGGER.exception("Hassio error")
return api_return_error()
if isinstance(answer, dict):
return api_return_ok(data=answer)
if isinstance(answer, web.Response):
return answer
elif answer:
return api_return_ok()
return api_return_error()
elif isinstance(answer, bool) and not answer:
return api_return_error()
return api_return_ok()
return wrap_api
def api_process_hostcontrol(method):
"""Wrap HostControl calls to rest api."""
async def wrap_hostcontrol(api, *args, **kwargs):
"""Return host information."""
# pylint: disable=protected-access
if not api._host_control.active:
raise HTTPServiceUnavailable()
try:
answer = await method(api, *args, **kwargs)
except RuntimeError as err:
return api_return_error(message=str(err))
if isinstance(answer, dict):
return api_return_ok(data=answer)
elif answer is None:
return api_return_error("Function is not supported")
elif answer:
return api_return_ok()
return api_return_error()
return wrap_hostcontrol
def api_process_raw(content):
"""Wrap content_type into function."""
def wrap_method(method):
@@ -81,6 +60,9 @@ def api_process_raw(content):
except RuntimeError as err:
msg_data = str(err).encode()
msg_type = CONTENT_TYPE_BINARY
except HassioError:
msg_data = b''
msg_type = CONTENT_TYPE_BINARY
return web.Response(body=msg_data, content_type=msg_type)
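
For readers less familiar with the decorator being reworked here: a coroutine handler wrapped with `api_process` may return a dict, a bool or a prepared `web.Response`, and the wrapper converts that into the Supervisor's result/data JSON envelope, with `RuntimeError` and now `HassioError` mapped to error responses. A small illustration; the class and handler are invented for this example:

```python
from .utils import api_process
from ..coresys import CoreSysAttributes


class APIExample(CoreSysAttributes):
    """Hypothetical endpoint used only to illustrate the decorator."""

    @api_process
    async def ping(self, request):
        # A returned dict becomes {"result": "ok", "data": {...}};
        # raising RuntimeError becomes {"result": "error", "message": "..."}.
        return {"pong": True}
```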


@@ -7,6 +7,7 @@ from pathlib import Path
from colorlog import ColoredFormatter
from .core import HassIO
from .addons import AddonManager
from .api import RestAPI
from .const import SOCKET_DOCKER
@@ -17,7 +18,9 @@ from .snapshots import SnapshotManager
from .tasks import Tasks
from .updater import Updater
from .services import ServiceManager
from .host import AlsaAudio
from .services import Discovery
from .host import HostManager
from .dbus import DBusManager
_LOGGER = logging.getLogger(__name__)
@@ -27,15 +30,18 @@ def initialize_coresys(loop):
coresys = CoreSys(loop)
# Initialize core objects
coresys.core = HassIO(coresys)
coresys.updater = Updater(coresys)
coresys.api = RestAPI(coresys)
coresys.alsa = AlsaAudio(coresys)
coresys.supervisor = Supervisor(coresys)
coresys.homeassistant = HomeAssistant(coresys)
coresys.addons = AddonManager(coresys)
coresys.snapshots = SnapshotManager(coresys)
coresys.host = HostManager(coresys)
coresys.tasks = Tasks(coresys)
coresys.services = ServiceManager(coresys)
coresys.discovery = Discovery(coresys)
coresys.dbus = DBusManager(coresys)
# bootstrap config
initialize_system_data(coresys)
@@ -148,7 +154,12 @@ def check_environment():
# check socat exec
if not shutil.which('socat'):
_LOGGER.fatal("Can0t find socat program!")
_LOGGER.fatal("Can't find socat program!")
return False
# check socat exec
if not shutil.which('gdbus'):
_LOGGER.fatal("Can't find gdbus program!")
return False
return True
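The bootstrap change follows the existing wiring pattern: one CoreSys container is created, each manager receives it in its constructor, and the manager is attached back as an attribute. A reduced sketch of that pattern (manager classes abbreviated, not the full bootstrap):

class CoreSys:
    """Shared container (reduced sketch)."""
    def __init__(self, loop):
        self.loop = loop
        self.dbus = None
        self.host = None
        self.discovery = None

class DBusManager:
    def __init__(self, coresys):
        self.coresys = coresys   # every manager keeps a back-reference

def initialize_coresys(loop):
    """Attach the new managers, mirroring the diff above."""
    coresys = CoreSys(loop)
    coresys.dbus = DBusManager(coresys)
    # ...host, discovery and the other managers follow the same pattern
    return coresys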

View File

@@ -2,12 +2,11 @@
from pathlib import Path
from ipaddress import ip_network
HASSIO_VERSION = '0.101'
HASSIO_VERSION = '105'
URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
'hassio/{}/version.json')
URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
URL_HASSIO_ADDONS = "https://github.com/home-assistant/hassio-addons"
URL_HASSIO_VERSION = \
"https://s3.amazonaws.com/hassio-version/{channel}.json"
HASSIO_DATA = Path("/data")
@@ -18,7 +17,6 @@ FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json")
SOCKET_DOCKER = Path("/var/run/docker.sock")
SOCKET_HC = Path("/var/run/hassio-hc.sock")
DOCKER_NETWORK = 'hassio'
DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23')
@@ -53,7 +51,9 @@ ENV_TIME = 'TZ'
REQUEST_FROM = 'HASSIO_FROM'
ATTR_MACHINE = 'machine'
ATTR_WAIT_BOOT = 'wait_boot'
ATTR_DEPLOYMENT = 'deployment'
ATTR_WATCHDOG = 'watchdog'
ATTR_CHANGELOG = 'changelog'
ATTR_DATE = 'date'
@@ -62,7 +62,8 @@ ATTR_LONG_DESCRIPTION = 'long_description'
ATTR_HOSTNAME = 'hostname'
ATTR_TIMEZONE = 'timezone'
ATTR_ARGS = 'args'
ATTR_OS = 'os'
ATTR_OPERATING_SYSTEM = 'operating_system'
ATTR_CHASSIS = 'chassis'
ATTR_TYPE = 'type'
ATTR_SOURCE = 'source'
ATTR_FEATURES = 'features'
@@ -160,6 +161,7 @@ ATTR_DISCOVERY = 'discovery'
ATTR_PROTECTED = 'protected'
ATTR_CRYPTO = 'crypto'
ATTR_BRANCH = 'branch'
ATTR_KERNEL = 'kernel'
ATTR_SECCOMP = 'seccomp'
ATTR_APPARMOR = 'apparmor'
@@ -209,3 +211,8 @@ CRYPTO_AES128 = 'aes128'
SECURITY_PROFILE = 'profile'
SECURITY_DEFAULT = 'default'
SECURITY_DISABLE = 'disable'
FEATURES_SHUTDOWN = 'shutdown'
FEATURES_REBOOT = 'reboot'
FEATURES_UPDATE = 'update'
FEATURES_HOSTNAME = 'hostname'
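The replacement URL_HASSIO_VERSION is now a template keyed by release channel instead of a fixed GitHub path; resolving it is a plain str.format call (the channel name below is only illustrative):

URL_HASSIO_VERSION = "https://s3.amazonaws.com/hassio-version/{channel}.json"

print(URL_HASSIO_VERSION.format(channel='stable'))
# -> https://s3.amazonaws.com/hassio-version/stable.json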

View File

@@ -1,10 +1,12 @@
"""Main file for HassIO."""
from contextlib import suppress
import asyncio
import logging
from .coresys import CoreSysAttributes
from .const import (
STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE)
from .exceptions import HassioError
from .utils.dt import fetch_timezone
_LOGGER = logging.getLogger(__name__)
@@ -20,98 +22,114 @@ class HassIO(CoreSysAttributes):
async def setup(self):
"""Setup HassIO orchestration."""
# update timezone
if self._config.timezone == 'UTC':
self._config.timezone = await fetch_timezone(self._websession)
if self.sys_config.timezone == 'UTC':
self.sys_config.timezone = \
await fetch_timezone(self.sys_websession)
# supervisor
await self._supervisor.load()
# Load DBus
await self.sys_dbus.load()
# hostcontrol
await self._host_control.load()
# Load Host
await self.sys_host.load()
# Load homeassistant
await self._homeassistant.load()
# Load Supervisor
await self.sys_supervisor.load()
# Load addons
await self._addons.load()
# Load Home Assistant
await self.sys_homeassistant.load()
# Load Add-ons
await self.sys_addons.load()
# rest api views
await self._api.load()
await self.sys_api.load()
# load last available data
await self._updater.load()
await self.sys_updater.load()
# load last available data
await self._snapshots.load()
await self.sys_snapshots.load()
# load services
await self._services.load()
await self.sys_services.load()
# start dns forwarding
self._loop.create_task(self._dns.start())
# start addon mark as initialize
await self._addons.auto_boot(STARTUP_INITIALIZE)
self.sys_create_task(self.sys_dns.start())
async def start(self):
"""Start HassIO orchestration."""
# on release channel, try update itself
# on dev mode, only read new versions
if not self._dev and self._supervisor.need_update:
if await self._supervisor.update():
if not self.sys_dev and self.sys_supervisor.need_update:
if await self.sys_supervisor.update():
return
else:
_LOGGER.info("Ignore Hass.io auto updates on dev channel")
# start api
await self._api.start()
_LOGGER.info("Start API on %s", self._docker.network.supervisor)
await self.sys_api.start()
_LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
# start addon mark as initialize
await self.sys_addons.boot(STARTUP_INITIALIZE)
try:
# HomeAssistant is already running / supervisor have only reboot
if self._hardware.last_boot == self._config.last_boot:
if self.sys_hardware.last_boot == self.sys_config.last_boot:
_LOGGER.info("Hass.io reboot detected")
return
# reset register services / discovery
self._services.reset()
self.sys_services.reset()
# start addon mark as system
await self._addons.auto_boot(STARTUP_SYSTEM)
await self.sys_addons.boot(STARTUP_SYSTEM)
# start addon mark as services
await self._addons.auto_boot(STARTUP_SERVICES)
await self.sys_addons.boot(STARTUP_SERVICES)
# run HomeAssistant
if self._homeassistant.boot:
await self._homeassistant.start()
if self.sys_homeassistant.boot:
await self.sys_homeassistant.start()
# start addon mark as application
await self._addons.auto_boot(STARTUP_APPLICATION)
await self.sys_addons.boot(STARTUP_APPLICATION)
# store new last boot
self._config.last_boot = self._hardware.last_boot
self._config.save_data()
self.sys_config.last_boot = self.sys_hardware.last_boot
self.sys_config.save_data()
finally:
# Add core tasks into scheduler
await self._tasks.load()
await self.sys_tasks.load()
# If landingpage / run upgrade in background
if self._homeassistant.version == 'landingpage':
self._loop.create_task(self._homeassistant.install())
if self.sys_homeassistant.version == 'landingpage':
self.sys_create_task(self.sys_homeassistant.install())
_LOGGER.info("Hass.io is up and running")
async def stop(self):
"""Stop a running orchestration."""
# don't process scheduler anymore
self._scheduler.suspend = True
self.sys_scheduler.suspend = True
# process async stop tasks
await asyncio.wait([
self._api.stop(),
self._dns.stop(),
self._websession.close(),
self._websession_ssl.close()
], loop=self._loop)
self.sys_api.stop(),
self.sys_dns.stop(),
self.sys_websession.close(),
self.sys_websession_ssl.close()
])
async def shutdown(self):
"""Shutdown all running containers in correct order."""
await self.sys_addons.shutdown(STARTUP_APPLICATION)
# Close Home Assistant
with suppress(HassioError):
await self.sys_homeassistant.stop()
await self.sys_addons.shutdown(STARTUP_SERVICES)
await self.sys_addons.shutdown(STARTUP_SYSTEM)
await self.sys_addons.shutdown(STARTUP_INITIALIZE)
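The new shutdown() stops containers in reverse start order and uses contextlib.suppress so a Home Assistant failure cannot block the add-on teardown. Its shape, independent of the surrounding classes:

from contextlib import suppress

class HassioError(Exception):
    """Stand-in for the real root exception."""

async def shutdown(addons, homeassistant):
    """Ordered teardown sketch: applications first, core add-ons last."""
    await addons.shutdown('application')
    with suppress(HassioError):
        await homeassistant.stop()
    await addons.shutdown('services')
    await addons.shutdown('system')
    await addons.shutdown('initialize')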

View File

@@ -7,11 +7,10 @@ from .config import CoreConfig
from .docker import DockerAPI
from .misc.dns import DNSForward
from .misc.hardware import Hardware
from .misc.host_control import HostControl
from .misc.scheduler import Scheduler
class CoreSys(object):
class CoreSys:
"""Class that handle all shared data."""
def __init__(self, loop):
@@ -31,9 +30,9 @@ class CoreSys(object):
self._docker = DockerAPI()
self._scheduler = Scheduler(loop=loop)
self._dns = DNSForward(loop=loop)
self._host_control = HostControl(loop=loop)
# Internal objects pointers
self._core = None
self._homeassistant = None
self._supervisor = None
self._addons = None
@@ -41,8 +40,10 @@ class CoreSys(object):
self._updater = None
self._snapshots = None
self._tasks = None
self._host = None
self._dbus = None
self._services = None
self._alsa = None
self._discovery = None
@property
def arch(self):
@@ -104,9 +105,16 @@ class CoreSys(object):
return self._dns
@property
def host_control(self):
"""Return HostControl object."""
return self._host_control
def core(self):
"""Return HassIO object."""
return self._core
@core.setter
def core(self, value):
"""Set a HassIO object."""
if self._core:
raise RuntimeError("HassIO already set!")
self._core = value
@property
def homeassistant(self):
@@ -205,25 +213,57 @@ class CoreSys(object):
self._services = value
@property
def alsa(self):
"""Return ALSA Audio object."""
return self._alsa
def discovery(self):
"""Return ServiceManager object."""
return self._discovery
@alsa.setter
def alsa(self, value):
"""Set a ALSA Audio object."""
if self._alsa:
raise RuntimeError("ALSA already set!")
self._alsa = value
@discovery.setter
def discovery(self, value):
"""Set a Discovery object."""
if self._discovery:
raise RuntimeError("Discovery already set!")
self._discovery = value
@property
def dbus(self):
"""Return DBusManager object."""
return self._dbus
@dbus.setter
def dbus(self, value):
"""Set a DBusManager object."""
if self._dbus:
raise RuntimeError("DBusManager already set!")
self._dbus = value
@property
def host(self):
"""Return HostManager object."""
return self._host
@host.setter
def host(self, value):
"""Set a HostManager object."""
if self._host:
raise RuntimeError("HostManager already set!")
self._host = value
def run_in_executor(self, funct, *args):
"""Wrapper for executor pool."""
return self._loop.run_in_executor(None, funct, *args)
def create_task(self, coroutine):
"""Wrapper for async task."""
return self._loop.create_task(coroutine)
class CoreSysAttributes(object):
class CoreSysAttributes:
"""Inheret basic CoreSysAttributes."""
coresys = None
def __getattr__(self, name):
"""Mapping to coresys."""
if hasattr(self.coresys, name[1:]):
return getattr(self.coresys, name[1:])
raise AttributeError(f"Can't find {name} on {self.__class__}")
if name.startswith("sys_") and hasattr(self.coresys, name[4:]):
return getattr(self.coresys, name[4:])
raise AttributeError()
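The effect of the new prefix rule: any class that inherits CoreSysAttributes can reach shared objects as self.sys_<name>, while plain underscore attributes stay local to the class. A toy demonstration:

class CoreSysAttributes:
    """Map sys_* attribute access onto the shared coresys object."""
    coresys = None

    def __getattr__(self, name):
        if name.startswith("sys_") and hasattr(self.coresys, name[4:]):
            return getattr(self.coresys, name[4:])
        raise AttributeError()

class FakeCoreSys:
    config = "config-object"          # stand-in for the real CoreConfig

class Example(CoreSysAttributes):
    def __init__(self, coresys):
        self.coresys = coresys

print(Example(FakeCoreSys()).sys_config)   # -> config-object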

hassio/dbus/__init__.py (new file, 30 lines)
View File

@@ -0,0 +1,30 @@
"""DBus interface objects."""
from .systemd import Systemd
from .hostname import Hostname
from ..coresys import CoreSysAttributes
class DBusManager(CoreSysAttributes):
"""DBus Interface handler."""
def __init__(self, coresys):
"""Initialize DBus Interface."""
self.coresys = coresys
self._systemd = Systemd()
self._hostname = Hostname()
@property
def systemd(self):
"""Return Systemd Interface."""
return self._systemd
@property
def hostname(self):
"""Return hostname Interface."""
return self._hostname
async def load(self):
"""Connect interfaces to dbus."""
await self.systemd.connect()
await self.hostname.connect()

hassio/dbus/hostname.py (new file, 39 lines)
View File

@@ -0,0 +1,39 @@
"""DBus interface for hostname."""
import logging
from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus
_LOGGER = logging.getLogger(__name__)
DBUS_NAME = 'org.freedesktop.hostname1'
DBUS_OBJECT = '/org/freedesktop/hostname1'
class Hostname(DBusInterface):
"""Handle DBus interface for hostname/system."""
async def connect(self):
"""Connect do bus."""
try:
self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
except DBusError:
_LOGGER.warning("Can't connect to hostname")
@dbus_connected
def set_static_hostname(self, hostname):
"""Change local hostname.
Return a coroutine.
"""
return self.dbus.SetStaticHostname(hostname)
@dbus_connected
def get_properties(self):
"""Return local host informations.
Return a coroutine.
"""
return self.dbus.get_properties(DBUS_NAME)

hassio/dbus/interface.py (new file, 18 lines)
View File

@@ -0,0 +1,18 @@
"""Interface class for dbus wrappers."""
class DBusInterface:
"""Handle DBus interface for hostname/system."""
def __init__(self):
"""Initialize systemd."""
self.dbus = None
@property
def is_connected(self):
"""Return True, if they is connected to dbus."""
return self.dbus is not None
async def connect(self):
"""Connect do bus."""
raise NotImplementedError()

hassio/dbus/systemd.py (new file, 39 lines)
View File

@@ -0,0 +1,39 @@
"""Interface to Systemd over dbus."""
import logging
from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus
_LOGGER = logging.getLogger(__name__)
DBUS_NAME = 'org.freedesktop.systemd1'
DBUS_OBJECT = '/org/freedesktop/systemd1'
class Systemd(DBusInterface):
"""Systemd function handler."""
async def connect(self):
"""Connect do bus."""
try:
self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
except DBusError:
_LOGGER.warning("Can't connect to systemd")
@dbus_connected
def reboot(self):
"""Reboot host computer.
Return a coroutine.
"""
return self.dbus.Manager.Reboot()
@dbus_connected
def power_off(self):
"""Power off host computer.
Return a coroutine.
"""
return self.dbus.Manager.PowerOff()

hassio/dbus/utils.py (new file, 14 lines)
View File

@@ -0,0 +1,14 @@
"""Utils for dbus."""
from ..exceptions import DBusNotConnectedError
def dbus_connected(method):
"""Wrapper for check if dbus is connected."""
def wrap_dbus(api, *args, **kwargs):
"""Check if dbus is connected before call a method."""
if api.dbus is None:
raise DBusNotConnectedError()
return method(api, *args, **kwargs)
return wrap_dbus
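In use, the guard simply refuses the call when no connection was established (for example when the host has no D-Bus socket). A self-contained illustration, with the exception class inlined for brevity:

class DBusNotConnectedError(Exception):
    """Stand-in for hassio.exceptions.DBusNotConnectedError."""

def dbus_connected(method):
    """Only run the wrapped method when a D-Bus connection exists."""
    def wrap_dbus(api, *args, **kwargs):
        if api.dbus is None:
            raise DBusNotConnectedError()
        return method(api, *args, **kwargs)
    return wrap_dbus

class DummyInterface:
    dbus = None                       # connect() never succeeded

    @dbus_connected
    def reboot(self):
        return self.dbus.Manager.Reboot()

try:
    DummyInterface().reboot()
except DBusNotConnectedError:
    print("guarded: no D-Bus connection")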

View File

@@ -14,7 +14,7 @@ _LOGGER = logging.getLogger(__name__)
CommandReturn = attr.make_class('CommandReturn', ['exit_code', 'output'])
class DockerAPI(object):
class DockerAPI:
"""Docker hassio wrapper.
This class is not AsyncIO safe!
@@ -24,7 +24,7 @@ class DockerAPI(object):
"""Initialize docker base wrapper."""
self.docker = docker.DockerClient(
base_url="unix:/{}".format(str(SOCKET_DOCKER)),
version='auto', timeout=300)
version='auto', timeout=900)
self.network = DockerNetwork(self.docker)
@property
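For context, the timeout is handed straight to docker-py's client constructor, so long-running pulls of large Home Assistant images now get 15 minutes instead of 5 before the SDK gives up. The equivalent standalone call, with the socket path spelled out:

import docker

# unix:/ + /var/run/docker.sock, as built from SOCKET_DOCKER above
client = docker.DockerClient(
    base_url="unix://var/run/docker.sock", version='auto', timeout=900)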

View File

@@ -28,7 +28,7 @@ class DockerAddon(DockerInterface):
@property
def addon(self):
"""Return addon of docker image."""
return self._addons.get(self._id)
return self.sys_addons.get(self._id)
@property
def image(self):
@@ -52,7 +52,7 @@ class DockerAddon(DockerInterface):
"""Return arch of docker image."""
if not self.addon.legacy:
return super().arch
return self._arch
return self.sys_arch
@property
def name(self):
@@ -85,7 +85,7 @@ class DockerAddon(DockerInterface):
return {
**addon_env,
ENV_TIME: self._config.timezone,
ENV_TIME: self.sys_config.timezone,
ENV_TOKEN: self.addon.uuid,
}
@@ -100,7 +100,7 @@ class DockerAddon(DockerInterface):
# Auto mapping UART devices
if self.addon.auto_uart:
for device in self._hardware.serial_devices:
for device in self.sys_hardware.serial_devices:
devices.append(f"{device}:{device}:rwm")
# Return None if no devices is present
@@ -149,8 +149,8 @@ class DockerAddon(DockerInterface):
def network_mapping(self):
"""Return hosts mapping."""
return {
'homeassistant': self._docker.network.gateway,
'hassio': self._docker.network.supervisor,
'homeassistant': self.sys_docker.network.gateway,
'hassio': self.sys_docker.network.supervisor,
}
@property
@@ -173,31 +173,31 @@ class DockerAddon(DockerInterface):
# setup config mappings
if MAP_CONFIG in addon_mapping:
volumes.update({
str(self._config.path_extern_config): {
str(self.sys_config.path_extern_config): {
'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
}})
if MAP_SSL in addon_mapping:
volumes.update({
str(self._config.path_extern_ssl): {
str(self.sys_config.path_extern_ssl): {
'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
}})
if MAP_ADDONS in addon_mapping:
volumes.update({
str(self._config.path_extern_addons_local): {
str(self.sys_config.path_extern_addons_local): {
'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
}})
if MAP_BACKUP in addon_mapping:
volumes.update({
str(self._config.path_extern_backup): {
str(self.sys_config.path_extern_backup): {
'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
}})
if MAP_SHARE in addon_mapping:
volumes.update({
str(self._config.path_extern_share): {
str(self.sys_config.path_extern_share): {
'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
}})
@@ -239,7 +239,7 @@ class DockerAddon(DockerInterface):
# cleanup
self._stop()
ret = self._docker.run(
ret = self.sys_docker.run(
self.image,
name=self.name,
hostname=self.hostname,
@@ -283,7 +283,7 @@ class DockerAddon(DockerInterface):
_LOGGER.info("Start build %s:%s", self.image, tag)
try:
image, log = self._docker.images.build(
image, log = self.sys_docker.images.build(
**build_env.get_docker_args(tag))
_LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
@@ -302,7 +302,7 @@ class DockerAddon(DockerInterface):
@process_lock
def export_image(self, path):
"""Export current images into a tar file."""
return self._loop.run_in_executor(None, self._export_image, path)
return self.sys_run_in_executor(self._export_image, path)
def _export_image(self, tar_file):
"""Export current images into a tar file.
@@ -310,7 +310,7 @@ class DockerAddon(DockerInterface):
Need run inside executor.
"""
try:
image = self._docker.api.get_image(self.image)
image = self.sys_docker.api.get_image(self.image)
except docker.errors.DockerException as err:
_LOGGER.error("Can't fetch image %s: %s", self.image, err)
return False
@@ -330,7 +330,7 @@ class DockerAddon(DockerInterface):
@process_lock
def import_image(self, path, tag):
"""Import a tar file as image."""
return self._loop.run_in_executor(None, self._import_image, path, tag)
return self.sys_run_in_executor(self._import_image, path, tag)
def _import_image(self, tar_file, tag):
"""Import a tar file as image.
@@ -339,9 +339,9 @@ class DockerAddon(DockerInterface):
"""
try:
with tar_file.open("rb") as read_tar:
self._docker.api.load_image(read_tar, quiet=True)
self.sys_docker.api.load_image(read_tar, quiet=True)
image = self._docker.images.get(self.image)
image = self.sys_docker.images.get(self.image)
image.tag(self.image, tag=tag)
except (docker.errors.DockerException, OSError) as err:
_LOGGER.error("Can't import image %s: %s", self.image, err)
@@ -355,7 +355,7 @@ class DockerAddon(DockerInterface):
@process_lock
def write_stdin(self, data):
"""Write to add-on stdin."""
return self._loop.run_in_executor(None, self._write_stdin, data)
return self.sys_run_in_executor(self._write_stdin, data)
def _write_stdin(self, data):
"""Write to add-on stdin.
@@ -367,7 +367,7 @@ class DockerAddon(DockerInterface):
try:
# load needed docker objects
container = self._docker.containers.get(self.name)
container = self.sys_docker.containers.get(self.name)
socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
except docker.errors.DockerException as err:
_LOGGER.error("Can't attach to %s stdin: %s", self.name, err)

View File

@@ -24,7 +24,7 @@ class DockerHomeAssistant(DockerInterface):
@property
def image(self):
"""Return name of docker image."""
return self._homeassistant.image
return self.sys_homeassistant.image
@property
def name(self):
@@ -35,7 +35,7 @@ class DockerHomeAssistant(DockerInterface):
def devices(self):
"""Create list of special device to map into docker."""
devices = []
for device in self._hardware.serial_devices:
for device in self.sys_hardware.serial_devices:
devices.append(f"{device}:{device}:rwm")
return devices or None
@@ -50,7 +50,7 @@ class DockerHomeAssistant(DockerInterface):
# cleanup
self._stop()
ret = self._docker.run(
ret = self.sys_docker.run(
self.image,
name=self.name,
hostname=self.name,
@@ -60,16 +60,16 @@ class DockerHomeAssistant(DockerInterface):
devices=self.devices,
network_mode='host',
environment={
'HASSIO': self._docker.network.supervisor,
ENV_TIME: self._config.timezone,
ENV_TOKEN: self._homeassistant.uuid,
'HASSIO': self.sys_docker.network.supervisor,
ENV_TIME: self.sys_config.timezone,
ENV_TOKEN: self.sys_homeassistant.uuid,
},
volumes={
str(self._config.path_extern_config):
str(self.sys_config.path_extern_config):
{'bind': '/config', 'mode': 'rw'},
str(self._config.path_extern_ssl):
str(self.sys_config.path_extern_ssl):
{'bind': '/ssl', 'mode': 'ro'},
str(self._config.path_extern_share):
str(self.sys_config.path_extern_share):
{'bind': '/share', 'mode': 'rw'},
}
)
@@ -85,26 +85,26 @@ class DockerHomeAssistant(DockerInterface):
Need run inside executor.
"""
return self._docker.run_command(
return self.sys_docker.run_command(
self.image,
command,
detach=True,
stdout=True,
stderr=True,
environment={
ENV_TIME: self._config.timezone,
ENV_TIME: self.sys_config.timezone,
},
volumes={
str(self._config.path_extern_config):
str(self.sys_config.path_extern_config):
{'bind': '/config', 'mode': 'ro'},
str(self._config.path_extern_ssl):
str(self.sys_config.path_extern_ssl):
{'bind': '/ssl', 'mode': 'ro'},
}
)
def is_initialize(self):
"""Return True if docker container exists."""
return self._loop.run_in_executor(None, self._is_initialize)
return self.sys_run_in_executor(self._is_initialize)
def _is_initialize(self):
"""Return True if docker container exists.
@@ -112,7 +112,7 @@ class DockerHomeAssistant(DockerInterface):
Need run inside executor.
"""
try:
self._docker.containers.get(self.name)
self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return False

View File

@@ -61,7 +61,7 @@ class DockerInterface(CoreSysAttributes):
@process_lock
def install(self, tag):
"""Pull docker image."""
return self._loop.run_in_executor(None, self._install, tag)
return self.sys_run_in_executor(self._install, tag)
def _install(self, tag):
"""Pull docker image.
@@ -70,7 +70,7 @@ class DockerInterface(CoreSysAttributes):
"""
try:
_LOGGER.info("Pull image %s tag %s.", self.image, tag)
image = self._docker.images.pull(f"{self.image}:{tag}")
image = self.sys_docker.images.pull(f"{self.image}:{tag}")
image.tag(self.image, tag='latest')
self._meta = image.attrs
@@ -83,7 +83,7 @@ class DockerInterface(CoreSysAttributes):
def exists(self):
"""Return True if docker image exists in local repo."""
return self._loop.run_in_executor(None, self._exists)
return self.sys_run_in_executor(self._exists)
def _exists(self):
"""Return True if docker image exists in local repo.
@@ -91,7 +91,7 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor.
"""
try:
image = self._docker.images.get(self.image)
image = self.sys_docker.images.get(self.image)
assert f"{self.image}:{self.version}" in image.tags
except (docker.errors.DockerException, AssertionError):
return False
@@ -103,7 +103,7 @@ class DockerInterface(CoreSysAttributes):
Return a Future.
"""
return self._loop.run_in_executor(None, self._is_running)
return self.sys_run_in_executor(self._is_running)
def _is_running(self):
"""Return True if docker is Running.
@@ -111,8 +111,8 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor.
"""
try:
container = self._docker.containers.get(self.name)
image = self._docker.images.get(self.image)
container = self.sys_docker.containers.get(self.name)
image = self.sys_docker.images.get(self.image)
except docker.errors.DockerException:
return False
@@ -129,7 +129,7 @@ class DockerInterface(CoreSysAttributes):
@process_lock
def attach(self):
"""Attach to running docker container."""
return self._loop.run_in_executor(None, self._attach)
return self.sys_run_in_executor(self._attach)
def _attach(self):
"""Attach to running docker container.
@@ -138,9 +138,9 @@ class DockerInterface(CoreSysAttributes):
"""
try:
if self.image:
self._meta = self._docker.images.get(self.image).attrs
self._meta = self.sys_docker.images.get(self.image).attrs
else:
self._meta = self._docker.containers.get(self.name).attrs
self._meta = self.sys_docker.containers.get(self.name).attrs
except docker.errors.DockerException:
return False
@@ -152,7 +152,7 @@ class DockerInterface(CoreSysAttributes):
@process_lock
def run(self):
"""Run docker image."""
return self._loop.run_in_executor(None, self._run)
return self.sys_run_in_executor(self._run)
def _run(self):
"""Run docker image.
@@ -164,7 +164,7 @@ class DockerInterface(CoreSysAttributes):
@process_lock
def stop(self):
"""Stop/remove docker container."""
return self._loop.run_in_executor(None, self._stop)
return self.sys_run_in_executor(self._stop)
def _stop(self):
"""Stop/remove and remove docker container.
@@ -172,7 +172,7 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor.
"""
try:
container = self._docker.containers.get(self.name)
container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return False
@@ -190,7 +190,7 @@ class DockerInterface(CoreSysAttributes):
@process_lock
def remove(self):
"""Remove docker images."""
return self._loop.run_in_executor(None, self._remove)
return self.sys_run_in_executor(self._remove)
def _remove(self):
"""remove docker images.
@@ -205,11 +205,11 @@ class DockerInterface(CoreSysAttributes):
try:
with suppress(docker.errors.ImageNotFound):
self._docker.images.remove(
self.sys_docker.images.remove(
image=f"{self.image}:latest", force=True)
with suppress(docker.errors.ImageNotFound):
self._docker.images.remove(
self.sys_docker.images.remove(
image=f"{self.image}:{self.version}", force=True)
except docker.errors.DockerException as err:
@@ -222,7 +222,7 @@ class DockerInterface(CoreSysAttributes):
@process_lock
def update(self, tag):
"""Update a docker image."""
return self._loop.run_in_executor(None, self._update, tag)
return self.sys_run_in_executor(self._update, tag)
def _update(self, tag):
"""Update a docker image.
@@ -247,7 +247,7 @@ class DockerInterface(CoreSysAttributes):
Return a Future.
"""
return self._loop.run_in_executor(None, self._logs)
return self.sys_run_in_executor(self._logs)
def _logs(self):
"""Return docker logs of container.
@@ -255,7 +255,7 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor.
"""
try:
container = self._docker.containers.get(self.name)
container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return b""
@@ -267,7 +267,7 @@ class DockerInterface(CoreSysAttributes):
@process_lock
def cleanup(self):
"""Check if old version exists and cleanup."""
return self._loop.run_in_executor(None, self._cleanup)
return self.sys_run_in_executor(self._cleanup)
def _cleanup(self):
"""Check if old version exists and cleanup.
@@ -275,25 +275,25 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor.
"""
try:
latest = self._docker.images.get(self.image)
latest = self.sys_docker.images.get(self.image)
except docker.errors.DockerException:
_LOGGER.warning("Can't find %s for cleanup", self.image)
return False
for image in self._docker.images.list(name=self.image):
for image in self.sys_docker.images.list(name=self.image):
if latest.id == image.id:
continue
with suppress(docker.errors.DockerException):
_LOGGER.info("Cleanup docker images: %s", image.tags)
self._docker.images.remove(image.id, force=True)
self.sys_docker.images.remove(image.id, force=True)
return True
@process_lock
def execute_command(self, command):
"""Create a temporary container and run command."""
return self._loop.run_in_executor(None, self._execute_command, command)
return self.sys_run_in_executor(self._execute_command, command)
def _execute_command(self, command):
"""Create a temporary container and run command.
@@ -304,7 +304,7 @@ class DockerInterface(CoreSysAttributes):
def stats(self):
"""Read and return stats from container."""
return self._loop.run_in_executor(None, self._stats)
return self.sys_run_in_executor(self._stats)
def _stats(self):
"""Create a temporary container and run command.
@@ -312,7 +312,7 @@ class DockerInterface(CoreSysAttributes):
Need run inside executor.
"""
try:
container = self._docker.containers.get(self.name)
container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return None
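Every Docker wrapper now follows the same split: a thin public method that schedules work through sys_run_in_executor, and a private underscore-prefixed blocking body. The pattern in isolation (the blocking body here is a placeholder for the real Docker calls):

import asyncio

class ExecutorExample:
    """Sketch of the public/_private executor split."""

    def __init__(self, loop):
        self.loop = loop

    def sys_run_in_executor(self, funct, *args):
        # Same helper CoreSys exposes: run blocking code in the default pool.
        return self.loop.run_in_executor(None, funct, *args)

    def stop(self):
        """Public entry point; returns an awaitable future."""
        return self.sys_run_in_executor(self._stop)

    def _stop(self):
        """Blocking body; the real code talks to the Docker daemon here."""
        return True

loop = asyncio.get_event_loop()
print(loop.run_until_complete(ExecutorExample(loop).stop()))   # -> True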

View File

@@ -8,7 +8,7 @@ from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE
_LOGGER = logging.getLogger(__name__)
class DockerNetwork(object):
class DockerNetwork:
"""Internal HassIO Network.
This class is not AsyncIO safe!

View File

@@ -2,7 +2,7 @@
from contextlib import suppress
class DockerStats(object):
class DockerStats:
"""Hold stats data from container inside."""
def __init__(self, stats):

View File

@@ -24,7 +24,7 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes):
Need run inside executor.
"""
try:
container = self._docker.containers.get(self.name)
container = self.sys_docker.containers.get(self.name)
except docker.errors.DockerException:
return False
@@ -33,9 +33,10 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes):
self.image, self.version)
# if already attach
if container in self._docker.network.containers:
if container in self.sys_docker.network.containers:
return True
# attach to network
return self._docker.network.attach_container(
container, alias=['hassio'], ipv4=self._docker.network.supervisor)
return self.sys_docker.network.attach_container(
container, alias=['hassio'],
ipv4=self.sys_docker.network.supervisor)

hassio/exceptions.py (new file, 49 lines)
View File

@@ -0,0 +1,49 @@
"""Core Exceptions."""
class HassioError(Exception):
"""Root exception."""
pass
class HassioInternalError(HassioError):
"""Internal Hass.io error they can't handle."""
pass
class HassioNotSupportedError(HassioError):
"""Function is not supported."""
pass
# Host
class HostError(HassioError):
"""Internal Host error."""
pass
class HostNotSupportedError(HassioNotSupportedError):
"""Host function is not supprted."""
pass
# utils/gdbus
class DBusError(HassioError):
"""DBus generic error."""
pass
class DBusNotConnectedError(HostNotSupportedError):
"""DBus is not connected and call a method."""
class DBusFatalError(DBusError):
"""DBus call going wrong."""
pass
class DBusParseError(DBusError):
"""DBus parse error."""
pass
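Because every specific error derives from HassioError, call sites can catch the whole tree at once (as core.shutdown does above) or a single branch. A small illustration with two of the classes inlined:

class HassioError(Exception):
    """Root exception."""

class DBusError(HassioError):
    """DBus generic error."""

class DBusFatalError(DBusError):
    """D-Bus call failed."""

try:
    raise DBusFatalError("call failed")
except HassioError as err:
    # One except clause covers the entire hierarchy.
    print(f"handled: {err}")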

View File

@@ -54,7 +54,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
@property
def api_ip(self):
"""Return IP of HomeAssistant instance."""
return self._docker.network.gateway
return self.sys_docker.network.gateway
@property
def api_port(self):
@@ -123,7 +123,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
"""Return last available version of homeassistant."""
if self.is_custom_image:
return self._data.get(ATTR_LAST_VERSION)
return self._updater.version_homeassistant
return self.sys_updater.version_homeassistant
@last_version.setter
def last_version(self, value):
@@ -177,7 +177,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
if await self.instance.install('landingpage'):
break
_LOGGER.warning("Fails install landingpage, retry after 60sec")
await asyncio.sleep(60, loop=self._loop)
await asyncio.sleep(60)
# Run landingpage after installation
await self._start()
@@ -189,13 +189,13 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
while True:
# read homeassistant tag and install it
if not self.last_version:
await self._updater.reload()
await self.sys_updater.reload()
tag = self.last_version
if tag and await self.instance.install(tag):
break
_LOGGER.warning("Error on install HomeAssistant. Retry in 60sec")
await asyncio.sleep(60, loop=self._loop)
await asyncio.sleep(60)
# finishing
_LOGGER.info("HomeAssistant docker now installed")
@@ -307,7 +307,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
try:
# pylint: disable=bad-continuation
async with self._websession_ssl.get(
async with self.sys_websession_ssl.get(
url, headers=header, timeout=30) as request:
status = request.status
@@ -328,7 +328,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
try:
# pylint: disable=bad-continuation
async with self._websession_ssl.post(
async with self.sys_websession_ssl.post(
url, headers=header, timeout=30,
json=event_data) as request:
status = request.status
@@ -361,10 +361,10 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
pass
while time.monotonic() - start_time < self.wait_boot:
if await self._loop.run_in_executor(None, check_port):
if await self.sys_run_in_executor(check_port):
_LOGGER.info("Detect a running Home-Assistant instance")
return True
await asyncio.sleep(10, loop=self._loop)
await asyncio.sleep(10)
_LOGGER.warning("Don't wait anymore of Home-Assistant startup!")
return False
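Dropping the loop= argument does not change the control flow here: both install paths remain a retry loop of attempt, warn, sleep. Stripped to its shape:

import asyncio
import logging

_LOGGER = logging.getLogger(__name__)

async def install_with_retry(install, delay=60):
    """Retry an async install callable until it succeeds (sketch)."""
    while True:
        if await install():
            return
        _LOGGER.warning("Install failed, retry in %d sec", delay)
        await asyncio.sleep(delay)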

View File

@@ -1,2 +1,58 @@
"""Host function like audio/dbus/systemd."""
from .alsa import AlsaAudio # noqa
from .alsa import AlsaAudio
from .control import SystemControl
from .info import InfoCenter
from ..const import FEATURES_REBOOT, FEATURES_SHUTDOWN, FEATURES_HOSTNAME
from ..coresys import CoreSysAttributes
class HostManager(CoreSysAttributes):
"""Manage supported function from host."""
def __init__(self, coresys):
"""Initialize Host manager."""
self.coresys = coresys
self._alsa = AlsaAudio(coresys)
self._control = SystemControl(coresys)
self._info = InfoCenter(coresys)
@property
def alsa(self):
"""Return host ALSA handler."""
return self._alsa
@property
def control(self):
"""Return host control handler."""
return self._control
@property
def info(self):
"""Return host info handler."""
return self._info
@property
def supperted_features(self):
"""Return a list of supported host features."""
features = []
if self.sys_dbus.systemd.is_connected:
features.extend([
FEATURES_REBOOT,
FEATURES_SHUTDOWN,
])
if self.sys_dbus.hostname.is_connected:
features.append(FEATURES_HOSTNAME)
return features
async def load(self):
"""Load host functions."""
if self.sys_dbus.hostname.is_connected:
await self.info.update()
def reload(self):
"""Reload host information."""
return self.load()
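The feature list is derived purely from which D-Bus interfaces managed to connect; the same logic with plain booleans instead of the live interfaces:

FEATURES_REBOOT = 'reboot'
FEATURES_SHUTDOWN = 'shutdown'
FEATURES_HOSTNAME = 'hostname'

def supported_features(systemd_connected, hostname_connected):
    """Mirror the property above with plain booleans."""
    features = []
    if systemd_connected:
        features.extend([FEATURES_REBOOT, FEATURES_SHUTDOWN])
    if hostname_connected:
        features.append(FEATURES_HOSTNAME)
    return features

print(supported_features(True, False))    # -> ['reboot', 'shutdown']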

View File

@@ -42,7 +42,7 @@ class AlsaAudio(CoreSysAttributes):
def _update_device(self):
"""Update Internal device DB."""
current_id = hash(frozenset(self._hardware.audio_devices))
current_id = hash(frozenset(self.sys_hardware.audio_devices))
# Need rebuild?
if current_id == self._cache:
@@ -57,7 +57,7 @@ class AlsaAudio(CoreSysAttributes):
database = self._audio_database()
# Process devices
for dev_id, dev_data in self._hardware.audio_devices.items():
for dev_id, dev_data in self.sys_hardware.audio_devices.items():
for chan_id, chan_type in dev_data[ATTR_DEVICES].items():
alsa_id = f"{dev_id},{chan_id}"
dev_name = dev_data[ATTR_NAME]
@@ -73,7 +73,7 @@ class AlsaAudio(CoreSysAttributes):
# Use name from DB or a generic name
self._data[key][alsa_id] = database.get(
self._machine, {}).get(
self.sys_machine, {}).get(
dev_name, {}).get(alsa_id, f"{dev_name}: {chan_id}")
self._cache = current_id
@@ -98,8 +98,8 @@ class AlsaAudio(CoreSysAttributes):
# Init defaults
if self._default is None:
database = self._audio_database()
alsa_input = database.get(self._machine, {}).get(ATTR_INPUT)
alsa_output = database.get(self._machine, {}).get(ATTR_OUTPUT)
alsa_input = database.get(self.sys_machine, {}).get(ATTR_INPUT)
alsa_output = database.get(self.sys_machine, {}).get(ATTR_OUTPUT)
self._default = DefaultConfig(alsa_input, alsa_output)

View File

@@ -6,12 +6,12 @@ pcm.!default {
pcm.mic {
type plug
slave {
pcm "hw:{$input}"
pcm "hw:$input"
}
}
pcm.speaker {
type plug
slave {
pcm "hw:{$output}"
pcm "hw:$output"
}
}
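The change from "hw:{$input}" to "hw:$input" matters if the file is rendered with Python's string.Template (an assumption here; the renderer itself is not shown in this diff): $name is the placeholder syntax, so the surrounding braces would survive substitution. A quick check:

from string import Template

print(Template('pcm "hw:$input"').substitute(input='0,0'))
# -> pcm "hw:0,0"

print(Template('pcm "hw:{$input}"').substitute(input='0,0'))
# -> pcm "hw:{0,0}"   (literal braces left behind)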

hassio/host/control.py (new file, 51 lines)
View File

@@ -0,0 +1,51 @@
"""Power control for host."""
import logging
from ..coresys import CoreSysAttributes
from ..exceptions import HostNotSupportedError
_LOGGER = logging.getLogger(__name__)
class SystemControl(CoreSysAttributes):
"""Handle host power controls."""
def __init__(self, coresys):
"""Initialize host power handling."""
self.coresys = coresys
def _check_systemd(self):
"""Check if systemd is connect or raise error."""
if not self.sys_dbus.systemd.is_connected:
_LOGGER.error("No systemd dbus connection available")
raise HostNotSupportedError()
async def reboot(self):
"""Reboot host system."""
self._check_systemd()
_LOGGER.info("Initialize host reboot over systemd")
try:
await self.sys_core.shutdown()
finally:
await self.sys_dbus.systemd.reboot()
async def shutdown(self):
"""Shutdown host system."""
self._check_systemd()
_LOGGER.info("Initialize host power off over systemd")
try:
await self.sys_core.shutdown()
finally:
await self.sys_dbus.systemd.power_off()
async def set_hostname(self, hostname):
"""Set local a new Hostname."""
if not self.sys_dbus.hostname.is_connected:
_LOGGER.error("No hostname dbus connection available")
raise HostNotSupportedError()
_LOGGER.info("Set Hostname %s", hostname)
await self.sys_dbus.hostname.set_static_hostname(hostname)
await self.sys_host.info.update()

hassio/host/info.py (new file, 58 lines)
View File

@@ -0,0 +1,58 @@
"""Power control for host."""
import logging
from ..coresys import CoreSysAttributes
from ..exceptions import HassioError, HostNotSupportedError
_LOGGER = logging.getLogger(__name__)
class InfoCenter(CoreSysAttributes):
"""Handle local system information controls."""
def __init__(self, coresys):
"""Initialize system center handling."""
self.coresys = coresys
self._data = {}
@property
def hostname(self):
"""Return local hostname."""
return self._data.get('StaticHostname') or None
@property
def chassis(self):
"""Return local chassis type."""
return self._data.get('Chassis') or None
@property
def deployment(self):
"""Return local deployment type."""
return self._data.get('Deployment') or None
@property
def kernel(self):
"""Return local kernel version."""
return self._data.get('KernelRelease') or None
@property
def operating_system(self):
"""Return local operating system."""
return self._data.get('OperatingSystemPrettyName') or None
@property
def cpe(self):
"""Return local CPE."""
return self._data.get('OperatingSystemCPEName') or None
async def update(self):
"""Update properties over dbus."""
if not self.sys_dbus.hostname.is_connected:
_LOGGER.error("No hostname dbus connection available")
raise HostNotSupportedError()
_LOGGER.info("Update local host information")
try:
self._data = await self.sys_dbus.hostname.get_properties()
except HassioError:
_LOGGER.warning("Can't update host system information!")

View File

@@ -8,7 +8,7 @@ _LOGGER = logging.getLogger(__name__)
COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"
class DNSForward(object):
class DNSForward:
"""Manage DNS forwarding to internal DNS."""
def __init__(self, loop):

View File

@@ -23,7 +23,7 @@ GPIO_DEVICES = Path("/sys/class/gpio")
RE_TTY = re.compile(r"tty[A-Z]+")
class Hardware(object):
class Hardware:
"""Represent a interface to procfs, sysfs and udev."""
def __init__(self):
@@ -63,6 +63,10 @@ class Hardware(object):
@property
def audio_devices(self):
"""Return all available audio interfaces."""
if not ASOUND_CARDS.exists():
_LOGGER.info("No audio devices found")
return {}
try:
with ASOUND_CARDS.open('r') as cards_file:
cards = cards_file.read()

View File

@@ -1,124 +0,0 @@
"""Host control for HassIO."""
import asyncio
import json
import logging
import async_timeout
from ..const import (
SOCKET_HC, ATTR_LAST_VERSION, ATTR_VERSION, ATTR_TYPE, ATTR_FEATURES,
ATTR_HOSTNAME, ATTR_OS)
_LOGGER = logging.getLogger(__name__)
TIMEOUT = 15
UNKNOWN = 'unknown'
FEATURES_SHUTDOWN = 'shutdown'
FEATURES_REBOOT = 'reboot'
FEATURES_UPDATE = 'update'
FEATURES_HOSTNAME = 'hostname'
FEATURES_NETWORK_INFO = 'network_info'
FEATURES_NETWORK_CONTROL = 'network_control'
class HostControl(object):
"""Client for host control."""
def __init__(self, loop):
"""Initialize HostControl socket client."""
self.loop = loop
self.active = False
self.version = UNKNOWN
self.last_version = UNKNOWN
self.type = UNKNOWN
self.features = []
self.hostname = UNKNOWN
self.os_info = UNKNOWN
if SOCKET_HC.is_socket():
self.active = True
async def _send_command(self, command):
"""Send command to host.
Is a coroutine.
"""
if not self.active:
return
reader, writer = await asyncio.open_unix_connection(
str(SOCKET_HC), loop=self.loop)
try:
# send
_LOGGER.info("Send '%s' to HostControl.", command)
with async_timeout.timeout(TIMEOUT, loop=self.loop):
writer.write("{}\n".format(command).encode())
data = await reader.readline()
response = data.decode().rstrip()
_LOGGER.info("Receive from HostControl: %s.", response)
if response == "OK":
return True
elif response == "ERROR":
return False
elif response == "WRONG":
return None
else:
try:
return json.loads(response)
except json.JSONDecodeError:
_LOGGER.warning("Json parse error from HostControl '%s'.",
response)
except asyncio.TimeoutError:
_LOGGER.error("Timeout from HostControl!")
finally:
writer.close()
async def load(self):
"""Load Info from host.
Return a coroutine.
"""
info = await self._send_command("info")
if not info:
return
self.version = info.get(ATTR_VERSION, UNKNOWN)
self.last_version = info.get(ATTR_LAST_VERSION, UNKNOWN)
self.type = info.get(ATTR_TYPE, UNKNOWN)
self.features = info.get(ATTR_FEATURES, [])
self.hostname = info.get(ATTR_HOSTNAME, UNKNOWN)
self.os_info = info.get(ATTR_OS, UNKNOWN)
def reboot(self):
"""Reboot the host system.
Return a coroutine.
"""
return self._send_command("reboot")
def shutdown(self):
"""Shutdown the host system.
Return a coroutine.
"""
return self._send_command("shutdown")
def update(self, version=None):
"""Update the host system.
Return a coroutine.
"""
if version:
return self._send_command("update {}".format(version))
return self._send_command("update")
def set_hostname(self, hostname):
"""Update hostname on host."""
return self._send_command("hostname {}".format(hostname))

View File

@@ -10,7 +10,7 @@ CALL = 'callback'
TASK = 'task'
class Scheduler(object):
class Scheduler:
"""Schedule task inside HassIO."""
def __init__(self, loop):

View File

@@ -1,8 +1,8 @@
"""Handle internal services discovery."""
from .discovery import Discovery # noqa
from .mqtt import MQTTService
from .data import ServicesData
from .discovery import Discovery
from ..const import SERVICE_MQTT
from ..coresys import CoreSysAttributes
@@ -19,7 +19,6 @@ class ServiceManager(CoreSysAttributes):
"""Initialize Services handler."""
self.coresys = coresys
self.data = ServicesData()
self.discovery = Discovery(coresys)
self.services_obj = {}
@property
@@ -37,9 +36,9 @@ class ServiceManager(CoreSysAttributes):
self.services_obj[slug] = service(self.coresys)
# Read exists discovery messages
self.discovery.load()
self.sys_discovery.load()
def reset(self):
"""Reset available data."""
self.data.reset_data()
self.discovery.load()
self.sys_discovery.load()

View File

@@ -36,7 +36,7 @@ class Discovery(CoreSysAttributes):
self._data.clear()
self._data.extend(messages)
self._services.data.save_data()
self.sys_services.data.save_data()
def get(self, uuid):
"""Return discovery message."""
@@ -45,7 +45,7 @@ class Discovery(CoreSysAttributes):
@property
def _data(self):
"""Return discovery data."""
return self._services.data.discovery
return self.sys_services.data.discovery
@property
def list_messages(self):
@@ -69,7 +69,7 @@ class Discovery(CoreSysAttributes):
self.save()
# send event to Home-Assistant
self._loop.create_task(self._homeassistant.send_event(
self.sys_create_task(self.sys_homeassistant.send_event(
EVENT_DISCOVERY_ADD, {ATTR_UUID: message.uuid}))
return message
@@ -80,11 +80,11 @@ class Discovery(CoreSysAttributes):
self.save()
# send event to Home-Assistant
self._loop.create_task(self._homeassistant.send_event(
self.sys_create_task(self.sys_homeassistant.send_event(
EVENT_DISCOVERY_DEL, {ATTR_UUID: message.uuid}))
class Message(object):
class Message:
"""Represent a single Discovery message."""
def __init__(self, provider, component, platform, config, uuid=None):

View File

@@ -37,7 +37,7 @@ class ServiceInterface(CoreSysAttributes):
def save(self):
"""Save changes."""
self._services.data.save_data()
self.sys_services.data.save_data()
def get_service_data(self):
"""Return the requested service data."""

View File

@@ -21,7 +21,7 @@ class MQTTService(ServiceInterface):
@property
def _data(self):
"""Return data of this service."""
return self._services.data.mqtt
return self.sys_services.data.mqtt
@property
def schema(self):
@@ -66,7 +66,7 @@ class MQTTService(ServiceInterface):
return True
# discover mqtt to homeassistant
message = self._services.discovery.send(
message = self.sys_discovery.send(
provider, SERVICE_MQTT, None, self.hass_config)
self._data[ATTR_DISCOVERY_ID] = message.uuid
@@ -81,8 +81,8 @@ class MQTTService(ServiceInterface):
discovery_id = self._data.get(ATTR_DISCOVERY_ID)
if discovery_id:
self._services.discovery.remove(
self._services.discovery.get(discovery_id))
self.sys_discovery.remove(
self.sys_discovery.get(discovery_id))
self._data.clear()
self.save()

View File

@@ -35,7 +35,7 @@ class SnapshotManager(CoreSysAttributes):
"""Initialize a new snapshot object from name."""
date_str = utcnow().isoformat()
slug = create_slug(name, date_str)
tar_file = Path(self._config.path_backup, f"{slug}.tar")
tar_file = Path(self.sys_config.path_backup, f"{slug}.tar")
# init object
snapshot = Snapshot(self.coresys, tar_file)
@@ -65,11 +65,11 @@ class SnapshotManager(CoreSysAttributes):
self.snapshots_obj[snapshot.slug] = snapshot
tasks = [_load_snapshot(tar_file) for tar_file in
self._config.path_backup.glob("*.tar")]
self.sys_config.path_backup.glob("*.tar")]
_LOGGER.info("Found %d snapshot files", len(tasks))
if tasks:
await asyncio.wait(tasks, loop=self._loop)
await asyncio.wait(tasks)
def remove(self, snapshot):
"""Remove a snapshot."""
@@ -98,7 +98,7 @@ class SnapshotManager(CoreSysAttributes):
return None
# Move snapshot to backup
tar_origin = Path(self._config.path_backup, f"{snapshot.slug}.tar")
tar_origin = Path(self.sys_config.path_backup, f"{snapshot.slug}.tar")
try:
snapshot.tarfile.rename(tar_origin)
@@ -124,7 +124,7 @@ class SnapshotManager(CoreSysAttributes):
snapshot = self._create_snapshot(name, SNAPSHOT_FULL, password)
_LOGGER.info("Full-Snapshot %s start", snapshot.slug)
try:
self._scheduler.suspend = True
self.sys_scheduler.suspend = True
await self.lock.acquire()
async with snapshot:
@@ -146,7 +146,7 @@ class SnapshotManager(CoreSysAttributes):
return snapshot
finally:
self._scheduler.suspend = False
self.sys_scheduler.suspend = False
self.lock.release()
async def do_snapshot_partial(self, name="", addons=None, folders=None,
@@ -162,14 +162,14 @@ class SnapshotManager(CoreSysAttributes):
_LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
try:
self._scheduler.suspend = True
self.sys_scheduler.suspend = True
await self.lock.acquire()
async with snapshot:
# Snapshot add-ons
addon_list = []
for addon_slug in addons:
addon = self._addons.get(addon_slug)
addon = self.sys_addons.get(addon_slug)
if addon and addon.is_installed:
addon_list.append(addon)
continue
@@ -195,7 +195,7 @@ class SnapshotManager(CoreSysAttributes):
return snapshot
finally:
self._scheduler.suspend = False
self.sys_scheduler.suspend = False
self.lock.release()
async def do_restore_full(self, snapshot, password=None):
@@ -215,21 +215,14 @@ class SnapshotManager(CoreSysAttributes):
_LOGGER.info("Full-Restore %s start", snapshot.slug)
try:
self._scheduler.suspend = True
self.sys_scheduler.suspend = True
await self.lock.acquire()
async with snapshot:
tasks = []
# Stop Home-Assistant / Add-ons
tasks.append(self._homeassistant.stop())
for addon in self._addons.list_addons:
if addon.is_installed:
tasks.append(addon.stop())
if tasks:
_LOGGER.info("Restore %s stop tasks", snapshot.slug)
await asyncio.wait(tasks, loop=self._loop)
await self.sys_core.shutdown()
# Restore folders
_LOGGER.info("Restore %s run folders", snapshot.slug)
@@ -238,8 +231,8 @@ class SnapshotManager(CoreSysAttributes):
# Start homeassistant restore
_LOGGER.info("Restore %s run Home-Assistant", snapshot.slug)
snapshot.restore_homeassistant()
task_hass = self._loop.create_task(
self._homeassistant.update(snapshot.homeassistant_version))
task_hass = self.sys_create_task(self.sys_homeassistant.update(
snapshot.homeassistant_version))
# Restore repositories
_LOGGER.info("Restore %s run Repositories", snapshot.slug)
@@ -247,13 +240,13 @@ class SnapshotManager(CoreSysAttributes):
# Delete delta add-ons
tasks.clear()
for addon in self._addons.list_installed:
for addon in self.sys_addons.list_installed:
if addon.slug not in snapshot.addon_list:
tasks.append(addon.uninstall())
if tasks:
_LOGGER.info("Restore %s remove add-ons", snapshot.slug)
await asyncio.wait(tasks, loop=self._loop)
await asyncio.wait(tasks)
# Restore add-ons
_LOGGER.info("Restore %s old add-ons", snapshot.slug)
@@ -263,7 +256,7 @@ class SnapshotManager(CoreSysAttributes):
_LOGGER.info("Restore %s wait until homeassistant ready",
snapshot.slug)
await task_hass
await self._homeassistant.start()
await self.sys_homeassistant.start()
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Restore %s error", snapshot.slug)
@@ -274,7 +267,7 @@ class SnapshotManager(CoreSysAttributes):
return True
finally:
self._scheduler.suspend = False
self.sys_scheduler.suspend = False
self.lock.release()
async def do_restore_partial(self, snapshot, homeassistant=False,
@@ -293,13 +286,13 @@ class SnapshotManager(CoreSysAttributes):
_LOGGER.info("Partial-Restore %s start", snapshot.slug)
try:
self._scheduler.suspend = True
self.sys_scheduler.suspend = True
await self.lock.acquire()
async with snapshot:
# Stop Home-Assistant if they will be restored later
if homeassistant and FOLDER_HOMEASSISTANT in folders:
await self._homeassistant.stop()
await self.sys_homeassistant.stop()
# Process folders
if folders:
@@ -312,14 +305,14 @@ class SnapshotManager(CoreSysAttributes):
_LOGGER.info("Restore %s run Home-Assistant",
snapshot.slug)
snapshot.restore_homeassistant()
task_hass = self._loop.create_task(
self._homeassistant.update(
task_hass = self.sys_create_task(
self.sys_homeassistant.update(
snapshot.homeassistant_version))
# Process Add-ons
addon_list = []
for slug in addons:
addon = self._addons.get(slug)
addon = self.sys_addons.get(slug)
if addon:
addon_list.append(addon)
continue
@@ -334,7 +327,7 @@ class SnapshotManager(CoreSysAttributes):
_LOGGER.info("Restore %s wait for Home-Assistant",
snapshot.slug)
await task_hass
await self._homeassistant.start()
await self.sys_homeassistant.start()
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Restore %s error", snapshot.slug)
@@ -345,5 +338,5 @@ class SnapshotManager(CoreSysAttributes):
return True
finally:
self._scheduler.suspend = False
self.sys_scheduler.suspend = False
self.lock.release()

View File

@@ -179,7 +179,7 @@ class Snapshot(CoreSysAttributes):
# read snapshot.json
try:
raw = await self._loop.run_in_executor(None, _load_file)
raw = await self.sys_run_in_executor(_load_file)
except (tarfile.TarError, KeyError) as err:
_LOGGER.error(
"Can't read snapshot tarfile %s: %s", self.tarfile, err)
@@ -204,7 +204,7 @@ class Snapshot(CoreSysAttributes):
async def __aenter__(self):
"""Async context to open a snapshot."""
self._tmp = TemporaryDirectory(dir=str(self._config.path_tmp))
self._tmp = TemporaryDirectory(dir=str(self.sys_config.path_tmp))
# create a snapshot
if not self.tarfile.is_file():
@@ -216,7 +216,7 @@ class Snapshot(CoreSysAttributes):
with tarfile.open(self.tarfile, "r:") as tar:
tar.extractall(path=self._tmp.name)
await self._loop.run_in_executor(None, _extract_snapshot)
await self.sys_run_in_executor(_extract_snapshot)
async def __aexit__(self, exception_type, exception_value, traceback):
"""Async context to close a snapshot."""
@@ -241,7 +241,7 @@ class Snapshot(CoreSysAttributes):
try:
write_json_file(Path(self._tmp.name, "snapshot.json"), self._data)
await self._loop.run_in_executor(None, _create_snapshot)
await self.sys_run_in_executor(_create_snapshot)
except (OSError, json.JSONDecodeError) as err:
_LOGGER.error("Can't write snapshot: %s", err)
finally:
@@ -249,7 +249,7 @@ class Snapshot(CoreSysAttributes):
async def store_addons(self, addon_list=None):
"""Add a list of add-ons into snapshot."""
addon_list = addon_list or self._addons.list_installed
addon_list = addon_list or self.sys_addons.list_installed
async def _addon_save(addon):
"""Task to store a add-on into snapshot."""
@@ -273,14 +273,14 @@ class Snapshot(CoreSysAttributes):
# Run tasks
tasks = [_addon_save(addon) for addon in addon_list]
if tasks:
await asyncio.wait(tasks, loop=self._loop)
await asyncio.wait(tasks)
async def restore_addons(self, addon_list=None):
"""Restore a list add-on from snapshot."""
if not addon_list:
addon_list = []
for addon_slug in self.addon_list:
addon = self._addons.get(addon_slug)
addon = self.sys_addons.get(addon_slug)
if addon:
addon_list.append(addon)
@@ -303,7 +303,7 @@ class Snapshot(CoreSysAttributes):
# Run tasks
tasks = [_addon_restore(addon) for addon in addon_list]
if tasks:
await asyncio.wait(tasks, loop=self._loop)
await asyncio.wait(tasks)
async def store_folders(self, folder_list=None):
"""Backup hassio data into snapshot."""
@@ -313,7 +313,7 @@ class Snapshot(CoreSysAttributes):
"""Intenal function to snapshot a folder."""
slug_name = name.replace("/", "_")
tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
origin_dir = Path(self._config.path_hassio, name)
origin_dir = Path(self.sys_config.path_hassio, name)
# Check if exists
if not origin_dir.is_dir():
@@ -332,10 +332,10 @@ class Snapshot(CoreSysAttributes):
_LOGGER.warning("Can't snapshot folder %s: %s", name, err)
# Run tasks
tasks = [self._loop.run_in_executor(None, _folder_save, folder)
tasks = [self.sys_run_in_executor(_folder_save, folder)
for folder in folder_list]
if tasks:
await asyncio.wait(tasks, loop=self._loop)
await asyncio.wait(tasks)
async def restore_folders(self, folder_list=None):
"""Backup hassio data into snapshot."""
@@ -345,7 +345,7 @@ class Snapshot(CoreSysAttributes):
"""Intenal function to restore a folder."""
slug_name = name.replace("/", "_")
tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
origin_dir = Path(self._config.path_hassio, name)
origin_dir = Path(self.sys_config.path_hassio, name)
# Check if exists inside snapshot
if not tar_name.exists():
@@ -366,58 +366,58 @@ class Snapshot(CoreSysAttributes):
_LOGGER.warning("Can't restore folder %s: %s", name, err)
# Run tasks
tasks = [self._loop.run_in_executor(None, _folder_restore, folder)
tasks = [self.sys_run_in_executor(_folder_restore, folder)
for folder in folder_list]
if tasks:
await asyncio.wait(tasks, loop=self._loop)
await asyncio.wait(tasks)
def store_homeassistant(self):
"""Read all data from homeassistant object."""
self.homeassistant[ATTR_VERSION] = self._homeassistant.version
self.homeassistant[ATTR_WATCHDOG] = self._homeassistant.watchdog
self.homeassistant[ATTR_BOOT] = self._homeassistant.boot
self.homeassistant[ATTR_WAIT_BOOT] = self._homeassistant.wait_boot
self.homeassistant[ATTR_VERSION] = self.sys_homeassistant.version
self.homeassistant[ATTR_WATCHDOG] = self.sys_homeassistant.watchdog
self.homeassistant[ATTR_BOOT] = self.sys_homeassistant.boot
self.homeassistant[ATTR_WAIT_BOOT] = self.sys_homeassistant.wait_boot
# Custom image
if self._homeassistant.is_custom_image:
self.homeassistant[ATTR_IMAGE] = self._homeassistant.image
if self.sys_homeassistant.is_custom_image:
self.homeassistant[ATTR_IMAGE] = self.sys_homeassistant.image
self.homeassistant[ATTR_LAST_VERSION] = \
self._homeassistant.last_version
self.sys_homeassistant.last_version
# API/Proxy
self.homeassistant[ATTR_PORT] = self._homeassistant.api_port
self.homeassistant[ATTR_SSL] = self._homeassistant.api_ssl
self.homeassistant[ATTR_PORT] = self.sys_homeassistant.api_port
self.homeassistant[ATTR_SSL] = self.sys_homeassistant.api_ssl
self.homeassistant[ATTR_PASSWORD] = \
self._encrypt_data(self._homeassistant.api_password)
self._encrypt_data(self.sys_homeassistant.api_password)
def restore_homeassistant(self):
"""Write all data to homeassistant object."""
self._homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG]
self._homeassistant.boot = self.homeassistant[ATTR_BOOT]
self._homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT]
self.sys_homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG]
self.sys_homeassistant.boot = self.homeassistant[ATTR_BOOT]
self.sys_homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT]
# Custom image
if self.homeassistant.get(ATTR_IMAGE):
self._homeassistant.image = self.homeassistant[ATTR_IMAGE]
self._homeassistant.last_version = \
self.sys_homeassistant.image = self.homeassistant[ATTR_IMAGE]
self.sys_homeassistant.last_version = \
self.homeassistant[ATTR_LAST_VERSION]
# API/Proxy
self._homeassistant.api_port = self.homeassistant[ATTR_PORT]
self._homeassistant.api_ssl = self.homeassistant[ATTR_SSL]
self._homeassistant.api_password = \
self.sys_homeassistant.api_port = self.homeassistant[ATTR_PORT]
self.sys_homeassistant.api_ssl = self.homeassistant[ATTR_SSL]
self.sys_homeassistant.api_password = \
self._decrypt_data(self.homeassistant[ATTR_PASSWORD])
# save
self._homeassistant.save_data()
self.sys_homeassistant.save_data()
def store_repositories(self):
"""Store repository list into snapshot."""
self.repositories = self._config.addons_repositories
self.repositories = self.sys_config.addons_repositories
def restore_repositories(self):
"""Restore repositories from snapshot.
Return a coroutine.
"""
return self._addons.load_repositories(self.repositories)
return self.sys_addons.load_repositories(self.repositories)

View File

@@ -34,7 +34,7 @@ class Supervisor(CoreSysAttributes):
@property
def last_version(self):
"""Return last available version of homeassistant."""
return self._updater.version_hassio
return self.sys_updater.version_hassio
@property
def image(self):
@@ -50,13 +50,13 @@ class Supervisor(CoreSysAttributes):
"""Update HomeAssistant version."""
version = version or self.last_version
if version == self._supervisor.version:
if version == self.sys_supervisor.version:
_LOGGER.warning("Version %s is already installed", version)
return
_LOGGER.info("Update supervisor to version %s", version)
if await self.instance.install(version):
self._loop.call_later(1, self._loop.stop)
self.sys_loop.call_later(1, self.sys_loop.stop)
return True
_LOGGER.error("Update of hass.io fails!")

View File

@@ -15,7 +15,7 @@ class Tasks(CoreSysAttributes):
    RUN_RELOAD_ADDONS = 21600
    RUN_RELOAD_SNAPSHOTS = 72000
-    RUN_RELOAD_HOST_CONTROL = 72000
+    RUN_RELOAD_HOST = 72000
    RUN_RELOAD_UPDATER = 21600

    RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
@@ -29,24 +29,24 @@ class Tasks(CoreSysAttributes):
    async def load(self):
        """Add Tasks to scheduler."""
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
            self._update_addons, self.RUN_UPDATE_ADDONS))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
            self._update_supervisor, self.RUN_UPDATE_SUPERVISOR))
-        self.jobs.add(self._scheduler.register_task(
-            self._addons.reload, self.RUN_RELOAD_ADDONS))
-        self.jobs.add(self._scheduler.register_task(
-            self._updater.reload, self.RUN_RELOAD_UPDATER))
-        self.jobs.add(self._scheduler.register_task(
-            self._snapshots.reload, self.RUN_RELOAD_SNAPSHOTS))
-        self.jobs.add(self._scheduler.register_task(
-            self._host_control.load, self.RUN_RELOAD_HOST_CONTROL))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_addons.reload, self.RUN_RELOAD_ADDONS))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_updater.reload, self.RUN_RELOAD_UPDATER))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_snapshots.reload, self.RUN_RELOAD_SNAPSHOTS))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_host.load, self.RUN_RELOAD_HOST))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
            self._watchdog_homeassistant_docker,
            self.RUN_WATCHDOG_HOMEASSISTANT_DOCKER))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
            self._watchdog_homeassistant_api,
            self.RUN_WATCHDOG_HOMEASSISTANT_API))
@@ -55,7 +55,7 @@ class Tasks(CoreSysAttributes):
    async def _update_addons(self):
        """Check if a update is available of a addon and update it."""
        tasks = []
-        for addon in self._addons.list_addons:
+        for addon in self.sys_addons.list_addons:
            if not addon.is_installed or not addon.auto_update:
                continue
@@ -70,35 +70,35 @@ class Tasks(CoreSysAttributes):
        if tasks:
            _LOGGER.info("Addon auto update process %d tasks", len(tasks))
-            await asyncio.wait(tasks, loop=self._loop)
+            await asyncio.wait(tasks)

    async def _update_supervisor(self):
        """Check and run update of supervisor hassio."""
-        if not self._supervisor.need_update:
+        if not self.sys_supervisor.need_update:
            return

        # don't perform a update on beta/dev channel
-        if self._dev:
+        if self.sys_dev:
            _LOGGER.warning("Ignore Hass.io update on dev channel!")
            return

        _LOGGER.info("Found new Hass.io version")
-        await self._supervisor.update()
+        await self.sys_supervisor.update()

    async def _watchdog_homeassistant_docker(self):
        """Check running state of docker and start if they is close."""
        # if Home-Assistant is active
-        if not await self._homeassistant.is_initialize() or \
-                not self._homeassistant.watchdog:
+        if not await self.sys_homeassistant.is_initialize() or \
+                not self.sys_homeassistant.watchdog:
            return

        # if Home-Assistant is running
-        if self._homeassistant.in_progress or \
-                await self._homeassistant.is_running():
+        if self.sys_homeassistant.in_progress or \
+                await self.sys_homeassistant.is_running():
            return

        _LOGGER.warning("Watchdog found a problem with Home-Assistant docker!")
-        await self._homeassistant.start()
+        await self.sys_homeassistant.start()

    async def _watchdog_homeassistant_api(self):
        """Create scheduler task for montoring running state of API.
@@ -109,13 +109,13 @@ class Tasks(CoreSysAttributes):
        retry_scan = self._data.get('HASS_WATCHDOG_API', 0)

        # If Home-Assistant is active
-        if not await self._homeassistant.is_initialize() or \
-                not self._homeassistant.watchdog:
+        if not await self.sys_homeassistant.is_initialize() or \
+                not self.sys_homeassistant.watchdog:
            return

        # If Home-Assistant API is up
-        if self._homeassistant.in_progress or \
-                await self._homeassistant.check_api_state():
+        if self.sys_homeassistant.in_progress or \
+                await self.sys_homeassistant.check_api_state():
            return

        # Look like we run into a problem
@@ -126,5 +126,5 @@ class Tasks(CoreSysAttributes):
            return

        _LOGGER.error("Watchdog found a problem with Home-Assistant API!")
-        await self._homeassistant.restart()
+        await self.sys_homeassistant.restart()
        self._data['HASS_WATCHDOG_API'] = 0
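Besides the `sys_*` rename, the tasks.py hunks above also drop the explicit `loop=self._loop` argument from `asyncio.wait()`. A standalone sketch of the equivalent modern call, assuming a Python version where the running event loop is picked up implicitly (the coroutine names below are illustrative, not from these commits):

# Standalone sketch: asyncio picks up the running loop automatically, so the
# explicit loop= argument can be dropped, as done in tasks.py above.
import asyncio

async def run_parallel():
    tasks = [asyncio.ensure_future(asyncio.sleep(0.1)) for _ in range(3)]
    await asyncio.wait(tasks)  # previously: await asyncio.wait(tasks, loop=loop)

asyncio.run(run_parallel())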

View File

@@ -1,15 +1,15 @@
"""Fetch last versions from webserver."""
import asyncio
from contextlib import suppress
from datetime import timedelta
import json
import logging
import aiohttp
import async_timeout
from .const import (
URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
ATTR_CHANNEL, CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV)
ATTR_CHANNEL)
from .coresys import CoreSysAttributes
from .utils import AsyncThrottle
from .utils.json import JsonConfig
@@ -17,12 +17,6 @@ from .validate import SCHEMA_UPDATER_CONFIG
_LOGGER = logging.getLogger(__name__)

-CHANNEL_TO_BRANCH = {
-    CHANNEL_STABLE: 'master',
-    CHANNEL_BETA: 'rc',
-    CHANNEL_DEV: 'dev',
-}


class Updater(JsonConfig, CoreSysAttributes):
    """Fetch last versions from version.json."""
@@ -65,12 +59,11 @@ class Updater(JsonConfig, CoreSysAttributes):
        Is a coroutine.
        """
-        url = URL_HASSIO_VERSION.format(CHANNEL_TO_BRANCH[self.channel])
+        url = URL_HASSIO_VERSION.format(channel=self.channel)
        try:
            _LOGGER.info("Fetch update data from %s", url)
-            with async_timeout.timeout(10, loop=self._loop):
-                async with self._websession.get(url) as request:
-                    data = await request.json(content_type=None)
+            async with self.sys_websession.get(url, timeout=10) as request:
+                data = await request.json(content_type=None)

        except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
            _LOGGER.warning("Can't fetch versions from %s: %s", url, err)
@@ -81,11 +74,18 @@ class Updater(JsonConfig, CoreSysAttributes):
            return

        # data valid?
-        if not data:
+        if not data or data.get(ATTR_CHANNEL) != self.channel:
            _LOGGER.warning("Invalid data from %s", url)
            return

-        # update versions
-        self._data[ATTR_HOMEASSISTANT] = data.get('homeassistant')
-        self._data[ATTR_HASSIO] = data.get('hassio')
+        # update supervisor versions
+        with suppress(KeyError):
+            self._data[ATTR_HASSIO] = data['supervisor']
+
+        # update Home Assistant version
+        machine = self.sys_machine or 'default'
+        with suppress(KeyError):
+            self._data[ATTR_HOMEASSISTANT] = \
+                data['homeassistant'][machine]

        self.save_data()
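The reworked fetch_data() above implies a new layout for the version file fetched from URL_HASSIO_VERSION: it must echo the configured channel, carry the Supervisor version under "supervisor", and key Home Assistant versions by machine type. A hypothetical payload it would accept (all values and machine names are made-up examples, not taken from these commits):

# Hypothetical example of the data the new fetch_data() can parse; field
# values and machine keys are illustrative only.
example_version_data = {
    "channel": "stable",           # must match self.channel or the data is rejected
    "supervisor": "105",           # stored as self._data[ATTR_HASSIO]
    "homeassistant": {
        "default": "0.70.0",       # used when sys_machine is unset
        "raspberrypi3": "0.70.0",  # per-machine entries (illustrative keys)
        "qemux86-64": "0.70.0",
    },
}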

View File

@@ -27,7 +27,7 @@ def process_lock(method):
    return wrap_api


-class AsyncThrottle(object):
+class AsyncThrottle:
    """
    Decorator that prevents a function from being called more than once every
    time period.

View File

@@ -29,7 +29,7 @@ async def fetch_timezone(websession):
"""Read timezone from freegeoip."""
data = {}
try:
with async_timeout.timeout(10, loop=websession.loop):
with async_timeout.timeout(10):
async with websession.get(FREEGEOIP_URL) as request:
data = await request.json()

174
hassio/utils/gdbus.py Normal file
View File

@@ -0,0 +1,174 @@
"""DBus implementation with glib."""
import asyncio
import logging
import json
import shlex
import re
import xml.etree.ElementTree as ET
from ..exceptions import DBusFatalError, DBusParseError
_LOGGER = logging.getLogger(__name__)
# Use to convert GVariant into json
RE_GVARIANT_TYPE = re.compile(
r"(?:boolean|byte|int16|uint16|int32|uint32|handle|int64|uint64|double|"
r"string|objectpath|signature) ")
RE_GVARIANT_TULPE = re.compile(r"^\((.*),\)$")
RE_GVARIANT_VARIANT = re.compile(
r"(?<=(?: |{|\[))<((?:'|\").*?(?:'|\")|\d+(?:\.\d+)?)>(?=(?:|]|}|,))")
RE_GVARIANT_STRING = re.compile(r"(?<=(?: |{|\[))'(.*?)'(?=(?:|]|}|,))")
# Commands for dbus
INTROSPECT = ("gdbus introspect --system --dest {bus} "
"--object-path {object} --xml")
CALL = ("gdbus call --system --dest {bus} --object-path {object} "
"--method {method} {args}")
DBUS_METHOD_GETALL = 'org.freedesktop.DBus.Properties.GetAll'
class DBus:
"""DBus handler."""
def __init__(self, bus_name, object_path):
"""Initialize dbus object."""
self.bus_name = bus_name
self.object_path = object_path
self.methods = set()
@staticmethod
async def connect(bus_name, object_path):
"""Read object data."""
self = DBus(bus_name, object_path)
await self._init_proxy() # pylint: disable=protected-access
_LOGGER.info("Connect to dbus: %s - %s", bus_name, object_path)
return self
async def _init_proxy(self):
"""Read interface data."""
command = shlex.split(INTROSPECT.format(
bus=self.bus_name,
object=self.object_path
))
# Ask data
_LOGGER.info("Introspect %s on %s", self.bus_name, self.object_path)
data = await self._send(command)
# Parse XML
try:
xml = ET.fromstring(data)
except ET.ParseError as err:
_LOGGER.error("Can't parse introspect data: %s", err)
raise DBusParseError() from None
# Read available methods
_LOGGER.debug("data: %s", data)
for interface in xml.findall("./interface"):
interface_name = interface.get('name')
for method in interface.findall("./method"):
method_name = method.get('name')
self.methods.add(f"{interface_name}.{method_name}")
@staticmethod
def _gvariant(raw):
"""Parse GVariant input to python."""
raw = RE_GVARIANT_TYPE.sub("", raw)
raw = RE_GVARIANT_TULPE.sub(r"[\1]", raw)
raw = RE_GVARIANT_VARIANT.sub(r"\1", raw)
raw = RE_GVARIANT_STRING.sub(r'"\1"', raw)
# No data
if raw.startswith("()"):
return {}
try:
return json.loads(raw)
except json.JSONDecodeError as err:
_LOGGER.error("Can't parse '%s': %s", raw, err)
raise DBusParseError() from None
async def call_dbus(self, method, *args):
"""Call a dbus method."""
command = shlex.split(CALL.format(
bus=self.bus_name,
object=self.object_path,
method=method,
args=" ".join(map(str, args))
))
# Run command
_LOGGER.info("Call %s on %s", method, self.object_path)
data = await self._send(command)
# Parse and return data
return self._gvariant(data)
async def get_properties(self, interface):
"""Read all properties from interface."""
try:
return (await self.call_dbus(DBUS_METHOD_GETALL, interface))[0]
except IndexError:
_LOGGER.error("No attributes returned for %s", interface)
raise DBusFatalError from None
async def _send(self, command):
"""Send command over dbus."""
# Run command
_LOGGER.debug("Send dbus command: %s", command)
try:
proc = await asyncio.create_subprocess_exec(
*command,
stdin=asyncio.subprocess.DEVNULL,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE
)
data, error = await proc.communicate()
except OSError as err:
_LOGGER.error("DBus fatal error: %s", err)
raise DBusFatalError() from None
# Success?
if proc.returncode != 0:
_LOGGER.error("DBus return error: %s", error)
raise DBusFatalError()
# End
return data.decode()
def __getattr__(self, name):
"""Mapping to dbus method."""
return getattr(DBusCallWrapper(self, self.bus_name), name)
class DBusCallWrapper:
"""Wrapper a DBus interface for a call."""
def __init__(self, dbus, interface):
"""Initialize wrapper."""
self.dbus = dbus
self.interface = interface
def __call__(self):
"""Should never be called."""
_LOGGER.error("DBus method %s not exists!", self.interface)
raise DBusFatalError()
def __getattr__(self, name):
"""Mapping to dbus method."""
interface = f"{self.interface}.{name}"
if interface not in self.dbus.methods:
return DBusCallWrapper(self.dbus, interface)
def _method_wrapper(*args):
"""Wrap method.
Return a coroutine
"""
return self.dbus.call_dbus(interface, *args)
return _method_wrapper
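The new gdbus helper shells out to the gdbus CLI and rewrites its GVariant output into JSON. A usage sketch under assumptions: the hostname1 bus name, object path and property values below are illustrative; only DBus.connect, get_properties and the __getattr__ proxying come from the file above.

# Illustrative usage of hassio.utils.gdbus.DBus; requires a system bus and the
# gdbus CLI. Bus/object names and returned values are example data.
import asyncio

from hassio.utils.gdbus import DBus

async def example():
    hostname = await DBus.connect(
        "org.freedesktop.hostname1", "/org/freedesktop/hostname1")

    # get_properties() calls org.freedesktop.DBus.Properties.GetAll. Raw gdbus
    # output such as
    #     ({'StaticHostname': <'hassio'>, 'Chassis': <'embedded'>},)
    # is rewritten by _gvariant() into
    #     [{"StaticHostname": "hassio", "Chassis": "embedded"}]
    # and get_properties() returns the first element of that list.
    props = await hostname.get_properties("org.freedesktop.hostname1")
    print(props.get("StaticHostname"))

    # Any other attribute chain is proxied via DBusCallWrapper, so e.g.
    # hostname.SetStaticHostname(...) would resolve to
    # call_dbus("org.freedesktop.hostname1.SetStaticHostname", ...),
    # provided that method appeared in the introspection data.

asyncio.run(example())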

View File

@@ -21,7 +21,7 @@ def read_json_file(jsonfile):
        return json.loads(cfile.read())


-class JsonConfig(object):
+class JsonConfig:
    """Hass core object for handle it."""

    def __init__(self, json_file, schema):

View File

@@ -12,7 +12,7 @@ MOD_READ = 'r'
MOD_WRITE = 'w'


-class SecureTarFile(object):
+class SecureTarFile:
    """Handle encrypted files for tarfile library."""

    def __init__(self, name, mode, key=None, gzip=True):

View File

@@ -40,13 +40,14 @@ setup(
    ],
    include_package_data=True,
    install_requires=[
-        'async_timeout==2.0.1',
-        'aiohttp==3.1.2',
-        'docker==3.2.0',
+        'attr==0.3.1',
+        'async_timeout==3.0.0',
+        'aiohttp==3.2.1',
+        'docker==3.3.0',
        'colorlog==3.1.2',
        'voluptuous==0.11.1',
-        'gitpython==2.1.8',
-        'pytz==2018.3',
+        'gitpython==2.1.10',
+        'pytz==2018.4',
        'pyudev==0.21.0',
        'pycryptodome==3.4.11'
    ]

View File

@@ -1,8 +1,4 @@
{
-    "hassio": "0.101",
-    "homeassistant": "0.67.0",
-    "resinos": "1.3",
-    "resinhup": "0.3",
-    "generic": "0.3",
-    "cluster": "0.1"
+    "hassio": "105",
+    "homeassistant": "0.70.0"
}