Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-08-20 06:29:21 +00:00

Compare commits (114 commits)
Commits (SHA1):

7cd81dcc95, 1bdd3d88de, d105552fa9, b5af35bd6c, 7d46487491, 38a599011e,
e59e2fc8d7, b9ce405ada, d7df423deb, 99eea99e93, 63d82ce03e, 13a2c1ecd9,
627ab4ee81, 54f45539be, 53297205c8, 0f09fdfcce, 24db0fdb86, 7349234638,
c691f2a559, 110cd32dc3, 26d8dc0ec6, fd41bda828, 1e3868bb70, ece6c644cf,
6a5bd5a014, 664334f1ad, e5e28747d4, c7956d95ae, 5ce6abdbb6, fad0185c26,
86faf32709, 19f413796d, 8f94b4d63f, db263f84af, 747810b729, d6768f15a1,
6c75957578, 3a8307acfe, f20c7d42ee, 9419fbff94, 3ac6c03637, a95274f1b3,
9d2fb87cec, ce9c3565b6, b0ec58ed1b, 893a5f8dd3, 98064f6a90, 5146f89354,
fb46592d48, b4fb5ac681, 4b7201dc59, 3a5a4e4c27, 70104a9280, efbc7b17a1,
64c5e20fc4, 13498afa97, f6375f1bd6, 8fd1599173, 63302b73b0, f591f67a2a,
cda3184a55, afc811e975, 2e169dcb42, 34e24e184f, 2e4751ed7d, 8c82c467d4,
f3f6771534, 0a75a4dcbc, 1a4542fc4e, 7e0525749e, b33b26018d, 66c93e7176,
5674d32bad, 7a84972770, 638f0f5371, dca1b6f1d3, 2b0ee109d6, e7430d87d7,
9751c1de79, c497167b64, 7fb2aca88b, 0d544845b1, 602eb472f9, f22fa46bdb,
4171a28260, 55365a631a, 547415b30b, cbf79f1fab, 31cc1dce82, 8a11e6c845,
2df4f80aa5, 68566ee9e1, fe04b7ec59, 38f96d7ddd, 2b2edd6e98, 361969aca2,
e61e7f41f2, 75150fd149, bd1c8be1e1, f167197640, f084ecc007, 65becbd0ae,
f38e28a4d9, 2998cd94ff, 79e2f3e8ab, 13291f52f2, 4baa80c3de, be28a6b012,
d94ada6216, b2d7743e06, 40324beb72, c02f6913b3, d56af22d5e, 1795103086
.dockerignore (new file, 9 lines)

@@ -0,0 +1,9 @@
+# General files
+.git
+.github
+
+# Test related files
+.tox
+
+# Temporary files
+**/__pycache__

API.md (59 changed lines)

@@ -243,6 +243,7 @@ Optional:
     "serial": ["/dev/xy"],
     "input": ["Input device name"],
     "disk": ["/dev/sdax"],
+    "gpio": ["gpiochip0", "gpiochip100"],
     "audio": {
         "CARD_ID": {
             "name": "xy",

@@ -284,7 +285,10 @@ Optional:
     "devices": [""],
     "image": "str",
     "custom": "bool -> if custom image",
-    "boot": "bool"
+    "boot": "bool",
+    "port": 8123,
+    "ssl": "bool",
+    "watchdog": "bool"
 }
 ```

@@ -303,21 +307,34 @@ Optional:
 Output is the raw Docker log.

 - POST `/homeassistant/restart`
-- POST `/homeassistant/options`
 - POST `/homeassistant/check`
 - POST `/homeassistant/start`
 - POST `/homeassistant/stop`

+- POST `/homeassistant/options`
+
 ```json
 {
     "devices": [],
     "image": "Optional|null",
-    "last_version": "Optional for custom image|null"
+    "last_version": "Optional for custom image|null",
+    "port": "port for access hass",
+    "ssl": "bool",
+    "password": "",
+    "watchdog": "bool"
 }
 ```

 Image with `null` and last_version with `null` reset this options.

+- POST/GET `/homeassistant/api`
+
+Proxy to real home-assistant instance.
+
+- GET `/homeassistant/websocket`
+
+Proxy to real websocket instance.
+
 ### RESTful for API addons

 - GET `/addons`

@@ -337,12 +354,8 @@ Get all available addons.
     "installed": "none|INSTALL_VERSION",
     "detached": "bool",
     "build": "bool",
-    "privileged": ["NET_ADMIN", "SYS_ADMIN"],
-    "devices": ["/dev/xy"],
     "url": "null|url",
-    "logo": "bool",
-    "audio": "bool",
-    "hassio_api": "bool"
+    "logo": "bool"
 }
 ],
 "repositories": [

@@ -364,6 +377,7 @@ Get all available addons.
 {
     "name": "xy bla",
     "description": "description",
+    "long_description": "null|markdown",
     "auto_update": "bool",
     "url": "null|url of addon",
     "detached": "bool",

@@ -376,11 +390,18 @@ Get all available addons.
     "options": "{}",
     "network": "{}|null",
     "host_network": "bool",
+    "host_ipc": "bool",
+    "host_dbus": "bool",
     "privileged": ["NET_ADMIN", "SYS_ADMIN"],
     "devices": ["/dev/xy"],
+    "auto_uart": "bool",
     "logo": "bool",
+    "changelog": "bool",
     "hassio_api": "bool",
+    "homeassistant_api": "bool",
+    "stdin": "bool",
     "webui": "null|http(s)://[HOST]:port/xy/zx",
+    "gpio": "bool",
     "audio": "bool",
     "audio_input": "null|0,0",
     "audio_output": "null|0,0"

@@ -389,6 +410,8 @@ Get all available addons.

 - GET `/addons/{addon}/logo`

+- GET `/addons/{addon}/changelog`
+
 - POST `/addons/{addon}/options`

 ```json

@@ -412,26 +435,10 @@ For reset custom network/audio settings, set it `null`.

 - POST `/addons/{addon}/install`

-Optional:
-
-```json
-{
-    "version": "VERSION"
-}
-```
-
 - POST `/addons/{addon}/uninstall`

 - POST `/addons/{addon}/update`

-Optional:
-
-```json
-{
-    "version": "VERSION"
-}
-```
-
 - GET `/addons/{addon}/logs`

 Output is the raw Docker log.

@@ -442,6 +449,10 @@ Output is the raw Docker log.

 Only supported for local build addons

+- POST `/addons/{addon}/stdin`
+
+Write data to add-on stdin
+
 ## Host Control

 Communicate over UNIX socket with a host daemon.

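The new `/homeassistant/options` payload above maps directly to a plain HTTP call. Below is a minimal client sketch; the `http://hassio` base URL and the `X-HASSIO-KEY` header are assumptions about how an add-on container typically reaches this API, not something defined by the diff itself.

```python
# Hedged sketch of a client for POST /homeassistant/options.
# Assumptions: the API is reachable at http://hassio from inside an add-on
# and authenticates via an X-HASSIO-KEY header taken from HASSIO_TOKEN.
import os

import requests

SUPERVISOR = "http://hassio"  # assumed internal hostname
HEADERS = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}

payload = {
    "port": 8123,
    "ssl": False,
    "password": "example-password",  # placeholder value
    "watchdog": True,
}

resp = requests.post(
    f"{SUPERVISOR}/homeassistant/options",
    json=payload,
    headers=HEADERS,
    timeout=10,
)
resp.raise_for_status()
print(resp.json())
```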
Dockerfile (new file, 27 lines)

@@ -0,0 +1,27 @@
+ARG BUILD_FROM
+FROM $BUILD_FROM
+
+# Add env
+ENV LANG C.UTF-8
+
+# Setup base
+RUN apk add --no-cache \
+        python3 \
+        git \
+        socat \
+        libstdc++ \
+    && apk add --no-cache --virtual .build-dependencies \
+        make \
+        python3-dev \
+        g++ \
+    && pip3 install --no-cache-dir \
+        uvloop \
+        cchardet \
+    && apk del .build-dependencies
+
+# Install HassIO
+COPY . /usr/src/hassio
+RUN pip3 install --no-cache-dir /usr/src/hassio \
+    && rm -rf /usr/src/hassio
+
+CMD [ "python3", "-m", "hassio" ]

@@ -10,14 +10,25 @@ import hassio.core as core
 _LOGGER = logging.getLogger(__name__)


+def attempt_use_uvloop():
+    """Attempt to use uvloop."""
+    try:
+        import uvloop
+        asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
+    except ImportError:
+        pass
+
+
 # pylint: disable=invalid-name
 if __name__ == "__main__":
     bootstrap.initialize_logging()
+    attempt_use_uvloop()
+    loop = asyncio.get_event_loop()

     if not bootstrap.check_environment():
-        exit(1)
+        sys.exit(1)

-    loop = asyncio.get_event_loop()
+    # init executor pool
     executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
     loop.set_default_executor(executor)


@@ -27,19 +38,20 @@ if __name__ == "__main__":

     bootstrap.migrate_system_env(config)

-    _LOGGER.info("Run Hassio setup")
+    _LOGGER.info("Setup HassIO")
     loop.run_until_complete(hassio.setup())

-    _LOGGER.info("Start Hassio")
     loop.call_soon_threadsafe(loop.create_task, hassio.start())
-    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)
+    loop.call_soon_threadsafe(bootstrap.reg_signal, loop)

-    _LOGGER.info("Run Hassio loop")
-    loop.run_forever()
-    _LOGGER.info("Cleanup system")
-    executor.shutdown(wait=False)
-    loop.close()
+    try:
+        _LOGGER.info("Run HassIO")
+        loop.run_forever()
+    finally:
+        _LOGGER.info("Stopping HassIO")
+        loop.run_until_complete(hassio.stop())
+        executor.shutdown(wait=False)
+        loop.close()

     _LOGGER.info("Close Hassio")
-    sys.exit(hassio.exit_code)
+    sys.exit(0)

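The hunk above moves event-loop teardown into a try/finally block so the stop coroutine and the executor shutdown always run. A standalone sketch of the same pattern follows, with the HassIO objects replaced by dummy coroutines and a timer standing in for the signal handler.

```python
"""Minimal sketch of the try/finally event-loop shutdown pattern above."""
import asyncio
import logging
import sys
from concurrent.futures import ThreadPoolExecutor

_LOGGER = logging.getLogger(__name__)


async def start():
    """Stand-in for hassio.start()."""
    _LOGGER.info("started")


async def stop():
    """Stand-in for hassio.stop()."""
    _LOGGER.info("stopped")


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    loop = asyncio.get_event_loop()

    executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
    loop.set_default_executor(executor)

    loop.call_soon_threadsafe(loop.create_task, start())
    loop.call_later(0.1, loop.stop)  # stand-in for the signal handler

    try:
        loop.run_forever()
    finally:
        # cleanup always runs, even if run_forever() is interrupted
        loop.run_until_complete(stop())
        executor.shutdown(wait=False)
        loop.close()

    sys.exit(0)
```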
@@ -8,7 +8,6 @@ import shutil
 import tarfile
 from tempfile import TemporaryDirectory

-from deepmerge import Merger
 import voluptuous as vol
 from voluptuous.humanize import humanize_error


@@ -18,19 +17,21 @@ from ..const import (
     ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
     ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
     ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
-    ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
+    ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP, ATTR_UUID,
     STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
     ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
-    ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT)
+    ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
+    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
+    ATTR_HOST_DBUS, ATTR_AUTO_UART)
 from .util import check_installed
 from ..dock.addon import DockerAddon
 from ..tools import write_json_file, read_json_file

 _LOGGER = logging.getLogger(__name__)

-RE_WEBUI = re.compile(r"^(.*\[HOST\]:)\[PORT:(\d+)\](.*)$")
-MERGE_OPT = Merger([(dict, ['merge'])], ['override'], ['override'])
+RE_WEBUI = re.compile(
+    r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
+    r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")


 class Addon(object):

@@ -106,10 +107,10 @@ class Addon(object):
     def options(self):
         """Return options with local changes."""
         if self.is_installed:
-            return MERGE_OPT.merge(
-                self.data.system[self._id][ATTR_OPTIONS],
-                self.data.user[self._id][ATTR_OPTIONS],
-            )
+            return {
+                **self.data.system[self._id][ATTR_OPTIONS],
+                **self.data.user[self._id][ATTR_OPTIONS]
+            }
         return self.data.cache[self._id][ATTR_OPTIONS]

     @options.setter

@@ -136,6 +137,7 @@ class Addon(object):
         """Return if auto update is enable."""
         if ATTR_AUTO_UPDATE in self.data.user.get(self._id, {}):
             return self.data.user[self._id][ATTR_AUTO_UPDATE]
+        return None

     @auto_update.setter
     def auto_update(self, value):

@@ -153,11 +155,31 @@ class Addon(object):
         """Return timeout of addon for docker stop."""
         return self._mesh[ATTR_TIMEOUT]

+    @property
+    def api_token(self):
+        """Return a API token for this add-on."""
+        if self.is_installed:
+            return self.data.user[self._id][ATTR_UUID]
+        return None
+
     @property
     def description(self):
         """Return description of addon."""
         return self._mesh[ATTR_DESCRIPTON]

+    @property
+    def long_description(self):
+        """Return README.md as long_description."""
+        readme = Path(self.path_location, 'README.md')
+
+        # If readme not exists
+        if not readme.exists():
+            return None
+
+        # Return data
+        with readme.open('r') as readme_file:
+            return readme_file.read()
+
     @property
     def repository(self):
         """Return repository of addon."""

@@ -206,30 +228,57 @@ class Addon(object):
         """Return URL to webui or None."""
         if ATTR_WEBUI not in self._mesh:
             return None
+        webui = RE_WEBUI.match(self._mesh[ATTR_WEBUI])

-        webui = self._mesh[ATTR_WEBUI]
-        dock_port = RE_WEBUI.sub(r"\2", webui)
+        # extract arguments
+        t_port = webui.group('t_port')
+        t_proto = webui.group('t_proto')
+        s_prefix = webui.group('s_prefix') or ""
+        s_suffix = webui.group('s_suffix') or ""
+
+        # search host port for this docker port
         if self.ports is None:
-            real_port = dock_port
+            port = t_port
         else:
-            real_port = self.ports.get("{}/tcp".format(dock_port), dock_port)
+            port = self.ports.get("{}/tcp".format(t_port), t_port)

         # for interface config or port lists
-        if isinstance(real_port, (tuple, list)):
-            real_port = real_port[-1]
+        if isinstance(port, (tuple, list)):
+            port = port[-1]

-        return RE_WEBUI.sub(r"\g<1>{}\g<3>".format(real_port), webui)
+        # lookup the correct protocol from config
+        if t_proto:
+            proto = 'https' if self.options[t_proto] else 'http'
+        else:
+            proto = s_prefix
+
+        return "{}://[HOST]:{}{}".format(proto, port, s_suffix)

     @property
     def host_network(self):
         """Return True if addon run on host network."""
         return self._mesh[ATTR_HOST_NETWORK]

+    @property
+    def host_ipc(self):
+        """Return True if addon run on host IPC namespace."""
+        return self._mesh[ATTR_HOST_IPC]
+
+    @property
+    def host_dbus(self):
+        """Return True if addon run on host DBUS."""
+        return self._mesh[ATTR_HOST_DBUS]
+
     @property
     def devices(self):
         """Return devices of addon."""
         return self._mesh.get(ATTR_DEVICES)

+    @property
+    def auto_uart(self):
+        """Return True if we should map all uart device."""
+        return self._mesh.get(ATTR_AUTO_UART)
+
     @property
     def tmpfs(self):
         """Return tmpfs of addon."""

@@ -246,10 +295,30 @@ class Addon(object):
         return self._mesh.get(ATTR_PRIVILEGED)

     @property
-    def use_hassio_api(self):
+    def legacy(self):
+        """Return if the add-on don't support hass labels."""
+        return self._mesh.get(ATTR_LEGACY)
+
+    @property
+    def access_hassio_api(self):
         """Return True if the add-on access to hassio api."""
         return self._mesh[ATTR_HASSIO_API]

+    @property
+    def access_homeassistant_api(self):
+        """Return True if the add-on access to Home-Assistant api proxy."""
+        return self._mesh[ATTR_HOMEASSISTANT_API]
+
+    @property
+    def with_stdin(self):
+        """Return True if the add-on access use stdin input."""
+        return self._mesh[ATTR_STDIN]
+
+    @property
+    def with_gpio(self):
+        """Return True if the add-on access to gpio interface."""
+        return self._mesh[ATTR_GPIO]
+
     @property
     def with_audio(self):
         """Return True if the add-on access to audio."""

@@ -279,7 +348,7 @@ class Addon(object):
     def audio_input(self):
         """Return ALSA config for input or None."""
         if not self.with_audio:
-            return
+            return None

         setting = self.config.audio_input
         if self.is_installed and ATTR_AUDIO_INPUT in self.data.user[self._id]:

@@ -305,6 +374,11 @@ class Addon(object):
         """Return True if a logo exists."""
         return self.path_logo.exists()

+    @property
+    def with_changelog(self):
+        """Return True if a changelog exists."""
+        return self.path_changelog.exists()
+
     @property
     def supported_arch(self):
         """Return list of supported arch."""

@@ -364,6 +438,11 @@ class Addon(object):
         """Return path to addon logo."""
         return Path(self.path_location, 'logo.png')

+    @property
+    def path_changelog(self):
+        """Return path to addon changelog."""
+        return Path(self.path_location, 'CHANGELOG.md')
+
     def write_options(self):
         """Return True if addon options is written to data."""
         schema = self.schema

@@ -417,7 +496,7 @@ class Addon(object):
             return False
         return True

-    async def install(self, version=None):
+    async def install(self):
         """Install a addon."""
         if self.config.arch not in self.supported_arch:
             _LOGGER.error(

@@ -433,11 +512,10 @@ class Addon(object):
                 "Create Home-Assistant addon data folder %s", self.path_data)
             self.path_data.mkdir()

-        version = version or self.last_version
-        if not await self.docker.install(version):
+        if not await self.docker.install(self.last_version):
             return False

-        self._set_install(version)
+        self._set_install(self.last_version)
         return True

     @check_installed

@@ -480,19 +558,18 @@ class Addon(object):
         return self.docker.stop()

     @check_installed
-    async def update(self, version=None):
+    async def update(self):
         """Update addon."""
-        version = version or self.last_version
         last_state = await self.state()

-        if version == self.version_installed:
+        if self.last_version == self.version_installed:
             _LOGGER.warning(
-                "Addon %s is already installed in %s", self._id, version)
+                "No update available for Addon %s", self._id)
             return False

-        if not await self.docker.update(version):
+        if not await self.docker.update(self.last_version):
             return False
-        self._set_update(version)
+        self._set_update(self.last_version)

         # restore state
         if last_state == STATE_STARTED:

@@ -536,6 +613,18 @@ class Addon(object):
         await self.docker.run()
         return True

+    @check_installed
+    async def write_stdin(self, data):
+        """Write data to add-on stdin.
+
+        Return a coroutine.
+        """
+        if not self.with_stdin:
+            _LOGGER.error("Add-on don't support write to stdin!")
+            return False
+
+        return await self.docker.write_stdin(data)
+
     @check_installed
     async def snapshot(self, tar_file):
         """Snapshot a state of a addon."""

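The reworked `webui` property resolves templates such as `[PROTO:ssl]://[HOST]:[PORT:8080]/` against the add-on's options and port mapping. Below is a self-contained sketch of that resolution using the regex from the diff; the template, options and port values are made-up examples.

```python
"""Sketch: resolving a webui template with the RE_WEBUI regex from the diff.

The template, options and port mapping below are made-up example values.
"""
import re

RE_WEBUI = re.compile(
    r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
    r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")

webui_template = "[PROTO:ssl]://[HOST]:[PORT:8080]/admin"
options = {"ssl": True}            # add-on options decide the protocol
ports = {"8080/tcp": 31080}        # docker port -> host port mapping

match = RE_WEBUI.match(webui_template)
t_port = match.group('t_port')
port = ports.get("{}/tcp".format(t_port), t_port)
if isinstance(port, (tuple, list)):
    port = port[-1]

if match.group('t_proto'):
    proto = 'https' if options[match.group('t_proto')] else 'http'
else:
    proto = match.group('s_prefix')

print("{}://[HOST]:{}{}".format(proto, port, match.group('s_suffix')))
# -> https://[HOST]:31080/admin
```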
@@ -1,7 +1,7 @@
 """HassIO addons build environment."""
 from pathlib import Path

-from .validate import SCHEMA_BUILD_CONFIG
+from .validate import SCHEMA_BUILD_CONFIG, BASE_IMAGE
 from ..const import ATTR_SQUASH, ATTR_BUILD_FROM, ATTR_ARGS, META_ADDON
 from ..tools import JsonConfig


@@ -24,7 +24,8 @@ class AddonBuild(JsonConfig):
     @property
     def base_image(self):
         """Base images for this addon."""
-        return self._data[ATTR_BUILD_FROM][self.config.arch]
+        return self._data[ATTR_BUILD_FROM].get(
+            self.config.arch, BASE_IMAGE[self.config.arch])

     @property
     def squash(self):

@@ -38,11 +39,9 @@ class AddonBuild(JsonConfig):

     def get_docker_args(self, version):
         """Create a dict with docker build arguments."""
-        build_tag = "{}:{}".format(self.addon.image, version)
-
-        return {
+        args = {
             'path': str(self.addon.path_location),
-            'tag': build_tag,
+            'tag': "{}:{}".format(self.addon.image, version),
             'pull': True,
             'forcerm': True,
             'squash': self.squash,

@@ -50,6 +49,8 @@ class AddonBuild(JsonConfig):
                 'io.hass.version': version,
                 'io.hass.arch': self.config.arch,
                 'io.hass.type': META_ADDON,
+                'io.hass.name': self.addon.name,
+                'io.hass.description': self.addon.description,
             },
             'buildargs': {
                 'BUILD_FROM': self.base_image,

@@ -58,3 +59,8 @@ class AddonBuild(JsonConfig):
                 **self.additional_args,
             }
         }
+
+        if self.addon.url:
+            args['labels']['io.hass.url'] = self.addon.url
+
+        return args

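`get_docker_args()` returns a dict meant to be splatted into a Docker image build call. The diff does not show the call site, so the following is only a sketch of how such a dict could be consumed with the Docker SDK for Python; every path, tag and label value is a placeholder.

```python
"""Sketch: feeding build arguments shaped like the dict above into docker-py.

Assumes the `docker` package is installed, a Docker daemon is running and
./my-addon contains a Dockerfile; all values are placeholders.
"""
import docker

args = {
    'path': './my-addon',
    'tag': 'local/my-addon:1.0.0',
    'pull': True,
    'forcerm': True,
    'squash': False,
    'labels': {
        'io.hass.version': '1.0.0',
        'io.hass.arch': 'amd64',
        'io.hass.type': 'addon',
        'io.hass.name': 'My Add-on',
        'io.hass.description': 'Example add-on',
    },
    'buildargs': {'BUILD_FROM': 'homeassistant/amd64-base:latest'},
}

client = docker.from_env()
image, build_log = client.images.build(**args)  # build the add-on image
print(image.tags)
```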
@@ -2,7 +2,7 @@
     "local": {
         "name": "Local Add-Ons",
         "url": "https://home-assistant.io/hassio",
-        "maintainer": "By our self"
+        "maintainer": "you"
     },
     "core": {
         "name": "Built-in Add-Ons",

@@ -73,7 +73,7 @@ class GitRepo(object):
                 None, self.repo.remotes.origin.pull)

         except (git.InvalidGitRepositoryError, git.NoSuchPathError,
-                git.exc.GitCommandError) as err:
+                git.GitCommandError) as err:
             _LOGGER.error("Can't pull %s repo: %s.", self.url, err)
             return False

@@ -1,5 +1,7 @@
 """Validate addons options schema."""
+import logging
 import re
+import uuid

 import voluptuous as vol


@@ -11,12 +13,15 @@ from ..const import (
     ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF,
     ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
     ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
-    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK,
-    ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT,
+    ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK, ATTR_UUID,
+    ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC,
     ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
-    ATTR_ARGS)
+    ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY,
+    ATTR_HOST_DBUS, ATTR_AUTO_UART)
 from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL

+_LOGGER = logging.getLogger(__name__)
+
+
 RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$")


@@ -35,7 +40,7 @@ RE_SCHEMA_ELEMENT = re.compile(
     r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
     r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
     r"|match\((?P<match>.*)\)"
-    r")$"
+    r")\??$"
 )

 SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)

@@ -86,26 +91,38 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
         vol.In([BOOT_AUTO, BOOT_MANUAL]),
     vol.Optional(ATTR_PORTS): DOCKER_PORTS,
     vol.Optional(ATTR_WEBUI):
-        vol.Match(r"^(?:https?):\/\/\[HOST\]:\[PORT:\d+\].*$"),
+        vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
     vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
     vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
+    vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
     vol.Optional(ATTR_TMPFS):
         vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
     vol.Optional(ATTR_MAP, default=[]): [vol.Match(RE_VOLUME)],
     vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
     vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
     vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
+    vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
     vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
+    vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
+    vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
+    vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
     vol.Required(ATTR_OPTIONS): dict,
     vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
         vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
-            vol.Any(SCHEMA_ELEMENT, {vol.Coerce(str): SCHEMA_ELEMENT})
-        ], vol.Schema({vol.Coerce(str): SCHEMA_ELEMENT}))
+            vol.Any(
+                SCHEMA_ELEMENT,
+                {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
+            ),
+        ], vol.Schema({
+            vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
+        }))
     }), False),
-    vol.Optional(ATTR_IMAGE): vol.Match(r"^[\-\w{}]+/[\-\w{}]+$"),
+    vol.Optional(ATTR_IMAGE): vol.Match(r"^[\w{}]+/[\-\w{}]+$"),
     vol.Optional(ATTR_TIMEOUT, default=10):
-        vol.All(vol.Coerce(int), vol.Range(min=10, max=120))
-})
+        vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
+}, extra=vol.REMOVE_EXTRA)


 # pylint: disable=no-value-for-parameter

@@ -113,24 +130,26 @@ SCHEMA_REPOSITORY_CONFIG = vol.Schema({
     vol.Required(ATTR_NAME): vol.Coerce(str),
     vol.Optional(ATTR_URL): vol.Url(),
     vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
-})
+}, extra=vol.REMOVE_EXTRA)


 # pylint: disable=no-value-for-parameter
 SCHEMA_BUILD_CONFIG = vol.Schema({
     vol.Optional(ATTR_BUILD_FROM, default=BASE_IMAGE): vol.Schema({
-        vol.In(ARCH_ALL): vol.Match(r"^[\-\w{}]+/[\-\w{}]+:[\-\w{}]+$"),
+        vol.In(ARCH_ALL): vol.Match(r"(?:^[\w{}]+/)?[\-\w{}]+:[\.\-\w{}]+$"),
     }),
     vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
    vol.Optional(ATTR_ARGS, default={}): vol.Schema({
         vol.Coerce(str): vol.Coerce(str)
     }),
-})
+}, extra=vol.REMOVE_EXTRA)


 # pylint: disable=no-value-for-parameter
 SCHEMA_ADDON_USER = vol.Schema({
     vol.Required(ATTR_VERSION): vol.Coerce(str),
+    vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex):
+        vol.Match(r"^[0-9a-f]{32}$"),
     vol.Optional(ATTR_OPTIONS, default={}): dict,
     vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
     vol.Optional(ATTR_BOOT):

@@ -138,7 +157,7 @@ SCHEMA_ADDON_USER = vol.Schema({
     vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
     vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
     vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
-})
+}, extra=vol.REMOVE_EXTRA)


 SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({

@@ -173,8 +192,10 @@ def validate_options(raw_schema):

         # read options
         for key, value in struct.items():
+            # Ignore unknown options / remove from list
             if key not in raw_schema:
-                raise vol.Invalid("Unknown options {}.".format(key))
+                _LOGGER.warning("Unknown options %s", key)
+                continue

             typ = raw_schema[key]
             try:

@@ -191,50 +212,48 @@ def validate_options(raw_schema):
                 raise vol.Invalid(
                     "Type error for {}.".format(key)) from None

+        _check_missing_options(raw_schema, options, 'root')
         return options

     return validate


 # pylint: disable=no-value-for-parameter
+# pylint: disable=inconsistent-return-statements
 def _single_validate(typ, value, key):
     """Validate a single element."""
-    try:
-        # if required argument
-        if value is None:
-            raise vol.Invalid("Missing required option '{}'.".format(key))
+    # if required argument
+    if value is None:
+        raise vol.Invalid("Missing required option '{}'.".format(key))

     # parse extend data from type
     match = RE_SCHEMA_ELEMENT.match(typ)

     # prepare range
     range_args = {}
     for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
         group_value = match.group(group_name)
         if group_value:
             range_args[group_name[2:]] = float(group_value)

     if typ.startswith(V_STR):
         return str(value)
     elif typ.startswith(V_INT):
         return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
     elif typ.startswith(V_FLOAT):
         return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
     elif typ.startswith(V_BOOL):
         return vol.Boolean()(value)
     elif typ.startswith(V_EMAIL):
         return vol.Email()(value)
     elif typ.startswith(V_URL):
         return vol.Url()(value)
     elif typ.startswith(V_PORT):
         return NETWORK_PORT(value)
     elif typ.startswith(V_MATCH):
         return vol.Match(match.group('match'))(str(value))

     raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
-    except ValueError:
-        raise vol.Invalid(
-            "Type {} error for '{}' on {}.".format(typ, value, key)) from None


 def _nested_validate_list(typ, data_list, key):

@@ -242,17 +261,10 @@ def _nested_validate_list(typ, data_list, key):
     options = []

     for element in data_list:
-        # dict list
+        # Nested?
         if isinstance(typ, dict):
-            c_options = {}
-            for c_key, c_value in element.items():
-                if c_key not in typ:
-                    raise vol.Invalid(
-                        "Unknown nested options {}".format(c_key))
-
-                c_options[c_key] = _single_validate(typ[c_key], c_value, c_key)
+            c_options = _nested_validate_dict(typ, element, key)
             options.append(c_options)
-        # normal list
         else:
             options.append(_single_validate(typ, element, key))


@@ -264,9 +276,28 @@ def _nested_validate_dict(typ, data_dict, key):
     options = {}

     for c_key, c_value in data_dict.items():
+        # Ignore unknown options / remove from list
         if c_key not in typ:
-            raise vol.Invalid("Unknow nested dict options {}".format(c_key))
+            _LOGGER.warning("Unknown options %s", c_key)
+            continue

-        options[c_key] = _single_validate(typ[c_key], c_value, c_key)
+        # Nested?
+        if isinstance(typ[c_key], list):
+            options[c_key] = _nested_validate_list(typ[c_key][0],
+                                                   c_value, c_key)
+        else:
+            options[c_key] = _single_validate(typ[c_key], c_value, c_key)
+
+    _check_missing_options(typ, options, key)
     return options


+def _check_missing_options(origin, exists, root):
+    """Check if all options are exists."""
+    missing = set(origin) - set(exists)
+    for miss_opt in missing:
+        if isinstance(origin[miss_opt], str) and \
+                origin[miss_opt].endswith("?"):
+            continue
+        raise vol.Invalid(
+            "Missing option {} in {}".format(miss_opt, root))

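Several schemas above gain `extra=vol.REMOVE_EXTRA`, so unknown keys are silently dropped instead of failing validation, matching the new warn-and-continue behaviour in the option validators. A minimal illustration with a throwaway schema:

```python
"""Sketch: effect of extra=vol.REMOVE_EXTRA on unknown keys (toy schema)."""
import voluptuous as vol

schema = vol.Schema({
    vol.Required('name'): vol.Coerce(str),
    vol.Optional('timeout', default=10):
        vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
}, extra=vol.REMOVE_EXTRA)

data = {'name': 'example', 'timeout': '30', 'obsolete_key': True}
print(schema(data))
# {'name': 'example', 'timeout': 30} -- the unknown key is dropped
```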
@@ -8,6 +8,7 @@ from .addons import APIAddons
 from .homeassistant import APIHomeAssistant
 from .host import APIHost
 from .network import APINetwork
+from .proxy import APIProxy
 from .supervisor import APISupervisor
 from .security import APISecurity
 from .snapshots import APISnapshots

@@ -63,9 +64,9 @@ class RestAPI(object):
             '/supervisor/options', api_supervisor.options)
         self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)

-    def register_homeassistant(self, dock_homeassistant):
+    def register_homeassistant(self, homeassistant):
         """Register homeassistant function."""
-        api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)
+        api_hass = APIHomeAssistant(self.config, self.loop, homeassistant)

         self.webapp.router.add_get('/homeassistant/info', api_hass.info)
         self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)

@@ -76,6 +77,21 @@ class RestAPI(object):
         self.webapp.router.add_post('/homeassistant/start', api_hass.start)
         self.webapp.router.add_post('/homeassistant/check', api_hass.check)

+    def register_proxy(self, homeassistant):
+        """Register HomeAssistant API Proxy."""
+        api_proxy = APIProxy(self.loop, homeassistant)
+
+        self.webapp.router.add_get(
+            '/homeassistant/api/websocket', api_proxy.websocket)
+        self.webapp.router.add_get(
+            '/homeassistant/websocket', api_proxy.websocket)
+        self.webapp.router.add_post(
+            '/homeassistant/api/{path:.+}', api_proxy.api)
+        self.webapp.router.add_get(
+            '/homeassistant/api/{path:.+}', api_proxy.api)
+        self.webapp.router.add_get(
+            '/homeassistant/api', api_proxy.api)
+
     def register_addons(self, addons):
         """Register homeassistant function."""
         api_addons = APIAddons(self.config, self.loop, addons)

@@ -100,6 +116,9 @@ class RestAPI(object):
             '/addons/{addon}/rebuild', api_addons.rebuild)
         self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
         self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
+        self.webapp.router.add_get(
+            '/addons/{addon}/changelog', api_addons.changelog)
+        self.webapp.router.add_post('/addons/{addon}/stdin', api_addons.stdin)

     def register_security(self):
         """Register security function."""

@@ -134,13 +153,18 @@ class RestAPI(object):

     def register_panel(self):
         """Register panel for homeassistant."""
-        panel = Path(__file__).parents[1].joinpath('panel/hassio-main.html')
+        def create_panel_response(build_type):
+            """Create a function to generate a response."""
+            path = Path(__file__).parents[1].joinpath(
+                'panel/hassio-main-{}.html'.format(build_type))

-        def get_panel(request):
-            """Return file response with panel."""
-            return web.FileResponse(panel)
+            return lambda request: web.FileResponse(path)

-        self.webapp.router.add_get('/panel', get_panel)
+        # This route is for backwards compatibility with HA < 0.58
+        self.webapp.router.add_get('/panel', create_panel_response('es5'))
+        self.webapp.router.add_get('/panel_es5', create_panel_response('es5'))
+        self.webapp.router.add_get(
+            '/panel_latest', create_panel_response('latest'))

     async def start(self):
         """Run rest api webserver."""

@@ -161,5 +185,5 @@ class RestAPI(object):
         await self.webapp.shutdown()

         if self._handler:
-            await self._handler.finish_connections(60)
+            await self._handler.shutdown(60)
         await self.webapp.cleanup()

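The reworked `register_panel` builds one handler per frontend build type through a small closure factory. The same pattern in a standalone aiohttp app follows; the file location is hypothetical and the handler is written as a coroutine (the diff uses a plain lambda, which the aiohttp version of that era accepted).

```python
"""Sketch: closure-based route factory for serving per-build panel files."""
from pathlib import Path

from aiohttp import web


def create_panel_response(build_type):
    """Return a handler that serves one panel build (hypothetical path)."""
    path = Path('panel', 'hassio-main-{}.html'.format(build_type))

    async def get_panel(request):
        return web.FileResponse(path)

    return get_panel


app = web.Application()
app.router.add_get('/panel_es5', create_panel_response('es5'))
app.router.add_get('/panel_latest', create_panel_response('latest'))
# web.run_app(app)  # serve the routes on the default port
```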
@@ -13,7 +13,9 @@ from ..const import (
     ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
     ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
     ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
-    BOOT_AUTO, BOOT_MANUAL, CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY)
+    ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
+    ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
+    CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT)
 from ..validate import DOCKER_PORTS

 _LOGGER = logging.getLogger(__name__)

@@ -55,7 +57,7 @@ class APIAddons(object):
         """Return a simplified device list."""
         dev_list = addon.devices
         if not dev_list:
-            return
+            return None
         return [row.split(':')[0] for row in dev_list]

     @api_process

@@ -73,12 +75,8 @@ class APIAddons(object):
                 ATTR_DETACHED: addon.is_detached,
                 ATTR_REPOSITORY: addon.repository,
                 ATTR_BUILD: addon.need_build,
-                ATTR_PRIVILEGED: addon.privileged,
-                ATTR_DEVICES: self._pretty_devices(addon),
                 ATTR_URL: addon.url,
                 ATTR_LOGO: addon.with_logo,
-                ATTR_HASSIO_API: addon.use_hassio_api,
-                ATTR_AUDIO: addon.with_audio,
             })

         data_repositories = []

@@ -110,6 +108,7 @@ class APIAddons(object):
         return {
             ATTR_NAME: addon.name,
             ATTR_DESCRIPTON: addon.description,
+            ATTR_LONG_DESCRIPTION: addon.long_description,
             ATTR_VERSION: addon.version_installed,
             ATTR_AUTO_UPDATE: addon.auto_update,
             ATTR_REPOSITORY: addon.repository,

@@ -122,11 +121,17 @@ class APIAddons(object):
             ATTR_BUILD: addon.need_build,
             ATTR_NETWORK: addon.ports,
             ATTR_HOST_NETWORK: addon.host_network,
+            ATTR_HOST_IPC: addon.host_ipc,
+            ATTR_HOST_DBUS: addon.host_dbus,
             ATTR_PRIVILEGED: addon.privileged,
             ATTR_DEVICES: self._pretty_devices(addon),
             ATTR_LOGO: addon.with_logo,
+            ATTR_CHANGELOG: addon.with_changelog,
             ATTR_WEBUI: addon.webui,
-            ATTR_HASSIO_API: addon.use_hassio_api,
+            ATTR_STDIN: addon.with_stdin,
+            ATTR_HASSIO_API: addon.access_hassio_api,
+            ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
+            ATTR_GPIO: addon.with_gpio,
             ATTR_AUDIO: addon.with_audio,
             ATTR_AUDIO_INPUT: addon.audio_input,
             ATTR_AUDIO_OUTPUT: addon.audio_output,

@@ -159,14 +164,10 @@ class APIAddons(object):
         return True

     @api_process
-    async def install(self, request):
+    def install(self, request):
         """Install addon."""
-        body = await api_validate(SCHEMA_VERSION, request)
         addon = self._extract_addon(request, check_installed=False)
-        version = body.get(ATTR_VERSION, addon.last_version)
-
-        return await asyncio.shield(
-            addon.install(version=version), loop=self.loop)
+        return asyncio.shield(addon.install(), loop=self.loop)

     @api_process
     def uninstall(self, request):

@@ -195,17 +196,14 @@ class APIAddons(object):
         return asyncio.shield(addon.stop(), loop=self.loop)

     @api_process
-    async def update(self, request):
+    def update(self, request):
         """Update addon."""
-        body = await api_validate(SCHEMA_VERSION, request)
         addon = self._extract_addon(request)
-        version = body.get(ATTR_VERSION, addon.last_version)

-        if version == addon.version_installed:
-            raise RuntimeError("Version %s is already in use", version)
+        if addon.last_version == addon.version_installed:
+            raise RuntimeError("No update available!")

-        return await asyncio.shield(
-            addon.update(version=version), loop=self.loop)
+        return asyncio.shield(addon.update(), loop=self.loop)

     @api_process
     def restart(self, request):

@@ -237,3 +235,23 @@ class APIAddons(object):

         with addon.path_logo.open('rb') as png:
             return png.read()
+
+    @api_process_raw(CONTENT_TYPE_TEXT)
+    async def changelog(self, request):
+        """Return changelog from addon."""
+        addon = self._extract_addon(request, check_installed=False)
+        if not addon.with_changelog:
+            raise RuntimeError("No changelog found!")
+
+        with addon.path_changelog.open('r') as changelog:
+            return changelog.read()
+
+    @api_process
+    async def stdin(self, request):
+        """Write to stdin of addon."""
+        addon = self._extract_addon(request)
+        if not addon.with_stdin:
+            raise RuntimeError("STDIN not supported by addons")
+
+        data = await request.read()
+        return await asyncio.shield(addon.write_stdin(data), loop=self.loop)

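The new `stdin` handler reads the raw request body and forwards it to the add-on process, so a client simply POSTs bytes. An illustrative call follows, with the same assumed base URL and auth header as in the earlier options example and a hypothetical add-on slug.

```python
# Hedged sketch of a client for POST /addons/{addon}/stdin.
# The base URL, auth header and add-on slug are assumptions, not part of
# the diff above.
import os

import requests

SUPERVISOR = "http://hassio"
ADDON = "local_example"
HEADERS = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}

resp = requests.post(
    f"{SUPERVISOR}/addons/{ADDON}/stdin",
    data=b"reload\n",          # raw bytes passed through to the add-on stdin
    headers=HEADERS,
    timeout=10,
)
resp.raise_for_status()
```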
@@ -7,8 +7,9 @@ import voluptuous as vol
 from .util import api_process, api_process_raw, api_validate
 from ..const import (
     ATTR_VERSION, ATTR_LAST_VERSION, ATTR_DEVICES, ATTR_IMAGE, ATTR_CUSTOM,
-    ATTR_BOOT, CONTENT_TYPE_BINARY)
-from ..validate import HASS_DEVICES
+    ATTR_BOOT, ATTR_PORT, ATTR_PASSWORD, ATTR_SSL, ATTR_WATCHDOG,
+    CONTENT_TYPE_BINARY)
+from ..validate import HASS_DEVICES, NETWORK_PORT

 _LOGGER = logging.getLogger(__name__)

@@ -20,6 +21,10 @@ SCHEMA_OPTIONS = vol.Schema({
     vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Any(None, vol.Coerce(str)),
     vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
         vol.Any(None, vol.Coerce(str)),
+    vol.Optional(ATTR_PORT): NETWORK_PORT,
+    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
+    vol.Optional(ATTR_SSL): vol.Boolean(),
+    vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
 })

 SCHEMA_VERSION = vol.Schema({

@@ -46,6 +51,9 @@ class APIHomeAssistant(object):
             ATTR_DEVICES: self.homeassistant.devices,
             ATTR_CUSTOM: self.homeassistant.is_custom_image,
             ATTR_BOOT: self.homeassistant.boot,
+            ATTR_PORT: self.homeassistant.api_port,
+            ATTR_SSL: self.homeassistant.api_ssl,
+            ATTR_WATCHDOG: self.homeassistant.watchdog,
         }

     @api_process

@@ -63,6 +71,18 @@ class APIHomeAssistant(object):
         if ATTR_BOOT in body:
             self.homeassistant.boot = body[ATTR_BOOT]

+        if ATTR_PORT in body:
+            self.homeassistant.api_port = body[ATTR_PORT]
+
+        if ATTR_PASSWORD in body:
+            self.homeassistant.api_password = body[ATTR_PASSWORD]
+
+        if ATTR_SSL in body:
+            self.homeassistant.api_ssl = body[ATTR_SSL]
+
+        if ATTR_WATCHDOG in body:
+            self.homeassistant.watchdog = body[ATTR_WATCHDOG]
+
         return True

     @api_process

@@ -8,7 +8,7 @@ from .util import api_process_hostcontrol, api_process, api_validate
from ..const import (
    ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
    ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT,
-    ATTR_AUDIO_OUTPUT)
+    ATTR_AUDIO_OUTPUT, ATTR_GPIO)
from ..validate import ALSA_CHANNEL

_LOGGER = logging.getLogger(__name__)

@@ -83,8 +83,9 @@ class APIHost(object):
    async def hardware(self, request):
        """Return local hardware infos."""
        return {
-            ATTR_SERIAL: self.local_hw.serial_devices,
-            ATTR_INPUT: self.local_hw.input_devices,
-            ATTR_DISK: self.local_hw.disk_devices,
+            ATTR_SERIAL: list(self.local_hw.serial_devices),
+            ATTR_INPUT: list(self.local_hw.input_devices),
+            ATTR_DISK: list(self.local_hw.disk_devices),
+            ATTR_GPIO: list(self.local_hw.gpio_devices),
            ATTR_AUDIO: self.local_hw.audio_devices,
        }
hassio/api/proxy.py (new file, 197 lines)
@@ -0,0 +1,197 @@
"""Utils for HomeAssistant Proxy."""
import asyncio
import logging

import aiohttp
from aiohttp import web
from aiohttp.web_exceptions import HTTPBadGateway
from aiohttp.hdrs import CONTENT_TYPE
import async_timeout

from ..const import HEADER_HA_ACCESS

_LOGGER = logging.getLogger(__name__)


class APIProxy(object):
    """API Proxy for Home-Assistant."""

    def __init__(self, loop, homeassistant):
        """Initialize api proxy."""
        self.loop = loop
        self.homeassistant = homeassistant

        # Use homeassistant websession to ignore SSL
        self.websession = homeassistant.websession

    async def _api_client(self, request, path, timeout=300):
        """Return a client request with proxy origin for Home-Assistant."""
        url = f"{self.homeassistant.api_url}/api/{path}"

        try:
            data = None
            headers = {}
            method = getattr(self.websession, request.method.lower())

            # read data
            with async_timeout.timeout(30, loop=self.loop):
                data = await request.read()

            if data:
                headers.update({CONTENT_TYPE: request.content_type})

            # need api password?
            if self.homeassistant.api_password:
                headers = {HEADER_HA_ACCESS: self.homeassistant.api_password}

            # reset headers
            if not headers:
                headers = None

            client = await method(
                url, data=data, headers=headers, timeout=timeout
            )

            return client

        except aiohttp.ClientError as err:
            _LOGGER.error("Client error on API %s request %s.", path, err)

        except asyncio.TimeoutError:
            _LOGGER.error("Client timeout error on API request %s.", path)

        raise HTTPBadGateway()

    async def api(self, request):
        """Proxy HomeAssistant API Requests."""
        path = request.match_info.get('path', '')

        # API stream
        if path.startswith("stream"):
            _LOGGER.info("Home-Assistant Event-Stream start")
            client = await self._api_client(request, path, timeout=None)

            response = web.StreamResponse()
            response.content_type = request.headers.get(CONTENT_TYPE)
            try:
                await response.prepare(request)
                while True:
                    data = await client.content.read(10)
                    if not data:
                        await response.write_eof()
                        break
                    response.write(data)

            except aiohttp.ClientError:
                await response.write_eof()

            except asyncio.CancelledError:
                pass

            finally:
                client.close()

            _LOGGER.info("Home-Assistant Event-Stream close")

        # Normal request
        else:
            _LOGGER.info("Home-Assistant '/api/%s' request", path)
            client = await self._api_client(request, path)

            data = await client.read()
            return web.Response(
                body=data,
                status=client.status,
                content_type=client.content_type
            )

    async def _websocket_client(self):
        """Initialize a websocket api connection."""
        url = f"{self.homeassistant.api_url}/api/websocket"

        try:
            client = await self.websession.ws_connect(
                url, heartbeat=60, verify_ssl=False)

            # handle authentication
            for _ in range(2):
                data = await client.receive_json()
                if data.get('type') == 'auth_ok':
                    return client
                elif data.get('type') == 'auth_required':
                    await client.send_json({
                        'type': 'auth',
                        'api_password': self.homeassistant.api_password,
                    })

            _LOGGER.error("Authentication to Home-Assistant websocket")

        except (aiohttp.ClientError, RuntimeError) as err:
            _LOGGER.error("Client error on websocket API %s.", err)

        raise HTTPBadGateway()

    async def websocket(self, request):
        """Initialize a websocket api connection."""
        _LOGGER.info("Home-Assistant Websocket API request initialze")

        # init server
        server = web.WebSocketResponse(heartbeat=60)
        await server.prepare(request)

        # handle authentication
        await server.send_json({'type': 'auth_required'})
        await server.receive_json()  # get internal token
        await server.send_json({'type': 'auth_ok'})

        # init connection to hass
        client = await self._websocket_client()

        _LOGGER.info("Home-Assistant Websocket API request running")
        try:
            client_read = None
            server_read = None
            while not server.closed and not client.closed:
                if not client_read:
                    client_read = asyncio.ensure_future(
                        client.receive_str(), loop=self.loop)
                if not server_read:
                    server_read = asyncio.ensure_future(
                        server.receive_str(), loop=self.loop)

                # wait until data need to be processed
                await asyncio.wait(
                    [client_read, server_read],
                    loop=self.loop, return_when=asyncio.FIRST_COMPLETED
                )

                # server
                if server_read.done() and not client.closed:
                    server_read.exception()
                    await client.send_str(server_read.result())
                    server_read = None

                # client
                if client_read.done() and not server.closed:
                    client_read.exception()
                    await server.send_str(client_read.result())
                    client_read = None

        except asyncio.CancelledError:
            pass

        except RuntimeError as err:
            _LOGGER.info("Home-Assistant Websocket API error: %s", err)

        finally:
            if client_read:
                client_read.cancel()
            if server_read:
                server_read.cancel()

            # close connections
            await client.close()
            await server.close()

        _LOGGER.info("Home-Assistant Websocket API connection is closed")
        return server
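Note: the proxy handlers above are hooked into the supervisor's REST API elsewhere in this changeset (see the register_proxy call added to core.py further down); the exact route paths are not part of this file. A rough, hypothetical sketch of how such handlers could be wired onto an aiohttp application, with made-up paths:

# Hypothetical wiring only: the real paths and RestAPI plumbing are defined
# elsewhere in the supervisor code base and are assumptions here.
from aiohttp import web

def setup_proxy_routes(app: web.Application, loop, homeassistant):
    proxy = APIProxy(loop, homeassistant)  # class introduced above
    app.router.add_route('*', '/homeassistant/api/{path:.+}', proxy.api)
    app.router.add_get('/homeassistant/websocket', proxy.websocket)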
@@ -17,10 +17,12 @@ _LOGGER = logging.getLogger(__name__)

def json_loads(data):
    """Extract json from string with support for '' and None."""
+    if not data:
+        return {}
    try:
        return json.loads(data)
    except json.JSONDecodeError:
-        return {}
+        raise RuntimeError("Invalid json")


def api_process(method):
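In other words, empty input still maps to an empty dict, while malformed JSON now raises instead of being silently swallowed. A small self-contained sketch of the same logic:

import json

def json_loads(data):
    """Mirror of the patched helper: '' and None give {}, bad JSON raises."""
    if not data:
        return {}
    try:
        return json.loads(data)
    except json.JSONDecodeError:
        raise RuntimeError("Invalid json")

assert json_loads(None) == {}
assert json_loads('') == {}
assert json_loads('{"a": 1}') == {'a': 1}
# json_loads('{not json') now raises RuntimeError instead of returning {}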
@@ -123,22 +123,22 @@ def check_environment():
    return True


-def reg_signal(loop, hassio):
+def reg_signal(loop):
    """Register SIGTERM, SIGKILL to stop system."""
    try:
        loop.add_signal_handler(
-            signal.SIGTERM, lambda: loop.create_task(hassio.stop()))
+            signal.SIGTERM, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGTERM")

    try:
        loop.add_signal_handler(
-            signal.SIGHUP, lambda: loop.create_task(hassio.stop()))
+            signal.SIGHUP, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGHUP")

    try:
        loop.add_signal_handler(
-            signal.SIGINT, lambda: loop.create_task(hassio.stop()))
+            signal.SIGINT, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGINT")
@@ -2,7 +2,7 @@
from pathlib import Path
from ipaddress import ip_network

-HASSIO_VERSION = '0.64'
+HASSIO_VERSION = '0.78'

URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
                      'hassio/{}/version.json')

@@ -16,11 +16,10 @@ RUN_UPDATE_SUPERVISOR_TASKS = 29100
RUN_UPDATE_ADDONS_TASKS = 57600
RUN_RELOAD_ADDONS_TASKS = 28800
RUN_RELOAD_SNAPSHOTS_TASKS = 72000
-RUN_WATCHDOG_HOMEASSISTANT = 15
+RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
+RUN_WATCHDOG_HOMEASSISTANT_API = 300
RUN_CLEANUP_API_SESSIONS = 900

-RESTART_EXIT_CODE = 100
-
FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")

@@ -50,9 +49,15 @@ RESULT_OK = 'ok'

CONTENT_TYPE_BINARY = 'application/octet-stream'
CONTENT_TYPE_PNG = 'image/png'
+CONTENT_TYPE_JSON = 'application/json'
+CONTENT_TYPE_TEXT = 'text/plain'
+HEADER_HA_ACCESS = 'x-ha-access'
+
+ATTR_WATCHDOG = 'watchdog'
+ATTR_CHANGELOG = 'changelog'
ATTR_DATE = 'date'
ATTR_ARCH = 'arch'
+ATTR_LONG_DESCRIPTION = 'long_description'
ATTR_HOSTNAME = 'hostname'
ATTR_TIMEZONE = 'timezone'
ATTR_ARGS = 'args'

@@ -62,6 +67,7 @@ ATTR_SOURCE = 'source'
ATTR_FEATURES = 'features'
ATTR_ADDONS = 'addons'
ATTR_VERSION = 'version'
+ATTR_AUTO_UART = 'auto_uart'
ATTR_LAST_BOOT = 'last_boot'
ATTR_LAST_VERSION = 'last_version'
ATTR_BETA_CHANNEL = 'beta_channel'

@@ -71,6 +77,8 @@ ATTR_DESCRIPTON = 'description'
ATTR_STARTUP = 'startup'
ATTR_BOOT = 'boot'
ATTR_PORTS = 'ports'
+ATTR_PORT = 'port'
+ATTR_SSL = 'ssl'
ATTR_MAP = 'map'
ATTR_WEBUI = 'webui'
ATTR_OPTIONS = 'options'

@@ -80,6 +88,7 @@ ATTR_STATE = 'state'
ATTR_SCHEMA = 'schema'
ATTR_IMAGE = 'image'
ATTR_LOGO = 'logo'
+ATTR_STDIN = 'stdin'
ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
ATTR_REPOSITORY = 'repository'
ATTR_REPOSITORIES = 'repositories'

@@ -95,6 +104,8 @@ ATTR_BUILD = 'build'
ATTR_DEVICES = 'devices'
ATTR_ENVIRONMENT = 'environment'
ATTR_HOST_NETWORK = 'host_network'
+ATTR_HOST_IPC = 'host_ipc'
+ATTR_HOST_DBUS = 'host_dbus'
ATTR_NETWORK = 'network'
ATTR_TMPFS = 'tmpfs'
ATTR_PRIVILEGED = 'privileged'

@@ -104,6 +115,8 @@ ATTR_SNAPSHOTS = 'snapshots'
ATTR_HOMEASSISTANT = 'homeassistant'
ATTR_HASSIO = 'hassio'
ATTR_HASSIO_API = 'hassio_api'
+ATTR_HOMEASSISTANT_API = 'homeassistant_api'
+ATTR_UUID = 'uuid'
ATTR_FOLDERS = 'folders'
ATTR_SIZE = 'size'
ATTR_TYPE = 'type'

@@ -120,6 +133,8 @@ ATTR_SERIAL = 'serial'
ATTR_SECURITY = 'security'
ATTR_BUILD_FROM = 'build_from'
ATTR_SQUASH = 'squash'
+ATTR_GPIO = 'gpio'
+ATTR_LEGACY = 'legacy'
ATTR_ADDONS_CUSTOM_LIST = 'addons_custom_list'

STARTUP_INITIALIZE = 'initialize'
@@ -9,7 +9,7 @@ from .api import RestAPI
from .host_control import HostControl
from .const import (
    RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
-    RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
+    RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT_DOCKER,
    RUN_CLEANUP_API_SESSIONS, STARTUP_SYSTEM, STARTUP_SERVICES,
    STARTUP_APPLICATION, STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS,
    RUN_UPDATE_ADDONS_TASKS)

@@ -22,7 +22,8 @@ from .dns import DNSForward
from .snapshots import SnapshotsManager
from .updater import Updater
from .tasks import (
-    hassio_update, homeassistant_watchdog, api_sessions_cleanup, addons_update)
+    hassio_update, homeassistant_watchdog_docker, api_sessions_cleanup,
+    addons_update)
from .tools import fetch_timezone

_LOGGER = logging.getLogger(__name__)

@@ -41,8 +42,8 @@ class HassIO(object):
        self.scheduler = Scheduler(loop)
        self.api = RestAPI(config, loop)
        self.hardware = Hardware()
-        self.docker = DockerAPI()
-        self.dns = DNSForward()
+        self.docker = DockerAPI(self.hardware)
+        self.dns = DNSForward(loop)

        # init basic docker container
        self.supervisor = DockerSupervisor(

@@ -90,6 +91,7 @@ class HassIO(object):
            self.supervisor, self.snapshots, self.addons, self.host_control,
            self.updater)
        self.api.register_homeassistant(self.homeassistant)
+        self.api.register_proxy(self.homeassistant)
        self.api.register_addons(self.addons)
        self.api.register_security()
        self.api.register_snapshots(self.snapshots)

@@ -165,21 +167,25 @@ class HassIO(object):
        finally:
            # schedule homeassistant watchdog
            self.scheduler.register_task(
-                homeassistant_watchdog(self.loop, self.homeassistant),
-                RUN_WATCHDOG_HOMEASSISTANT)
+                homeassistant_watchdog_docker(self.loop, self.homeassistant),
+                RUN_WATCHDOG_HOMEASSISTANT_DOCKER)
+
+            # self.scheduler.register_task(
+            #     homeassistant_watchdog_api(self.loop, self.homeassistant),
+            #     RUN_WATCHDOG_HOMEASSISTANT_API)

        # If landingpage / run upgrade in background
        if self.homeassistant.version == 'landingpage':
            self.loop.create_task(self.homeassistant.install())

-    async def stop(self, exit_code=0):
+    async def stop(self):
        """Stop a running orchestration."""
        # don't process scheduler anymore
        self.scheduler.suspend = True

        # process stop tasks
        self.websession.close()
-        await asyncio.wait([self.api.stop(), self.dns.stop()], loop=self.loop)
+        self.homeassistant.websession.close()

-        self.exit_code = exit_code
-        self.loop.stop()
+        # process async stop tasks
+        await asyncio.wait([self.api.stop(), self.dns.stop()], loop=self.loop)
@@ -11,8 +11,9 @@ COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"
class DNSForward(object):
    """Manage DNS forwarding to internal DNS."""

-    def __init__(self):
+    def __init__(self, loop):
        """Initialize DNS forwarding."""
+        self.loop = loop
        self.proc = None

    async def start(self):

@@ -23,6 +24,7 @@ class DNSForward(object):
                stdin=asyncio.subprocess.DEVNULL,
                stdout=asyncio.subprocess.DEVNULL,
                stderr=asyncio.subprocess.DEVNULL,
+                loop=self.loop
            )
        except OSError as err:
            _LOGGER.error("Can't start DNS forwarding -> %s", err)
@@ -16,11 +16,12 @@ class DockerAPI(object):
    This class is not AsyncIO safe!
    """

-    def __init__(self):
+    def __init__(self, hardware):
        """Initialize docker base wrapper."""
        self.docker = docker.DockerClient(
            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
        self.network = DockerNetwork(self.docker)
+        self.hardware = hardware

    @property
    def images(self):
@@ -1,5 +1,6 @@
"""Init file for HassIO addon docker object."""
import logging
+import os

import docker
import requests

@@ -24,11 +25,34 @@ class DockerAddon(DockerInterface):
            config, loop, api, image=addon.image, timeout=addon.timeout)
        self.addon = addon

+    # pylint: disable=inconsistent-return-statements
+    def process_metadata(self, metadata, force=False):
+        """Use addon data instead meta data with legacy."""
+        if not self.addon.legacy:
+            return super().process_metadata(metadata, force=force)
+
+        # set meta data
+        if not self.version or force:
+            if force:  # called on install/update/build
+                self.version = self.addon.last_version
+            else:
+                self.version = self.addon.version_installed
+
+        if not self.arch:
+            self.arch = self.config.arch
+
    @property
    def name(self):
        """Return name of docker container."""
        return "addon_{}".format(self.addon.slug)

+    @property
+    def ipc(self):
+        """Return the IPC namespace."""
+        if self.addon.host_ipc:
+            return 'host'
+        return None
+
    @property
    def hostname(self):
        """Return slug/id of addon."""

@@ -44,6 +68,10 @@ class DockerAddon(DockerInterface):
                'ALSA_INPUT': self.addon.audio_input,
            })

+        # Set api token if any API access is needed
+        if self.addon.access_hassio_api or self.addon.access_homeassistant_api:
+            addon_env['API_TOKEN'] = self.addon.api_token
+
        return {
            **addon_env,
            'TZ': self.config.timezone,

@@ -54,14 +82,17 @@ class DockerAddon(DockerInterface):
        """Return needed devices."""
        devices = self.addon.devices or []

-        # use audio devices
+        # Use audio devices
        if self.addon.with_audio and AUDIO_DEVICE not in devices:
            devices.append(AUDIO_DEVICE)

+        # Auto mapping UART devices
+        if self.addon.auto_uart:
+            for uart_dev in self.docker.hardware.serial_devices:
+                devices.append("{0}:{0}:rwm".format(uart_dev))
+
        # Return None if no devices is present
-        if devices:
-            return devices
-        return None
+        return devices or None

    @property
    def ports(self):

@@ -75,6 +106,18 @@ class DockerAddon(DockerInterface):
            if host_port
        }

+    @property
+    def security_opt(self):
+        """Controlling security opt."""
+        privileged = self.addon.privileged or []
+
+        # Disable AppArmor sinse it make troubles wit SYS_ADMIN
+        if 'SYS_ADMIN' in privileged:
+            return [
+                "apparmor:unconfined",
+            ]
+        return None
+
    @property
    def tmpfs(self):
        """Return tmpfs for docker add-on."""

@@ -88,6 +131,7 @@ class DockerAddon(DockerInterface):
        """Return hosts mapping."""
        return {
            'homeassistant': self.docker.network.gateway,
+            'hassio': self.docker.network.supervisor,
        }

    @property

@@ -102,39 +146,58 @@ class DockerAddon(DockerInterface):
        """Generate volumes for mappings."""
        volumes = {
            str(self.addon.path_extern_data): {
-                'bind': '/data', 'mode': 'rw'
+                'bind': "/data", 'mode': 'rw'
            }}

        addon_mapping = self.addon.map_volumes

+        # setup config mappings
        if MAP_CONFIG in addon_mapping:
            volumes.update({
                str(self.config.path_extern_config): {
-                    'bind': '/config', 'mode': addon_mapping[MAP_CONFIG]
+                    'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
                }})

        if MAP_SSL in addon_mapping:
            volumes.update({
                str(self.config.path_extern_ssl): {
-                    'bind': '/ssl', 'mode': addon_mapping[MAP_SSL]
+                    'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
                }})

        if MAP_ADDONS in addon_mapping:
            volumes.update({
                str(self.config.path_extern_addons_local): {
-                    'bind': '/addons', 'mode': addon_mapping[MAP_ADDONS]
+                    'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
                }})

        if MAP_BACKUP in addon_mapping:
            volumes.update({
                str(self.config.path_extern_backup): {
-                    'bind': '/backup', 'mode': addon_mapping[MAP_BACKUP]
+                    'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
                }})

        if MAP_SHARE in addon_mapping:
            volumes.update({
                str(self.config.path_extern_share): {
-                    'bind': '/share', 'mode': addon_mapping[MAP_SHARE]
+                    'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
+                }})
+
+        # init other hardware mappings
+        if self.addon.with_gpio:
+            volumes.update({
+                "/sys/class/gpio": {
+                    'bind': "/sys/class/gpio", 'mode': 'rw'
+                },
+                "/sys/devices/platform/soc": {
+                    'bind': "/sys/devices/platform/soc", 'mode': 'rw'
+                },
+            })
+
+        # host dbus system
+        if self.addon.host_dbus:
+            volumes.update({
+                "/var/run/dbus": {
+                    'bind': "/var/run/dbus", 'mode': 'rw'
                }})

        return volumes

@@ -159,11 +222,15 @@ class DockerAddon(DockerInterface):
            name=self.name,
            hostname=self.hostname,
            detach=True,
+            init=True,
+            ipc_mode=self.ipc,
+            stdin_open=self.addon.with_stdin,
            network_mode=self.network_mode,
            ports=self.ports,
            extra_hosts=self.network_mapping,
            devices=self.devices,
            cap_add=self.addon.privileged,
+            security_opt=self.security_opt,
            environment=self.environment,
            volumes=self.volumes,
            tmpfs=self.tmpfs

@@ -266,3 +333,35 @@ class DockerAddon(DockerInterface):
        """
        self._stop()
        return self._run()
+
+    @docker_process
+    def write_stdin(self, data):
+        """Write to add-on stdin."""
+        return self.loop.run_in_executor(None, self._write_stdin, data)
+
+    def _write_stdin(self, data):
+        """Write to add-on stdin.
+
+        Need run inside executor.
+        """
+        if not self._is_running():
+            return False
+
+        try:
+            # load needed docker objects
+            container = self.docker.containers.get(self.name)
+            socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
+        except docker.errors.DockerException as err:
+            _LOGGER.error("Can't attach to %s stdin -> %s", self.name, err)
+            return False
+
+        try:
+            # write to stdin
+            data += b"\n"
+            os.write(socket.fileno(), data)
+            socket.close()
+        except OSError as err:
+            _LOGGER.error("Can't write to %s stdin -> %s", self.name, err)
+            return False
+
+        return True
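Together with the new stdin endpoint in the add-on API above, this gives add-ons a write-only input channel. A rough usage sketch; the surrounding object names are assumptions, not shown in this diff:

# Hypothetical call site: 'addon_docker' stands for the DockerAddon instance
# defined above, and the payload is only an example.
async def send_reload(addon_docker):
    ok = await addon_docker.write_stdin(b'{"command": "reload"}')
    if not ok:
        print("add-on is not running or its stdin could not be reached")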
@@ -27,7 +27,7 @@ class DockerHomeAssistant(DockerInterface):
    def devices(self):
        """Create list of special device to map into docker."""
        if not self.data.devices:
-            return
+            return None

        devices = []
        for device in self.data.devices:

@@ -41,7 +41,7 @@ class DockerHomeAssistant(DockerInterface):
        Need run inside executor.
        """
        if self._is_running():
-            return
+            return False

        # cleanup
        self._stop()

@@ -52,6 +52,7 @@ class DockerHomeAssistant(DockerInterface):
            hostname=self.name,
            detach=True,
            privileged=True,
+            init=True,
            devices=self.devices,
            network_mode='host',
            environment={
@@ -6,7 +6,6 @@ import docker

from .interface import DockerInterface
from .util import docker_process
-from ..const import RESTART_EXIT_CODE

_LOGGER = logging.getLogger(__name__)

@@ -52,7 +51,7 @@ class DockerSupervisor(DockerInterface):
        _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)

        if await self.loop.run_in_executor(None, self._install, tag):
-            self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
+            self.loop.call_later(1, self.loop.stop)
            return True

        return False
@@ -19,6 +19,9 @@ RE_DEVICES = re.compile(r"\[.*(\d+)- (\d+).*\]: ([\w ]*)")
PROC_STAT = Path("/proc/stat")
RE_BOOT_TIME = re.compile(r"btime (\d+)")

+GPIO_DEVICES = Path("/sys/class/gpio")
+RE_TTY = re.compile(r"tty[A-Z]+")
+

class Hardware(object):
    """Represent a interface to procfs, sysfs and udev."""

@@ -32,10 +35,10 @@ class Hardware(object):
        """Return all serial and connected devices."""
        dev_list = set()
        for device in self.context.list_devices(subsystem='tty'):
-            if 'ID_VENDOR' in device:
+            if 'ID_VENDOR' in device or RE_TTY.search(device.device_node):
                dev_list.add(device.device_node)

-        return list(dev_list)
+        return dev_list

    @property
    def input_devices(self):

@@ -45,7 +48,7 @@ class Hardware(object):
            if 'NAME' in device:
                dev_list.add(device['NAME'].replace('"', ''))

-        return list(dev_list)
+        return dev_list

    @property
    def disk_devices(self):

@@ -55,7 +58,7 @@ class Hardware(object):
            if device.device_node.startswith('/dev/sd'):
                dev_list.add(device.device_node)

-        return list(dev_list)
+        return dev_list

    @property
    def audio_devices(self):

@@ -67,7 +70,7 @@ class Hardware(object):
            devices = devices_file.read()
        except OSError as err:
            _LOGGER.error("Can't read asound data -> %s", err)
-            return
+            return None

        audio_list = {}

@@ -90,6 +93,15 @@ class Hardware(object):

        return audio_list

+    @property
+    def gpio_devices(self):
+        """Return list of GPIO interface on device."""
+        dev_list = set()
+        for interface in GPIO_DEVICES.glob("gpio*"):
+            dev_list.add(interface.name)
+
+        return dev_list
+
    @property
    def last_boot(self):
        """Return last boot time."""

@@ -98,12 +110,12 @@ class Hardware(object):
            stats = stat_file.read()
        except OSError as err:
            _LOGGER.error("Can't read stat data -> %s", err)
-            return
+            return None

        # parse stat file
        found = RE_BOOT_TIME.search(stats)
        if not found:
            _LOGGER.error("Can't found last boot time!")
-            return
+            return None

        return datetime.utcfromtimestamp(int(found.group(1)))
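The GPIO detection simply globs /sys/class/gpio, so on a board with exported GPIO chips the set contains names like gpiochip0, and it stays empty elsewhere (compare the new gpio field in the host hardware API above). A standalone sketch of the same idea:

# Minimal standalone equivalent of Hardware.gpio_devices (sketch only).
from pathlib import Path

def list_gpio_interfaces(sysfs=Path("/sys/class/gpio")):
    """Return the names of all exported GPIO interfaces, e.g. {'gpiochip0'}."""
    return {entry.name for entry in sysfs.glob("gpio*")}

print(list_gpio_interfaces())  # empty set on machines without sysfs GPIO entries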
@@ -4,9 +4,14 @@ import logging
import os
import re

+import aiohttp
+from aiohttp.hdrs import CONTENT_TYPE
+import async_timeout
+
from .const import (
    FILE_HASSIO_HOMEASSISTANT, ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION,
-    ATTR_VERSION, ATTR_BOOT)
+    ATTR_VERSION, ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG,
+    HEADER_HA_ACCESS, CONTENT_TYPE_JSON)
from .dock.homeassistant import DockerHomeAssistant
from .tools import JsonConfig, convert_to_ascii
from .validate import SCHEMA_HASS_CONFIG

@@ -26,6 +31,9 @@ class HomeAssistant(JsonConfig):
        self.loop = loop
        self.updater = updater
        self.docker = DockerHomeAssistant(config, loop, docker, self)
+        self.api_ip = docker.network.gateway
+        self.websession = aiohttp.ClientSession(
+            connector=aiohttp.TCPConnector(verify_ssl=False), loop=loop)

    async def prepare(self):
        """Prepare HomeAssistant object."""

@@ -38,6 +46,57 @@ class HomeAssistant(JsonConfig):
        else:
            await self.docker.attach()

+    @property
+    def api_port(self):
+        """Return network port to home-assistant instance."""
+        return self._data[ATTR_PORT]
+
+    @api_port.setter
+    def api_port(self, value):
+        """Set network port for home-assistant instance."""
+        self._data[ATTR_PORT] = value
+        self.save()
+
+    @property
+    def api_password(self):
+        """Return password for home-assistant instance."""
+        return self._data.get(ATTR_PASSWORD)
+
+    @api_password.setter
+    def api_password(self, value):
+        """Set password for home-assistant instance."""
+        self._data[ATTR_PASSWORD] = value
+        self.save()
+
+    @property
+    def api_ssl(self):
+        """Return if we need ssl to home-assistant instance."""
+        return self._data[ATTR_SSL]
+
+    @api_ssl.setter
+    def api_ssl(self, value):
+        """Set SSL for home-assistant instance."""
+        self._data[ATTR_SSL] = value
+        self.save()
+
+    @property
+    def api_url(self):
+        """Return API url to Home-Assistant."""
+        return "{}://{}:{}".format(
+            'https' if self.api_ssl else 'http', self.api_ip, self.api_port
+        )
+
+    @property
+    def watchdog(self):
+        """Return True if the watchdog should protect Home-Assistant."""
+        return self._data[ATTR_WATCHDOG]
+
+    @watchdog.setter
+    def watchdog(self, value):
+        """Return True if the watchdog should protect Home-Assistant."""
+        self._data[ATTR_WATCHDOG] = value
+        self.save()
+
    @property
    def version(self):
        """Return version of running homeassistant."""

@@ -209,3 +268,23 @@ class HomeAssistant(JsonConfig):
        if exit_code != 0 or RE_YAML_ERROR.search(log):
            return (False, log)
        return (True, log)
+
+    async def check_api_state(self):
+        """Check if Home-Assistant up and running."""
+        url = "{}/api/".format(self.api_url)
+        header = {CONTENT_TYPE: CONTENT_TYPE_JSON}
+
+        if self.api_password:
+            header.update({HEADER_HA_ACCESS: self.api_password})
+
+        try:
+            async with async_timeout.timeout(30, loop=self.loop):
+                async with self.websession.get(url, headers=header) as request:
+                    status = request.status
+
+        except (asyncio.TimeoutError, aiohttp.ClientError):
+            return False
+
+        if status not in (200, 201):
+            _LOGGER.warning("Home-Assistant API config missmatch")
+        return True
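With the schema defaults added below (port 8123, SSL off) and api_ip taken from the docker network gateway, api_url comes out as a plain HTTP URL on that gateway address; the concrete IP depends on the supervisor's docker network and is only an assumption here. A small sketch of how the URL is assembled and used:

# Illustrative values only; the real api_ip comes from docker.network.gateway.
api_ssl, api_ip, api_port = False, "172.30.32.1", 8123
api_url = "{}://{}:{}".format('https' if api_ssl else 'http', api_ip, api_port)
print(api_url)  # -> http://172.30.32.1:8123; check_api_state() then probes api_url + "/api/"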
hassio/panel/hassio-main-es5.html (new file, 75 lines): file diff suppressed because one or more lines are too long
hassio/panel/hassio-main-es5.html.gz (new file, binary): binary file not shown
hassio/panel/hassio-main-latest.html (new file, 75 lines): file diff suppressed because one or more lines are too long
hassio/panel/hassio-main-latest.html.gz (new file, binary): binary file not shown
Two further panel file diffs are likewise suppressed (one too long, one binary).
@@ -197,6 +197,8 @@ class SnapshotsManager(object):
            await snapshot.restore_folders()

            # start homeassistant restore
+            _LOGGER.info("Full-Restore %s restore Home-Assistant",
+                         snapshot.slug)
            snapshot.restore_homeassistant(self.homeassistant)
            task_hass = self.loop.create_task(
                self.homeassistant.update(snapshot.homeassistant_version))

@@ -279,6 +281,8 @@ class SnapshotsManager(object):
            await snapshot.restore_folders(folders)

            if homeassistant:
+                _LOGGER.info("Partial-Restore %s restore Home-Assistant",
+                             snapshot.slug)
                snapshot.restore_homeassistant(self.homeassistant)
                tasks.append(self.homeassistant.update(
                    snapshot.homeassistant_version))
@@ -14,7 +14,7 @@ from .util import remove_folder
from ..const import (
    ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
    ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_DEVICES,
-    ATTR_IMAGE)
+    ATTR_IMAGE, ATTR_PORT, ATTR_SSL, ATTR_PASSWORD, ATTR_WATCHDOG, ATTR_BOOT)
from ..tools import write_json_file

_LOGGER = logging.getLogger(__name__)

@@ -101,6 +101,56 @@ class Snapshot(object):
        """Set snapshot homeassistant custom image."""
        self._data[ATTR_HOMEASSISTANT][ATTR_IMAGE] = value

+    @property
+    def homeassistant_ssl(self):
+        """Return snapshot homeassistant api ssl."""
+        return self._data[ATTR_HOMEASSISTANT].get(ATTR_SSL)
+
+    @homeassistant_ssl.setter
+    def homeassistant_ssl(self, value):
+        """Set snapshot homeassistant api ssl."""
+        self._data[ATTR_HOMEASSISTANT][ATTR_SSL] = value
+
+    @property
+    def homeassistant_port(self):
+        """Return snapshot homeassistant api port."""
+        return self._data[ATTR_HOMEASSISTANT].get(ATTR_PORT)
+
+    @homeassistant_port.setter
+    def homeassistant_port(self, value):
+        """Set snapshot homeassistant api port."""
+        self._data[ATTR_HOMEASSISTANT][ATTR_PORT] = value
+
+    @property
+    def homeassistant_password(self):
+        """Return snapshot homeassistant api password."""
+        return self._data[ATTR_HOMEASSISTANT].get(ATTR_PASSWORD)
+
+    @homeassistant_password.setter
+    def homeassistant_password(self, value):
+        """Set snapshot homeassistant api password."""
+        self._data[ATTR_HOMEASSISTANT][ATTR_PASSWORD] = value
+
+    @property
+    def homeassistant_watchdog(self):
+        """Return snapshot homeassistant watchdog options."""
+        return self._data[ATTR_HOMEASSISTANT].get(ATTR_WATCHDOG)
+
+    @homeassistant_watchdog.setter
+    def homeassistant_watchdog(self, value):
+        """Set snapshot homeassistant watchdog options."""
+        self._data[ATTR_HOMEASSISTANT][ATTR_WATCHDOG] = value
+
+    @property
+    def homeassistant_boot(self):
+        """Return snapshot homeassistant boot options."""
+        return self._data[ATTR_HOMEASSISTANT].get(ATTR_BOOT)
+
+    @homeassistant_boot.setter
+    def homeassistant_boot(self, value):
+        """Set snapshot homeassistant boot options."""
+        self._data[ATTR_HOMEASSISTANT][ATTR_BOOT] = value
+
    @property
    def size(self):
        """Return snapshot size."""

@@ -126,20 +176,34 @@ class Snapshot(object):
        """Read all data from homeassistant object."""
        self.homeassistant_version = homeassistant.version
        self.homeassistant_devices = homeassistant.devices
+        self.homeassistant_watchdog = homeassistant.watchdog
+        self.homeassistant_boot = homeassistant.boot

        # custom image
        if homeassistant.is_custom_image:
            self.homeassistant_image = homeassistant.image

+        # api
+        self.homeassistant_port = homeassistant.api_port
+        self.homeassistant_ssl = homeassistant.api_ssl
+        self.homeassistant_password = homeassistant.api_password
+
    def restore_homeassistant(self, homeassistant):
        """Write all data to homeassistant object."""
        homeassistant.devices = self.homeassistant_devices
+        homeassistant.watchdog = self.homeassistant_watchdog
+        homeassistant.boot = self.homeassistant_boot

        # custom image
        if self.homeassistant_image:
            homeassistant.set_custom(
                self.homeassistant_image, self.homeassistant_version)

+        # api
+        homeassistant.api_port = self.homeassistant_port
+        homeassistant.api_ssl = self.homeassistant_ssl
+        homeassistant.api_password = self.homeassistant_password
+
    async def load(self):
        """Read snapshot.json from tar file."""
        if not self.tar_file.is_file():

@@ -197,7 +261,8 @@ class Snapshot(object):
        """Async context to close a snapshot."""
        # exists snapshot or exception on build
        if self.tar_file.is_file() or exception_type is not None:
-            return self._tmp.cleanup()
+            self._tmp.cleanup()
+            return

        # validate data
        try:

@@ -219,7 +284,6 @@ class Snapshot(object):
            _LOGGER.error("Can't write snapshot.json")

        self._tmp.cleanup()
-        self._tmp = None

    async def import_addon(self, addon):
        """Add a addon into snapshot."""

@@ -259,9 +323,11 @@ class Snapshot(object):
            origin_dir = Path(self.config.path_hassio, name)

            try:
+                _LOGGER.info("Snapshot folder %s", name)
                with tarfile.open(snapshot_tar, "w:gz",
                                  compresslevel=1) as tar_file:
                    tar_file.add(origin_dir, arcname=".")
+                _LOGGER.info("Snapshot folder %s done", name)

                self._data[ATTR_FOLDERS].append(name)
            except tarfile.TarError as err:

@@ -288,8 +354,10 @@ class Snapshot(object):
            remove_folder(origin_dir)

            try:
+                _LOGGER.info("Restore folder %s", name)
                with tarfile.open(snapshot_tar, "r:gz") as tar_file:
                    tar_file.extractall(path=origin_dir)
+                _LOGGER.info("Restore folder %s done", name)
            except tarfile.TarError as err:
                _LOGGER.warning("Can't restore folder %s -> %s", name, err)
@@ -5,9 +5,10 @@ import voluptuous as vol
from ..const import (
    ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
    ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_DEVICES,
-    ATTR_IMAGE, FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
+    ATTR_IMAGE, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL, ATTR_WATCHDOG, ATTR_BOOT,
+    FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
    SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
-from ..validate import HASS_DEVICES
+from ..validate import HASS_DEVICES, NETWORK_PORT

ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]

@@ -21,6 +22,11 @@ SCHEMA_SNAPSHOT = vol.Schema({
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_DEVICES, default=[]): HASS_DEVICES,
        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
+        vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
+        vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
+        vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
+        vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
+        vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
    }),
    vol.Optional(ATTR_FOLDERS, default=[]): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS, default=[]): [vol.Schema({
@@ -62,18 +62,54 @@ def hassio_update(supervisor, updater):
    return _hassio_update


-def homeassistant_watchdog(loop, homeassistant):
-    """Create scheduler task for montoring running state."""
-    async def _homeassistant_watchdog():
-        """Check running state and start if they is close."""
+def homeassistant_watchdog_docker(loop, homeassistant):
+    """Create scheduler task for montoring running state of docker."""
+    async def _homeassistant_watchdog_docker():
+        """Check running state of docker and start if they is close."""
        # if Home-Assistant is active
-        if not await homeassistant.is_initialize():
+        if not await homeassistant.is_initialize() or \
+                not homeassistant.watchdog:
            return

-        # If Home-Assistant is running
+        # if Home-Assistant is running
        if homeassistant.in_progress or await homeassistant.is_running():
            return

        loop.create_task(homeassistant.run())
+        _LOGGER.error("Watchdog found a problem with Home-Assistant docker!")

-    return _homeassistant_watchdog
+    return _homeassistant_watchdog_docker
+
+
+def homeassistant_watchdog_api(loop, homeassistant):
+    """Create scheduler task for montoring running state of API.
+
+    Try 2 times to call API before we restart Home-Assistant. Maybe we had a
+    delay in our system.
+    """
+    retry_scan = 0
+
+    async def _homeassistant_watchdog_api():
+        """Check running state of API and start if they is close."""
+        nonlocal retry_scan
+
+        # if Home-Assistant is active
+        if not await homeassistant.is_initialize() or \
+                not homeassistant.watchdog:
+            return
+
+        # if Home-Assistant API is up
+        if homeassistant.in_progress or await homeassistant.check_api_state():
+            return
+        retry_scan += 1
+
+        # Retry active
+        if retry_scan == 1:
+            _LOGGER.warning("Watchdog miss API response from Home-Assistant")
+            return
+
+        loop.create_task(homeassistant.restart())
+        _LOGGER.error("Watchdog found a problem with Home-Assistant API!")
+        retry_scan = 0
+
+    return _homeassistant_watchdog_api
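The docker watchdog is registered in core.py above with RUN_WATCHDOG_HOMEASSISTANT_DOCKER (15 seconds); the API watchdog is prepared here but its registration is still commented out in this changeset. If it were enabled, the call would presumably mirror that commented block, roughly:

# Sketch based on the commented-out block in core.py; not active in this changeset.
self.scheduler.register_task(
    homeassistant_watchdog_api(self.loop, self.homeassistant),
    RUN_WATCHDOG_HOMEASSISTANT_API)  # 300 seconds, per const.py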
@@ -7,7 +7,8 @@ from .const import (
     ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_SESSIONS, ATTR_PASSWORD,
     ATTR_TOTP, ATTR_SECURITY, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
     ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
-    ATTR_HOMEASSISTANT, ATTR_HASSIO, ATTR_BOOT, ATTR_LAST_BOOT)
+    ATTR_HOMEASSISTANT, ATTR_HASSIO, ATTR_BOOT, ATTR_LAST_BOOT, ATTR_SSL,
+    ATTR_PORT, ATTR_WATCHDOG)
 
 
 NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
@@ -28,11 +29,12 @@ def validate_timezone(timezone):
     return timezone
 
 
+# pylint: disable=inconsistent-return-statements
 def convert_to_docker_ports(data):
     """Convert data into docker port list."""
     # dynamic ports
     if data is None:
-        return
+        return None
 
     # single port
     if isinstance(data, int):
@@ -61,7 +63,11 @@ SCHEMA_HASS_CONFIG = vol.Schema({
     vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
     vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Coerce(str),
     vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
-})
+    vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
+    vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)),
+    vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
+    vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
+}, extra=vol.REMOVE_EXTRA)
 
 
 # pylint: disable=no-value-for-parameter
@@ -69,7 +75,7 @@ SCHEMA_UPDATER_CONFIG = vol.Schema({
     vol.Optional(ATTR_BETA_CHANNEL, default=False): vol.Boolean(),
     vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
     vol.Optional(ATTR_HASSIO): vol.Coerce(str),
-})
+}, extra=vol.REMOVE_EXTRA)
 
 
 # pylint: disable=no-value-for-parameter
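The switch to extra=vol.REMOVE_EXTRA above changes how unknown keys are handled: they are silently dropped instead of failing validation, while the new optional keys pick up their defaults. A small sketch with the same shape as SCHEMA_HASS_CONFIG, using plain string keys as stand-ins for the ATTR_* constants:

import voluptuous as vol

NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))

SCHEMA_HASS_CONFIG = vol.Schema({
    vol.Optional('port', default=8123): NETWORK_PORT,
    vol.Optional('password'): vol.Any(None, vol.Coerce(str)),
    vol.Optional('ssl', default=False): vol.Boolean(),
    vol.Optional('watchdog', default=True): vol.Boolean(),
}, extra=vol.REMOVE_EXTRA)

# The stale key is dropped, the port string is coerced to an int, and the
# missing optional keys get their defaults.
print(SCHEMA_HASS_CONFIG({'port': '8124', 'stale_option': 1}))
# {'port': 8124, 'ssl': False, 'watchdog': True}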
7	setup.py
@@ -12,7 +12,7 @@ setup(
     url='https://home-assistant.io/',
     description=('Open-source private cloud os for Home-Assistant'
                  ' based on ResinOS'),
-    long_description=('A maintenainless private cloud operator system that'
+    long_description=('A maintainless private cloud operator system that'
                       'setup a Home-Assistant instance. Based on ResinOS'),
     classifiers=[
         'Intended Audience :: End Users/Desktop',
@@ -24,7 +24,7 @@ setup(
         'Topic :: Scientific/Engineering :: Atmospheric Science',
         'Development Status :: 5 - Production/Stable',
         'Intended Audience :: Developers',
-        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.6',
     ],
     keywords=['docker', 'home-assistant', 'api'],
     zip_safe=False,
@@ -47,7 +47,6 @@ setup(
         'pyotp',
         'pyqrcode',
         'pytz',
-        'pyudev',
-        'deepmerge'
+        'pyudev'
     ]
 )
@@ -1,6 +1,6 @@
 {
-    "hassio": "0.64",
-    "homeassistant": "0.54",
+    "hassio": "0.78",
+    "homeassistant": "0.60",
     "resinos": "1.1",
     "resinhup": "0.3",
     "generic": "0.3",