Mirror of https://github.com/home-assistant/supervisor.git
Synced 2025-08-15 20:19:21 +00:00

Compare commits (23 commits)
Commits (SHA1):

- 67e2ad99c9
- ea2edadac2
- 1e78c60a65
- 5f3147d6f4
- 03c3c9b6a1
- f056d175b7
- 9fb1aa626d
- 30243c39e6
- d285fd4ad4
- 7a0b9cc1ac
- cc63008a86
- f9c7371140
- 71590f90ae
- e1028d6eca
- f231d54daa
- 094c5968f4
- 6c217d506c
- 0d867af79f
- c9876988da
- 454d82d985
- 14ee26ea29
- 86a7f11f64
- 78d1e1d9e7
API.md (78 changed lines)
@@ -1,43 +1,5 @@
# HassIO Server

## Host Controll

Communicate over unix socket with a host daemon.

- commands
```
# info
-> {'os', 'version', 'current', 'level', 'hostname'}
# reboot
# shutdown
# host-update [v]
# supervisor-update [v]

# network info
# network hostname xy
# network wlan ssd xy
# network wlan password xy
# network int ip xy
# network int netmask xy
# network int route xy
```

level:
- 1: power functions
- 2: supervisor update
- 4: host update
- 8: network functions

Answer:
```
{}|OK|ERROR|WRONG
```

- {}: json
- OK: call was successfully
- ERROR: error on call
- WRONG: not supported

## HassIO REST API

Interface for HomeAssistant to controll things from supervisor.
@@ -96,6 +58,10 @@ Optional:
    "version": "VERSION"
}
```

- `/supervisor/reload`

Reload addons/version.

### Host

- `/host/shutdown`
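For orientation, a minimal sketch of calling the `/supervisor/reload` endpoint documented above from Python with aiohttp. The base URL is an assumption for illustration only; the real address is whatever the supervisor exposes via its `api_endpoint` setting, and is not fixed by this diff.

```python
"""Minimal sketch: call the supervisor REST API with aiohttp."""
import asyncio

import aiohttp

SUPERVISOR_URL = "http://172.17.0.2"  # assumed address, see note above


async def reload_supervisor():
    """Trigger /supervisor/reload and return the parsed JSON answer."""
    async with aiohttp.ClientSession() as session:
        url = "{}/supervisor/reload".format(SUPERVISOR_URL)
        async with session.get(url) as resp:
            return await resp.json()


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    print(loop.run_until_complete(reload_supervisor()))
```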
@@ -196,3 +162,39 @@ Optional:
    "version": "VERSION"
}
```

## Host Controll

Communicate over unix socket with a host daemon.

- commands
```
# info
-> {'os', 'version', 'current', 'level', 'hostname'}
# reboot
# shutdown
# host-update [v]

# network info
# network hostname xy
# network wlan ssd xy
# network wlan password xy
# network int ip xy
# network int netmask xy
# network int route xy
```

level:
- 1: power functions
- 2: host update
- 4: network functions

Answer:
```
{}|OK|ERROR|WRONG
```

- {}: json
- OK: call was successfully
- ERROR: error on call
- WRONG: not supported
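The command/answer protocol above maps onto a plain unix-socket exchange. Below is a minimal client sketch; the socket path and the newline-terminated framing are assumptions for illustration and are not taken from this diff.

```python
"""Minimal sketch of a host-control client for the protocol described above.

Assumptions (not fixed by this diff): the socket path and the
newline-terminated request/response framing.
"""
import asyncio
import json

SOCKET_PATH = "/var/run/hassio-hc.sock"  # placeholder path


async def send_command(command):
    """Send one command and interpret the {}|OK|ERROR|WRONG answer."""
    reader, writer = await asyncio.open_unix_connection(SOCKET_PATH)
    writer.write("{}\n".format(command).encode())
    await writer.drain()

    answer = (await reader.readline()).decode().strip()
    writer.close()

    if answer == "OK":
        return True
    if answer in ("ERROR", "WRONG"):
        raise RuntimeError("Host daemon answered {}".format(answer))
    return json.loads(answer)  # e.g. the info dict for the "info" command


async def demo():
    info = await send_command("info")
    print(info["os"], info["version"], info["level"])
```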
README.md

@@ -6,13 +6,6 @@ It is a docker image (supervisor) they manage HomeAssistant docker and give a in
[HassIO-Addons](https://github.com/pvizeli/hassio-addons)
[HassIO-Build](https://github.com/pvizeli/hassio-build)

## History
- **0.1**: Initial supervisor with setup HomeAssistant docker
- **0.2**: Support for basic HostControll
- **0.3**: Refactor code and add basic rest api
- **0.4**: Move network api code / ssl folder
- **0.5**: Make api compatible to hass component v1

# Hardware Image
The image is based on ResinOS and Yocto Linux. It comes with the HassIO supervisor pre-installed. This includes support to update the supervisor over the air. After flashing your host OS will not require any more maintenance! The image does not include Home Assistant, instead it will downloaded when the image boots up for the first time.
hassio/__main__.py

@@ -1,7 +1,7 @@
"""Main file for HassIO."""
import asyncio
import logging
import signal
import sys

import hassio.bootstrap as bootstrap
import hassio.core as core

@@ -24,13 +24,10 @@ if __name__ == "__main__":

    _LOGGER.info("Start Hassio task")
    loop.call_soon_threadsafe(loop.create_task, hassio.start())

    try:
        loop.add_signal_handler(
            signal.SIGTERM, lambda: loop.create_task(hassio.stop()))
    except ValueError:
        _LOGGER.warning("Could not bind to SIGTERM")
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)

    loop.run_forever()
    loop.close()

    _LOGGER.info("Close Hassio")
    sys.exit(hassio.exit_code)
hassio/addons/__init__.py

@@ -37,7 +37,7 @@ class AddonManager(AddonsData):
        self.dockers[addon] = DockerAddon(
            self.config, self.loop, self.dock, self, addon)

    async def relaod(self):
    async def reload(self):
        """Update addons from repo and reload list."""
        if not await self.repo.pull():
            return

@@ -47,7 +47,7 @@ class AddonManager(AddonsData):
        tasks = []
        for addon in self.list_removed:
            _LOGGER.info("Old addon %s found")
            tasks.append(self.loop.create_task(self.dockers[addon].remove()))
            tasks.append(self.loop.create_task(self.uninstall(addon)))

        if tasks:
            await asyncio.wait(tasks, loop=self.loop)
hassio/addons/data.py

@@ -5,11 +5,12 @@ import glob
import voluptuous as vol
from voluptuous.humanize import humanize_error

from .validate import validate_options, SCHEMA_ADDON_CONFIG
from ..const import (
    FILE_HASSIO_ADDONS, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON,
    ATTR_STARTUP, ATTR_BOOT, ATTR_MAP_SSL, ATTR_MAP_CONFIG, ATTR_OPTIONS,
    ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER, STARTUP_BEFORE, BOOT_AUTO,
    BOOT_MANUAL, DOCKER_REPO, ATTR_INSTALLED, ATTR_SCHEMA, ATTR_IMAGE)
    ATTR_PORTS, BOOT_AUTO, DOCKER_REPO, ATTR_INSTALLED, ATTR_SCHEMA,
    ATTR_IMAGE, ATTR_MAP_HASSIO)
from ..config import Config
from ..tools import read_json_file, write_json_file

@@ -17,32 +18,6 @@ _LOGGER = logging.getLogger(__name__)

ADDONS_REPO_PATTERN = "{}/*/config.json"

V_STR = 'str'
V_INT = 'int'
V_FLOAT = 'float'
V_BOOL = 'bool'


# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema({
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Required(ATTR_VERSION): vol.Coerce(str),
    vol.Required(ATTR_SLUG): vol.Coerce(str),
    vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
    vol.Required(ATTR_STARTUP):
        vol.In([STARTUP_BEFORE, STARTUP_AFTER, STARTUP_ONCE]),
    vol.Required(ATTR_BOOT):
        vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_PORTS): dict,
    vol.Required(ATTR_MAP_CONFIG): vol.Boolean(),
    vol.Required(ATTR_MAP_SSL): vol.Boolean(),
    vol.Required(ATTR_OPTIONS): dict,
    vol.Required(ATTR_SCHEMA): {
        vol.Coerce(str): vol.In([V_STR, V_INT, V_FLOAT, V_BOOL])
    },
    vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
})


class AddonsData(Config):
    """Hold data for addons inside HassIO."""

@@ -56,6 +31,8 @@ class AddonsData(Config):

    def read_addons_repo(self):
        """Read data from addons repository."""
        self._addons_data = {}

        self._read_addons_folder(self.config.path_addons_repo)
        self._read_addons_folder(self.config.path_addons_custom)

@@ -213,6 +190,10 @@ class AddonsData(Config):
        """Return True if ssl map is needed."""
        return self._addons_data[addon][ATTR_MAP_SSL]

    def need_hassio(self, addon):
        """Return True if hassio map is needed."""
        return self._addons_data[addon][ATTR_MAP_HASSIO]

    def path_data(self, addon):
        """Return addon data path inside supervisor."""
        return "{}/{}".format(

@@ -229,35 +210,21 @@ class AddonsData(Config):

    def write_addon_options(self, addon):
        """Return True if addon options is written to data."""
        return write_json_file(
            self.path_addon_options(addon), self.get_options(addon))
        schema = self.get_schema(addon)
        options = self.get_options(addon)

        try:
            schema(options)
            return write_json_file(self.path_addon_options(addon), options)
        except vol.Invalid as ex:
            _LOGGER.error("Addon %s have wrong options -> %s", addon,
                          humanize_error(options, ex))

        return False

    def get_schema(self, addon):
        """Create a schema for addon options."""
        raw_schema = self._addons_data[addon][ATTR_SCHEMA]

        def validate(struct):
            """Validate schema."""
            options = {}
            for key, value in struct.items():
                if key not in raw_schema:
                    raise vol.Invalid("Unknown options {}.".format(key))

                typ = raw_schema[key]
                try:
                    if typ == V_STR:
                        options[key] = str(value)
                    elif typ == V_INT:
                        options[key] = int(value)
                    elif typ == V_FLOAT:
                        options[key] = float(value)
                    elif typ == V_BOOL:
                        options[key] = vol.Boolean()(value)
                except TypeError:
                    raise vol.Invalid(
                        "Type error for {}.".format(key)) from None

            return options

        schema = vol.Schema(vol.All(dict(), validate))
        schema = vol.Schema(vol.All(dict, validate_options(raw_schema)))
        return schema
hassio/addons/validate.py (new file, 113 lines)
@@ -0,0 +1,113 @@
"""Validate addons options schema."""
import voluptuous as vol

from ..const import (
    ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
    ATTR_BOOT, ATTR_MAP_SSL, ATTR_MAP_CONFIG, ATTR_OPTIONS,
    ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER, STARTUP_BEFORE, BOOT_AUTO,
    BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, ATTR_MAP_HASSIO)

V_STR = 'str'
V_INT = 'int'
V_FLOAT = 'float'
V_BOOL = 'bool'
V_EMAIL = 'email'
V_URL = 'url'

ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL])

# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema({
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Required(ATTR_VERSION): vol.Coerce(str),
    vol.Required(ATTR_SLUG): vol.Coerce(str),
    vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
    vol.Required(ATTR_STARTUP):
        vol.In([STARTUP_BEFORE, STARTUP_AFTER, STARTUP_ONCE]),
    vol.Required(ATTR_BOOT):
        vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_PORTS): dict,
    vol.Optional(ATTR_MAP_CONFIG, default=False): vol.Boolean(),
    vol.Optional(ATTR_MAP_SSL, default=False): vol.Boolean(),
    vol.Optional(ATTR_MAP_HASSIO, default=False): vol.Boolean(),
    vol.Required(ATTR_OPTIONS): dict,
    vol.Required(ATTR_SCHEMA): {
        vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
            vol.Any(ADDON_ELEMENT, {vol.Coerce(str): ADDON_ELEMENT})
        ])
    },
    vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
})


def validate_options(raw_schema):
    """Validate schema."""
    def validate(struct):
        """Create schema validator for addons options."""
        options = {}

        # read options
        for key, value in struct.items():
            if key not in raw_schema:
                raise vol.Invalid("Unknown options {}.".format(key))

            typ = raw_schema[key]
            try:
                if isinstance(typ, list):
                    # nested value
                    options[key] = _nested_validate(typ[0], value)
                else:
                    # normal value
                    options[key] = _single_validate(typ, value)
            except (IndexError, KeyError):
                raise vol.Invalid(
                    "Type error for {}.".format(key)) from None

        return options

    return validate


# pylint: disable=no-value-for-parameter
def _single_validate(typ, value):
    """Validate a single element."""
    try:
        if typ == V_STR:
            return str(value)
        elif typ == V_INT:
            return int(value)
        elif typ == V_FLOAT:
            return float(value)
        elif typ == V_BOOL:
            return vol.Boolean()(value)
        elif typ == V_EMAIL:
            return vol.Email()(value)
        elif typ == V_URL:
            return vol.Url()(value)

        raise vol.Invalid("Fatal error for {}.".format(value))
    except TypeError:
        raise vol.Invalid(
            "Type {} error for {}.".format(typ, value)) from None


def _nested_validate(typ, data_list):
    """Validate nested items."""
    options = []

    for element in data_list:
        # dict list
        if isinstance(typ, dict):
            c_options = {}
            for c_key, c_value in element.items():
                if c_key not in typ:
                    raise vol.Invalid(
                        "Unknown nested options {}.".format(c_key))

                c_options[c_key] = _single_validate(typ[c_key], c_value)
            options.append(c_options)
        # normal list
        else:
            options.append(_single_validate(typ, element))

    return options
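To make the options-schema format concrete, here is a small usage sketch of `validate_options` from the new file above. The example schema and option values are invented for illustration; the wrapping in `vol.All(dict, ...)` mirrors how `get_schema` uses it.

```python
"""Minimal usage sketch for validate_options (example data is invented)."""
import voluptuous as vol

from hassio.addons.validate import validate_options

# An addon "schema" section as it could appear in config.json:
# plain types, a typed list, and a list of typed dicts.
raw_schema = {
    "ssl": "bool",
    "port": "int",
    "hosts": ["str"],
    "users": [{"name": "str", "password": "str"}],
}

# validate_options(raw_schema) returns a validator; wrapping it like the
# supervisor does yields a voluptuous schema for the addon options.
schema = vol.Schema(vol.All(dict, validate_options(raw_schema)))

options = schema({
    "ssl": True,
    "port": 8080,
    "hosts": ["core-a", "core-b"],
    "users": [{"name": "admin", "password": "secret"}],
})
print(options)

# Unknown keys or wrong types raise vol.Invalid:
try:
    schema({"bad_key": 1})
except vol.Invalid as err:
    print("rejected:", err)
```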
hassio/api/__init__.py

@@ -41,14 +41,15 @@ class RestAPI(object):
        self.webapp.router.add_get('/network/info', api_net.info)
        self.webapp.router.add_get('/network/options', api_net.options)

    def register_supervisor(self, host_controll, addons):
    def register_supervisor(self, supervisor, addons):
        """Register supervisor function."""
        api_supervisor = APISupervisor(
            self.config, self.loop, host_controll, addons)
            self.config, self.loop, supervisor, addons)

        self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
        self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
        self.webapp.router.add_get('/supervisor/update', api_supervisor.update)
        self.webapp.router.add_get('/supervisor/reload', api_supervisor.reload)
        self.webapp.router.add_get(
            '/supervisor/options', api_supervisor.options)
hassio/api/addons.py

@@ -3,6 +3,7 @@ import asyncio
import logging

import voluptuous as vol
from voluptuous.humanize import humanize_error

from .util import api_process, api_validate
from ..const import (

@@ -88,6 +89,14 @@ class APIAddons(object):
        if await self.addons.state(addon) == STATE_STARTED:
            raise RuntimeError("Addon is already running")

        # validate options
        try:
            schema = self.addons.get_schema(addon)
            options = self.addons.get_options(addon)
            schema(options)
        except vol.Invalid as ex:
            raise RuntimeError(humanize_error(options, ex)) from None

        return await asyncio.shield(
            self.addons.start(addon), loop=self.loop)
hassio/api/supervisor.py

@@ -1,9 +1,10 @@
"""Init file for HassIO supervisor rest api."""
import asyncio
import logging

import voluptuous as vol

from .util import api_process, api_process_hostcontroll, api_validate
from .util import api_process, api_validate
from ..const import (
    ATTR_ADDONS, ATTR_VERSION, ATTR_CURRENT, ATTR_BETA, HASSIO_VERSION)

@@ -22,11 +23,11 @@ SCHEMA_VERSION = vol.Schema({
class APISupervisor(object):
    """Handle rest api for supervisor functions."""

    def __init__(self, config, loop, host_controll, addons):
    def __init__(self, config, loop, supervisor, addons):
        """Initialize supervisor rest api part."""
        self.config = config
        self.loop = loop
        self.host_controll = host_controll
        self.supervisor = supervisor
        self.addons = addons

    @api_process

@@ -55,13 +56,27 @@ class APISupervisor(object):

        return self.config.save()

    @api_process_hostcontroll
    @api_process
    async def update(self, request):
        """Update host OS."""
        """Update supervisor OS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.config.current_hassio)

        if version == HASSIO_VERSION:
        if version == self.supervisor.version:
            raise RuntimeError("Version is already in use")

        return await self.host_controll.supervisor_update(version=version)
        return await asyncio.shield(
            self.supervisor.update(version), loop=self.loop)

    @api_process
    async def reload(self, request):
        """Reload addons, config ect."""
        tasks = [self.addons.reload(), self.config.fetch_update_infos()]
        results, _ = await asyncio.shield(
            asyncio.wait(tasks, loop=self.loop), loop=self.loop)

        for result in results:
            if result.exception() is not None:
                raise RuntimeError("Some reload task fails!")

        return True
hassio/bootstrap.py

@@ -2,6 +2,7 @@
import logging
import os
import stat
import signal

from colorlog import ColoredFormatter

@@ -81,3 +82,24 @@ def check_environment():
        return False

    return True


def reg_signal(loop, hassio):
    """Register SIGTERM, SIGKILL to stop system."""
    try:
        loop.add_signal_handler(
            signal.SIGTERM, lambda: loop.create_task(hassio.stop()))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGTERM")

    try:
        loop.add_signal_handler(
            signal.SIGHUP, lambda: loop.create_task(hassio.stop()))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGHUP")

    try:
        loop.add_signal_handler(
            signal.SIGINT, lambda: loop.create_task(hassio.stop()))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGINT")
hassio/config.py

@@ -14,6 +14,7 @@ HOMEASSISTANT_CURRENT = 'homeassistant_current'

HASSIO_SSL = "{}/ssl"
HASSIO_CURRENT = 'hassio_current'
HASSIO_CLEANUP = 'hassio_cleanup'

ADDONS_REPO = "{}/addons"
ADDONS_DATA = "{}/addons_data"

@@ -21,6 +22,8 @@ ADDONS_CUSTOM = "{}/addons_custom"

UPSTREAM_BETA = 'upstream_beta'

API_ENDPOINT = 'api_endpoint'


class Config(object):
    """Hold all config data."""

@@ -77,6 +80,16 @@ class CoreConfig(Config):

        return False

    @property
    def api_endpoint(self):
        """Return IP address of api endpoint."""
        return self._data[API_ENDPOINT]

    @api_endpoint.setter
    def api_endpoint(self, value):
        """Store IP address of api endpoint."""
        self._data[API_ENDPOINT] = value

    @property
    def upstream_beta(self):
        """Return True if we run in beta upstream."""

@@ -87,6 +100,20 @@ class CoreConfig(Config):
        """Set beta upstream mode."""
        self._data[UPSTREAM_BETA] = bool(value)

    @property
    def hassio_cleanup(self):
        """Return Version they need to cleanup."""
        return self._data.get(HASSIO_CLEANUP)

    @hassio_cleanup.setter
    def hassio_cleanup(self, version):
        """Set or remove cleanup flag."""
        if version is None:
            self._data.pop(HASSIO_CLEANUP, None)
        else:
            self._data[HASSIO_CLEANUP] = version
        self.save()

    @property
    def homeassistant_image(self):
        """Return docker homeassistant repository."""

@@ -102,10 +129,15 @@ class CoreConfig(Config):
        """Actual version of hassio."""
        return self._data.get(HASSIO_CURRENT)

    @property
    def path_hassio_docker(self):
        """Return hassio data path extern for docker."""
        return os.environ['SUPERVISOR_SHARE']

    @property
    def path_config_docker(self):
        """Return config path extern for docker."""
        return HOMEASSISTANT_CONFIG.format(os.environ['SUPERVISOR_SHARE'])
        return HOMEASSISTANT_CONFIG.format(self.path_hassio_docker)

    @property
    def path_config(self):

@@ -115,7 +147,7 @@ class CoreConfig(Config):
    @property
    def path_ssl_docker(self):
        """Return SSL path extern for docker."""
        return HASSIO_SSL.format(os.environ['SUPERVISOR_SHARE'])
        return HASSIO_SSL.format(self.path_hassio_docker)

    @property
    def path_ssl(self):

@@ -140,4 +172,4 @@ class CoreConfig(Config):
    @property
    def path_addons_data_docker(self):
        """Return root addon data folder extern for docker."""
        return ADDONS_DATA.format(os.environ['SUPERVISOR_SHARE'])
        return ADDONS_DATA.format(self.path_hassio_docker)
hassio/const.py

@@ -1,5 +1,5 @@
"""Const file for HassIO."""
HASSIO_VERSION = '0.6'
HASSIO_VERSION = '0.10'

URL_HASSIO_VERSION = \
    'https://raw.githubusercontent.com/pvizeli/hassio/master/version.json'

@@ -13,8 +13,11 @@ DOCKER_REPO = "pvizeli"
HASSIO_SHARE = "/data"

RUN_UPDATE_INFO_TASKS = 28800
RUN_UPDATE_SUPERVISOR_TASKS = 29100
RUN_RELOAD_ADDONS_TASKS = 28800

RESTART_EXIT_CODE = 100

FILE_HASSIO_ADDONS = "{}/addons.json".format(HASSIO_SHARE)
FILE_HASSIO_CONFIG = "{}/config.json".format(HASSIO_SHARE)

@@ -40,6 +43,7 @@ ATTR_BOOT = 'boot'
ATTR_PORTS = 'ports'
ATTR_MAP_CONFIG = 'map_config'
ATTR_MAP_SSL = 'map_ssl'
ATTR_MAP_HASSIO = 'map_hassio'
ATTR_OPTIONS = 'options'
ATTR_INSTALLED = 'installed'
ATTR_STATE = 'state'

@@ -49,7 +53,9 @@ ATTR_IMAGE = 'image'
STARTUP_BEFORE = 'before'
STARTUP_AFTER = 'after'
STARTUP_ONCE = 'once'

BOOT_AUTO = 'auto'
BOOT_MANUAL = 'manual'

STATE_STARTED = 'started'
STATE_STOPPED = 'stopped'
hassio/core.py

@@ -11,11 +11,11 @@ from .api import RestAPI
from .host_controll import HostControll
from .const import (
    SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
    STARTUP_AFTER, STARTUP_BEFORE)
    RUN_UPDATE_SUPERVISOR_TASKS, STARTUP_AFTER, STARTUP_BEFORE)
from .scheduler import Scheduler
from .dock.homeassistant import DockerHomeAssistant
from .dock.supervisor import DockerSupervisor
from .tools import get_arch_from_image
from .tools import get_arch_from_image, get_local_ip

_LOGGER = logging.getLogger(__name__)

@@ -25,6 +25,7 @@ class HassIO(object):

    def __init__(self, loop):
        """Initialize hassio object."""
        self.exit_code = 0
        self.loop = loop
        self.websession = aiohttp.ClientSession(loop=self.loop)
        self.config = bootstrap.initialize_system_data(self.websession)

@@ -35,7 +36,7 @@ class HassIO(object):

        # init basic docker container
        self.supervisor = DockerSupervisor(
            self.config, self.loop, self.dock)
            self.config, self.loop, self.dock, self)
        self.homeassistant = DockerHomeAssistant(
            self.config, self.loop, self.dock)

@@ -49,6 +50,10 @@ class HassIO(object):
        """Setup HassIO orchestration."""
        # supervisor
        await self.supervisor.attach()
        await self.supervisor.cleanup()

        # set api endpoint
        self.config.api_endpoint = await get_local_ip(self.loop)

        # hostcontroll
        host_info = await self.host_controll.info()

@@ -63,14 +68,14 @@ class HassIO(object):
        # rest api views
        self.api.register_host(self.host_controll)
        self.api.register_network(self.host_controll)
        self.api.register_supervisor(self.host_controll, self.addons)
        self.api.register_supervisor(self.supervisor, self.addons)
        self.api.register_homeassistant(self.homeassistant)
        self.api.register_addons(self.addons)

        # schedule update info tasks
        self.scheduler.register_task(
            self.config.fetch_update_infos, RUN_UPDATE_INFO_TASKS,
            first_run=True)
            now=True)

        # first start of supervisor?
        if not await self.homeassistant.exists():

@@ -83,12 +88,17 @@ class HassIO(object):

        # schedule addon update task
        self.scheduler.register_task(
            self.addons.relaod, RUN_RELOAD_ADDONS_TASKS, first_run=True)
            self.addons.reload, RUN_RELOAD_ADDONS_TASKS, now=True)

        # schedule self update task
        self.scheduler.register_task(
            self._hassio_update, RUN_UPDATE_SUPERVISOR_TASKS)

    async def start(self):
        """Start HassIO orchestration."""
        # start api
        await self.api.start()
        _LOGGER.info("Start hassio api on %s", self.config.api_endpoint)

        # HomeAssistant is already running / supervisor have only reboot
        if await self.homeassistant.is_running():

@@ -104,11 +114,16 @@ class HassIO(object):
        # start addon mark as after
        await self.addons.auto_boot(STARTUP_AFTER)

    async def stop(self):
    async def stop(self, exit_code=0):
        """Stop a running orchestration."""
        # don't process scheduler anymore
        self.scheduler.stop()

        # process stop task pararell
        tasks = [self.websession.close(), self.api.stop()]
        await asyncio.wait(tasks, loop=self.loop)

        self.exit_code = exit_code
        self.loop.stop()

    async def _setup_homeassistant(self):

@@ -126,3 +141,12 @@ class HassIO(object):

        # store version
        _LOGGER.info("HomeAssistant docker now installed.")

    async def _hassio_update(self):
        """Check and run update of supervisor hassio."""
        if self.config.current_hassio == self.supervisor.version:
            return

        _LOGGER.info(
            "Found new HassIO version %s.", self.config.current_hassio)
        await self.supervisor.update(self.config.current_hassio)
hassio/dock/__init__.py

@@ -138,8 +138,6 @@ class DockerBase(object):
            return False

        async with self._lock:
            _LOGGER.info("Run docker image %s with version %s",
                         self.image, self.version)
            return await self.loop.run_in_executor(None, self._run)

    def _run(self):

@@ -167,6 +165,8 @@ class DockerBase(object):
        if not self.container:
            return

        _LOGGER.info("Stop %s docker application", self.image)

        self.container.reload()
        if self.container.status == 'running':
            with suppress(docker.errors.DockerException):

@@ -209,10 +209,7 @@ class DockerBase(object):
        return True

    async def update(self, tag):
        """Update a docker image.

        Return a Future.
        """
        """Update a docker image."""
        if self._lock.locked():
            _LOGGER.error("Can't excute update while a task is in progress")
            return False

@@ -233,7 +230,7 @@ class DockerBase(object):

        # update docker image
        if self._install(tag):
            _LOGGER.info("Cleanup old %s docker.", old_image)
            _LOGGER.info("Cleanup old %s docker", old_image)
            self._stop()
            try:
                self.dock.images.remove(image=old_image, force=True)
hassio/dock/addon.py

@@ -52,6 +52,11 @@ class DockerAddon(DockerBase):
                self.config.path_ssl_docker: {
                    'bind': '/ssl', 'mode': 'rw'
                }})
        if self.addons_data.need_hassio(self.addon):
            volumes.update({
                self.config.path_hassio_docker: {
                    'bind': '/hassio', 'mode': 'rw'
                }})

        try:
            self.container = self.dock.containers.run(

@@ -60,15 +65,15 @@ class DockerAddon(DockerBase):
                detach=True,
                network_mode='bridge',
                ports=self.addons_data.get_ports(self.addon),
                restart_policy={
                    "Name": "on-failure",
                    "MaximumRetryCount": 10,
                },
                volumes=volumes,
            )

            self.version = get_version_from_env(
                self.container.attrs['Config']['Env'])

            _LOGGER.info("Start docker addon %s with version %s",
                         self.image, self.version)

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't run %s -> %s", self.image, err)
            return False
hassio/dock/homeassistant.py

@@ -4,7 +4,7 @@ import logging
import docker

from . import DockerBase
from ..tools import get_version_from_env, get_local_ip
from ..tools import get_version_from_env

_LOGGER = logging.getLogger(__name__)

@@ -31,8 +31,6 @@ class DockerHomeAssistant(DockerBase):
        if self._is_running():
            return

        api_endpoint = get_local_ip(self.loop)

        # cleanup old container
        self._stop()

@@ -43,12 +41,8 @@ class DockerHomeAssistant(DockerBase):
                detach=True,
                privileged=True,
                network_mode='host',
                restart_policy={
                    "Name": "always",
                    "MaximumRetryCount": 10,
                },
                environment={
                    'HASSIO': api_endpoint,
                    'HASSIO': self.config.api_endpoint,
                },
                volumes={
                    self.config.path_config_docker:

@@ -59,6 +53,10 @@ class DockerHomeAssistant(DockerBase):

            self.version = get_version_from_env(
                self.container.attrs['Config']['Env'])

            _LOGGER.info("Start docker addon %s with version %s",
                         self.image, self.version)

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't run %s -> %s", self.image, err)
            return False
hassio/dock/supervisor.py

@@ -1,17 +1,71 @@
"""Init file for HassIO docker object."""
import logging
import os

import docker

from . import DockerBase
from ..const import RESTART_EXIT_CODE

_LOGGER = logging.getLogger(__name__)


class DockerSupervisor(DockerBase):
    """Docker hassio wrapper for HomeAssistant."""

    def __init__(self, config, loop, dock, hassio, image=None):
        """Initialize docker base wrapper."""
        super().__init__(config, loop, dock, image=image)

        self.hassio = hassio

    @property
    def docker_name(self):
        """Return name of docker container."""
        return os.environ['SUPERVISOR_NAME']

    async def update(self, tag):
        """Update a supervisor docker image."""
        if self._lock.locked():
            _LOGGER.error("Can't excute update while a task is in progress")
            return False

        _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)
        old_version = self.version

        async with self._lock:
            if await self.loop.run_in_executor(None, self._install, tag):
                self.config.hassio_cleanup = old_version
                self.loop.create_task(self.hassio.stop(RESTART_EXIT_CODE))
                return True

            return False

    async def cleanup(self):
        """Check if old supervisor version exists and cleanup."""
        if not self.config.hassio_cleanup:
            return

        async with self._lock:
            if await self.loop.run_in_executor(None, self._cleanup):
                self.config.hassio_cleanup = None

    def _cleanup(self):
        """Remove old image.

        Need run inside executor.
        """
        old_image = "{}:{}".format(self.image, self.config.hassio_cleanup)

        _LOGGER.info("Old supervisor docker found %s", old_image)
        try:
            self.dock.images.remove(image=old_image, force=True)
        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't remove old image %s -> %s", old_image, err)
            return False

        return True

    async def run(self):
        """Run docker image."""
        raise RuntimeError("Not support on supervisor docker container!")

@@ -24,10 +78,6 @@ class DockerSupervisor(DockerBase):
        """Stop/remove docker container."""
        raise RuntimeError("Not support on supervisor docker container!")

    async def update(self, tag):
        """Update docker image."""
        raise RuntimeError("Not support on supervisor docker container!")

    async def remove(self):
        """Remove docker image."""
        raise RuntimeError("Not support on supervisor docker container!")
hassio/host_controll.py

@@ -14,9 +14,8 @@ _LOGGER = logging.getLogger(__name__)
TIMEOUT = 15

LEVEL_POWER = 1
LEVEL_UPDATE_SUPERVISOR = 2
LEVEL_UPDATE_HOST = 4
LEVEL_NETWORK = 8
LEVEL_UPDATE_HOST = 2
LEVEL_NETWORK = 4


class HostControll(object):

@@ -101,12 +100,3 @@ class HostControll(object):
        if version:
            return self._send_command("host-update {}".format(version))
        return self._send_command("host-update")

    def supervisor_update(self, version=None):
        """Update the supervisor on host system.

        Return a coroutine.
        """
        if version:
            return self._send_command("supervisor-update {}".format(version))
        return self._send_command("supervisor-update")
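The `LEVEL_*` values above are powers of two, so the `level` field reported by the host daemon's `info` command can be read as a capability bitmask. The bitmask interpretation is an assumption drawn from the constant values, not something this diff states explicitly; a small illustrative check:

```python
"""Illustrative capability check against the host-control level value.

Assumption: 'level' is a bitwise OR of the LEVEL_* constants below
(they mirror the new values in host_controll.py).
"""
LEVEL_POWER = 1
LEVEL_UPDATE_HOST = 2
LEVEL_NETWORK = 4


def describe_level(level):
    """Return the capability names encoded in a level value."""
    capabilities = []
    if level & LEVEL_POWER:
        capabilities.append("power functions")
    if level & LEVEL_UPDATE_HOST:
        capabilities.append("host update")
    if level & LEVEL_NETWORK:
        capabilities.append("network functions")
    return capabilities


# A host reporting level 7 supports all three capability groups.
print(describe_level(7))  # ['power functions', 'host update', 'network functions']
print(describe_level(3))  # ['power functions', 'host update']
```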
hassio/scheduler.py

@@ -16,9 +16,14 @@ class Scheduler(object):
        """Initialize task schedule."""
        self.loop = loop
        self._data = {}
        self._stop = False

    def stop(self):
        """Stop to execute tasks in scheduler."""
        self._stop = True

    def register_task(self, coro_callback, seconds, repeat=True,
                      first_run=False):
                      now=False):
        """Schedule a coroutine.

        The coroutien need to be a callback without arguments.

@@ -34,7 +39,7 @@ class Scheduler(object):
        self._data[idx] = opts

        # schedule task
        if first_run:
        if now:
            self._run_task(idx)
        else:
            task = self.loop.call_later(seconds, self._run_task, idx)

@@ -46,6 +51,10 @@ class Scheduler(object):
        """Run a scheduled task."""
        data = self._data.pop(idx)

        # stop execute tasks
        if self._stop:
            return

        self.loop.create_task(data[CALL]())

        if data[REPEAT]:
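The renamed `now=` flag only decides whether the first run happens immediately or after one interval. Below is a minimal usage sketch; the `Scheduler(loop)` constructor call, the loop setup and the dummy job are assumptions for illustration, while the `register_task` signature and `stop()` behaviour come from the hunks above.

```python
"""Minimal usage sketch for Scheduler.register_task (dummy job is invented)."""
import asyncio

from hassio.scheduler import Scheduler


async def fetch_update_infos():
    """Stand-in for a periodic job such as config.fetch_update_infos."""
    print("checking for updates")


loop = asyncio.get_event_loop()
scheduler = Scheduler(loop)  # constructor signature assumed from the diff context

# Run immediately and then every 28800 seconds (the RUN_UPDATE_INFO_TASKS
# interval from const.py); now=False would wait one interval first.
scheduler.register_task(fetch_update_infos, 28800, now=True)

try:
    loop.run_forever()
finally:
    scheduler.stop()  # prevents already-scheduled runs from executing
    loop.close()
```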
hassio/tools.py

@@ -55,19 +55,23 @@ def get_version_from_env(env_list):
def get_local_ip(loop):
    """Retrieve local IP address.

    Need run inside executor.
    Return a future.
    """
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    def local_ip():
        """Return local ip."""
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

        # Use Google Public DNS server to determine own IP
        sock.connect(('8.8.8.8', 80))
            # Use Google Public DNS server to determine own IP
            sock.connect(('8.8.8.8', 80))

        return sock.getsockname()[0]
    except socket.error:
        return socket.gethostbyname(socket.gethostname())
    finally:
        sock.close()
            return sock.getsockname()[0]
        except socket.error:
            return socket.gethostbyname(socket.gethostname())
        finally:
            sock.close()

    return loop.run_in_executor(None, local_ip)


def write_json_file(jsonfile, data):
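The refactor above turns `get_local_ip` from a blocking helper into one that pushes its socket probe into the default executor and returns a future, so callers can await it from the event loop. A minimal sketch of the calling side, mirroring the `api_endpoint` assignment in core.py (the wrapper function here is invented for illustration):

```python
"""Minimal sketch: awaiting get_local_ip from coroutine code."""
import asyncio

from hassio.tools import get_local_ip


async def detect_endpoint(loop):
    """Resolve the local IP without blocking the event loop.

    get_local_ip runs its socket probe in the default executor and
    hands back a future, so it can simply be awaited here.
    """
    api_endpoint = await get_local_ip(loop)
    print("API endpoint:", api_endpoint)
    return api_endpoint


loop = asyncio.get_event_loop()
loop.run_until_complete(detect_endpoint(loop))
```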
@@ -1,6 +1,6 @@
{
    "hassio_tag": "0.6",
    "homeassistant_tag": "0.42.3",
    "hassio_tag": "0.10",
    "homeassistant_tag": "0.42.4",
    "resinos_version": "0.3",
    "resinhup_version": "0.1"
}
@@ -1,6 +1,6 @@
{
    "hassio_tag": "0.6",
    "homeassistant_tag": "0.42.3",
    "hassio_tag": "0.10",
    "homeassistant_tag": "0.42.4",
    "resinos_version": "0.3",
    "resinhup_version": "0.1"
}