Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-15 03:59:20 +00:00)

Compare commits (66 commits)
Commits (SHA1):
30243c39e6
d285fd4ad4
7a0b9cc1ac
cc63008a86
f9c7371140
71590f90ae
e1028d6eca
f231d54daa
094c5968f4
6c217d506c
0d867af79f
c9876988da
454d82d985
14ee26ea29
86a7f11f64
78d1e1d9e7
a3f67809a6
e0be15cb45
f1ce5faf17
322480bba1
d2db89a665
fc17893158
e2bf267713
e25d30af52
2bd1636097
c019d1f3c5
5b23347563
daab4a86b2
5831177fd8
f9500f6d90
29ac861b87
b05f2db023
8af1dfc882
c76e851029
b5ec1e0cfd
fe72e768ec
360f546ab0
eb0ee31b5a
62df079be7
40e5e6eb9d
dbc080c24d
f340a19e40
20856126c8
3ef76a4ada
14500d3ac4
318ca828cc
5c70d68262
082770256b
ae003e5b76
530f17d502
f127de8059
9afb136648
07239fec08
23661dc2fd
de34c058a1
820daa4f2b
fe4e1a1933
8c191e8c98
9e95e8671e
1ad196424f
878eb40258
61e133df6b
23278550be
ce01e53806
716018a26f
912e24229f
API.md (18 changed lines)
@@ -11,7 +11,6 @@ Communicate over unix socket with a host daemon.
# reboot
# shutdown
# host-update [v]
# supervisor-update [v]

# network info
# network hostname xy
@@ -24,9 +23,8 @@ Communicate over unix socket with a host daemon.

level:
- 1: power functions
- 2: supervisor update
- 4: host update
- 8: network functions
- 2: host update
- 4: network functions

Answer:
```
@@ -60,6 +58,8 @@ On success

### HassIO

- `/supervisor/ping`

- `/supervisor/info`

```json
@@ -67,7 +67,15 @@ On success
    "version": "INSTALL_VERSION",
    "current": "CURRENT_VERSION",
    "beta": "true|false",
    "addons": {}
    "addons": [
      {
        "name": "xy bla",
        "slug": "xy",
        "version": "CURRENT_VERSION",
        "installed": "none|INSTALL_VERSION",
        "description": "description"
      }
    ]
}
```

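The `/supervisor/ping` and `/supervisor/info` endpoints described above can be exercised with a small client. A minimal sketch, assuming the supervisor REST API (the aiohttp web app added elsewhere in this changeset) is reachable over plain HTTP; `SUPERVISOR_URL` is a placeholder, and the exact host, port, and response wrapping may differ:

```python
"""Sketch: query the HassIO supervisor REST API endpoints documented above."""
import asyncio

import aiohttp

# Placeholder address; the real host/port of the supervisor API may differ.
SUPERVISOR_URL = "http://hassio.local"


async def main():
    async with aiohttp.ClientSession() as session:
        # Liveness probe
        async with session.get(f"{SUPERVISOR_URL}/supervisor/ping") as resp:
            print("ping status:", resp.status)

        # Version, beta flag and addon list, as described in API.md above
        async with session.get(f"{SUPERVISOR_URL}/supervisor/info") as resp:
            print("info:", await resp.json())


if __name__ == "__main__":
    asyncio.run(main())
```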
@@ -6,12 +6,6 @@ It is a docker image (supervisor) they manage HomeAssistant docker and give a in
[HassIO-Addons](https://github.com/pvizeli/hassio-addons)
[HassIO-Build](https://github.com/pvizeli/hassio-build)

## History
- **0.1**: Initial supervisor with setup HomeAssistant docker
- **0.2**: Support for basic HostControll
- **0.3**: Refactor code and add basic rest api
- **0.4**: Move network api code / ssl folder

# Hardware Image
The image is based on ResinOS and Yocto Linux. It comes with the HassIO supervisor pre-installed. This includes support to update the supervisor over the air. After flashing, your host OS will not require any more maintenance! The image does not include Home Assistant; instead, it will be downloaded when the image boots up for the first time.

@@ -1,7 +1,7 @@
"""Main file for HassIO."""
import asyncio
import logging
import signal
import sys

import hassio.bootstrap as bootstrap
import hassio.core as core
@@ -23,13 +23,11 @@ if __name__ == "__main__":
    loop.run_until_complete(hassio.setup())

    _LOGGER.info("Start Hassio task")
    loop.create_task(hassio.start())

    try:
        loop.add_signal_handler(
            signal.SIGTERM, lambda: loop.create_task(hassio.stop()))
    except ValueError:
        _LOGGER.warning("Could not bind to SIGTERM")
    loop.call_soon_threadsafe(loop.create_task, hassio.start())
    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)

    loop.run_forever()
    loop.close()

    _LOGGER.info("Close Hassio")
    sys.exit(hassio.exit_code)

hassio/addons/__init__.py (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
"""Init file for HassIO addons."""
|
||||
import asyncio
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from .data import AddonsData
|
||||
from .git import AddonsRepo
|
||||
from ..const import STATE_STOPPED, STATE_STARTED
|
||||
from ..dock.addon import DockerAddon
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AddonManager(AddonsData):
|
||||
"""Manage addons inside HassIO."""
|
||||
|
||||
def __init__(self, config, loop, dock):
|
||||
"""Initialize docker base wrapper."""
|
||||
super().__init__(config)
|
||||
|
||||
self.loop = loop
|
||||
self.dock = dock
|
||||
self.repo = AddonsRepo(config, loop)
|
||||
self.dockers = {}
|
||||
|
||||
async def prepare(self, arch):
|
||||
"""Startup addon management."""
|
||||
self.arch = arch
|
||||
|
||||
# load addon repository
|
||||
if await self.repo.load():
|
||||
self.read_addons_repo()
|
||||
|
||||
# load installed addons
|
||||
for addon in self.list_installed:
|
||||
self.dockers[addon] = DockerAddon(
|
||||
self.config, self.loop, self.dock, self, addon)
|
||||
|
||||
async def relaod(self):
|
||||
"""Update addons from repo and reload list."""
|
||||
if not await self.repo.pull():
|
||||
return
|
||||
self.read_addons_repo()
|
||||
|
||||
# remove stalled addons
|
||||
tasks = []
|
||||
for addon in self.list_removed:
|
||||
_LOGGER.info("Old addon %s found")
|
||||
tasks.append(self.loop.create_task(self.uninstall(addon)))
|
||||
|
||||
if tasks:
|
||||
await asyncio.wait(tasks, loop=self.loop)
|
||||
|
||||
async def auto_boot(self, start_type):
|
||||
"""Boot addons with mode auto."""
|
||||
boot_list = self.list_startup(start_type)
|
||||
tasks = []
|
||||
|
||||
for addon in boot_list:
|
||||
tasks.append(self.loop.create_task(self.start(addon)))
|
||||
|
||||
_LOGGER.info("Startup %s run %d addons", start_type, len(tasks))
|
||||
if tasks:
|
||||
await asyncio.wait(tasks, loop=self.loop)
|
||||
|
||||
async def install(self, addon, version=None):
|
||||
"""Install a addon."""
|
||||
if not self.exists_addon(addon):
|
||||
_LOGGER.error("Addon %s not exists for install", addon)
|
||||
return False
|
||||
|
||||
if self.is_installed(addon):
|
||||
_LOGGER.error("Addon %s is already installed", addon)
|
||||
return False
|
||||
|
||||
if not os.path.isdir(self.path_data(addon)):
|
||||
_LOGGER.info("Create Home-Assistant addon data folder %s",
|
||||
self.path_data(addon))
|
||||
os.mkdir(self.path_data(addon))
|
||||
|
||||
addon_docker = DockerAddon(
|
||||
self.config, self.loop, self.dock, self, addon)
|
||||
|
||||
version = version or self.get_version(addon)
|
||||
if not await addon_docker.install(version):
|
||||
return False
|
||||
|
||||
self.dockers[addon] = addon_docker
|
||||
self.set_install_addon(addon, version)
|
||||
return True
|
||||
|
||||
async def uninstall(self, addon):
|
||||
"""Remove a addon."""
|
||||
if not self.is_installed(addon):
|
||||
_LOGGER.error("Addon %s is already uninstalled", addon)
|
||||
return False
|
||||
|
||||
if addon not in self.dockers:
|
||||
_LOGGER.error("No docker found for addon %s", addon)
|
||||
return False
|
||||
|
||||
if not await self.dockers[addon].remove():
|
||||
return False
|
||||
|
||||
if os.path.isdir(self.path_data(addon)):
|
||||
_LOGGER.info("Remove Home-Assistant addon data folder %s",
|
||||
self.path_data(addon))
|
||||
shutil.rmtree(self.path_data(addon))
|
||||
|
||||
self.dockers.pop(addon)
|
||||
self.set_uninstall_addon(addon)
|
||||
return True
|
||||
|
||||
async def state(self, addon):
|
||||
"""Return running state of addon."""
|
||||
if addon not in self.dockers:
|
||||
_LOGGER.error("No docker found for addon %s", addon)
|
||||
return
|
||||
|
||||
if await self.dockers[addon].is_running():
|
||||
return STATE_STARTED
|
||||
return STATE_STOPPED
|
||||
|
||||
async def start(self, addon):
|
||||
"""Set options and start addon."""
|
||||
if addon not in self.dockers:
|
||||
_LOGGER.error("No docker found for addon %s", addon)
|
||||
return False
|
||||
|
||||
if not self.write_addon_options(addon):
|
||||
_LOGGER.error("Can't write options for addon %s", addon)
|
||||
return False
|
||||
|
||||
return await self.dockers[addon].run()
|
||||
|
||||
async def stop(self, addon):
|
||||
"""Stop addon."""
|
||||
if addon not in self.dockers:
|
||||
_LOGGER.error("No docker found for addon %s", addon)
|
||||
return False
|
||||
|
||||
return await self.dockers[addon].stop()
|
||||
|
||||
async def update(self, addon, version=None):
|
||||
"""Update addon."""
|
||||
if not self.is_installed(addon):
|
||||
_LOGGER.error("Addon %s is not installed", addon)
|
||||
return False
|
||||
|
||||
if addon not in self.dockers:
|
||||
_LOGGER.error("No docker found for addon %s", addon)
|
||||
return False
|
||||
|
||||
version = version or self.get_version(addon)
|
||||
if await self.dockers[addon].update(version):
|
||||
self.set_version(addon, version)
|
||||
return True
|
||||
return False
|
hassio/addons/data.py (new file, 230 lines)
@@ -0,0 +1,230 @@
|
||||
"""Init file for HassIO addons."""
|
||||
import logging
|
||||
import glob
|
||||
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from .validate import validate_options, SCHEMA_ADDON_CONFIG
|
||||
from ..const import (
|
||||
FILE_HASSIO_ADDONS, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON,
|
||||
ATTR_STARTUP, ATTR_BOOT, ATTR_MAP_SSL, ATTR_MAP_CONFIG, ATTR_OPTIONS,
|
||||
ATTR_PORTS, BOOT_AUTO, DOCKER_REPO, ATTR_INSTALLED, ATTR_SCHEMA,
|
||||
ATTR_IMAGE, ATTR_MAP_HASSIO)
|
||||
from ..config import Config
|
||||
from ..tools import read_json_file, write_json_file
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ADDONS_REPO_PATTERN = "{}/*/config.json"
|
||||
|
||||
|
||||
class AddonsData(Config):
|
||||
"""Hold data for addons inside HassIO."""
|
||||
|
||||
def __init__(self, config):
|
||||
"""Initialize data holder."""
|
||||
super().__init__(FILE_HASSIO_ADDONS)
|
||||
self.config = config
|
||||
self._addons_data = {}
|
||||
self.arch = None
|
||||
|
||||
def read_addons_repo(self):
|
||||
"""Read data from addons repository."""
|
||||
self._addons_data = {}
|
||||
|
||||
self._read_addons_folder(self.config.path_addons_repo)
|
||||
self._read_addons_folder(self.config.path_addons_custom)
|
||||
|
||||
def _read_addons_folder(self, folder):
|
||||
"""Read data from addons folder."""
|
||||
pattern = ADDONS_REPO_PATTERN.format(folder)
|
||||
|
||||
for addon in glob.iglob(pattern):
|
||||
try:
|
||||
addon_config = read_json_file(addon)
|
||||
|
||||
addon_config = SCHEMA_ADDON_CONFIG(addon_config)
|
||||
self._addons_data[addon_config[ATTR_SLUG]] = addon_config
|
||||
|
||||
except (OSError, KeyError):
|
||||
_LOGGER.warning("Can't read %s", addon)
|
||||
|
||||
except vol.Invalid as ex:
|
||||
_LOGGER.warning("Can't read %s -> %s", addon,
|
||||
humanize_error(addon_config, ex))
|
||||
|
||||
@property
|
||||
def list_installed(self):
|
||||
"""Return a list of installed addons."""
|
||||
return set(self._data.keys())
|
||||
|
||||
@property
|
||||
def list_all(self):
|
||||
"""Return a list of available addons."""
|
||||
return set(self._addons_data.keys())
|
||||
|
||||
@property
|
||||
def list(self):
|
||||
"""Return a list of available addons."""
|
||||
data = []
|
||||
for addon, values in self._addons_data.items():
|
||||
data.append({
|
||||
ATTR_NAME: values[ATTR_NAME],
|
||||
ATTR_SLUG: values[ATTR_SLUG],
|
||||
ATTR_DESCRIPTON: values[ATTR_DESCRIPTON],
|
||||
ATTR_VERSION: values[ATTR_VERSION],
|
||||
ATTR_INSTALLED: self._data.get(addon, {}).get(ATTR_VERSION),
|
||||
})
|
||||
|
||||
return data
|
||||
|
||||
def list_startup(self, start_type):
|
||||
"""Get list of installed addon with need start by type."""
|
||||
addon_list = set()
|
||||
for addon in self._data.keys():
|
||||
if self.get_boot(addon) != BOOT_AUTO:
|
||||
continue
|
||||
|
||||
try:
|
||||
if self._addons_data[addon][ATTR_STARTUP] == start_type:
|
||||
addon_list.add(addon)
|
||||
except KeyError:
|
||||
_LOGGER.warning("Orphaned addon detect %s", addon)
|
||||
continue
|
||||
|
||||
return addon_list
|
||||
|
||||
@property
|
||||
def list_removed(self):
|
||||
"""Return local addons they not support from repo."""
|
||||
addon_list = set()
|
||||
for addon in self._data.keys():
|
||||
if addon not in self._addons_data:
|
||||
addon_list.add(addon)
|
||||
|
||||
return addon_list
|
||||
|
||||
def exists_addon(self, addon):
|
||||
"""Return True if a addon exists."""
|
||||
return addon in self._addons_data
|
||||
|
||||
def is_installed(self, addon):
|
||||
"""Return True if a addon is installed."""
|
||||
return addon in self._data
|
||||
|
||||
def version_installed(self, addon):
|
||||
"""Return installed version."""
|
||||
return self._data[addon][ATTR_VERSION]
|
||||
|
||||
def set_install_addon(self, addon, version):
|
||||
"""Set addon as installed."""
|
||||
self._data[addon] = {
|
||||
ATTR_VERSION: version,
|
||||
ATTR_OPTIONS: {}
|
||||
}
|
||||
self.save()
|
||||
|
||||
def set_uninstall_addon(self, addon):
|
||||
"""Set addon as uninstalled."""
|
||||
self._data.pop(addon, None)
|
||||
self.save()
|
||||
|
||||
def set_options(self, addon, options):
|
||||
"""Store user addon options."""
|
||||
self._data[addon][ATTR_OPTIONS] = options
|
||||
self.save()
|
||||
|
||||
def set_version(self, addon, version):
|
||||
"""Update version of addon."""
|
||||
self._data[addon][ATTR_VERSION] = version
|
||||
self.save()
|
||||
|
||||
def get_options(self, addon):
|
||||
"""Return options with local changes."""
|
||||
opt = self._addons_data[addon][ATTR_OPTIONS]
|
||||
if addon in self._data:
|
||||
opt.update(self._data[addon][ATTR_OPTIONS])
|
||||
return opt
|
||||
|
||||
def get_boot(self, addon):
|
||||
"""Return boot config with prio local settings."""
|
||||
if ATTR_BOOT in self._data[addon]:
|
||||
return self._data[addon][ATTR_BOOT]
|
||||
|
||||
return self._addons_data[addon][ATTR_BOOT]
|
||||
|
||||
def get_name(self, addon):
|
||||
"""Return name of addon."""
|
||||
return self._addons_data[addon][ATTR_NAME]
|
||||
|
||||
def get_description(self, addon):
|
||||
"""Return description of addon."""
|
||||
return self._addons_data[addon][ATTR_DESCRIPTON]
|
||||
|
||||
def get_version(self, addon):
|
||||
"""Return version of addon."""
|
||||
return self._addons_data[addon][ATTR_VERSION]
|
||||
|
||||
def get_slug(self, addon):
|
||||
"""Return slug of addon."""
|
||||
return self._addons_data[addon][ATTR_SLUG]
|
||||
|
||||
def get_ports(self, addon):
|
||||
"""Return ports of addon."""
|
||||
return self._addons_data[addon].get(ATTR_PORTS)
|
||||
|
||||
def get_image(self, addon):
|
||||
"""Return image name of addon."""
|
||||
if ATTR_IMAGE not in self._addons_data[addon]:
|
||||
return "{}/{}-addon-{}".format(
|
||||
DOCKER_REPO, self.arch, self.get_slug(addon))
|
||||
|
||||
return self._addons_data[addon][ATTR_IMAGE]
|
||||
|
||||
def need_config(self, addon):
|
||||
"""Return True if config map is needed."""
|
||||
return self._addons_data[addon][ATTR_MAP_CONFIG]
|
||||
|
||||
def need_ssl(self, addon):
|
||||
"""Return True if ssl map is needed."""
|
||||
return self._addons_data[addon][ATTR_MAP_SSL]
|
||||
|
||||
def need_hassio(self, addon):
|
||||
"""Return True if hassio map is needed."""
|
||||
return self._addons_data[addon][ATTR_MAP_HASSIO]
|
||||
|
||||
def path_data(self, addon):
|
||||
"""Return addon data path inside supervisor."""
|
||||
return "{}/{}".format(
|
||||
self.config.path_addons_data, self._addons_data[addon][ATTR_SLUG])
|
||||
|
||||
def path_data_docker(self, addon):
|
||||
"""Return addon data path external for docker."""
|
||||
return "{}/{}".format(self.config.path_addons_data_docker,
|
||||
self._addons_data[addon][ATTR_SLUG])
|
||||
|
||||
def path_addon_options(self, addon):
|
||||
"""Return path to addons options."""
|
||||
return "{}/options.json".format(self.path_data(addon))
|
||||
|
||||
def write_addon_options(self, addon):
|
||||
"""Return True if addon options is written to data."""
|
||||
schema = self.get_schema(addon)
|
||||
options = self.get_options(addon)
|
||||
|
||||
try:
|
||||
schema(options)
|
||||
return write_json_file(self.path_addon_options(addon), options)
|
||||
except vol.Invalid as ex:
|
||||
_LOGGER.error("Addon %s have wrong options -> %s", addon,
|
||||
humanize_error(options, ex))
|
||||
|
||||
return False
|
||||
|
||||
def get_schema(self, addon):
|
||||
"""Create a schema for addon options."""
|
||||
raw_schema = self._addons_data[addon][ATTR_SCHEMA]
|
||||
|
||||
schema = vol.Schema(vol.All(dict, validate_options(raw_schema)))
|
||||
return schema
|
hassio/addons/git.py (new file, 71 lines)
@@ -0,0 +1,71 @@
"""Init file for HassIO addons git."""
import asyncio
import logging
import os

import git

from ..const import URL_HASSIO_ADDONS

_LOGGER = logging.getLogger(__name__)


class AddonsRepo(object):
    """Manage addons git repo."""

    def __init__(self, config, loop):
        """Initialize docker base wrapper."""
        self.config = config
        self.loop = loop
        self.repo = None
        self._lock = asyncio.Lock(loop=loop)

    async def load(self):
        """Init git addon repo."""
        if not os.path.isdir(self.config.path_addons_repo):
            return await self.clone()

        async with self._lock:
            try:
                _LOGGER.info("Load addons repository")
                self.repo = await self.loop.run_in_executor(
                    None, git.Repo, self.config.path_addons_repo)

            except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
                _LOGGER.error("Can't load addons repo: %s.", err)
                return False

        return True

    async def clone(self):
        """Clone git addon repo."""
        async with self._lock:
            try:
                _LOGGER.info("Clone addons repository")
                self.repo = await self.loop.run_in_executor(
                    None, git.Repo.clone_from, URL_HASSIO_ADDONS,
                    self.config.path_addons_repo)

            except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
                _LOGGER.error("Can't clone addons repo: %s.", err)
                return False

        return True

    async def pull(self):
        """Pull git addon repo."""
        if self._lock.locked():
            _LOGGER.warning("It is already a task in progress.")
            return False

        async with self._lock:
            try:
                _LOGGER.info("Pull addons repository")
                await self.loop.run_in_executor(
                    None, self.repo.remotes.origin.pull)

            except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
                _LOGGER.error("Can't pull addons repo: %s.", err)
                return False

        return True

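The `AddonsRepo` class above funnels every blocking GitPython call through `loop.run_in_executor` while holding an `asyncio.Lock`, so a clone or pull never stalls the event loop and never overlaps another repo operation. A standalone sketch of the same pattern, with the target directory as a placeholder assumption (the URL is the add-ons repository also used by `URL_HASSIO_ADDONS`):

```python
"""Sketch: run blocking GitPython calls from asyncio without blocking the loop."""
import asyncio

import git  # GitPython

# Placeholders for illustration only.
REPO_URL = "https://github.com/pvizeli/hassio-addons"
TARGET_DIR = "/tmp/hassio-addons"

_lock = asyncio.Lock()


async def clone_repo(loop):
    """Clone REPO_URL into TARGET_DIR inside a thread executor."""
    async with _lock:  # serialize git operations, as AddonsRepo does
        try:
            repo = await loop.run_in_executor(
                None, git.Repo.clone_from, REPO_URL, TARGET_DIR)
        except (git.GitCommandError, git.NoSuchPathError) as err:
            print("clone failed:", err)
            return None
        return repo


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    loop.run_until_complete(clone_repo(loop))
```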
hassio/addons/validate.py (new file, 113 lines)
@@ -0,0 +1,113 @@
|
||||
"""Validate addons options schema."""
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
|
||||
ATTR_BOOT, ATTR_MAP_SSL, ATTR_MAP_CONFIG, ATTR_OPTIONS,
|
||||
ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER, STARTUP_BEFORE, BOOT_AUTO,
|
||||
BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, ATTR_MAP_HASSIO)
|
||||
|
||||
V_STR = 'str'
|
||||
V_INT = 'int'
|
||||
V_FLOAT = 'float'
|
||||
V_BOOL = 'bool'
|
||||
V_EMAIL = 'email'
|
||||
V_URL = 'url'
|
||||
|
||||
ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL])
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_ADDON_CONFIG = vol.Schema({
|
||||
vol.Required(ATTR_NAME): vol.Coerce(str),
|
||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||
vol.Required(ATTR_SLUG): vol.Coerce(str),
|
||||
vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
|
||||
vol.Required(ATTR_STARTUP):
|
||||
vol.In([STARTUP_BEFORE, STARTUP_AFTER, STARTUP_ONCE]),
|
||||
vol.Required(ATTR_BOOT):
|
||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_PORTS): dict,
|
||||
vol.Optional(ATTR_MAP_CONFIG, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_MAP_SSL, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_MAP_HASSIO, default=False): vol.Boolean(),
|
||||
vol.Required(ATTR_OPTIONS): dict,
|
||||
vol.Required(ATTR_SCHEMA): {
|
||||
vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
|
||||
vol.Any(ADDON_ELEMENT, {vol.Coerce(str): ADDON_ELEMENT})
|
||||
])
|
||||
},
|
||||
vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
|
||||
})
|
||||
|
||||
|
||||
def validate_options(raw_schema):
|
||||
"""Validate schema."""
|
||||
def validate(struct):
|
||||
"""Create schema validator for addons options."""
|
||||
options = {}
|
||||
|
||||
# read options
|
||||
for key, value in struct.items():
|
||||
if key not in raw_schema:
|
||||
raise vol.Invalid("Unknown options {}.".format(key))
|
||||
|
||||
typ = raw_schema[key]
|
||||
try:
|
||||
if isinstance(typ, list):
|
||||
# nested value
|
||||
options[key] = _nested_validate(typ[0], value)
|
||||
else:
|
||||
# normal value
|
||||
options[key] = _single_validate(typ, value)
|
||||
except (IndexError, KeyError):
|
||||
raise vol.Invalid(
|
||||
"Type error for {}.".format(key)) from None
|
||||
|
||||
return options
|
||||
|
||||
return validate
|
||||
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
def _single_validate(typ, value):
|
||||
"""Validate a single element."""
|
||||
try:
|
||||
if typ == V_STR:
|
||||
return str(value)
|
||||
elif typ == V_INT:
|
||||
return int(value)
|
||||
elif typ == V_FLOAT:
|
||||
return float(value)
|
||||
elif typ == V_BOOL:
|
||||
return vol.Boolean()(value)
|
||||
elif typ == V_EMAIL:
|
||||
return vol.Email()(value)
|
||||
elif typ == V_URL:
|
||||
return vol.Url()(value)
|
||||
|
||||
raise vol.Invalid("Fatal error for {}.".format(value))
|
||||
except TypeError:
|
||||
raise vol.Invalid(
|
||||
"Type {} error for {}.".format(typ, value)) from None
|
||||
|
||||
|
||||
def _nested_validate(typ, data_list):
|
||||
"""Validate nested items."""
|
||||
options = []
|
||||
|
||||
for element in data_list:
|
||||
# dict list
|
||||
if isinstance(typ, dict):
|
||||
c_options = {}
|
||||
for c_key, c_value in element.items():
|
||||
if c_key not in typ:
|
||||
raise vol.Invalid(
|
||||
"Unknown nested options {}.".format(c_key))
|
||||
|
||||
c_options[c_key] = _single_validate(typ[c_key], c_value)
|
||||
options.append(c_options)
|
||||
# normal list
|
||||
else:
|
||||
options.append(_single_validate(typ, element))
|
||||
|
||||
return options
|
@@ -3,10 +3,11 @@ import logging
|
||||
|
||||
from aiohttp import web
|
||||
|
||||
from .addons import APIAddons
|
||||
from .homeassistant import APIHomeAssistant
|
||||
from .host import APIHost
|
||||
from .network import APINetwork
|
||||
from .supervisor import APISupervisor
|
||||
from .homeassistant import APIHomeAssistant
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -40,10 +41,12 @@ class RestAPI(object):
|
||||
self.webapp.router.add_get('/network/info', api_net.info)
|
||||
self.webapp.router.add_get('/network/options', api_net.options)
|
||||
|
||||
def register_supervisor(self, host_controll):
|
||||
def register_supervisor(self, supervisor, addons):
|
||||
"""Register supervisor function."""
|
||||
api_supervisor = APISupervisor(self.config, self.loop, host_controll)
|
||||
api_supervisor = APISupervisor(
|
||||
self.config, self.loop, supervisor, addons)
|
||||
|
||||
self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
|
||||
self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
|
||||
self.webapp.router.add_get('/supervisor/update', api_supervisor.update)
|
||||
self.webapp.router.add_get(
|
||||
@@ -56,6 +59,21 @@ class RestAPI(object):
|
||||
self.webapp.router.add_get('/homeassistant/info', api_hass.info)
|
||||
self.webapp.router.add_get('/homeassistant/update', api_hass.update)
|
||||
|
||||
def register_addons(self, addons):
|
||||
"""Register homeassistant function."""
|
||||
api_addons = APIAddons(self.config, self.loop, addons)
|
||||
|
||||
self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
|
||||
self.webapp.router.add_get(
|
||||
'/addons/{addon}/install', api_addons.install)
|
||||
self.webapp.router.add_get(
|
||||
'/addons/{addon}/uninstall', api_addons.uninstall)
|
||||
self.webapp.router.add_get('/addons/{addon}/start', api_addons.start)
|
||||
self.webapp.router.add_get('/addons/{addon}/stop', api_addons.stop)
|
||||
self.webapp.router.add_get('/addons/{addon}/update', api_addons.update)
|
||||
self.webapp.router.add_get(
|
||||
'/addons/{addon}/options', api_addons.options)
|
||||
|
||||
async def start(self):
|
||||
"""Run rest api webserver."""
|
||||
self._handler = self.webapp.make_handler(loop=self.loop)
|
||||
|
hassio/api/addons.py (new file, 126 lines)
@@ -0,0 +1,126 @@
|
||||
"""Init file for HassIO homeassistant rest api."""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from .util import api_process, api_validate
|
||||
from ..const import (
|
||||
ATTR_VERSION, ATTR_CURRENT, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
|
||||
STATE_STOPPED, STATE_STARTED)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
|
||||
|
||||
class APIAddons(object):
|
||||
"""Handle rest api for addons functions."""
|
||||
|
||||
def __init__(self, config, loop, addons):
|
||||
"""Initialize homeassistant rest api part."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
self.addons = addons
|
||||
|
||||
def _extract_addon(self, request, check_installed=True):
|
||||
"""Return addon and if not exists trow a exception."""
|
||||
addon = request.match_info.get('addon')
|
||||
|
||||
# check data
|
||||
if not self.addons.exists_addon(addon):
|
||||
raise RuntimeError("Addon not exists")
|
||||
if check_installed and not self.addons.is_installed(addon):
|
||||
raise RuntimeError("Addon is not installed")
|
||||
|
||||
return addon
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return addon information."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
info = {
|
||||
ATTR_VERSION: self.addons.version_installed(addon),
|
||||
ATTR_CURRENT: self.addons.get_version(addon),
|
||||
ATTR_STATE: await self.addons.state(addon),
|
||||
ATTR_BOOT: self.addons.get_boot(addon),
|
||||
ATTR_OPTIONS: self.addons.get_options(addon),
|
||||
}
|
||||
return info
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
"""Store user options for addon."""
|
||||
addon = self._extract_addon(request)
|
||||
schema = self.addons.get_schema(addon)
|
||||
|
||||
options = await api_validate(schema, request)
|
||||
self.addons.set_options(addon, options)
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def install(self, request):
|
||||
"""Install addon."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
version = body.get(
|
||||
ATTR_VERSION, self.addons.get_version(addon))
|
||||
|
||||
return await asyncio.shield(
|
||||
self.addons.install(addon, version), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def uninstall(self, request):
|
||||
"""Uninstall addon."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
return await asyncio.shield(
|
||||
self.addons.uninstall(addon), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def start(self, request):
|
||||
"""Start addon."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
if await self.addons.state(addon) == STATE_STARTED:
|
||||
raise RuntimeError("Addon is already running")
|
||||
|
||||
# validate options
|
||||
try:
|
||||
schema = self.addons.get_schema(addon)
|
||||
options = self.addons.get_options(addon)
|
||||
schema(options)
|
||||
except vol.Invalid as ex:
|
||||
raise RuntimeError(humanize_error(options, ex)) from None
|
||||
|
||||
return await asyncio.shield(
|
||||
self.addons.start(addon), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def stop(self, request):
|
||||
"""Stop addon."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
if await self.addons.state(addon) == STATE_STOPPED:
|
||||
raise RuntimeError("Addon is already stoped")
|
||||
|
||||
return await asyncio.shield(
|
||||
self.addons.stop(addon), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
"""Update addon."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
addon = self._extract_addon(request)
|
||||
version = body.get(
|
||||
ATTR_VERSION, self.addons.get_version(addon))
|
||||
|
||||
if version == self.addons.version_installed(addon):
|
||||
raise RuntimeError("Version is already in use")
|
||||
|
||||
return await asyncio.shield(
|
||||
self.addons.update(addon, version), loop=self.loop)
|
@@ -2,11 +2,17 @@
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from .util import api_process, json_loads
|
||||
import voluptuous as vol
|
||||
|
||||
from .util import api_process, api_validate
|
||||
from ..const import ATTR_VERSION, ATTR_CURRENT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
|
||||
|
||||
class APIHomeAssistant(object):
|
||||
"""Handle rest api for homeassistant functions."""
|
||||
@@ -30,13 +36,14 @@ class APIHomeAssistant(object):
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
"""Update host OS."""
|
||||
body = await request.json(loads=json_loads)
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.config.current_homeassistant)
|
||||
|
||||
if self.dock_hass.in_progress:
|
||||
raise RuntimeError("Other task is in progress.")
|
||||
raise RuntimeError("Other task is in progress")
|
||||
|
||||
if version == self.dock_hass.version:
|
||||
raise RuntimeError("%s is already in use.", version)
|
||||
raise RuntimeError("Version is already in use")
|
||||
|
||||
return await asyncio.shield(self.dock_hass.update(version))
|
||||
return await asyncio.shield(
|
||||
self.dock_hass.update(version), loop=self.loop)
|
||||
|
@@ -1,11 +1,19 @@
|
||||
"""Init file for HassIO host rest api."""
|
||||
import logging
|
||||
|
||||
from .util import api_process_hostcontroll, json_loads
|
||||
import voluptuous as vol
|
||||
|
||||
from .util import api_process_hostcontroll, api_process, api_validate
|
||||
from ..const import ATTR_VERSION
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
UNKNOWN = 'unknown'
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
|
||||
|
||||
class APIHost(object):
|
||||
"""Handle rest api for host functions."""
|
||||
@@ -16,10 +24,20 @@ class APIHost(object):
|
||||
self.loop = loop
|
||||
self.host_controll = host_controll
|
||||
|
||||
@api_process_hostcontroll
|
||||
def info(self, request):
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return host information."""
|
||||
return self.host_controll.info()
|
||||
if not self.host_controll.active:
|
||||
info = {
|
||||
'os': UNKNOWN,
|
||||
'version': UNKNOWN,
|
||||
'current': UNKNOWN,
|
||||
'level': 0,
|
||||
'hostname': UNKNOWN,
|
||||
}
|
||||
return info
|
||||
|
||||
return await self.host_controll.info()
|
||||
|
||||
@api_process_hostcontroll
|
||||
def reboot(self, request):
|
||||
@@ -31,23 +49,13 @@ class APIHost(object):
|
||||
"""Poweroff host."""
|
||||
return self.host_controll.shutdown()
|
||||
|
||||
@api_process_hostcontroll
|
||||
def network_info(self, request):
|
||||
"""Edit network settings."""
|
||||
pass
|
||||
|
||||
@api_process_hostcontroll
|
||||
def network_update(self, request):
|
||||
"""Edit network settings."""
|
||||
pass
|
||||
|
||||
@api_process_hostcontroll
|
||||
async def update(self, request):
|
||||
"""Update host OS."""
|
||||
body = await request.json(loads=json_loads)
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION)
|
||||
|
||||
if version == self.host_controll.version:
|
||||
raise RuntimeError("%s is already in use.", version)
|
||||
raise RuntimeError("Version is already in use")
|
||||
|
||||
return await self.host_controll.host_update(version=version)
|
||||
|
@@ -1,20 +1,39 @@
|
||||
"""Init file for HassIO supervisor rest api."""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from .util import api_process, api_process_hostcontroll, json_loads
|
||||
from ..const import ATTR_VERSION, ATTR_CURRENT, ATTR_BETA, HASSIO_VERSION
|
||||
import voluptuous as vol
|
||||
|
||||
from .util import api_process, api_validate
|
||||
from ..const import (
|
||||
ATTR_ADDONS, ATTR_VERSION, ATTR_CURRENT, ATTR_BETA, HASSIO_VERSION)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_OPTIONS = vol.Schema({
|
||||
# pylint: disable=no-value-for-parameter
|
||||
vol.Optional(ATTR_BETA): vol.Boolean(),
|
||||
})
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
|
||||
|
||||
class APISupervisor(object):
|
||||
"""Handle rest api for supervisor functions."""
|
||||
|
||||
def __init__(self, config, loop, host_controll):
|
||||
def __init__(self, config, loop, supervisor, addons):
|
||||
"""Initialize supervisor rest api part."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
self.host_controll = host_controll
|
||||
self.supervisor = supervisor
|
||||
self.addons = addons
|
||||
|
||||
@api_process
|
||||
async def ping(self, request):
|
||||
"""Return ok for signal that the api is ready."""
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
@@ -23,27 +42,27 @@ class APISupervisor(object):
|
||||
ATTR_VERSION: HASSIO_VERSION,
|
||||
ATTR_CURRENT: self.config.current_hassio,
|
||||
ATTR_BETA: self.config.upstream_beta,
|
||||
ATTR_ADDONS: self.addons.list,
|
||||
}
|
||||
|
||||
return info
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
"""Set supervisor options."""
|
||||
body = await request.json(loads=json_loads)
|
||||
body = await api_validate(SCHEMA_OPTIONS, request)
|
||||
|
||||
if ATTR_BETA in body:
|
||||
self.config.upstream_beta = body[ATTR_BETA]
|
||||
|
||||
return self.config.save()
|
||||
|
||||
@api_process_hostcontroll
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
"""Update host OS."""
|
||||
body = await request.json(loads=json_loads)
|
||||
"""Update supervisor OS."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.config.current_hassio)
|
||||
|
||||
if version == HASSIO_VERSION:
|
||||
raise RuntimeError("%s is already in use.", version)
|
||||
if version == self.supervisor.version:
|
||||
raise RuntimeError("Version is already in use")
|
||||
|
||||
return await self.host_controll.supervisor_update(version=version)
|
||||
return await asyncio.shield(self.supervisor.update(version))
|
||||
|
@@ -4,6 +4,8 @@ import logging
|
||||
|
||||
from aiohttp import web
|
||||
from aiohttp.web_exceptions import HTTPServiceUnavailable
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from ..const import (
|
||||
JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR)
|
||||
@@ -52,7 +54,7 @@ def api_process_hostcontroll(method):
|
||||
if isinstance(answer, dict):
|
||||
return api_return_ok(data=answer)
|
||||
elif answer is None:
|
||||
return api_not_supported()
|
||||
return api_return_error("Function is not supported")
|
||||
elif answer:
|
||||
return api_return_ok()
|
||||
return api_return_error()
|
||||
@@ -72,10 +74,16 @@ def api_return_ok(data=None):
|
||||
"""Return a API ok answer."""
|
||||
return web.json_response({
|
||||
JSON_RESULT: RESULT_OK,
|
||||
JSON_DATA: data,
|
||||
JSON_DATA: data or {},
|
||||
})
|
||||
|
||||
|
||||
def api_not_supported():
|
||||
"""Return a api error with not supported."""
|
||||
return api_return_error("Function is not supported")
|
||||
async def api_validate(schema, request):
|
||||
"""Validate request data with schema."""
|
||||
data = await request.json(loads=json_loads)
|
||||
try:
|
||||
data = schema(data)
|
||||
except vol.Invalid as ex:
|
||||
raise RuntimeError(humanize_error(data, ex)) from None
|
||||
|
||||
return data
|
||||
|
@@ -2,6 +2,7 @@
|
||||
import logging
|
||||
import os
|
||||
import stat
|
||||
import signal
|
||||
|
||||
from colorlog import ColoredFormatter
|
||||
|
||||
@@ -26,6 +27,17 @@ def initialize_system_data(websession):
|
||||
_LOGGER.info("Create Home-Assistant ssl folder %s", config.path_ssl)
|
||||
os.mkdir(config.path_ssl)
|
||||
|
||||
# homeassistant addon data folder
|
||||
if not os.path.isdir(config.path_addons_data):
|
||||
_LOGGER.info("Create Home-Assistant addon data folder %s",
|
||||
config.path_addons_data)
|
||||
os.mkdir(config.path_addons_data)
|
||||
|
||||
if not os.path.isdir(config.path_addons_custom):
|
||||
_LOGGER.info("Create Home-Assistant addon custom folder %s",
|
||||
config.path_addons_custom)
|
||||
os.mkdir(config.path_addons_custom)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
@@ -70,3 +82,24 @@ def check_environment():
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def reg_signal(loop, hassio):
|
||||
"""Register SIGTERM, SIGKILL to stop system."""
|
||||
try:
|
||||
loop.add_signal_handler(
|
||||
signal.SIGTERM, lambda: loop.create_task(hassio.stop()))
|
||||
except (ValueError, RuntimeError):
|
||||
_LOGGER.warning("Could not bind to SIGTERM")
|
||||
|
||||
try:
|
||||
loop.add_signal_handler(
|
||||
signal.SIGHUP, lambda: loop.create_task(hassio.stop()))
|
||||
except (ValueError, RuntimeError):
|
||||
_LOGGER.warning("Could not bind to SIGHUP")
|
||||
|
||||
try:
|
||||
loop.add_signal_handler(
|
||||
signal.SIGINT, lambda: loop.create_task(hassio.stop()))
|
||||
except (ValueError, RuntimeError):
|
||||
_LOGGER.warning("Could not bind to SIGINT")
|
||||
|
hassio/config.py (101 changed lines)
@@ -1,10 +1,10 @@
|
||||
"""Bootstrap HassIO."""
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
from .const import FILE_HASSIO_CONFIG, HASSIO_SHARE
|
||||
from .tools import fetch_current_versions
|
||||
from .tools import (
|
||||
fetch_current_versions, write_json_file, read_json_file)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -14,26 +14,49 @@ HOMEASSISTANT_CURRENT = 'homeassistant_current'
|
||||
|
||||
HASSIO_SSL = "{}/ssl"
|
||||
HASSIO_CURRENT = 'hassio_current'
|
||||
HASSIO_CLEANUP = 'hassio_cleanup'
|
||||
|
||||
ADDONS_REPO = "{}/addons"
|
||||
ADDONS_DATA = "{}/addons_data"
|
||||
ADDONS_CUSTOM = "{}/addons_custom"
|
||||
|
||||
UPSTREAM_BETA = 'upstream_beta'
|
||||
|
||||
API_ENDPOINT = 'api_endpoint'
|
||||
|
||||
class CoreConfig(object):
|
||||
|
||||
class Config(object):
|
||||
"""Hold all config data."""
|
||||
|
||||
def __init__(self, websession, config_file=FILE_HASSIO_CONFIG):
|
||||
def __init__(self, config_file):
|
||||
"""Initialize config object."""
|
||||
self.websession = websession
|
||||
self._filename = config_file
|
||||
self._data = {}
|
||||
|
||||
# init or load data
|
||||
if os.path.isfile(self._filename):
|
||||
try:
|
||||
with open(self._filename, 'r') as cfile:
|
||||
self._data = json.loads(cfile.read())
|
||||
self._data = read_json_file(self._filename)
|
||||
except OSError:
|
||||
_LOGGER.warning("Can't read %s", self._filename)
|
||||
|
||||
def save(self):
|
||||
"""Store data to config file."""
|
||||
if not write_json_file(self._filename, self._data):
|
||||
_LOGGER.exception("Can't store config in %s", self._filename)
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class CoreConfig(Config):
|
||||
"""Hold all core config data."""
|
||||
|
||||
def __init__(self, websession):
|
||||
"""Initialize config object."""
|
||||
self.websession = websession
|
||||
|
||||
super().__init__(FILE_HASSIO_CONFIG)
|
||||
|
||||
# init data
|
||||
if not self._data:
|
||||
self._data.update({
|
||||
@@ -42,17 +65,6 @@ class CoreConfig(object):
|
||||
})
|
||||
self.save()
|
||||
|
||||
def save(self):
|
||||
"""Store data to config file."""
|
||||
try:
|
||||
with open(self._filename, 'w') as conf_file:
|
||||
conf_file.write(json.dumps(self._data))
|
||||
except OSError:
|
||||
_LOGGER.exception("Can't store config in %s", self._filename)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
async def fetch_update_infos(self):
|
||||
"""Read current versions from web."""
|
||||
current = await fetch_current_versions(
|
||||
@@ -68,6 +80,16 @@ class CoreConfig(object):
|
||||
|
||||
return False
|
||||
|
||||
@property
|
||||
def api_endpoint(self):
|
||||
"""Return IP address of api endpoint."""
|
||||
return self._data[API_ENDPOINT]
|
||||
|
||||
@api_endpoint.setter
|
||||
def api_endpoint(self, value):
|
||||
"""Store IP address of api endpoint."""
|
||||
self._data[API_ENDPOINT] = value
|
||||
|
||||
@property
|
||||
def upstream_beta(self):
|
||||
"""Return True if we run in beta upstream."""
|
||||
@@ -78,6 +100,20 @@ class CoreConfig(object):
|
||||
"""Set beta upstream mode."""
|
||||
self._data[UPSTREAM_BETA] = bool(value)
|
||||
|
||||
@property
|
||||
def hassio_cleanup(self):
|
||||
"""Return Version they need to cleanup."""
|
||||
return self._data.get(HASSIO_CLEANUP)
|
||||
|
||||
@hassio_cleanup.setter
|
||||
def hassio_cleanup(self, version):
|
||||
"""Set or remove cleanup flag."""
|
||||
if version is None:
|
||||
self._data.pop(HASSIO_CLEANUP, None)
|
||||
else:
|
||||
self._data[HASSIO_CLEANUP] = version
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def homeassistant_image(self):
|
||||
"""Return docker homeassistant repository."""
|
||||
@@ -93,10 +129,15 @@ class CoreConfig(object):
|
||||
"""Actual version of hassio."""
|
||||
return self._data.get(HASSIO_CURRENT)
|
||||
|
||||
@property
|
||||
def path_hassio_docker(self):
|
||||
"""Return hassio data path extern for docker."""
|
||||
return os.environ['SUPERVISOR_SHARE']
|
||||
|
||||
@property
|
||||
def path_config_docker(self):
|
||||
"""Return config path extern for docker."""
|
||||
return HOMEASSISTANT_CONFIG.format(os.environ['SUPERVISOR_SHARE'])
|
||||
return HOMEASSISTANT_CONFIG.format(self.path_hassio_docker)
|
||||
|
||||
@property
|
||||
def path_config(self):
|
||||
@@ -106,9 +147,29 @@ class CoreConfig(object):
|
||||
@property
|
||||
def path_ssl_docker(self):
|
||||
"""Return SSL path extern for docker."""
|
||||
return HASSIO_SSL.format(os.environ['SUPERVISOR_SHARE'])
|
||||
return HASSIO_SSL.format(self.path_hassio_docker)
|
||||
|
||||
@property
|
||||
def path_ssl(self):
|
||||
"""Return SSL path inside supervisor."""
|
||||
return HASSIO_SSL.format(HASSIO_SHARE)
|
||||
|
||||
@property
|
||||
def path_addons_repo(self):
|
||||
"""Return git repo path for addons."""
|
||||
return ADDONS_REPO.format(HASSIO_SHARE)
|
||||
|
||||
@property
|
||||
def path_addons_custom(self):
|
||||
"""Return path for customs addons."""
|
||||
return ADDONS_CUSTOM.format(HASSIO_SHARE)
|
||||
|
||||
@property
|
||||
def path_addons_data(self):
|
||||
"""Return root addon data folder."""
|
||||
return ADDONS_DATA.format(HASSIO_SHARE)
|
||||
|
||||
@property
|
||||
def path_addons_data_docker(self):
|
||||
"""Return root addon data folder extern for docker."""
|
||||
return ADDONS_DATA.format(self.path_hassio_docker)
|
||||
|
@@ -1,16 +1,22 @@
"""Const file for HassIO."""
HASSIO_VERSION = '0.4'
HASSIO_VERSION = '0.9'

URL_HASSIO_VERSION = \
    'https://raw.githubusercontent.com/pvizeli/hassio/master/version.json'
URL_HASSIO_VERSION_BETA = \
    'https://raw.githubusercontent.com/pvizeli/hassio/master/version_beta.json'

URL_ADDONS_REPO = 'https://github.com/pvizeli/hassio-addons'
URL_HASSIO_ADDONS = 'https://github.com/pvizeli/hassio-addons'

DOCKER_REPO = "pvizeli"

HASSIO_SHARE = "/data"

RUN_UPDATE_INFO_TASKS = 28800
RUN_UPDATE_SUPERVISOR_TASKS = 29100
RUN_RELOAD_ADDONS_TASKS = 28800

RESTART_EXIT_CODE = 100

FILE_HASSIO_ADDONS = "{}/addons.json".format(HASSIO_SHARE)
FILE_HASSIO_CONFIG = "{}/config.json".format(HASSIO_SHARE)
@@ -25,6 +31,31 @@ JSON_MESSAGE = 'message'
RESULT_ERROR = 'error'
RESULT_OK = 'ok'

ATTR_ADDONS = 'addons'
ATTR_VERSION = 'version'
ATTR_CURRENT = 'current'
ATTR_BETA = 'beta'
ATTR_NAME = 'name'
ATTR_SLUG = 'slug'
ATTR_DESCRIPTON = 'description'
ATTR_STARTUP = 'startup'
ATTR_BOOT = 'boot'
ATTR_PORTS = 'ports'
ATTR_MAP_CONFIG = 'map_config'
ATTR_MAP_SSL = 'map_ssl'
ATTR_MAP_HASSIO = 'map_hassio'
ATTR_OPTIONS = 'options'
ATTR_INSTALLED = 'installed'
ATTR_STATE = 'state'
ATTR_SCHEMA = 'schema'
ATTR_IMAGE = 'image'

STARTUP_BEFORE = 'before'
STARTUP_AFTER = 'after'
STARTUP_ONCE = 'once'

BOOT_AUTO = 'auto'
BOOT_MANUAL = 'manual'

STATE_STARTED = 'started'
STATE_STOPPED = 'stopped'

@@ -6,12 +6,16 @@ import aiohttp
|
||||
import docker
|
||||
|
||||
from . import bootstrap
|
||||
from .addons import AddonManager
|
||||
from .api import RestAPI
|
||||
from .host_controll import HostControll
|
||||
from .const import SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS
|
||||
from .const import (
|
||||
SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
|
||||
RUN_UPDATE_SUPERVISOR_TASKS, STARTUP_AFTER, STARTUP_BEFORE)
|
||||
from .scheduler import Scheduler
|
||||
from .dock.homeassistant import DockerHomeAssistant
|
||||
from .dock.supervisor import DockerSupervisor
|
||||
from .tools import get_arch_from_image, get_local_ip
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -21,6 +25,7 @@ class HassIO(object):
|
||||
|
||||
def __init__(self, loop):
|
||||
"""Initialize hassio object."""
|
||||
self.exit_code = 0
|
||||
self.loop = loop
|
||||
self.websession = aiohttp.ClientSession(loop=self.loop)
|
||||
self.config = bootstrap.initialize_system_data(self.websession)
|
||||
@@ -31,17 +36,24 @@ class HassIO(object):
|
||||
|
||||
# init basic docker container
|
||||
self.supervisor = DockerSupervisor(
|
||||
self.config, self.loop, self.dock)
|
||||
self.config, self.loop, self.dock, self)
|
||||
self.homeassistant = DockerHomeAssistant(
|
||||
self.config, self.loop, self.dock)
|
||||
|
||||
# init HostControll
|
||||
self.host_controll = HostControll(self.loop)
|
||||
|
||||
# init addon system
|
||||
self.addons = AddonManager(self.config, self.loop, self.dock)
|
||||
|
||||
async def setup(self):
|
||||
"""Setup HassIO orchestration."""
|
||||
# supervisor
|
||||
await self.supervisor.attach()
|
||||
await self.supervisor.cleanup()
|
||||
|
||||
# set api endpoint
|
||||
self.config.api_endpoint = await get_local_ip(self.loop)
|
||||
|
||||
# hostcontroll
|
||||
host_info = await self.host_controll.info()
|
||||
@@ -56,32 +68,62 @@ class HassIO(object):
|
||||
# rest api views
|
||||
self.api.register_host(self.host_controll)
|
||||
self.api.register_network(self.host_controll)
|
||||
self.api.register_supervisor(self.host_controll)
|
||||
self.api.register_supervisor(self.supervisor, self.addons)
|
||||
self.api.register_homeassistant(self.homeassistant)
|
||||
self.api.register_addons(self.addons)
|
||||
|
||||
# schedule update info tasks
|
||||
self.scheduler.register_task(
|
||||
self.config.fetch_update_infos, RUN_UPDATE_INFO_TASKS,
|
||||
first_run=True)
|
||||
now=True)
|
||||
|
||||
# first start of supervisor?
|
||||
if not await self.homeassistant.exists():
|
||||
_LOGGER.info("No HomeAssistant docker found.")
|
||||
await self._setup_homeassistant()
|
||||
|
||||
# Load addons
|
||||
arch = get_arch_from_image(self.supervisor.image)
|
||||
await self.addons.prepare(arch)
|
||||
|
||||
# schedule addon update task
|
||||
self.scheduler.register_task(
|
||||
self.addons.relaod, RUN_RELOAD_ADDONS_TASKS, now=True)
|
||||
|
||||
# schedule self update task
|
||||
self.scheduler.register_task(
|
||||
self._hassio_update, RUN_UPDATE_SUPERVISOR_TASKS)
|
||||
|
||||
async def start(self):
|
||||
"""Start HassIO orchestration."""
|
||||
# start api
|
||||
await self.api.start()
|
||||
_LOGGER.info("Start hassio api on %s", self.config.api_endpoint)
|
||||
|
||||
# HomeAssistant is already running / supervisor have only reboot
|
||||
if await self.homeassistant.is_running():
|
||||
_LOGGER.info("HassIO reboot detected")
|
||||
return
|
||||
|
||||
# start addon mark as before
|
||||
await self.addons.auto_boot(STARTUP_BEFORE)
|
||||
|
||||
# run HomeAssistant
|
||||
await self.homeassistant.run()
|
||||
|
||||
async def stop(self):
|
||||
# start addon mark as after
|
||||
await self.addons.auto_boot(STARTUP_AFTER)
|
||||
|
||||
async def stop(self, exit_code=0):
|
||||
"""Stop a running orchestration."""
|
||||
# don't process scheduler anymore
|
||||
self.scheduler.stop()
|
||||
|
||||
# process stop task pararell
|
||||
tasks = [self.websession.close(), self.api.stop()]
|
||||
await asyncio.wait(tasks, loop=self.loop)
|
||||
|
||||
self.exit_code = exit_code
|
||||
self.loop.stop()
|
||||
|
||||
async def _setup_homeassistant(self):
|
||||
@@ -99,3 +141,12 @@ class HassIO(object):
|
||||
|
||||
# store version
|
||||
_LOGGER.info("HomeAssistant docker now installed.")
|
||||
|
||||
async def _hassio_update(self):
|
||||
"""Check and run update of supervisor hassio."""
|
||||
if self.config.current_hassio == self.supervisor.version:
|
||||
return
|
||||
|
||||
_LOGGER.info(
|
||||
"Found new HassIO version %s.", self.config.current_hassio)
|
||||
await self.supervisor.update(self.config.current_hassio)
|
||||
|
@@ -53,7 +53,7 @@ class DockerBase(object):
|
||||
|
||||
image.tag(self.image, tag='latest')
|
||||
self.version = get_version_from_env(image.attrs['Config']['Env'])
|
||||
_LOGGER.info("Tag image %s with version %s as latest.",
|
||||
_LOGGER.info("Tag image %s with version %s as latest",
|
||||
self.image, self.version)
|
||||
except docker.errors.APIError as err:
|
||||
_LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
|
||||
@@ -122,7 +122,7 @@ class DockerBase(object):
|
||||
self.image = self.container.attrs['Config']['Image']
|
||||
self.version = get_version_from_env(
|
||||
self.container.attrs['Config']['Env'])
|
||||
_LOGGER.info("Attach to image %s with version %s.",
|
||||
_LOGGER.info("Attach to image %s with version %s",
|
||||
self.image, self.version)
|
||||
except (docker.errors.DockerException, KeyError):
|
||||
_LOGGER.fatal(
|
||||
@@ -138,8 +138,6 @@ class DockerBase(object):
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
_LOGGER.info("Run docker image %s with version %s.",
|
||||
self.image, self.version)
|
||||
return await self.loop.run_in_executor(None, self._run)
|
||||
|
||||
def _run(self):
|
||||
@@ -167,6 +165,8 @@ class DockerBase(object):
|
||||
if not self.container:
|
||||
return
|
||||
|
||||
_LOGGER.info("Stop %s docker application.", self.image)
|
||||
|
||||
self.container.reload()
|
||||
if self.container.status == 'running':
|
||||
with suppress(docker.errors.DockerException):
|
||||
@@ -177,11 +177,39 @@ class DockerBase(object):
|
||||
|
||||
self.container = None
|
||||
|
||||
async def update(self, tag):
|
||||
"""Update a docker image.
|
||||
async def remove(self):
|
||||
"""Remove docker container."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute remove while a task is in progress")
|
||||
return False
|
||||
|
||||
Return a Future.
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._remove)
|
||||
|
||||
def _remove(self):
|
||||
"""remove docker container.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
if self._is_running():
|
||||
self._stop()
|
||||
|
||||
_LOGGER.info("Remove docker %s with latest and %s",
|
||||
self.image, self.version)
|
||||
|
||||
try:
|
||||
self.dock.images.remove(
|
||||
image="{}:latest".format(self.image), force=True)
|
||||
self.dock.images.remove(
|
||||
image="{}:{}".format(self.image, self.version), force=True)
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.warning("Can't remove image %s -> %s", self.image, err)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
async def update(self, tag):
|
||||
"""Update a docker image."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute update while a task is in progress")
|
||||
return False
|
||||
@@ -194,10 +222,10 @@ class DockerBase(object):
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
old_image = "{}:{}".format(self.image, self.version)
|
||||
old_run = self._is_running()
|
||||
old_image = "{}:{}".format(self.image, self.version)
|
||||
|
||||
_LOGGER.info("Update docker %s with %s:%s.",
|
||||
_LOGGER.info("Update docker %s with %s:%s",
|
||||
old_image, self.image, tag)
|
||||
|
||||
# update docker image
|
||||
@@ -208,7 +236,7 @@ class DockerBase(object):
|
||||
self.dock.images.remove(image=old_image, force=True)
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.warning(
|
||||
"Can't remove old image %s -> %s.", old_image, err)
|
||||
"Can't remove old image %s -> %s", old_image, err)
|
||||
# restore
|
||||
if old_run:
|
||||
self._run()
|
||||
|
hassio/dock/addon.py (new file, 81 lines)
@@ -0,0 +1,81 @@
"""Init file for HassIO addon docker object."""
import logging

import docker

from . import DockerBase
from ..tools import get_version_from_env

_LOGGER = logging.getLogger(__name__)

HASS_DOCKER_NAME = 'homeassistant'


class DockerAddon(DockerBase):
    """Docker hassio wrapper for HomeAssistant."""

    def __init__(self, config, loop, dock, addons_data, addon):
        """Initialize docker homeassistant wrapper."""
        super().__init__(
            config, loop, dock, image=addons_data.get_image(addon))
        self.addon = addon
        self.addons_data = addons_data

    @property
    def docker_name(self):
        """Return name of docker container."""
        return "addon_{}".format(self.addons_data.get_slug(self.addon))

    def _run(self):
        """Run docker image.

        Need run inside executor.
        """
        if self._is_running():
            return

        # cleanup old container
        self._stop()

        # volumes
        volumes = {
            self.addons_data.path_data_docker(self.addon): {
                'bind': '/data', 'mode': 'rw'
            }}
        if self.addons_data.need_config(self.addon):
            volumes.update({
                self.config.path_config_docker: {
                    'bind': '/config', 'mode': 'rw'
                }})
        if self.addons_data.need_ssl(self.addon):
            volumes.update({
                self.config.path_ssl_docker: {
                    'bind': '/ssl', 'mode': 'rw'
                }})
        if self.addons_data.need_hassio(self.addon):
            volumes.update({
                self.config.path_hassio_docker: {
                    'bind': '/hassio', 'mode': 'rw'
                }})

        try:
            self.container = self.dock.containers.run(
                self.image,
                name=self.docker_name,
                detach=True,
                network_mode='bridge',
                ports=self.addons_data.get_ports(self.addon),
                volumes=volumes,
            )

            self.version = get_version_from_env(
                self.container.attrs['Config']['Env'])

            _LOGGER.info("Start docker addon %s with version %s",
                         self.image, self.version)

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't run %s -> %s", self.image, err)
            return False

        return True
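The `_run()` method above builds a host-path to container-path volume map and hands it to the Docker SDK together with a port mapping. A standalone sketch of the same pattern (the image name, host paths, and ports below are made-up examples, not values from the project):

```python
# Sketch only: demonstrates the docker-py volumes/ports format used above.
import docker

client = docker.from_env()

volumes = {
    '/usr/share/hassio/addons/data/xy': {'bind': '/data', 'mode': 'rw'},    # hypothetical host path
    '/usr/share/hassio/homeassistant': {'bind': '/config', 'mode': 'rw'},   # hypothetical host path
}

container = client.containers.run(
    'example/addon-image:latest',    # hypothetical image
    name='addon_xy',
    detach=True,
    network_mode='bridge',
    ports={'8080/tcp': 8080},        # container port -> host port
    volumes=volumes,
)
print(container.status)
```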
@@ -4,7 +4,7 @@ import logging
import docker

from . import DockerBase
from ..tools import get_version_from_env, get_local_ip
from ..tools import get_version_from_env

_LOGGER = logging.getLogger(__name__)

@@ -31,8 +31,6 @@ class DockerHomeAssistant(DockerBase):
        if self._is_running():
            return

        api_endpoint = get_local_ip(self.loop)

        # cleanup old container
        self._stop()

@@ -43,12 +41,8 @@ class DockerHomeAssistant(DockerBase):
                detach=True,
                privileged=True,
                network_mode='host',
                restart_policy={
                    "Name": "always",
                    "MaximumRetryCount": 10,
                },
                environment={
                    'HASSIO': api_endpoint,
                    'HASSIO': self.config.api_endpoint,
                },
                volumes={
                    self.config.path_config_docker:

@@ -59,8 +53,12 @@ class DockerHomeAssistant(DockerBase):

            self.version = get_version_from_env(
                self.container.attrs['Config']['Env'])

            _LOGGER.info("Start docker addon %s with version %s",
                         self.image, self.version)

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't run %s -> %s.", self.image, err)
            _LOGGER.error("Can't run %s -> %s", self.image, err)
            return False

        return True
@@ -1,17 +1,71 @@
"""Init file for HassIO docker object."""
import logging
import os

import docker

from . import DockerBase
from ..const import RESTART_EXIT_CODE

_LOGGER = logging.getLogger(__name__)


class DockerSupervisor(DockerBase):
    """Docker hassio wrapper for HomeAssistant."""

    def __init__(self, config, loop, dock, hassio, image=None):
        """Initialize docker base wrapper."""
        super().__init__(config, loop, dock, image=image)

        self.hassio = hassio

    @property
    def docker_name(self):
        """Return name of docker container."""
        return os.environ['SUPERVISOR_NAME']

    async def update(self, tag):
        """Update a supervisor docker image."""
        if self._lock.locked():
            _LOGGER.error("Can't execute update while a task is in progress")
            return False

        _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)
        old_version = self.version

        async with self._lock:
            if await self.loop.run_in_executor(None, self._install, tag):
                self.config.hassio_cleanup = old_version
                self.loop.create_task(self.hassio.stop(RESTART_EXIT_CODE))
                return True

            return False

    async def cleanup(self):
        """Check if old supervisor version exists and cleanup."""
        if not self.config.hassio_cleanup:
            return

        async with self._lock:
            if await self.loop.run_in_executor(None, self._cleanup):
                self.config.hassio_cleanup = None

    def _cleanup(self):
        """Remove old image.

        Need run inside executor.
        """
        old_image = "{}:{}".format(self.image, self.config.hassio_cleanup)

        _LOGGER.info("Old supervisor docker found %s", old_image)
        try:
            self.dock.images.remove(image=old_image, force=True)
        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't remove old image %s -> %s", old_image, err)
            return False

        return True

    async def run(self):
        """Run docker image."""
        raise RuntimeError("Not supported on supervisor docker container!")

@@ -24,6 +78,6 @@ class DockerSupervisor(DockerBase):
        """Stop/remove docker container."""
        raise RuntimeError("Not supported on supervisor docker container!")

    async def update(self, tag):
        """Update docker image."""
    async def remove(self):
        """Remove docker image."""
        raise RuntimeError("Not supported on supervisor docker container!")
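The supervisor update above works by pulling the new image, remembering the old tag for a later `cleanup()` pass, and then stopping the running core with a dedicated restart exit code so the host starts the new container. A simplified sketch of that control flow (the constant value and the helper callables are illustrative assumptions, not the project's actual API):

```python
# Sketch of the self-update flow, not the repository's code.
import asyncio

RESTART_EXIT_CODE = 100  # assumed value; the real constant lives in hassio.const


class SupervisorUpdater:
    def __init__(self, loop, pull_image, stop_with_code):
        self.loop = loop
        self.pull_image = pull_image          # blocking callable: pulls the new tag
        self.stop_with_code = stop_with_code  # coroutine: stops the supervisor core
        self.cleanup_tag = None

    async def update(self, tag, old_tag):
        """Pull the new image, mark the old one for cleanup, trigger restart."""
        if not await self.loop.run_in_executor(None, self.pull_image, tag):
            return False

        self.cleanup_tag = old_tag
        self.loop.create_task(self.stop_with_code(RESTART_EXIT_CODE))
        return True
```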
@@ -14,9 +14,8 @@ _LOGGER = logging.getLogger(__name__)
TIMEOUT = 15

LEVEL_POWER = 1
LEVEL_UPDATE_SUPERVISOR = 2
LEVEL_UPDATE_HOST = 4
LEVEL_NETWORK = 8
LEVEL_UPDATE_HOST = 2
LEVEL_NETWORK = 4


class HostControll(object):

@@ -101,12 +100,3 @@ class HostControll(object):
        if version:
            return self._send_command("host-update {}".format(version))
        return self._send_command("host-update")

    def supervisor_update(self, version=None):
        """Update the supervisor on host system.

        Return a coroutine.
        """
        if version:
            return self._send_command("supervisor-update {}".format(version))
        return self._send_command("supervisor-update")
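With the supervisor-update command gone, the remaining host daemon levels form a smaller set of flags. Assuming the level value reported by the host is a bitwise OR of these constants (an interpretation for illustration, not something stated in this diff), it can be decoded like this:

```python
# Sketch: decode a host daemon "level" bitmask using the constants above.
LEVEL_POWER = 1
LEVEL_UPDATE_HOST = 2
LEVEL_NETWORK = 4


def supported_features(level):
    """Map the level bitmask to human-readable feature names."""
    features = []
    if level & LEVEL_POWER:
        features.append("power")
    if level & LEVEL_UPDATE_HOST:
        features.append("host-update")
    if level & LEVEL_NETWORK:
        features.append("network")
    return features


print(supported_features(7))  # ['power', 'host-update', 'network']
```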
@@ -16,9 +16,14 @@ class Scheduler(object):
        """Initialize task schedule."""
        self.loop = loop
        self._data = {}
        self._stop = False

    def stop(self):
        """Stop to execute tasks in scheduler."""
        self._stop = True

    def register_task(self, coro_callback, seconds, repeat=True,
                      first_run=False):
                      now=False):
        """Schedule a coroutine.

        The coroutine needs to be a callback without arguments.

@@ -34,7 +39,7 @@ class Scheduler(object):
        self._data[idx] = opts

        # schedule task
        if first_run:
        if now:
            self._run_task(idx)
        else:
            task = self.loop.call_later(seconds, self._run_task, idx)

@@ -46,6 +51,10 @@ class Scheduler(object):
        """Run a scheduled task."""
        data = self._data.pop(idx)

        # stop execute tasks
        if self._stop:
            return

        self.loop.create_task(data[CALL]())

        if data[REPEAT]:
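Given the `register_task()` signature above, a scheduler with this interface might be driven as follows. This is a usage sketch: the import path, the `Scheduler(loop)` constructor call, and the example coroutine are assumptions for illustration.

```python
import asyncio

# from hassio.scheduler import Scheduler  # assumed module path


async def refresh_versions():
    """Example periodic job."""
    print("checking for new versions...")


def main():
    loop = asyncio.get_event_loop()
    scheduler = Scheduler(loop)  # assumed constructor: Scheduler(loop)

    # run once immediately, then repeat every 300 seconds
    scheduler.register_task(refresh_versions, 300, repeat=True, now=True)

    try:
        loop.run_forever()
    finally:
        scheduler.stop()
        loop.close()
```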
@@ -1,5 +1,6 @@
"""Tools file for HassIO."""
import asyncio
import json
import logging
import re
import socket

@@ -12,6 +13,7 @@ from .const import URL_HASSIO_VERSION, URL_HASSIO_VERSION_BETA
_LOGGER = logging.getLogger(__name__)

_RE_VERSION = re.compile(r"VERSION=(.*)")
_IMAGE_ARCH = re.compile(r".*/([a-z0-9]*)-hassio-supervisor")


async def fetch_current_versions(websession, beta=False):

@@ -25,9 +27,19 @@ async def fetch_current_versions(websession, beta=False):
        async with websession.get(url) as request:
            return await request.json(content_type=None)

    except (ValueError, aiohttp.ClientError, asyncio.TimeoutError) as err:
    except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
        _LOGGER.warning("Can't fetch versions from %s! %s", url, err)

    except json.JSONDecodeError as err:
        _LOGGER.warning("Can't parse versions from %s! %s", url, err)


def get_arch_from_image(image):
    """Return arch from hassio image name."""
    found = _IMAGE_ARCH.match(image)
    if found:
        return found.group(1)
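The new `_IMAGE_ARCH` pattern pulls the architecture out of a supervisor image name. A quick demonstration of what the regex captures (the image names below are illustrative, not taken from the repository):

```python
import re

_IMAGE_ARCH = re.compile(r".*/([a-z0-9]*)-hassio-supervisor")

# hypothetical image names, used only to show the capture group
print(_IMAGE_ARCH.match("pvizeli/armhf-hassio-supervisor").group(1))  # armhf
print(_IMAGE_ARCH.match("pvizeli/amd64-hassio-supervisor").group(1))  # amd64
print(_IMAGE_ARCH.match("some/other-image"))                          # None -> no arch found
```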
def get_version_from_env(env_list):
    """Extract Version from ENV list."""

@@ -43,16 +55,37 @@ def get_version_from_env(env_list):
def get_local_ip(loop):
    """Retrieve local IP address.

    Need run inside executor.
    Return a future.
    """
    def local_ip():
        """Return local ip."""
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

            # Use Google Public DNS server to determine own IP
            sock.connect(('8.8.8.8', 80))

            return sock.getsockname()[0]
        except socket.error:
            return socket.gethostbyname(socket.gethostname())
        finally:
            sock.close()

    return loop.run_in_executor(None, local_ip)
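Because `get_local_ip()` now returns a future (the socket probing runs in the default executor), callers simply await it from a coroutine. A minimal usage sketch, assuming the function shown above is importable:

```python
import asyncio

# from hassio.tools import get_local_ip  # assumed import path


async def show_ip(loop):
    ip_address = await get_local_ip(loop)
    print("Local IP:", ip_address)


loop = asyncio.get_event_loop()
loop.run_until_complete(show_ip(loop))
```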
def write_json_file(jsonfile, data):
    """Write a json file."""
    try:
        with open(jsonfile, 'w') as conf_file:
            conf_file.write(json.dumps(data))
    except OSError:
        return False

    return True


def read_json_file(jsonfile):
    """Read a json file and return a dict."""
    with open(jsonfile, 'r') as cfile:
        return json.loads(cfile.read())
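A quick roundtrip with the two helpers above, assuming they are in scope (the file path is just an example):

```python
# Roundtrip sketch for write_json_file / read_json_file shown above.
data = {"hassio_tag": "0.9", "homeassistant_tag": "0.42.3"}

if write_json_file("/tmp/example.json", data):   # example path
    restored = read_json_file("/tmp/example.json")
    assert restored == data
```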
4
setup.py
@@ -29,12 +29,14 @@ setup(
    keywords=['docker', 'home-assistant', 'api'],
    zip_safe=False,
    platforms='any',
    packages=['hassio', 'hassio.dock', 'hassio.api'],
    packages=['hassio', 'hassio.dock', 'hassio.api', 'hassio.addons'],
    include_package_data=True,
    install_requires=[
        'async_timeout',
        'aiohttp',
        'docker',
        'colorlog',
        'voluptuous',
        'gitpython',
    ]
)
@@ -1,6 +1,6 @@
{
    "hassio_tag": "0.4",
    "homeassistant_tag": "0.41",
    "hassio_tag": "0.9",
    "homeassistant_tag": "0.42.3",
    "resinos_version": "0.3",
    "resinhup_version": "0.1"
}

@@ -1,6 +1,6 @@
{
    "hassio_tag": "0.4",
    "homeassistant_tag": "0.41",
    "hassio_tag": "0.9",
    "homeassistant_tag": "0.42.3",
    "resinos_version": "0.3",
    "resinhup_version": "0.1"
}