mirror of
https://github.com/home-assistant/supervisor.git
synced 2025-09-01 20:30:20 +00:00
Compare commits
52 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
cc63008a86 | ||
![]() |
f9c7371140 | ||
![]() |
71590f90ae | ||
![]() |
e1028d6eca | ||
![]() |
f231d54daa | ||
![]() |
094c5968f4 | ||
![]() |
6c217d506c | ||
![]() |
0d867af79f | ||
![]() |
c9876988da | ||
![]() |
454d82d985 | ||
![]() |
14ee26ea29 | ||
![]() |
86a7f11f64 | ||
![]() |
78d1e1d9e7 | ||
![]() |
a3f67809a6 | ||
![]() |
e0be15cb45 | ||
![]() |
f1ce5faf17 | ||
![]() |
322480bba1 | ||
![]() |
d2db89a665 | ||
![]() |
fc17893158 | ||
![]() |
e2bf267713 | ||
![]() |
e25d30af52 | ||
![]() |
2bd1636097 | ||
![]() |
c019d1f3c5 | ||
![]() |
5b23347563 | ||
![]() |
daab4a86b2 | ||
![]() |
5831177fd8 | ||
![]() |
f9500f6d90 | ||
![]() |
29ac861b87 | ||
![]() |
b05f2db023 | ||
![]() |
8af1dfc882 | ||
![]() |
c76e851029 | ||
![]() |
b5ec1e0cfd | ||
![]() |
fe72e768ec | ||
![]() |
360f546ab0 | ||
![]() |
eb0ee31b5a | ||
![]() |
62df079be7 | ||
![]() |
40e5e6eb9d | ||
![]() |
dbc080c24d | ||
![]() |
f340a19e40 | ||
![]() |
20856126c8 | ||
![]() |
3ef76a4ada | ||
![]() |
14500d3ac4 | ||
![]() |
318ca828cc | ||
![]() |
5c70d68262 | ||
![]() |
082770256b | ||
![]() |
ae003e5b76 | ||
![]() |
530f17d502 | ||
![]() |
f127de8059 | ||
![]() |
9afb136648 | ||
![]() |
07239fec08 | ||
![]() |
23661dc2fd | ||
![]() |
de34c058a1 |
16
API.md
16
API.md
@@ -11,7 +11,6 @@ Communicate over unix socket with a host daemon.
|
|||||||
# reboot
|
# reboot
|
||||||
# shutdown
|
# shutdown
|
||||||
# host-update [v]
|
# host-update [v]
|
||||||
# supervisor-update [v]
|
|
||||||
|
|
||||||
# network info
|
# network info
|
||||||
# network hostname xy
|
# network hostname xy
|
||||||
@@ -24,9 +23,8 @@ Communicate over unix socket with a host daemon.
|
|||||||
|
|
||||||
level:
|
level:
|
||||||
- 1: power functions
|
- 1: power functions
|
||||||
- 2: supervisor update
|
- 2: host update
|
||||||
- 4: host update
|
- 4: network functions
|
||||||
- 8: network functions
|
|
||||||
|
|
||||||
Answer:
|
Answer:
|
||||||
```
|
```
|
||||||
@@ -69,7 +67,15 @@ On success
|
|||||||
"version": "INSTALL_VERSION",
|
"version": "INSTALL_VERSION",
|
||||||
"current": "CURRENT_VERSION",
|
"current": "CURRENT_VERSION",
|
||||||
"beta": "true|false",
|
"beta": "true|false",
|
||||||
"addons": {}
|
"addons": [
|
||||||
|
{
|
||||||
|
"name": "xy bla",
|
||||||
|
"slug": "xy",
|
||||||
|
"version": "CURRENT_VERSION",
|
||||||
|
"installed": "none|INSTALL_VERSION",
|
||||||
|
"description": "description"
|
||||||
|
}
|
||||||
|
]
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@@ -6,13 +6,6 @@ It is a docker image (supervisor) they manage HomeAssistant docker and give a in
|
|||||||
[HassIO-Addons](https://github.com/pvizeli/hassio-addons)
|
[HassIO-Addons](https://github.com/pvizeli/hassio-addons)
|
||||||
[HassIO-Build](https://github.com/pvizeli/hassio-build)
|
[HassIO-Build](https://github.com/pvizeli/hassio-build)
|
||||||
|
|
||||||
## History
|
|
||||||
- **0.1**: Initial supervisor with setup HomeAssistant docker
|
|
||||||
- **0.2**: Support for basic HostControll
|
|
||||||
- **0.3**: Refactor code and add basic rest api
|
|
||||||
- **0.4**: Move network api code / ssl folder
|
|
||||||
- **0.5**: Make api compatible to hass component v1
|
|
||||||
|
|
||||||
# Hardware Image
|
# Hardware Image
|
||||||
The image is based on ResinOS and Yocto Linux. It comes with the HassIO supervisor pre-installed. This includes support for updating the supervisor over the air. After flashing, your host OS will not require any more maintenance! The image does not include Home Assistant; instead, it will be downloaded when the image boots up for the first time.
|
The image is based on ResinOS and Yocto Linux. It comes with the HassIO supervisor pre-installed. This includes support for updating the supervisor over the air. After flashing, your host OS will not require any more maintenance! The image does not include Home Assistant; instead, it will be downloaded when the image boots up for the first time.
|
||||||
|
|
||||||
|
@@ -2,6 +2,7 @@
|
|||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
import signal
|
import signal
|
||||||
|
import sys
|
||||||
|
|
||||||
import hassio.bootstrap as bootstrap
|
import hassio.bootstrap as bootstrap
|
||||||
import hassio.core as core
|
import hassio.core as core
|
||||||
@@ -23,7 +24,7 @@ if __name__ == "__main__":
|
|||||||
loop.run_until_complete(hassio.setup())
|
loop.run_until_complete(hassio.setup())
|
||||||
|
|
||||||
_LOGGER.info("Start Hassio task")
|
_LOGGER.info("Start Hassio task")
|
||||||
loop.call_soon_threadsafe(asyncio.ensure_future, hassio.start(), loop)
|
loop.call_soon_threadsafe(loop.create_task, hassio.start())
|
||||||
|
|
||||||
try:
|
try:
|
||||||
loop.add_signal_handler(
|
loop.add_signal_handler(
|
||||||
@@ -33,4 +34,6 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
loop.run_forever()
|
loop.run_forever()
|
||||||
loop.close()
|
loop.close()
|
||||||
|
|
||||||
_LOGGER.info("Close Hassio")
|
_LOGGER.info("Close Hassio")
|
||||||
|
sys.exit(hassio.exit_code)
|
||||||
|
159
hassio/addons/__init__.py
Normal file
159
hassio/addons/__init__.py
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
"""Init file for HassIO addons."""
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
from .data import AddonsData
|
||||||
|
from .git import AddonsRepo
|
||||||
|
from ..const import STATE_STOPPED, STATE_STARTED
|
||||||
|
from ..dock.addon import DockerAddon
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class AddonManager(AddonsData):
    """Manage addons inside HassIO.

    Extends AddonsData (the persisted addon metadata) with the runtime
    pieces: the git addon repository and one DockerAddon wrapper per
    installed addon (kept in ``self.dockers``).
    """

    def __init__(self, config, loop, dock):
        """Initialize the addon manager.

        config: shared CoreConfig object.
        loop:   asyncio event loop.
        dock:   docker client used by the DockerAddon wrappers.
        """
        super().__init__(config)

        self.loop = loop
        self.dock = dock
        self.repo = AddonsRepo(config, loop)
        # addon slug -> DockerAddon wrapper for installed addons
        self.dockers = {}

    async def prepare(self, arch):
        """Startup addon management.

        Loads the git repository metadata and creates a docker wrapper
        for every already-installed addon.
        """
        self.arch = arch

        # load addon repository
        if await self.repo.load():
            self.read_addons_repo()

        # load installed addons
        for addon in self.list_installed:
            self.dockers[addon] = DockerAddon(
                self.config, self.loop, self.dock, self, addon)

    async def relaod(self):
        """Update addons from repo and reload list.

        NOTE(review): method name keeps the upstream 'relaod' spelling
        because external callers depend on it.
        """
        if not await self.repo.pull():
            return
        self.read_addons_repo()

        # remove stalled addons that disappeared from the repository
        tasks = []
        for addon in self.list_removed:
            # BUGFIX: the %s placeholder previously had no argument
            _LOGGER.info("Old addon %s found", addon)
            tasks.append(self.loop.create_task(self.uninstall(addon)))

        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    async def auto_boot(self, start_type):
        """Boot addons with mode auto for the given startup type."""
        boot_list = self.list_startup(start_type)
        tasks = []

        for addon in boot_list:
            tasks.append(self.loop.create_task(self.start(addon)))

        _LOGGER.info("Startup %s run %d addons", start_type, len(tasks))
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    async def install(self, addon, version=None):
        """Install a addon.

        Returns True on success, False otherwise.  When *version* is
        None the repository's current version is installed.
        """
        if not self.exists_addon(addon):
            _LOGGER.error("Addon %s not exists for install", addon)
            return False

        if self.is_installed(addon):
            _LOGGER.error("Addon %s is already installed", addon)
            return False

        if not os.path.isdir(self.path_data(addon)):
            _LOGGER.info("Create Home-Assistant addon data folder %s",
                         self.path_data(addon))
            os.mkdir(self.path_data(addon))

        addon_docker = DockerAddon(
            self.config, self.loop, self.dock, self, addon)

        version = version or self.get_version(addon)
        if not await addon_docker.install(version):
            return False

        # only record the install after the docker pull succeeded
        self.dockers[addon] = addon_docker
        self.set_install_addon(addon, version)
        return True

    async def uninstall(self, addon):
        """Remove a addon.

        Removes the docker container/image and the addon data folder.
        Returns True on success, False otherwise.
        """
        if not self.is_installed(addon):
            _LOGGER.error("Addon %s is already uninstalled", addon)
            return False

        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False

        if not await self.dockers[addon].remove():
            return False

        if os.path.isdir(self.path_data(addon)):
            _LOGGER.info("Remove Home-Assistant addon data folder %s",
                         self.path_data(addon))
            shutil.rmtree(self.path_data(addon))

        self.dockers.pop(addon)
        self.set_uninstall_addon(addon)
        return True

    async def state(self, addon):
        """Return running state of addon.

        Returns STATE_STARTED / STATE_STOPPED, or None when no docker
        wrapper exists for the addon.
        """
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return

        if await self.dockers[addon].is_running():
            return STATE_STARTED
        return STATE_STOPPED

    async def start(self, addon):
        """Set options and start addon.

        Writes the validated options file before starting the container.
        Returns True on success, False otherwise.
        """
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False

        if not self.write_addon_options(addon):
            _LOGGER.error("Can't write options for addon %s", addon)
            return False

        return await self.dockers[addon].run()

    async def stop(self, addon):
        """Stop addon.  Returns True on success, False otherwise."""
        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False

        return await self.dockers[addon].stop()

    async def update(self, addon, version=None):
        """Update addon to *version* (default: repository version).

        Persists the new version number only after the docker update
        succeeded.  Returns True on success, False otherwise.
        """
        if not self.is_installed(addon):
            _LOGGER.error("Addon %s is not installed", addon)
            return False

        if addon not in self.dockers:
            _LOGGER.error("No docker found for addon %s", addon)
            return False

        version = version or self.get_version(addon)
        if await self.dockers[addon].update(version):
            self.set_version(addon, version)
            return True
        return False
|
230
hassio/addons/data.py
Normal file
230
hassio/addons/data.py
Normal file
@@ -0,0 +1,230 @@
|
|||||||
|
"""Init file for HassIO addons."""
|
||||||
|
import logging
|
||||||
|
import glob
|
||||||
|
|
||||||
|
import voluptuous as vol
|
||||||
|
from voluptuous.humanize import humanize_error
|
||||||
|
|
||||||
|
from .validate import validate_options, SCHEMA_ADDON_CONFIG
|
||||||
|
from ..const import (
|
||||||
|
FILE_HASSIO_ADDONS, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON,
|
||||||
|
ATTR_STARTUP, ATTR_BOOT, ATTR_MAP_SSL, ATTR_MAP_CONFIG, ATTR_OPTIONS,
|
||||||
|
ATTR_PORTS, BOOT_AUTO, DOCKER_REPO, ATTR_INSTALLED, ATTR_SCHEMA,
|
||||||
|
ATTR_IMAGE, ATTR_MAP_HASSIO)
|
||||||
|
from ..config import Config
|
||||||
|
from ..tools import read_json_file, write_json_file
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
ADDONS_REPO_PATTERN = "{}/*/config.json"
|
||||||
|
|
||||||
|
|
||||||
|
class AddonsData(Config):
    """Hold data for addons inside HassIO.

    Two data stores are combined here:
    - ``self._data`` (from the Config base, persisted to
      FILE_HASSIO_ADDONS): the locally installed addons with their
      version and user options.
    - ``self._addons_data``: metadata read from the addon repository's
      config.json files (not persisted).
    """

    def __init__(self, config):
        """Initialize data holder."""
        super().__init__(FILE_HASSIO_ADDONS)
        self.config = config
        self._addons_data = {}
        # cpu arch, set later by AddonManager.prepare()
        self.arch = None

    def read_addons_repo(self):
        """Read data from addons repository."""
        self._addons_data = {}

        self._read_addons_folder(self.config.path_addons_repo)
        self._read_addons_folder(self.config.path_addons_custom)

    def _read_addons_folder(self, folder):
        """Read data from addons folder.

        Invalid or unreadable config.json files are skipped with a
        warning instead of aborting the whole scan.
        """
        pattern = ADDONS_REPO_PATTERN.format(folder)

        for addon in glob.iglob(pattern):
            try:
                addon_config = read_json_file(addon)

                addon_config = SCHEMA_ADDON_CONFIG(addon_config)
                self._addons_data[addon_config[ATTR_SLUG]] = addon_config

            except (OSError, KeyError):
                _LOGGER.warning("Can't read %s", addon)

            except vol.Invalid as ex:
                _LOGGER.warning("Can't read %s -> %s", addon,
                                humanize_error(addon_config, ex))

    @property
    def list_installed(self):
        """Return a set of installed addon slugs."""
        return set(self._data.keys())

    @property
    def list_all(self):
        """Return a set of all addon slugs known from the repository."""
        return set(self._addons_data.keys())

    @property
    def list(self):
        """Return a list of dicts describing all available addons."""
        data = []
        for addon, values in self._addons_data.items():
            data.append({
                ATTR_NAME: values[ATTR_NAME],
                ATTR_SLUG: values[ATTR_SLUG],
                ATTR_DESCRIPTON: values[ATTR_DESCRIPTON],
                ATTR_VERSION: values[ATTR_VERSION],
                # None when the addon is not installed
                ATTR_INSTALLED: self._data.get(addon, {}).get(ATTR_VERSION),
            })

        return data

    def list_startup(self, start_type):
        """Get list of installed addon with need start by type."""
        addon_list = set()
        for addon in self._data.keys():
            if self.get_boot(addon) != BOOT_AUTO:
                continue

            try:
                if self._addons_data[addon][ATTR_STARTUP] == start_type:
                    addon_list.add(addon)
            except KeyError:
                # installed locally but vanished from the repository
                _LOGGER.warning("Orphaned addon detect %s", addon)
                continue

        return addon_list

    @property
    def list_removed(self):
        """Return local addons they not support from repo."""
        addon_list = set()
        for addon in self._data.keys():
            if addon not in self._addons_data:
                addon_list.add(addon)

        return addon_list

    def exists_addon(self, addon):
        """Return True if a addon exists."""
        return addon in self._addons_data

    def is_installed(self, addon):
        """Return True if a addon is installed."""
        return addon in self._data

    def version_installed(self, addon):
        """Return installed version.  Raises KeyError if not installed."""
        return self._data[addon][ATTR_VERSION]

    def set_install_addon(self, addon, version):
        """Set addon as installed and persist it."""
        self._data[addon] = {
            ATTR_VERSION: version,
            ATTR_OPTIONS: {}
        }
        self.save()

    def set_uninstall_addon(self, addon):
        """Set addon as uninstalled and persist it."""
        self._data.pop(addon, None)
        self.save()

    def set_options(self, addon, options):
        """Store user addon options and persist them."""
        self._data[addon][ATTR_OPTIONS] = options
        self.save()

    def set_version(self, addon, version):
        """Update version of addon and persist it."""
        self._data[addon][ATTR_VERSION] = version
        self.save()

    def get_options(self, addon):
        """Return options with local changes.

        Repository defaults overlaid with the user's stored options.
        """
        # BUGFIX: copy the repo defaults before merging -- updating the
        # dict in place would permanently pollute _addons_data with
        # user options until the next repository reload.
        opt = dict(self._addons_data[addon][ATTR_OPTIONS])
        if addon in self._data:
            opt.update(self._data[addon][ATTR_OPTIONS])
        return opt

    def get_boot(self, addon):
        """Return boot config with prio local settings."""
        if ATTR_BOOT in self._data[addon]:
            return self._data[addon][ATTR_BOOT]

        return self._addons_data[addon][ATTR_BOOT]

    def get_name(self, addon):
        """Return name of addon."""
        return self._addons_data[addon][ATTR_NAME]

    def get_description(self, addon):
        """Return description of addon."""
        return self._addons_data[addon][ATTR_DESCRIPTON]

    def get_version(self, addon):
        """Return repository version of addon."""
        return self._addons_data[addon][ATTR_VERSION]

    def get_slug(self, addon):
        """Return slug of addon."""
        return self._addons_data[addon][ATTR_SLUG]

    def get_ports(self, addon):
        """Return ports of addon (None if not set)."""
        return self._addons_data[addon].get(ATTR_PORTS)

    def get_image(self, addon):
        """Return image name of addon.

        Falls back to the default '<repo>/<arch>-addon-<slug>' image
        when the addon config does not set an explicit image.
        """
        if ATTR_IMAGE not in self._addons_data[addon]:
            return "{}/{}-addon-{}".format(
                DOCKER_REPO, self.arch, self.get_slug(addon))

        return self._addons_data[addon][ATTR_IMAGE]

    def need_config(self, addon):
        """Return True if config map is needed."""
        return self._addons_data[addon][ATTR_MAP_CONFIG]

    def need_ssl(self, addon):
        """Return True if ssl map is needed."""
        return self._addons_data[addon][ATTR_MAP_SSL]

    def need_hassio(self, addon):
        """Return True if hassio map is needed."""
        return self._addons_data[addon][ATTR_MAP_HASSIO]

    def path_data(self, addon):
        """Return addon data path inside supervisor."""
        return "{}/{}".format(
            self.config.path_addons_data, self._addons_data[addon][ATTR_SLUG])

    def path_data_docker(self, addon):
        """Return addon data path external for docker."""
        return "{}/{}".format(self.config.path_addons_data_docker,
                              self._addons_data[addon][ATTR_SLUG])

    def path_addon_options(self, addon):
        """Return path to addons options file."""
        return "{}/options.json".format(self.path_data(addon))

    def write_addon_options(self, addon):
        """Return True if addon options is written to data."""
        schema = self.get_schema(addon)
        options = self.get_options(addon)

        try:
            schema(options)
            return write_json_file(self.path_addon_options(addon), options)
        except vol.Invalid as ex:
            _LOGGER.error("Addon %s have wrong options -> %s", addon,
                          humanize_error(options, ex))

        return False

    def get_schema(self, addon):
        """Create a voluptuous schema for addon options."""
        raw_schema = self._addons_data[addon][ATTR_SCHEMA]

        schema = vol.Schema(vol.All(dict, validate_options(raw_schema)))
        return schema
|
71
hassio/addons/git.py
Normal file
71
hassio/addons/git.py
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
"""Init file for HassIO addons git."""
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
|
||||||
|
import git
|
||||||
|
|
||||||
|
from ..const import URL_HASSIO_ADDONS
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class AddonsRepo(object):
    """Manage addons git repo.

    All git operations run in an executor thread and are serialized
    through an asyncio lock.
    """

    def __init__(self, config, loop):
        """Initialize git repo wrapper."""
        self.config = config
        self.loop = loop
        self.repo = None
        # serializes load/clone/pull; git operations are not reentrant
        self._lock = asyncio.Lock(loop=loop)

    async def load(self):
        """Init git addon repo.

        Clones the repository if the local checkout does not exist yet,
        otherwise opens the existing one.  Returns True on success.
        """
        if not os.path.isdir(self.config.path_addons_repo):
            return await self.clone()

        async with self._lock:
            try:
                _LOGGER.info("Load addons repository")
                self.repo = await self.loop.run_in_executor(
                    None, git.Repo, self.config.path_addons_repo)

            # BUGFIX: also catch GitCommandError -- git failures raise
            # it and previously escaped this handler uncaught.
            except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                    git.GitCommandError) as err:
                _LOGGER.error("Can't load addons repo: %s.", err)
                return False

        return True

    async def clone(self):
        """Clone git addon repo.  Returns True on success."""
        async with self._lock:
            try:
                _LOGGER.info("Clone addons repository")
                self.repo = await self.loop.run_in_executor(
                    None, git.Repo.clone_from, URL_HASSIO_ADDONS,
                    self.config.path_addons_repo)

            # BUGFIX: clone_from failures (network, auth, ...) raise
            # GitCommandError, which was not caught before.
            except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                    git.GitCommandError) as err:
                _LOGGER.error("Can't clone addons repo: %s.", err)
                return False

        return True

    async def pull(self):
        """Pull git addon repo.

        Returns False without waiting when another git task is already
        running; returns True on success.
        """
        if self._lock.locked():
            _LOGGER.warning("It is already a task in progress.")
            return False

        async with self._lock:
            try:
                _LOGGER.info("Pull addons repository")
                await self.loop.run_in_executor(
                    None, self.repo.remotes.origin.pull)

            # BUGFIX: pull failures raise GitCommandError, which was
            # not caught before.
            except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                    git.GitCommandError) as err:
                _LOGGER.error("Can't pull addons repo: %s.", err)
                return False

        return True
|
113
hassio/addons/validate.py
Normal file
113
hassio/addons/validate.py
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
"""Validate addons options schema."""
|
||||||
|
import voluptuous as vol
|
||||||
|
|
||||||
|
from ..const import (
|
||||||
|
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
|
||||||
|
ATTR_BOOT, ATTR_MAP_SSL, ATTR_MAP_CONFIG, ATTR_OPTIONS,
|
||||||
|
ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER, STARTUP_BEFORE, BOOT_AUTO,
|
||||||
|
BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, ATTR_MAP_HASSIO)
|
||||||
|
|
||||||
|
# Supported value-type markers for addon option schemas.
V_STR = 'str'
V_INT = 'int'
V_FLOAT = 'float'
V_BOOL = 'bool'
V_EMAIL = 'email'
V_URL = 'url'

# A schema leaf must be exactly one of the supported type markers.
ADDON_ELEMENT = vol.In([V_STR, V_INT, V_FLOAT, V_BOOL, V_EMAIL, V_URL])

# Voluptuous schema for an addon's config.json file.
# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema({
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Required(ATTR_VERSION): vol.Coerce(str),
    vol.Required(ATTR_SLUG): vol.Coerce(str),
    vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
    # startup ordering relative to Home-Assistant boot
    vol.Required(ATTR_STARTUP):
        vol.In([STARTUP_BEFORE, STARTUP_AFTER, STARTUP_ONCE]),
    vol.Required(ATTR_BOOT):
        vol.In([BOOT_AUTO, BOOT_MANUAL]),
    vol.Optional(ATTR_PORTS): dict,
    # optional host resource mappings, all disabled by default
    vol.Optional(ATTR_MAP_CONFIG, default=False): vol.Boolean(),
    vol.Optional(ATTR_MAP_SSL, default=False): vol.Boolean(),
    vol.Optional(ATTR_MAP_HASSIO, default=False): vol.Boolean(),
    vol.Required(ATTR_OPTIONS): dict,
    # each option maps to a type marker, or a list of markers /
    # dicts of markers (for nested list options)
    vol.Required(ATTR_SCHEMA): {
        vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
            vol.Any(ADDON_ELEMENT, {vol.Coerce(str): ADDON_ELEMENT})
        ])
    },
    # optional custom docker image override in "repo/name" form
    vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
})
|
||||||
|
|
||||||
|
|
||||||
|
def validate_options(raw_schema):
    """Validate schema.

    Returns a callable suitable for use inside a voluptuous schema that
    checks a user-supplied options dict against *raw_schema*.
    """
    def validate(struct):
        """Create schema validator for addons options."""
        checked = {}

        for option, raw_value in struct.items():
            # reject options the addon schema does not declare
            if option not in raw_schema:
                raise vol.Invalid("Unknown options {}.".format(option))

            expected = raw_schema[option]
            try:
                if isinstance(expected, list):
                    # nested value: validate each element of the list
                    checked[option] = _nested_validate(
                        expected[0], raw_value)
                else:
                    # normal scalar value
                    checked[option] = _single_validate(expected, raw_value)
            except (IndexError, KeyError):
                raise vol.Invalid(
                    "Type error for {}.".format(option)) from None

        return checked

    return validate
|
||||||
|
|
||||||
|
|
||||||
|
# pylint: disable=no-value-for-parameter
|
||||||
|
# pylint: disable=no-value-for-parameter
def _single_validate(typ, value):
    """Validate a single element against a type marker.

    Coerces *value* to the type named by *typ* (one of the V_* markers)
    and raises vol.Invalid when the value cannot be converted.
    """
    try:
        if typ == V_STR:
            return str(value)
        elif typ == V_INT:
            return int(value)
        elif typ == V_FLOAT:
            return float(value)
        elif typ == V_BOOL:
            return vol.Boolean()(value)
        elif typ == V_EMAIL:
            return vol.Email()(value)
        elif typ == V_URL:
            return vol.Url()(value)

        # unknown type marker -- schema itself is broken
        raise vol.Invalid("Fatal error for {}.".format(value))
    # BUGFIX: int()/float() raise ValueError (not TypeError) on bad
    # strings like "abc"; previously that escaped as a raw ValueError
    # instead of being reported as a vol.Invalid.
    except (TypeError, ValueError):
        raise vol.Invalid(
            "Type {} error for {}.".format(typ, value)) from None
|
||||||
|
|
||||||
|
|
||||||
|
def _nested_validate(typ, data_list):
    """Validate nested items.

    *typ* is either a type marker (plain list of scalars) or a dict of
    type markers (list of dicts); every element of *data_list* is
    validated accordingly.
    """
    validated = []

    for entry in data_list:
        if isinstance(typ, dict):
            # dict list: validate each declared key of the entry
            nested = {}
            for sub_key, sub_value in entry.items():
                if sub_key not in typ:
                    raise vol.Invalid(
                        "Unknown nested options {}.".format(sub_key))

                nested[sub_key] = _single_validate(typ[sub_key], sub_value)
            validated.append(nested)
        else:
            # normal list of scalar values
            validated.append(_single_validate(typ, entry))

    return validated
|
@@ -3,10 +3,11 @@ import logging
|
|||||||
|
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
|
|
||||||
|
from .addons import APIAddons
|
||||||
|
from .homeassistant import APIHomeAssistant
|
||||||
from .host import APIHost
|
from .host import APIHost
|
||||||
from .network import APINetwork
|
from .network import APINetwork
|
||||||
from .supervisor import APISupervisor
|
from .supervisor import APISupervisor
|
||||||
from .homeassistant import APIHomeAssistant
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -40,9 +41,10 @@ class RestAPI(object):
|
|||||||
self.webapp.router.add_get('/network/info', api_net.info)
|
self.webapp.router.add_get('/network/info', api_net.info)
|
||||||
self.webapp.router.add_get('/network/options', api_net.options)
|
self.webapp.router.add_get('/network/options', api_net.options)
|
||||||
|
|
||||||
def register_supervisor(self, host_controll):
|
def register_supervisor(self, supervisor, addons):
|
||||||
"""Register supervisor function."""
|
"""Register supervisor function."""
|
||||||
api_supervisor = APISupervisor(self.config, self.loop, host_controll)
|
api_supervisor = APISupervisor(
|
||||||
|
self.config, self.loop, supervisor, addons)
|
||||||
|
|
||||||
self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
|
self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
|
||||||
self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
|
self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
|
||||||
@@ -57,6 +59,21 @@ class RestAPI(object):
|
|||||||
self.webapp.router.add_get('/homeassistant/info', api_hass.info)
|
self.webapp.router.add_get('/homeassistant/info', api_hass.info)
|
||||||
self.webapp.router.add_get('/homeassistant/update', api_hass.update)
|
self.webapp.router.add_get('/homeassistant/update', api_hass.update)
|
||||||
|
|
||||||
|
def register_addons(self, addons):
|
||||||
|
"""Register homeassistant function."""
|
||||||
|
api_addons = APIAddons(self.config, self.loop, addons)
|
||||||
|
|
||||||
|
self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
|
||||||
|
self.webapp.router.add_get(
|
||||||
|
'/addons/{addon}/install', api_addons.install)
|
||||||
|
self.webapp.router.add_get(
|
||||||
|
'/addons/{addon}/uninstall', api_addons.uninstall)
|
||||||
|
self.webapp.router.add_get('/addons/{addon}/start', api_addons.start)
|
||||||
|
self.webapp.router.add_get('/addons/{addon}/stop', api_addons.stop)
|
||||||
|
self.webapp.router.add_get('/addons/{addon}/update', api_addons.update)
|
||||||
|
self.webapp.router.add_get(
|
||||||
|
'/addons/{addon}/options', api_addons.options)
|
||||||
|
|
||||||
async def start(self):
|
async def start(self):
|
||||||
"""Run rest api webserver."""
|
"""Run rest api webserver."""
|
||||||
self._handler = self.webapp.make_handler(loop=self.loop)
|
self._handler = self.webapp.make_handler(loop=self.loop)
|
||||||
|
126
hassio/api/addons.py
Normal file
126
hassio/api/addons.py
Normal file
@@ -0,0 +1,126 @@
|
|||||||
|
"""Init file for HassIO homeassistant rest api."""
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import voluptuous as vol
|
||||||
|
from voluptuous.humanize import humanize_error
|
||||||
|
|
||||||
|
from .util import api_process, api_validate
|
||||||
|
from ..const import (
|
||||||
|
ATTR_VERSION, ATTR_CURRENT, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
|
||||||
|
STATE_STOPPED, STATE_STARTED)
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
SCHEMA_VERSION = vol.Schema({
|
||||||
|
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||||
|
})
|
||||||
|
|
||||||
|
|
||||||
|
class APIAddons(object):
|
||||||
|
"""Handle rest api for addons functions."""
|
||||||
|
|
||||||
|
def __init__(self, config, loop, addons):
|
||||||
|
"""Initialize homeassistant rest api part."""
|
||||||
|
self.config = config
|
||||||
|
self.loop = loop
|
||||||
|
self.addons = addons
|
||||||
|
|
||||||
|
def _extract_addon(self, request, check_installed=True):
|
||||||
|
"""Return addon and if not exists trow a exception."""
|
||||||
|
addon = request.match_info.get('addon')
|
||||||
|
|
||||||
|
# check data
|
||||||
|
if not self.addons.exists_addon(addon):
|
||||||
|
raise RuntimeError("Addon not exists")
|
||||||
|
if check_installed and not self.addons.is_installed(addon):
|
||||||
|
raise RuntimeError("Addon is not installed")
|
||||||
|
|
||||||
|
return addon
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def info(self, request):
|
||||||
|
"""Return addon information."""
|
||||||
|
addon = self._extract_addon(request)
|
||||||
|
|
||||||
|
info = {
|
||||||
|
ATTR_VERSION: self.addons.version_installed(addon),
|
||||||
|
ATTR_CURRENT: self.addons.get_version(addon),
|
||||||
|
ATTR_STATE: await self.addons.state(addon),
|
||||||
|
ATTR_BOOT: self.addons.get_boot(addon),
|
||||||
|
ATTR_OPTIONS: self.addons.get_options(addon),
|
||||||
|
}
|
||||||
|
return info
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def options(self, request):
|
||||||
|
"""Store user options for addon."""
|
||||||
|
addon = self._extract_addon(request)
|
||||||
|
schema = self.addons.get_schema(addon)
|
||||||
|
|
||||||
|
options = await api_validate(schema, request)
|
||||||
|
self.addons.set_options(addon, options)
|
||||||
|
return True
|
||||||
|
|
||||||
|
@api_process
|
||||||
|
async def install(self, request):
|
||||||
|
"""Install addon."""
|
||||||
|
body = await api_validate(SCHEMA_VERSION, request)
|
||||||
|
addon = self._extract_addon(request, check_installed=False)
|
||||||
|
version = body.get(
|
||||||
|
ATTR_VERSION, self.addons.get_version(addon))
|
||||||
|
|
||||||
|
return await asyncio.shield(
|
||||||
|
self.addons.install(addon, version), loop=self.loop)
|
||||||
|
|
||||||
|
@api_process
async def uninstall(self, request):
    """Remove an installed add-on."""
    addon = self._extract_addon(request)

    # Shield so a cancelled HTTP request cannot abort the removal.
    return await asyncio.shield(
        self.addons.uninstall(addon), loop=self.loop)
|
||||||
|
|
||||||
|
@api_process
async def start(self, request):
    """Start an add-on after validating its stored options.

    Raises RuntimeError when the add-on is already running or when
    its stored options no longer pass the add-on's schema.
    """
    addon = self._extract_addon(request)

    if await self.addons.state(addon) == STATE_STARTED:
        raise RuntimeError("Addon is already running")

    # Fetch schema/options outside the try block: previously a failure
    # in either lookup would reach the except handler with `options`
    # unbound and raise NameError instead of a useful message. Only the
    # schema validation itself can raise vol.Invalid here.
    schema = self.addons.get_schema(addon)
    options = self.addons.get_options(addon)
    try:
        schema(options)
    except vol.Invalid as ex:
        # Present the validation failure as a readable API error.
        raise RuntimeError(humanize_error(options, ex)) from None

    # Shield so a cancelled HTTP request cannot abort the startup.
    return await asyncio.shield(
        self.addons.start(addon), loop=self.loop)
|
||||||
|
|
||||||
|
@api_process
async def stop(self, request):
    """Stop a running add-on.

    Raises RuntimeError when the add-on is already stopped.
    """
    addon = self._extract_addon(request)

    if await self.addons.state(addon) == STATE_STOPPED:
        # Fixed typo in the user-facing message ("stoped" -> "stopped").
        raise RuntimeError("Addon is already stopped")

    # Shield so a cancelled HTTP request cannot abort the shutdown.
    return await asyncio.shield(
        self.addons.stop(addon), loop=self.loop)
|
||||||
|
|
||||||
|
@api_process
async def update(self, request):
    """Update an add-on, optionally to an explicit version."""
    body = await api_validate(SCHEMA_VERSION, request)
    addon = self._extract_addon(request)
    # Default to the newest available version when none was given.
    version = body.get(ATTR_VERSION, self.addons.get_version(addon))

    if version == self.addons.version_installed(addon):
        raise RuntimeError("Version is already in use")

    # Shield so a cancelled HTTP request cannot abort the update.
    return await asyncio.shield(
        self.addons.update(addon, version), loop=self.loop)
|
@@ -40,9 +40,10 @@ class APIHomeAssistant(object):
|
|||||||
version = body.get(ATTR_VERSION, self.config.current_homeassistant)
|
version = body.get(ATTR_VERSION, self.config.current_homeassistant)
|
||||||
|
|
||||||
if self.dock_hass.in_progress:
|
if self.dock_hass.in_progress:
|
||||||
raise RuntimeError("Other task is in progress.")
|
raise RuntimeError("Other task is in progress")
|
||||||
|
|
||||||
if version == self.dock_hass.version:
|
if version == self.dock_hass.version:
|
||||||
raise RuntimeError("%s is already in use.", version)
|
raise RuntimeError("Version is already in use")
|
||||||
|
|
||||||
return await asyncio.shield(self.dock_hass.update(version))
|
return await asyncio.shield(
|
||||||
|
self.dock_hass.update(version), loop=self.loop)
|
||||||
|
@@ -56,6 +56,6 @@ class APIHost(object):
|
|||||||
version = body.get(ATTR_VERSION)
|
version = body.get(ATTR_VERSION)
|
||||||
|
|
||||||
if version == self.host_controll.version:
|
if version == self.host_controll.version:
|
||||||
raise RuntimeError("%s is already in use.", version)
|
raise RuntimeError("Version is already in use")
|
||||||
|
|
||||||
return await self.host_controll.host_update(version=version)
|
return await self.host_controll.host_update(version=version)
|
||||||
|
@@ -1,10 +1,12 @@
|
|||||||
"""Init file for HassIO supervisor rest api."""
|
"""Init file for HassIO supervisor rest api."""
|
||||||
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
from .util import api_process, api_process_hostcontroll, api_validate
|
from .util import api_process, api_validate
|
||||||
from ..const import ATTR_VERSION, ATTR_CURRENT, ATTR_BETA, HASSIO_VERSION
|
from ..const import (
|
||||||
|
ATTR_ADDONS, ATTR_VERSION, ATTR_CURRENT, ATTR_BETA, HASSIO_VERSION)
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -21,11 +23,12 @@ SCHEMA_VERSION = vol.Schema({
|
|||||||
class APISupervisor(object):
|
class APISupervisor(object):
|
||||||
"""Handle rest api for supervisor functions."""
|
"""Handle rest api for supervisor functions."""
|
||||||
|
|
||||||
def __init__(self, config, loop, host_controll):
|
def __init__(self, config, loop, supervisor, addons):
|
||||||
"""Initialize supervisor rest api part."""
|
"""Initialize supervisor rest api part."""
|
||||||
self.config = config
|
self.config = config
|
||||||
self.loop = loop
|
self.loop = loop
|
||||||
self.host_controll = host_controll
|
self.supervisor = supervisor
|
||||||
|
self.addons = addons
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
async def ping(self, request):
|
async def ping(self, request):
|
||||||
@@ -39,8 +42,8 @@ class APISupervisor(object):
|
|||||||
ATTR_VERSION: HASSIO_VERSION,
|
ATTR_VERSION: HASSIO_VERSION,
|
||||||
ATTR_CURRENT: self.config.current_hassio,
|
ATTR_CURRENT: self.config.current_hassio,
|
||||||
ATTR_BETA: self.config.upstream_beta,
|
ATTR_BETA: self.config.upstream_beta,
|
||||||
|
ATTR_ADDONS: self.addons.list,
|
||||||
}
|
}
|
||||||
|
|
||||||
return info
|
return info
|
||||||
|
|
||||||
@api_process
|
@api_process
|
||||||
@@ -53,13 +56,13 @@ class APISupervisor(object):
|
|||||||
|
|
||||||
return self.config.save()
|
return self.config.save()
|
||||||
|
|
||||||
@api_process_hostcontroll
|
@api_process
|
||||||
async def update(self, request):
|
async def update(self, request):
|
||||||
"""Update host OS."""
|
"""Update supervisor OS."""
|
||||||
body = await api_validate(SCHEMA_VERSION, request)
|
body = await api_validate(SCHEMA_VERSION, request)
|
||||||
version = body.get(ATTR_VERSION, self.config.current_hassio)
|
version = body.get(ATTR_VERSION, self.config.current_hassio)
|
||||||
|
|
||||||
if version == HASSIO_VERSION:
|
if version == self.supervisor.version:
|
||||||
raise RuntimeError("%s is already in use.", version)
|
raise RuntimeError("Version is already in use")
|
||||||
|
|
||||||
return await self.host_controll.supervisor_update(version=version)
|
return await asyncio.shield(self.supervisor.update(version))
|
||||||
|
@@ -82,7 +82,7 @@ async def api_validate(schema, request):
|
|||||||
"""Validate request data with schema."""
|
"""Validate request data with schema."""
|
||||||
data = await request.json(loads=json_loads)
|
data = await request.json(loads=json_loads)
|
||||||
try:
|
try:
|
||||||
schema(data)
|
data = schema(data)
|
||||||
except vol.Invalid as ex:
|
except vol.Invalid as ex:
|
||||||
raise RuntimeError(humanize_error(data, ex)) from None
|
raise RuntimeError(humanize_error(data, ex)) from None
|
||||||
|
|
||||||
|
@@ -26,6 +26,17 @@ def initialize_system_data(websession):
|
|||||||
_LOGGER.info("Create Home-Assistant ssl folder %s", config.path_ssl)
|
_LOGGER.info("Create Home-Assistant ssl folder %s", config.path_ssl)
|
||||||
os.mkdir(config.path_ssl)
|
os.mkdir(config.path_ssl)
|
||||||
|
|
||||||
|
# homeassistant addon data folder
|
||||||
|
if not os.path.isdir(config.path_addons_data):
|
||||||
|
_LOGGER.info("Create Home-Assistant addon data folder %s",
|
||||||
|
config.path_addons_data)
|
||||||
|
os.mkdir(config.path_addons_data)
|
||||||
|
|
||||||
|
if not os.path.isdir(config.path_addons_custom):
|
||||||
|
_LOGGER.info("Create Home-Assistant addon custom folder %s",
|
||||||
|
config.path_addons_custom)
|
||||||
|
os.mkdir(config.path_addons_custom)
|
||||||
|
|
||||||
return config
|
return config
|
||||||
|
|
||||||
|
|
||||||
|
@@ -1,10 +1,10 @@
|
|||||||
"""Bootstrap HassIO."""
|
"""Bootstrap HassIO."""
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from .const import FILE_HASSIO_CONFIG, HASSIO_SHARE
|
from .const import FILE_HASSIO_CONFIG, HASSIO_SHARE
|
||||||
from .tools import fetch_current_versions
|
from .tools import (
|
||||||
|
fetch_current_versions, write_json_file, read_json_file)
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -14,26 +14,47 @@ HOMEASSISTANT_CURRENT = 'homeassistant_current'
|
|||||||
|
|
||||||
HASSIO_SSL = "{}/ssl"
|
HASSIO_SSL = "{}/ssl"
|
||||||
HASSIO_CURRENT = 'hassio_current'
|
HASSIO_CURRENT = 'hassio_current'
|
||||||
|
HASSIO_CLEANUP = 'hassio_cleanup'
|
||||||
|
|
||||||
|
ADDONS_REPO = "{}/addons"
|
||||||
|
ADDONS_DATA = "{}/addons_data"
|
||||||
|
ADDONS_CUSTOM = "{}/addons_custom"
|
||||||
|
|
||||||
UPSTREAM_BETA = 'upstream_beta'
|
UPSTREAM_BETA = 'upstream_beta'
|
||||||
|
|
||||||
|
|
||||||
class CoreConfig(object):
|
class Config(object):
|
||||||
"""Hold all config data."""
|
"""Hold all config data."""
|
||||||
|
|
||||||
def __init__(self, websession, config_file=FILE_HASSIO_CONFIG):
|
def __init__(self, config_file):
|
||||||
"""Initialize config object."""
|
"""Initialize config object."""
|
||||||
self.websession = websession
|
|
||||||
self._filename = config_file
|
self._filename = config_file
|
||||||
self._data = {}
|
self._data = {}
|
||||||
|
|
||||||
# init or load data
|
# init or load data
|
||||||
if os.path.isfile(self._filename):
|
if os.path.isfile(self._filename):
|
||||||
try:
|
try:
|
||||||
with open(self._filename, 'r') as cfile:
|
self._data = read_json_file(self._filename)
|
||||||
self._data = json.loads(cfile.read())
|
|
||||||
except OSError:
|
except OSError:
|
||||||
_LOGGER.warning("Can't read %s", self._filename)
|
_LOGGER.warning("Can't read %s", self._filename)
|
||||||
|
|
||||||
|
def save(self):
|
||||||
|
"""Store data to config file."""
|
||||||
|
if not write_json_file(self._filename, self._data):
|
||||||
|
_LOGGER.exception("Can't store config in %s", self._filename)
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
class CoreConfig(Config):
|
||||||
|
"""Hold all core config data."""
|
||||||
|
|
||||||
|
def __init__(self, websession):
|
||||||
|
"""Initialize config object."""
|
||||||
|
self.websession = websession
|
||||||
|
|
||||||
|
super().__init__(FILE_HASSIO_CONFIG)
|
||||||
|
|
||||||
# init data
|
# init data
|
||||||
if not self._data:
|
if not self._data:
|
||||||
self._data.update({
|
self._data.update({
|
||||||
@@ -42,17 +63,6 @@ class CoreConfig(object):
|
|||||||
})
|
})
|
||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
def save(self):
|
|
||||||
"""Store data to config file."""
|
|
||||||
try:
|
|
||||||
with open(self._filename, 'w') as conf_file:
|
|
||||||
conf_file.write(json.dumps(self._data))
|
|
||||||
except OSError:
|
|
||||||
_LOGGER.exception("Can't store config in %s", self._filename)
|
|
||||||
return False
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
async def fetch_update_infos(self):
|
async def fetch_update_infos(self):
|
||||||
"""Read current versions from web."""
|
"""Read current versions from web."""
|
||||||
current = await fetch_current_versions(
|
current = await fetch_current_versions(
|
||||||
@@ -78,6 +88,20 @@ class CoreConfig(object):
|
|||||||
"""Set beta upstream mode."""
|
"""Set beta upstream mode."""
|
||||||
self._data[UPSTREAM_BETA] = bool(value)
|
self._data[UPSTREAM_BETA] = bool(value)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def hassio_cleanup(self):
|
||||||
|
"""Return Version they need to cleanup."""
|
||||||
|
return self._data.get(HASSIO_CLEANUP)
|
||||||
|
|
||||||
|
@hassio_cleanup.setter
|
||||||
|
def hassio_cleanup(self, version):
|
||||||
|
"""Set or remove cleanup flag."""
|
||||||
|
if version is None:
|
||||||
|
self._data.pop(HASSIO_CLEANUP, None)
|
||||||
|
else:
|
||||||
|
self._data[HASSIO_CLEANUP] = version
|
||||||
|
self.save()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def homeassistant_image(self):
|
def homeassistant_image(self):
|
||||||
"""Return docker homeassistant repository."""
|
"""Return docker homeassistant repository."""
|
||||||
@@ -93,10 +117,15 @@ class CoreConfig(object):
|
|||||||
"""Actual version of hassio."""
|
"""Actual version of hassio."""
|
||||||
return self._data.get(HASSIO_CURRENT)
|
return self._data.get(HASSIO_CURRENT)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_hassio_docker(self):
|
||||||
|
"""Return hassio data path extern for docker."""
|
||||||
|
return os.environ['SUPERVISOR_SHARE']
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path_config_docker(self):
|
def path_config_docker(self):
|
||||||
"""Return config path extern for docker."""
|
"""Return config path extern for docker."""
|
||||||
return HOMEASSISTANT_CONFIG.format(os.environ['SUPERVISOR_SHARE'])
|
return HOMEASSISTANT_CONFIG.format(self.path_hassio_docker)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path_config(self):
|
def path_config(self):
|
||||||
@@ -106,9 +135,29 @@ class CoreConfig(object):
|
|||||||
@property
|
@property
|
||||||
def path_ssl_docker(self):
|
def path_ssl_docker(self):
|
||||||
"""Return SSL path extern for docker."""
|
"""Return SSL path extern for docker."""
|
||||||
return HASSIO_SSL.format(os.environ['SUPERVISOR_SHARE'])
|
return HASSIO_SSL.format(self.path_hassio_docker)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path_ssl(self):
|
def path_ssl(self):
|
||||||
"""Return SSL path inside supervisor."""
|
"""Return SSL path inside supervisor."""
|
||||||
return HASSIO_SSL.format(HASSIO_SHARE)
|
return HASSIO_SSL.format(HASSIO_SHARE)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_addons_repo(self):
|
||||||
|
"""Return git repo path for addons."""
|
||||||
|
return ADDONS_REPO.format(HASSIO_SHARE)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_addons_custom(self):
|
||||||
|
"""Return path for customs addons."""
|
||||||
|
return ADDONS_CUSTOM.format(HASSIO_SHARE)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_addons_data(self):
|
||||||
|
"""Return root addon data folder."""
|
||||||
|
return ADDONS_DATA.format(HASSIO_SHARE)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def path_addons_data_docker(self):
|
||||||
|
"""Return root addon data folder extern for docker."""
|
||||||
|
return ADDONS_DATA.format(self.path_hassio_docker)
|
||||||
|
@@ -1,16 +1,22 @@
|
|||||||
"""Const file for HassIO."""
|
"""Const file for HassIO."""
|
||||||
HASSIO_VERSION = '0.5'
|
HASSIO_VERSION = '0.8'
|
||||||
|
|
||||||
URL_HASSIO_VERSION = \
|
URL_HASSIO_VERSION = \
|
||||||
'https://raw.githubusercontent.com/pvizeli/hassio/master/version.json'
|
'https://raw.githubusercontent.com/pvizeli/hassio/master/version.json'
|
||||||
URL_HASSIO_VERSION_BETA = \
|
URL_HASSIO_VERSION_BETA = \
|
||||||
'https://raw.githubusercontent.com/pvizeli/hassio/master/version_beta.json'
|
'https://raw.githubusercontent.com/pvizeli/hassio/master/version_beta.json'
|
||||||
|
|
||||||
URL_ADDONS_REPO = 'https://github.com/pvizeli/hassio-addons'
|
URL_HASSIO_ADDONS = 'https://github.com/pvizeli/hassio-addons'
|
||||||
|
|
||||||
|
DOCKER_REPO = "pvizeli"
|
||||||
|
|
||||||
HASSIO_SHARE = "/data"
|
HASSIO_SHARE = "/data"
|
||||||
|
|
||||||
RUN_UPDATE_INFO_TASKS = 28800
|
RUN_UPDATE_INFO_TASKS = 28800
|
||||||
|
RUN_UPDATE_SUPERVISOR_TASKS = 29100
|
||||||
|
RUN_RELOAD_ADDONS_TASKS = 28800
|
||||||
|
|
||||||
|
RESTART_EXIT_CODE = 100
|
||||||
|
|
||||||
FILE_HASSIO_ADDONS = "{}/addons.json".format(HASSIO_SHARE)
|
FILE_HASSIO_ADDONS = "{}/addons.json".format(HASSIO_SHARE)
|
||||||
FILE_HASSIO_CONFIG = "{}/config.json".format(HASSIO_SHARE)
|
FILE_HASSIO_CONFIG = "{}/config.json".format(HASSIO_SHARE)
|
||||||
@@ -25,6 +31,31 @@ JSON_MESSAGE = 'message'
|
|||||||
RESULT_ERROR = 'error'
|
RESULT_ERROR = 'error'
|
||||||
RESULT_OK = 'ok'
|
RESULT_OK = 'ok'
|
||||||
|
|
||||||
|
ATTR_ADDONS = 'addons'
|
||||||
ATTR_VERSION = 'version'
|
ATTR_VERSION = 'version'
|
||||||
ATTR_CURRENT = 'current'
|
ATTR_CURRENT = 'current'
|
||||||
ATTR_BETA = 'beta'
|
ATTR_BETA = 'beta'
|
||||||
|
ATTR_NAME = 'name'
|
||||||
|
ATTR_SLUG = 'slug'
|
||||||
|
ATTR_DESCRIPTON = 'description'
|
||||||
|
ATTR_STARTUP = 'startup'
|
||||||
|
ATTR_BOOT = 'boot'
|
||||||
|
ATTR_PORTS = 'ports'
|
||||||
|
ATTR_MAP_CONFIG = 'map_config'
|
||||||
|
ATTR_MAP_SSL = 'map_ssl'
|
||||||
|
ATTR_MAP_HASSIO = 'map_hassio'
|
||||||
|
ATTR_OPTIONS = 'options'
|
||||||
|
ATTR_INSTALLED = 'installed'
|
||||||
|
ATTR_STATE = 'state'
|
||||||
|
ATTR_SCHEMA = 'schema'
|
||||||
|
ATTR_IMAGE = 'image'
|
||||||
|
|
||||||
|
STARTUP_BEFORE = 'before'
|
||||||
|
STARTUP_AFTER = 'after'
|
||||||
|
STARTUP_ONCE = 'once'
|
||||||
|
|
||||||
|
BOOT_AUTO = 'auto'
|
||||||
|
BOOT_MANUAL = 'manual'
|
||||||
|
|
||||||
|
STATE_STARTED = 'started'
|
||||||
|
STATE_STOPPED = 'stopped'
|
||||||
|
@@ -6,12 +6,16 @@ import aiohttp
|
|||||||
import docker
|
import docker
|
||||||
|
|
||||||
from . import bootstrap
|
from . import bootstrap
|
||||||
|
from .addons import AddonManager
|
||||||
from .api import RestAPI
|
from .api import RestAPI
|
||||||
from .host_controll import HostControll
|
from .host_controll import HostControll
|
||||||
from .const import SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS
|
from .const import (
|
||||||
|
SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
|
||||||
|
RUN_UPDATE_SUPERVISOR_TASKS, STARTUP_AFTER, STARTUP_BEFORE)
|
||||||
from .scheduler import Scheduler
|
from .scheduler import Scheduler
|
||||||
from .dock.homeassistant import DockerHomeAssistant
|
from .dock.homeassistant import DockerHomeAssistant
|
||||||
from .dock.supervisor import DockerSupervisor
|
from .dock.supervisor import DockerSupervisor
|
||||||
|
from .tools import get_arch_from_image
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -21,6 +25,7 @@ class HassIO(object):
|
|||||||
|
|
||||||
def __init__(self, loop):
|
def __init__(self, loop):
|
||||||
"""Initialize hassio object."""
|
"""Initialize hassio object."""
|
||||||
|
self.exit_code = 0
|
||||||
self.loop = loop
|
self.loop = loop
|
||||||
self.websession = aiohttp.ClientSession(loop=self.loop)
|
self.websession = aiohttp.ClientSession(loop=self.loop)
|
||||||
self.config = bootstrap.initialize_system_data(self.websession)
|
self.config = bootstrap.initialize_system_data(self.websession)
|
||||||
@@ -31,17 +36,21 @@ class HassIO(object):
|
|||||||
|
|
||||||
# init basic docker container
|
# init basic docker container
|
||||||
self.supervisor = DockerSupervisor(
|
self.supervisor = DockerSupervisor(
|
||||||
self.config, self.loop, self.dock)
|
self.config, self.loop, self.dock, self)
|
||||||
self.homeassistant = DockerHomeAssistant(
|
self.homeassistant = DockerHomeAssistant(
|
||||||
self.config, self.loop, self.dock)
|
self.config, self.loop, self.dock)
|
||||||
|
|
||||||
# init HostControll
|
# init HostControll
|
||||||
self.host_controll = HostControll(self.loop)
|
self.host_controll = HostControll(self.loop)
|
||||||
|
|
||||||
|
# init addon system
|
||||||
|
self.addons = AddonManager(self.config, self.loop, self.dock)
|
||||||
|
|
||||||
async def setup(self):
|
async def setup(self):
|
||||||
"""Setup HassIO orchestration."""
|
"""Setup HassIO orchestration."""
|
||||||
# supervisor
|
# supervisor
|
||||||
await self.supervisor.attach()
|
await self.supervisor.attach()
|
||||||
|
await self.supervisor.cleanup()
|
||||||
|
|
||||||
# hostcontroll
|
# hostcontroll
|
||||||
host_info = await self.host_controll.info()
|
host_info = await self.host_controll.info()
|
||||||
@@ -56,8 +65,9 @@ class HassIO(object):
|
|||||||
# rest api views
|
# rest api views
|
||||||
self.api.register_host(self.host_controll)
|
self.api.register_host(self.host_controll)
|
||||||
self.api.register_network(self.host_controll)
|
self.api.register_network(self.host_controll)
|
||||||
self.api.register_supervisor(self.host_controll)
|
self.api.register_supervisor(self.supervisor, self.addons)
|
||||||
self.api.register_homeassistant(self.homeassistant)
|
self.api.register_homeassistant(self.homeassistant)
|
||||||
|
self.api.register_addons(self.addons)
|
||||||
|
|
||||||
# schedule update info tasks
|
# schedule update info tasks
|
||||||
self.scheduler.register_task(
|
self.scheduler.register_task(
|
||||||
@@ -69,19 +79,43 @@ class HassIO(object):
|
|||||||
_LOGGER.info("No HomeAssistant docker found.")
|
_LOGGER.info("No HomeAssistant docker found.")
|
||||||
await self._setup_homeassistant()
|
await self._setup_homeassistant()
|
||||||
|
|
||||||
|
# Load addons
|
||||||
|
arch = get_arch_from_image(self.supervisor.image)
|
||||||
|
await self.addons.prepare(arch)
|
||||||
|
|
||||||
|
# schedule addon update task
|
||||||
|
self.scheduler.register_task(
|
||||||
|
self.addons.relaod, RUN_RELOAD_ADDONS_TASKS, first_run=True)
|
||||||
|
|
||||||
|
# schedule self update task
|
||||||
|
self.scheduler.register_task(
|
||||||
|
self._hassio_update, RUN_UPDATE_SUPERVISOR_TASKS)
|
||||||
|
|
||||||
async def start(self):
|
async def start(self):
|
||||||
"""Start HassIO orchestration."""
|
"""Start HassIO orchestration."""
|
||||||
# start api
|
# start api
|
||||||
await self.api.start()
|
await self.api.start()
|
||||||
|
|
||||||
|
# HomeAssistant is already running / supervisor have only reboot
|
||||||
|
if await self.homeassistant.is_running():
|
||||||
|
_LOGGER.info("HassIO reboot detected")
|
||||||
|
return
|
||||||
|
|
||||||
|
# start addon mark as before
|
||||||
|
await self.addons.auto_boot(STARTUP_BEFORE)
|
||||||
|
|
||||||
# run HomeAssistant
|
# run HomeAssistant
|
||||||
await self.homeassistant.run()
|
await self.homeassistant.run()
|
||||||
|
|
||||||
async def stop(self):
|
# start addon mark as after
|
||||||
|
await self.addons.auto_boot(STARTUP_AFTER)
|
||||||
|
|
||||||
|
async def stop(self, exit_code=0):
|
||||||
"""Stop a running orchestration."""
|
"""Stop a running orchestration."""
|
||||||
tasks = [self.websession.close(), self.api.stop()]
|
tasks = [self.websession.close(), self.api.stop()]
|
||||||
await asyncio.wait(tasks, loop=self.loop)
|
await asyncio.wait(tasks, loop=self.loop)
|
||||||
|
|
||||||
|
self.exit_code = exit_code
|
||||||
self.loop.stop()
|
self.loop.stop()
|
||||||
|
|
||||||
async def _setup_homeassistant(self):
|
async def _setup_homeassistant(self):
|
||||||
@@ -99,3 +133,12 @@ class HassIO(object):
|
|||||||
|
|
||||||
# store version
|
# store version
|
||||||
_LOGGER.info("HomeAssistant docker now installed.")
|
_LOGGER.info("HomeAssistant docker now installed.")
|
||||||
|
|
||||||
|
async def _hassio_update(self):
|
||||||
|
"""Check and run update of supervisor hassio."""
|
||||||
|
if self.config.current_hassio == self.supervisor.version:
|
||||||
|
return
|
||||||
|
|
||||||
|
_LOGGER.info(
|
||||||
|
"Found new HassIO version %s.", self.config.current_hassio)
|
||||||
|
await self.supervisor.update(self.config.current_hassio)
|
||||||
|
@@ -53,7 +53,7 @@ class DockerBase(object):
|
|||||||
|
|
||||||
image.tag(self.image, tag='latest')
|
image.tag(self.image, tag='latest')
|
||||||
self.version = get_version_from_env(image.attrs['Config']['Env'])
|
self.version = get_version_from_env(image.attrs['Config']['Env'])
|
||||||
_LOGGER.info("Tag image %s with version %s as latest.",
|
_LOGGER.info("Tag image %s with version %s as latest",
|
||||||
self.image, self.version)
|
self.image, self.version)
|
||||||
except docker.errors.APIError as err:
|
except docker.errors.APIError as err:
|
||||||
_LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
|
_LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
|
||||||
@@ -122,7 +122,7 @@ class DockerBase(object):
|
|||||||
self.image = self.container.attrs['Config']['Image']
|
self.image = self.container.attrs['Config']['Image']
|
||||||
self.version = get_version_from_env(
|
self.version = get_version_from_env(
|
||||||
self.container.attrs['Config']['Env'])
|
self.container.attrs['Config']['Env'])
|
||||||
_LOGGER.info("Attach to image %s with version %s.",
|
_LOGGER.info("Attach to image %s with version %s",
|
||||||
self.image, self.version)
|
self.image, self.version)
|
||||||
except (docker.errors.DockerException, KeyError):
|
except (docker.errors.DockerException, KeyError):
|
||||||
_LOGGER.fatal(
|
_LOGGER.fatal(
|
||||||
@@ -138,7 +138,7 @@ class DockerBase(object):
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
async with self._lock:
|
async with self._lock:
|
||||||
_LOGGER.info("Run docker image %s with version %s.",
|
_LOGGER.info("Run docker image %s with version %s",
|
||||||
self.image, self.version)
|
self.image, self.version)
|
||||||
return await self.loop.run_in_executor(None, self._run)
|
return await self.loop.run_in_executor(None, self._run)
|
||||||
|
|
||||||
@@ -167,6 +167,8 @@ class DockerBase(object):
|
|||||||
if not self.container:
|
if not self.container:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
_LOGGER.info("Stop %s docker application.", self.image)
|
||||||
|
|
||||||
self.container.reload()
|
self.container.reload()
|
||||||
if self.container.status == 'running':
|
if self.container.status == 'running':
|
||||||
with suppress(docker.errors.DockerException):
|
with suppress(docker.errors.DockerException):
|
||||||
@@ -177,11 +179,39 @@ class DockerBase(object):
|
|||||||
|
|
||||||
self.container = None
|
self.container = None
|
||||||
|
|
||||||
async def update(self, tag):
|
async def remove(self):
|
||||||
"""Update a docker image.
|
"""Remove docker container."""
|
||||||
|
if self._lock.locked():
|
||||||
|
_LOGGER.error("Can't excute remove while a task is in progress")
|
||||||
|
return False
|
||||||
|
|
||||||
Return a Future.
|
async with self._lock:
|
||||||
|
return await self.loop.run_in_executor(None, self._remove)
|
||||||
|
|
||||||
|
def _remove(self):
|
||||||
|
"""remove docker container.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
|
if self._is_running():
|
||||||
|
self._stop()
|
||||||
|
|
||||||
|
_LOGGER.info("Remove docker %s with latest and %s",
|
||||||
|
self.image, self.version)
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.dock.images.remove(
|
||||||
|
image="{}:latest".format(self.image), force=True)
|
||||||
|
self.dock.images.remove(
|
||||||
|
image="{}:{}".format(self.image, self.version), force=True)
|
||||||
|
except docker.errors.DockerException as err:
|
||||||
|
_LOGGER.warning("Can't remove image %s -> %s", self.image, err)
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
async def update(self, tag):
|
||||||
|
"""Update a docker image."""
|
||||||
if self._lock.locked():
|
if self._lock.locked():
|
||||||
_LOGGER.error("Can't excute update while a task is in progress")
|
_LOGGER.error("Can't excute update while a task is in progress")
|
||||||
return False
|
return False
|
||||||
@@ -194,10 +224,10 @@ class DockerBase(object):
|
|||||||
|
|
||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
old_image = "{}:{}".format(self.image, self.version)
|
|
||||||
old_run = self._is_running()
|
old_run = self._is_running()
|
||||||
|
old_image = "{}:{}".format(self.image, self.version)
|
||||||
|
|
||||||
_LOGGER.info("Update docker %s with %s:%s.",
|
_LOGGER.info("Update docker %s with %s:%s",
|
||||||
old_image, self.image, tag)
|
old_image, self.image, tag)
|
||||||
|
|
||||||
# update docker image
|
# update docker image
|
||||||
@@ -208,7 +238,7 @@ class DockerBase(object):
|
|||||||
self.dock.images.remove(image=old_image, force=True)
|
self.dock.images.remove(image=old_image, force=True)
|
||||||
except docker.errors.DockerException as err:
|
except docker.errors.DockerException as err:
|
||||||
_LOGGER.warning(
|
_LOGGER.warning(
|
||||||
"Can't remove old image %s -> %s.", old_image, err)
|
"Can't remove old image %s -> %s", old_image, err)
|
||||||
# restore
|
# restore
|
||||||
if old_run:
|
if old_run:
|
||||||
self._run()
|
self._run()
|
||||||
|
81
hassio/dock/addon.py
Normal file
81
hassio/dock/addon.py
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
"""Init file for HassIO addon docker object."""
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import docker
|
||||||
|
|
||||||
|
from . import DockerBase
|
||||||
|
from ..tools import get_version_from_env
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
HASS_DOCKER_NAME = 'homeassistant'
|
||||||
|
|
||||||
|
|
||||||
|
class DockerAddon(DockerBase):
    """Docker wrapper that runs a single HassIO add-on container."""

    def __init__(self, config, loop, dock, addons_data, addon):
        """Initialize the add-on docker wrapper.

        The image name is looked up from the add-on metadata store.
        """
        super().__init__(
            config, loop, dock, image=addons_data.get_image(addon))
        self.addon = addon
        self.addons_data = addons_data

    @property
    def docker_name(self):
        """Return name of docker container."""
        return "addon_{}".format(self.addons_data.get_slug(self.addon))

    def _run(self):
        """Run docker image.

        Need run inside executor.
        """
        if self._is_running():
            return

        # cleanup old container
        self._stop()

        # Build the volume map: /data is always mounted; config, ssl
        # and hassio shares are added only when the add-on asks for them.
        mounts = {
            self.addons_data.path_data_docker(self.addon): {
                'bind': '/data', 'mode': 'rw'
            }}
        if self.addons_data.need_config(self.addon):
            mounts[self.config.path_config_docker] = {
                'bind': '/config', 'mode': 'rw'
            }
        if self.addons_data.need_ssl(self.addon):
            mounts[self.config.path_ssl_docker] = {
                'bind': '/ssl', 'mode': 'rw'
            }
        if self.addons_data.need_hassio(self.addon):
            mounts[self.config.path_hassio_docker] = {
                'bind': '/hassio', 'mode': 'rw'
            }

        try:
            self.container = self.dock.containers.run(
                self.image,
                name=self.docker_name,
                detach=True,
                network_mode='bridge',
                ports=self.addons_data.get_ports(self.addon),
                restart_policy={
                    "Name": "on-failure",
                    "MaximumRetryCount": 10,
                },
                volumes=mounts,
            )

            # The running image reports its version via its environment.
            self.version = get_version_from_env(
                self.container.attrs['Config']['Env'])
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't run %s -> %s", self.image, err)
            return False

        return True
@@ -60,7 +60,7 @@ class DockerHomeAssistant(DockerBase):
|
|||||||
self.version = get_version_from_env(
|
self.version = get_version_from_env(
|
||||||
self.container.attrs['Config']['Env'])
|
self.container.attrs['Config']['Env'])
|
||||||
except docker.errors.DockerException as err:
|
except docker.errors.DockerException as err:
|
||||||
_LOGGER.error("Can't run %s -> %s.", self.image, err)
|
_LOGGER.error("Can't run %s -> %s", self.image, err)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
@@ -1,17 +1,71 @@
|
|||||||
"""Init file for HassIO docker object."""
|
"""Init file for HassIO docker object."""
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
import docker
|
||||||
|
|
||||||
from . import DockerBase
|
from . import DockerBase
|
||||||
|
from ..const import RESTART_EXIT_CODE
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class DockerSupervisor(DockerBase):
|
class DockerSupervisor(DockerBase):
|
||||||
"""Docker hassio wrapper for HomeAssistant."""
|
"""Docker hassio wrapper for HomeAssistant."""
|
||||||
|
|
||||||
|
def __init__(self, config, loop, dock, hassio, image=None):
|
||||||
|
"""Initialize docker base wrapper."""
|
||||||
|
super().__init__(config, loop, dock, image=image)
|
||||||
|
|
||||||
|
self.hassio = hassio
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def docker_name(self):
|
def docker_name(self):
|
||||||
"""Return name of docker container."""
|
"""Return name of docker container."""
|
||||||
return os.environ['SUPERVISOR_NAME']
|
return os.environ['SUPERVISOR_NAME']
|
||||||
|
|
||||||
|
async def update(self, tag):
|
||||||
|
"""Update a supervisor docker image."""
|
||||||
|
if self._lock.locked():
|
||||||
|
_LOGGER.error("Can't excute update while a task is in progress")
|
||||||
|
return False
|
||||||
|
|
||||||
|
_LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)
|
||||||
|
old_version = self.version
|
||||||
|
|
||||||
|
async with self._lock:
|
||||||
|
if await self.loop.run_in_executor(None, self._install, tag):
|
||||||
|
self.config.hassio_cleanup = old_version
|
||||||
|
self.loop.create_task(self.hassio.stop(RESTART_EXIT_CODE))
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def cleanup(self):
|
||||||
|
"""Check if old supervisor version exists and cleanup."""
|
||||||
|
if not self.config.hassio_cleanup:
|
||||||
|
return
|
||||||
|
|
||||||
|
async with self._lock:
|
||||||
|
if await self.loop.run_in_executor(None, self._cleanup):
|
||||||
|
self.config.hassio_cleanup = None
|
||||||
|
|
||||||
|
def _cleanup(self):
|
||||||
|
"""Remove old image.
|
||||||
|
|
||||||
|
Need run inside executor.
|
||||||
|
"""
|
||||||
|
old_image = "{}:{}".format(self.image, self.config.hassio_cleanup)
|
||||||
|
|
||||||
|
_LOGGER.info("Old supervisor docker found %s", old_image)
|
||||||
|
try:
|
||||||
|
self.dock.images.remove(image=old_image, force=True)
|
||||||
|
except docker.errors.DockerException as err:
|
||||||
|
_LOGGER.warning("Can't remove old image %s -> %s", old_image, err)
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
async def run(self):
|
async def run(self):
|
||||||
"""Run docker image."""
|
"""Run docker image."""
|
||||||
raise RuntimeError("Not support on supervisor docker container!")
|
raise RuntimeError("Not support on supervisor docker container!")
|
||||||
@@ -24,6 +78,6 @@ class DockerSupervisor(DockerBase):
|
|||||||
"""Stop/remove docker container."""
|
"""Stop/remove docker container."""
|
||||||
raise RuntimeError("Not support on supervisor docker container!")
|
raise RuntimeError("Not support on supervisor docker container!")
|
||||||
|
|
||||||
async def update(self, tag):
|
async def remove(self):
|
||||||
"""Update docker image."""
|
"""Remove docker image."""
|
||||||
raise RuntimeError("Not support on supervisor docker container!")
|
raise RuntimeError("Not support on supervisor docker container!")
|
||||||
|
@@ -14,9 +14,8 @@ _LOGGER = logging.getLogger(__name__)
|
|||||||
TIMEOUT = 15
|
TIMEOUT = 15
|
||||||
|
|
||||||
LEVEL_POWER = 1
|
LEVEL_POWER = 1
|
||||||
LEVEL_UPDATE_SUPERVISOR = 2
|
LEVEL_UPDATE_HOST = 2
|
||||||
LEVEL_UPDATE_HOST = 4
|
LEVEL_NETWORK = 4
|
||||||
LEVEL_NETWORK = 8
|
|
||||||
|
|
||||||
|
|
||||||
class HostControll(object):
|
class HostControll(object):
|
||||||
@@ -101,12 +100,3 @@ class HostControll(object):
|
|||||||
if version:
|
if version:
|
||||||
return self._send_command("host-update {}".format(version))
|
return self._send_command("host-update {}".format(version))
|
||||||
return self._send_command("host-update")
|
return self._send_command("host-update")
|
||||||
|
|
||||||
def supervisor_update(self, version=None):
|
|
||||||
"""Update the supervisor on host system.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
if version:
|
|
||||||
return self._send_command("supervisor-update {}".format(version))
|
|
||||||
return self._send_command("supervisor-update")
|
|
||||||
|
@@ -1,5 +1,6 @@
|
|||||||
"""Tools file for HassIO."""
|
"""Tools file for HassIO."""
|
||||||
import asyncio
|
import asyncio
|
||||||
|
import json
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
import socket
|
import socket
|
||||||
@@ -12,6 +13,7 @@ from .const import URL_HASSIO_VERSION, URL_HASSIO_VERSION_BETA
|
|||||||
_LOGGER = logging.getLogger(__name__)
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
_RE_VERSION = re.compile(r"VERSION=(.*)")
|
_RE_VERSION = re.compile(r"VERSION=(.*)")
|
||||||
|
_IMAGE_ARCH = re.compile(r".*/([a-z0-9]*)-hassio-supervisor")
|
||||||
|
|
||||||
|
|
||||||
async def fetch_current_versions(websession, beta=False):
|
async def fetch_current_versions(websession, beta=False):
|
||||||
@@ -25,9 +27,19 @@ async def fetch_current_versions(websession, beta=False):
|
|||||||
async with websession.get(url) as request:
|
async with websession.get(url) as request:
|
||||||
return await request.json(content_type=None)
|
return await request.json(content_type=None)
|
||||||
|
|
||||||
except (ValueError, aiohttp.ClientError, asyncio.TimeoutError) as err:
|
except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
|
||||||
_LOGGER.warning("Can't fetch versions from %s! %s", url, err)
|
_LOGGER.warning("Can't fetch versions from %s! %s", url, err)
|
||||||
|
|
||||||
|
except json.JSONDecodeError as err:
|
||||||
|
_LOGGER.warning("Can't parse versions from %s! %s", url, err)
|
||||||
|
|
||||||
|
|
||||||
|
def get_arch_from_image(image):
|
||||||
|
"""Return arch from hassio image name."""
|
||||||
|
found = _IMAGE_ARCH.match(image)
|
||||||
|
if found:
|
||||||
|
return found.group(1)
|
||||||
|
|
||||||
|
|
||||||
def get_version_from_env(env_list):
|
def get_version_from_env(env_list):
|
||||||
"""Extract Version from ENV list."""
|
"""Extract Version from ENV list."""
|
||||||
@@ -56,3 +68,20 @@ def get_local_ip(loop):
|
|||||||
return socket.gethostbyname(socket.gethostname())
|
return socket.gethostbyname(socket.gethostname())
|
||||||
finally:
|
finally:
|
||||||
sock.close()
|
sock.close()
|
||||||
|
|
||||||
|
|
||||||
|
def write_json_file(jsonfile, data):
|
||||||
|
"""Write a json file."""
|
||||||
|
try:
|
||||||
|
with open(jsonfile, 'w') as conf_file:
|
||||||
|
conf_file.write(json.dumps(data))
|
||||||
|
except OSError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def read_json_file(jsonfile):
|
||||||
|
"""Read a json file and return a dict."""
|
||||||
|
with open(jsonfile, 'r') as cfile:
|
||||||
|
return json.loads(cfile.read())
|
||||||
|
3
setup.py
3
setup.py
@@ -29,7 +29,7 @@ setup(
|
|||||||
keywords=['docker', 'home-assistant', 'api'],
|
keywords=['docker', 'home-assistant', 'api'],
|
||||||
zip_safe=False,
|
zip_safe=False,
|
||||||
platforms='any',
|
platforms='any',
|
||||||
packages=['hassio', 'hassio.dock', 'hassio.api'],
|
packages=['hassio', 'hassio.dock', 'hassio.api', 'hassio.addons'],
|
||||||
include_package_data=True,
|
include_package_data=True,
|
||||||
install_requires=[
|
install_requires=[
|
||||||
'async_timeout',
|
'async_timeout',
|
||||||
@@ -37,5 +37,6 @@ setup(
|
|||||||
'docker',
|
'docker',
|
||||||
'colorlog',
|
'colorlog',
|
||||||
'voluptuous',
|
'voluptuous',
|
||||||
|
'gitpython',
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"hassio_tag": "0.5",
|
"hassio_tag": "0.8",
|
||||||
"homeassistant_tag": "0.41",
|
"homeassistant_tag": "0.42.3",
|
||||||
"resinos_version": "0.3",
|
"resinos_version": "0.3",
|
||||||
"resinhup_version": "0.1"
|
"resinhup_version": "0.1"
|
||||||
}
|
}
|
||||||
|
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"hassio_tag": "0.5",
|
"hassio_tag": "0.8",
|
||||||
"homeassistant_tag": "0.41",
|
"homeassistant_tag": "0.42.3",
|
||||||
"resinos_version": "0.3",
|
"resinos_version": "0.3",
|
||||||
"resinhup_version": "0.1"
|
"resinhup_version": "0.1"
|
||||||
}
|
}
|
||||||
|
Reference in New Issue
Block a user