Update version after merge

This commit is contained in:
pvizeli 2017-06-29 12:00:39 +02:00
commit 766a9af54e
26 changed files with 939 additions and 664 deletions

View File

@ -2,7 +2,7 @@ sudo: false
matrix: matrix:
fast_finish: true fast_finish: true
include: include:
- python: "3.5" - python: "3.6"
cache: cache:
directories: directories:

12
API.md
View File

@ -78,10 +78,10 @@ Get all available addons
"repositories": [ "repositories": [
{ {
"slug": "12345678", "slug": "12345678",
"name": "Repitory Name", "name": "Repitory Name|unknown",
"source": "URL_OF_REPOSITORY", "source": "URL_OF_REPOSITORY",
"url": "null|WEBSITE", "url": "WEBSITE|REPOSITORY",
"maintainer": "null|BLA BLU <fla@dld.ch>" "maintainer": "BLA BLU <fla@dld.ch>|unknown"
} }
] ]
} }
@ -239,12 +239,12 @@ Output the raw docker log
"url": "null|url of addon", "url": "null|url of addon",
"detached": "bool", "detached": "bool",
"repository": "12345678|null", "repository": "12345678|null",
"version": "VERSION", "version": "null|VERSION_INSTALLED",
"last_version": "LAST_VERSION", "last_version": "LAST_VERSION",
"state": "started|stopped", "state": "none|started|stopped",
"boot": "auto|manual", "boot": "auto|manual",
"build": "bool", "build": "bool",
"options": {}, "options": "{}",
} }
``` ```

View File

@ -1,5 +1,6 @@
"""Main file for HassIO.""" """Main file for HassIO."""
import asyncio import asyncio
from concurrent.futures import ThreadPoolExecutor
import logging import logging
import sys import sys
@ -17,7 +18,14 @@ if __name__ == "__main__":
exit(1) exit(1)
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
hassio = core.HassIO(loop) executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
loop.set_default_executor(executor)
_LOGGER.info("Initialize Hassio setup")
config = bootstrap.initialize_system_data()
hassio = core.HassIO(loop, config)
bootstrap.migrate_system_env(config)
_LOGGER.info("Run Hassio setup") _LOGGER.info("Run Hassio setup")
loop.run_until_complete(hassio.setup()) loop.run_until_complete(hassio.setup())
@ -26,7 +34,11 @@ if __name__ == "__main__":
loop.call_soon_threadsafe(loop.create_task, hassio.start()) loop.call_soon_threadsafe(loop.create_task, hassio.start())
loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio) loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)
_LOGGER.info("Run Hassio loop")
loop.run_forever() loop.run_forever()
_LOGGER.info("Cleanup system")
executor.shutdown(wait=False)
loop.close() loop.close()
_LOGGER.info("Close Hassio") _LOGGER.info("Close Hassio")

View File

@ -1,220 +1,133 @@
"""Init file for HassIO addons.""" """Init file for HassIO addons."""
import asyncio import asyncio
import logging import logging
import shutil
from .data import AddonsData from .addon import Addon
from .git import AddonsRepoHassIO, AddonsRepoCustom from .repository import Repository
from ..const import STATE_STOPPED, STATE_STARTED from .data import Data
from ..dock.addon import DockerAddon from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
class AddonManager(AddonsData):
class AddonManager(object):
"""Manage addons inside HassIO.""" """Manage addons inside HassIO."""
def __init__(self, config, loop, dock): def __init__(self, config, loop, dock):
"""Initialize docker base wrapper.""" """Initialize docker base wrapper."""
super().__init__(config)
self.loop = loop self.loop = loop
self.config = config
self.dock = dock self.dock = dock
self.repositories = [] self.data = Data(config)
self.dockers = {} self.addons = {}
self.repositories = {}
async def prepare(self, arch): @property
def list_addons(self):
"""Return a list of all addons."""
return list(self.addons.values())
@property
def list_repositories(self):
"""Return list of addon repositories."""
return list(self.repositories.values())
def get(self, addon_slug):
"""Return a adddon from slug."""
return self.addons.get(addon_slug)
async def prepare(self):
"""Startup addon management.""" """Startup addon management."""
self.arch = arch self.data.reload()
# init hassio repository # init hassio built-in repositories
self.repositories.append(AddonsRepoHassIO(self.config, self.loop)) repositories = \
set(self.config.addons_repositories) | BUILTIN_REPOSITORIES
# init custom repositories # init custom repositories & load addons
for url in self.config.addons_repositories: await self.load_repositories(repositories)
self.repositories.append(
AddonsRepoCustom(self.config, self.loop, url))
# load addon repository
tasks = [addon.load() for addon in self.repositories]
if tasks:
await asyncio.wait(tasks, loop=self.loop)
# read data from repositories
self.read_data_from_repositories()
self.merge_update_config()
# load installed addons
for addon in self.list_installed:
self.dockers[addon] = DockerAddon(
self.config, self.loop, self.dock, self, addon)
await self.dockers[addon].attach()
async def add_git_repository(self, url):
"""Add a new custom repository."""
if url in self.config.addons_repositories:
_LOGGER.warning("Repository already exists %s", url)
return False
repo = AddonsRepoCustom(self.config, self.loop, url)
if not await repo.load():
_LOGGER.error("Can't load from repository %s", url)
return False
self.config.addons_repositories = url
self.repositories.append(repo)
return True
def drop_git_repository(self, url):
"""Remove a custom repository."""
for repo in self.repositories:
if repo.url == url:
self.repositories.remove(repo)
self.config.drop_addon_repository(url)
repo.remove()
return True
return False
async def reload(self): async def reload(self):
"""Update addons from repo and reload list.""" """Update addons from repo and reload list."""
tasks = [addon.pull() for addon in self.repositories] tasks = [repository.update() for repository in
if not tasks: self.repositories.values()]
return
await asyncio.wait(tasks, loop=self.loop)
# read data from repositories
self.read_data_from_repositories()
self.merge_update_config()
# remove stalled addons
for addon in self.list_detached:
_LOGGER.warning("Dedicated addon '%s' found!", addon)
async def auto_boot(self, start_type):
"""Boot addons with mode auto."""
boot_list = self.list_startup(start_type)
tasks = [self.start(addon) for addon in boot_list]
_LOGGER.info("Startup %s run %d addons", start_type, len(tasks))
if tasks: if tasks:
await asyncio.wait(tasks, loop=self.loop) await asyncio.wait(tasks, loop=self.loop)
async def install(self, addon, version=None): # read data from repositories
"""Install a addon.""" self.data.reload()
if not self.exists_addon(addon):
_LOGGER.error("Addon %s not exists for install", addon)
return False
if self.arch not in self.get_arch(addon): # update addons
_LOGGER.error("Addon %s not supported on %s", addon, self.arch) await self.load_addons()
return False
if self.is_installed(addon): async def load_repositories(self, list_repositories):
_LOGGER.error("Addon %s is already installed", addon) """Add a new custom repository."""
return False new_rep = set(list_repositories)
old_rep = set(self.repositories)
if not self.path_data(addon).is_dir(): # add new repository
_LOGGER.info("Create Home-Assistant addon data folder %s", async def _add_repository(url):
self.path_data(addon)) """Helper function to async add repository."""
self.path_data(addon).mkdir() repository = Repository(self.config, self.loop, self.data, url)
if not await repository.load():
addon_docker = DockerAddon( _LOGGER.error("Can't load from repository %s", url)
self.config, self.loop, self.dock, self, addon)
version = version or self.get_last_version(addon)
if not await addon_docker.install(version):
return False
self.dockers[addon] = addon_docker
self.set_addon_install(addon, version)
return True
async def uninstall(self, addon):
"""Remove a addon."""
if not self.is_installed(addon):
_LOGGER.error("Addon %s is already uninstalled", addon)
return False
if addon not in self.dockers:
_LOGGER.error("No docker found for addon %s", addon)
return False
if not await self.dockers[addon].remove():
return False
if self.path_data(addon).is_dir():
_LOGGER.info("Remove Home-Assistant addon data folder %s",
self.path_data(addon))
shutil.rmtree(str(self.path_data(addon)))
self.dockers.pop(addon)
self.set_addon_uninstall(addon)
return True
async def state(self, addon):
"""Return running state of addon."""
if addon not in self.dockers:
_LOGGER.error("No docker found for addon %s", addon)
return return
self.repositories[url] = repository
if await self.dockers[addon].is_running(): # don't add built-in repository to config
return STATE_STARTED if url not in BUILTIN_REPOSITORIES:
return STATE_STOPPED self.config.addons_repositories = url
async def start(self, addon): tasks = [_add_repository(url) for url in new_rep - old_rep]
"""Set options and start addon.""" if tasks:
if addon not in self.dockers: await asyncio.wait(tasks, loop=self.loop)
_LOGGER.error("No docker found for addon %s", addon)
return False
if not self.write_addon_options(addon): # del new repository
_LOGGER.error("Can't write options for addon %s", addon) for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
return False self.repositories.pop(url).remove()
self.config.drop_addon_repository(url)
return await self.dockers[addon].run() # update data
self.data.reload()
await self.load_addons()
async def stop(self, addon): async def load_addons(self):
"""Stop addon.""" """Update/add internal addon store."""
if addon not in self.dockers: all_addons = set(self.data.system) | set(self.data.cache)
_LOGGER.error("No docker found for addon %s", addon)
return False
return await self.dockers[addon].stop() # calc diff
add_addons = all_addons - set(self.addons)
del_addons = set(self.addons) - all_addons
async def update(self, addon, version=None): _LOGGER.info("Load addons: %d all - %d new - %d remove",
"""Update addon.""" len(all_addons), len(add_addons), len(del_addons))
if addon not in self.dockers:
_LOGGER.error("No docker found for addon %s", addon)
return False
version = version or self.get_last_version(addon) # new addons
tasks = []
for addon_slug in add_addons:
addon = Addon(
self.config, self.loop, self.dock, self.data, addon_slug)
# update tasks.append(addon.load())
if not await self.dockers[addon].update(version): self.addons[addon_slug] = addon
return False
self.set_addon_update(addon, version) if tasks:
return True await asyncio.wait(tasks, loop=self.loop)
async def restart(self, addon): # remove
"""Restart addon.""" for addon_slug in del_addons:
if addon not in self.dockers: self.addons.pop(addon_slug)
_LOGGER.error("No docker found for addon %s", addon)
return False
if not self.write_addon_options(addon): async def auto_boot(self, stage):
_LOGGER.error("Can't write options for addon %s", addon) """Boot addons with mode auto."""
return False tasks = []
for addon in self.addons.values():
if addon.is_installed and addon.boot == BOOT_AUTO and \
addon.startup == stage:
tasks.append(addon.start())
return await self.dockers[addon].restart() _LOGGER.info("Startup %s run %d addons", stage, len(tasks))
if tasks:
async def logs(self, addon): await asyncio.wait(tasks, loop=self.loop)
"""Return addons log output."""
if addon not in self.dockers:
_LOGGER.error("No docker found for addon %s", addon)
return False
return await self.dockers[addon].logs()

358
hassio/addons/addon.py Normal file
View File

@ -0,0 +1,358 @@
"""Init file for HassIO addons."""
from copy import deepcopy
import logging
from pathlib import Path, PurePath
import re
import shutil
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .validate import validate_options, MAP_VOLUME
from ..const import (
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
STATE_STARTED, STATE_STOPPED, STATE_NONE)
from ..dock.addon import DockerAddon
from ..tools import write_json_file
_LOGGER = logging.getLogger(__name__)
RE_VOLUME = re.compile(MAP_VOLUME)
class Addon(object):
    """Hold data for addon inside HassIO.

    Wraps one addon slug and exposes its merged metadata (from the
    system/cache dicts in ``data``) plus lifecycle operations that are
    delegated to a DockerAddon wrapper.
    """

    def __init__(self, config, loop, dock, data, addon_slug):
        """Initialize data holder.

        Raises RuntimeError if the slug is known neither to the installed
        (system) data nor to the repository cache.
        """
        self.config = config
        self.data = data
        self._id = addon_slug

        # _mesh resolves the slug against system first, then cache; None
        # means the slug is unknown everywhere -> invalid addon.
        if self._mesh is None:
            raise RuntimeError("{} not a valid addon!".format(self._id))

        # loop/dock are only needed by the docker wrapper, not stored here.
        self.addon_docker = DockerAddon(config, loop, dock, self)

    async def load(self):
        """Async initialize of object.

        Attaches to an existing docker container only for installed addons.
        """
        if self.is_installed:
            await self.addon_docker.attach()

    @property
    def slug(self):
        """Return slug/id of addon."""
        return self._id

    @property
    def _mesh(self):
        """Return addon data from system or cache.

        Installed (system) data wins over repository cache data.
        """
        return self.data.system.get(self._id, self.data.cache.get(self._id))

    @property
    def is_installed(self):
        """Return True if a addon is installed."""
        return self._id in self.data.system

    @property
    def is_detached(self):
        """Return True if addon is detached.

        Detached means installed locally but no longer present in any
        repository cache.
        """
        return self._id not in self.data.cache

    @property
    def version_installed(self):
        """Return installed version (None if not installed)."""
        return self.data.user.get(self._id, {}).get(ATTR_VERSION)

    def _set_install(self, version):
        """Set addon as installed.

        Snapshots the cache entry into system data and creates the user
        record, then persists everything.
        """
        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
        self.data.user[self._id] = {
            ATTR_OPTIONS: {},
            ATTR_VERSION: version,
        }
        self.data.save()

    def _set_uninstall(self):
        """Set addon as uninstalled (drop system and user records)."""
        self.data.system.pop(self._id, None)
        self.data.user.pop(self._id, None)
        self.data.save()

    def _set_update(self, version):
        """Update version of addon.

        Refreshes the system snapshot from cache so new config keys from
        the repository are picked up on update.
        """
        self.data.system[self._id] = deepcopy(self.data.cache[self._id])
        self.data.user[self._id][ATTR_VERSION] = version
        self.data.save()

    @property
    def options(self):
        """Return options with local changes.

        For installed addons user options override the system defaults;
        otherwise the repository defaults are returned.
        """
        if self.is_installed:
            return {
                **self.data.system[self._id][ATTR_OPTIONS],
                **self.data.user[self._id][ATTR_OPTIONS],
            }
        return self.data.cache[self._id][ATTR_OPTIONS]

    @options.setter
    def options(self, value):
        """Store user addon options (deep-copied to avoid aliasing)."""
        self.data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
        self.data.save()

    @property
    def boot(self):
        """Return boot config with prio local settings."""
        if ATTR_BOOT in self.data.user.get(self._id, {}):
            return self.data.user[self._id][ATTR_BOOT]
        return self._mesh[ATTR_BOOT]

    @boot.setter
    def boot(self, value):
        """Store user boot options."""
        self.data.user[self._id][ATTR_BOOT] = value
        self.data.save()

    @property
    def name(self):
        """Return name of addon."""
        return self._mesh[ATTR_NAME]

    @property
    def description(self):
        """Return description of addon."""
        return self._mesh[ATTR_DESCRIPTON]

    @property
    def repository(self):
        """Return repository of addon."""
        return self._mesh[ATTR_REPOSITORY]

    @property
    def last_version(self):
        """Return version of addon.

        Prefers the repository cache; for detached addons falls back to
        the installed version.
        """
        if self._id in self.data.cache:
            return self.data.cache[self._id][ATTR_VERSION]
        return self.version_installed

    @property
    def startup(self):
        """Return startup type of addon."""
        return self._mesh.get(ATTR_STARTUP)

    @property
    def ports(self):
        """Return ports of addon."""
        return self._mesh.get(ATTR_PORTS)

    @property
    def network_mode(self):
        """Return network mode of addon ('host' or 'bridge')."""
        if self._mesh[ATTR_HOST_NETWORK]:
            return 'host'
        return 'bridge'

    @property
    def devices(self):
        """Return devices of addon."""
        return self._mesh.get(ATTR_DEVICES)

    @property
    def tmpfs(self):
        """Return tmpfs of addon."""
        return self._mesh.get(ATTR_TMPFS)

    @property
    def environment(self):
        """Return environment of addon."""
        return self._mesh.get(ATTR_ENVIRONMENT)

    @property
    def privileged(self):
        """Return list of privilege."""
        return self._mesh.get(ATTR_PRIVILEGED)

    @property
    def url(self):
        """Return url of addon."""
        return self._mesh.get(ATTR_URL)

    @property
    def supported_arch(self):
        """Return list of supported arch."""
        return self._mesh[ATTR_ARCH]

    @property
    def image(self):
        """Return image name of addon.

        Repository-provided images are templated with the host arch;
        otherwise a local-build image name is derived from repository,
        arch and slug.
        """
        addon_data = self._mesh

        # Repository with dockerhub images
        if ATTR_IMAGE in addon_data:
            return addon_data[ATTR_IMAGE].format(arch=self.config.arch)

        # local build
        return "{}/{}-addon-{}".format(
            addon_data[ATTR_REPOSITORY], self.config.arch,
            addon_data[ATTR_SLUG])

    @property
    def need_build(self):
        """Return True if this addon need a local build."""
        return ATTR_IMAGE not in self._mesh

    @property
    def map_volumes(self):
        """Return a dict of {volume: policy} from addon.

        Policy defaults to read-only ('ro') when the map entry does not
        specify one.
        """
        volumes = {}
        for volume in self._mesh[ATTR_MAP]:
            result = RE_VOLUME.match(volume)
            volumes[result.group(1)] = result.group(2) or 'ro'

        return volumes

    @property
    def path_data(self):
        """Return addon data path inside supervisor."""
        return Path(self.config.path_addons_data, self._id)

    @property
    def path_extern_data(self):
        """Return addon data path external for docker."""
        return PurePath(self.config.path_extern_addons_data, self._id)

    @property
    def path_addon_options(self):
        """Return path to addons options."""
        return Path(self.path_data, "options.json")

    @property
    def path_addon_location(self):
        """Return path to this addon."""
        return Path(self._mesh[ATTR_LOCATON])

    def write_options(self):
        """Return True if addon options is written to data.

        Validates the merged options against the addon schema before
        writing; logs and returns False on invalid options.
        """
        schema = self.schema
        options = self.options

        try:
            schema(options)
            return write_json_file(self.path_addon_options, options)
        except vol.Invalid as ex:
            _LOGGER.error("Addon %s have wrong options -> %s", self._id,
                          humanize_error(options, ex))

        return False

    @property
    def schema(self):
        """Create a schema for addon options.

        A bare boolean schema in the config means "any dict allowed".
        """
        raw_schema = self._mesh[ATTR_SCHEMA]

        if isinstance(raw_schema, bool):
            return vol.Schema(dict)
        return vol.Schema(vol.All(dict, validate_options(raw_schema)))

    async def install(self, version=None):
        """Install a addon.

        Returns False when the arch is unsupported, the addon is already
        installed, or the docker install fails.
        """
        if self.config.arch not in self.supported_arch:
            _LOGGER.error(
                "Addon %s not supported on %s", self._id, self.config.arch)
            return False

        if self.is_installed:
            _LOGGER.error("Addon %s is already installed", self._id)
            return False

        if not self.path_data.is_dir():
            _LOGGER.info(
                "Create Home-Assistant addon data folder %s", self.path_data)
            self.path_data.mkdir()

        version = version or self.last_version
        if not await self.addon_docker.install(version):
            return False

        self._set_install(version)
        return True

    async def uninstall(self):
        """Remove a addon.

        Also removes the addon's data folder on disk.
        """
        if not self.is_installed:
            _LOGGER.error("Addon %s is not installed", self._id)
            return False

        if not await self.addon_docker.remove():
            return False

        if self.path_data.is_dir():
            _LOGGER.info(
                "Remove Home-Assistant addon data folder %s", self.path_data)
            shutil.rmtree(str(self.path_data))

        self._set_uninstall()
        return True

    async def state(self):
        """Return running state of addon (STATE_NONE when not installed)."""
        if not self.is_installed:
            return STATE_NONE

        if await self.addon_docker.is_running():
            return STATE_STARTED
        return STATE_STOPPED

    async def start(self):
        """Set options and start addon."""
        if not self.is_installed:
            _LOGGER.error("Addon %s is not installed", self._id)
            return False

        return await self.addon_docker.run()

    async def stop(self):
        """Stop addon."""
        if not self.is_installed:
            _LOGGER.error("Addon %s is not installed", self._id)
            return False

        return await self.addon_docker.stop()

    async def update(self, version=None):
        """Update addon.

        Returns True without work when the target version is already
        installed.
        """
        if not self.is_installed:
            _LOGGER.error("Addon %s is not installed", self._id)
            return False

        version = version or self.last_version
        if version == self.version_installed:
            _LOGGER.warning(
                "Addon %s is already installed in %s", self._id, version)
            return True

        if not await self.addon_docker.update(version):
            return False

        self._set_update(version)
        return True

    async def restart(self):
        """Restart addon."""
        if not self.is_installed:
            _LOGGER.error("Addon %s is not installed", self._id)
            return False

        return await self.addon_docker.restart()

    async def logs(self):
        """Return addons log output."""
        if not self.is_installed:
            _LOGGER.error("Addon %s is not installed", self._id)
            return False

        return await self.addon_docker.logs()

View File

@ -1,12 +1,10 @@
{ {
"local": { "local": {
"slug": "local",
"name": "Local Add-Ons", "name": "Local Add-Ons",
"url": "https://home-assistant.io/hassio", "url": "https://home-assistant.io/hassio",
"maintainer": "By our self" "maintainer": "By our self"
}, },
"core": { "core": {
"slug": "core",
"name": "Built-in Add-Ons", "name": "Built-in Add-Ons",
"url": "https://home-assistant.io/addons", "url": "https://home-assistant.io/addons",
"maintainer": "Home Assistant authors" "maintainer": "Home Assistant authors"

View File

@ -2,7 +2,7 @@
import copy import copy
import logging import logging
import json import json
from pathlib import Path, PurePath from pathlib import Path
import re import re
import voluptuous as vol import voluptuous as vol
@ -10,29 +10,22 @@ from voluptuous.humanize import humanize_error
from .util import extract_hash_from_path from .util import extract_hash_from_path
from .validate import ( from .validate import (
validate_options, SCHEMA_ADDON_CONFIG, SCHEMA_REPOSITORY_CONFIG, SCHEMA_ADDON_CONFIG, SCHEMA_REPOSITORY_CONFIG, MAP_VOLUME)
MAP_VOLUME)
from ..const import ( from ..const import (
FILE_HASSIO_ADDONS, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
ATTR_STARTUP, ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, BOOT_AUTO, REPOSITORY_CORE, REPOSITORY_LOCAL)
ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY, ATTR_URL, ATTR_ARCH,
ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK,
ATTR_TMPFS, ATTR_PRIVILEGED)
from ..config import Config from ..config import Config
from ..tools import read_json_file, write_json_file from ..tools import read_json_file
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
SYSTEM = 'system' SYSTEM = 'system'
USER = 'user' USER = 'user'
REPOSITORY_CORE = 'core'
REPOSITORY_LOCAL = 'local'
RE_VOLUME = re.compile(MAP_VOLUME) RE_VOLUME = re.compile(MAP_VOLUME)
class AddonsData(Config): class Data(Config):
"""Hold data for addons inside HassIO.""" """Hold data for addons inside HassIO."""
def __init__(self, config): def __init__(self, config):
@ -41,9 +34,8 @@ class AddonsData(Config):
self.config = config self.config = config
self._system_data = self._data.get(SYSTEM, {}) self._system_data = self._data.get(SYSTEM, {})
self._user_data = self._data.get(USER, {}) self._user_data = self._data.get(USER, {})
self._addons_cache = {} self._cache_data = {}
self._repositories_data = {} self._repositories_data = {}
self.arch = None
def save(self): def save(self):
"""Store data to config file.""" """Store data to config file."""
@ -53,9 +45,29 @@ class AddonsData(Config):
} }
super().save() super().save()
def read_data_from_repositories(self): @property
def user(self):
"""Return local addon user data."""
return self._user_data
@property
def system(self):
"""Return local addon data."""
return self._system_data
@property
def cache(self):
"""Return addon data from cache/repositories."""
return self._cache_data
@property
def repositories(self):
"""Return addon data from repositories."""
return self._repositories_data
def reload(self):
"""Read data from addons repository.""" """Read data from addons repository."""
self._addons_cache = {} self._cache_data = {}
self._repositories_data = {} self._repositories_data = {}
# read core repository # read core repository
@ -74,17 +86,19 @@ class AddonsData(Config):
if repository_element.is_dir(): if repository_element.is_dir():
self._read_git_repository(repository_element) self._read_git_repository(repository_element)
# update local data
self._merge_config()
def _read_git_repository(self, path): def _read_git_repository(self, path):
"""Process a custom repository folder.""" """Process a custom repository folder."""
slug = extract_hash_from_path(path) slug = extract_hash_from_path(path)
repository_info = {ATTR_SLUG: slug}
# exists repository json # exists repository json
repository_file = Path(path, "repository.json") repository_file = Path(path, "repository.json")
try: try:
repository_info.update(SCHEMA_REPOSITORY_CONFIG( repository_info = SCHEMA_REPOSITORY_CONFIG(
read_json_file(repository_file) read_json_file(repository_file)
)) )
except OSError: except OSError:
_LOGGER.warning("Can't read repository information from %s", _LOGGER.warning("Can't read repository information from %s",
@ -115,7 +129,7 @@ class AddonsData(Config):
# store # store
addon_config[ATTR_REPOSITORY] = repository addon_config[ATTR_REPOSITORY] = repository
addon_config[ATTR_LOCATON] = str(addon.parent) addon_config[ATTR_LOCATON] = str(addon.parent)
self._addons_cache[addon_slug] = addon_config self._cache_data[addon_slug] = addon_config
except OSError: except OSError:
_LOGGER.warning("Can't read %s", addon) _LOGGER.warning("Can't read %s", addon)
@ -133,33 +147,27 @@ class AddonsData(Config):
_LOGGER.warning("Can't read built-in.json -> %s", err) _LOGGER.warning("Can't read built-in.json -> %s", err)
return return
# if core addons are available # core repository
for data in self._addons_cache.values():
if data[ATTR_REPOSITORY] == REPOSITORY_CORE:
self._repositories_data[REPOSITORY_CORE] = \ self._repositories_data[REPOSITORY_CORE] = \
builtin_data[REPOSITORY_CORE] builtin_data[REPOSITORY_CORE]
break
# if local addons are available # local repository
for data in self._addons_cache.values():
if data[ATTR_REPOSITORY] == REPOSITORY_LOCAL:
self._repositories_data[REPOSITORY_LOCAL] = \ self._repositories_data[REPOSITORY_LOCAL] = \
builtin_data[REPOSITORY_LOCAL] builtin_data[REPOSITORY_LOCAL]
break
def merge_update_config(self): def _merge_config(self):
"""Update local config if they have update. """Update local config if they have update.
It need to be the same version as the local version is. It need to be the same version as the local version is for merge.
""" """
have_change = False have_change = False
for addon in self.list_installed: for addon in set(self._system_data):
# detached # detached
if addon not in self._addons_cache: if addon not in self._cache_data:
continue continue
cache = self._addons_cache[addon] cache = self._cache_data[addon]
data = self._system_data[addon] data = self._system_data[addon]
if data[ATTR_VERSION] == cache[ATTR_VERSION]: if data[ATTR_VERSION] == cache[ATTR_VERSION]:
if data != cache: if data != cache:
@ -168,232 +176,3 @@ class AddonsData(Config):
if have_change: if have_change:
self.save() self.save()
@property
def list_installed(self):
"""Return a list of installed addons."""
return set(self._system_data)
@property
def list_all(self):
"""Return a dict of all addons."""
return set(self._system_data) | set(self._addons_cache)
def list_startup(self, start_type):
"""Get list of installed addon with need start by type."""
addon_list = set()
for addon in self._system_data.keys():
if self.get_boot(addon) != BOOT_AUTO:
continue
try:
if self._system_data[addon][ATTR_STARTUP] == start_type:
addon_list.add(addon)
except KeyError:
_LOGGER.warning("Orphaned addon detect %s", addon)
continue
return addon_list
@property
def list_detached(self):
"""Return local addons they not support from repo."""
addon_list = set()
for addon in self._system_data.keys():
if addon not in self._addons_cache:
addon_list.add(addon)
return addon_list
@property
def list_repositories(self):
"""Return list of addon repositories."""
return list(self._repositories_data.values())
def exists_addon(self, addon):
"""Return True if a addon exists."""
return addon in self._addons_cache or addon in self._system_data
def is_installed(self, addon):
"""Return True if a addon is installed."""
return addon in self._system_data
def version_installed(self, addon):
"""Return installed version."""
return self._user_data.get(addon, {}).get(ATTR_VERSION)
def set_addon_install(self, addon, version):
"""Set addon as installed."""
self._system_data[addon] = copy.deepcopy(self._addons_cache[addon])
self._user_data[addon] = {
ATTR_OPTIONS: {},
ATTR_VERSION: version,
}
self.save()
def set_addon_uninstall(self, addon):
"""Set addon as uninstalled."""
self._system_data.pop(addon, None)
self._user_data.pop(addon, None)
self.save()
def set_addon_update(self, addon, version):
"""Update version of addon."""
self._system_data[addon] = copy.deepcopy(self._addons_cache[addon])
self._user_data[addon][ATTR_VERSION] = version
self.save()
def set_options(self, addon, options):
"""Store user addon options."""
self._user_data[addon][ATTR_OPTIONS] = copy.deepcopy(options)
self.save()
def set_boot(self, addon, boot):
"""Store user boot options."""
self._user_data[addon][ATTR_BOOT] = boot
self.save()
def get_options(self, addon):
"""Return options with local changes."""
return {
**self._system_data[addon][ATTR_OPTIONS],
**self._user_data[addon][ATTR_OPTIONS],
}
def get_boot(self, addon):
"""Return boot config with prio local settings."""
if ATTR_BOOT in self._user_data[addon]:
return self._user_data[addon][ATTR_BOOT]
return self._system_data[addon][ATTR_BOOT]
def get_name(self, addon):
"""Return name of addon."""
if addon in self._addons_cache:
return self._addons_cache[addon][ATTR_NAME]
return self._system_data[addon][ATTR_NAME]
def get_description(self, addon):
"""Return description of addon."""
if addon in self._addons_cache:
return self._addons_cache[addon][ATTR_DESCRIPTON]
return self._system_data[addon][ATTR_DESCRIPTON]
def get_repository(self, addon):
"""Return repository of addon."""
if addon in self._addons_cache:
return self._addons_cache[addon][ATTR_REPOSITORY]
return self._system_data[addon][ATTR_REPOSITORY]
def get_last_version(self, addon):
"""Return version of addon."""
if addon in self._addons_cache:
return self._addons_cache[addon][ATTR_VERSION]
return self.version_installed(addon)
def get_ports(self, addon):
"""Return ports of addon."""
return self._system_data[addon].get(ATTR_PORTS)
def get_network_mode(self, addon):
"""Return network mode of addon."""
if self._system_data[addon][ATTR_HOST_NETWORK]:
return 'host'
return 'bridge'
def get_devices(self, addon):
"""Return devices of addon."""
return self._system_data[addon].get(ATTR_DEVICES)
def get_tmpfs(self, addon):
"""Return tmpfs of addon."""
return self._system_data[addon].get(ATTR_TMPFS)
def get_environment(self, addon):
"""Return environment of addon."""
return self._system_data[addon].get(ATTR_ENVIRONMENT)
def get_privileged(self, addon):
"""Return list of privilege."""
return self._system_data[addon].get(ATTR_PRIVILEGED)
def get_url(self, addon):
"""Return url of addon."""
if addon in self._addons_cache:
return self._addons_cache[addon].get(ATTR_URL)
return self._system_data[addon].get(ATTR_URL)
def get_arch(self, addon):
"""Return list of supported arch."""
if addon in self._addons_cache:
return self._addons_cache[addon][ATTR_ARCH]
return self._system_data[addon][ATTR_ARCH]
def get_image(self, addon):
"""Return image name of addon."""
addon_data = self._system_data.get(
addon, self._addons_cache.get(addon)
)
# Repository with dockerhub images
if ATTR_IMAGE in addon_data:
return addon_data[ATTR_IMAGE].format(arch=self.arch)
# local build
return "{}/{}-addon-{}".format(
addon_data[ATTR_REPOSITORY], self.arch, addon_data[ATTR_SLUG])
def need_build(self, addon):
"""Return True if this addon need a local build."""
addon_data = self._system_data.get(
addon, self._addons_cache.get(addon)
)
return ATTR_IMAGE not in addon_data
def map_volumes(self, addon):
"""Return a dict of {volume: policy} from addon."""
volumes = {}
for volume in self._system_data[addon][ATTR_MAP]:
result = RE_VOLUME.match(volume)
volumes[result.group(1)] = result.group(2) or 'ro'
return volumes
def path_data(self, addon):
"""Return addon data path inside supervisor."""
return Path(self.config.path_addons_data, addon)
def path_extern_data(self, addon):
"""Return addon data path external for docker."""
return PurePath(self.config.path_extern_addons_data, addon)
def path_addon_options(self, addon):
"""Return path to addons options."""
return Path(self.path_data(addon), "options.json")
def path_addon_location(self, addon):
"""Return path to this addon."""
return Path(self._addons_cache[addon][ATTR_LOCATON])
def write_addon_options(self, addon):
    """Validate and persist addon options to disk.

    Return True when the options pass schema validation and were
    handed to write_json_file; False when validation fails.
    """
    options = self.get_options(addon)
    try:
        # validate before writing anything to disk
        self.get_schema(addon)(options)
    except vol.Invalid as ex:
        _LOGGER.error("Addon %s have wrong options -> %s", addon,
                      humanize_error(options, ex))
        return False

    return write_json_file(self.path_addon_options(addon), options)
def get_schema(self, addon):
    """Create a voluptuous schema for an addon's options."""
    schema_def = self._system_data[addon][ATTR_SCHEMA]

    if isinstance(schema_def, bool):
        # boolean schema definition -> accept any dict of options
        return vol.Schema(dict)
    return vol.Schema(vol.All(dict, validate_options(schema_def)))

View File

@ -12,7 +12,7 @@ from ..const import URL_HASSIO_ADDONS
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
class AddonsRepo(object): class GitRepo(object):
"""Manage addons git repo.""" """Manage addons git repo."""
def __init__(self, config, loop, path, url): def __init__(self, config, loop, path, url):
@ -77,7 +77,7 @@ class AddonsRepo(object):
return True return True
class AddonsRepoHassIO(AddonsRepo): class GitRepoHassIO(GitRepo):
"""HassIO addons repository.""" """HassIO addons repository."""
def __init__(self, config, loop): def __init__(self, config, loop):
@ -86,7 +86,7 @@ class AddonsRepoHassIO(AddonsRepo):
config, loop, config.path_addons_core, URL_HASSIO_ADDONS) config, loop, config.path_addons_core, URL_HASSIO_ADDONS)
class AddonsRepoCustom(AddonsRepo): class GitRepoCustom(GitRepo):
"""Custom addons repository.""" """Custom addons repository."""
def __init__(self, config, loop, url): def __init__(self, config, loop, url):

View File

@ -0,0 +1,71 @@
"""Represent a HassIO repository."""
from .git import GitRepoHassIO, GitRepoCustom
from .util import get_hash_from_repository
from ..const import (
REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER)
UNKNOWN = 'unknown'
class Repository(object):
"""Repository in HassIO."""
def __init__(self, config, loop, data, repository):
"""Initialize repository object."""
self.data = data
self.source = None
self.git = None
if repository == REPOSITORY_LOCAL:
self._id = repository
elif repository == REPOSITORY_CORE:
self._id = repository
self.git = GitRepoHassIO(config, loop)
else:
self._id = get_hash_from_repository(repository)
self.git = GitRepoCustom(config, loop, repository)
self.source = repository
@property
def _mesh(self):
"""Return data struct repository."""
return self.data.repositories.get(self._id, {})
@property
def slug(self):
"""Return slug of repository."""
return self._id
@property
def name(self):
"""Return name of repository."""
return self._mesh.get(ATTR_NAME, UNKNOWN)
@property
def url(self):
"""Return url of repository."""
return self._mesh.get(ATTR_URL, self.source)
@property
def maintainer(self):
"""Return url of repository."""
return self._mesh.get(ATTR_MAINTAINER, UNKNOWN)
async def load(self):
"""Load addon repository."""
if self.git:
return await self.git.load()
return True
async def update(self):
"""Update addon repository."""
if self.git:
return await self.git.pull()
return True
def remove(self):
"""Remove addon repository."""
if self._id in (REPOSITORY_CORE, REPOSITORY_LOCAL):
raise RuntimeError("Can't remove built-in repositories!")
self.git.remove()

View File

@ -19,8 +19,3 @@ def extract_hash_from_path(path):
if not RE_SHA1.match(repo_dir): if not RE_SHA1.match(repo_dir):
return get_hash_from_repository(repo_dir) return get_hash_from_repository(repo_dir)
return repo_dir return repo_dir
def create_hash_index_list(name_list):
"""Create a dict with hash from repositories list."""
return {get_hash_from_repository(repo): repo for repo in name_list}

View File

@ -43,10 +43,12 @@ class RestAPI(object):
self.webapp.router.add_get('/network/info', api_net.info) self.webapp.router.add_get('/network/info', api_net.info)
self.webapp.router.add_post('/network/options', api_net.options) self.webapp.router.add_post('/network/options', api_net.options)
def register_supervisor(self, supervisor, addons, host_control): def register_supervisor(self, supervisor, addons, host_control,
websession):
"""Register supervisor function.""" """Register supervisor function."""
api_supervisor = APISupervisor( api_supervisor = APISupervisor(
self.config, self.loop, supervisor, addons, host_control) self.config, self.loop, supervisor, addons, host_control,
websession)
self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping) self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
self.webapp.router.add_get('/supervisor/info', api_supervisor.info) self.webapp.router.add_get('/supervisor/info', api_supervisor.info)

View File

@ -9,7 +9,7 @@ from .util import api_process, api_process_raw, api_validate
from ..const import ( from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY, ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
ATTR_BUILD, STATE_STOPPED, STATE_STARTED, BOOT_AUTO, BOOT_MANUAL) ATTR_BUILD, BOOT_AUTO, BOOT_MANUAL)
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -33,12 +33,11 @@ class APIAddons(object):
def _extract_addon(self, request, check_installed=True): def _extract_addon(self, request, check_installed=True):
"""Return addon and if not exists trow a exception.""" """Return addon and if not exists trow a exception."""
addon = request.match_info.get('addon') addon = self.addons.get(request.match_info.get('addon'))
if not addon:
# check data
if not self.addons.exists_addon(addon):
raise RuntimeError("Addon not exists") raise RuntimeError("Addon not exists")
if check_installed and not self.addons.is_installed(addon):
if check_installed and not addon.is_installed:
raise RuntimeError("Addon is not installed") raise RuntimeError("Addon is not installed")
return addon return addon
@ -46,38 +45,37 @@ class APIAddons(object):
@api_process @api_process
async def info(self, request): async def info(self, request):
"""Return addon information.""" """Return addon information."""
addon = self._extract_addon(request) addon = self._extract_addon(request, check_installed=False)
return { return {
ATTR_NAME: self.addons.get_name(addon), ATTR_NAME: addon.name,
ATTR_DESCRIPTON: self.addons.get_description(addon), ATTR_DESCRIPTON: addon.description,
ATTR_VERSION: self.addons.version_installed(addon), ATTR_VERSION: addon.version_installed,
ATTR_REPOSITORY: self.addons.get_repository(addon), ATTR_REPOSITORY: addon.repository,
ATTR_LAST_VERSION: self.addons.get_last_version(addon), ATTR_LAST_VERSION: addon.last_version,
ATTR_STATE: await self.addons.state(addon), ATTR_STATE: await addon.state(),
ATTR_BOOT: self.addons.get_boot(addon), ATTR_BOOT: addon.boot,
ATTR_OPTIONS: self.addons.get_options(addon), ATTR_OPTIONS: addon.options,
ATTR_URL: self.addons.get_url(addon), ATTR_URL: addon.url,
ATTR_DETACHED: addon in self.addons.list_detached, ATTR_DETACHED: addon.is_detached,
ATTR_BUILD: self.addons.need_build(addon), ATTR_BUILD: addon.need_build,
} }
@api_process @api_process
async def options(self, request): async def options(self, request):
"""Store user options for addon.""" """Store user options for addon."""
addon = self._extract_addon(request) addon = self._extract_addon(request)
options_schema = self.addons.get_schema(addon)
addon_schema = SCHEMA_OPTIONS.extend({ addon_schema = SCHEMA_OPTIONS.extend({
vol.Optional(ATTR_OPTIONS): options_schema, vol.Optional(ATTR_OPTIONS): addon.schema,
}) })
body = await api_validate(addon_schema, request) body = await api_validate(addon_schema, request)
if ATTR_OPTIONS in body: if ATTR_OPTIONS in body:
self.addons.set_options(addon, body[ATTR_OPTIONS]) addon.options = body[ATTR_OPTIONS]
if ATTR_BOOT in body: if ATTR_BOOT in body:
self.addons.set_boot(addon, body[ATTR_BOOT]) addon.boot = body[ATTR_BOOT]
return True return True
@ -86,77 +84,55 @@ class APIAddons(object):
"""Install addon.""" """Install addon."""
body = await api_validate(SCHEMA_VERSION, request) body = await api_validate(SCHEMA_VERSION, request)
addon = self._extract_addon(request, check_installed=False) addon = self._extract_addon(request, check_installed=False)
version = body.get( version = body.get(ATTR_VERSION)
ATTR_VERSION, self.addons.get_last_version(addon))
# check if arch supported
if self.addons.arch not in self.addons.get_arch(addon):
raise RuntimeError(
"Addon is not supported on {}".format(self.addons.arch))
return await asyncio.shield( return await asyncio.shield(
self.addons.install(addon, version), loop=self.loop) addon.install(version=version), loop=self.loop)
@api_process @api_process
async def uninstall(self, request): async def uninstall(self, request):
"""Uninstall addon.""" """Uninstall addon."""
addon = self._extract_addon(request) addon = self._extract_addon(request)
return await asyncio.shield(addon.uninstall(), loop=self.loop)
return await asyncio.shield(
self.addons.uninstall(addon), loop=self.loop)
@api_process @api_process
async def start(self, request): async def start(self, request):
"""Start addon.""" """Start addon."""
addon = self._extract_addon(request) addon = self._extract_addon(request)
if await self.addons.state(addon) == STATE_STARTED: # check options
raise RuntimeError("Addon is already running") options = addon.options
# validate options
try: try:
schema = self.addons.get_schema(addon) addon.schema(options)
options = self.addons.get_options(addon)
schema(options)
except vol.Invalid as ex: except vol.Invalid as ex:
raise RuntimeError(humanize_error(options, ex)) from None raise RuntimeError(humanize_error(options, ex)) from None
return await asyncio.shield( return await asyncio.shield(addon.start(), loop=self.loop)
self.addons.start(addon), loop=self.loop)
@api_process @api_process
async def stop(self, request): async def stop(self, request):
"""Stop addon.""" """Stop addon."""
addon = self._extract_addon(request) addon = self._extract_addon(request)
return await asyncio.shield(addon.stop(), loop=self.loop)
if await self.addons.state(addon) == STATE_STOPPED:
raise RuntimeError("Addon is already stoped")
return await asyncio.shield(
self.addons.stop(addon), loop=self.loop)
@api_process @api_process
async def update(self, request): async def update(self, request):
"""Update addon.""" """Update addon."""
body = await api_validate(SCHEMA_VERSION, request) body = await api_validate(SCHEMA_VERSION, request)
addon = self._extract_addon(request) addon = self._extract_addon(request)
version = body.get( version = body.get(ATTR_VERSION)
ATTR_VERSION, self.addons.get_last_version(addon))
if version == self.addons.version_installed(addon):
raise RuntimeError("Version is already in use")
return await asyncio.shield( return await asyncio.shield(
self.addons.update(addon, version), loop=self.loop) addon.update(version=version), loop=self.loop)
@api_process @api_process
async def restart(self, request): async def restart(self, request):
"""Restart addon.""" """Restart addon."""
addon = self._extract_addon(request) addon = self._extract_addon(request)
return await asyncio.shield(self.addons.restart(addon), loop=self.loop) return await asyncio.shield(addon.restart(), loop=self.loop)
@api_process_raw @api_process_raw
def logs(self, request): def logs(self, request):
"""Return logs from addon.""" """Return logs from addon."""
addon = self._extract_addon(request) addon = self._extract_addon(request)
return self.addons.logs(addon) return addon.logs()

View File

@ -5,7 +5,6 @@ import logging
import voluptuous as vol import voluptuous as vol
from .util import api_process, api_process_raw, api_validate from .util import api_process, api_process_raw, api_validate
from ..addons.util import create_hash_index_list
from ..const import ( from ..const import (
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL,
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_REPOSITORIES, HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_REPOSITORIES,
@ -31,36 +30,34 @@ SCHEMA_VERSION = vol.Schema({
class APISupervisor(object): class APISupervisor(object):
"""Handle rest api for supervisor functions.""" """Handle rest api for supervisor functions."""
def __init__(self, config, loop, supervisor, addons, host_control): def __init__(self, config, loop, supervisor, addons, host_control,
websession):
"""Initialize supervisor rest api part.""" """Initialize supervisor rest api part."""
self.config = config self.config = config
self.loop = loop self.loop = loop
self.supervisor = supervisor self.supervisor = supervisor
self.addons = addons self.addons = addons
self.host_control = host_control self.host_control = host_control
self.websession = websession
def _addons_list(self, only_installed=False): def _addons_list(self, only_installed=False):
"""Return a list of addons.""" """Return a list of addons."""
detached = self.addons.list_detached
if only_installed:
addons = self.addons.list_installed
else:
addons = self.addons.list_all
data = [] data = []
for addon in addons: for addon in self.addons.list_addons:
if only_installed and not addon.is_installed:
continue
data.append({ data.append({
ATTR_NAME: self.addons.get_name(addon), ATTR_NAME: addon.name,
ATTR_SLUG: addon, ATTR_SLUG: addon.slug,
ATTR_DESCRIPTON: self.addons.get_description(addon), ATTR_DESCRIPTON: addon.description,
ATTR_VERSION: self.addons.get_last_version(addon), ATTR_VERSION: addon.last_version,
ATTR_INSTALLED: self.addons.version_installed(addon), ATTR_INSTALLED: addon.version_installed,
ATTR_ARCH: self.addons.get_arch(addon), ATTR_ARCH: addon.supported_arch,
ATTR_DETACHED: addon in detached, ATTR_DETACHED: addon.is_detached,
ATTR_REPOSITORY: self.addons.get_repository(addon), ATTR_REPOSITORY: addon.repository,
ATTR_BUILD: self.addons.need_build(addon), ATTR_BUILD: addon.need_build,
ATTR_URL: self.addons.get_url(addon), ATTR_URL: addon.url,
}) })
return data return data
@ -68,15 +65,13 @@ class APISupervisor(object):
def _repositories_list(self): def _repositories_list(self):
"""Return a list of addons repositories.""" """Return a list of addons repositories."""
data = [] data = []
list_id = create_hash_index_list(self.config.addons_repositories)
for repository in self.addons.list_repositories: for repository in self.addons.list_repositories:
data.append({ data.append({
ATTR_SLUG: repository[ATTR_SLUG], ATTR_SLUG: repository.slug,
ATTR_NAME: repository[ATTR_NAME], ATTR_NAME: repository.name,
ATTR_SOURCE: list_id.get(repository[ATTR_SLUG]), ATTR_SOURCE: repository.source,
ATTR_URL: repository.get(ATTR_URL), ATTR_URL: repository.url,
ATTR_MAINTAINER: repository.get(ATTR_MAINTAINER), ATTR_MAINTAINER: repository.maintainer,
}) })
return data return data
@ -93,7 +88,7 @@ class APISupervisor(object):
ATTR_VERSION: HASSIO_VERSION, ATTR_VERSION: HASSIO_VERSION,
ATTR_LAST_VERSION: self.config.last_hassio, ATTR_LAST_VERSION: self.config.last_hassio,
ATTR_BETA_CHANNEL: self.config.upstream_beta, ATTR_BETA_CHANNEL: self.config.upstream_beta,
ATTR_ARCH: self.addons.arch, ATTR_ARCH: self.config.arch,
ATTR_TIMEZONE: self.config.timezone, ATTR_TIMEZONE: self.config.timezone,
ATTR_ADDONS: self._addons_list(only_installed=True), ATTR_ADDONS: self._addons_list(only_installed=True),
ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories, ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
@ -120,21 +115,7 @@ class APISupervisor(object):
if ATTR_ADDONS_REPOSITORIES in body: if ATTR_ADDONS_REPOSITORIES in body:
new = set(body[ATTR_ADDONS_REPOSITORIES]) new = set(body[ATTR_ADDONS_REPOSITORIES])
old = set(self.config.addons_repositories) await asyncio.shield(self.addons.load_repositories(new))
# add new repositories
tasks = [self.addons.add_git_repository(url) for url in
set(new - old)]
if tasks:
await asyncio.shield(
asyncio.wait(tasks, loop=self.loop), loop=self.loop)
# remove old repositories
for url in set(old - new):
self.addons.drop_git_repository(url)
# read repository
self.addons.read_data_from_repositories()
return True return True
@ -154,7 +135,8 @@ class APISupervisor(object):
async def reload(self, request): async def reload(self, request):
"""Reload addons, config ect.""" """Reload addons, config ect."""
tasks = [ tasks = [
self.addons.reload(), self.config.fetch_update_infos(), self.addons.reload(),
self.config.fetch_update_infos(self.websession),
self.host_control.load() self.host_control.load()
] ]
results, _ = await asyncio.shield( results, _ = await asyncio.shield(

View File

@ -81,6 +81,8 @@ def api_process_raw(method):
def api_return_error(message=None): def api_return_error(message=None):
"""Return a API error message.""" """Return a API error message."""
_LOGGER.error(message)
return web.json_response({ return web.json_response({
JSON_RESULT: RESULT_ERROR, JSON_RESULT: RESULT_ERROR,
JSON_MESSAGE: message, JSON_MESSAGE: message,

View File

@ -2,6 +2,7 @@
import logging import logging
import os import os
import signal import signal
from pathlib import Path
from colorlog import ColoredFormatter from colorlog import ColoredFormatter
@ -11,9 +12,9 @@ from .config import CoreConfig
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
def initialize_system_data(websession): def initialize_system_data():
"""Setup default config and create folders.""" """Setup default config and create folders."""
config = CoreConfig(websession) config = CoreConfig()
# homeassistant config folder # homeassistant config folder
if not config.path_config.is_dir(): if not config.path_config.is_dir():
@ -42,10 +43,10 @@ def initialize_system_data(websession):
config.path_addons_git) config.path_addons_git)
config.path_addons_git.mkdir(parents=True) config.path_addons_git.mkdir(parents=True)
if not config.path_addons_build.is_dir(): # hassio tmp folder
_LOGGER.info("Create Home-Assistant addon build folder %s", if not config.path_tmp.is_dir():
config.path_addons_build) _LOGGER.info("Create hassio temp folder %s", config.path_tmp)
config.path_addons_build.mkdir(parents=True) config.path_tmp.mkdir(parents=True)
# hassio backup folder # hassio backup folder
if not config.path_backup.is_dir(): if not config.path_backup.is_dir():
@ -60,6 +61,18 @@ def initialize_system_data(websession):
return config return config
def migrate_system_env(config):
"""Cleanup some stuff after update."""
# hass.io 0.37 -> 0.38
old_build = Path(config.path_hassio, "addons/build")
if old_build.is_dir():
try:
old_build.rmdir()
except OSError:
_LOGGER.warning("Can't cleanup old addons build dir.")
def initialize_logging(): def initialize_logging():
"""Setup the logging.""" """Setup the logging."""
logging.basicConfig(level=logging.INFO) logging.basicConfig(level=logging.INFO)

View File

@ -8,7 +8,7 @@ from pathlib import Path, PurePath
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error from voluptuous.humanize import humanize_error
from .const import FILE_HASSIO_CONFIG, HASSIO_SHARE from .const import FILE_HASSIO_CONFIG, HASSIO_DATA
from .tools import ( from .tools import (
fetch_last_versions, write_json_file, read_json_file, validate_timezone) fetch_last_versions, write_json_file, read_json_file, validate_timezone)
@ -27,12 +27,11 @@ ADDONS_CORE = PurePath("addons/core")
ADDONS_LOCAL = PurePath("addons/local") ADDONS_LOCAL = PurePath("addons/local")
ADDONS_GIT = PurePath("addons/git") ADDONS_GIT = PurePath("addons/git")
ADDONS_DATA = PurePath("addons/data") ADDONS_DATA = PurePath("addons/data")
ADDONS_BUILD = PurePath("addons/build")
ADDONS_CUSTOM_LIST = 'addons_custom_list' ADDONS_CUSTOM_LIST = 'addons_custom_list'
BACKUP_DATA = PurePath("backup") BACKUP_DATA = PurePath("backup")
SHARE_DATA = PurePath("share") SHARE_DATA = PurePath("share")
TMP_DATA = PurePath("tmp")
UPSTREAM_BETA = 'upstream_beta' UPSTREAM_BETA = 'upstream_beta'
API_ENDPOINT = 'api_endpoint' API_ENDPOINT = 'api_endpoint'
@ -88,9 +87,9 @@ class Config(object):
class CoreConfig(Config): class CoreConfig(Config):
"""Hold all core config data.""" """Hold all core config data."""
def __init__(self, websession): def __init__(self):
"""Initialize config object.""" """Initialize config object."""
self.websession = websession self.arch = None
super().__init__(FILE_HASSIO_CONFIG) super().__init__(FILE_HASSIO_CONFIG)
@ -102,10 +101,9 @@ class CoreConfig(Config):
_LOGGER.warning( _LOGGER.warning(
"Invalid config %s", humanize_error(self._data, ex)) "Invalid config %s", humanize_error(self._data, ex))
async def fetch_update_infos(self): async def fetch_update_infos(self, websession):
"""Read current versions from web.""" """Read current versions from web."""
last = await fetch_last_versions( last = await fetch_last_versions(websession, beta=self.upstream_beta)
self.websession, beta=self.upstream_beta)
if last: if last:
self._data.update({ self._data.update({
@ -175,6 +173,11 @@ class CoreConfig(Config):
"""Actual version of hassio.""" """Actual version of hassio."""
return self._data.get(HASSIO_LAST) return self._data.get(HASSIO_LAST)
@property
def path_hassio(self):
"""Return hassio data path."""
return HASSIO_DATA
@property @property
def path_extern_hassio(self): def path_extern_hassio(self):
"""Return hassio data path extern for docker.""" """Return hassio data path extern for docker."""
@ -188,7 +191,7 @@ class CoreConfig(Config):
@property @property
def path_config(self): def path_config(self):
"""Return config path inside supervisor.""" """Return config path inside supervisor."""
return Path(HASSIO_SHARE, HOMEASSISTANT_CONFIG) return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)
@property @property
def path_extern_ssl(self): def path_extern_ssl(self):
@ -198,22 +201,22 @@ class CoreConfig(Config):
@property @property
def path_ssl(self): def path_ssl(self):
"""Return SSL path inside supervisor.""" """Return SSL path inside supervisor."""
return Path(HASSIO_SHARE, HASSIO_SSL) return Path(HASSIO_DATA, HASSIO_SSL)
@property @property
def path_addons_core(self): def path_addons_core(self):
"""Return git path for core addons.""" """Return git path for core addons."""
return Path(HASSIO_SHARE, ADDONS_CORE) return Path(HASSIO_DATA, ADDONS_CORE)
@property @property
def path_addons_git(self): def path_addons_git(self):
"""Return path for git addons.""" """Return path for git addons."""
return Path(HASSIO_SHARE, ADDONS_GIT) return Path(HASSIO_DATA, ADDONS_GIT)
@property @property
def path_addons_local(self): def path_addons_local(self):
"""Return path for customs addons.""" """Return path for customs addons."""
return Path(HASSIO_SHARE, ADDONS_LOCAL) return Path(HASSIO_DATA, ADDONS_LOCAL)
@property @property
def path_extern_addons_local(self): def path_extern_addons_local(self):
@ -223,7 +226,7 @@ class CoreConfig(Config):
@property @property
def path_addons_data(self): def path_addons_data(self):
"""Return root addon data folder.""" """Return root addon data folder."""
return Path(HASSIO_SHARE, ADDONS_DATA) return Path(HASSIO_DATA, ADDONS_DATA)
@property @property
def path_extern_addons_data(self): def path_extern_addons_data(self):
@ -231,14 +234,14 @@ class CoreConfig(Config):
return PurePath(self.path_extern_hassio, ADDONS_DATA) return PurePath(self.path_extern_hassio, ADDONS_DATA)
@property @property
def path_addons_build(self): def path_tmp(self):
"""Return root addon build folder.""" """Return hass.io temp folder."""
return Path(HASSIO_SHARE, ADDONS_BUILD) return Path(HASSIO_DATA, TMP_DATA)
@property @property
def path_backup(self): def path_backup(self):
"""Return root backup data folder.""" """Return root backup data folder."""
return Path(HASSIO_SHARE, BACKUP_DATA) return Path(HASSIO_DATA, BACKUP_DATA)
@property @property
def path_extern_backup(self): def path_extern_backup(self):
@ -248,7 +251,7 @@ class CoreConfig(Config):
@property @property
def path_share(self): def path_share(self):
"""Return root share data folder.""" """Return root share data folder."""
return Path(HASSIO_SHARE, SHARE_DATA) return Path(HASSIO_DATA, SHARE_DATA)
@property @property
def path_extern_share(self): def path_extern_share(self):

View File

@ -1,7 +1,7 @@
"""Const file for HassIO.""" """Const file for HassIO."""
from pathlib import Path from pathlib import Path
HASSIO_VERSION = '0.37' HASSIO_VERSION = '0.38'
URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/' URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
'hassio/master/version.json') 'hassio/master/version.json')
@ -10,7 +10,7 @@ URL_HASSIO_VERSION_BETA = ('https://raw.githubusercontent.com/home-assistant/'
URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons' URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
HASSIO_SHARE = Path("/data") HASSIO_DATA = Path("/data")
RUN_UPDATE_INFO_TASKS = 28800 RUN_UPDATE_INFO_TASKS = 28800
RUN_UPDATE_SUPERVISOR_TASKS = 29100 RUN_UPDATE_SUPERVISOR_TASKS = 29100
@ -20,8 +20,8 @@ RUN_CLEANUP_API_SESSIONS = 900
RESTART_EXIT_CODE = 100 RESTART_EXIT_CODE = 100
FILE_HASSIO_ADDONS = Path(HASSIO_SHARE, "addons.json") FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
FILE_HASSIO_CONFIG = Path(HASSIO_SHARE, "config.json") FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
SOCKET_DOCKER = Path("/var/run/docker.sock") SOCKET_DOCKER = Path("/var/run/docker.sock")
SOCKET_HC = Path("/var/run/hassio-hc.sock") SOCKET_HC = Path("/var/run/hassio-hc.sock")
@ -92,6 +92,7 @@ BOOT_MANUAL = 'manual'
STATE_STARTED = 'started' STATE_STARTED = 'started'
STATE_STOPPED = 'stopped' STATE_STOPPED = 'stopped'
STATE_NONE = 'none'
MAP_CONFIG = 'config' MAP_CONFIG = 'config'
MAP_SSL = 'ssl' MAP_SSL = 'ssl'
@ -103,3 +104,6 @@ ARCH_ARMHF = 'armhf'
ARCH_AARCH64 = 'aarch64' ARCH_AARCH64 = 'aarch64'
ARCH_AMD64 = 'amd64' ARCH_AMD64 = 'amd64'
ARCH_I386 = 'i386' ARCH_I386 = 'i386'
REPOSITORY_CORE = 'core'
REPOSITORY_LOCAL = 'local'

View File

@ -5,7 +5,6 @@ import logging
import aiohttp import aiohttp
import docker import docker
from . import bootstrap
from .addons import AddonManager from .addons import AddonManager
from .api import RestAPI from .api import RestAPI
from .host_control import HostControl from .host_control import HostControl
@ -20,7 +19,7 @@ from .dock.supervisor import DockerSupervisor
from .tasks import ( from .tasks import (
hassio_update, homeassistant_watchdog, homeassistant_setup, hassio_update, homeassistant_watchdog, homeassistant_setup,
api_sessions_cleanup) api_sessions_cleanup)
from .tools import get_arch_from_image, get_local_ip, fetch_timezone from .tools import get_local_ip, fetch_timezone
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -28,28 +27,26 @@ _LOGGER = logging.getLogger(__name__)
class HassIO(object): class HassIO(object):
"""Main object of hassio.""" """Main object of hassio."""
def __init__(self, loop): def __init__(self, loop, config):
"""Initialize hassio object.""" """Initialize hassio object."""
self.exit_code = 0 self.exit_code = 0
self.loop = loop self.loop = loop
self.websession = aiohttp.ClientSession(loop=self.loop) self.config = config
self.config = bootstrap.initialize_system_data(self.websession) self.websession = aiohttp.ClientSession(loop=loop)
self.scheduler = Scheduler(self.loop) self.scheduler = Scheduler(loop)
self.api = RestAPI(self.config, self.loop) self.api = RestAPI(config, loop)
self.dock = docker.DockerClient( self.dock = docker.DockerClient(
base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto') base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto')
# init basic docker container # init basic docker container
self.supervisor = DockerSupervisor( self.supervisor = DockerSupervisor(config, loop, self.dock, self.stop)
self.config, self.loop, self.dock, self.stop) self.homeassistant = DockerHomeAssistant(config, loop, self.dock)
self.homeassistant = DockerHomeAssistant(
self.config, self.loop, self.dock)
# init HostControl # init HostControl
self.host_control = HostControl(self.loop) self.host_control = HostControl(loop)
# init addon system # init addon system
self.addons = AddonManager(self.config, self.loop, self.dock) self.addons = AddonManager(config, loop, self.dock)
async def setup(self): async def setup(self):
"""Setup HassIO orchestration.""" """Setup HassIO orchestration."""
@ -58,6 +55,9 @@ class HassIO(object):
_LOGGER.fatal("Can't attach to supervisor docker container!") _LOGGER.fatal("Can't attach to supervisor docker container!")
await self.supervisor.cleanup() await self.supervisor.cleanup()
# set running arch
self.config.arch = self.supervisor.arch
# set api endpoint # set api endpoint
self.config.api_endpoint = await get_local_ip(self.loop) self.config.api_endpoint = await get_local_ip(self.loop)
@ -70,13 +70,13 @@ class HassIO(object):
# schedule update info tasks # schedule update info tasks
self.scheduler.register_task( self.scheduler.register_task(
self.host_control.load, RUN_UPDATE_INFO_TASKS) self.host_control.load, RUN_UPDATE_INFO_TASKS)
# rest api views # rest api views
self.api.register_host(self.host_control) self.api.register_host(self.host_control)
self.api.register_network(self.host_control) self.api.register_network(self.host_control)
self.api.register_supervisor( self.api.register_supervisor(
self.supervisor, self.addons, self.host_control) self.supervisor, self.addons, self.host_control, self.websession)
self.api.register_homeassistant(self.homeassistant) self.api.register_homeassistant(self.homeassistant)
self.api.register_addons(self.addons) self.api.register_addons(self.addons)
self.api.register_security() self.api.register_security()
@ -87,22 +87,16 @@ class HassIO(object):
api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS, api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS,
now=True) now=True)
# schedule update info tasks
self.scheduler.register_task(
self.config.fetch_update_infos, RUN_UPDATE_INFO_TASKS,
now=True)
# first start of supervisor? # first start of supervisor?
if not await self.homeassistant.exists(): if not await self.homeassistant.exists():
_LOGGER.info("No HomeAssistant docker found.") _LOGGER.info("No HomeAssistant docker found.")
await homeassistant_setup( await homeassistant_setup(
self.config, self.loop, self.homeassistant) self.config, self.loop, self.homeassistant, self.websession)
else: else:
await self.homeassistant.attach() await self.homeassistant.attach()
# Load addons # Load addons
arch = get_arch_from_image(self.supervisor.image) await self.addons.prepare()
await self.addons.prepare(arch)
# schedule addon update task # schedule addon update task
self.scheduler.register_task( self.scheduler.register_task(
@ -110,7 +104,7 @@ class HassIO(object):
# schedule self update task # schedule self update task
self.scheduler.register_task( self.scheduler.register_task(
hassio_update(self.config, self.supervisor), hassio_update(self.config, self.supervisor, self.websession),
RUN_UPDATE_SUPERVISOR_TASKS) RUN_UPDATE_SUPERVISOR_TASKS)
# start addon mark as initialize # start addon mark as initialize
@ -118,6 +112,13 @@ class HassIO(object):
async def start(self): async def start(self):
"""Start HassIO orchestration.""" """Start HassIO orchestration."""
# on release channel, try update itself
# on beta channel, only read new versions
await asyncio.wait(
[hassio_update(self.config, self.supervisor, self.websession)()],
loop=self.loop
)
# start api # start api
await self.api.start() await self.api.start()
_LOGGER.info("Start hassio api on %s", self.config.api_endpoint) _LOGGER.info("Start hassio api on %s", self.config.api_endpoint)
@ -148,9 +149,9 @@ class HassIO(object):
# don't process scheduler anymore # don't process scheduler anymore
self.scheduler.stop() self.scheduler.stop()
# process stop task pararell # process stop tasks
tasks = [self.websession.close(), self.api.stop()] self.websession.close()
await asyncio.wait(tasks, loop=self.loop) await self.api.stop()
self.exit_code = exit_code self.exit_code = exit_code
self.loop.stop() self.loop.stop()

View File

@ -5,7 +5,7 @@ import logging
import docker import docker
from ..const import LABEL_VERSION from ..const import LABEL_VERSION, LABEL_ARCH
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -20,6 +20,7 @@ class DockerBase(object):
self.dock = dock self.dock = dock
self.image = image self.image = image
self.version = None self.version = None
self.arch = None
self._lock = asyncio.Lock(loop=loop) self._lock = asyncio.Lock(loop=loop)
@property @property
@ -38,13 +39,18 @@ class DockerBase(object):
if not self.image: if not self.image:
self.image = metadata['Config']['Image'] self.image = metadata['Config']['Image']
# read metadata # read version
need_version = force or not self.version need_version = force or not self.version
if need_version and LABEL_VERSION in metadata['Config']['Labels']: if need_version and LABEL_VERSION in metadata['Config']['Labels']:
self.version = metadata['Config']['Labels'][LABEL_VERSION] self.version = metadata['Config']['Labels'][LABEL_VERSION]
elif need_version: elif need_version:
_LOGGER.warning("Can't read version from %s", self.name) _LOGGER.warning("Can't read version from %s", self.name)
# read arch
need_arch = force or not self.arch
if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
self.arch = metadata['Config']['Labels'][LABEL_ARCH]
async def install(self, tag): async def install(self, tag):
"""Pull docker image.""" """Pull docker image."""
if self._lock.locked(): if self._lock.locked():
@ -183,13 +189,13 @@ class DockerBase(object):
except docker.errors.DockerException: except docker.errors.DockerException:
return return
_LOGGER.info("Stop %s docker application", self.image)
if container.status == 'running': if container.status == 'running':
_LOGGER.info("Stop %s docker application", self.image)
with suppress(docker.errors.DockerException): with suppress(docker.errors.DockerException):
container.stop() container.stop(timeout=15)
with suppress(docker.errors.DockerException): with suppress(docker.errors.DockerException):
_LOGGER.info("Clean %s docker application", self.image)
container.remove(force=True) container.remove(force=True)
async def remove(self): async def remove(self):
@ -261,7 +267,7 @@ class DockerBase(object):
"""Return docker logs of container.""" """Return docker logs of container."""
if self._lock.locked(): if self._lock.locked():
_LOGGER.error("Can't excute logs while a task is in progress") _LOGGER.error("Can't excute logs while a task is in progress")
return False return b""
async with self._lock: async with self._lock:
return await self.loop.run_in_executor(None, self._logs) return await self.loop.run_in_executor(None, self._logs)

View File

@ -1,4 +1,5 @@
"""Init file for HassIO addon docker object.""" """Init file for HassIO addon docker object."""
from contextlib import suppress
import logging import logging
from pathlib import Path from pathlib import Path
import shutil import shutil
@ -16,22 +17,21 @@ _LOGGER = logging.getLogger(__name__)
class DockerAddon(DockerBase): class DockerAddon(DockerBase):
"""Docker hassio wrapper for HomeAssistant.""" """Docker hassio wrapper for HomeAssistant."""
def __init__(self, config, loop, dock, addons_data, addon): def __init__(self, config, loop, dock, addon):
"""Initialize docker homeassistant wrapper.""" """Initialize docker homeassistant wrapper."""
super().__init__( super().__init__(
config, loop, dock, image=addons_data.get_image(addon)) config, loop, dock, image=addon.image)
self.addon = addon self.addon = addon
self.addons_data = addons_data
@property @property
def name(self): def name(self):
"""Return name of docker container.""" """Return name of docker container."""
return "addon_{}".format(self.addon) return "addon_{}".format(self.addon.slug)
@property @property
def environment(self): def environment(self):
"""Return environment for docker add-on.""" """Return environment for docker add-on."""
addon_env = self.addons_data.get_environment(self.addon) or {} addon_env = self.addon.environment or {}
return { return {
**addon_env, **addon_env,
@ -41,7 +41,7 @@ class DockerAddon(DockerBase):
@property @property
def tmpfs(self): def tmpfs(self):
"""Return tmpfs for docker add-on.""" """Return tmpfs for docker add-on."""
options = self.addons_data.get_tmpfs(self.addon) options = self.addon.tmpfs
if options: if options:
return {"/tmpfs": "{}".format(options)} return {"/tmpfs": "{}".format(options)}
return None return None
@ -50,11 +50,11 @@ class DockerAddon(DockerBase):
def volumes(self): def volumes(self):
"""Generate volumes for mappings.""" """Generate volumes for mappings."""
volumes = { volumes = {
str(self.addons_data.path_extern_data(self.addon)): { str(self.addon.path_extern_data): {
'bind': '/data', 'mode': 'rw' 'bind': '/data', 'mode': 'rw'
}} }}
addon_mapping = self.addons_data.map_volumes(self.addon) addon_mapping = self.addon.map_volumes
if MAP_CONFIG in addon_mapping: if MAP_CONFIG in addon_mapping:
volumes.update({ volumes.update({
@ -94,20 +94,24 @@ class DockerAddon(DockerBase):
Need run inside executor. Need run inside executor.
""" """
if self._is_running(): if self._is_running():
return return True
# cleanup # cleanup
self._stop() self._stop()
# write config
if not self.addon.write_options():
return False
try: try:
self.dock.containers.run( self.dock.containers.run(
self.image, self.image,
name=self.name, name=self.name,
detach=True, detach=True,
network_mode=self.addons_data.get_network_mode(self.addon), network_mode=self.addon.network_mode,
ports=self.addons_data.get_ports(self.addon), ports=self.addon.ports,
devices=self.addons_data.get_devices(self.addon), devices=self.addon.devices,
cap_add=self.addons_data.get_privileged(self.addon), cap_add=self.addon.privileged,
environment=self.environment, environment=self.environment,
volumes=self.volumes, volumes=self.volumes,
tmpfs=self.tmpfs tmpfs=self.tmpfs
@ -126,7 +130,7 @@ class DockerAddon(DockerBase):
Need run inside executor. Need run inside executor.
""" """
if self.addons_data.need_build(self.addon): if self.addon.need_build:
return self._build(tag) return self._build(tag)
return super()._install(tag) return super()._install(tag)
@ -145,11 +149,11 @@ class DockerAddon(DockerBase):
Need run inside executor. Need run inside executor.
""" """
build_dir = Path(self.config.path_addons_build, self.addon) build_dir = Path(self.config.path_tmp, self.addon.slug)
try: try:
# prepare temporary addon build folder # prepare temporary addon build folder
try: try:
source = self.addons_data.path_addon_location(self.addon) source = self.addon.path_addon_location
shutil.copytree(str(source), str(build_dir)) shutil.copytree(str(source), str(build_dir))
except shutil.Error as err: except shutil.Error as err:
_LOGGER.error("Can't copy %s to temporary build folder -> %s", _LOGGER.error("Can't copy %s to temporary build folder -> %s",
@ -159,7 +163,7 @@ class DockerAddon(DockerBase):
# prepare Dockerfile # prepare Dockerfile
try: try:
dockerfile_template( dockerfile_template(
Path(build_dir, 'Dockerfile'), self.addons_data.arch, Path(build_dir, 'Dockerfile'), self.config.arch,
tag, META_ADDON) tag, META_ADDON)
except OSError as err: except OSError as err:
_LOGGER.error("Can't prepare dockerfile -> %s", err) _LOGGER.error("Can't prepare dockerfile -> %s", err)
@ -184,3 +188,21 @@ class DockerAddon(DockerBase):
finally: finally:
shutil.rmtree(str(build_dir), ignore_errors=True) shutil.rmtree(str(build_dir), ignore_errors=True)
def _restart(self):
"""Restart docker container.
Addons prepare some thing on start and that is normaly not repeatable.
Need run inside executor.
"""
try:
container = self.dock.containers.get(self.name)
except docker.errors.DockerException:
return False
_LOGGER.info("Restart %s", self.image)
with suppress(docker.errors.DockerException):
container.stop(timeout=15)
return self._run()

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@ -18,13 +18,19 @@ def api_sessions_cleanup(config):
return _api_sessions_cleanup return _api_sessions_cleanup
def hassio_update(config, supervisor): def hassio_update(config, supervisor, websession):
"""Create scheduler task for update of supervisor hassio.""" """Create scheduler task for update of supervisor hassio."""
async def _hassio_update(): async def _hassio_update():
"""Check and run update of supervisor hassio.""" """Check and run update of supervisor hassio."""
await config.fetch_update_infos(websession)
if config.last_hassio == supervisor.version: if config.last_hassio == supervisor.version:
return return
# don't perform a update on beta/dev channel
if config.upstream_beta:
_LOGGER.warning("Ignore Hass.IO update on beta upstream!")
return
_LOGGER.info("Found new HassIO version %s.", config.last_hassio) _LOGGER.info("Found new HassIO version %s.", config.last_hassio)
await supervisor.update(config.last_hassio) await supervisor.update(config.last_hassio)
@ -43,12 +49,12 @@ def homeassistant_watchdog(loop, homeassistant):
return _homeassistant_watchdog return _homeassistant_watchdog
async def homeassistant_setup(config, loop, homeassistant): async def homeassistant_setup(config, loop, homeassistant, websession):
"""Install a homeassistant docker container.""" """Install a homeassistant docker container."""
while True: while True:
# read homeassistant tag and install it # read homeassistant tag and install it
if not config.last_homeassistant: if not config.last_homeassistant:
await config.fetch_update_infos() await config.fetch_update_infos(websession)
tag = config.last_homeassistant tag = config.last_homeassistant
if tag and await homeassistant.install(tag): if tag and await homeassistant.install(tag):

View File

@ -3,7 +3,6 @@ import asyncio
from contextlib import suppress from contextlib import suppress
import json import json
import logging import logging
import re
import socket import socket
import aiohttp import aiohttp
@ -17,9 +16,6 @@ _LOGGER = logging.getLogger(__name__)
FREEGEOIP_URL = "https://freegeoip.io/json/" FREEGEOIP_URL = "https://freegeoip.io/json/"
_RE_VERSION = re.compile(r"VERSION=(.*)")
_IMAGE_ARCH = re.compile(r".*/([a-z0-9]*)-hassio-supervisor")
async def fetch_last_versions(websession, beta=False): async def fetch_last_versions(websession, beta=False):
"""Fetch current versions from github. """Fetch current versions from github.
@ -39,13 +35,6 @@ async def fetch_last_versions(websession, beta=False):
_LOGGER.warning("Can't parse versions from %s! %s", url, err) _LOGGER.warning("Can't parse versions from %s! %s", url, err)
def get_arch_from_image(image):
"""Return arch from hassio image name."""
found = _IMAGE_ARCH.match(image)
if found:
return found.group(1)
def get_local_ip(loop): def get_local_ip(loop):
"""Retrieve local IP address. """Retrieve local IP address.

@ -1 +1 @@
Subproject commit c5a5f41d3c1f512266ab93a5ef6d0479608865f0 Subproject commit d2a56655d086a040e712680e46e191d78949dfa3

View File

@ -1,5 +1,5 @@
{ {
"hassio": "0.37", "hassio": "0.38",
"homeassistant": "0.47.1", "homeassistant": "0.47.1",
"resinos": "0.8", "resinos": "0.8",
"resinhup": "0.1", "resinhup": "0.1",