diff --git a/.travis.yml b/.travis.yml index 129484ea8..e16f6a9fb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,7 +2,7 @@ sudo: false matrix: fast_finish: true include: - - python: "3.5" + - python: "3.6" cache: directories: diff --git a/API.md b/API.md index aada231de..ddd0e95a4 100644 --- a/API.md +++ b/API.md @@ -78,10 +78,10 @@ Get all available addons "repositories": [ { "slug": "12345678", - "name": "Repitory Name", + "name": "Repitory Name|unknown", "source": "URL_OF_REPOSITORY", - "url": "null|WEBSITE", - "maintainer": "null|BLA BLU " + "url": "WEBSITE|REPOSITORY", + "maintainer": "BLA BLU |unknown" } ] } @@ -239,12 +239,12 @@ Output the raw docker log "url": "null|url of addon", "detached": "bool", "repository": "12345678|null", - "version": "VERSION", + "version": "null|VERSION_INSTALLED", "last_version": "LAST_VERSION", - "state": "started|stopped", + "state": "none|started|stopped", "boot": "auto|manual", "build": "bool", - "options": {}, + "options": "{}", } ``` diff --git a/hassio/__main__.py b/hassio/__main__.py index 8f4d2bd0d..d8ba5bd6c 100644 --- a/hassio/__main__.py +++ b/hassio/__main__.py @@ -1,5 +1,6 @@ """Main file for HassIO.""" import asyncio +from concurrent.futures import ThreadPoolExecutor import logging import sys @@ -17,7 +18,14 @@ if __name__ == "__main__": exit(1) loop = asyncio.get_event_loop() - hassio = core.HassIO(loop) + executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker") + loop.set_default_executor(executor) + + _LOGGER.info("Initialize Hassio setup") + config = bootstrap.initialize_system_data() + hassio = core.HassIO(loop, config) + + bootstrap.migrate_system_env(config) _LOGGER.info("Run Hassio setup") loop.run_until_complete(hassio.setup()) @@ -26,7 +34,11 @@ if __name__ == "__main__": loop.call_soon_threadsafe(loop.create_task, hassio.start()) loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio) + _LOGGER.info("Run Hassio loop") loop.run_forever() + + _LOGGER.info("Cleanup system") + executor.shutdown(wait=False) loop.close() _LOGGER.info("Close Hassio") diff --git a/hassio/addons/__init__.py b/hassio/addons/__init__.py index 9860e9801..f59b47320 100644 --- a/hassio/addons/__init__.py +++ b/hassio/addons/__init__.py @@ -1,220 +1,133 @@ """Init file for HassIO addons.""" import asyncio import logging -import shutil -from .data import AddonsData -from .git import AddonsRepoHassIO, AddonsRepoCustom -from ..const import STATE_STOPPED, STATE_STARTED -from ..dock.addon import DockerAddon +from .addon import Addon +from .repository import Repository +from .data import Data +from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO _LOGGER = logging.getLogger(__name__) +BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL)) -class AddonManager(AddonsData): + +class AddonManager(object): """Manage addons inside HassIO.""" def __init__(self, config, loop, dock): """Initialize docker base wrapper.""" - super().__init__(config) - self.loop = loop + self.config = config self.dock = dock - self.repositories = [] - self.dockers = {} + self.data = Data(config) + self.addons = {} + self.repositories = {} - async def prepare(self, arch): + @property + def list_addons(self): + """Return a list of all addons.""" + return list(self.addons.values()) + + @property + def list_repositories(self): + """Return list of addon repositories.""" + return list(self.repositories.values()) + + def get(self, addon_slug): + """Return a adddon from slug.""" + return self.addons.get(addon_slug) + + async def prepare(self): """Startup addon 
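The `__main__.py` change above installs a single named `ThreadPoolExecutor` as the loop's default executor, so every blocking docker-py call dispatched through `run_in_executor(None, ...)` runs on an identifiable "SyncWorker" thread (`thread_name_prefix` needs Python 3.6, which is what the Travis bump provides). A minimal sketch of that pattern:

```python
import asyncio
import threading
from concurrent.futures import ThreadPoolExecutor

def blocking_call():
    # Stand-in for a blocking docker-py call; returns the worker thread's name.
    return threading.current_thread().name

async def main(loop):
    # run_in_executor(None, ...) now lands on the named default executor.
    print(await loop.run_in_executor(None, blocking_call))  # e.g. "SyncWorker_0"

loop = asyncio.get_event_loop()
loop.set_default_executor(ThreadPoolExecutor(thread_name_prefix="SyncWorker"))
loop.run_until_complete(main(loop))
loop.close()
```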
management.""" - self.arch = arch + self.data.reload() - # init hassio repository - self.repositories.append(AddonsRepoHassIO(self.config, self.loop)) + # init hassio built-in repositories + repositories = \ + set(self.config.addons_repositories) | BUILTIN_REPOSITORIES - # init custom repositories - for url in self.config.addons_repositories: - self.repositories.append( - AddonsRepoCustom(self.config, self.loop, url)) - - # load addon repository - tasks = [addon.load() for addon in self.repositories] - if tasks: - await asyncio.wait(tasks, loop=self.loop) - - # read data from repositories - self.read_data_from_repositories() - self.merge_update_config() - - # load installed addons - for addon in self.list_installed: - self.dockers[addon] = DockerAddon( - self.config, self.loop, self.dock, self, addon) - await self.dockers[addon].attach() - - async def add_git_repository(self, url): - """Add a new custom repository.""" - if url in self.config.addons_repositories: - _LOGGER.warning("Repository already exists %s", url) - return False - - repo = AddonsRepoCustom(self.config, self.loop, url) - - if not await repo.load(): - _LOGGER.error("Can't load from repository %s", url) - return False - - self.config.addons_repositories = url - self.repositories.append(repo) - return True - - def drop_git_repository(self, url): - """Remove a custom repository.""" - for repo in self.repositories: - if repo.url == url: - self.repositories.remove(repo) - self.config.drop_addon_repository(url) - repo.remove() - return True - - return False + # init custom repositories & load addons + await self.load_repositories(repositories) async def reload(self): """Update addons from repo and reload list.""" - tasks = [addon.pull() for addon in self.repositories] - if not tasks: - return - - await asyncio.wait(tasks, loop=self.loop) - - # read data from repositories - self.read_data_from_repositories() - self.merge_update_config() - - # remove stalled addons - for addon in self.list_detached: - _LOGGER.warning("Dedicated addon '%s' found!", addon) - - async def auto_boot(self, start_type): - """Boot addons with mode auto.""" - boot_list = self.list_startup(start_type) - tasks = [self.start(addon) for addon in boot_list] - - _LOGGER.info("Startup %s run %d addons", start_type, len(tasks)) + tasks = [repository.update() for repository in + self.repositories.values()] if tasks: await asyncio.wait(tasks, loop=self.loop) - async def install(self, addon, version=None): - """Install a addon.""" - if not self.exists_addon(addon): - _LOGGER.error("Addon %s not exists for install", addon) - return False + # read data from repositories + self.data.reload() - if self.arch not in self.get_arch(addon): - _LOGGER.error("Addon %s not supported on %s", addon, self.arch) - return False + # update addons + await self.load_addons() - if self.is_installed(addon): - _LOGGER.error("Addon %s is already installed", addon) - return False + async def load_repositories(self, list_repositories): + """Add a new custom repository.""" + new_rep = set(list_repositories) + old_rep = set(self.repositories) - if not self.path_data(addon).is_dir(): - _LOGGER.info("Create Home-Assistant addon data folder %s", - self.path_data(addon)) - self.path_data(addon).mkdir() + # add new repository + async def _add_repository(url): + """Helper function to async add repository.""" + repository = Repository(self.config, self.loop, self.data, url) + if not await repository.load(): + _LOGGER.error("Can't load from repository %s", url) + return + self.repositories[url] = 
repository - addon_docker = DockerAddon( - self.config, self.loop, self.dock, self, addon) + # don't add built-in repository to config + if url not in BUILTIN_REPOSITORIES: + self.config.addons_repositories = url - version = version or self.get_last_version(addon) - if not await addon_docker.install(version): - return False + tasks = [_add_repository(url) for url in new_rep - old_rep] + if tasks: + await asyncio.wait(tasks, loop=self.loop) - self.dockers[addon] = addon_docker - self.set_addon_install(addon, version) - return True + # del new repository + for url in old_rep - new_rep - BUILTIN_REPOSITORIES: + self.repositories.pop(url).remove() + self.config.drop_addon_repository(url) - async def uninstall(self, addon): - """Remove a addon.""" - if not self.is_installed(addon): - _LOGGER.error("Addon %s is already uninstalled", addon) - return False + # update data + self.data.reload() + await self.load_addons() - if addon not in self.dockers: - _LOGGER.error("No docker found for addon %s", addon) - return False + async def load_addons(self): + """Update/add internal addon store.""" + all_addons = set(self.data.system) | set(self.data.cache) - if not await self.dockers[addon].remove(): - return False + # calc diff + add_addons = all_addons - set(self.addons) + del_addons = set(self.addons) - all_addons - if self.path_data(addon).is_dir(): - _LOGGER.info("Remove Home-Assistant addon data folder %s", - self.path_data(addon)) - shutil.rmtree(str(self.path_data(addon))) + _LOGGER.info("Load addons: %d all - %d new - %d remove", + len(all_addons), len(add_addons), len(del_addons)) - self.dockers.pop(addon) - self.set_addon_uninstall(addon) - return True + # new addons + tasks = [] + for addon_slug in add_addons: + addon = Addon( + self.config, self.loop, self.dock, self.data, addon_slug) - async def state(self, addon): - """Return running state of addon.""" - if addon not in self.dockers: - _LOGGER.error("No docker found for addon %s", addon) - return + tasks.append(addon.load()) + self.addons[addon_slug] = addon - if await self.dockers[addon].is_running(): - return STATE_STARTED - return STATE_STOPPED + if tasks: + await asyncio.wait(tasks, loop=self.loop) - async def start(self, addon): - """Set options and start addon.""" - if addon not in self.dockers: - _LOGGER.error("No docker found for addon %s", addon) - return False + # remove + for addon_slug in del_addons: + self.addons.pop(addon_slug) - if not self.write_addon_options(addon): - _LOGGER.error("Can't write options for addon %s", addon) - return False + async def auto_boot(self, stage): + """Boot addons with mode auto.""" + tasks = [] + for addon in self.addons.values(): + if addon.is_installed and addon.boot == BOOT_AUTO and \ + addon.startup == stage: + tasks.append(addon.start()) - return await self.dockers[addon].run() - - async def stop(self, addon): - """Stop addon.""" - if addon not in self.dockers: - _LOGGER.error("No docker found for addon %s", addon) - return False - - return await self.dockers[addon].stop() - - async def update(self, addon, version=None): - """Update addon.""" - if addon not in self.dockers: - _LOGGER.error("No docker found for addon %s", addon) - return False - - version = version or self.get_last_version(addon) - - # update - if not await self.dockers[addon].update(version): - return False - - self.set_addon_update(addon, version) - return True - - async def restart(self, addon): - """Restart addon.""" - if addon not in self.dockers: - _LOGGER.error("No docker found for addon %s", addon) - return False - - 
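Both `load_repositories()` and `load_addons()` follow the same reconcile-by-set-difference idea: compute the wanted set, create what is new, drop what disappeared (the real method additionally protects `BUILTIN_REPOSITORIES` from removal). A condensed sketch with illustrative names:

```python
def reconcile(current: dict, wanted: set, create, destroy):
    """Set-difference reconciliation as used by AddonManager (simplified)."""
    for key in wanted - set(current):        # newly wanted entries
        current[key] = create(key)
    for key in set(current) - wanted:        # entries that disappeared
        destroy(current.pop(key))
    return current
```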
if not self.write_addon_options(addon): - _LOGGER.error("Can't write options for addon %s", addon) - return False - - return await self.dockers[addon].restart() - - async def logs(self, addon): - """Return addons log output.""" - if addon not in self.dockers: - _LOGGER.error("No docker found for addon %s", addon) - return False - - return await self.dockers[addon].logs() + _LOGGER.info("Startup %s run %d addons", stage, len(tasks)) + if tasks: + await asyncio.wait(tasks, loop=self.loop) diff --git a/hassio/addons/addon.py b/hassio/addons/addon.py new file mode 100644 index 000000000..c43b01558 --- /dev/null +++ b/hassio/addons/addon.py @@ -0,0 +1,358 @@ +"""Init file for HassIO addons.""" +from copy import deepcopy +import logging +from pathlib import Path, PurePath +import re +import shutil + +import voluptuous as vol +from voluptuous.humanize import humanize_error + +from .validate import validate_options, MAP_VOLUME +from ..const import ( + ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP, + ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY, + ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT, + ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP, + STATE_STARTED, STATE_STOPPED, STATE_NONE) +from ..dock.addon import DockerAddon +from ..tools import write_json_file + +_LOGGER = logging.getLogger(__name__) + +RE_VOLUME = re.compile(MAP_VOLUME) + + +class Addon(object): + """Hold data for addon inside HassIO.""" + + def __init__(self, config, loop, dock, data, addon_slug): + """Initialize data holder.""" + self.config = config + self.data = data + self._id = addon_slug + + if self._mesh is None: + raise RuntimeError("{} not a valid addon!".format(self._id)) + + self.addon_docker = DockerAddon(config, loop, dock, self) + + async def load(self): + """Async initialize of object.""" + if self.is_installed: + await self.addon_docker.attach() + + @property + def slug(self): + """Return slug/id of addon.""" + return self._id + + @property + def _mesh(self): + """Return addon data from system or cache.""" + return self.data.system.get(self._id, self.data.cache.get(self._id)) + + @property + def is_installed(self): + """Return True if a addon is installed.""" + return self._id in self.data.system + + @property + def is_detached(self): + """Return True if addon is detached.""" + return self._id not in self.data.cache + + @property + def version_installed(self): + """Return installed version.""" + return self.data.user.get(self._id, {}).get(ATTR_VERSION) + + def _set_install(self, version): + """Set addon as installed.""" + self.data.system[self._id] = deepcopy(self.data.cache[self._id]) + self.data.user[self._id] = { + ATTR_OPTIONS: {}, + ATTR_VERSION: version, + } + self.data.save() + + def _set_uninstall(self): + """Set addon as uninstalled.""" + self.data.system.pop(self._id, None) + self.data.user.pop(self._id, None) + self.data.save() + + def _set_update(self, version): + """Update version of addon.""" + self.data.system[self._id] = deepcopy(self.data.cache[self._id]) + self.data.user[self._id][ATTR_VERSION] = version + self.data.save() + + @property + def options(self): + """Return options with local changes.""" + if self.is_installed: + return { + **self.data.system[self._id][ATTR_OPTIONS], + **self.data.user[self._id][ATTR_OPTIONS], + } + return self.data.cache[self._id][ATTR_OPTIONS] + + @options.setter + def options(self, value): + """Store user addon options.""" + self.data.user[self._id][ATTR_OPTIONS] = deepcopy(value) + 
self.data.save() + + @property + def boot(self): + """Return boot config with prio local settings.""" + if ATTR_BOOT in self.data.user.get(self._id, {}): + return self.data.user[self._id][ATTR_BOOT] + return self._mesh[ATTR_BOOT] + + @boot.setter + def boot(self, value): + """Store user boot options.""" + self.data.user[self._id][ATTR_BOOT] = value + self.data.save() + + @property + def name(self): + """Return name of addon.""" + return self._mesh[ATTR_NAME] + + @property + def description(self): + """Return description of addon.""" + return self._mesh[ATTR_DESCRIPTON] + + @property + def repository(self): + """Return repository of addon.""" + return self._mesh[ATTR_REPOSITORY] + + @property + def last_version(self): + """Return version of addon.""" + if self._id in self.data.cache: + return self.data.cache[self._id][ATTR_VERSION] + return self.version_installed + + @property + def startup(self): + """Return startup type of addon.""" + return self._mesh.get(ATTR_STARTUP) + + @property + def ports(self): + """Return ports of addon.""" + return self._mesh.get(ATTR_PORTS) + + @property + def network_mode(self): + """Return network mode of addon.""" + if self._mesh[ATTR_HOST_NETWORK]: + return 'host' + return 'bridge' + + @property + def devices(self): + """Return devices of addon.""" + return self._mesh.get(ATTR_DEVICES) + + @property + def tmpfs(self): + """Return tmpfs of addon.""" + return self._mesh.get(ATTR_TMPFS) + + @property + def environment(self): + """Return environment of addon.""" + return self._mesh.get(ATTR_ENVIRONMENT) + + @property + def privileged(self): + """Return list of privilege.""" + return self._mesh.get(ATTR_PRIVILEGED) + + @property + def url(self): + """Return url of addon.""" + return self._mesh.get(ATTR_URL) + + @property + def supported_arch(self): + """Return list of supported arch.""" + return self._mesh[ATTR_ARCH] + + @property + def image(self): + """Return image name of addon.""" + addon_data = self._mesh + + # Repository with dockerhub images + if ATTR_IMAGE in addon_data: + return addon_data[ATTR_IMAGE].format(arch=self.config.arch) + + # local build + return "{}/{}-addon-{}".format( + addon_data[ATTR_REPOSITORY], self.config.arch, + addon_data[ATTR_SLUG]) + + @property + def need_build(self): + """Return True if this addon need a local build.""" + return ATTR_IMAGE not in self._mesh + + @property + def map_volumes(self): + """Return a dict of {volume: policy} from addon.""" + volumes = {} + for volume in self._mesh[ATTR_MAP]: + result = RE_VOLUME.match(volume) + volumes[result.group(1)] = result.group(2) or 'ro' + + return volumes + + @property + def path_data(self): + """Return addon data path inside supervisor.""" + return Path(self.config.path_addons_data, self._id) + + @property + def path_extern_data(self): + """Return addon data path external for docker.""" + return PurePath(self.config.path_extern_addons_data, self._id) + + @property + def path_addon_options(self): + """Return path to addons options.""" + return Path(self.path_data, "options.json") + + @property + def path_addon_location(self): + """Return path to this addon.""" + return Path(self._mesh[ATTR_LOCATON]) + + def write_options(self): + """Return True if addon options is written to data.""" + schema = self.schema + options = self.options + + try: + schema(options) + return write_json_file(self.path_addon_options, options) + except vol.Invalid as ex: + _LOGGER.error("Addon %s have wrong options -> %s", self._id, + humanize_error(options, ex)) + + return False + + @property + def 
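`Addon.options` layers the user's stored options over the system defaults with a dict unpack, so the later (user) keys win; roughly:

```python
defaults = {"port": 8123, "ssl": False}   # data.system[slug][ATTR_OPTIONS] (example values)
user = {"ssl": True}                      # data.user[slug][ATTR_OPTIONS]
merged = {**defaults, **user}             # {'port': 8123, 'ssl': True} - user values override
```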
schema(self): + """Create a schema for addon options.""" + raw_schema = self._mesh[ATTR_SCHEMA] + + if isinstance(raw_schema, bool): + return vol.Schema(dict) + return vol.Schema(vol.All(dict, validate_options(raw_schema))) + + async def install(self, version=None): + """Install a addon.""" + if self.config.arch not in self.supported_arch: + _LOGGER.error( + "Addon %s not supported on %s", self._id, self.config.arch) + return False + + if self.is_installed: + _LOGGER.error("Addon %s is already installed", self._id) + return False + + if not self.path_data.is_dir(): + _LOGGER.info( + "Create Home-Assistant addon data folder %s", self.path_data) + self.path_data.mkdir() + + version = version or self.last_version + if not await self.addon_docker.install(version): + return False + + self._set_install(version) + return True + + async def uninstall(self): + """Remove a addon.""" + if not self.is_installed: + _LOGGER.error("Addon %s is not installed", self._id) + return False + + if not await self.addon_docker.remove(): + return False + + if self.path_data.is_dir(): + _LOGGER.info( + "Remove Home-Assistant addon data folder %s", self.path_data) + shutil.rmtree(str(self.path_data)) + + self._set_uninstall() + return True + + async def state(self): + """Return running state of addon.""" + if not self.is_installed: + return STATE_NONE + + if await self.addon_docker.is_running(): + return STATE_STARTED + return STATE_STOPPED + + async def start(self): + """Set options and start addon.""" + if not self.is_installed: + _LOGGER.error("Addon %s is not installed", self._id) + return False + + return await self.addon_docker.run() + + async def stop(self): + """Stop addon.""" + if not self.is_installed: + _LOGGER.error("Addon %s is not installed", self._id) + return False + + return await self.addon_docker.stop() + + async def update(self, version=None): + """Update addon.""" + if not self.is_installed: + _LOGGER.error("Addon %s is not installed", self._id) + return False + + version = version or self.last_version + if version == self.version_installed: + _LOGGER.warning( + "Addon %s is already installed in %s", self._id, version) + return True + + if not await self.addon_docker.update(version): + return False + + self._set_update(version) + return True + + async def restart(self): + """Restart addon.""" + if not self.is_installed: + _LOGGER.error("Addon %s is not installed", self._id) + return False + + return await self.addon_docker.restart() + + async def logs(self): + """Return addons log output.""" + if not self.is_installed: + _LOGGER.error("Addon %s is not installed", self._id) + return False + + return await self.addon_docker.logs() diff --git a/hassio/addons/built-in.json b/hassio/addons/built-in.json index 2b802f355..4d0b11ac6 100644 --- a/hassio/addons/built-in.json +++ b/hassio/addons/built-in.json @@ -1,12 +1,10 @@ { "local": { - "slug": "local", "name": "Local Add-Ons", "url": "https://home-assistant.io/hassio", "maintainer": "By our self" }, "core": { - "slug": "core", "name": "Built-in Add-Ons", "url": "https://home-assistant.io/addons", "maintainer": "Home Assistant authors" diff --git a/hassio/addons/data.py b/hassio/addons/data.py index e17f68041..ffb685dd9 100644 --- a/hassio/addons/data.py +++ b/hassio/addons/data.py @@ -2,7 +2,7 @@ import copy import logging import json -from pathlib import Path, PurePath +from pathlib import Path import re import voluptuous as vol @@ -10,29 +10,22 @@ from voluptuous.humanize import humanize_error from .util import extract_hash_from_path from .validate 
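`Addon.schema` wraps the raw schema from the add-on config into a voluptuous validator, and `write_options()` reports failures through `humanize_error`; the validation path looks roughly like this (schema and values are made up for illustration):

```python
import voluptuous as vol
from voluptuous.humanize import humanize_error

schema = vol.Schema({vol.Required("port"): int})   # stand-in for addon.schema
options = {"port": "not-a-number"}

try:
    schema(options)
except vol.Invalid as ex:
    print("Addon has wrong options ->", humanize_error(options, ex))
```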
import ( - validate_options, SCHEMA_ADDON_CONFIG, SCHEMA_REPOSITORY_CONFIG, - MAP_VOLUME) + SCHEMA_ADDON_CONFIG, SCHEMA_REPOSITORY_CONFIG, MAP_VOLUME) from ..const import ( - FILE_HASSIO_ADDONS, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, - ATTR_STARTUP, ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, BOOT_AUTO, - ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY, ATTR_URL, ATTR_ARCH, - ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, - ATTR_TMPFS, ATTR_PRIVILEGED) + FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON, + REPOSITORY_CORE, REPOSITORY_LOCAL) from ..config import Config -from ..tools import read_json_file, write_json_file +from ..tools import read_json_file _LOGGER = logging.getLogger(__name__) SYSTEM = 'system' USER = 'user' -REPOSITORY_CORE = 'core' -REPOSITORY_LOCAL = 'local' - RE_VOLUME = re.compile(MAP_VOLUME) -class AddonsData(Config): +class Data(Config): """Hold data for addons inside HassIO.""" def __init__(self, config): @@ -41,9 +34,8 @@ class AddonsData(Config): self.config = config self._system_data = self._data.get(SYSTEM, {}) self._user_data = self._data.get(USER, {}) - self._addons_cache = {} + self._cache_data = {} self._repositories_data = {} - self.arch = None def save(self): """Store data to config file.""" @@ -53,9 +45,29 @@ class AddonsData(Config): } super().save() - def read_data_from_repositories(self): + @property + def user(self): + """Return local addon user data.""" + return self._user_data + + @property + def system(self): + """Return local addon data.""" + return self._system_data + + @property + def cache(self): + """Return addon data from cache/repositories.""" + return self._cache_data + + @property + def repositories(self): + """Return addon data from repositories.""" + return self._repositories_data + + def reload(self): """Read data from addons repository.""" - self._addons_cache = {} + self._cache_data = {} self._repositories_data = {} # read core repository @@ -74,17 +86,19 @@ class AddonsData(Config): if repository_element.is_dir(): self._read_git_repository(repository_element) + # update local data + self._merge_config() + def _read_git_repository(self, path): """Process a custom repository folder.""" slug = extract_hash_from_path(path) - repository_info = {ATTR_SLUG: slug} # exists repository json repository_file = Path(path, "repository.json") try: - repository_info.update(SCHEMA_REPOSITORY_CONFIG( + repository_info = SCHEMA_REPOSITORY_CONFIG( read_json_file(repository_file) - )) + ) except OSError: _LOGGER.warning("Can't read repository information from %s", @@ -115,7 +129,7 @@ class AddonsData(Config): # store addon_config[ATTR_REPOSITORY] = repository addon_config[ATTR_LOCATON] = str(addon.parent) - self._addons_cache[addon_slug] = addon_config + self._cache_data[addon_slug] = addon_config except OSError: _LOGGER.warning("Can't read %s", addon) @@ -133,33 +147,27 @@ class AddonsData(Config): _LOGGER.warning("Can't read built-in.json -> %s", err) return - # if core addons are available - for data in self._addons_cache.values(): - if data[ATTR_REPOSITORY] == REPOSITORY_CORE: - self._repositories_data[REPOSITORY_CORE] = \ - builtin_data[REPOSITORY_CORE] - break + # core repository + self._repositories_data[REPOSITORY_CORE] = \ + builtin_data[REPOSITORY_CORE] - # if local addons are available - for data in self._addons_cache.values(): - if data[ATTR_REPOSITORY] == REPOSITORY_LOCAL: - self._repositories_data[REPOSITORY_LOCAL] = \ - builtin_data[REPOSITORY_LOCAL] - break + # local repository + 
self._repositories_data[REPOSITORY_LOCAL] = \ + builtin_data[REPOSITORY_LOCAL] - def merge_update_config(self): + def _merge_config(self): """Update local config if they have update. - It need to be the same version as the local version is. + It need to be the same version as the local version is for merge. """ have_change = False - for addon in self.list_installed: + for addon in set(self._system_data): # detached - if addon not in self._addons_cache: + if addon not in self._cache_data: continue - cache = self._addons_cache[addon] + cache = self._cache_data[addon] data = self._system_data[addon] if data[ATTR_VERSION] == cache[ATTR_VERSION]: if data != cache: @@ -168,232 +176,3 @@ class AddonsData(Config): if have_change: self.save() - - @property - def list_installed(self): - """Return a list of installed addons.""" - return set(self._system_data) - - @property - def list_all(self): - """Return a dict of all addons.""" - return set(self._system_data) | set(self._addons_cache) - - def list_startup(self, start_type): - """Get list of installed addon with need start by type.""" - addon_list = set() - for addon in self._system_data.keys(): - if self.get_boot(addon) != BOOT_AUTO: - continue - - try: - if self._system_data[addon][ATTR_STARTUP] == start_type: - addon_list.add(addon) - except KeyError: - _LOGGER.warning("Orphaned addon detect %s", addon) - continue - - return addon_list - - @property - def list_detached(self): - """Return local addons they not support from repo.""" - addon_list = set() - for addon in self._system_data.keys(): - if addon not in self._addons_cache: - addon_list.add(addon) - - return addon_list - - @property - def list_repositories(self): - """Return list of addon repositories.""" - return list(self._repositories_data.values()) - - def exists_addon(self, addon): - """Return True if a addon exists.""" - return addon in self._addons_cache or addon in self._system_data - - def is_installed(self, addon): - """Return True if a addon is installed.""" - return addon in self._system_data - - def version_installed(self, addon): - """Return installed version.""" - return self._user_data.get(addon, {}).get(ATTR_VERSION) - - def set_addon_install(self, addon, version): - """Set addon as installed.""" - self._system_data[addon] = copy.deepcopy(self._addons_cache[addon]) - self._user_data[addon] = { - ATTR_OPTIONS: {}, - ATTR_VERSION: version, - } - self.save() - - def set_addon_uninstall(self, addon): - """Set addon as uninstalled.""" - self._system_data.pop(addon, None) - self._user_data.pop(addon, None) - self.save() - - def set_addon_update(self, addon, version): - """Update version of addon.""" - self._system_data[addon] = copy.deepcopy(self._addons_cache[addon]) - self._user_data[addon][ATTR_VERSION] = version - self.save() - - def set_options(self, addon, options): - """Store user addon options.""" - self._user_data[addon][ATTR_OPTIONS] = copy.deepcopy(options) - self.save() - - def set_boot(self, addon, boot): - """Store user boot options.""" - self._user_data[addon][ATTR_BOOT] = boot - self.save() - - def get_options(self, addon): - """Return options with local changes.""" - return { - **self._system_data[addon][ATTR_OPTIONS], - **self._user_data[addon][ATTR_OPTIONS], - } - - def get_boot(self, addon): - """Return boot config with prio local settings.""" - if ATTR_BOOT in self._user_data[addon]: - return self._user_data[addon][ATTR_BOOT] - - return self._system_data[addon][ATTR_BOOT] - - def get_name(self, addon): - """Return name of addon.""" - if addon in 
self._addons_cache: - return self._addons_cache[addon][ATTR_NAME] - return self._system_data[addon][ATTR_NAME] - - def get_description(self, addon): - """Return description of addon.""" - if addon in self._addons_cache: - return self._addons_cache[addon][ATTR_DESCRIPTON] - return self._system_data[addon][ATTR_DESCRIPTON] - - def get_repository(self, addon): - """Return repository of addon.""" - if addon in self._addons_cache: - return self._addons_cache[addon][ATTR_REPOSITORY] - return self._system_data[addon][ATTR_REPOSITORY] - - def get_last_version(self, addon): - """Return version of addon.""" - if addon in self._addons_cache: - return self._addons_cache[addon][ATTR_VERSION] - return self.version_installed(addon) - - def get_ports(self, addon): - """Return ports of addon.""" - return self._system_data[addon].get(ATTR_PORTS) - - def get_network_mode(self, addon): - """Return network mode of addon.""" - if self._system_data[addon][ATTR_HOST_NETWORK]: - return 'host' - return 'bridge' - - def get_devices(self, addon): - """Return devices of addon.""" - return self._system_data[addon].get(ATTR_DEVICES) - - def get_tmpfs(self, addon): - """Return tmpfs of addon.""" - return self._system_data[addon].get(ATTR_TMPFS) - - def get_environment(self, addon): - """Return environment of addon.""" - return self._system_data[addon].get(ATTR_ENVIRONMENT) - - def get_privileged(self, addon): - """Return list of privilege.""" - return self._system_data[addon].get(ATTR_PRIVILEGED) - - def get_url(self, addon): - """Return url of addon.""" - if addon in self._addons_cache: - return self._addons_cache[addon].get(ATTR_URL) - return self._system_data[addon].get(ATTR_URL) - - def get_arch(self, addon): - """Return list of supported arch.""" - if addon in self._addons_cache: - return self._addons_cache[addon][ATTR_ARCH] - return self._system_data[addon][ATTR_ARCH] - - def get_image(self, addon): - """Return image name of addon.""" - addon_data = self._system_data.get( - addon, self._addons_cache.get(addon) - ) - - # Repository with dockerhub images - if ATTR_IMAGE in addon_data: - return addon_data[ATTR_IMAGE].format(arch=self.arch) - - # local build - return "{}/{}-addon-{}".format( - addon_data[ATTR_REPOSITORY], self.arch, addon_data[ATTR_SLUG]) - - def need_build(self, addon): - """Return True if this addon need a local build.""" - addon_data = self._system_data.get( - addon, self._addons_cache.get(addon) - ) - return ATTR_IMAGE not in addon_data - - def map_volumes(self, addon): - """Return a dict of {volume: policy} from addon.""" - volumes = {} - for volume in self._system_data[addon][ATTR_MAP]: - result = RE_VOLUME.match(volume) - volumes[result.group(1)] = result.group(2) or 'ro' - - return volumes - - def path_data(self, addon): - """Return addon data path inside supervisor.""" - return Path(self.config.path_addons_data, addon) - - def path_extern_data(self, addon): - """Return addon data path external for docker.""" - return PurePath(self.config.path_extern_addons_data, addon) - - def path_addon_options(self, addon): - """Return path to addons options.""" - return Path(self.path_data(addon), "options.json") - - def path_addon_location(self, addon): - """Return path to this addon.""" - return Path(self._addons_cache[addon][ATTR_LOCATON]) - - def write_addon_options(self, addon): - """Return True if addon options is written to data.""" - schema = self.get_schema(addon) - options = self.get_options(addon) - - try: - schema(options) - return write_json_file(self.path_addon_options(addon), options) - 
except vol.Invalid as ex: - _LOGGER.error("Addon %s have wrong options -> %s", addon, - humanize_error(options, ex)) - - return False - - def get_schema(self, addon): - """Create a schema for addon options.""" - raw_schema = self._system_data[addon][ATTR_SCHEMA] - - if isinstance(raw_schema, bool): - return vol.Schema(dict) - - return vol.Schema(vol.All(dict, validate_options(raw_schema))) diff --git a/hassio/addons/git.py b/hassio/addons/git.py index f9f8ab6bd..5e9c02618 100644 --- a/hassio/addons/git.py +++ b/hassio/addons/git.py @@ -12,7 +12,7 @@ from ..const import URL_HASSIO_ADDONS _LOGGER = logging.getLogger(__name__) -class AddonsRepo(object): +class GitRepo(object): """Manage addons git repo.""" def __init__(self, config, loop, path, url): @@ -77,7 +77,7 @@ class AddonsRepo(object): return True -class AddonsRepoHassIO(AddonsRepo): +class GitRepoHassIO(GitRepo): """HassIO addons repository.""" def __init__(self, config, loop): @@ -86,7 +86,7 @@ class AddonsRepoHassIO(AddonsRepo): config, loop, config.path_addons_core, URL_HASSIO_ADDONS) -class AddonsRepoCustom(AddonsRepo): +class GitRepoCustom(GitRepo): """Custom addons repository.""" def __init__(self, config, loop, url): diff --git a/hassio/addons/repository.py b/hassio/addons/repository.py new file mode 100644 index 000000000..73859c987 --- /dev/null +++ b/hassio/addons/repository.py @@ -0,0 +1,71 @@ +"""Represent a HassIO repository.""" +from .git import GitRepoHassIO, GitRepoCustom +from .util import get_hash_from_repository +from ..const import ( + REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_NAME, ATTR_URL, ATTR_MAINTAINER) + +UNKNOWN = 'unknown' + + +class Repository(object): + """Repository in HassIO.""" + + def __init__(self, config, loop, data, repository): + """Initialize repository object.""" + self.data = data + self.source = None + self.git = None + + if repository == REPOSITORY_LOCAL: + self._id = repository + elif repository == REPOSITORY_CORE: + self._id = repository + self.git = GitRepoHassIO(config, loop) + else: + self._id = get_hash_from_repository(repository) + self.git = GitRepoCustom(config, loop, repository) + self.source = repository + + @property + def _mesh(self): + """Return data struct repository.""" + return self.data.repositories.get(self._id, {}) + + @property + def slug(self): + """Return slug of repository.""" + return self._id + + @property + def name(self): + """Return name of repository.""" + return self._mesh.get(ATTR_NAME, UNKNOWN) + + @property + def url(self): + """Return url of repository.""" + return self._mesh.get(ATTR_URL, self.source) + + @property + def maintainer(self): + """Return url of repository.""" + return self._mesh.get(ATTR_MAINTAINER, UNKNOWN) + + async def load(self): + """Load addon repository.""" + if self.git: + return await self.git.load() + return True + + async def update(self): + """Update addon repository.""" + if self.git: + return await self.git.pull() + return True + + def remove(self): + """Remove addon repository.""" + if self._id in (REPOSITORY_CORE, REPOSITORY_LOCAL): + raise RuntimeError("Can't remove built-in repositories!") + + self.git.remove() diff --git a/hassio/addons/util.py b/hassio/addons/util.py index c0d097734..152c28866 100644 --- a/hassio/addons/util.py +++ b/hassio/addons/util.py @@ -19,8 +19,3 @@ def extract_hash_from_path(path): if not RE_SHA1.match(repo_dir): return get_hash_from_repository(repo_dir) return repo_dir - - -def create_hash_index_list(name_list): - """Create a dict with hash from repositories list.""" - return 
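`Repository` derives its slug with `get_hash_from_repository()` from `util.py` (not shown in this hunk); judging by the 8-character slugs in API.md and the `RE_SHA1` check in `extract_hash_from_path()`, it is presumably a truncated hash of the repository URL. A hypothetical equivalent, not taken from the diff:

```python
import hashlib

def repo_slug(url, length=8):
    # Assumed behaviour of get_hash_from_repository(); illustration only.
    return hashlib.sha1(url.encode()).hexdigest()[:length]

repo_slug("https://github.com/home-assistant/hassio-addons")  # -> 8-character hex slug
```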
{get_hash_from_repository(repo): repo for repo in name_list} diff --git a/hassio/api/__init__.py b/hassio/api/__init__.py index 97c38c389..856f8aabf 100644 --- a/hassio/api/__init__.py +++ b/hassio/api/__init__.py @@ -43,10 +43,12 @@ class RestAPI(object): self.webapp.router.add_get('/network/info', api_net.info) self.webapp.router.add_post('/network/options', api_net.options) - def register_supervisor(self, supervisor, addons, host_control): + def register_supervisor(self, supervisor, addons, host_control, + websession): """Register supervisor function.""" api_supervisor = APISupervisor( - self.config, self.loop, supervisor, addons, host_control) + self.config, self.loop, supervisor, addons, host_control, + websession) self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping) self.webapp.router.add_get('/supervisor/info', api_supervisor.info) diff --git a/hassio/api/addons.py b/hassio/api/addons.py index 3f15c67b1..eb45e3f4d 100644 --- a/hassio/api/addons.py +++ b/hassio/api/addons.py @@ -9,7 +9,7 @@ from .util import api_process, api_process_raw, api_validate from ..const import ( ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS, ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY, - ATTR_BUILD, STATE_STOPPED, STATE_STARTED, BOOT_AUTO, BOOT_MANUAL) + ATTR_BUILD, BOOT_AUTO, BOOT_MANUAL) _LOGGER = logging.getLogger(__name__) @@ -33,12 +33,11 @@ class APIAddons(object): def _extract_addon(self, request, check_installed=True): """Return addon and if not exists trow a exception.""" - addon = request.match_info.get('addon') - - # check data - if not self.addons.exists_addon(addon): + addon = self.addons.get(request.match_info.get('addon')) + if not addon: raise RuntimeError("Addon not exists") - if check_installed and not self.addons.is_installed(addon): + + if check_installed and not addon.is_installed: raise RuntimeError("Addon is not installed") return addon @@ -46,38 +45,37 @@ class APIAddons(object): @api_process async def info(self, request): """Return addon information.""" - addon = self._extract_addon(request) + addon = self._extract_addon(request, check_installed=False) return { - ATTR_NAME: self.addons.get_name(addon), - ATTR_DESCRIPTON: self.addons.get_description(addon), - ATTR_VERSION: self.addons.version_installed(addon), - ATTR_REPOSITORY: self.addons.get_repository(addon), - ATTR_LAST_VERSION: self.addons.get_last_version(addon), - ATTR_STATE: await self.addons.state(addon), - ATTR_BOOT: self.addons.get_boot(addon), - ATTR_OPTIONS: self.addons.get_options(addon), - ATTR_URL: self.addons.get_url(addon), - ATTR_DETACHED: addon in self.addons.list_detached, - ATTR_BUILD: self.addons.need_build(addon), + ATTR_NAME: addon.name, + ATTR_DESCRIPTON: addon.description, + ATTR_VERSION: addon.version_installed, + ATTR_REPOSITORY: addon.repository, + ATTR_LAST_VERSION: addon.last_version, + ATTR_STATE: await addon.state(), + ATTR_BOOT: addon.boot, + ATTR_OPTIONS: addon.options, + ATTR_URL: addon.url, + ATTR_DETACHED: addon.is_detached, + ATTR_BUILD: addon.need_build, } @api_process async def options(self, request): """Store user options for addon.""" addon = self._extract_addon(request) - options_schema = self.addons.get_schema(addon) addon_schema = SCHEMA_OPTIONS.extend({ - vol.Optional(ATTR_OPTIONS): options_schema, + vol.Optional(ATTR_OPTIONS): addon.schema, }) body = await api_validate(addon_schema, request) if ATTR_OPTIONS in body: - self.addons.set_options(addon, body[ATTR_OPTIONS]) + addon.options = body[ATTR_OPTIONS] if ATTR_BOOT in 
body: - self.addons.set_boot(addon, body[ATTR_BOOT]) + addon.boot = body[ATTR_BOOT] return True @@ -86,77 +84,55 @@ class APIAddons(object): """Install addon.""" body = await api_validate(SCHEMA_VERSION, request) addon = self._extract_addon(request, check_installed=False) - version = body.get( - ATTR_VERSION, self.addons.get_last_version(addon)) - - # check if arch supported - if self.addons.arch not in self.addons.get_arch(addon): - raise RuntimeError( - "Addon is not supported on {}".format(self.addons.arch)) + version = body.get(ATTR_VERSION) return await asyncio.shield( - self.addons.install(addon, version), loop=self.loop) + addon.install(version=version), loop=self.loop) @api_process async def uninstall(self, request): """Uninstall addon.""" addon = self._extract_addon(request) - - return await asyncio.shield( - self.addons.uninstall(addon), loop=self.loop) + return await asyncio.shield(addon.uninstall(), loop=self.loop) @api_process async def start(self, request): """Start addon.""" addon = self._extract_addon(request) - if await self.addons.state(addon) == STATE_STARTED: - raise RuntimeError("Addon is already running") - - # validate options + # check options + options = addon.options try: - schema = self.addons.get_schema(addon) - options = self.addons.get_options(addon) - schema(options) + addon.schema(options) except vol.Invalid as ex: raise RuntimeError(humanize_error(options, ex)) from None - return await asyncio.shield( - self.addons.start(addon), loop=self.loop) + return await asyncio.shield(addon.start(), loop=self.loop) @api_process async def stop(self, request): """Stop addon.""" addon = self._extract_addon(request) - - if await self.addons.state(addon) == STATE_STOPPED: - raise RuntimeError("Addon is already stoped") - - return await asyncio.shield( - self.addons.stop(addon), loop=self.loop) + return await asyncio.shield(addon.stop(), loop=self.loop) @api_process async def update(self, request): """Update addon.""" body = await api_validate(SCHEMA_VERSION, request) addon = self._extract_addon(request) - version = body.get( - ATTR_VERSION, self.addons.get_last_version(addon)) - - if version == self.addons.version_installed(addon): - raise RuntimeError("Version is already in use") + version = body.get(ATTR_VERSION) return await asyncio.shield( - self.addons.update(addon, version), loop=self.loop) + addon.update(version=version), loop=self.loop) @api_process async def restart(self, request): """Restart addon.""" addon = self._extract_addon(request) - return await asyncio.shield(self.addons.restart(addon), loop=self.loop) + return await asyncio.shield(addon.restart(), loop=self.loop) @api_process_raw def logs(self, request): """Return logs from addon.""" addon = self._extract_addon(request) - return self.addons.logs(addon) + return addon.logs() diff --git a/hassio/api/supervisor.py b/hassio/api/supervisor.py index 0b51d9be4..8bddfd9b7 100644 --- a/hassio/api/supervisor.py +++ b/hassio/api/supervisor.py @@ -5,7 +5,6 @@ import logging import voluptuous as vol from .util import api_process, api_process_raw, api_validate -from ..addons.util import create_hash_index_list from ..const import ( ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_REPOSITORIES, @@ -31,36 +30,34 @@ SCHEMA_VERSION = vol.Schema({ class APISupervisor(object): """Handle rest api for supervisor functions.""" - def __init__(self, config, loop, supervisor, addons, host_control): + def __init__(self, config, loop, supervisor, addons, host_control, + 
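The API handlers wrap long-running add-on operations in `asyncio.shield()`: if the HTTP client disconnects, aiohttp cancels the handler task, but the shielded inner coroutine (an install or update) keeps running to completion. In miniature:

```python
import asyncio

async def install_handler(addon, loop):
    # Cancelling this handler does not cancel the shielded install itself.
    return await asyncio.shield(addon.install(), loop=loop)
```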
websession): """Initialize supervisor rest api part.""" self.config = config self.loop = loop self.supervisor = supervisor self.addons = addons self.host_control = host_control + self.websession = websession def _addons_list(self, only_installed=False): """Return a list of addons.""" - detached = self.addons.list_detached - - if only_installed: - addons = self.addons.list_installed - else: - addons = self.addons.list_all - data = [] - for addon in addons: + for addon in self.addons.list_addons: + if only_installed and not addon.is_installed: + continue + data.append({ - ATTR_NAME: self.addons.get_name(addon), - ATTR_SLUG: addon, - ATTR_DESCRIPTON: self.addons.get_description(addon), - ATTR_VERSION: self.addons.get_last_version(addon), - ATTR_INSTALLED: self.addons.version_installed(addon), - ATTR_ARCH: self.addons.get_arch(addon), - ATTR_DETACHED: addon in detached, - ATTR_REPOSITORY: self.addons.get_repository(addon), - ATTR_BUILD: self.addons.need_build(addon), - ATTR_URL: self.addons.get_url(addon), + ATTR_NAME: addon.name, + ATTR_SLUG: addon.slug, + ATTR_DESCRIPTON: addon.description, + ATTR_VERSION: addon.last_version, + ATTR_INSTALLED: addon.version_installed, + ATTR_ARCH: addon.supported_arch, + ATTR_DETACHED: addon.is_detached, + ATTR_REPOSITORY: addon.repository, + ATTR_BUILD: addon.need_build, + ATTR_URL: addon.url, }) return data @@ -68,15 +65,13 @@ class APISupervisor(object): def _repositories_list(self): """Return a list of addons repositories.""" data = [] - list_id = create_hash_index_list(self.config.addons_repositories) - for repository in self.addons.list_repositories: data.append({ - ATTR_SLUG: repository[ATTR_SLUG], - ATTR_NAME: repository[ATTR_NAME], - ATTR_SOURCE: list_id.get(repository[ATTR_SLUG]), - ATTR_URL: repository.get(ATTR_URL), - ATTR_MAINTAINER: repository.get(ATTR_MAINTAINER), + ATTR_SLUG: repository.slug, + ATTR_NAME: repository.name, + ATTR_SOURCE: repository.source, + ATTR_URL: repository.url, + ATTR_MAINTAINER: repository.maintainer, }) return data @@ -93,7 +88,7 @@ class APISupervisor(object): ATTR_VERSION: HASSIO_VERSION, ATTR_LAST_VERSION: self.config.last_hassio, ATTR_BETA_CHANNEL: self.config.upstream_beta, - ATTR_ARCH: self.addons.arch, + ATTR_ARCH: self.config.arch, ATTR_TIMEZONE: self.config.timezone, ATTR_ADDONS: self._addons_list(only_installed=True), ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories, @@ -120,21 +115,7 @@ class APISupervisor(object): if ATTR_ADDONS_REPOSITORIES in body: new = set(body[ATTR_ADDONS_REPOSITORIES]) - old = set(self.config.addons_repositories) - - # add new repositories - tasks = [self.addons.add_git_repository(url) for url in - set(new - old)] - if tasks: - await asyncio.shield( - asyncio.wait(tasks, loop=self.loop), loop=self.loop) - - # remove old repositories - for url in set(old - new): - self.addons.drop_git_repository(url) - - # read repository - self.addons.read_data_from_repositories() + await asyncio.shield(self.addons.load_repositories(new)) return True @@ -154,7 +135,8 @@ class APISupervisor(object): async def reload(self, request): """Reload addons, config ect.""" tasks = [ - self.addons.reload(), self.config.fetch_update_infos(), + self.addons.reload(), + self.config.fetch_update_infos(self.websession), self.host_control.load() ] results, _ = await asyncio.shield( diff --git a/hassio/api/util.py b/hassio/api/util.py index b59352dea..0c8583c43 100644 --- a/hassio/api/util.py +++ b/hassio/api/util.py @@ -81,6 +81,8 @@ def api_process_raw(method): def api_return_error(message=None): """Return a 
API error message.""" + _LOGGER.error(message) + return web.json_response({ JSON_RESULT: RESULT_ERROR, JSON_MESSAGE: message, diff --git a/hassio/bootstrap.py b/hassio/bootstrap.py index 81648467e..0e161f9d5 100644 --- a/hassio/bootstrap.py +++ b/hassio/bootstrap.py @@ -2,6 +2,7 @@ import logging import os import signal +from pathlib import Path from colorlog import ColoredFormatter @@ -11,9 +12,9 @@ from .config import CoreConfig _LOGGER = logging.getLogger(__name__) -def initialize_system_data(websession): +def initialize_system_data(): """Setup default config and create folders.""" - config = CoreConfig(websession) + config = CoreConfig() # homeassistant config folder if not config.path_config.is_dir(): @@ -42,10 +43,10 @@ def initialize_system_data(websession): config.path_addons_git) config.path_addons_git.mkdir(parents=True) - if not config.path_addons_build.is_dir(): - _LOGGER.info("Create Home-Assistant addon build folder %s", - config.path_addons_build) - config.path_addons_build.mkdir(parents=True) + # hassio tmp folder + if not config.path_tmp.is_dir(): + _LOGGER.info("Create hassio temp folder %s", config.path_tmp) + config.path_tmp.mkdir(parents=True) # hassio backup folder if not config.path_backup.is_dir(): @@ -60,6 +61,18 @@ def initialize_system_data(websession): return config +def migrate_system_env(config): + """Cleanup some stuff after update.""" + + # hass.io 0.37 -> 0.38 + old_build = Path(config.path_hassio, "addons/build") + if old_build.is_dir(): + try: + old_build.rmdir() + except OSError: + _LOGGER.warning("Can't cleanup old addons build dir.") + + def initialize_logging(): """Setup the logging.""" logging.basicConfig(level=logging.INFO) diff --git a/hassio/config.py b/hassio/config.py index 00f074c4d..30ff88d08 100644 --- a/hassio/config.py +++ b/hassio/config.py @@ -8,7 +8,7 @@ from pathlib import Path, PurePath import voluptuous as vol from voluptuous.humanize import humanize_error -from .const import FILE_HASSIO_CONFIG, HASSIO_SHARE +from .const import FILE_HASSIO_CONFIG, HASSIO_DATA from .tools import ( fetch_last_versions, write_json_file, read_json_file, validate_timezone) @@ -27,12 +27,11 @@ ADDONS_CORE = PurePath("addons/core") ADDONS_LOCAL = PurePath("addons/local") ADDONS_GIT = PurePath("addons/git") ADDONS_DATA = PurePath("addons/data") -ADDONS_BUILD = PurePath("addons/build") ADDONS_CUSTOM_LIST = 'addons_custom_list' BACKUP_DATA = PurePath("backup") - SHARE_DATA = PurePath("share") +TMP_DATA = PurePath("tmp") UPSTREAM_BETA = 'upstream_beta' API_ENDPOINT = 'api_endpoint' @@ -88,9 +87,9 @@ class Config(object): class CoreConfig(Config): """Hold all core config data.""" - def __init__(self, websession): + def __init__(self): """Initialize config object.""" - self.websession = websession + self.arch = None super().__init__(FILE_HASSIO_CONFIG) @@ -102,10 +101,9 @@ class CoreConfig(Config): _LOGGER.warning( "Invalid config %s", humanize_error(self._data, ex)) - async def fetch_update_infos(self): + async def fetch_update_infos(self, websession): """Read current versions from web.""" - last = await fetch_last_versions( - self.websession, beta=self.upstream_beta) + last = await fetch_last_versions(websession, beta=self.upstream_beta) if last: self._data.update({ @@ -175,6 +173,11 @@ class CoreConfig(Config): """Actual version of hassio.""" return self._data.get(HASSIO_LAST) + @property + def path_hassio(self): + """Return hassio data path.""" + return HASSIO_DATA + @property def path_extern_hassio(self): """Return hassio data path extern for docker.""" @@ 
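`migrate_system_env()` is the 0.37 -> 0.38 cleanup hook; note that `Path.rmdir()` only removes an empty directory, which is why a still-populated build folder merely logs a warning instead of being deleted. Standalone, the step amounts to:

```python
from pathlib import Path

old_build = Path("/data", "addons/build")   # HASSIO_DATA plus the pre-0.38 build dir
if old_build.is_dir():
    try:
        old_build.rmdir()                   # succeeds only when the directory is empty
    except OSError:
        print("Can't cleanup old addons build dir.")
```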
-188,7 +191,7 @@ class CoreConfig(Config): @property def path_config(self): """Return config path inside supervisor.""" - return Path(HASSIO_SHARE, HOMEASSISTANT_CONFIG) + return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG) @property def path_extern_ssl(self): @@ -198,22 +201,22 @@ class CoreConfig(Config): @property def path_ssl(self): """Return SSL path inside supervisor.""" - return Path(HASSIO_SHARE, HASSIO_SSL) + return Path(HASSIO_DATA, HASSIO_SSL) @property def path_addons_core(self): """Return git path for core addons.""" - return Path(HASSIO_SHARE, ADDONS_CORE) + return Path(HASSIO_DATA, ADDONS_CORE) @property def path_addons_git(self): """Return path for git addons.""" - return Path(HASSIO_SHARE, ADDONS_GIT) + return Path(HASSIO_DATA, ADDONS_GIT) @property def path_addons_local(self): """Return path for customs addons.""" - return Path(HASSIO_SHARE, ADDONS_LOCAL) + return Path(HASSIO_DATA, ADDONS_LOCAL) @property def path_extern_addons_local(self): @@ -223,7 +226,7 @@ class CoreConfig(Config): @property def path_addons_data(self): """Return root addon data folder.""" - return Path(HASSIO_SHARE, ADDONS_DATA) + return Path(HASSIO_DATA, ADDONS_DATA) @property def path_extern_addons_data(self): @@ -231,14 +234,14 @@ class CoreConfig(Config): return PurePath(self.path_extern_hassio, ADDONS_DATA) @property - def path_addons_build(self): - """Return root addon build folder.""" - return Path(HASSIO_SHARE, ADDONS_BUILD) + def path_tmp(self): + """Return hass.io temp folder.""" + return Path(HASSIO_DATA, TMP_DATA) @property def path_backup(self): """Return root backup data folder.""" - return Path(HASSIO_SHARE, BACKUP_DATA) + return Path(HASSIO_DATA, BACKUP_DATA) @property def path_extern_backup(self): @@ -248,7 +251,7 @@ class CoreConfig(Config): @property def path_share(self): """Return root share data folder.""" - return Path(HASSIO_SHARE, SHARE_DATA) + return Path(HASSIO_DATA, SHARE_DATA) @property def path_extern_share(self): diff --git a/hassio/const.py b/hassio/const.py index 826cbabee..d1c9df809 100644 --- a/hassio/const.py +++ b/hassio/const.py @@ -1,7 +1,7 @@ """Const file for HassIO.""" from pathlib import Path -HASSIO_VERSION = '0.37' +HASSIO_VERSION = '0.38' URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/' 'hassio/master/version.json') @@ -10,7 +10,7 @@ URL_HASSIO_VERSION_BETA = ('https://raw.githubusercontent.com/home-assistant/' URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons' -HASSIO_SHARE = Path("/data") +HASSIO_DATA = Path("/data") RUN_UPDATE_INFO_TASKS = 28800 RUN_UPDATE_SUPERVISOR_TASKS = 29100 @@ -20,8 +20,8 @@ RUN_CLEANUP_API_SESSIONS = 900 RESTART_EXIT_CODE = 100 -FILE_HASSIO_ADDONS = Path(HASSIO_SHARE, "addons.json") -FILE_HASSIO_CONFIG = Path(HASSIO_SHARE, "config.json") +FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json") +FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json") SOCKET_DOCKER = Path("/var/run/docker.sock") SOCKET_HC = Path("/var/run/hassio-hc.sock") @@ -92,6 +92,7 @@ BOOT_MANUAL = 'manual' STATE_STARTED = 'started' STATE_STOPPED = 'stopped' +STATE_NONE = 'none' MAP_CONFIG = 'config' MAP_SSL = 'ssl' @@ -103,3 +104,6 @@ ARCH_ARMHF = 'armhf' ARCH_AARCH64 = 'aarch64' ARCH_AMD64 = 'amd64' ARCH_I386 = 'i386' + +REPOSITORY_CORE = 'core' +REPOSITORY_LOCAL = 'local' diff --git a/hassio/core.py b/hassio/core.py index 32767418d..ffaa717b6 100644 --- a/hassio/core.py +++ b/hassio/core.py @@ -5,7 +5,6 @@ import logging import aiohttp import docker -from . 
import bootstrap from .addons import AddonManager from .api import RestAPI from .host_control import HostControl @@ -20,7 +19,7 @@ from .dock.supervisor import DockerSupervisor from .tasks import ( hassio_update, homeassistant_watchdog, homeassistant_setup, api_sessions_cleanup) -from .tools import get_arch_from_image, get_local_ip, fetch_timezone +from .tools import get_local_ip, fetch_timezone _LOGGER = logging.getLogger(__name__) @@ -28,28 +27,26 @@ _LOGGER = logging.getLogger(__name__) class HassIO(object): """Main object of hassio.""" - def __init__(self, loop): + def __init__(self, loop, config): """Initialize hassio object.""" self.exit_code = 0 self.loop = loop - self.websession = aiohttp.ClientSession(loop=self.loop) - self.config = bootstrap.initialize_system_data(self.websession) - self.scheduler = Scheduler(self.loop) - self.api = RestAPI(self.config, self.loop) + self.config = config + self.websession = aiohttp.ClientSession(loop=loop) + self.scheduler = Scheduler(loop) + self.api = RestAPI(config, loop) self.dock = docker.DockerClient( base_url="unix:/{}".format(str(SOCKET_DOCKER)), version='auto') # init basic docker container - self.supervisor = DockerSupervisor( - self.config, self.loop, self.dock, self.stop) - self.homeassistant = DockerHomeAssistant( - self.config, self.loop, self.dock) + self.supervisor = DockerSupervisor(config, loop, self.dock, self.stop) + self.homeassistant = DockerHomeAssistant(config, loop, self.dock) # init HostControl - self.host_control = HostControl(self.loop) + self.host_control = HostControl(loop) # init addon system - self.addons = AddonManager(self.config, self.loop, self.dock) + self.addons = AddonManager(config, loop, self.dock) async def setup(self): """Setup HassIO orchestration.""" @@ -58,6 +55,9 @@ class HassIO(object): _LOGGER.fatal("Can't attach to supervisor docker container!") await self.supervisor.cleanup() + # set running arch + self.config.arch = self.supervisor.arch + # set api endpoint self.config.api_endpoint = await get_local_ip(self.loop) @@ -70,13 +70,13 @@ class HassIO(object): # schedule update info tasks self.scheduler.register_task( - self.host_control.load, RUN_UPDATE_INFO_TASKS) + # rest api views self.api.register_host(self.host_control) self.api.register_network(self.host_control) self.api.register_supervisor( - self.supervisor, self.addons, self.host_control) + self.supervisor, self.addons, self.host_control, self.websession) self.api.register_homeassistant(self.homeassistant) self.api.register_addons(self.addons) self.api.register_security() @@ -87,22 +87,16 @@ class HassIO(object): api_sessions_cleanup(self.config), RUN_CLEANUP_API_SESSIONS, now=True) - # schedule update info tasks - self.scheduler.register_task( - self.config.fetch_update_infos, RUN_UPDATE_INFO_TASKS, - now=True) - # first start of supervisor? 
if not await self.homeassistant.exists(): _LOGGER.info("No HomeAssistant docker found.") await homeassistant_setup( - self.config, self.loop, self.homeassistant) + self.config, self.loop, self.homeassistant, self.websession) else: await self.homeassistant.attach() # Load addons - arch = get_arch_from_image(self.supervisor.image) - await self.addons.prepare(arch) + await self.addons.prepare() # schedule addon update task self.scheduler.register_task( @@ -110,7 +104,7 @@ class HassIO(object): # schedule self update task self.scheduler.register_task( - hassio_update(self.config, self.supervisor), + hassio_update(self.config, self.supervisor, self.websession), RUN_UPDATE_SUPERVISOR_TASKS) # start addon mark as initialize @@ -118,6 +112,13 @@ class HassIO(object): async def start(self): """Start HassIO orchestration.""" + # on release channel, try update itself + # on beta channel, only read new versions + await asyncio.wait( + [hassio_update(self.config, self.supervisor, self.websession)()], + loop=self.loop + ) + # start api await self.api.start() _LOGGER.info("Start hassio api on %s", self.config.api_endpoint) @@ -148,9 +149,9 @@ class HassIO(object): # don't process scheduler anymore self.scheduler.stop() - # process stop task pararell - tasks = [self.websession.close(), self.api.stop()] - await asyncio.wait(tasks, loop=self.loop) + # process stop tasks + self.websession.close() + await self.api.stop() self.exit_code = exit_code self.loop.stop() diff --git a/hassio/dock/__init__.py b/hassio/dock/__init__.py index 764edd67c..e53a361c4 100644 --- a/hassio/dock/__init__.py +++ b/hassio/dock/__init__.py @@ -5,7 +5,7 @@ import logging import docker -from ..const import LABEL_VERSION +from ..const import LABEL_VERSION, LABEL_ARCH _LOGGER = logging.getLogger(__name__) @@ -20,6 +20,7 @@ class DockerBase(object): self.dock = dock self.image = image self.version = None + self.arch = None self._lock = asyncio.Lock(loop=loop) @property @@ -38,13 +39,18 @@ class DockerBase(object): if not self.image: self.image = metadata['Config']['Image'] - # read metadata + # read version need_version = force or not self.version if need_version and LABEL_VERSION in metadata['Config']['Labels']: self.version = metadata['Config']['Labels'][LABEL_VERSION] elif need_version: _LOGGER.warning("Can't read version from %s", self.name) + # read arch + need_arch = force or not self.arch + if need_arch and LABEL_ARCH in metadata['Config']['Labels']: + self.arch = metadata['Config']['Labels'][LABEL_ARCH] + async def install(self, tag): """Pull docker image.""" if self._lock.locked(): @@ -183,13 +189,13 @@ class DockerBase(object): except docker.errors.DockerException: return - _LOGGER.info("Stop %s docker application", self.image) - if container.status == 'running': + _LOGGER.info("Stop %s docker application", self.image) with suppress(docker.errors.DockerException): - container.stop() + container.stop(timeout=15) with suppress(docker.errors.DockerException): + _LOGGER.info("Clean %s docker application", self.image) container.remove(force=True) async def remove(self): @@ -261,7 +267,7 @@ class DockerBase(object): """Return docker logs of container.""" if self._lock.locked(): _LOGGER.error("Can't excute logs while a task is in progress") - return False + return b"" async with self._lock: return await self.loop.run_in_executor(None, self._logs) diff --git a/hassio/dock/addon.py b/hassio/dock/addon.py index fbd8bca02..ffa0e9e21 100644 --- a/hassio/dock/addon.py +++ b/hassio/dock/addon.py @@ -1,4 +1,5 @@ """Init file for HassIO 
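The `DockerBase` changes above (arch read from image labels during attach, the 15-second stop timeout, forced removal) boil down to this docker-py sequence; the container name is hypothetical, label keys are assumed, and error handling is trimmed:

```python
import docker

client = docker.DockerClient(base_url="unix://var/run/docker.sock", version="auto")
container = client.containers.get("addon_example")   # hypothetical addon container

if container.status == "running":
    container.stop(timeout=15)        # give the add-on 15 seconds to shut down
container.remove(force=True)          # then clean up the stopped container

# Version/arch metadata comes from image labels, as _attach() now reads them:
labels = container.attrs["Config"]["Labels"]
version = labels.get("io.hass.version")   # LABEL_VERSION key assumed
arch = labels.get("io.hass.arch")         # LABEL_ARCH key assumed
```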
diff --git a/hassio/dock/addon.py b/hassio/dock/addon.py
index fbd8bca02..ffa0e9e21 100644
--- a/hassio/dock/addon.py
+++ b/hassio/dock/addon.py
@@ -1,4 +1,5 @@
 """Init file for HassIO addon docker object."""
+from contextlib import suppress
 import logging
 from pathlib import Path
 import shutil
@@ -16,22 +17,21 @@ _LOGGER = logging.getLogger(__name__)
 class DockerAddon(DockerBase):
     """Docker hassio wrapper for HomeAssistant."""
 
-    def __init__(self, config, loop, dock, addons_data, addon):
+    def __init__(self, config, loop, dock, addon):
         """Initialize docker homeassistant wrapper."""
         super().__init__(
-            config, loop, dock, image=addons_data.get_image(addon))
+            config, loop, dock, image=addon.image)
         self.addon = addon
-        self.addons_data = addons_data
 
     @property
     def name(self):
         """Return name of docker container."""
-        return "addon_{}".format(self.addon)
+        return "addon_{}".format(self.addon.slug)
 
     @property
     def environment(self):
         """Return environment for docker add-on."""
-        addon_env = self.addons_data.get_environment(self.addon) or {}
+        addon_env = self.addon.environment or {}
 
         return {
             **addon_env,
@@ -41,7 +41,7 @@ class DockerAddon(DockerBase):
     @property
     def tmpfs(self):
         """Return tmpfs for docker add-on."""
-        options = self.addons_data.get_tmpfs(self.addon)
+        options = self.addon.tmpfs
         if options:
             return {"/tmpfs": "{}".format(options)}
         return None
@@ -50,11 +50,11 @@
     def volumes(self):
         """Generate volumes for mappings."""
         volumes = {
-            str(self.addons_data.path_extern_data(self.addon)): {
+            str(self.addon.path_extern_data): {
                 'bind': '/data', 'mode': 'rw'
             }}
 
-        addon_mapping = self.addons_data.map_volumes(self.addon)
+        addon_mapping = self.addon.map_volumes
 
         if MAP_CONFIG in addon_mapping:
             volumes.update({
@@ -94,20 +94,24 @@
         Need run inside executor.
         """
         if self._is_running():
-            return
+            return True
 
         # cleanup
         self._stop()
 
+        # write config
+        if not self.addon.write_options():
+            return False
+
         try:
             self.dock.containers.run(
                 self.image,
                 name=self.name,
                 detach=True,
-                network_mode=self.addons_data.get_network_mode(self.addon),
-                ports=self.addons_data.get_ports(self.addon),
-                devices=self.addons_data.get_devices(self.addon),
-                cap_add=self.addons_data.get_privileged(self.addon),
+                network_mode=self.addon.network_mode,
+                ports=self.addon.ports,
+                devices=self.addon.devices,
+                cap_add=self.addon.privileged,
                 environment=self.environment,
                 volumes=self.volumes,
                 tmpfs=self.tmpfs
@@ -126,7 +130,7 @@
 
         Need run inside executor.
         """
-        if self.addons_data.need_build(self.addon):
+        if self.addon.need_build:
             return self._build(tag)
 
         return super()._install(tag)
@@ -145,11 +149,11 @@
 
         Need run inside executor.
         """
-        build_dir = Path(self.config.path_addons_build, self.addon)
+        build_dir = Path(self.config.path_tmp, self.addon.slug)
         try:
             # prepare temporary addon build folder
             try:
-                source = self.addons_data.path_addon_location(self.addon)
+                source = self.addon.path_addon_location
                 shutil.copytree(str(source), str(build_dir))
             except shutil.Error as err:
                 _LOGGER.error("Can't copy %s to temporary build folder -> %s",
@@ -159,7 +163,7 @@
             # prepare Dockerfile
             try:
                 dockerfile_template(
-                    Path(build_dir, 'Dockerfile'), self.addons_data.arch,
+                    Path(build_dir, 'Dockerfile'), self.config.arch,
                     tag, META_ADDON)
             except OSError as err:
                 _LOGGER.error("Can't prepare dockerfile -> %s", err)
@@ -184,3 +188,21 @@
         finally:
             shutil.rmtree(str(build_dir), ignore_errors=True)
+
+    def _restart(self):
+        """Restart docker container.
+
+        Addons prepare something on start and that is normally not repeatable.
+        Need run inside executor.
+        """
+        try:
+            container = self.dock.containers.get(self.name)
+        except docker.errors.DockerException:
+            return False
+
+        _LOGGER.info("Restart %s", self.image)
+
+        with suppress(docker.errors.DockerException):
+            container.stop(timeout=15)
+
+        return self._run()
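For reference, the volumes and tmpfs values that DockerAddon builds above follow docker-py's containers.run() conventions. A self-contained sketch with hypothetical paths, image, and add-on name, not values taken from real add-on data:

```python
import docker

client = docker.DockerClient(base_url="unix://var/run/docker.sock")

# host path -> bind target, the shape produced by the volumes property
volumes = {
    "/usr/share/hassio/addons/data/example_addon": {
        'bind': '/data', 'mode': 'rw'
    },
}

container = client.containers.run(
    "example/addon-image:latest",   # hypothetical image
    name="addon_example",
    detach=True,
    network_mode="bridge",
    volumes=volumes,
    tmpfs={"/tmpfs": "size=64m"},   # shape returned by the tmpfs property
)
print(container.status)
```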
+ """ + try: + container = self.dock.containers.get(self.name) + except docker.errors.DockerException: + return False + + _LOGGER.info("Restart %s", self.image) + + with suppress(docker.errors.DockerException): + container.stop(timeout=15) + + return self._run() diff --git a/hassio/panel/hassio-main.html b/hassio/panel/hassio-main.html index 788a3529b..4220cfd4b 100644 --- a/hassio/panel/hassio-main.html +++ b/hassio/panel/hassio-main.html @@ -1,4 +1,147 @@ - \ No newline at end of file +}); \ No newline at end of file diff --git a/hassio/panel/hassio-main.html.gz b/hassio/panel/hassio-main.html.gz index d4ba81331..7f64acebd 100644 Binary files a/hassio/panel/hassio-main.html.gz and b/hassio/panel/hassio-main.html.gz differ diff --git a/hassio/tasks.py b/hassio/tasks.py index 32e4c8a6d..65d04a140 100644 --- a/hassio/tasks.py +++ b/hassio/tasks.py @@ -18,13 +18,19 @@ def api_sessions_cleanup(config): return _api_sessions_cleanup -def hassio_update(config, supervisor): +def hassio_update(config, supervisor, websession): """Create scheduler task for update of supervisor hassio.""" async def _hassio_update(): """Check and run update of supervisor hassio.""" + await config.fetch_update_infos(websession) if config.last_hassio == supervisor.version: return + # don't perform a update on beta/dev channel + if config.upstream_beta: + _LOGGER.warning("Ignore Hass.IO update on beta upstream!") + return + _LOGGER.info("Found new HassIO version %s.", config.last_hassio) await supervisor.update(config.last_hassio) @@ -43,12 +49,12 @@ def homeassistant_watchdog(loop, homeassistant): return _homeassistant_watchdog -async def homeassistant_setup(config, loop, homeassistant): +async def homeassistant_setup(config, loop, homeassistant, websession): """Install a homeassistant docker container.""" while True: # read homeassistant tag and install it if not config.last_homeassistant: - await config.fetch_update_infos() + await config.fetch_update_infos(websession) tag = config.last_homeassistant if tag and await homeassistant.install(tag): diff --git a/hassio/tools.py b/hassio/tools.py index 0879928fc..7147fe62d 100644 --- a/hassio/tools.py +++ b/hassio/tools.py @@ -3,7 +3,6 @@ import asyncio from contextlib import suppress import json import logging -import re import socket import aiohttp @@ -17,9 +16,6 @@ _LOGGER = logging.getLogger(__name__) FREEGEOIP_URL = "https://freegeoip.io/json/" -_RE_VERSION = re.compile(r"VERSION=(.*)") -_IMAGE_ARCH = re.compile(r".*/([a-z0-9]*)-hassio-supervisor") - async def fetch_last_versions(websession, beta=False): """Fetch current versions from github. @@ -39,13 +35,6 @@ async def fetch_last_versions(websession, beta=False): _LOGGER.warning("Can't parse versions from %s! %s", url, err) -def get_arch_from_image(image): - """Return arch from hassio image name.""" - found = _IMAGE_ARCH.match(image) - if found: - return found.group(1) - - def get_local_ip(loop): """Retrieve local IP address. diff --git a/home-assistant-polymer b/home-assistant-polymer index c5a5f41d3..d2a56655d 160000 --- a/home-assistant-polymer +++ b/home-assistant-polymer @@ -1 +1 @@ -Subproject commit c5a5f41d3c1f512266ab93a5ef6d0479608865f0 +Subproject commit d2a56655d086a040e712680e46e191d78949dfa3 diff --git a/version.json b/version.json index 58fa35175..21b6da62b 100644 --- a/version.json +++ b/version.json @@ -1,5 +1,5 @@ { - "hassio": "0.37", + "hassio": "0.38", "homeassistant": "0.47.1", "resinos": "0.8", "resinhup": "0.1",