Merge pull request #20 from pvizeli/multible_git_repos

Allow custom repositories / improve config validation
This commit is contained in:
Pascal Vizeli 2017-04-27 23:54:29 +02:00 committed by GitHub
commit c8e3f2b48a
10 changed files with 323 additions and 72 deletions

API.md

@ -40,6 +40,9 @@ On success
"dedicated": "bool",
"description": "description"
}
],
"addons_repositories": [
"REPO_URL"
]
}
```
@ -55,7 +58,10 @@ Optional:
- POST `/supervisor/options`
```json
{
"beta_channel": "true|false"
"beta_channel": "true|false",
"addons_repositories": [
"REPO_URL"
]
}
```
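
For reference, a minimal usage sketch of the new option. The supervisor host and the repository URL below are placeholders, not part of this change:

```python
# Sketch: enable a custom add-on repository via the supervisor API.
# Host and repository URL are hypothetical.
import requests

SUPERVISOR = "http://hassio.example.local"

payload = {
    "beta_channel": False,
    "addons_repositories": ["https://github.com/example/hassio-addons"],
}

resp = requests.post(SUPERVISOR + "/supervisor/options", json=payload, timeout=10)
resp.raise_for_status()
```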


@ -5,7 +5,7 @@ import os
import shutil
from .data import AddonsData
from .git import AddonsRepo
from .git import AddonsRepoHassIO, AddonsRepoCustom
from ..const import STATE_STOPPED, STATE_STARTED
from ..dock.addon import DockerAddon
@ -21,16 +21,29 @@ class AddonManager(AddonsData):
self.loop = loop
self.dock = dock
self.repo = AddonsRepo(config, loop)
self.repositories = []
self.dockers = {}
async def prepare(self, arch):
"""Startup addon management."""
self.arch = arch
# init hassio repository
self.repositories.append(AddonsRepoHassIO(self.config, self.loop))
# init custom repositories
for url in self.config.addons_repositories:
self.repositories.append(
AddonsRepoCustom(self.config, self.loop, url))
# load addon repository
if await self.repo.load():
self.read_addons_repo()
tasks = [addon.load() for addon in self.repositories]
if tasks:
await asyncio.wait(tasks, loop=self.loop)
# read data from repositories
self.read_data_from_repositories()
self.merge_update_config()
# load installed addons
for addon in self.list_installed:
@ -38,11 +51,44 @@ class AddonManager(AddonsData):
self.config, self.loop, self.dock, self, addon)
await self.dockers[addon].attach()
async def add_custom_repository(self, url):
"""Add a new custom repository."""
if url in self.config.addons_repositories:
_LOGGER.warning("Repository already exists %s", url)
return False
repo = AddonsRepoCustom(self.config, self.loop, url)
if not await repo.load():
_LOGGER.error("Can't load from repository %s", url)
return False
self.config.addons_repositories = repo
self.repositories.append(repo)
return True
def drop_custom_repository(self, url):
"""Remove a custom repository."""
for repo in self.repositories:
if repo.url == url:
self.repositories.remove(repo)
self.config.drop_addon_repository(url)
repo.remove()
return True
return False
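
A rough usage sketch for the two methods above; `manager` is assumed to be an already prepared AddonManager and the URL is hypothetical:

```python
async def demo(manager):
    """Add and later drop a custom repository on a prepared AddonManager."""
    url = "https://github.com/example/hassio-addons"  # hypothetical URL

    if await manager.add_custom_repository(url):
        # repository cloned, add-on data merged, URL persisted in the config
        print([repo.url for repo in manager.repositories])

    # dropping deletes the checkout and forgets the URL again
    manager.drop_custom_repository(url)
```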
async def reload(self):
"""Update addons from repo and reload list."""
if not await self.repo.pull():
tasks = [addon.pull() for addon in self.repositories]
if not tasks:
return
self.read_addons_repo()
await asyncio.wait(tasks, loop=self.loop)
# read data from repositories
self.read_data_from_repositories()
self.merge_update_config()
# remove stale addons
for addon in self.list_removed:
@ -51,10 +97,7 @@ class AddonManager(AddonsData):
async def auto_boot(self, start_type):
"""Boot addons with mode auto."""
boot_list = self.list_startup(start_type)
tasks = []
for addon in boot_list:
tasks.append(self.loop.create_task(self.start(addon)))
tasks = [self.start(addon) for addon in boot_list]
_LOGGER.info("Startup %s run %d addons", start_type, len(tasks))
if tasks:


@ -1,22 +1,24 @@
"""Init file for HassIO addons."""
import copy
import logging
import glob
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .util import extract_hash_from_path
from .validate import validate_options, SCHEMA_ADDON_CONFIG
from ..const import (
FILE_HASSIO_ADDONS, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON,
ATTR_STARTUP, ATTR_BOOT, ATTR_MAP_SSL, ATTR_MAP_CONFIG, ATTR_OPTIONS,
ATTR_PORTS, BOOT_AUTO, DOCKER_REPO, ATTR_INSTALLED, ATTR_SCHEMA,
ATTR_IMAGE, ATTR_DEDICATED)
ATTR_STARTUP, ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, BOOT_AUTO,
DOCKER_REPO, ATTR_INSTALLED, ATTR_SCHEMA, ATTR_IMAGE, ATTR_DEDICATED,
MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP)
from ..config import Config
from ..tools import read_json_file, write_json_file
_LOGGER = logging.getLogger(__name__)
ADDONS_REPO_PATTERN = "{}/*/config.json"
ADDONS_REPO_PATTERN = "{}/**/config.json"
SYSTEM = "system"
USER = "user"
@ -41,31 +43,60 @@ class AddonsData(Config):
}
super().save()
def read_addons_repo(self):
def read_data_from_repositories(self):
"""Read data from addons repository."""
self._current_data = {}
self._read_addons_folder(self.config.path_addons_repo)
self._read_addons_folder(self.config.path_addons_custom)
self._read_addons_folder(self.config.path_addons_custom, custom=True)
def _read_addons_folder(self, folder):
def _read_addons_folder(self, folder, custom=False):
"""Read data from addons folder."""
pattern = ADDONS_REPO_PATTERN.format(folder)
for addon in glob.iglob(pattern):
for addon in glob.iglob(pattern, recursive=True):
try:
addon_config = read_json_file(addon)
addon_config = SCHEMA_ADDON_CONFIG(addon_config)
self._current_data[addon_config[ATTR_SLUG]] = addon_config
if custom:
addon_slug = "{}_{}".format(
extract_hash_from_path(folder, addon),
addon_config[ATTR_SLUG],
)
else:
addon_slug = addon_config[ATTR_SLUG]
except (OSError, KeyError):
self._current_data[addon_slug] = addon_config
except OSError:
_LOGGER.warning("Can't read %s", addon)
except vol.Invalid as ex:
_LOGGER.warning("Can't read %s -> %s", addon,
humanize_error(addon_config, ex))
def merge_update_config(self):
"""Update local config if they have update.
It need to be the same version as the local version is.
"""
have_change = False
for addon, data in self._system_data.items():
# skip installed add-ons that are no longer in any repository
if addon not in self._current_data:
continue
current = self._current_data[addon]
if data[ATTR_VERSION] == current[ATTR_VERSION]:
if data != current:
self._system_data[addon] = copy.deepcopy(current)
have_change = True
if have_change:
self.save()
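
To make the intended merge behaviour concrete, a small standalone illustration (data shapes are simplified and hypothetical):

```python
# Repository data only replaces the stored copy while the version is unchanged.
system = {"example_addon": {"version": "1.0", "boot": "auto"}}
current = {"example_addon": {"version": "1.0", "boot": "manual"}}

for slug, data in system.items():
    if slug not in current:
        continue
    if data["version"] == current[slug]["version"] and data != current[slug]:
        system[slug] = dict(current[slug])  # refresh metadata, keep version

assert system["example_addon"]["boot"] == "manual"
```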
@property
def list_installed(self):
"""Return a list of installed addons."""
@ -83,7 +114,7 @@ class AddonsData(Config):
data.append({
ATTR_NAME: values[ATTR_NAME],
ATTR_SLUG: values[ATTR_SLUG],
ATTR_SLUG: addon,
ATTR_DESCRIPTON: values[ATTR_DESCRIPTON],
ATTR_VERSION: values[ATTR_VERSION],
ATTR_INSTALLED: i_version,
@ -132,7 +163,7 @@ class AddonsData(Config):
def set_addon_install(self, addon, version):
"""Set addon as installed."""
self._system_data[addon] = self._current_data[addon]
self._system_data[addon] = copy.deepcopy(self._current_data[addon])
self._user_data[addon] = {
ATTR_OPTIONS: {},
ATTR_VERSION: version,
@ -147,13 +178,13 @@ class AddonsData(Config):
def set_addon_update(self, addon, version):
"""Update version of addon."""
self._system_data[addon] = self._current_data[addon]
self._system_data[addon] = copy.deepcopy(self._current_data[addon])
self._user_data[addon][ATTR_VERSION] = version
self.save()
def set_options(self, addon, options):
"""Store user addon options."""
self._user_data[addon][ATTR_OPTIONS] = options
self._user_data[addon][ATTR_OPTIONS] = copy.deepcopy(options)
self.save()
def set_boot(self, addon, boot):
@ -202,13 +233,21 @@ class AddonsData(Config):
return addon_data[ATTR_IMAGE]
def need_config(self, addon):
def map_config(self, addon):
"""Return True if config map is needed."""
return self._system_data[addon][ATTR_MAP_CONFIG]
return MAP_CONFIG in self._system_data[addon][ATTR_MAP]
def need_ssl(self, addon):
def map_ssl(self, addon):
"""Return True if ssl map is needed."""
return self._system_data[addon][ATTR_MAP_SSL]
return MAP_SSL in self._system_data[addon][ATTR_MAP]
def map_addons(self, addon):
"""Return True if addons map is needed."""
return MAP_ADDONS in self._system_data[addon][ATTR_MAP]
def map_backup(self, addon):
"""Return True if backup map is needed."""
return MAP_BACKUP in self._system_data[addon][ATTR_MAP]
def path_data(self, addon):
"""Return addon data path inside supervisor."""


@ -2,9 +2,11 @@
import asyncio
import logging
import os
import shutil
import git
from .util import get_hash_from_repository
from ..const import URL_HASSIO_ADDONS
_LOGGER = logging.getLogger(__name__)
@ -13,26 +15,28 @@ _LOGGER = logging.getLogger(__name__)
class AddonsRepo(object):
"""Manage addons git repo."""
def __init__(self, config, loop):
"""Initialize docker base wrapper."""
def __init__(self, config, loop, path, url):
"""Initialize git base wrapper."""
self.config = config
self.loop = loop
self.repo = None
self.path = path
self.url = url
self._lock = asyncio.Lock(loop=loop)
async def load(self):
"""Init git addon repo."""
if not os.path.isdir(self.config.path_addons_repo):
if not os.path.isdir(self.path):
return await self.clone()
async with self._lock:
try:
_LOGGER.info("Load addons repository")
self.repo = await self.loop.run_in_executor(
None, git.Repo, self.config.path_addons_repo)
None, git.Repo, self.path)
except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
_LOGGER.error("Can't load addons repo: %s.", err)
_LOGGER.error("Can't load %s repo: %s.", self.path, err)
return False
return True
@ -43,11 +47,10 @@ class AddonsRepo(object):
try:
_LOGGER.info("Clone addons repository")
self.repo = await self.loop.run_in_executor(
None, git.Repo.clone_from, URL_HASSIO_ADDONS,
self.config.path_addons_repo)
None, git.Repo.clone_from, self.url, self.path)
except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
_LOGGER.error("Can't clone addons repo: %s.", err)
_LOGGER.error("Can't clone %s repo: %s.", self.url, err)
return False
return True
@ -65,7 +68,38 @@ class AddonsRepo(object):
None, self.repo.remotes.origin.pull)
except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
_LOGGER.error("Can't pull addons repo: %s.", err)
_LOGGER.error("Can't pull %s repo: %s.", self.url, err)
return False
return True
class AddonsRepoHassIO(AddonsRepo):
"""HassIO addons repository."""
def __init__(self, config, loop):
"""Initialize git hassio addon repository."""
super().__init__(
config, loop, config.path_addons_repo, URL_HASSIO_ADDONS)
class AddonsRepoCustom(AddonsRepo):
"""Custom addons repository."""
def __init__(self, config, loop, url):
"""Initialize git hassio addon repository."""
path = os.path.join(
config.path_addons_custom, get_hash_from_repository(url))
super().__init__(config, loop, path, url)
def remove(self):
"""Remove a custom addon."""
if os.path.isdir(self.path):
_LOGGER.info("Remove custom addon repository %s", self.url)
def log_err(funct, path, _):
"""Log error."""
_LOGGER.warning("Can't remove %s", path)
shutil.rmtree(self.path, onerror=log_err)

hassio/addons/util.py (new file)

@ -0,0 +1,34 @@
"""Util addons functions."""
import hashlib
import pathlib
import re
import unicodedata
RE_SLUGIFY = re.compile(r'[^a-z0-9_]+')
def slugify(text):
"""Slugify a given text."""
text = unicodedata.normalize('NFKD', text)
text = text.lower()
text = text.replace(" ", "_")
text = RE_SLUGIFY.sub("", text)
return text
def get_hash_from_repository(repo):
"""Generate a hash from repository."""
key = repo.lower().encode()
return hashlib.sha1(key).hexdigest()[:8]
def extract_hash_from_path(base_path, options_path):
"""Extract repo id from path."""
base_dir = pathlib.PurePosixPath(base_path).parts[-1]
dirlist = iter(pathlib.PurePosixPath(options_path).parts)
for obj in dirlist:
if obj != base_dir:
continue
return slugify(next(dirlist))
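
A short sanity check of these helpers; the repository URL and the paths are hypothetical:

```python
# Assumed import: from hassio.addons.util import (
#     get_hash_from_repository, extract_hash_from_path)
repo_url = "https://github.com/example/hassio-addons"
repo_hash = get_hash_from_repository(repo_url)  # first 8 hex chars of sha1(url)

config_json = "/data/addons_custom/{}/my_addon/config.json".format(repo_hash)
assert extract_hash_from_path("/data/addons_custom", config_json) == repo_hash

# custom add-ons are later namespaced as "<repo_hash>_<slug>"
addon_slug = "{}_{}".format(repo_hash, "my_addon")
```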


@ -3,9 +3,9 @@ import voluptuous as vol
from ..const import (
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
ATTR_BOOT, ATTR_MAP_SSL, ATTR_MAP_CONFIG, ATTR_OPTIONS,
ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER, STARTUP_BEFORE, BOOT_AUTO,
BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE)
ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER,
STARTUP_BEFORE, BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, MAP_SSL,
MAP_CONFIG, MAP_ADDONS, MAP_BACKUP)
V_STR = 'str'
V_INT = 'int'
@ -27,8 +27,9 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
vol.Required(ATTR_BOOT):
vol.In([BOOT_AUTO, BOOT_MANUAL]),
vol.Optional(ATTR_PORTS): dict,
vol.Optional(ATTR_MAP_CONFIG, default=False): vol.Boolean(),
vol.Optional(ATTR_MAP_SSL, default=False): vol.Boolean(),
vol.Optional(ATTR_MAP, default=[]): [
vol.In([MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP])
],
vol.Required(ATTR_OPTIONS): dict,
vol.Required(ATTR_SCHEMA): {
vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
@ -36,7 +37,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
])
},
vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
})
}, extra=vol.ALLOW_EXTRA)
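
For illustration, a hypothetical add-on config fragment using the new map list; only the keys touched by this change are shown:

```python
# Excerpt of an add-on config.json after this change: the map_config/map_ssl
# booleans are replaced by a single "map" list.
addon_config_fragment = {
    # ... name, version, slug, description, startup, boot, options, schema ...
    "ports": {"8080/tcp": 8080},
    "map": ["config", "ssl", "backup"],  # allowed: config, ssl, addons, backup
}
```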
def validate_options(raw_schema):


@ -7,13 +7,14 @@ import voluptuous as vol
from .util import api_process, api_process_raw, api_validate
from ..const import (
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL,
HASSIO_VERSION)
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES)
_LOGGER = logging.getLogger(__name__)
SCHEMA_OPTIONS = vol.Schema({
# pylint: disable=no-value-for-parameter
vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
vol.Optional(ATTR_ADDONS_REPOSITORIES): [vol.Url()],
})
SCHEMA_VERSION = vol.Schema({
@ -45,6 +46,7 @@ class APISupervisor(object):
ATTR_LAST_VERSION: self.config.last_hassio,
ATTR_BETA_CHANNEL: self.config.upstream_beta,
ATTR_ADDONS: self.addons.list_api,
ATTR_ADDONS_REPOSITORIES: list(self.config.addons_repositories),
}
@api_process
@ -55,7 +57,19 @@ class APISupervisor(object):
if ATTR_BETA_CHANNEL in body:
self.config.upstream_beta = body[ATTR_BETA_CHANNEL]
return self.config.save()
if ATTR_ADDONS_REPOSITORIES in body:
new = set(body[ATTR_ADDONS_REPOSITORIES])
old = set(self.config.addons_repositories)
# add new repositories
for url in set(new - old):
await self.addons.add_custom_repository(url)
# remove old repositories
for url in set(old - new):
self.addons.drop_custom_repository(url)
return True
@api_process
async def update(self, request):


@ -2,6 +2,9 @@
import logging
import os
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .const import FILE_HASSIO_CONFIG, HASSIO_SHARE
from .tools import (
fetch_current_versions, write_json_file, read_json_file)
@ -19,12 +22,32 @@ HASSIO_CLEANUP = 'hassio_cleanup'
ADDONS_REPO = "{}/addons"
ADDONS_DATA = "{}/addons_data"
ADDONS_CUSTOM = "{}/addons_custom"
ADDONS_CUSTOM_LIST = 'addons_custom_list'
BACKUP_DATA = "{}/backup"
UPSTREAM_BETA = 'upstream_beta'
API_ENDPOINT = 'api_endpoint'
def hass_image():
"""Return HomeAssistant docker Image."""
return os.environ.get('HOMEASSISTANT_REPOSITORY')
# pylint: disable=no-value-for-parameter
SCHEMA_CONFIG = vol.Schema({
vol.Optional(HOMEASSISTANT_IMAGE, default=hass_image): vol.Coerce(str),
vol.Optional(UPSTREAM_BETA, default=False): vol.Boolean(),
vol.Optional(API_ENDPOINT): vol.Coerce(str),
vol.Optional(HOMEASSISTANT_LAST): vol.Coerce(str),
vol.Optional(HASSIO_LAST): vol.Coerce(str),
vol.Optional(HASSIO_CLEANUP): vol.Coerce(str),
vol.Optional(ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
}, extra=vol.REMOVE_EXTRA)
class Config(object):
"""Hold all config data."""
@ -57,13 +80,13 @@ class CoreConfig(Config):
super().__init__(FILE_HASSIO_CONFIG)
# init data
if not self._data:
self._data.update({
HOMEASSISTANT_IMAGE: os.environ['HOMEASSISTANT_REPOSITORY'],
UPSTREAM_BETA: False,
})
# validate data
try:
self._data = SCHEMA_CONFIG(self._data)
self.save()
except vol.Invalid as ex:
_LOGGER.warning(
"Invalid config %s", humanize_error(self._data, ex))
async def fetch_update_infos(self):
"""Read current versions from web."""
@ -93,7 +116,7 @@ class CoreConfig(Config):
@property
def upstream_beta(self):
"""Return True if we run in beta upstream."""
return self._data.get(UPSTREAM_BETA, False)
return self._data[UPSTREAM_BETA]
@upstream_beta.setter
def upstream_beta(self, value):
@ -164,6 +187,11 @@ class CoreConfig(Config):
"""Return path for customs addons."""
return ADDONS_CUSTOM.format(HASSIO_SHARE)
@property
def path_addons_custom_docker(self):
"""Return path for customs addons."""
return ADDONS_CUSTOM.format(self.path_hassio_docker)
@property
def path_addons_data(self):
"""Return root addon data folder."""
@ -173,3 +201,32 @@ class CoreConfig(Config):
def path_addons_data_docker(self):
"""Return root addon data folder extern for docker."""
return ADDONS_DATA.format(self.path_hassio_docker)
@property
def path_backup(self):
"""Return root backup data folder."""
return BACKUP_DATA.format(HASSIO_SHARE)
@property
def path_backup_docker(self):
"""Return root backup data folder extern for docker."""
return BACKUP_DATA.format(self.path_hassio_docker)
@property
def addons_repositories(self):
"""Return list of addons custom repositories."""
return self._data[ADDONS_CUSTOM_LIST]
@addons_repositories.setter
def addons_repositories(self, repo):
"""Add a custom repository to list."""
if repo in self._data[ADDONS_CUSTOM_LIST]:
return
self._data[ADDONS_CUSTOM_LIST].append(repo)
self.save()
def drop_addon_repository(self, repo):
"""Remove a custom repository from list."""
if repo in self._data[ADDONS_CUSTOM_LIST]:
self._data[ADDONS_CUSTOM_LIST].remove(repo)
self.save()
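
A small sketch of what the new validation does with the stored supervisor config; values are made up and unknown keys are dropped by `REMOVE_EXTRA`:

```python
# Run a hypothetical raw config through SCHEMA_CONFIG.
raw = {
    "upstream_beta": True,
    "addons_custom_list": ["https://github.com/example/hassio-addons"],
    "legacy_key": "silently dropped",
}
data = SCHEMA_CONFIG(raw)
assert data["addons_custom_list"] == ["https://github.com/example/hassio-addons"]
assert "legacy_key" not in data
```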


@ -45,14 +45,14 @@ ATTR_DESCRIPTON = 'description'
ATTR_STARTUP = 'startup'
ATTR_BOOT = 'boot'
ATTR_PORTS = 'ports'
ATTR_MAP_CONFIG = 'map_config'
ATTR_MAP_SSL = 'map_ssl'
ATTR_MAP = 'map'
ATTR_OPTIONS = 'options'
ATTR_INSTALLED = 'installed'
ATTR_DEDICATED = 'dedicated'
ATTR_STATE = 'state'
ATTR_SCHEMA = 'schema'
ATTR_IMAGE = 'image'
ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
STARTUP_BEFORE = 'before'
STARTUP_AFTER = 'after'
@ -63,3 +63,8 @@ BOOT_MANUAL = 'manual'
STATE_STARTED = 'started'
STATE_STOPPED = 'stopped'
MAP_CONFIG = 'config'
MAP_SSL = 'ssl'
MAP_ADDONS = 'addons'
MAP_BACKUP = 'backup'


@ -26,6 +26,40 @@ class DockerAddon(DockerBase):
"""Return name of docker container."""
return "addon_{}".format(self.addon)
@property
def volumes(self):
"""Generate volumes for mappings."""
volumes = {
self.addons_data.path_data_docker(self.addon): {
'bind': '/data', 'mode': 'rw'
}}
if self.addons_data.map_config(self.addon):
volumes.update({
self.config.path_config_docker: {
'bind': '/config', 'mode': 'rw'
}})
if self.addons_data.map_ssl(self.addon):
volumes.update({
self.config.path_ssl_docker: {
'bind': '/ssl', 'mode': 'rw'
}})
if self.addons_data.map_addons(self.addon):
volumes.update({
self.config.path_addons_custom_docker: {
'bind': '/addons', 'mode': 'rw'
}})
if self.addons_data.map_backup(self.addon):
volumes.update({
self.config.path_backup_docker: {
'bind': '/backup', 'mode': 'rw'
}})
return volumes
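
To picture the result: for a hypothetical add-on declaring `"map": ["config", "ssl"]`, the property above yields roughly this structure (host paths are examples only):

```python
# Roughly what `volumes` returns for "map": ["config", "ssl"] (example paths).
expected = {
    "/usr/share/hassio/addons_data/local_example": {"bind": "/data", "mode": "rw"},
    "/usr/share/hassio/homeassistant": {"bind": "/config", "mode": "rw"},
    "/usr/share/hassio/ssl": {"bind": "/ssl", "mode": "rw"},
}
```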
def _run(self):
"""Run docker image.
@ -37,22 +71,6 @@ class DockerAddon(DockerBase):
# cleanup old container
self._stop()
# volumes
volumes = {
self.addons_data.path_data_docker(self.addon): {
'bind': '/data', 'mode': 'rw'
}}
if self.addons_data.need_config(self.addon):
volumes.update({
self.config.path_config_docker: {
'bind': '/config', 'mode': 'rw'
}})
if self.addons_data.need_ssl(self.addon):
volumes.update({
self.config.path_ssl_docker: {
'bind': '/ssl', 'mode': 'rw'
}})
try:
self.container = self.dock.containers.run(
self.image,
@ -60,7 +78,7 @@ class DockerAddon(DockerBase):
detach=True,
network_mode='bridge',
ports=self.addons_data.get_ports(self.addon),
volumes=volumes,
volumes=self.volumes,
)
self.version = get_version_from_env(