Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-07-27 11:06:32 +00:00)
Support for repository store. (#26)

* Support for repository store.
* Fix api
* part 1 of restruct and migrate pathlib
* Migrate p2
* fix lint / cleanups
* fix lint p2
* fix lint p3

This commit is contained in:
  parent c76408e4e8
  commit ff640c598d

Changed files: API.md | 37
API.md

@@ -37,9 +37,9 @@ The addons from `addons` are only installed one.
     {
       "name": "xy bla",
       "slug": "xy",
-      "version": "LAST_VERSION",
-      "installed": "INSTALL_VERSION",
-      "dedicated": "bool",
+      "version": "INSTALL_VERSION",
+      "last_version": "VERSION_FOR_UPDATE",
+      "detached": "bool",
       "description": "description"
     }
   ],

@@ -54,16 +54,27 @@ The addons from `addons` are only installed one.
 Get all available addons
 
 ```json
-[
-  {
-    "name": "xy bla",
-    "slug": "xy",
-    "version": "LAST_VERSION",
-    "installed": "none|INSTALL_VERSION",
-    "dedicated": "bool",
-    "description": "description"
-  }
-]
+{
+  "addons": [
+    {
+      "name": "xy bla",
+      "slug": "xy",
+      "repository": "12345678|null",
+      "version": "LAST_VERSION",
+      "installed": "none|INSTALL_VERSION",
+      "detached": "bool",
+      "description": "description"
+    }
+  ],
+  "repositories": [
+    {
+      "slug": "12345678",
+      "name": "Repitory Name",
+      "url": "WEBSITE",
+      "maintainer": "BLA BLU <fla@dld.ch>"
+    }
+  ]
+}
 ```
 
 - POST `/supervisor/update`
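For readers of the new payload shape above, a minimal client-side sketch (not part of the commit; `annotate_addons` and `repository_info` are illustrative names) showing how the top-level `repositories` list can be joined back onto each addon's `repository` slug:

```python
def annotate_addons(payload):
    """Attach the matching repository entry to every addon in the payload."""
    # index repository entries by their slug
    repositories = {repo["slug"]: repo for repo in payload.get("repositories", [])}

    annotated = []
    for addon in payload.get("addons", []):
        entry = dict(addon)
        # "repository" is a repository slug or null for detached addons
        entry["repository_info"] = repositories.get(addon.get("repository"))
        annotated.append(entry)
    return annotated
```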
@@ -51,7 +51,7 @@ class AddonManager(AddonsData):
             self.config, self.loop, self.dock, self, addon)
         await self.dockers[addon].attach()
 
-    async def add_custom_repository(self, url):
+    async def add_git_repository(self, url):
         """Add a new custom repository."""
         if url in self.config.addons_repositories:
             _LOGGER.warning("Repository already exists %s", url)

@@ -67,7 +67,7 @@ class AddonManager(AddonsData):
         self.repositories.append(repo)
         return True
 
-    def drop_custom_repository(self, url):
+    def drop_git_repository(self, url):
         """Remove a custom repository."""
         for repo in self.repositories:
             if repo.url == url:

@@ -91,7 +91,7 @@ class AddonManager(AddonsData):
         self.merge_update_config()
 
         # remove stalled addons
-        for addon in self.list_removed:
+        for addon in self.list_detached:
             _LOGGER.warning("Dedicated addon '%s' found!", addon)
 
     async def auto_boot(self, start_type):

@@ -1,26 +1,30 @@
 """Init file for HassIO addons."""
 import copy
 import logging
-import glob
+from pathlib import Path, PurePath
 
 import voluptuous as vol
 from voluptuous.humanize import humanize_error
 
 from .util import extract_hash_from_path
-from .validate import validate_options, SCHEMA_ADDON_CONFIG
+from .validate import (
+    validate_options, SCHEMA_ADDON_CONFIG, SCHEMA_REPOSITORY_CONFIG)
 from ..const import (
     FILE_HASSIO_ADDONS, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON,
     ATTR_STARTUP, ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, BOOT_AUTO,
-    DOCKER_REPO, ATTR_INSTALLED, ATTR_SCHEMA, ATTR_IMAGE, ATTR_DEDICATED,
-    MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP)
+    DOCKER_REPO, ATTR_INSTALLED, ATTR_SCHEMA, ATTR_IMAGE, ATTR_DETACHED,
+    MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, ATTR_REPOSITORY, ATTR_URL,
+    ATTR_MAINTAINER, ATTR_LAST_VERSION)
 from ..config import Config
 from ..tools import read_json_file, write_json_file
 
 _LOGGER = logging.getLogger(__name__)
 
-ADDONS_REPO_PATTERN = "{}/**/config.json"
-SYSTEM = "system"
-USER = "user"
+SYSTEM = 'system'
+USER = 'user'
+
+REPOSITORY_CORE = 'core'
+REPOSITORY_LOCAL = 'local'
 
 
 class AddonsData(Config):

@@ -32,7 +36,8 @@ class AddonsData(Config):
         self.config = config
         self._system_data = self._data.get(SYSTEM, {})
         self._user_data = self._data.get(USER, {})
-        self._current_data = {}
+        self._addons_cache = {}
+        self._repositories_data = {}
         self.arch = None
 
     def save(self):

@@ -45,29 +50,62 @@ class AddonsData(Config):
 
     def read_data_from_repositories(self):
         """Read data from addons repository."""
-        self._current_data = {}
+        self._addons_cache = {}
+        self._repositories_data = {}
 
-        self._read_addons_folder(self.config.path_addons_repo)
-        self._read_addons_folder(self.config.path_addons_custom, custom=True)
+        # read core repository
+        self._read_addons_folder(
+            self.config.path_addons_core, REPOSITORY_CORE)
 
-    def _read_addons_folder(self, folder, custom=False):
+        # read local repository
+        self._read_addons_folder(
+            self.config.path_addons_local, REPOSITORY_LOCAL)
+
+        # read custom git repositories
+        for repository_dir in self.config.path_addons_git.glob("/*/"):
+            self._read_git_repository(repository_dir)
+
+    def _read_git_repository(self, path):
+        """Process a custom repository folder."""
+        slug = extract_hash_from_path(path)
+        repository_info = {ATTR_SLUG: slug}
+
+        # exists repository json
+        repository_file = Path(path, "repository.json")
+        try:
+            repository_info.update(SCHEMA_REPOSITORY_CONFIG(
+                read_json_file(repository_file)
+            ))
+
+        except OSError:
+            _LOGGER.warning("Can't read repository information from %s",
+                            repository_file)
+            return
+
+        except vol.Invalid:
+            _LOGGER.warning("Repository parse error %s", repository_file)
+            return
+
+        # process data
+        self._repositories_data[slug] = repository_info
+        self._read_addons_folder(path, slug)
+
+    def _read_addons_folder(self, path, repository):
         """Read data from addons folder."""
-        pattern = ADDONS_REPO_PATTERN.format(folder)
-
-        for addon in glob.iglob(pattern, recursive=True):
+        for addon in path.glob("**/*.config.json"):
             try:
                 addon_config = read_json_file(addon)
 
+                # validate
                 addon_config = SCHEMA_ADDON_CONFIG(addon_config)
-                if custom:
-                    addon_slug = "{}_{}".format(
-                        extract_hash_from_path(folder, addon),
-                        addon_config[ATTR_SLUG],
-                    )
-                else:
-                    addon_slug = addon_config[ATTR_SLUG]
 
-                self._current_data[addon_slug] = addon_config
+                # Generate slug
+                addon_slug = "{}_{}".format(
+                    repository, addon_config[ATTR_SLUG])
+
+                # store
+                addon_config[ATTR_REPOSITORY] = repository
+                self._addons_cache[addon_slug] = addon_config
 
             except OSError:
                 _LOGGER.warning("Can't read %s", addon)
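A small illustrative sketch (not part of the commit) of the cache key scheme that `_read_addons_folder` above settles on, combining the repository identifier with the addon's own slug:

```python
def build_addon_slug(repository, addon_config):
    """Mirror the new cache key scheme: '<repository>_<addon slug>'."""
    return "{}_{}".format(repository, addon_config["slug"])

# core/local repositories use their fixed names, git repositories an 8-char hash
assert build_addon_slug("core", {"slug": "xy"}) == "core_xy"
assert build_addon_slug("12345678", {"slug": "xy"}) == "12345678_xy"
```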
@@ -84,14 +122,14 @@ class AddonsData(Config):
         have_change = False
 
         for addon, data in self._system_data.items():
-            # dedicated
-            if addon not in self._current_data:
+            # detached
+            if addon not in self._addons_cache:
                 continue
 
-            current = self._current_data[addon]
-            if data[ATTR_VERSION] == current[ATTR_VERSION]:
-                if data != current:
-                    self._system_data[addon] = copy.deepcopy(current)
+            cache = self._addons_cache[addon]
+            if data[ATTR_VERSION] == cache[ATTR_VERSION]:
+                if data != cache:
+                    self._system_data[addon] = copy.deepcopy(cache)
                     have_change = True
 
         if have_change:

@@ -103,11 +141,11 @@ class AddonsData(Config):
         return set(self._system_data.keys())
 
     @property
-    def list_api(self):
+    def list_all_api(self):
         """Return a list of available addons for api."""
         data = []
-        all_addons = {**self._system_data, **self._current_data}
-        dedicated = self.list_removed
+        all_addons = {**self._system_data, **self._addons_cache}
+        detached = self.list_detached
 
         for addon, values in all_addons.items():
             i_version = self._user_data.get(addon, {}).get(ATTR_VERSION)

@@ -118,7 +156,30 @@ class AddonsData(Config):
                 ATTR_DESCRIPTON: values[ATTR_DESCRIPTON],
                 ATTR_VERSION: values[ATTR_VERSION],
                 ATTR_INSTALLED: i_version,
-                ATTR_DEDICATED: addon in dedicated,
+                ATTR_DETACHED: addon in detached,
+                ATTR_REPOSITORY: values[ATTR_REPOSITORY],
+            })
+
+        return data
+
+    @property
+    def list_installed_api(self):
+        """Return a list of available addons for api."""
+        data = []
+        all_addons = {**self._system_data, **self._addons_cache}
+        detached = self.list_detached
+
+        for addon, values in all_addons.items():
+            i_version = self._user_data.get(addon, {}).get(ATTR_VERSION)
+
+            data.append({
+                ATTR_NAME: values[ATTR_NAME],
+                ATTR_SLUG: addon,
+                ATTR_DESCRIPTON: values[ATTR_DESCRIPTON],
+                ATTR_VERSION: values[ATTR_VERSION],
+                ATTR_LAST_VERSION: values[ATTR_VERSION],
+                ATTR_INSTALLED: i_version,
+                ATTR_DETACHED: addon in detached
             })
 
         return data

@@ -140,18 +201,33 @@ class AddonsData(Config):
         return addon_list
 
     @property
-    def list_removed(self):
+    def list_detached(self):
         """Return local addons they not support from repo."""
         addon_list = set()
         for addon in self._system_data.keys():
-            if addon not in self._current_data:
+            if addon not in self._addons_cache:
                 addon_list.add(addon)
 
         return addon_list
 
+    @property
+    def list_repositories_api(self):
+        """Return list of addon repositories."""
+        repositories = []
+
+        for slug, data in self._repositories_data.items():
+            repositories.append({
+                ATTR_SLUG: slug,
+                ATTR_NAME: data[ATTR_NAME],
+                ATTR_URL: data.get(ATTR_URL),
+                ATTR_MAINTAINER: data.get(ATTR_MAINTAINER),
+            })
+
+        return repositories
+
     def exists_addon(self, addon):
         """Return True if a addon exists."""
-        return addon in self._current_data or addon in self._system_data
+        return addon in self._addons_cache or addon in self._system_data
 
     def is_installed(self, addon):
         """Return True if a addon is installed."""

@@ -163,7 +239,7 @@ class AddonsData(Config):
 
     def set_addon_install(self, addon, version):
         """Set addon as installed."""
-        self._system_data[addon] = copy.deepcopy(self._current_data[addon])
+        self._system_data[addon] = copy.deepcopy(self._addons_cache[addon])
         self._user_data[addon] = {
             ATTR_OPTIONS: {},
             ATTR_VERSION: version,

@@ -178,7 +254,7 @@ class AddonsData(Config):
 
     def set_addon_update(self, addon, version):
         """Update version of addon."""
-        self._system_data[addon] = copy.deepcopy(self._current_data[addon])
+        self._system_data[addon] = copy.deepcopy(self._addons_cache[addon])
         self._user_data[addon][ATTR_VERSION] = version
         self.save()
 

@@ -216,9 +292,9 @@ class AddonsData(Config):
 
     def get_last_version(self, addon):
         """Return version of addon."""
-        if addon not in self._current_data:
+        if addon not in self._addons_cache:
             return self.version_installed(addon)
-        return self._current_data[addon][ATTR_VERSION]
+        return self._addons_cache[addon][ATTR_VERSION]
 
     def get_ports(self, addon):
         """Return ports of addon."""

@@ -226,10 +302,11 @@ class AddonsData(Config):
 
     def get_image(self, addon):
         """Return image name of addon."""
-        addon_data = self._system_data.get(addon, self._current_data[addon])
+        addon_data = self._system_data.get(addon, self._addons_cache[addon])
 
         if ATTR_IMAGE not in addon_data:
-            return "{}/{}-addon-{}".format(DOCKER_REPO, self.arch, addon)
+            return "{}/{}-addon-{}".format(
+                DOCKER_REPO, self.arch, addon_data[ATTR_SLUG])
 
         return addon_data[ATTR_IMAGE].format(arch=self.arch)
 

@@ -251,15 +328,15 @@ class AddonsData(Config):
 
     def path_data(self, addon):
         """Return addon data path inside supervisor."""
-        return "{}/{}".format(self.config.path_addons_data, addon)
+        return Path(self.config.path_addons_data, addon)
 
-    def path_data_docker(self, addon):
+    def path_extern_data(self, addon):
         """Return addon data path external for docker."""
-        return "{}/{}".format(self.config.path_addons_data_docker, addon)
+        return PurePath(self.config.path_extern_addons_data, addon)
 
     def path_addon_options(self, addon):
         """Return path to addons options."""
-        return "{}/options.json".format(self.path_data(addon))
+        return Path(self.path_data, addon, "options.json")
 
     def write_addon_options(self, addon):
         """Return True if addon options is written to data."""

@@ -1,7 +1,7 @@
 """Init file for HassIO addons git."""
 import asyncio
 import logging
-import os
+from pathlib import Path
 import shutil
 
 import git

@@ -26,14 +26,14 @@ class AddonsRepo(object):
 
     async def load(self):
         """Init git addon repo."""
-        if not os.path.isdir(self.path):
+        if not self.path.is_dir():
             return await self.clone()
 
         async with self._lock:
             try:
                 _LOGGER.info("Load addon %s repository", self.path)
                 self.repo = await self.loop.run_in_executor(
-                    None, git.Repo, self.path)
+                    None, git.Repo, str(self.path))
 
             except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
                 _LOGGER.error("Can't load %s repo: %s.", self.path, err)

@@ -47,7 +47,7 @@ class AddonsRepo(object):
             try:
                 _LOGGER.info("Clone addon %s repository", self.url)
                 self.repo = await self.loop.run_in_executor(
-                    None, git.Repo.clone_from, self.url, self.path)
+                    None, git.Repo.clone_from, self.url, str(self.path))
 
             except (git.InvalidGitRepositoryError, git.NoSuchPathError) as err:
                 _LOGGER.error("Can't clone %s repo: %s.", self.url, err)

@@ -88,18 +88,17 @@ class AddonsRepoCustom(AddonsRepo):
 
     def __init__(self, config, loop, url):
         """Initialize git hassio addon repository."""
-        path = os.path.join(
-            config.path_addons_custom, get_hash_from_repository(url))
+        path = Path(config.path_addons_git, get_hash_from_repository(url))
 
         super().__init__(config, loop, path, url)
 
     def remove(self):
         """Remove a custom addon."""
-        if os.path.isdir(self.path):
+        if self.path.is_dir():
             _LOGGER.info("Remove custom addon repository %s", self.url)
 
             def log_err(funct, path, _):
                 """Log error."""
                 _LOGGER.warning("Can't remove %s", path)
 
-            shutil.rmtree(self.path, onerror=log_err)
+            shutil.rmtree(str(self.path), onerror=log_err)

@@ -1,28 +1,21 @@
 """Util addons functions."""
 import hashlib
-import pathlib
 import re
 
 RE_SLUGIFY = re.compile(r'[^a-z0-9_]+')
 RE_SHA1 = re.compile(r"[a-f0-9]{8}")
 
 
-def get_hash_from_repository(repo):
+def get_hash_from_repository(name):
     """Generate a hash from repository."""
-    key = repo.lower().encode()
+    key = name.lower().encode()
     return hashlib.sha1(key).hexdigest()[:8]
 
 
-def extract_hash_from_path(base_path, options_path):
+def extract_hash_from_path(path):
     """Extract repo id from path."""
-    base_dir = pathlib.PurePosixPath(base_path).parts[-1]
+    repo_dir = path.parts[-1]
 
-    dirlist = iter(pathlib.PurePosixPath(options_path).parts)
-    for obj in dirlist:
-        if obj != base_dir:
-            continue
-
-        repo_dir = next(dirlist)
-        if not RE_SHA1.match(repo_dir):
-            return get_hash_from_repository(repo_dir)
-        return repo_dir
+    if not RE_SHA1.match(repo_dir):
+        return get_hash_from_repository(repo_dir)
+    return repo_dir
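To make the simplified helper pair concrete, here is a standalone sketch that duplicates the two functions above so their behaviour can be tried outside the supervisor; the example paths are illustrative:

```python
import hashlib
import re
from pathlib import PurePath

RE_SHA1 = re.compile(r"[a-f0-9]{8}")


def get_hash_from_repository(name):
    """Hash a repository name/URL down to an 8 character id."""
    key = name.lower().encode()
    return hashlib.sha1(key).hexdigest()[:8]


def extract_hash_from_path(path):
    """Use the folder name directly if it already looks like an id."""
    repo_dir = path.parts[-1]
    if not RE_SHA1.match(repo_dir):
        return get_hash_from_repository(repo_dir)
    return repo_dir


# a git checkout already named by its hash keeps that id
print(extract_hash_from_path(PurePath("/data/addons/git/a1b2c3d4")))  # -> a1b2c3d4
# any other folder name is hashed down to an 8 character id
print(extract_hash_from_path(PurePath("/data/addons/git/my-addons")))
```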
@@ -5,7 +5,7 @@ from ..const import (
     ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
     ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER,
     STARTUP_BEFORE, BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, MAP_SSL,
-    MAP_CONFIG, MAP_ADDONS, MAP_BACKUP)
+    MAP_CONFIG, MAP_ADDONS, MAP_BACKUP, ATTR_URL, ATTR_MAINTAINER)
 
 V_STR = 'str'
 V_INT = 'int'

@@ -40,6 +40,14 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
 }, extra=vol.ALLOW_EXTRA)
 
 
+# pylint: disable=no-value-for-parameter
+SCHEMA_REPOSITORY_CONFIG = vol.Schema({
+    vol.Required(ATTR_NAME): vol.Coerce(str),
+    vol.Optional(ATTR_URL): vol.Url(),
+    vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
+}, extra=vol.ALLOW_EXTRA)
+
+
 def validate_options(raw_schema):
     """Validate schema."""
     def validate(struct):
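A sketch of a repository.json that would pass the new schema; the concrete values are made up for illustration. Only `name` is required, `url` and `maintainer` are optional, and unknown keys pass through because of `extra=vol.ALLOW_EXTRA`:

```python
repository_json = {
    "name": "Example add-on repository",          # required, coerced to str
    "url": "https://example.com/hassio-addons",   # optional, validated as a URL
    "maintainer": "Jane Doe <jane@example.com>",  # optional
}

# SCHEMA_REPOSITORY_CONFIG(repository_json) returns the validated dict or
# raises vol.Invalid, which _read_git_repository() logs as a parse error.
```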
@@ -7,7 +7,7 @@ import voluptuous as vol
 from .util import api_process, api_process_raw, api_validate
 from ..const import (
     ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL,
-    HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES)
+    HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_REPOSITORIES)
 
 _LOGGER = logging.getLogger(__name__)
 

@@ -45,14 +45,17 @@ class APISupervisor(object):
             ATTR_VERSION: HASSIO_VERSION,
             ATTR_LAST_VERSION: self.config.last_hassio,
             ATTR_BETA_CHANNEL: self.config.upstream_beta,
-            ATTR_ADDONS: self.addons.list_api,
+            ATTR_ADDONS: self.addons.list_installed_api,
             ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
         }
 
     @api_process
     async def available_addons(self, request):
         """Return information for all available addons."""
-        return self.addons.list_api
+        return {
+            ATTR_ADDONS: self.addons.list_all_api,
+            ATTR_REPOSITORIES: self.addons.list_repositories_api,
+        }
 
     @api_process
     async def options(self, request):

@@ -67,12 +70,15 @@ class APISupervisor(object):
         old = set(self.config.addons_repositories)
 
         # add new repositories
-        for url in set(new - old):
-            await self.addons.add_custom_repository(url)
+        tasks = [self.addons.add_git_repository(url) for url in
+                 set(new - old)]
+        if tasks:
+            await asyncio.shield(
+                asyncio.wait(tasks, loop=self.loop), loop=self.loop)
 
         # remove old repositories
         for url in set(old - new):
-            self.addons.drop_custom_repository(url)
+            self.addons.drop_git_repository(url)
 
         return True
 
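The scheduling pattern in the last hunk, shown as a minimal standalone sketch using the modern asyncio API (the commit itself still passes `loop=self.loop`); `add_git_repository` here is a stand-in coroutine, not the real implementation:

```python
import asyncio


async def add_git_repository(url):
    """Stand-in for the real clone coroutine."""
    await asyncio.sleep(0.1)
    print("added", url)


async def update_repositories(new, old):
    # one task per newly configured repository URL
    tasks = [asyncio.ensure_future(add_git_repository(url))
             for url in set(new) - set(old)]
    if tasks:
        # shield the combined wait so cancelling the API request
        # does not abort clones that are already running
        await asyncio.shield(asyncio.wait(tasks))


asyncio.run(update_repositories({"https://example.com/addons.git"}, set()))
```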
@@ -30,7 +30,7 @@ def api_process(method):
         except RuntimeError as err:
             return api_return_error(message=str(err))
 
-        if isinstance(answer, (dict, list)):
+        if isinstance(answer, dict):
             return api_return_ok(data=answer)
         elif answer:
             return api_return_ok()

@@ -1,7 +1,6 @@
 """Bootstrap HassIO."""
 import logging
 import os
-import stat
 import signal
 
 from colorlog import ColoredFormatter

@@ -17,26 +16,37 @@ def initialize_system_data(websession):
     config = CoreConfig(websession)
 
     # homeassistant config folder
-    if not os.path.isdir(config.path_config):
+    if not config.path_config.is_dir():
         _LOGGER.info(
             "Create Home-Assistant config folder %s", config.path_config)
-        os.mkdir(config.path_config)
+        config.path_config.mkdir()
 
     # homeassistant ssl folder
-    if not os.path.isdir(config.path_ssl):
+    if not config.path_ssl.is_dir():
         _LOGGER.info("Create Home-Assistant ssl folder %s", config.path_ssl)
-        os.mkdir(config.path_ssl)
+        config.path_ssl.mkdir()
 
     # homeassistant addon data folder
-    if not os.path.isdir(config.path_addons_data):
+    if not config.path_addons_data.is_dir():
         _LOGGER.info("Create Home-Assistant addon data folder %s",
                      config.path_addons_data)
-        os.mkdir(config.path_addons_data)
+        config.path_addons_data.mkdir(parents=True)
 
-    if not os.path.isdir(config.path_addons_custom):
-        _LOGGER.info("Create Home-Assistant addon custom folder %s",
-                     config.path_addons_custom)
-        os.mkdir(config.path_addons_custom)
+    if not config.path_addons_local.is_dir():
+        _LOGGER.info("Create Home-Assistant addon local repository folder %s",
+                     config.path_addons_local)
+        config.path_addons_local.mkdir(parents=True)
+
+    if not config.path_addons_git.is_dir():
+        _LOGGER.info("Create Home-Assistant addon git repositories folder %s",
+                     config.path_addons_git)
+        config.path_addons_git.mkdir(parents=True)
+
+    # homeassistant backup folder
+    if not config.path_backup.is_dir():
+        _LOGGER.info("Create Home-Assistant backup folder %s",
+                     config.path_backup)
+        config.path_backup.mkdir()
 
     return config
 
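The same pathlib idiom, isolated: `is_dir()` replaces `os.path.isdir()` and `mkdir(parents=True)` also creates missing parent folders such as `/data/addons`. A minimal sketch with the default share path:

```python
from pathlib import Path

path_addons_git = Path("/data/addons/git")

if not path_addons_git.is_dir():
    # parents=True creates /data/addons as well if it does not exist yet
    path_addons_git.mkdir(parents=True)
```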
@@ -76,8 +86,7 @@ def check_environment():
         _LOGGER.fatal("Can't find %s in env!", key)
         return False
 
-    mode = os.stat(SOCKET_DOCKER)[stat.ST_MODE]
-    if not stat.S_ISSOCK(mode):
+    if not SOCKET_DOCKER.is_socket():
         _LOGGER.fatal("Can't find docker socket!")
         return False
 

@@ -2,6 +2,7 @@
 import logging
 import json
 import os
+from pathlib import Path, PurePath
 
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

@@ -12,19 +13,20 @@ from .tools import (
 
 _LOGGER = logging.getLogger(__name__)
 
-HOMEASSISTANT_CONFIG = "{}/homeassistant"
+HOMEASSISTANT_CONFIG = PurePath("homeassistant")
 HOMEASSISTANT_LAST = 'homeassistant_last'
 
-HASSIO_SSL = "{}/ssl"
+HASSIO_SSL = PurePath("ssl")
 HASSIO_LAST = 'hassio_last'
 HASSIO_CLEANUP = 'hassio_cleanup'
 
-ADDONS_REPO = "{}/addons"
-ADDONS_DATA = "{}/addons_data"
-ADDONS_CUSTOM = "{}/addons_custom"
+ADDONS_CORE = PurePath("addons/core")
+ADDONS_LOCAL = PurePath("addons/local")
+ADDONS_GIT = PurePath("addons/git")
+ADDONS_DATA = PurePath("addons/data")
 ADDONS_CUSTOM_LIST = 'addons_custom_list'
 
-BACKUP_DATA = "{}/backup"
+BACKUP_DATA = PurePath("backup")
 
 UPSTREAM_BETA = 'upstream_beta'
 

@@ -47,21 +49,21 @@ class Config(object):
 
     def __init__(self, config_file):
         """Initialize config object."""
-        self._filename = config_file
+        self._file = config_file
         self._data = {}
 
         # init or load data
-        if os.path.isfile(self._filename):
+        if self._file.is_file():
             try:
-                self._data = read_json_file(self._filename)
+                self._data = read_json_file(self._file)
             except (OSError, json.JSONDecodeError):
-                _LOGGER.warning("Can't read %s", self._filename)
+                _LOGGER.warning("Can't read %s", self._file)
                 self._data = {}
 
     def save(self):
         """Store data to config file."""
-        if not write_json_file(self._filename, self._data):
-            _LOGGER.error("Can't store config in %s", self._filename)
+        if not write_json_file(self._file, self._data):
+            _LOGGER.error("Can't store config in %s", self._file)
             return False
         return True
 

@@ -148,64 +150,69 @@ class CoreConfig(Config):
         return self._data.get(HASSIO_LAST)
 
     @property
-    def path_hassio_docker(self):
+    def path_extern_hassio(self):
         """Return hassio data path extern for docker."""
-        return os.environ['SUPERVISOR_SHARE']
+        return PurePath(os.environ['SUPERVISOR_SHARE'])
 
     @property
-    def path_config_docker(self):
+    def path_extern_config(self):
         """Return config path extern for docker."""
-        return HOMEASSISTANT_CONFIG.format(self.path_hassio_docker)
+        return str(PurePath(self.path_extern_hassio, HOMEASSISTANT_CONFIG))
 
     @property
     def path_config(self):
         """Return config path inside supervisor."""
-        return HOMEASSISTANT_CONFIG.format(HASSIO_SHARE)
+        return Path(HASSIO_SHARE, HOMEASSISTANT_CONFIG)
 
     @property
-    def path_ssl_docker(self):
+    def path_extern_ssl(self):
         """Return SSL path extern for docker."""
-        return HASSIO_SSL.format(self.path_hassio_docker)
+        return str(PurePath(self.path_extern_hassio, HASSIO_SSL))
 
     @property
     def path_ssl(self):
         """Return SSL path inside supervisor."""
-        return HASSIO_SSL.format(HASSIO_SHARE)
+        return Path(HASSIO_SHARE, HASSIO_SSL)
 
     @property
-    def path_addons_repo(self):
-        """Return git repo path for addons."""
-        return ADDONS_REPO.format(HASSIO_SHARE)
+    def path_addons_core(self):
+        """Return git path for core addons."""
+        return Path(HASSIO_SHARE, ADDONS_CORE)
 
     @property
-    def path_addons_custom(self):
+    def path_addons_git(self):
+        """Return path for git addons."""
+        return Path(HASSIO_SHARE, ADDONS_GIT)
+
+    @property
+    def path_addons_local(self):
         """Return path for customs addons."""
-        return ADDONS_CUSTOM.format(HASSIO_SHARE)
+        return Path(HASSIO_SHARE, ADDONS_LOCAL)
 
     @property
-    def path_addons_custom_docker(self):
+    def path_extern_addons_local(self):
         """Return path for customs addons."""
-        return ADDONS_CUSTOM.format(self.path_hassio_docker)
+        return str(PurePath(self.path_extern_hassio, ADDONS_LOCAL))
 
     @property
     def path_addons_data(self):
         """Return root addon data folder."""
-        return ADDONS_DATA.format(HASSIO_SHARE)
+        return Path(HASSIO_SHARE, ADDONS_DATA)
 
     @property
-    def path_addons_data_docker(self):
+    def path_extern_addons_data(self):
         """Return root addon data folder extern for docker."""
-        return ADDONS_DATA.format(self.path_hassio_docker)
+        return str(PurePath(self.path_extern_hassio, ADDONS_DATA))
 
     @property
     def path_backup(self):
         """Return root backup data folder."""
-        return BACKUP_DATA.format(HASSIO_SHARE)
+        return Path(HASSIO_SHARE, BACKUP_DATA)
 
     @property
-    def path_backup_docker(self):
+    def path_extern_backup(self):
         """Return root backup data folder extern for docker."""
-        return BACKUP_DATA.format(self.path_hassio_docker)
+        return str(PurePath(self.path_extern_hassio, BACKUP_DATA))
 
     @property
     def addons_repositories(self):
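Sketch of the naming convention the hunk above settles on: properties without a prefix are `Path` objects used inside the supervisor container (rooted at `/data`), while `path_extern_*` properties are composed with `PurePath` and returned as strings for docker volume mappings on the host share. The host path below is illustrative, not a real default:

```python
import os
from pathlib import Path, PurePath

HASSIO_SHARE = Path("/data")
ADDONS_DATA = PurePath("addons/data")

# inside the supervisor container: a real, inspectable path
path_addons_data = Path(HASSIO_SHARE, ADDONS_DATA)             # /data/addons/data

# on the docker host: only composed, never accessed, so PurePath + str is enough
os.environ.setdefault('SUPERVISOR_SHARE', '/mnt/data/hassio')  # illustrative value
path_extern_hassio = PurePath(os.environ['SUPERVISOR_SHARE'])
path_extern_addons_data = str(PurePath(path_extern_hassio, ADDONS_DATA))
```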
@@ -1,4 +1,6 @@
 """Const file for HassIO."""
+from pathlib import Path
+
 HASSIO_VERSION = '0.19'
 
 URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'

@@ -10,7 +12,7 @@ URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
 
 DOCKER_REPO = "homeassistant"
 
-HASSIO_SHARE = "/data"
+HASSIO_SHARE = Path("/data")
 
 RUN_UPDATE_INFO_TASKS = 28800
 RUN_UPDATE_SUPERVISOR_TASKS = 29100

@@ -18,11 +20,11 @@ RUN_RELOAD_ADDONS_TASKS = 28800
 
 RESTART_EXIT_CODE = 100
 
-FILE_HASSIO_ADDONS = "{}/addons.json".format(HASSIO_SHARE)
-FILE_HASSIO_CONFIG = "{}/config.json".format(HASSIO_SHARE)
+FILE_HASSIO_ADDONS = Path(HASSIO_SHARE, "addons.json")
+FILE_HASSIO_CONFIG = Path(HASSIO_SHARE, "config.json")
 
-SOCKET_DOCKER = "/var/run/docker.sock"
-SOCKET_HC = "/var/run/hassio-hc.sock"
+SOCKET_DOCKER = Path("/var/run/docker.sock")
+SOCKET_HC = Path("/var/run/hassio-hc.sock")
 
 JSON_RESULT = 'result'
 JSON_DATA = 'data'

@@ -48,11 +50,15 @@ ATTR_PORTS = 'ports'
 ATTR_MAP = 'map'
 ATTR_OPTIONS = 'options'
 ATTR_INSTALLED = 'installed'
-ATTR_DEDICATED = 'dedicated'
+ATTR_DETACHED = 'detached'
 ATTR_STATE = 'state'
 ATTR_SCHEMA = 'schema'
 ATTR_IMAGE = 'image'
 ATTR_ADDONS_REPOSITORIES = 'addons_repositories'
+ATTR_REPOSITORY = 'repository'
+ATTR_REPOSITORIES = 'repositories'
+ATTR_URL = 'url'
+ATTR_MAINTAINER = 'maintainer'
 
 STARTUP_BEFORE = 'before'
 STARTUP_AFTER = 'after'

@@ -30,31 +30,31 @@ class DockerAddon(DockerBase):
     def volumes(self):
         """Generate volumes for mappings."""
         volumes = {
-            self.addons_data.path_data_docker(self.addon): {
+            self.addons_data.path_extern_data(self.addon): {
                 'bind': '/data', 'mode': 'rw'
             }}
 
         if self.addons_data.map_config(self.addon):
             volumes.update({
-                self.config.path_config_docker: {
+                self.config.path_extern_config: {
                     'bind': '/config', 'mode': 'rw'
                 }})
 
         if self.addons_data.map_ssl(self.addon):
             volumes.update({
-                self.config.path_ssl_docker: {
+                self.config.path_extern_ssl: {
                     'bind': '/ssl', 'mode': 'rw'
                 }})
 
         if self.addons_data.map_addons(self.addon):
             volumes.update({
-                self.config.path_addons_custom_docker: {
+                self.config.path_extern_addons_local: {
                     'bind': '/addons', 'mode': 'rw'
                 }})
 
         if self.addons_data.map_backup(self.addon):
             volumes.update({
-                self.config.path_backup_docker: {
+                self.config.path_extern_backup: {
                     'bind': '/backup', 'mode': 'rw'
                 }})
 

@@ -45,9 +45,9 @@ class DockerHomeAssistant(DockerBase):
                 'HASSIO': self.config.api_endpoint,
             },
             volumes={
-                self.config.path_config_docker:
+                self.config.path_extern_config:
                     {'bind': '/config', 'mode': 'rw'},
-                self.config.path_ssl_docker:
+                self.config.path_extern_ssl:
                     {'bind': '/ssl', 'mode': 'rw'},
             })
 

@@ -78,7 +78,7 @@ def write_json_file(jsonfile, data):
     """Write a json file."""
     try:
         json_str = json.dumps(data, indent=2)
-        with open(jsonfile, 'w') as conf_file:
+        with jsonfile.open('w') as conf_file:
             conf_file.write(json_str)
     except (OSError, json.JSONDecodeError):
         return False

@@ -88,5 +88,5 @@ def write_json_file(jsonfile, data):
 
 def read_json_file(jsonfile):
     """Read a json file and return a dict."""
-    with open(jsonfile, 'r') as cfile:
+    with jsonfile.open('r') as cfile:
         return json.loads(cfile.read())
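Because the `FILE_HASSIO_*` constants are now `Path` objects, both helpers call `.open()` on the path they are given instead of the `open()` builtin. A minimal usage sketch with an illustrative temporary path:

```python
import json
from pathlib import Path

config_file = Path("/tmp/hassio_example_config.json")  # illustrative location

# write_json_file() style: serialize first, then write through Path.open()
with config_file.open('w') as conf_file:
    conf_file.write(json.dumps({"upstream_beta": False}, indent=2))

# read_json_file() style
with config_file.open('r') as cfile:
    print(json.loads(cfile.read()))
```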