Compare commits

...

31 Commits
0.24 ... 0.29

Author SHA1 Message Date
Pascal Vizeli
81e1227a7b Merge pull request #59 from home-assistant/dev
Release 0.29
2017-05-17 23:43:09 +02:00
Pascal Vizeli
75be8666a6 Update version.json 2017-05-17 23:41:50 +02:00
Pascal Vizeli
6031a60084 Add addon share and allow to mount host mnt (#58)
* Add addon share and allow to mount host mnt

* fix comments logs
2017-05-17 17:21:54 +02:00
Pascal Vizeli
39d5785118 Allow config.json to manipulate docker env. (#57)
Allow config.json to manipulate docker env.
2017-05-17 14:45:02 +02:00
Pascal Vizeli
bddcdcadb2 Pump version 2017-05-17 14:25:57 +02:00
Pascal Vizeli
3eac6a3366 Merge pull request #56 from home-assistant/dev
Release 0.28
2017-05-16 22:31:47 +02:00
Pascal Vizeli
3c7b962cf9 update hass.io 2017-05-16 22:17:19 +02:00
Pascal Vizeli
bd756e2a9c fix dev regex 2017-05-16 22:11:42 +02:00
Pascal Vizeli
e7920bee2a Fix group regex 2017-05-16 22:03:51 +02:00
Pascal Vizeli
ebcc21370e Add policy for mappings (#55)
* Add policy for mappings

* fix travis
2017-05-16 17:15:35 +02:00
Pascal Vizeli
34c4acf199 Add device support (#54)
Add device support
2017-05-16 14:50:47 +02:00
Pascal Vizeli
47e45dfc9f Pump version 2017-05-16 11:45:20 +02:00
Pascal Vizeli
2ecea7c1b4 Merge pull request #53 from home-assistant/dev
Release 0.27
2017-05-16 00:20:29 +02:00
Pascal Vizeli
5c0eccd12f Bugfix attach container/image (#52) 2017-05-16 00:07:43 +02:00
Pascal Vizeli
f34ab9402b Fix remove (#51) 2017-05-15 23:39:34 +02:00
Pascal Vizeli
2569a82caf Update Hass.IO version 2017-05-15 23:27:18 +02:00
Pascal Vizeli
4bdd256000 Use label instead env, cleanup build (#50)
* Use label instead env, cleanup build

* Update const.py

* fix lint

* add space

* fix lint

* use dynamic type

* fix lint

* fix path

* fix label read

* fix bug
2017-05-15 23:19:35 +02:00
Pascal Vizeli
6f4f6338c5 Pump version 2017-05-15 16:35:30 +02:00
Pascal Vizeli
7cb72b55a8 Merge pull request #49 from home-assistant/dev
Release 0.26
2017-05-15 00:26:30 +02:00
Pascal Vizeli
1a9a08cbfb Update version.json 2017-05-15 00:17:59 +02:00
Pascal Vizeli
237ee0363d Update error message (#48) 2017-05-15 00:08:15 +02:00
Pascal Vizeli
86180ddc34 Allow every repository to make a local build (#47)
* Allow every repository to make a local build

* store version of build

* cleanup code

* fix lint
2017-05-14 23:32:54 +02:00
Pascal Vizeli
eed41d30ec Update const.py 2017-05-13 23:17:23 +02:00
Pascal Vizeli
0b0fd6b910 Add files via upload 2017-05-13 19:14:54 +02:00
Pascal Vizeli
1f887b47ab register panel on core 2017-05-13 17:44:16 +02:00
Pascal Vizeli
affd8057ca WIP Add panel to hass.io (#46)
* Add poliymare

* Add commit

* add static route

* fix name

* add panel
2017-05-13 17:41:46 +02:00
Pascal Vizeli
7a8ee2c46a Merge pull request #45 from home-assistant/dev
Release 0.25
2017-05-12 16:20:12 +02:00
Pascal Vizeli
35fe1f464c Update Hass.IO 0.25 2017-05-12 16:15:18 +02:00
Pascal Vizeli
0955bafebd Update data handling of addons (#44)
* Update data handling of addons

* Update addons api

* Update data.py

* Update data.py

* Add url fix bug
2017-05-12 16:14:49 +02:00
Pascal Vizeli
2e0c540c63 Pump version 2017-05-12 08:53:20 +02:00
Pascal Vizeli
6e9ef17a28 Merge pull request #43 from home-assistant/dev
Release 0.24
2017-05-12 01:47:48 +02:00
20 changed files with 251 additions and 131 deletions

3
.gitmodules vendored Normal file
View File

@@ -0,0 +1,3 @@
[submodule "home-assistant-polymer"]
path = home-assistant-polymer
url = https://github.com/home-assistant/home-assistant-polymer

9
API.md
View File

@@ -43,7 +43,9 @@ The addons from `addons` are only installed ones.
"repository": "12345678|null", "repository": "12345678|null",
"version": "LAST_VERSION", "version": "LAST_VERSION",
"installed": "INSTALL_VERSION", "installed": "INSTALL_VERSION",
"detached": "bool" "detached": "bool",
"build": "bool",
"url": "null|url"
} }
], ],
"addons_repositories": [ "addons_repositories": [
@@ -67,7 +69,9 @@ Get all available addons
"repository": "core|local|REP_ID", "repository": "core|local|REP_ID",
"version": "LAST_VERSION", "version": "LAST_VERSION",
"installed": "none|INSTALL_VERSION", "installed": "none|INSTALL_VERSION",
"detached": "bool" "detached": "bool",
"build": "bool",
"url": "null|url"
} }
], ],
"repositories": [ "repositories": [
@@ -224,6 +228,7 @@ Output the raw docker log
"last_version": "LAST_VERSION", "last_version": "LAST_VERSION",
"state": "started|stopped", "state": "started|stopped",
"boot": "auto|manual", "boot": "auto|manual",
"build": "bool",
"options": {}, "options": {},
} }
``` ```

View File

@@ -191,7 +191,7 @@ class AddonManager(AddonsData):
return False return False
version = version or self.get_last_version(addon) version = version or self.get_last_version(addon)
is_running = self.dockers[addon].is_running() is_running = await self.dockers[addon].is_running()
# update # update
if await self.dockers[addon].update(version): if await self.dockers[addon].update(version):

View File

@@ -3,18 +3,20 @@ import copy
import logging import logging
import json import json
from pathlib import Path, PurePath from pathlib import Path, PurePath
import re
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error from voluptuous.humanize import humanize_error
from .util import extract_hash_from_path from .util import extract_hash_from_path
from .validate import ( from .validate import (
validate_options, SCHEMA_ADDON_CONFIG, SCHEMA_REPOSITORY_CONFIG) validate_options, SCHEMA_ADDON_CONFIG, SCHEMA_REPOSITORY_CONFIG,
MAP_VOLUME)
from ..const import ( from ..const import (
FILE_HASSIO_ADDONS, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, FILE_HASSIO_ADDONS, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON,
ATTR_STARTUP, ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, BOOT_AUTO, ATTR_STARTUP, ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, BOOT_AUTO,
DOCKER_REPO, ATTR_SCHEMA, ATTR_IMAGE, MAP_CONFIG, MAP_SSL, MAP_ADDONS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY, ATTR_URL, ATTR_ARCH,
MAP_BACKUP, ATTR_REPOSITORY, ATTR_URL, ATTR_ARCH, ATTR_LOCATON) ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT)
from ..config import Config from ..config import Config
from ..tools import read_json_file, write_json_file from ..tools import read_json_file, write_json_file
@@ -26,6 +28,8 @@ USER = 'user'
REPOSITORY_CORE = 'core' REPOSITORY_CORE = 'core'
REPOSITORY_LOCAL = 'local' REPOSITORY_LOCAL = 'local'
RE_VOLUME = re.compile(MAP_VOLUME)
class AddonsData(Config): class AddonsData(Config):
"""Hold data for addons inside HassIO.""" """Hold data for addons inside HassIO."""
@@ -149,12 +153,13 @@ class AddonsData(Config):
""" """
have_change = False have_change = False
for addon, data in self._system_data.items(): for addon in self.list_installed:
# detached # detached
if addon not in self._addons_cache: if addon not in self._addons_cache:
continue continue
cache = self._addons_cache[addon] cache = self._addons_cache[addon]
data = self._system_data[addon]
if data[ATTR_VERSION] == cache[ATTR_VERSION]: if data[ATTR_VERSION] == cache[ATTR_VERSION]:
if data != cache: if data != cache:
self._system_data[addon] = copy.deepcopy(cache) self._system_data[addon] = copy.deepcopy(cache)
@@ -166,20 +171,12 @@ class AddonsData(Config):
@property @property
def list_installed(self): def list_installed(self):
"""Return a list of installed addons.""" """Return a list of installed addons."""
return set(self._system_data.keys()) return set(self._system_data)
@property @property
def data_all(self): def list_all(self):
"""Return a dict of all addons.""" """Return a dict of all addons."""
return { return set(self._system_data) | set(self._addons_cache)
**self._system_data,
**self._addons_cache
}
@property
def data_installed(self):
"""Return a dict of installed addons."""
return self._system_data.copy()
def list_startup(self, start_type): def list_startup(self, start_type):
"""Get list of installed addon with need start by type.""" """Get list of installed addon with need start by type."""
@@ -271,35 +268,51 @@ class AddonsData(Config):
def get_name(self, addon): def get_name(self, addon):
"""Return name of addon.""" """Return name of addon."""
if addon in self._addons_cache:
return self._addons_cache[addon][ATTR_NAME]
return self._system_data[addon][ATTR_NAME] return self._system_data[addon][ATTR_NAME]
def get_description(self, addon): def get_description(self, addon):
"""Return description of addon.""" """Return description of addon."""
if addon in self._addons_cache:
return self._addons_cache[addon][ATTR_DESCRIPTON]
return self._system_data[addon][ATTR_DESCRIPTON] return self._system_data[addon][ATTR_DESCRIPTON]
def get_repository(self, addon): def get_repository(self, addon):
"""Return repository of addon.""" """Return repository of addon."""
if addon in self._addons_cache:
return self._addons_cache[addon][ATTR_REPOSITORY]
return self._system_data[addon][ATTR_REPOSITORY] return self._system_data[addon][ATTR_REPOSITORY]
def get_last_version(self, addon): def get_last_version(self, addon):
"""Return version of addon.""" """Return version of addon."""
if addon not in self._addons_cache: if addon in self._addons_cache:
return self.version_installed(addon) return self._addons_cache[addon][ATTR_VERSION]
return self._addons_cache[addon][ATTR_VERSION] return self.version_installed(addon)
def get_ports(self, addon): def get_ports(self, addon):
"""Return ports of addon.""" """Return ports of addon."""
return self._system_data[addon].get(ATTR_PORTS) return self._system_data[addon].get(ATTR_PORTS)
def get_devices(self, addon):
"""Return devices of addon."""
return self._system_data[addon].get(ATTR_DEVICES)
def get_environment(self, addon):
"""Return environment of addon."""
return self._system_data[addon].get(ATTR_ENVIRONMENT)
def get_url(self, addon): def get_url(self, addon):
"""Return url of addon.""" """Return url of addon."""
if addon in self._addons_cache:
return self._addons_cache[addon].get(ATTR_URL)
return self._system_data[addon].get(ATTR_URL) return self._system_data[addon].get(ATTR_URL)
def get_arch(self, addon): def get_arch(self, addon):
"""Return list of supported arch.""" """Return list of supported arch."""
if addon not in self._addons_cache: if addon in self._addons_cache:
return self._system_data[addon][ATTR_ARCH] return self._addons_cache[addon][ATTR_ARCH]
return self._addons_cache[addon][ATTR_ARCH] return self._system_data[addon][ATTR_ARCH]
def get_image(self, addon): def get_image(self, addon):
"""Return image name of addon.""" """Return image name of addon."""
@@ -307,44 +320,29 @@ class AddonsData(Config):
addon, self._addons_cache.get(addon) addon, self._addons_cache.get(addon)
) )
# core repository
if addon_data[ATTR_REPOSITORY] == REPOSITORY_CORE:
return "{}/{}-addon-{}".format(
DOCKER_REPO, self.arch, addon_data[ATTR_SLUG])
# Repository with dockerhub images # Repository with dockerhub images
if ATTR_IMAGE in addon_data: if ATTR_IMAGE in addon_data:
return addon_data[ATTR_IMAGE].format(arch=self.arch) return addon_data[ATTR_IMAGE].format(arch=self.arch)
# Local build addon # local build
if addon_data[ATTR_REPOSITORY] == REPOSITORY_LOCAL: return "{}/{}-addon-{}".format(
return "local/{}-addon-{}".format(self.arch, addon_data[ATTR_SLUG]) addon_data[ATTR_REPOSITORY], self.arch, addon_data[ATTR_SLUG])
_LOGGER.error("No image for %s", addon)
def need_build(self, addon): def need_build(self, addon):
"""Return True if this addon need a local build.""" """Return True if this addon need a local build."""
addon_data = self._system_data.get( addon_data = self._system_data.get(
addon, self._addons_cache.get(addon) addon, self._addons_cache.get(addon)
) )
return addon_data[ATTR_REPOSITORY] == REPOSITORY_LOCAL \ return ATTR_IMAGE not in addon_data
and not addon_data.get(ATTR_IMAGE)
def map_config(self, addon): def map_volumes(self, addon):
"""Return True if config map is needed.""" """Return a dict of {volume: policy} from addon."""
return MAP_CONFIG in self._system_data[addon][ATTR_MAP] volumes = {}
for volume in self._system_data[addon][ATTR_MAP]:
result = RE_VOLUME.match(volume)
volumes[result.group(1)] = result.group(2) or 'ro'
def map_ssl(self, addon): return volumes
"""Return True if ssl map is needed."""
return MAP_SSL in self._system_data[addon][ATTR_MAP]
def map_addons(self, addon):
"""Return True if addons map is needed."""
return MAP_ADDONS in self._system_data[addon][ATTR_MAP]
def map_backup(self, addon):
"""Return True if backup map is needed."""
return MAP_BACKUP in self._system_data[addon][ATTR_MAP]
def path_data(self, addon): def path_data(self, addon):
"""Return addon data path inside supervisor.""" """Return addon data path inside supervisor."""

View File

@@ -4,10 +4,13 @@ import voluptuous as vol
from ..const import ( from ..const import (
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_STARTUP,
ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER, ATTR_BOOT, ATTR_MAP, ATTR_OPTIONS, ATTR_PORTS, STARTUP_ONCE, STARTUP_AFTER,
STARTUP_BEFORE, BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE, MAP_SSL, STARTUP_BEFORE, BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, ATTR_IMAGE,
MAP_CONFIG, MAP_ADDONS, MAP_BACKUP, ATTR_URL, ATTR_MAINTAINER, ATTR_ARCH, ATTR_URL, ATTR_MAINTAINER, ATTR_ARCH, ATTR_DEVICES, ATTR_ENVIRONMENT,
ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386) ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ARCH_I386)
MAP_VOLUME = r"^(config|ssl|addons|backup|share|mnt)(?::(rw|ro))?$"
V_STR = 'str' V_STR = 'str'
V_INT = 'int' V_INT = 'int'
V_FLOAT = 'float' V_FLOAT = 'float'
@@ -34,9 +37,9 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
vol.Required(ATTR_BOOT): vol.Required(ATTR_BOOT):
vol.In([BOOT_AUTO, BOOT_MANUAL]), vol.In([BOOT_AUTO, BOOT_MANUAL]),
vol.Optional(ATTR_PORTS): dict, vol.Optional(ATTR_PORTS): dict,
vol.Optional(ATTR_MAP, default=[]): [ vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
vol.In([MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP]) vol.Optional(ATTR_MAP, default=[]): [vol.Match(MAP_VOLUME)],
], vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
vol.Required(ATTR_OPTIONS): dict, vol.Required(ATTR_OPTIONS): dict,
vol.Required(ATTR_SCHEMA): { vol.Required(ATTR_SCHEMA): {
vol.Coerce(str): vol.Any(ADDON_ELEMENT, [ vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
@@ -70,10 +73,10 @@ def validate_options(raw_schema):
try: try:
if isinstance(typ, list): if isinstance(typ, list):
# nested value # nested value
options[key] = _nested_validate(typ[0], value) options[key] = _nested_validate(typ[0], value, key)
else: else:
# normal value # normal value
options[key] = _single_validate(typ, value) options[key] = _single_validate(typ, value, key)
except (IndexError, KeyError): except (IndexError, KeyError):
raise vol.Invalid( raise vol.Invalid(
"Type error for {}.".format(key)) from None "Type error for {}.".format(key)) from None
@@ -84,12 +87,12 @@ def validate_options(raw_schema):
# pylint: disable=no-value-for-parameter # pylint: disable=no-value-for-parameter
def _single_validate(typ, value): def _single_validate(typ, value, key):
"""Validate a single element.""" """Validate a single element."""
try: try:
# if required argument # if required argument
if value is None: if value is None:
raise vol.Invalid("A required argument is not set!") raise vol.Invalid("Missing required option '{}'.".format(key))
if typ == V_STR: if typ == V_STR:
return str(value) return str(value)
@@ -104,13 +107,13 @@ def _single_validate(typ, value):
elif typ == V_URL: elif typ == V_URL:
return vol.Url()(value) return vol.Url()(value)
raise vol.Invalid("Fatal error for {}.".format(value)) raise vol.Invalid("Fatal error for {} type {}.".format(key, typ))
except ValueError: except ValueError:
raise vol.Invalid( raise vol.Invalid(
"Type {} error for {}.".format(typ, value)) from None "Type {} error for '{}' on {}.".format(typ, value, key)) from None
def _nested_validate(typ, data_list): def _nested_validate(typ, data_list, key):
"""Validate nested items.""" """Validate nested items."""
options = [] options = []
@@ -123,10 +126,10 @@ def _nested_validate(typ, data_list):
raise vol.Invalid( raise vol.Invalid(
"Unknown nested options {}.".format(c_key)) "Unknown nested options {}.".format(c_key))
c_options[c_key] = _single_validate(typ[c_key], c_value) c_options[c_key] = _single_validate(typ[c_key], c_value, c_key)
options.append(c_options) options.append(c_options)
# normal list # normal list
else: else:
options.append(_single_validate(typ, element)) options.append(_single_validate(typ, element, key))
return options return options

View File

@@ -1,5 +1,6 @@
"""Init file for HassIO rest api.""" """Init file for HassIO rest api."""
import logging import logging
from pathlib import Path
from aiohttp import web from aiohttp import web
@@ -96,6 +97,13 @@ class RestAPI(object):
self.webapp.router.add_post('/security/totp', api_security.totp) self.webapp.router.add_post('/security/totp', api_security.totp)
self.webapp.router.add_post('/security/session', api_security.session) self.webapp.router.add_post('/security/session', api_security.session)
def register_panel(self):
"""Register panel for homeassistant."""
panel_dir = Path(__file__).parents[1].joinpath('panel')
self.webapp.router.register_resource(
web.StaticResource('/panel', str(panel_dir)))
async def start(self): async def start(self):
"""Run rest api webserver.""" """Run rest api webserver."""
self._handler = self.webapp.make_handler(loop=self.loop) self._handler = self.webapp.make_handler(loop=self.loop)

View File

@@ -9,7 +9,7 @@ from .util import api_process, api_process_raw, api_validate
from ..const import ( from ..const import (
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY, ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
STATE_STOPPED, STATE_STARTED, BOOT_AUTO, BOOT_MANUAL) ATTR_BUILD, STATE_STOPPED, STATE_STARTED, BOOT_AUTO, BOOT_MANUAL)
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -59,6 +59,7 @@ class APIAddons(object):
ATTR_OPTIONS: self.addons.get_options(addon), ATTR_OPTIONS: self.addons.get_options(addon),
ATTR_URL: self.addons.get_url(addon), ATTR_URL: self.addons.get_url(addon),
ATTR_DETACHED: addon in self.addons.list_detached, ATTR_DETACHED: addon in self.addons.list_detached,
ATTR_BUILD: self.addons.need_build(addon),
} }
@api_process @api_process

View File

@@ -10,7 +10,8 @@ from ..const import (
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL,
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_REPOSITORIES, HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_REPOSITORIES,
ATTR_REPOSITORY, ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_REPOSITORY, ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED,
ATTR_DETACHED, ATTR_SOURCE, ATTR_MAINTAINER, ATTR_URL, ATTR_ARCH) ATTR_DETACHED, ATTR_SOURCE, ATTR_MAINTAINER, ATTR_URL, ATTR_ARCH,
ATTR_BUILD)
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -41,23 +42,23 @@ class APISupervisor(object):
detached = self.addons.list_detached detached = self.addons.list_detached
if only_installed: if only_installed:
addons = self.addons.data_installed addons = self.addons.list_installed
else: else:
addons = self.addons.data_all addons = self.addons.list_all
data = [] data = []
for addon, values in addons.items(): for addon in addons:
i_version = self.addons.version_installed(addon)
data.append({ data.append({
ATTR_NAME: values[ATTR_NAME], ATTR_NAME: self.addons.get_name(addon),
ATTR_SLUG: addon, ATTR_SLUG: addon,
ATTR_DESCRIPTON: values[ATTR_DESCRIPTON], ATTR_DESCRIPTON: self.addons.get_description(addon),
ATTR_VERSION: values[ATTR_VERSION], ATTR_VERSION: self.addons.get_last_version(addon),
ATTR_INSTALLED: i_version, ATTR_INSTALLED: self.addons.version_installed(addon),
ATTR_ARCH: values[ATTR_ARCH], ATTR_ARCH: self.addons.get_arch(addon),
ATTR_DETACHED: addon in detached, ATTR_DETACHED: addon in detached,
ATTR_REPOSITORY: values[ATTR_REPOSITORY], ATTR_REPOSITORY: self.addons.get_repository(addon),
ATTR_BUILD: self.addons.need_build(addon),
ATTR_URL: self.addons.get_url(addon),
}) })
return data return data

View File

@@ -21,24 +21,24 @@ def initialize_system_data(websession):
"Create Home-Assistant config folder %s", config.path_config) "Create Home-Assistant config folder %s", config.path_config)
config.path_config.mkdir() config.path_config.mkdir()
# homeassistant ssl folder # hassio ssl folder
if not config.path_ssl.is_dir(): if not config.path_ssl.is_dir():
_LOGGER.info("Create Home-Assistant ssl folder %s", config.path_ssl) _LOGGER.info("Create hassio ssl folder %s", config.path_ssl)
config.path_ssl.mkdir() config.path_ssl.mkdir()
# homeassistant addon data folder # hassio addon data folder
if not config.path_addons_data.is_dir(): if not config.path_addons_data.is_dir():
_LOGGER.info("Create Home-Assistant addon data folder %s", _LOGGER.info(
config.path_addons_data) "Create hassio addon data folder %s", config.path_addons_data)
config.path_addons_data.mkdir(parents=True) config.path_addons_data.mkdir(parents=True)
if not config.path_addons_local.is_dir(): if not config.path_addons_local.is_dir():
_LOGGER.info("Create Home-Assistant addon local repository folder %s", _LOGGER.info("Create hassio addon local repository folder %s",
config.path_addons_local) config.path_addons_local)
config.path_addons_local.mkdir(parents=True) config.path_addons_local.mkdir(parents=True)
if not config.path_addons_git.is_dir(): if not config.path_addons_git.is_dir():
_LOGGER.info("Create Home-Assistant addon git repositories folder %s", _LOGGER.info("Create hassio addon git repositories folder %s",
config.path_addons_git) config.path_addons_git)
config.path_addons_git.mkdir(parents=True) config.path_addons_git.mkdir(parents=True)
@@ -47,12 +47,16 @@ def initialize_system_data(websession):
config.path_addons_build) config.path_addons_build)
config.path_addons_build.mkdir(parents=True) config.path_addons_build.mkdir(parents=True)
# homeassistant backup folder # hassio backup folder
if not config.path_backup.is_dir(): if not config.path_backup.is_dir():
_LOGGER.info("Create Home-Assistant backup folder %s", _LOGGER.info("Create hassio backup folder %s", config.path_backup)
config.path_backup)
config.path_backup.mkdir() config.path_backup.mkdir()
# share folder
if not config.path_share.is_dir():
_LOGGER.info("Create hassio share folder %s", config.path_share)
config.path_share.mkdir()
return config return config

View File

@@ -32,6 +32,8 @@ ADDONS_CUSTOM_LIST = 'addons_custom_list'
BACKUP_DATA = PurePath("backup") BACKUP_DATA = PurePath("backup")
SHARE_DATA = PurePath("share")
UPSTREAM_BETA = 'upstream_beta' UPSTREAM_BETA = 'upstream_beta'
API_ENDPOINT = 'api_endpoint' API_ENDPOINT = 'api_endpoint'
@@ -233,6 +235,16 @@ class CoreConfig(Config):
"""Return root backup data folder extern for docker.""" """Return root backup data folder extern for docker."""
return PurePath(self.path_extern_hassio, BACKUP_DATA) return PurePath(self.path_extern_hassio, BACKUP_DATA)
@property
def path_share(self):
"""Return root share data folder."""
return Path(HASSIO_SHARE, SHARE_DATA)
@property
def path_extern_share(self):
"""Return root share data folder extern for docker."""
return PurePath(self.path_extern_hassio, SHARE_DATA)
@property @property
def addons_repositories(self): def addons_repositories(self):
"""Return list of addons custom repositories.""" """Return list of addons custom repositories."""

View File

@@ -1,7 +1,7 @@
"""Const file for HassIO.""" """Const file for HassIO."""
from pathlib import Path from pathlib import Path
HASSIO_VERSION = '0.24' HASSIO_VERSION = '0.29'
URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/' URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
'hassio/master/version.json') 'hassio/master/version.json')
@@ -10,8 +10,6 @@ URL_HASSIO_VERSION_BETA = ('https://raw.githubusercontent.com/home-assistant/'
URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons' URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
DOCKER_REPO = "homeassistant"
HASSIO_SHARE = Path("/data") HASSIO_SHARE = Path("/data")
RUN_UPDATE_INFO_TASKS = 28800 RUN_UPDATE_INFO_TASKS = 28800
@@ -28,6 +26,14 @@ FILE_HASSIO_CONFIG = Path(HASSIO_SHARE, "config.json")
SOCKET_DOCKER = Path("/var/run/docker.sock") SOCKET_DOCKER = Path("/var/run/docker.sock")
SOCKET_HC = Path("/var/run/hassio-hc.sock") SOCKET_HC = Path("/var/run/hassio-hc.sock")
LABEL_VERSION = 'io.hass.version'
LABEL_ARCH = 'io.hass.arch'
LABEL_TYPE = 'io.hass.type'
META_ADDON = 'addon'
META_SUPERVISOR = 'supervisor'
META_HOMEASSISTANT = 'homeassistant'
JSON_RESULT = 'result' JSON_RESULT = 'result'
JSON_DATA = 'data' JSON_DATA = 'data'
JSON_MESSAGE = 'message' JSON_MESSAGE = 'message'
@@ -68,6 +74,9 @@ ATTR_TOTP = 'totp'
ATTR_INITIALIZE = 'initialize' ATTR_INITIALIZE = 'initialize'
ATTR_SESSION = 'session' ATTR_SESSION = 'session'
ATTR_LOCATON = 'location' ATTR_LOCATON = 'location'
ATTR_BUILD = 'build'
ATTR_DEVICES = 'devices'
ATTR_ENVIRONMENT = 'environment'
STARTUP_BEFORE = 'before' STARTUP_BEFORE = 'before'
STARTUP_AFTER = 'after' STARTUP_AFTER = 'after'
@@ -83,6 +92,8 @@ MAP_CONFIG = 'config'
MAP_SSL = 'ssl' MAP_SSL = 'ssl'
MAP_ADDONS = 'addons' MAP_ADDONS = 'addons'
MAP_BACKUP = 'backup' MAP_BACKUP = 'backup'
MAP_SHARE = 'share'
MAP_MNT = 'mnt'
ARCH_ARMHF = 'armhf' ARCH_ARMHF = 'armhf'
ARCH_AARCH64 = 'aarch64' ARCH_AARCH64 = 'aarch64'

View File

@@ -74,6 +74,7 @@ class HassIO(object):
self.api.register_homeassistant(self.homeassistant) self.api.register_homeassistant(self.homeassistant)
self.api.register_addons(self.addons) self.api.register_addons(self.addons)
self.api.register_security() self.api.register_security()
self.api.register_panel()
# schedule api session cleanup # schedule api session cleanup
self.scheduler.register_task( self.scheduler.register_task(

View File

@@ -5,6 +5,7 @@ import logging
import docker import docker
from ..const import LABEL_VERSION
from ..tools import get_version_from_env from ..tools import get_version_from_env
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -33,6 +34,19 @@ class DockerBase(object):
"""Return True if a task is in progress.""" """Return True if a task is in progress."""
return self._lock.locked() return self._lock.locked()
def process_metadata(self, metadata=None, force=False):
"""Read metadata and set it to object."""
if not force and self.version:
return
# read metadata
metadata = metadata or self.container.attrs
if LABEL_VERSION in metadata['Config']['Labels']:
self.version = metadata['Config']['Labels'][LABEL_VERSION]
else:
# dedicated
self.version = get_version_from_env(metadata['Config']['Env'])
async def install(self, tag): async def install(self, tag):
"""Pull docker image.""" """Pull docker image."""
if self._lock.locked(): if self._lock.locked():
@@ -52,12 +66,12 @@ class DockerBase(object):
image = self.dock.images.pull("{}:{}".format(self.image, tag)) image = self.dock.images.pull("{}:{}".format(self.image, tag))
image.tag(self.image, tag='latest') image.tag(self.image, tag='latest')
self.version = get_version_from_env(image.attrs['Config']['Env']) self.process_metadata(metadata=image.attrs, force=True)
_LOGGER.info("Tag image %s with version %s as latest",
self.image, self.version)
except docker.errors.APIError as err: except docker.errors.APIError as err:
_LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err) _LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
return False return False
_LOGGER.info("Tag image %s with version %s as latest", self.image, tag)
return True return True
def exists(self): def exists(self):
@@ -74,7 +88,7 @@ class DockerBase(object):
""" """
try: try:
image = self.dock.images.get(self.image) image = self.dock.images.get(self.image)
self.version = get_version_from_env(image.attrs['Config']['Env']) self.process_metadata(metadata=image.attrs)
except docker.errors.DockerException: except docker.errors.DockerException:
return False return False
@@ -95,8 +109,7 @@ class DockerBase(object):
if not self.container: if not self.container:
try: try:
self.container = self.dock.containers.get(self.docker_name) self.container = self.dock.containers.get(self.docker_name)
self.version = get_version_from_env( self.process_metadata()
self.container.attrs['Config']['Env'])
except docker.errors.DockerException: except docker.errors.DockerException:
return False return False
else: else:
@@ -121,8 +134,7 @@ class DockerBase(object):
try: try:
self.container = self.dock.containers.get(self.docker_name) self.container = self.dock.containers.get(self.docker_name)
self.image = self.container.attrs['Config']['Image'] self.image = self.container.attrs['Config']['Image']
self.version = get_version_from_env( self.process_metadata()
self.container.attrs['Config']['Env'])
_LOGGER.info("Attach to image %s with version %s", _LOGGER.info("Attach to image %s with version %s",
self.image, self.version) self.image, self.version)
except (docker.errors.DockerException, KeyError): except (docker.errors.DockerException, KeyError):
@@ -199,12 +211,14 @@ class DockerBase(object):
self.image, self.version) self.image, self.version)
try: try:
self.dock.images.remove( with suppress(docker.errors.ImageNotFound):
image="{}:latest".format(self.image), force=True) self.dock.images.remove(
self.dock.images.remove( image="{}:latest".format(self.image), force=True)
image="{}:{}".format(self.image, self.version), force=True)
except docker.errors.ImageNotFound: with suppress(docker.errors.ImageNotFound):
return True self.dock.images.remove(
image="{}:{}".format(self.image, self.version), force=True)
except docker.errors.DockerException as err: except docker.errors.DockerException as err:
_LOGGER.warning("Can't remove image %s -> %s", self.image, err) _LOGGER.warning("Can't remove image %s -> %s", self.image, err)
return False return False

View File

@@ -7,7 +7,9 @@ import docker
from . import DockerBase from . import DockerBase
from .util import dockerfile_template from .util import dockerfile_template
from ..tools import get_version_from_env from ..const import (
META_ADDON, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE,
MAP_MNT)
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -35,28 +37,42 @@ class DockerAddon(DockerBase):
'bind': '/data', 'mode': 'rw' 'bind': '/data', 'mode': 'rw'
}} }}
if self.addons_data.map_config(self.addon): addon_mapping = self.addons_data.map_volumes(self.addon)
if MAP_CONFIG in addon_mapping:
volumes.update({ volumes.update({
str(self.config.path_extern_config): { str(self.config.path_extern_config): {
'bind': '/config', 'mode': 'rw' 'bind': '/config', 'mode': addon_mapping[MAP_CONFIG]
}}) }})
if self.addons_data.map_ssl(self.addon): if MAP_SSL in addon_mapping:
volumes.update({ volumes.update({
str(self.config.path_extern_ssl): { str(self.config.path_extern_ssl): {
'bind': '/ssl', 'mode': 'rw' 'bind': '/ssl', 'mode': addon_mapping[MAP_SSL]
}}) }})
if self.addons_data.map_addons(self.addon): if MAP_ADDONS in addon_mapping:
volumes.update({ volumes.update({
str(self.config.path_extern_addons_local): { str(self.config.path_extern_addons_local): {
'bind': '/addons', 'mode': 'rw' 'bind': '/addons', 'mode': addon_mapping[MAP_ADDONS]
}}) }})
if self.addons_data.map_backup(self.addon): if MAP_BACKUP in addon_mapping:
volumes.update({ volumes.update({
str(self.config.path_extern_backup): { str(self.config.path_extern_backup): {
'bind': '/backup', 'mode': 'rw' 'bind': '/backup', 'mode': addon_mapping[MAP_BACKUP]
}})
if MAP_SHARE in addon_mapping:
volumes.update({
str(self.config.path_extern_share): {
'bind': '/share', 'mode': addon_mapping[MAP_SHARE]
}})
if MAP_MNT in addon_mapping:
volumes.update({
'/mnt': {
'bind': '/mnt', 'mode': addon_mapping[MAP_MNT]
}}) }})
return volumes return volumes
@@ -79,12 +95,12 @@ class DockerAddon(DockerBase):
detach=True, detach=True,
network_mode='bridge', network_mode='bridge',
ports=self.addons_data.get_ports(self.addon), ports=self.addons_data.get_ports(self.addon),
devices=self.addons_data.get_devices(self.addon),
environment=self.addons_data.get_environment(self.addon),
volumes=self.volumes, volumes=self.volumes,
) )
self.version = get_version_from_env( self.process_metadata()
self.container.attrs['Config']['Env'])
_LOGGER.info("Start docker addon %s with version %s", _LOGGER.info("Start docker addon %s with version %s",
self.image, self.version) self.image, self.version)
@@ -99,16 +115,27 @@ class DockerAddon(DockerBase):
Need run inside executor. Need run inside executor.
""" """
# read container
try: try:
self.container = self.dock.containers.get(self.docker_name) self.container = self.dock.containers.get(self.docker_name)
self.version = get_version_from_env( self.process_metadata()
self.container.attrs['Config']['Env'])
_LOGGER.info( _LOGGER.info("Attach to container %s with version %s",
"Attach to image %s with version %s", self.image, self.version) self.image, self.version)
return
except (docker.errors.DockerException, KeyError): except (docker.errors.DockerException, KeyError):
pass pass
# read image
try:
image = self.dock.images.get(self.image)
self.process_metadata(metadata=image.attrs)
_LOGGER.info("Attach to image %s with version %s",
self.image, self.version)
except (docker.errors.DockerException, KeyError):
_LOGGER.error("No container/image found for %s", self.image)
def _install(self, tag): def _install(self, tag):
"""Pull docker image or build it. """Pull docker image or build it.
@@ -147,7 +174,8 @@ class DockerAddon(DockerBase):
# prepare Dockerfile # prepare Dockerfile
try: try:
dockerfile_template( dockerfile_template(
Path(build_dir, 'Dockerfile'), self.addons_data.arch, tag) Path(build_dir, 'Dockerfile'), self.addons_data.arch,
tag, META_ADDON)
except OSError as err: except OSError as err:
_LOGGER.error("Can't prepare dockerfile -> %s", err) _LOGGER.error("Can't prepare dockerfile -> %s", err)
@@ -159,12 +187,14 @@ class DockerAddon(DockerBase):
image = self.dock.images.build( image = self.dock.images.build(
path=str(build_dir), tag=build_tag, pull=True) path=str(build_dir), tag=build_tag, pull=True)
_LOGGER.info("Build %s done", build_tag)
image.tag(self.image, tag='latest') image.tag(self.image, tag='latest')
self.process_metadata(metadata=image.attrs, force=True)
except (docker.errors.DockerException, TypeError) as err: except (docker.errors.DockerException, TypeError) as err:
_LOGGER.error("Can't build %s -> %s", build_tag, err) _LOGGER.error("Can't build %s -> %s", build_tag, err)
return False return False
_LOGGER.info("Build %s done", build_tag)
return True return True
finally: finally:

View File

@@ -4,7 +4,6 @@ import logging
import docker import docker
from . import DockerBase from . import DockerBase
from ..tools import get_version_from_env
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@@ -48,11 +47,12 @@ class DockerHomeAssistant(DockerBase):
str(self.config.path_extern_config): str(self.config.path_extern_config):
{'bind': '/config', 'mode': 'rw'}, {'bind': '/config', 'mode': 'rw'},
str(self.config.path_extern_ssl): str(self.config.path_extern_ssl):
{'bind': '/ssl', 'mode': 'rw'}, {'bind': '/ssl', 'mode': 'ro'},
str(self.config.path_extern_share):
{'bind': '/share', 'mode': 'rw'},
}) })
self.version = get_version_from_env( self.process_metadata()
self.container.attrs['Config']['Env'])
_LOGGER.info("Start docker addon %s with version %s", _LOGGER.info("Start docker addon %s with version %s",
self.image, self.version) self.image, self.version)

View File

@@ -11,11 +11,10 @@ RESIN_BASE_IMAGE = {
ARCH_AMD64: "resin/amd64-alpine:3.5", ARCH_AMD64: "resin/amd64-alpine:3.5",
} }
TMPL_VERSION = re.compile(r"%%VERSION%%")
TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%") TMPL_IMAGE = re.compile(r"%%BASE_IMAGE%%")
def dockerfile_template(dockerfile, arch, version): def dockerfile_template(dockerfile, arch, version, meta_type):
"""Prepare a Hass.IO dockerfile.""" """Prepare a Hass.IO dockerfile."""
buff = [] buff = []
resin_image = RESIN_BASE_IMAGE[arch] resin_image = RESIN_BASE_IMAGE[arch]
@@ -23,10 +22,19 @@ def dockerfile_template(dockerfile, arch, version):
# read docker # read docker
with dockerfile.open('r') as dock_input: with dockerfile.open('r') as dock_input:
for line in dock_input: for line in dock_input:
line = TMPL_VERSION.sub(version, line)
line = TMPL_IMAGE.sub(resin_image, line) line = TMPL_IMAGE.sub(resin_image, line)
buff.append(line) buff.append(line)
# add metadata
buff.append(create_metadata(version, arch, meta_type))
# write docker # write docker
with dockerfile.open('w') as dock_output: with dockerfile.open('w') as dock_output:
dock_output.writelines(buff) dock_output.writelines(buff)
def create_metadata(version, arch, meta_type):
"""Generate docker label layer for hassio."""
return ('LABEL io.hass.version="{}" '
'io.hass.arch="{}" '
'io.hass.type="{}"').format(version, arch, meta_type)

File diff suppressed because one or more lines are too long

Binary file not shown.

View File

@@ -1,5 +1,5 @@
{ {
"hassio": "0.24", "hassio": "0.29",
"homeassistant": "0.44.2", "homeassistant": "0.44.2",
"resinos": "0.7", "resinos": "0.7",
"resinhup": "0.1", "resinhup": "0.1",