Merge remote-tracking branch 'origin/dev'

This commit is contained in:
Pascal Vizeli 2017-07-06 01:42:07 +02:00
commit 4dbece8e8e
21 changed files with 1219 additions and 130 deletions

67
API.md
View File

@ -51,6 +51,13 @@ The addons from `addons` are only installed one.
], ],
"addons_repositories": [ "addons_repositories": [
"REPO_URL" "REPO_URL"
],
"snapshots": [
{
"slug": "SLUG",
"data": "ISO",
"name": "Custom name"
}
] ]
} }
``` ```
@ -148,7 +155,65 @@ Return QR-Code
} }
``` ```
### Backup/Snapshot
- POST `/snapshots/new/full`
```json
{
"name": "Optional"
}
```
- POST `/snapshots/new/partial`
```json
{
"name": "Optional",
"addons": ["ADDON_SLUG"],
"folders": ["FOLDER_NAME"]
}
```
- POST `/snapshots/reload`
- GET `/snapshots/{slug}/info`
```json
{
"slug": "SNAPSHOT ID",
"type": "full|partial",
"name": "custom snapshot name / description",
"date": "ISO",
"size": "SIZE_IN_MB",
"homeassistant": {
"version": "INSTALLED_HASS_VERSION",
"devices": []
},
"addons": [
{
"slug": "ADDON_SLUG",
"name": "NAME",
"version": "INSTALLED_VERSION"
}
],
"repositories": ["URL"],
"folders": ["NAME"]
}
```
- POST `/snapshots/{slug}/remove`
- POST `/snapshots/{slug}/restore/full`
- POST `/snapshots/{slug}/restore/partial`
```json
{
"homeassistant": "bool",
"addons": ["ADDON_SLUG"],
"folders": ["FOLDER_NAME"]
}
```
### Host ### Host
- POST `/host/reload`
- POST `/host/shutdown` - POST `/host/shutdown`
@ -231,6 +296,8 @@ Output the raw docker log
### REST API addons ### REST API addons
- POST `/addons/reload`
- GET `/addons/{addon}/info` - GET `/addons/{addon}/info`
```json ```json
{ {

View File

@ -30,7 +30,7 @@ if __name__ == "__main__":
_LOGGER.info("Run Hassio setup") _LOGGER.info("Run Hassio setup")
loop.run_until_complete(hassio.setup()) loop.run_until_complete(hassio.setup())
_LOGGER.info("Start Hassio task") _LOGGER.info("Start Hassio")
loop.call_soon_threadsafe(loop.create_task, hassio.start()) loop.call_soon_threadsafe(loop.create_task, hassio.start())
loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio) loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)

View File

@ -1,22 +1,29 @@
"""Init file for HassIO addons.""" """Init file for HassIO addons."""
from copy import deepcopy from copy import deepcopy
import logging import logging
import json
from pathlib import Path, PurePath from pathlib import Path, PurePath
import re import re
import shutil import shutil
import tarfile
from tempfile import TemporaryDirectory
import voluptuous as vol import voluptuous as vol
from voluptuous.humanize import humanize_error from voluptuous.humanize import humanize_error
from .validate import validate_options, MAP_VOLUME from .validate import (
validate_options, SCHEMA_ADDON_USER, SCHEMA_ADDON_SYSTEM,
SCHEMA_ADDON_SNAPSHOT, MAP_VOLUME)
from ..const import ( from ..const import (
ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP, ATTR_NAME, ATTR_VERSION, ATTR_SLUG, ATTR_DESCRIPTON, ATTR_BOOT, ATTR_MAP,
ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY, ATTR_OPTIONS, ATTR_PORTS, ATTR_SCHEMA, ATTR_IMAGE, ATTR_REPOSITORY,
ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT, ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP, ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
STATE_STARTED, STATE_STOPPED, STATE_NONE) STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
ATTR_STATE)
from .util import check_installed
from ..dock.addon import DockerAddon from ..dock.addon import DockerAddon
from ..tools import write_json_file from ..tools import write_json_file, read_json_file
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -26,22 +33,33 @@ RE_VOLUME = re.compile(MAP_VOLUME)
class Addon(object): class Addon(object):
"""Hold data for addon inside HassIO.""" """Hold data for addon inside HassIO."""
def __init__(self, config, loop, dock, data, addon_slug): def __init__(self, config, loop, dock, data, slug):
"""Initialize data holder.""" """Initialize data holder."""
self.loop = loop
self.config = config self.config = config
self.data = data self.data = data
self._id = addon_slug self._id = slug
if self._mesh is None:
raise RuntimeError("{} not a valid addon!".format(self._id))
self.addon_docker = DockerAddon(config, loop, dock, self) self.addon_docker = DockerAddon(config, loop, dock, self)
async def load(self): async def load(self):
"""Async initialize of object.""" """Async initialize of object."""
if self.is_installed: if self.is_installed:
self._validate_system_user()
await self.addon_docker.attach() await self.addon_docker.attach()
def _validate_system_user(self):
    """Validate internal data they read from file.

    Re-checks the persisted system and user entries for this addon
    against their schemas after loading from disk.
    """
    for data, schema in ((self.data.system, SCHEMA_ADDON_SYSTEM),
                         (self.data.user, SCHEMA_ADDON_USER)):
        try:
            # schema() may coerce values, so write the result back
            data[self._id] = schema(data[self._id])
        except vol.Invalid as err:
            # invalid entry is logged but kept as-is
            _LOGGER.warning("Can't validate addon load %s -> %s", self._id,
                            humanize_error(data[self._id], err))
        except KeyError:
            # addon has no entry in this mapping -> nothing to validate
            pass
@property @property
def slug(self): def slug(self):
"""Return slug/id of addon.""" """Return slug/id of addon."""
@ -88,6 +106,12 @@ class Addon(object):
self.data.user[self._id][ATTR_VERSION] = version self.data.user[self._id][ATTR_VERSION] = version
self.data.save() self.data.save()
def _restore_data(self, user, system):
    """Restore data to addon.

    Stores deep copies of the given user/system mappings under this
    addon's slug (so the caller's objects cannot be mutated later)
    and persists the result.
    """
    self.data.user[self._id] = deepcopy(user)
    self.data.system[self._id] = deepcopy(system)
    self.data.save()
@property @property
def options(self): def options(self):
"""Return options with local changes.""" """Return options with local changes."""
@ -281,12 +305,9 @@ class Addon(object):
self._set_install(version) self._set_install(version)
return True return True
@check_installed
async def uninstall(self): async def uninstall(self):
"""Remove a addon.""" """Remove a addon."""
if not self.is_installed:
_LOGGER.error("Addon %s is not installed", self._id)
return False
if not await self.addon_docker.remove(): if not await self.addon_docker.remove():
return False return False
@ -307,29 +328,21 @@ class Addon(object):
return STATE_STARTED return STATE_STARTED
return STATE_STOPPED return STATE_STOPPED
@check_installed
async def start(self): async def start(self):
"""Set options and start addon.""" """Set options and start addon."""
if not self.is_installed:
_LOGGER.error("Addon %s is not installed", self._id)
return False
return await self.addon_docker.run() return await self.addon_docker.run()
@check_installed
async def stop(self): async def stop(self):
"""Stop addon.""" """Stop addon."""
if not self.is_installed:
_LOGGER.error("Addon %s is not installed", self._id)
return False
return await self.addon_docker.stop() return await self.addon_docker.stop()
@check_installed
async def update(self, version=None): async def update(self, version=None):
"""Update addon.""" """Update addon."""
if not self.is_installed:
_LOGGER.error("Addon %s is not installed", self._id)
return False
version = version or self.last_version version = version or self.last_version
if version == self.version_installed: if version == self.version_installed:
_LOGGER.warning( _LOGGER.warning(
"Addon %s is already installed in %s", self._id, version) "Addon %s is already installed in %s", self._id, version)
@ -341,18 +354,112 @@ class Addon(object):
self._set_update(version) self._set_update(version)
return True return True
@check_installed
async def restart(self): async def restart(self):
"""Restart addon.""" """Restart addon."""
if not self.is_installed:
_LOGGER.error("Addon %s is not installed", self._id)
return False
return await self.addon_docker.restart() return await self.addon_docker.restart()
@check_installed
async def logs(self): async def logs(self):
"""Return addons log output.""" """Return addons log output."""
if not self.is_installed:
_LOGGER.error("Addon %s is not installed", self._id)
return False
return await self.addon_docker.logs() return await self.addon_docker.logs()
@check_installed
async def snapshot(self, tar_file):
    """Snapshot a state of a addon.

    Writes a gzip tarball containing the addon metadata (addon.json),
    the data folder, and — for locally built addons — the exported
    docker image. Returns True on success, False otherwise.
    """
    with TemporaryDirectory(dir=str(self.config.path_tmp)) as workdir:
        # locally built addons: export the image into the snapshot
        if self.need_build:
            image_tar = Path(workdir, "image.tar")
            if not await self.addon_docker.export_image(image_tar):
                return False

        # collect local configs and runtime state
        metadata = {
            ATTR_USER: self.data.user.get(self._id, {}),
            ATTR_SYSTEM: self.data.system.get(self._id, {}),
            ATTR_VERSION: self.version_installed,
            ATTR_STATE: await self.state(),
        }
        if not write_json_file(Path(workdir, "addon.json"), metadata):
            _LOGGER.error("Can't write addon.json for %s", self._id)
            return False

        def _create_tar():
            """Write tar inside loop."""
            with tarfile.open(tar_file, "w:gz",
                              compresslevel=1) as snapshot:
                snapshot.add(workdir, arcname=".")
                snapshot.add(self.path_data, arcname="data")

        try:
            # tar creation is blocking -> run in executor thread
            await self.loop.run_in_executor(None, _create_tar)
        except tarfile.TarError as err:
            _LOGGER.error("Can't write tarfile %s -> %s", tar_file, err)
            return False

    return True
async def restore(self, tar_file):
    """Restore a state of a addon.

    Extracts the snapshot tarball, validates its addon.json, restores
    user/system data, the docker image (from the snapshot or the
    registry) and the data folder, then restarts the addon when it was
    running at snapshot time. Returns True on success, False otherwise.
    """
    with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
        # extract snapshot (blocking -> executor thread)
        def _extract_tar():
            """Extract tar snapshot."""
            with tarfile.open(tar_file, "r:gz") as snapshot:
                snapshot.extractall(path=Path(temp))

        try:
            await self.loop.run_in_executor(None, _extract_tar)
        except tarfile.TarError as err:
            _LOGGER.error("Can't read tarfile %s -> %s", tar_file, err)
            return False

        # read snapshot data
        try:
            data = read_json_file(Path(temp, "addon.json"))
        except (OSError, json.JSONDecodeError) as err:
            _LOGGER.error("Can't read addon.json -> %s", err)
            # BUGFIX: abort here — continuing left `data` unbound and
            # crashed with NameError in the validation step below
            return False

        # validate
        try:
            data = SCHEMA_ADDON_SNAPSHOT(data)
        except vol.Invalid as err:
            _LOGGER.error("Can't validate %s, snapshot data -> %s",
                          self._id, humanize_error(data, err))
            return False

        # restore data / reload addon
        self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM])

        # check version / restore image
        version = data[ATTR_VERSION]
        if version != self.addon_docker.version:
            image_file = Path(temp, "image.tar")
            if image_file.is_file():
                # snapshot carries the built image -> import it
                await self.addon_docker.import_image(image_file, version)
            else:
                # pull matching version, drop the old image on success
                if await self.addon_docker.install(version):
                    await self.addon_docker.cleanup()
        else:
            await self.addon_docker.stop()

        # restore the addon data folder (renamed from `_restore_data`
        # to avoid shadowing the method of the same name used above)
        def _restore_folder():
            """Replace path_data with the snapshot's data folder."""
            if self.path_data.is_dir():
                shutil.rmtree(str(self.path_data), ignore_errors=True)
            shutil.copytree(str(Path(temp, "data")), str(self.path_data))

        try:
            await self.loop.run_in_executor(None, _restore_folder)
        except shutil.Error as err:
            _LOGGER.error("Can't restore origin data -> %s", err)
            return False

        # run addon
        if data[ATTR_STATE] == STATE_STARTED:
            return await self.start()

    return True

View File

@ -10,65 +10,72 @@ from voluptuous.humanize import humanize_error
from .util import extract_hash_from_path from .util import extract_hash_from_path
from .validate import ( from .validate import (
SCHEMA_ADDON_CONFIG, SCHEMA_REPOSITORY_CONFIG, MAP_VOLUME) SCHEMA_ADDON, SCHEMA_REPOSITORY_CONFIG, MAP_VOLUME)
from ..const import ( from ..const import (
FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON, FILE_HASSIO_ADDONS, ATTR_VERSION, ATTR_SLUG, ATTR_REPOSITORY, ATTR_LOCATON,
REPOSITORY_CORE, REPOSITORY_LOCAL) REPOSITORY_CORE, REPOSITORY_LOCAL, ATTR_USER, ATTR_SYSTEM)
from ..config import Config from ..tools import read_json_file, write_json_file
from ..tools import read_json_file
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
SYSTEM = 'system'
USER = 'user'
RE_VOLUME = re.compile(MAP_VOLUME) RE_VOLUME = re.compile(MAP_VOLUME)
class Data(Config): class Data(object):
"""Hold data for addons inside HassIO.""" """Hold data for addons inside HassIO."""
def __init__(self, config): def __init__(self, config):
"""Initialize data holder.""" """Initialize data holder."""
super().__init__(FILE_HASSIO_ADDONS) self._file = FILE_HASSIO_ADDONS
self._data = {}
self.config = config self.config = config
self._system_data = self._data.get(SYSTEM, {}) self._cache = {}
self._user_data = self._data.get(USER, {}) self._repositories = {}
self._cache_data = {}
self._repositories_data = {} # init or load data
if self._file.is_file():
try:
self._data = read_json_file(self._file)
except (OSError, json.JSONDecodeError):
_LOGGER.warning("Can't read %s", self._file)
self._data = {}
# init data
if not self._data:
self._data[ATTR_USER] = {}
self._data[ATTR_SYSTEM] = {}
def save(self): def save(self):
"""Store data to config file.""" """Store data to config file."""
self._data = { if not write_json_file(self._file, self._data):
USER: self._user_data, _LOGGER.error("Can't store config in %s", self._file)
SYSTEM: self._system_data, return False
} return True
super().save()
@property @property
def user(self): def user(self):
"""Return local addon user data.""" """Return local addon user data."""
return self._user_data return self._data[ATTR_USER]
@property @property
def system(self): def system(self):
"""Return local addon data.""" """Return local addon data."""
return self._system_data return self._data[ATTR_SYSTEM]
@property @property
def cache(self): def cache(self):
"""Return addon data from cache/repositories.""" """Return addon data from cache/repositories."""
return self._cache_data return self._cache
@property @property
def repositories(self): def repositories(self):
"""Return addon data from repositories.""" """Return addon data from repositories."""
return self._repositories_data return self._repositories
def reload(self): def reload(self):
"""Read data from addons repository.""" """Read data from addons repository."""
self._cache_data = {} self._cache = {}
self._repositories_data = {} self._repositories = {}
# read core repository # read core repository
self._read_addons_folder( self._read_addons_folder(
@ -100,7 +107,7 @@ class Data(Config):
read_json_file(repository_file) read_json_file(repository_file)
) )
except OSError: except (OSError, json.JSONDecodeError):
_LOGGER.warning("Can't read repository information from %s", _LOGGER.warning("Can't read repository information from %s",
repository_file) repository_file)
return return
@ -110,7 +117,7 @@ class Data(Config):
return return
# process data # process data
self._repositories_data[slug] = repository_info self._repositories[slug] = repository_info
self._read_addons_folder(path, slug) self._read_addons_folder(path, slug)
def _read_addons_folder(self, path, repository): def _read_addons_folder(self, path, repository):
@ -120,7 +127,7 @@ class Data(Config):
addon_config = read_json_file(addon) addon_config = read_json_file(addon)
# validate # validate
addon_config = SCHEMA_ADDON_CONFIG(addon_config) addon_config = SCHEMA_ADDON(addon_config)
# Generate slug # Generate slug
addon_slug = "{}_{}".format( addon_slug = "{}_{}".format(
@ -129,7 +136,7 @@ class Data(Config):
# store # store
addon_config[ATTR_REPOSITORY] = repository addon_config[ATTR_REPOSITORY] = repository
addon_config[ATTR_LOCATON] = str(addon.parent) addon_config[ATTR_LOCATON] = str(addon.parent)
self._cache_data[addon_slug] = addon_config self._cache[addon_slug] = addon_config
except OSError: except OSError:
_LOGGER.warning("Can't read %s", addon) _LOGGER.warning("Can't read %s", addon)
@ -148,11 +155,11 @@ class Data(Config):
return return
# core repository # core repository
self._repositories_data[REPOSITORY_CORE] = \ self._repositories[REPOSITORY_CORE] = \
builtin_data[REPOSITORY_CORE] builtin_data[REPOSITORY_CORE]
# local repository # local repository
self._repositories_data[REPOSITORY_LOCAL] = \ self._repositories[REPOSITORY_LOCAL] = \
builtin_data[REPOSITORY_LOCAL] builtin_data[REPOSITORY_LOCAL]
def _merge_config(self): def _merge_config(self):
@ -162,16 +169,16 @@ class Data(Config):
""" """
have_change = False have_change = False
for addon in set(self._system_data): for addon in set(self.system):
# detached # detached
if addon not in self._cache_data: if addon not in self._cache:
continue continue
cache = self._cache_data[addon] cache = self._cache[addon]
data = self._system_data[addon] data = self.system[addon]
if data[ATTR_VERSION] == cache[ATTR_VERSION]: if data[ATTR_VERSION] == cache[ATTR_VERSION]:
if data != cache: if data != cache:
self._system_data[addon] = copy.deepcopy(cache) self.system[addon] = copy.deepcopy(cache)
have_change = True have_change = True
if have_change: if have_change:

View File

@ -1,10 +1,12 @@
"""Util addons functions.""" """Util addons functions."""
import hashlib import hashlib
import logging
import re import re
RE_SLUGIFY = re.compile(r'[^a-z0-9_]+')
RE_SHA1 = re.compile(r"[a-f0-9]{8}") RE_SHA1 = re.compile(r"[a-f0-9]{8}")
_LOGGER = logging.getLogger(__name__)
def get_hash_from_repository(name): def get_hash_from_repository(name):
"""Generate a hash from repository.""" """Generate a hash from repository."""
@ -19,3 +21,15 @@ def extract_hash_from_path(path):
if not RE_SHA1.match(repo_dir): if not RE_SHA1.match(repo_dir):
return get_hash_from_repository(repo_dir) return get_hash_from_repository(repo_dir)
return repo_dir return repo_dir
def check_installed(method):
    """Wrap an async Addon method with an installed-state guard.

    The wrapper logs an error and returns False when the addon is not
    installed; otherwise it awaits and returns the wrapped method.
    """
    # local import keeps the module's top-level dependencies unchanged
    from functools import wraps

    @wraps(method)  # preserve __name__/__doc__ for logging/introspection
    async def wrap_check(addon, *args, **kwargs):
        """Return False if not installed or the function."""
        if not addon.is_installed:
            _LOGGER.error("Addon %s is not installed", addon.slug)
            return False
        return await method(addon, *args, **kwargs)

    return wrap_check

View File

@ -7,7 +7,8 @@ from ..const import (
STARTUP_BEFORE, STARTUP_INITIALIZE, BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA, STARTUP_BEFORE, STARTUP_INITIALIZE, BOOT_AUTO, BOOT_MANUAL, ATTR_SCHEMA,
ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER, ATTR_ARCH, ATTR_DEVICES, ATTR_IMAGE, ATTR_URL, ATTR_MAINTAINER, ATTR_ARCH, ATTR_DEVICES,
ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64, ATTR_ENVIRONMENT, ATTR_HOST_NETWORK, ARCH_ARMHF, ARCH_AARCH64, ARCH_AMD64,
ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED) ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_USER, ATTR_STATE, ATTR_SYSTEM,
STATE_STARTED, STATE_STOPPED, ATTR_LOCATON, ATTR_REPOSITORY)
MAP_VOLUME = r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$" MAP_VOLUME = r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$"
@ -41,7 +42,7 @@ def check_network(data):
# pylint: disable=no-value-for-parameter # pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema(vol.All({ SCHEMA_ADDON_CONFIG = vol.Schema({
vol.Required(ATTR_NAME): vol.Coerce(str), vol.Required(ATTR_NAME): vol.Coerce(str),
vol.Required(ATTR_VERSION): vol.Coerce(str), vol.Required(ATTR_VERSION): vol.Coerce(str),
vol.Required(ATTR_SLUG): vol.Coerce(str), vol.Required(ATTR_SLUG): vol.Coerce(str),
@ -68,8 +69,8 @@ SCHEMA_ADDON_CONFIG = vol.Schema(vol.All({
]) ])
}, False), }, False),
vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"), vol.Optional(ATTR_IMAGE): vol.Match(r"\w*/\w*"),
}, check_network), extra=vol.ALLOW_EXTRA) }, extra=vol.ALLOW_EXTRA)
SCHEMA_ADDON = vol.Schema(vol.All(SCHEMA_ADDON_CONFIG, check_network))
# pylint: disable=no-value-for-parameter # pylint: disable=no-value-for-parameter
SCHEMA_REPOSITORY_CONFIG = vol.Schema({ SCHEMA_REPOSITORY_CONFIG = vol.Schema({
@ -79,6 +80,28 @@ SCHEMA_REPOSITORY_CONFIG = vol.Schema({
}, extra=vol.ALLOW_EXTRA) }, extra=vol.ALLOW_EXTRA)
# Per-addon user settings persisted to disk: pinned version, the
# configured options dict, and an optional boot mode.
SCHEMA_ADDON_USER = vol.Schema({
    vol.Required(ATTR_VERSION): vol.Coerce(str),
    vol.Required(ATTR_OPTIONS): dict,
    vol.Optional(ATTR_BOOT):
        vol.In([BOOT_AUTO, BOOT_MANUAL]),
})

# System view of an installed addon: the full addon config extended
# with where it came from (local path and repository slug).
SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
    vol.Required(ATTR_LOCATON): vol.Coerce(str),
    vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
})

# Layout of the addon.json metadata stored inside an addon snapshot
# tarball: user + system data, runtime state, and installed version.
SCHEMA_ADDON_SNAPSHOT = vol.Schema({
    vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
    vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
    vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
    vol.Required(ATTR_VERSION): vol.Coerce(str),
})
def validate_options(raw_schema): def validate_options(raw_schema):
"""Validate schema.""" """Validate schema."""
def validate(struct): def validate(struct):

View File

@ -10,6 +10,7 @@ from .host import APIHost
from .network import APINetwork from .network import APINetwork
from .supervisor import APISupervisor from .supervisor import APISupervisor
from .security import APISecurity from .security import APISecurity
from .snapshots import APISnapshots
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -43,12 +44,12 @@ class RestAPI(object):
self.webapp.router.add_get('/network/info', api_net.info) self.webapp.router.add_get('/network/info', api_net.info)
self.webapp.router.add_post('/network/options', api_net.options) self.webapp.router.add_post('/network/options', api_net.options)
def register_supervisor(self, supervisor, addons, host_control, def register_supervisor(self, supervisor, snapshots, addons, host_control,
websession): websession):
"""Register supervisor function.""" """Register supervisor function."""
api_supervisor = APISupervisor( api_supervisor = APISupervisor(
self.config, self.loop, supervisor, addons, host_control, self.config, self.loop, supervisor, snapshots, addons,
websession) host_control, websession)
self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping) self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
self.webapp.router.add_get('/supervisor/info', api_supervisor.info) self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
@ -100,6 +101,25 @@ class RestAPI(object):
self.webapp.router.add_post('/security/totp', api_security.totp) self.webapp.router.add_post('/security/totp', api_security.totp)
self.webapp.router.add_post('/security/session', api_security.session) self.webapp.router.add_post('/security/session', api_security.session)
def register_snapshots(self, snapshots):
    """Register snapshots function.

    Wires the snapshot REST endpoints onto the aiohttp router.
    """
    api_snapshots = APISnapshots(self.config, self.loop, snapshots)

    # NOTE(review): API.md also documents POST /snapshots/reload, but no
    # such route is registered here — confirm reload is reachable
    # elsewhere (e.g. via the supervisor reload task)
    self.webapp.router.add_post(
        '/snapshots/new/full', api_snapshots.snapshot_full)
    self.webapp.router.add_post(
        '/snapshots/new/partial', api_snapshots.snapshot_partial)

    self.webapp.router.add_get(
        '/snapshots/{snapshot}/info', api_snapshots.info)
    self.webapp.router.add_post(
        '/snapshots/{snapshot}/remove', api_snapshots.remove)
    self.webapp.router.add_post(
        '/snapshots/{snapshot}/restore/full', api_snapshots.restore_full)
    self.webapp.router.add_post(
        '/snapshots/{snapshot}/restore/partial',
        api_snapshots.restore_partial)
def register_panel(self): def register_panel(self):
"""Register panel for homeassistant.""" """Register panel for homeassistant."""
panel = Path(__file__).parents[1].joinpath('panel/hassio-main.html') panel = Path(__file__).parents[1].joinpath('panel/hassio-main.html')

117
hassio/api/snapshots.py Normal file
View File

@ -0,0 +1,117 @@
"""Init file for HassIO snapshot rest api."""
import asyncio
import logging
import voluptuous as vol
from .util import api_process, api_validate
from ..snapshots.validate import ALL_FOLDERS
from ..const import (
ATTR_NAME, ATTR_SLUG, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
ATTR_HOMEASSISTANT, ATTR_VERSION, ATTR_SIZE, ATTR_FOLDERS, ATTR_TYPE,
ATTR_DEVICES)
_LOGGER = logging.getLogger(__name__)
# pylint: disable=no-value-for-parameter
# Body schema for POST /snapshots/{snapshot}/restore/partial.
SCHEMA_RESTORE_PARTIAL = vol.Schema({
    vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
})

# Body schema for POST /snapshots/new/full (optional custom name only).
SCHEMA_SNAPSHOT_FULL = vol.Schema({
    vol.Optional(ATTR_NAME): vol.Coerce(str),
})

# Body schema for POST /snapshots/new/partial: full-snapshot fields
# plus the addon slugs and folder names to include.
SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend({
    vol.Optional(ATTR_ADDONS): [vol.Coerce(str)],
    vol.Optional(ATTR_FOLDERS): [vol.In(ALL_FOLDERS)],
})
class APISnapshots(object):
    """Handle rest api for snapshot functions."""

    def __init__(self, config, loop, snapshots):
        """Initialize network rest api part."""
        self.config = config
        self.loop = loop
        self.snapshots = snapshots

    def _extract_snapshot(self, request):
        """Look up the snapshot addressed in the URL; raise if unknown."""
        slug = request.match_info.get('snapshot')
        snapshot = self.snapshots.get(slug)
        if not snapshot:
            raise RuntimeError("Snapshot not exists")
        return snapshot

    @staticmethod
    def _addons_list(snapshot):
        """Generate a list with addons data."""
        return [
            {
                ATTR_SLUG: entry[ATTR_SLUG],
                ATTR_NAME: entry[ATTR_NAME],
                ATTR_VERSION: entry[ATTR_VERSION],
            }
            for entry in snapshot.addons
        ]

    @api_process
    async def info(self, request):
        """Return snapshot info."""
        snapshot = self._extract_snapshot(request)

        homeassistant = {
            ATTR_VERSION: snapshot.homeassistant_version,
            ATTR_DEVICES: snapshot.homeassistant_devices,
        }
        return {
            ATTR_SLUG: snapshot.slug,
            ATTR_TYPE: snapshot.sys_type,
            ATTR_NAME: snapshot.name,
            ATTR_DATE: snapshot.date,
            ATTR_SIZE: snapshot.size,
            ATTR_HOMEASSISTANT: homeassistant,
            ATTR_ADDONS: self._addons_list(snapshot),
            ATTR_REPOSITORIES: snapshot.repositories,
            ATTR_FOLDERS: snapshot.folders,
        }

    @api_process
    async def snapshot_full(self, request):
        """Full-Snapshot a snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
        # shield so a cancelled request cannot abort a running snapshot
        task = self.snapshots.do_snapshot_full(**body)
        return await asyncio.shield(task, loop=self.loop)

    @api_process
    async def snapshot_partial(self, request):
        """Partial-Snapshot a snapshot."""
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
        task = self.snapshots.do_snapshot_partial(**body)
        return await asyncio.shield(task, loop=self.loop)

    @api_process
    async def restore_full(self, request):
        """Full-Restore a snapshot."""
        snapshot = self._extract_snapshot(request)
        task = self.snapshots.do_restore_full(snapshot)
        return await asyncio.shield(task, loop=self.loop)

    @api_process
    async def restore_partial(self, request):
        """Partial-Restore a snapshot."""
        snapshot = self._extract_snapshot(request)
        body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
        task = self.snapshots.do_restore_partial(snapshot, **body)
        return await asyncio.shield(task, loop=self.loop)

    @api_process
    async def remove(self, request):
        """Remove a snapshot."""
        snapshot = self._extract_snapshot(request)
        return self.snapshots.remove(snapshot)

View File

@ -10,7 +10,7 @@ from ..const import (
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_REPOSITORIES, HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_REPOSITORIES,
ATTR_REPOSITORY, ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_REPOSITORY, ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED,
ATTR_DETACHED, ATTR_SOURCE, ATTR_MAINTAINER, ATTR_URL, ATTR_ARCH, ATTR_DETACHED, ATTR_SOURCE, ATTR_MAINTAINER, ATTR_URL, ATTR_ARCH,
ATTR_BUILD, ATTR_TIMEZONE) ATTR_BUILD, ATTR_TIMEZONE, ATTR_DATE, ATTR_SNAPSHOTS)
from ..tools import validate_timezone from ..tools import validate_timezone
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
@ -30,13 +30,14 @@ SCHEMA_VERSION = vol.Schema({
class APISupervisor(object): class APISupervisor(object):
"""Handle rest api for supervisor functions.""" """Handle rest api for supervisor functions."""
def __init__(self, config, loop, supervisor, addons, host_control, def __init__(self, config, loop, supervisor, snapshots, addons,
websession): host_control, websession):
"""Initialize supervisor rest api part.""" """Initialize supervisor rest api part."""
self.config = config self.config = config
self.loop = loop self.loop = loop
self.supervisor = supervisor self.supervisor = supervisor
self.addons = addons self.addons = addons
self.snapshots = snapshots
self.host_control = host_control self.host_control = host_control
self.websession = websession self.websession = websession
@ -76,6 +77,18 @@ class APISupervisor(object):
return data return data
def _snapshots_list(self):
    """Return a list of available snapshots.

    Each entry carries the snapshot's slug, name and date.
    """
    return [
        {
            ATTR_SLUG: snapshot.slug,
            ATTR_NAME: snapshot.name,
            ATTR_DATE: snapshot.date,
        }
        for snapshot in self.snapshots.list_snapshots
    ]
@api_process @api_process
async def ping(self, request): async def ping(self, request):
"""Return ok for signal that the api is ready.""" """Return ok for signal that the api is ready."""
@ -92,6 +105,7 @@ class APISupervisor(object):
ATTR_TIMEZONE: self.config.timezone, ATTR_TIMEZONE: self.config.timezone,
ATTR_ADDONS: self._addons_list(only_installed=True), ATTR_ADDONS: self._addons_list(only_installed=True),
ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories, ATTR_ADDONS_REPOSITORIES: self.config.addons_repositories,
ATTR_SNAPSHOTS: self._snapshots_list(),
} }
@api_process @api_process
@ -136,6 +150,7 @@ class APISupervisor(object):
"""Reload addons, config ect.""" """Reload addons, config ect."""
tasks = [ tasks = [
self.addons.reload(), self.addons.reload(),
self.snapshots.reload(),
self.config.fetch_update_infos(self.websession), self.config.fetch_update_infos(self.websession),
self.host_control.load() self.host_control.load()
] ]

View File

@ -81,7 +81,8 @@ def api_process_raw(method):
def api_return_error(message=None): def api_return_error(message=None):
"""Return a API error message.""" """Return a API error message."""
_LOGGER.error(message) if message:
_LOGGER.error(message)
return web.json_response({ return web.json_response({
JSON_RESULT: RESULT_ERROR, JSON_RESULT: RESULT_ERROR,

View File

@ -60,12 +60,13 @@ SCHEMA_CONFIG = vol.Schema({
}, extra=vol.REMOVE_EXTRA) }, extra=vol.REMOVE_EXTRA)
class Config(object): class CoreConfig(object):
"""Hold all config data.""" """Hold all core config data."""
def __init__(self, config_file): def __init__(self):
"""Initialize config object.""" """Initialize config object."""
self._file = config_file self.arch = None
self._file = FILE_HASSIO_CONFIG
self._data = {} self._data = {}
# init or load data # init or load data
@ -76,31 +77,32 @@ class Config(object):
_LOGGER.warning("Can't read %s", self._file) _LOGGER.warning("Can't read %s", self._file)
self._data = {} self._data = {}
# validate data
if not self._validate_config():
self._data = SCHEMA_CONFIG({})
def _validate_config(self):
"""Validate config and return True or False."""
# validate data
try:
self._data = SCHEMA_CONFIG(self._data)
except vol.Invalid as ex:
_LOGGER.warning(
"Invalid config %s", humanize_error(self._data, ex))
return False
return True
def save(self): def save(self):
"""Store data to config file.""" """Store data to config file."""
if not self._validate_config():
return False
if not write_json_file(self._file, self._data): if not write_json_file(self._file, self._data):
_LOGGER.error("Can't store config in %s", self._file) _LOGGER.error("Can't store config in %s", self._file)
return False return False
return True return True
class CoreConfig(Config):
"""Hold all core config data."""
def __init__(self):
"""Initialize config object."""
self.arch = None
super().__init__(FILE_HASSIO_CONFIG)
# validate data
try:
self._data = SCHEMA_CONFIG(self._data)
self.save()
except vol.Invalid as ex:
_LOGGER.warning(
"Invalid config %s", humanize_error(self._data, ex))
async def fetch_update_infos(self, websession): async def fetch_update_infos(self, websession):
"""Read current versions from web.""" """Read current versions from web."""
last = await fetch_last_versions(websession, beta=self.upstream_beta) last = await fetch_last_versions(websession, beta=self.upstream_beta)

View File

@ -1,7 +1,7 @@
"""Const file for HassIO.""" """Const file for HassIO."""
from pathlib import Path from pathlib import Path
HASSIO_VERSION = '0.38' HASSIO_VERSION = '0.39'
URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/' URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
'hassio/master/version.json') 'hassio/master/version.json')
@ -15,6 +15,7 @@ HASSIO_DATA = Path("/data")
RUN_UPDATE_INFO_TASKS = 28800 RUN_UPDATE_INFO_TASKS = 28800
RUN_UPDATE_SUPERVISOR_TASKS = 29100 RUN_UPDATE_SUPERVISOR_TASKS = 29100
RUN_RELOAD_ADDONS_TASKS = 28800 RUN_RELOAD_ADDONS_TASKS = 28800
RUN_RELOAD_SNAPSHOTS_TASKS = 72000
RUN_WATCHDOG_HOMEASSISTANT = 15 RUN_WATCHDOG_HOMEASSISTANT = 15
RUN_CLEANUP_API_SESSIONS = 900 RUN_CLEANUP_API_SESSIONS = 900
@ -41,6 +42,7 @@ JSON_MESSAGE = 'message'
RESULT_ERROR = 'error' RESULT_ERROR = 'error'
RESULT_OK = 'ok' RESULT_OK = 'ok'
ATTR_DATE = 'date'
ATTR_ARCH = 'arch' ATTR_ARCH = 'arch'
ATTR_HOSTNAME = 'hostname' ATTR_HOSTNAME = 'hostname'
ATTR_TIMEZONE = 'timezone' ATTR_TIMEZONE = 'timezone'
@ -81,6 +83,13 @@ ATTR_ENVIRONMENT = 'environment'
ATTR_HOST_NETWORK = 'host_network' ATTR_HOST_NETWORK = 'host_network'
ATTR_TMPFS = 'tmpfs' ATTR_TMPFS = 'tmpfs'
ATTR_PRIVILEGED = 'privileged' ATTR_PRIVILEGED = 'privileged'
ATTR_USER = 'user'
ATTR_SYSTEM = 'system'
ATTR_SNAPSHOTS = 'snapshots'
ATTR_HOMEASSISTANT = 'homeassistant'
ATTR_FOLDERS = 'folders'
ATTR_SIZE = 'size'
ATTR_TYPE = 'type'
STARTUP_INITIALIZE = 'initialize' STARTUP_INITIALIZE = 'initialize'
STARTUP_BEFORE = 'before' STARTUP_BEFORE = 'before'
@ -107,3 +116,11 @@ ARCH_I386 = 'i386'
REPOSITORY_CORE = 'core' REPOSITORY_CORE = 'core'
REPOSITORY_LOCAL = 'local' REPOSITORY_LOCAL = 'local'
FOLDER_HOMEASSISTANT = 'homeassistant'
FOLDER_SHARE = 'share'
FOLDER_ADDONS = 'addons/local'
FOLDER_SSL = 'ssl'
SNAPSHOT_FULL = 'full'
SNAPSHOT_PARTIAL = 'partial'

View File

@ -12,10 +12,11 @@ from .const import (
SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS, SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS, RUN_RELOAD_ADDONS_TASKS,
RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT, RUN_UPDATE_SUPERVISOR_TASKS, RUN_WATCHDOG_HOMEASSISTANT,
RUN_CLEANUP_API_SESSIONS, STARTUP_AFTER, STARTUP_BEFORE, RUN_CLEANUP_API_SESSIONS, STARTUP_AFTER, STARTUP_BEFORE,
STARTUP_INITIALIZE) STARTUP_INITIALIZE, RUN_RELOAD_SNAPSHOTS_TASKS)
from .scheduler import Scheduler from .scheduler import Scheduler
from .dock.homeassistant import DockerHomeAssistant from .dock.homeassistant import DockerHomeAssistant
from .dock.supervisor import DockerSupervisor from .dock.supervisor import DockerSupervisor
from .snapshots import SnapshotsManager
from .tasks import ( from .tasks import (
hassio_update, homeassistant_watchdog, homeassistant_setup, hassio_update, homeassistant_watchdog, homeassistant_setup,
api_sessions_cleanup) api_sessions_cleanup)
@ -48,6 +49,10 @@ class HassIO(object):
# init addon system # init addon system
self.addons = AddonManager(config, loop, self.dock) self.addons = AddonManager(config, loop, self.dock)
# init snapshot system
self.snapshots = SnapshotsManager(
config, loop, self.scheduler, self.addons, self.homeassistant)
async def setup(self): async def setup(self):
"""Setup HassIO orchestration.""" """Setup HassIO orchestration."""
# supervisor # supervisor
@ -76,10 +81,12 @@ class HassIO(object):
self.api.register_host(self.host_control) self.api.register_host(self.host_control)
self.api.register_network(self.host_control) self.api.register_network(self.host_control)
self.api.register_supervisor( self.api.register_supervisor(
self.supervisor, self.addons, self.host_control, self.websession) self.supervisor, self.snapshots, self.addons, self.host_control,
self.websession)
self.api.register_homeassistant(self.homeassistant) self.api.register_homeassistant(self.homeassistant)
self.api.register_addons(self.addons) self.api.register_addons(self.addons)
self.api.register_security() self.api.register_security()
self.api.register_snapshots(self.snapshots)
self.api.register_panel() self.api.register_panel()
# schedule api session cleanup # schedule api session cleanup
@ -107,6 +114,10 @@ class HassIO(object):
hassio_update(self.config, self.supervisor, self.websession), hassio_update(self.config, self.supervisor, self.websession),
RUN_UPDATE_SUPERVISOR_TASKS) RUN_UPDATE_SUPERVISOR_TASKS)
# schedule snapshot update tasks
self.scheduler.register_task(
self.snapshots.reload, RUN_RELOAD_SNAPSHOTS_TASKS, now=True)
# start addon mark as initialize # start addon mark as initialize
await self.addons.auto_boot(STARTUP_INITIALIZE) await self.addons.auto_boot(STARTUP_INITIALIZE)
@ -147,7 +158,7 @@ class HassIO(object):
async def stop(self, exit_code=0): async def stop(self, exit_code=0):
"""Stop a running orchestration.""" """Stop a running orchestration."""
# don't process scheduler anymore # don't process scheduler anymore
self.scheduler.stop() self.scheduler.suspend = True
# process stop tasks # process stop tasks
self.websession.close() self.websession.close()

View File

@ -199,7 +199,7 @@ class DockerBase(object):
container.remove(force=True) container.remove(force=True)
async def remove(self): async def remove(self):
"""Remove docker container.""" """Remove docker images."""
if self._lock.locked(): if self._lock.locked():
_LOGGER.error("Can't excute remove while a task is in progress") _LOGGER.error("Can't excute remove while a task is in progress")
return False return False
@ -208,7 +208,7 @@ class DockerBase(object):
return await self.loop.run_in_executor(None, self._remove) return await self.loop.run_in_executor(None, self._remove)
def _remove(self): def _remove(self):
"""remove docker container. """remove docker images.
Need run inside executor. Need run inside executor.
""" """
@ -231,6 +231,9 @@ class DockerBase(object):
_LOGGER.warning("Can't remove image %s -> %s", self.image, err) _LOGGER.warning("Can't remove image %s -> %s", self.image, err)
return False return False
# clean metadata
self.version = None
self.arch = None
return True return True
async def update(self, tag): async def update(self, tag):

View File

@ -5,6 +5,7 @@ from pathlib import Path
import shutil import shutil
import docker import docker
import requests
from . import DockerBase from . import DockerBase
from .util import dockerfile_template from .util import dockerfile_template
@ -135,15 +136,6 @@ class DockerAddon(DockerBase):
return super()._install(tag) return super()._install(tag)
async def build(self, tag):
"""Build a docker container."""
if self._lock.locked():
_LOGGER.error("Can't excute build while a task is in progress")
return False
async with self._lock:
return await self.loop.run_in_executor(None, self._build, tag)
def _build(self, tag): def _build(self, tag):
"""Build a docker container. """Build a docker container.
@ -157,7 +149,7 @@ class DockerAddon(DockerBase):
shutil.copytree(str(source), str(build_dir)) shutil.copytree(str(source), str(build_dir))
except shutil.Error as err: except shutil.Error as err:
_LOGGER.error("Can't copy %s to temporary build folder -> %s", _LOGGER.error("Can't copy %s to temporary build folder -> %s",
source, build_dir) source, err)
return False return False
# prepare Dockerfile # prepare Dockerfile
@ -189,6 +181,68 @@ class DockerAddon(DockerBase):
finally: finally:
shutil.rmtree(str(build_dir), ignore_errors=True) shutil.rmtree(str(build_dir), ignore_errors=True)
async def export_image(self, path):
"""Export current images into a tar file."""
if self._lock.locked():
_LOGGER.error("Can't excute export while a task is in progress")
return False
async with self._lock:
return await self.loop.run_in_executor(
None, self._export_image, path)
def _export_image(self, tar_file):
"""Export current images into a tar file.
Need run inside executor.
"""
try:
image = self.dock.api.get_image(self.image)
except docker.errors.DockerException as err:
_LOGGER.error("Can't fetch image %s -> %s", self.image, err)
return False
try:
with tar_file.open("wb") as write_tar:
for chunk in image.stream():
write_tar.write(chunk)
except (OSError, requests.exceptions.ReadTimeout) as err:
_LOGGER.error("Can't write tar file %s -> %s", tar_file, err)
return False
_LOGGER.info("Export image %s to %s", self.image, tar_file)
return True
async def import_image(self, path, tag):
"""Import a tar file as image."""
if self._lock.locked():
_LOGGER.error("Can't excute import while a task is in progress")
return False
async with self._lock:
return await self.loop.run_in_executor(
None, self._import_image, path, tag)
def _import_image(self, tar_file, tag):
"""Import a tar file as image.
Need run inside executor.
"""
try:
with tar_file.open("rb") as read_tar:
self.dock.api.load_image(read_tar)
image = self.dock.images.get(self.image)
image.tag(self.image, tag=tag)
except (docker.errors.DockerException, OSError) as err:
_LOGGER.error("Can't import image %s -> %s", self.image, err)
return False
_LOGGER.info("Import image %s and tag %s", tar_file, tag)
self.process_metadata(image.attrs, force=True)
self._cleanup()
return True
def _restart(self): def _restart(self):
"""Restart docker container. """Restart docker container.
@ -200,8 +254,11 @@ class DockerAddon(DockerBase):
except docker.errors.DockerException: except docker.errors.DockerException:
return False return False
_LOGGER.info("Restart %s", self.image) # for restart it need to run!
if container.status != 'running':
return False
_LOGGER.info("Restart %s", self.image)
with suppress(docker.errors.DockerException): with suppress(docker.errors.DockerException):
container.stop(timeout=15) container.stop(timeout=15)

View File

@ -16,11 +16,7 @@ class Scheduler(object):
"""Initialize task schedule.""" """Initialize task schedule."""
self.loop = loop self.loop = loop
self._data = {} self._data = {}
self._stop = False self.suspend = False
def stop(self):
"""Stop to execute tasks in scheduler."""
self._stop = True
def register_task(self, coro_callback, seconds, repeat=True, def register_task(self, coro_callback, seconds, repeat=True,
now=False): now=False):
@ -51,11 +47,8 @@ class Scheduler(object):
"""Run a scheduled task.""" """Run a scheduled task."""
data = self._data.pop(idx) data = self._data.pop(idx)
# stop execute tasks if not self.suspend:
if self._stop: self.loop.create_task(data[CALL]())
return
self.loop.create_task(data[CALL]())
if data[REPEAT]: if data[REPEAT]:
task = self.loop.call_later(data[SEC], self._run_task, idx) task = self.loop.call_later(data[SEC], self._run_task, idx)

View File

@ -0,0 +1,313 @@
"""Snapshot system control."""
import asyncio
from datetime import datetime
import logging
from pathlib import Path
import tarfile
from .snapshot import Snapshot
from .util import create_slug
from ..const import (
ATTR_SLUG, FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
_LOGGER = logging.getLogger(__name__)
class SnapshotsManager(object):
    """Manage hassio snapshots.

    Creates, loads, removes and restores snapshot tar files stored in
    ``config.path_backup``.  Snapshot/restore operations are serialized
    through an asyncio lock and suspend the scheduler while running so
    periodic tasks (updates, reloads) cannot interfere.
    """

    def __init__(self, config, loop, sheduler, addons, homeassistant):
        """Initialize a snapshot manager.

        sheduler: the Scheduler instance (suspended during operations).
        addons / homeassistant: managers used to stop/start components.
        """
        self.config = config
        self.loop = loop
        self.sheduler = sheduler
        self.addons = addons
        self.homeassistant = homeassistant
        self.snapshots = {}  # slug -> Snapshot object
        # serialize snapshot/restore operations
        self._lock = asyncio.Lock(loop=loop)

    @property
    def list_snapshots(self):
        """Return a set of all known snapshot objects."""
        return set(self.snapshots.values())

    def get(self, slug):
        """Return snapshot object for slug, or None if unknown."""
        return self.snapshots.get(slug)

    def _create_snapshot(self, name, sys_type):
        """Initialize a new snapshot object from name.

        Returns the Snapshot with metadata filled in; nothing is written
        to disk until the snapshot context is entered and exited.
        """
        date_str = str(datetime.utcnow())
        slug = create_slug(name, date_str)
        tar_file = Path(self.config.path_backup, "{}.tar".format(slug))

        # init object
        snapshot = Snapshot(self.config, self.loop, tar_file)
        snapshot.create(slug, name, date_str, sys_type)

        # set general data
        snapshot.homeassistant_version = self.homeassistant.version
        snapshot.homeassistant_devices = self.config.homeassistant_devices
        snapshot.repositories = self.config.addons_repositories

        return snapshot

    async def reload(self):
        """Load existing backups from the backup directory."""
        self.snapshots = {}

        async def _load_snapshot(tar_file):
            """Internal function to load snapshot."""
            snapshot = Snapshot(self.config, self.loop, tar_file)
            if await snapshot.load():
                self.snapshots[snapshot.slug] = snapshot

        tasks = [_load_snapshot(tar_file) for tar_file in
                 self.config.path_backup.glob("*.tar")]

        _LOGGER.info("Found %d snapshot files", len(tasks))
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    def remove(self, snapshot):
        """Remove a snapshot file and drop it from the registry.

        Returns True on success, False if the file could not be deleted.
        """
        try:
            snapshot.tar_file.unlink()
            self.snapshots.pop(snapshot.slug, None)
        except OSError as err:
            _LOGGER.error("Can't remove snapshot %s -> %s", snapshot.slug, err)
            return False

        return True

    async def do_snapshot_full(self, name=""):
        """Create a full snapshot (all installed addons + all folders)."""
        if self._lock.locked():
            _LOGGER.error("It is already a snapshot/restore process running")
            return False

        snapshot = self._create_snapshot(name, SNAPSHOT_FULL)
        _LOGGER.info("Full-Snapshot %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # snapshot addons
                tasks = []
                for addon in self.addons.list_addons:
                    if not addon.is_installed:
                        continue
                    tasks.append(snapshot.import_addon(addon))

                if tasks:
                    _LOGGER.info("Full-Snapshot %s run %d addons",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # snapshot folders
                _LOGGER.info("Full-Snapshot %s store folders", snapshot.slug)
                await snapshot.store_folders()

            _LOGGER.info("Full-Snapshot %s done", snapshot.slug)
            self.snapshots[snapshot.slug] = snapshot
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Full-Snapshot %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_snapshot_partial(self, name="", addons=None, folders=None):
        """Create a partial snapshot of the given addon slugs and folders."""
        if self._lock.locked():
            _LOGGER.error("It is already a snapshot/restore process running")
            return False

        addons = addons or []
        folders = folders or []
        snapshot = self._create_snapshot(name, SNAPSHOT_PARTIAL)

        _LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # snapshot addons
                tasks = []
                for slug in addons:
                    addon = self.addons.get(slug)
                    # BUGFIX: guard against unknown slugs -- addon may be
                    # None, which previously raised AttributeError here.
                    if addon and addon.is_installed:
                        tasks.append(snapshot.import_addon(addon))

                if tasks:
                    _LOGGER.info("Partial-Snapshot %s run %d addons",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # snapshot folders
                _LOGGER.info("Partial-Snapshot %s store folders %s",
                             snapshot.slug, folders)
                await snapshot.store_folders(folders)

            _LOGGER.info("Partial-Snapshot %s done", snapshot.slug)
            self.snapshots[snapshot.slug] = snapshot
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            _LOGGER.info("Partial-Snapshot %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_restore_full(self, snapshot):
        """Restore a full snapshot: stop everything, restore, restart."""
        if self._lock.locked():
            _LOGGER.error("It is already a snapshot/restore process running")
            return False

        if snapshot.sys_type != SNAPSHOT_FULL:
            _LOGGER.error(
                "Full-Restore %s is only a partial snapshot!", snapshot.slug)
            return False

        _LOGGER.info("Full-Restore %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                # stop system
                tasks = []
                tasks.append(self.homeassistant.stop())

                for addon in self.addons.list_addons:
                    if addon.is_installed:
                        tasks.append(addon.stop())

                await asyncio.wait(tasks, loop=self.loop)

                # restore folders
                _LOGGER.info("Full-Restore %s restore folders", snapshot.slug)
                await snapshot.restore_folders()

                # start homeassistant restore (runs while addons restore)
                self.config.homeassistant_devices = \
                    snapshot.homeassistant_devices
                task_hass = self.loop.create_task(
                    self.homeassistant.update(snapshot.homeassistant_version))

                # restore repositories
                await self.addons.load_repositories(snapshot.repositories)

                # restore addons: remove what is installed but not in the
                # snapshot, re-import everything the snapshot contains
                tasks = []
                actual_addons = \
                    set(addon.slug for addon in self.addons.list_addons
                        if addon.is_installed)
                restore_addons = \
                    set(data[ATTR_SLUG] for data in snapshot.addons)
                remove_addons = actual_addons - restore_addons

                _LOGGER.info("Full-Restore %s restore addons %s, remove %s",
                             snapshot.slug, restore_addons, remove_addons)

                for slug in remove_addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(addon.uninstall())
                    else:
                        _LOGGER.warning("Can't remove addon %s", slug)

                for slug in restore_addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(snapshot.export_addon(addon))
                    else:
                        _LOGGER.warning("Can't restore addon %s", slug)

                if tasks:
                    _LOGGER.info("Full-Restore %s restore addons tasks %d",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # finish homeassistant task
                _LOGGER.info("Full-Restore %s wait until homeassistant ready",
                             snapshot.slug)
                await task_hass
                await self.homeassistant.run()

            _LOGGER.info("Full-Restore %s done", snapshot.slug)
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            # BUGFIX: was logging the undefined name `slug` (NameError)
            _LOGGER.info("Full-Restore %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

    async def do_restore_partial(self, snapshot, homeassistant=False,
                                 addons=None, folders=None):
        """Restore selected addons/folders (and optionally homeassistant)."""
        if self._lock.locked():
            _LOGGER.error("It is already a snapshot/restore process running")
            return False

        addons = addons or []
        folders = folders or []

        _LOGGER.info("Partial-Restore %s start", snapshot.slug)
        try:
            self.sheduler.suspend = True
            await self._lock.acquire()

            async with snapshot:
                tasks = []

                if FOLDER_HOMEASSISTANT in folders:
                    await self.homeassistant.stop()

                if folders:
                    _LOGGER.info("Partial-Restore %s restore folders %s",
                                 snapshot.slug, folders)
                    await snapshot.restore_folders(folders)

                if homeassistant:
                    self.config.homeassistant_devices = \
                        snapshot.homeassistant_devices
                    tasks.append(self.homeassistant.update(
                        snapshot.homeassistant_version))

                for slug in addons:
                    addon = self.addons.get(slug)
                    if addon:
                        tasks.append(snapshot.export_addon(addon))
                    else:
                        _LOGGER.warning("Can't restore addon %s", slug)

                if tasks:
                    _LOGGER.info("Partial-Restore %s run %d tasks",
                                 snapshot.slug, len(tasks))
                    await asyncio.wait(tasks, loop=self.loop)

                # make sure homeassistant run agen
                await self.homeassistant.run()

            _LOGGER.info("Partial-Restore %s done", snapshot.slug)
            return True

        except (OSError, ValueError, tarfile.TarError) as err:
            # BUGFIX: was logging the undefined/loop-scoped name `slug`
            _LOGGER.info("Partial-Restore %s error -> %s", snapshot.slug, err)
            return False

        finally:
            self.sheduler.suspend = False
            self._lock.release()

View File

@ -0,0 +1,271 @@
"""Represent a snapshot file."""
import asyncio
import json
import logging
from pathlib import Path
import tarfile
from tempfile import TemporaryDirectory
import voluptuous as vol
from voluptuous.humanize import humanize_error
from .validate import SCHEMA_SNAPSHOT, ALL_FOLDERS
from .util import remove_folder
from ..const import (
ATTR_SLUG, ATTR_NAME, ATTR_DATE, ATTR_ADDONS, ATTR_REPOSITORIES,
ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_VERSION, ATTR_TYPE, ATTR_DEVICES)
from ..tools import write_json_file
_LOGGER = logging.getLogger(__name__)
class Snapshot(object):
    """A single hassio snapshot.

    Wraps one snapshot tar archive: metadata is validated against
    SCHEMA_SNAPSHOT and stored as ``snapshot.json`` inside the archive;
    addon images and folder backups are nested ``*.tar.gz`` members.
    Use the instance as an async context manager to build a new archive
    or to extract an existing one into a temporary directory.
    """

    def __init__(self, config, loop, tar_file):
        """Initialize a snapshot.

        tar_file: pathlib.Path of the archive (need not exist yet).
        """
        self.loop = loop
        self.config = config
        self.tar_file = tar_file
        self._data = {}    # snapshot.json content (validated on load/close)
        self._tmp = None   # TemporaryDirectory while the context is open

    @property
    def slug(self):
        """Return snapshot slug (None before create()/load())."""
        return self._data.get(ATTR_SLUG)

    @property
    def sys_type(self):
        """Return snapshot type (full or partial)."""
        return self._data.get(ATTR_TYPE)

    @property
    def name(self):
        """Return snapshot name."""
        return self._data[ATTR_NAME]

    @property
    def date(self):
        """Return snapshot creation date string."""
        return self._data[ATTR_DATE]

    @property
    def addons(self):
        """Return list of addon metadata dicts stored in the snapshot."""
        return self._data[ATTR_ADDONS]

    @property
    def folders(self):
        """Return list of saved folders."""
        return self._data[ATTR_FOLDERS]

    @property
    def repositories(self):
        """Return list of addon repository URLs."""
        return self._data[ATTR_REPOSITORIES]

    @repositories.setter
    def repositories(self, value):
        """Set list of addon repository URLs."""
        self._data[ATTR_REPOSITORIES] = value

    @property
    def homeassistant_version(self):
        """Return snapshot homeassistant version."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_VERSION)

    @homeassistant_version.setter
    def homeassistant_version(self, value):
        """Set snapshot homeassistant version."""
        self._data[ATTR_HOMEASSISTANT][ATTR_VERSION] = value

    @property
    def homeassistant_devices(self):
        """Return snapshot homeassistant devices."""
        return self._data[ATTR_HOMEASSISTANT].get(ATTR_DEVICES)

    @homeassistant_devices.setter
    def homeassistant_devices(self, value):
        """Set snapshot homeassistant devices."""
        self._data[ATTR_HOMEASSISTANT][ATTR_DEVICES] = value

    @property
    def size(self):
        """Return snapshot archive size in megabytes (0 if no file)."""
        if not self.tar_file.is_file():
            return 0
        return self.tar_file.stat().st_size / 1048576  # calc mbyte

    def create(self, slug, name, date, sys_type):
        """Initialize metadata for a brand-new snapshot."""
        # init metadata
        self._data[ATTR_SLUG] = slug
        self._data[ATTR_NAME] = name
        self._data[ATTR_DATE] = date
        self._data[ATTR_TYPE] = sys_type

        # init other constructs
        self._data[ATTR_HOMEASSISTANT] = {}
        self._data[ATTR_ADDONS] = []
        self._data[ATTR_REPOSITORIES] = []
        self._data[ATTR_FOLDERS] = []

    async def load(self):
        """Read and validate snapshot.json from the tar file.

        Returns True on success, False on any read/parse/validation
        error (each case is logged).
        """
        if not self.tar_file.is_file():
            _LOGGER.error("No tarfile %s", self.tar_file)
            return False

        def _load_file():
            """Read snapshot.json."""
            with tarfile.open(self.tar_file, "r:") as snapshot:
                json_file = snapshot.extractfile("./snapshot.json")
                return json_file.read()

        # read snapshot.json (blocking tar access runs in executor)
        try:
            raw = await self.loop.run_in_executor(None, _load_file)
        except (tarfile.TarError, KeyError) as err:
            _LOGGER.error(
                "Can't read snapshot tarfile %s -> %s", self.tar_file, err)
            return False

        # parse data
        try:
            raw_dict = json.loads(raw)
        except json.JSONDecodeError as err:
            _LOGGER.error("Can't read data for %s -> %s", self.tar_file, err)
            return False

        # validate
        try:
            self._data = SCHEMA_SNAPSHOT(raw_dict)
        except vol.Invalid as err:
            _LOGGER.error("Can't validate data for %s -> %s", self.tar_file,
                          humanize_error(raw_dict, err))
            return False

        return True

    async def __aenter__(self):
        """Async context to open a snapshot.

        NOTE(review): returns self only on the new-snapshot path; when an
        existing archive is extracted this falls through and returns None,
        so callers must use ``async with snapshot:`` without ``as``.
        """
        self._tmp = TemporaryDirectory(dir=str(self.config.path_tmp))

        # create a snapshot
        if not self.tar_file.is_file():
            return self

        # extract a exists snapshot
        def _extract_snapshot():
            """Extract a snapshot."""
            with tarfile.open(self.tar_file, "r:") as tar:
                tar.extractall(path=self._tmp.name)

        await self.loop.run_in_executor(None, _extract_snapshot)

    async def __aexit__(self, exception_type, exception_value, traceback):
        """Async context to close a snapshot.

        For a new snapshot this validates the metadata, writes
        snapshot.json into the temp dir and packs everything into the
        tar file; in all cases the temp dir is cleaned up.
        """
        # exists snapshot or exception on build
        if self.tar_file.is_file() or exception_type is not None:
            return self._tmp.cleanup()

        # validate data
        try:
            self._data = SCHEMA_SNAPSHOT(self._data)
        except vol.Invalid as err:
            _LOGGER.error("Invalid data for %s -> %s", self.tar_file,
                          humanize_error(self._data, err))
            raise ValueError("Invalid config") from None

        # new snapshot, build it
        def _create_snapshot():
            """Create a new snapshot."""
            with tarfile.open(self.tar_file, "w:") as tar:
                tar.add(self._tmp.name, arcname=".")

        # snapshot.json must be written before the tar is packed so it
        # ends up inside the archive
        if write_json_file(Path(self._tmp.name, "snapshot.json"), self._data):
            await self.loop.run_in_executor(None, _create_snapshot)
        else:
            _LOGGER.error("Can't write snapshot.json")

        self._tmp.cleanup()
        self._tmp = None

    async def import_addon(self, addon):
        """Add an addon into the snapshot (store its backup tarball)."""
        snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))

        if not await addon.snapshot(snapshot_file):
            _LOGGER.error("Can't make snapshot from %s", addon.slug)
            return False

        # store to config
        self._data[ATTR_ADDONS].append({
            ATTR_SLUG: addon.slug,
            ATTR_NAME: addon.name,
            ATTR_VERSION: addon.version_installed,
        })

        return True

    async def export_addon(self, addon):
        """Restore an addon from the snapshot's stored tarball."""
        snapshot_file = Path(self._tmp.name, "{}.tar.gz".format(addon.slug))

        if not await addon.restore(snapshot_file):
            _LOGGER.error("Can't restore snapshot for %s", addon.slug)
            return False

        return True

    async def store_folders(self, folder_list=None):
        """Backup hassio data folders into the snapshot.

        folder_list defaults to ALL_FOLDERS; folders are archived
        concurrently in executor threads.
        """
        folder_list = folder_list or ALL_FOLDERS

        def _folder_save(name):
            """Intenal function to snapshot a folder."""
            # "/" in folder names (e.g. addons/local) is not tar-member safe
            slug_name = name.replace("/", "_")
            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
            origin_dir = Path(self.config.path_hassio, name)

            try:
                with tarfile.open(snapshot_tar, "w:gz",
                                  compresslevel=1) as tar_file:
                    tar_file.add(origin_dir, arcname=".")
                self._data[ATTR_FOLDERS].append(name)
            except tarfile.TarError as err:
                _LOGGER.warning("Can't snapshot folder %s -> %s", name, err)

        # run tasks
        tasks = [self.loop.run_in_executor(None, _folder_save, folder)
                 for folder in folder_list]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

    async def restore_folders(self, folder_list=None):
        """Restore hassio data folders from the snapshot.

        Each target folder's contents are wiped before extraction.
        """
        folder_list = folder_list or ALL_FOLDERS

        def _folder_restore(name):
            """Intenal function to restore a folder."""
            slug_name = name.replace("/", "_")
            snapshot_tar = Path(self._tmp.name, "{}.tar.gz".format(slug_name))
            origin_dir = Path(self.config.path_hassio, name)

            # clean old stuff
            if origin_dir.is_dir():
                remove_folder(origin_dir)

            try:
                with tarfile.open(snapshot_tar, "r:gz") as tar_file:
                    tar_file.extractall(path=origin_dir)
            except tarfile.TarError as err:
                _LOGGER.warning("Can't restore folder %s -> %s", name, err)

        # run tasks
        tasks = [self.loop.run_in_executor(None, _folder_restore, folder)
                 for folder in folder_list]
        if tasks:
            await asyncio.wait(tasks, loop=self.loop)

21
hassio/snapshots/util.py Normal file
View File

@ -0,0 +1,21 @@
"""Util addons functions."""
import hashlib
import shutil
def create_slug(name, date_str):
    """Generate a deterministic 8-character slug for a snapshot.

    The slug is the first 8 hex digits of the SHA-1 of the lowercased
    "<date> - <name>" string, so the same name+date always maps to the
    same slug.
    """
    raw = "{} - {}".format(date_str, name)
    digest = hashlib.sha1(raw.lower().encode()).hexdigest()
    return digest[:8]
def remove_folder(folder):
    """Delete every entry inside *folder*, keeping the folder itself.

    Removal is best effort: entries that cannot be deleted are skipped
    silently.
    """
    for entry in folder.iterdir():
        try:
            if not entry.is_dir():
                entry.unlink()
            else:
                shutil.rmtree(str(entry), ignore_errors=True)
        except (OSError, shutil.Error):
            # best effort: leave undeletable entries in place
            pass

View File

@ -0,0 +1,30 @@
"""Validate some things around restore."""
import voluptuous as vol
from ..const import (
ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_NAME, ATTR_SLUG, ATTR_DATE,
ATTR_VERSION, ATTR_HOMEASSISTANT, ATTR_FOLDERS, ATTR_TYPE, ATTR_DEVICES,
FOLDER_SHARE, FOLDER_HOMEASSISTANT, FOLDER_ADDONS, FOLDER_SSL,
SNAPSHOT_FULL, SNAPSHOT_PARTIAL)
# Folders eligible for inclusion in a snapshot (FOLDER_ADDONS is the
# local addon repository "addons/local"; see const definitions).
ALL_FOLDERS = [FOLDER_HOMEASSISTANT, FOLDER_SHARE, FOLDER_ADDONS, FOLDER_SSL]

# pylint: disable=no-value-for-parameter
# Schema of the snapshot.json metadata stored inside each snapshot tar.
# ALLOW_EXTRA keeps older supervisors tolerant of fields added later.
SCHEMA_SNAPSHOT = vol.Schema({
    vol.Required(ATTR_SLUG): vol.Coerce(str),
    vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]),
    vol.Required(ATTR_NAME): vol.Coerce(str),
    vol.Required(ATTR_DATE): vol.Coerce(str),
    vol.Required(ATTR_HOMEASSISTANT): vol.Schema({
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        # device names must not contain a path separator
        vol.Optional(ATTR_DEVICES, default=[]): [vol.Match(r"^[^/]*$")],
    }),
    vol.Optional(ATTR_FOLDERS, default=[]): [vol.In(ALL_FOLDERS)],
    vol.Optional(ATTR_ADDONS, default=[]): [vol.Schema({
        vol.Required(ATTR_SLUG): vol.Coerce(str),
        vol.Required(ATTR_NAME): vol.Coerce(str),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
    })],
    vol.Optional(ATTR_REPOSITORIES, default=[]): [vol.Url()],
}, extra=vol.ALLOW_EXTRA)

View File

@ -1,5 +1,5 @@
{ {
"hassio": "0.38", "hassio": "0.39",
"homeassistant": "0.48.1", "homeassistant": "0.48.1",
"resinos": "0.8", "resinos": "0.8",
"resinhup": "0.1", "resinhup": "0.1",