Fix version merge conflict
commit cbf79f1fab
API.md (16 changed lines)
@@ -430,26 +430,10 @@ For reset custom network/audio settings, set it `null`.
 - POST `/addons/{addon}/install`
 
-Optional:
-
-```json
-{
-    "version": "VERSION"
-}
-```
-
 - POST `/addons/{addon}/uninstall`
 
 - POST `/addons/{addon}/update`
 
-Optional:
-
-```json
-{
-    "version": "VERSION"
-}
-```
-
 - GET `/addons/{addon}/logs`
 
 Output is the raw Docker log.
 
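With the optional version body gone, both endpoints are now called without a payload and the Supervisor always installs or updates to the latest known add-on version. A minimal client sketch, assuming the API is reachable at `http://hassio` and authenticated with an `X-HASSIO-KEY` header (both are illustrative assumptions, not taken from this diff; the add-on slug is a placeholder too):

```python
import asyncio

import aiohttp

# Assumed endpoint and auth header for illustration only; adjust to however
# the Supervisor API is exposed in your environment.
API_URL = "http://hassio"
HEADERS = {"X-HASSIO-KEY": "<supervisor-token>"}


async def update_addon(slug):
    """Trigger an add-on update; no JSON body is sent anymore."""
    async with aiohttp.ClientSession(headers=HEADERS) as session:
        url = "{}/addons/{}/update".format(API_URL, slug)
        async with session.post(url) as resp:
            return await resp.json()


# "core_mosquitto" is a placeholder slug.
print(asyncio.run(update_addon("core_mosquitto")))
```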
@@ -447,7 +447,7 @@ class Addon(object):
             return False
         return True
 
-    async def install(self, version=None):
+    async def install(self):
         """Install a addon."""
         if self.config.arch not in self.supported_arch:
             _LOGGER.error(
@@ -463,11 +463,10 @@ class Addon(object):
                 "Create Home-Assistant addon data folder %s", self.path_data)
             self.path_data.mkdir()
 
-        version = version or self.last_version
-        if not await self.docker.install(version):
+        if not await self.docker.install(self.last_version):
             return False
 
-        self._set_install(version)
+        self._set_install(self.last_version)
         return True
 
     @check_installed
@@ -510,19 +509,18 @@ class Addon(object):
         return self.docker.stop()
 
     @check_installed
-    async def update(self, version=None):
+    async def update(self):
         """Update addon."""
-        version = version or self.last_version
         last_state = await self.state()
 
-        if version == self.version_installed:
+        if self.last_version == self.version_installed:
             _LOGGER.warning(
-                "Addon %s is already installed in %s", self._id, version)
+                "No update available for Addon %s", self._id)
             return False
 
-        if not await self.docker.update(version):
+        if not await self.docker.update(self.last_version):
             return False
-        self._set_update(version)
+        self._set_update(self.last_version)
 
         # restore state
         if last_state == STATE_STARTED:
 
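Taken together, the three hunks above pin `Addon.install()` and `Addon.update()` to `self.last_version`: callers can no longer request a specific version, and `update()` bails out when the latest version is already installed. A standalone sketch of that control flow with toy stand-ins (not the real `Addon` or Docker objects):

```python
import asyncio
import logging

logging.basicConfig(level=logging.INFO)
_LOGGER = logging.getLogger(__name__)


class FakeAddon:
    """Toy stand-in that mirrors the new update() control flow."""

    def __init__(self, installed, latest):
        self.version_installed = installed
        self.last_version = latest

    async def _docker_update(self, version):
        # The real code calls self.docker.update(version); stubbed here.
        _LOGGER.info("Pulling image for version %s", version)
        return True

    async def update(self):
        """Update the add-on to the latest known version only."""
        if self.last_version == self.version_installed:
            _LOGGER.warning("No update available for addon")
            return False

        if not await self._docker_update(self.last_version):
            return False

        self.version_installed = self.last_version
        return True


asyncio.run(FakeAddon("1.0", "1.1").update())   # pulls 1.1
asyncio.run(FakeAddon("1.1", "1.1").update())   # warns: no update available
```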
@@ -1,4 +1,5 @@
 """Validate addons options schema."""
+import logging
 import re
 
 import voluptuous as vol
@@ -17,6 +18,8 @@ from ..const import (
     ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN)
 from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL
 
+_LOGGER = logging.getLogger(__name__)
+
 RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$")
 
@@ -176,8 +179,10 @@ def validate_options(raw_schema):
 
         # read options
         for key, value in struct.items():
+            # Ignore unknown options / remove from list
             if key not in raw_schema:
-                raise vol.Invalid("Unknown options {}.".format(key))
+                _LOGGER.warning("Unknown options %s", key)
+                continue
 
             typ = raw_schema[key]
             try:
@@ -202,42 +207,38 @@ def validate_options(raw_schema):
 # pylint: disable=no-value-for-parameter
 def _single_validate(typ, value, key):
     """Validate a single element."""
-    try:
-        # if required argument
-        if value is None:
-            raise vol.Invalid("Missing required option '{}'.".format(key))
+    # if required argument
+    if value is None:
+        raise vol.Invalid("Missing required option '{}'.".format(key))
 
     # parse extend data from type
     match = RE_SCHEMA_ELEMENT.match(typ)
 
     # prepare range
     range_args = {}
     for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
         group_value = match.group(group_name)
         if group_value:
             range_args[group_name[2:]] = float(group_value)
 
     if typ.startswith(V_STR):
         return str(value)
     elif typ.startswith(V_INT):
         return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
     elif typ.startswith(V_FLOAT):
         return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
     elif typ.startswith(V_BOOL):
         return vol.Boolean()(value)
     elif typ.startswith(V_EMAIL):
         return vol.Email()(value)
     elif typ.startswith(V_URL):
         return vol.Url()(value)
     elif typ.startswith(V_PORT):
         return NETWORK_PORT(value)
     elif typ.startswith(V_MATCH):
         return vol.Match(match.group('match'))(str(value))
 
     raise vol.Invalid("Fatal error for {} type {}".format(key, typ))
-    except ValueError:
-        raise vol.Invalid(
-            "Type {} error for '{}' on {}.".format(typ, value, key)) from None
 
 
 def _nested_validate_list(typ, data_list, key):
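The unchanged branch ladder above relies on voluptuous validators being plain callables. As a reference point, here is how those coercion expressions behave on their own (plain voluptuous, nothing hassio-specific; the sample values are made up):

```python
import voluptuous as vol

# vol.All chains validators; vol.Coerce converts, vol.Range bounds the result.
int_in_range = vol.All(vol.Coerce(int), vol.Range(min=0, max=65535))
print(int_in_range("8123"))        # -> 8123

# vol.Boolean accepts common truthy/falsy spellings.
print(vol.Boolean()("on"))         # -> True

# Out-of-range or non-numeric input raises vol.Invalid, which the schema
# machinery above turns into a validation error for that option key.
try:
    int_in_range("70000")
except vol.Invalid as err:
    print("rejected:", err)
```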
@@ -249,11 +250,19 @@ def _nested_validate_list(typ, data_list, key):
         if isinstance(typ, dict):
             c_options = {}
             for c_key, c_value in element.items():
+                # Ignore unknown options / remove from list
                 if c_key not in typ:
-                    raise vol.Invalid(
-                        "Unknown nested options {}".format(c_key))
+                    _LOGGER.warning("Unknown options %s", c_key)
+                    continue
 
                 c_options[c_key] = _single_validate(typ[c_key], c_value, c_key)
 
+            # check if all options are exists
+            missing = set(typ) - set(c_options)
+            if missing:
+                raise vol.Invalid(
+                    "Missing {} options inside nested list".format(missing))
+
             options.append(c_options)
         # normal list
         else:
@@ -267,8 +276,10 @@ def _nested_validate_dict(typ, data_dict, key):
     options = {}
 
     for c_key, c_value in data_dict.items():
+        # Ignore unknown options / remove from list
        if c_key not in typ:
-            raise vol.Invalid("Unknow nested dict options {}".format(c_key))
+            _LOGGER.warning("Unknown options %s", c_key)
+            continue
 
         options[c_key] = _single_validate(typ[c_key], c_value, c_key)
 
@@ -166,14 +166,10 @@ class APIAddons(object):
         return True
 
     @api_process
-    async def install(self, request):
+    def install(self, request):
         """Install addon."""
-        body = await api_validate(SCHEMA_VERSION, request)
         addon = self._extract_addon(request, check_installed=False)
-        version = body.get(ATTR_VERSION, addon.last_version)
-
-        return await asyncio.shield(
-            addon.install(version=version), loop=self.loop)
+        return asyncio.shield(addon.install(), loop=self.loop)
 
     @api_process
     def uninstall(self, request):
@@ -202,17 +198,14 @@ class APIAddons(object):
         return asyncio.shield(addon.stop(), loop=self.loop)
 
     @api_process
-    async def update(self, request):
+    def update(self, request):
         """Update addon."""
-        body = await api_validate(SCHEMA_VERSION, request)
         addon = self._extract_addon(request)
-        version = body.get(ATTR_VERSION, addon.last_version)
 
-        if version == addon.version_installed:
-            raise RuntimeError("Version %s is already in use", version)
+        if addon.last_version == addon.version_installed:
+            raise RuntimeError("No update available!")
 
-        return await asyncio.shield(
-            addon.update(version=version), loop=self.loop)
+        return asyncio.shield(addon.update(), loop=self.loop)
 
     @api_process
     def restart(self, request):
@@ -253,4 +246,4 @@ class APIAddons(object):
             raise RuntimeError("STDIN not supported by addons")
 
         data = await request.read()
-        return asyncio.shield(addon.write_stdin(data), loop=self.loop)
+        return await asyncio.shield(addon.write_stdin(data), loop=self.loop)
 
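The install and update handlers switch from `async def` methods that awaited the shielded call to plain `def` methods that return the awaitable, presumably leaving it to the surrounding `@api_process` wrapper (not shown in this diff) to await it; `write_stdin` moves the other way and now awaits explicitly. A minimal sketch of why both shapes compose with `asyncio.shield`:

```python
import asyncio


async def do_install():
    """Stand-in for addon.install(): some long-running work."""
    await asyncio.sleep(0.1)
    return True


def handler_returning_awaitable():
    # Like the new install()/update() handlers: return the shielded task and
    # let the caller await it.
    return asyncio.shield(do_install())


async def handler_awaiting():
    # Like write_stdin() after this change: await the shielded task itself.
    return await asyncio.shield(do_install())


async def main():
    print(await handler_returning_awaitable())   # caller awaits the task
    print(await handler_awaiting())              # handler awaited it already


asyncio.run(main())
```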
@@ -2,7 +2,7 @@
 from pathlib import Path
 from ipaddress import ip_network
 
-HASSIO_VERSION = '0.68'
+HASSIO_VERSION = '0.69'
 
 URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
                       'hassio/{}/version.json')
 
@@ -95,7 +95,7 @@ class HomeAssistant(JsonConfig):
     def watchdog(self, value):
         """Return True if the watchdog should protect Home-Assistant."""
         self._data[ATTR_WATCHDOG] = value
-        self._data.save()
+        self.save()
 
     @property
     def version(self):
 
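The watchdog setter fix replaces `self._data.save()`, a call on a plain dict that has no `save()` method, with `self.save()` on the config object. A hedged sketch of the pattern, assuming `JsonConfig` keeps its state in `self._data` and persists it through a `save()` method (names inferred from the diff, not verified against the full code base):

```python
import json
import tempfile
from pathlib import Path

ATTR_WATCHDOG = "watchdog"


class JsonConfigSketch:
    """Minimal JsonConfig-style base: a dict persisted to a JSON file."""

    def __init__(self, path):
        self._file = Path(path)
        self._data = {}
        if self._file.is_file():
            self._data = json.loads(self._file.read_text())

    def save(self):
        self._file.write_text(json.dumps(self._data))


class HomeAssistantSketch(JsonConfigSketch):
    @property
    def watchdog(self):
        return self._data.get(ATTR_WATCHDOG, True)

    @watchdog.setter
    def watchdog(self, value):
        self._data[ATTR_WATCHDOG] = value
        # self._data is a plain dict, so self._data.save() would raise
        # AttributeError; persistence goes through the config object.
        self.save()


cfg = HomeAssistantSketch(Path(tempfile.mkdtemp()) / "homeassistant.json")
cfg.watchdog = False
print(cfg.watchdog)   # -> False
```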
@@ -197,6 +197,8 @@ class SnapshotsManager(object):
             await snapshot.restore_folders()
 
             # start homeassistant restore
+            _LOGGER.info("Full-Restore %s restore Home-Assistant",
+                         snapshot.slug)
             snapshot.restore_homeassistant(self.homeassistant)
             task_hass = self.loop.create_task(
                 self.homeassistant.update(snapshot.homeassistant_version))
@@ -279,6 +281,8 @@ class SnapshotsManager(object):
                 await snapshot.restore_folders(folders)
 
             if homeassistant:
+                _LOGGER.info("Partial-Restore %s restore Home-Assistant",
+                             snapshot.slug)
                 snapshot.restore_homeassistant(self.homeassistant)
                 tasks.append(self.homeassistant.update(
                     snapshot.homeassistant_version))
 
@@ -261,7 +261,8 @@ class Snapshot(object):
         """Async context to close a snapshot."""
         # exists snapshot or exception on build
         if self.tar_file.is_file() or exception_type is not None:
-            return self._tmp.cleanup()
+            self._tmp.cleanup()
+            return
 
         # validate data
         try:
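Splitting `return self._tmp.cleanup()` into a cleanup call plus a bare `return` matters because the return value of `__aexit__` decides whether an in-flight exception is suppressed; returning whatever `cleanup()` happens to produce is fragile. A small illustration of that rule:

```python
import asyncio


class Ctx:
    def __init__(self, suppress):
        self._suppress = suppress

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc, tb):
        # A truthy return value from __aexit__ swallows the exception;
        # None/False lets it propagate.
        return self._suppress


async def main():
    async with Ctx(suppress=True):
        raise RuntimeError("swallowed")
    print("exception was suppressed")

    try:
        async with Ctx(suppress=False):
            raise RuntimeError("propagated")
    except RuntimeError as err:
        print("exception propagated:", err)


asyncio.run(main())
```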
@@ -283,7 +284,6 @@ class Snapshot(object):
             _LOGGER.error("Can't write snapshot.json")
 
         self._tmp.cleanup()
-        self._tmp = None
 
     async def import_addon(self, addon):
         """Add a addon into snapshot."""
@@ -323,9 +323,11 @@ class Snapshot(object):
             origin_dir = Path(self.config.path_hassio, name)
 
             try:
+                _LOGGER.info("Snapshot folder %s", name)
                 with tarfile.open(snapshot_tar, "w:gz",
                                   compresslevel=1) as tar_file:
                     tar_file.add(origin_dir, arcname=".")
+                _LOGGER.info("Snapshot folder %s done", name)
 
                 self._data[ATTR_FOLDERS].append(name)
             except tarfile.TarError as err:
@@ -352,8 +354,10 @@ class Snapshot(object):
             remove_folder(origin_dir)
 
             try:
+                _LOGGER.info("Restore folder %s", name)
                 with tarfile.open(snapshot_tar, "r:gz") as tar_file:
                     tar_file.extractall(path=origin_dir)
+                _LOGGER.info("Restore folder %s done", name)
             except tarfile.TarError as err:
                 _LOGGER.warning("Can't restore folder %s -> %s", name, err)
 
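The last two tarfile hunks only add progress logging around the existing snapshot and restore of a data folder. In isolation, the underlying pattern of archiving a directory with `arcname='.'` and extracting it back looks like this (temporary paths for illustration, not hassio's real layout):

```python
import logging
import tarfile
import tempfile
from pathlib import Path

logging.basicConfig(level=logging.INFO)
_LOGGER = logging.getLogger(__name__)

work = Path(tempfile.mkdtemp())
origin_dir = work / "share"
origin_dir.mkdir()
(origin_dir / "example.txt").write_text("hello")

snapshot_tar = work / "share.tar.gz"

# Snapshot: store the folder's contents relative to the archive root (".").
_LOGGER.info("Snapshot folder %s", origin_dir)
with tarfile.open(snapshot_tar, "w:gz", compresslevel=1) as tar_file:
    tar_file.add(origin_dir, arcname=".")
_LOGGER.info("Snapshot folder %s done", origin_dir)

# Restore: extract back into a (new) target directory.
restore_dir = work / "share_restored"
restore_dir.mkdir()
_LOGGER.info("Restore folder %s", restore_dir)
with tarfile.open(snapshot_tar, "r:gz") as tar_file:
    tar_file.extractall(path=restore_dir)
_LOGGER.info("Restore folder %s done", restore_dir)

print(sorted(p.name for p in restore_dir.iterdir()))   # ['example.txt']
```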
@@ -1,5 +1,5 @@
 {
-  "hassio": "0.68",
+  "hassio": "0.69",
   "homeassistant": "0.55",
   "resinos": "1.1",
   "resinhup": "0.3",