Mirror of https://github.com/home-assistant/supervisor.git
Synced 2025-09-27 05:49:32 +00:00

Compare commits

14 Commits
b33b26018d
66c93e7176
5674d32bad
7a84972770
638f0f5371
dca1b6f1d3
2b0ee109d6
e7430d87d7
9751c1de79
c497167b64
7fb2aca88b
0d544845b1
602eb472f9
f22fa46bdb
```diff
@@ -13,11 +13,12 @@ _LOGGER = logging.getLogger(__name__)
 # pylint: disable=invalid-name
 if __name__ == "__main__":
     bootstrap.initialize_logging()
-    loop = asyncio.get_event_loop()
 
     if not bootstrap.check_environment():
-        exit(1)
+        sys.exit(1)
 
+    loop = asyncio.get_event_loop()
+
     # init executor pool
     executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
     loop.set_default_executor(executor)
```
```diff
@@ -27,19 +28,20 @@ if __name__ == "__main__":
 
     bootstrap.migrate_system_env(config)
 
-    _LOGGER.info("Run Hassio setup")
+    _LOGGER.info("Setup HassIO")
     loop.run_until_complete(hassio.setup())
 
-    _LOGGER.info("Start Hassio")
     loop.call_soon_threadsafe(loop.create_task, hassio.start())
-    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, hassio)
+    loop.call_soon_threadsafe(bootstrap.reg_signal, loop)
 
-    _LOGGER.info("Run Hassio loop")
-    loop.run_forever()
-
-    _LOGGER.info("Cleanup system")
-    executor.shutdown(wait=False)
-    loop.close()
+    try:
+        _LOGGER.info("Run HassIO")
+        loop.run_forever()
+    finally:
+        _LOGGER.info("Stopping HassIO")
+        loop.run_until_complete(hassio.stop())
+        executor.shutdown(wait=False)
+        loop.close()
 
     _LOGGER.info("Close Hassio")
-    sys.exit(hassio.exit_code)
+    sys.exit(0)
```
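The hunks above replace hand-rolled exit-code plumbing with a plain try/finally around the event loop: signals now merely stop the loop, and cleanup runs unconditionally afterwards. A minimal standalone sketch of the resulting pattern, where `cleanup()` is an invented stand-in for `hassio.stop()`:

```python
import asyncio
import signal
import sys
from concurrent.futures import ThreadPoolExecutor


async def cleanup():
    """Stand-in for hassio.stop(): close sessions, stop API servers."""
    await asyncio.sleep(0)


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
    loop.set_default_executor(executor)

    # Signals no longer schedule an async stop(); they only stop the loop.
    for sig in (signal.SIGTERM, signal.SIGHUP, signal.SIGINT):
        loop.add_signal_handler(sig, loop.stop)

    try:
        loop.run_forever()
    finally:
        # Runs however the loop exits, so shutdown is a single code path.
        loop.run_until_complete(cleanup())
        executor.shutdown(wait=False)
        loop.close()

    sys.exit(0)
```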
```diff
@@ -8,7 +8,6 @@ import shutil
 import tarfile
 from tempfile import TemporaryDirectory
 
-from deepmerge import Merger
 import voluptuous as vol
 from voluptuous.humanize import humanize_error
 
@@ -33,8 +32,6 @@ RE_WEBUI = re.compile(
     r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
     r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")
 
-MERGE_OPT = Merger([(dict, ['merge'])], ['override'], ['override'])
-
 
 class Addon(object):
     """Hold data for addon inside HassIO."""
@@ -109,10 +106,10 @@ class Addon(object):
     def options(self):
         """Return options with local changes."""
         if self.is_installed:
-            return MERGE_OPT.merge(
-                self.data.system[self._id][ATTR_OPTIONS],
-                self.data.user[self._id][ATTR_OPTIONS],
-            )
+            return {
+                **self.data.system[self._id][ATTR_OPTIONS],
+                **self.data.user[self._id][ATTR_OPTIONS]
+            }
         return self.data.cache[self._id][ATTR_OPTIONS]
 
     @options.setter
```
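With deepmerge reverted, installed add-on options are combined by plain dict unpacking, which is a shallow merge: a user-supplied value replaces the system default wholesale, nested dicts included. A quick illustration with made-up option values:

```python
system = {"ssl": False, "mqtt": {"host": "core-mosquitto", "port": 1883}}
user = {"mqtt": {"port": 8883}}

# Shallow merge: the user's "mqtt" dict replaces the system one entirely,
# dropping "host"; the previous deep merge would have kept it.
merged = {**system, **user}
assert merged == {"ssl": False, "mqtt": {"port": 8883}}
```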
```diff
@@ -38,7 +38,7 @@ RE_SCHEMA_ELEMENT = re.compile(
     r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
     r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
     r"|match\((?P<match>.*)\)"
-    r")$"
+    r")\??$"
 )
 
 SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
```
```diff
@@ -105,10 +105,15 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
     vol.Required(ATTR_OPTIONS): dict,
     vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
         vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
-            vol.Any(SCHEMA_ELEMENT, {vol.Coerce(str): SCHEMA_ELEMENT})
-        ], vol.Schema({vol.Coerce(str): SCHEMA_ELEMENT}))
+            vol.Any(
+                SCHEMA_ELEMENT,
+                {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
+            ),
+        ], vol.Schema({
+            vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
+        }))
     }), False),
-    vol.Optional(ATTR_IMAGE): vol.Match(r"^[\-\w{}]+/[\-\w{}]+$"),
+    vol.Optional(ATTR_IMAGE): vol.Match(r"^[\w{}]+/[\-\w{}]+$"),
     vol.Optional(ATTR_TIMEOUT, default=10):
         vol.All(vol.Coerce(int), vol.Range(min=10, max=120))
 }, extra=vol.REMOVE_EXTRA)
```
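The widened grammar lets a list contain dicts whose values are themselves elements or lists of elements, and lets dict values under the top level be lists too. A hypothetical add-on `schema` section that only the new rules would accept:

```python
# Invented example: a list of dicts where one value is itself a list.
ADDON_SCHEMA = {
    "networks": [
        {"name": "str", "ports": ["int"]},
    ],
}
```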
```diff
@@ -125,7 +130,7 @@ SCHEMA_REPOSITORY_CONFIG = vol.Schema({
 # pylint: disable=no-value-for-parameter
 SCHEMA_BUILD_CONFIG = vol.Schema({
     vol.Optional(ATTR_BUILD_FROM, default=BASE_IMAGE): vol.Schema({
-        vol.In(ARCH_ALL): vol.Match(r"^[\-\w{}]+/[\-\w{}]+:[\-\w{}]+$"),
+        vol.In(ARCH_ALL): vol.Match(r"(?:^[\w{}]+/)?[\-\w{}]+:[\.\-\w{}]+$"),
     }),
     vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
     vol.Optional(ATTR_ARGS, default={}): vol.Schema({
```
```diff
@@ -199,6 +204,7 @@ def validate_options(raw_schema):
             raise vol.Invalid(
                 "Type error for {}.".format(key)) from None
 
+        _check_missing_options(raw_schema, options, 'root')
         return options
 
     return validate
@@ -246,25 +252,10 @@ def _nested_validate_list(typ, data_list, key):
     options = []
 
     for element in data_list:
-        # dict list
+        # Nested?
         if isinstance(typ, dict):
-            c_options = {}
-            for c_key, c_value in element.items():
-                # Ignore unknown options / remove from list
-                if c_key not in typ:
-                    _LOGGER.warning("Unknown options %s", c_key)
-                    continue
-
-                c_options[c_key] = _single_validate(typ[c_key], c_value, c_key)
-
-            # check if all options are exists
-            missing = set(typ) - set(c_options)
-            if missing:
-                raise vol.Invalid(
-                    "Missing {} options inside nested list".format(missing))
-
+            c_options = _nested_validate_dict(typ, element, key)
             options.append(c_options)
         # normal list
         else:
             options.append(_single_validate(typ, element, key))
@@ -281,6 +272,23 @@ def _nested_validate_dict(typ, data_dict, key):
             _LOGGER.warning("Unknown options %s", c_key)
             continue
 
-        options[c_key] = _single_validate(typ[c_key], c_value, c_key)
+        # Nested?
+        if isinstance(typ[c_key], list):
+            options[c_key] = _nested_validate_list(typ[c_key][0],
+                                                   c_value, c_key)
+        else:
+            options[c_key] = _single_validate(typ[c_key], c_value, c_key)
 
+    _check_missing_options(typ, options, key)
     return options
+
+
+def _check_missing_options(origin, exists, root):
+    """Check if all options are exists."""
+    missing = set(origin) - set(exists)
+    for miss_opt in missing:
+        if isinstance(origin[miss_opt], str) and \
+                origin[miss_opt].endswith("?"):
+            continue
+        raise vol.Invalid(
+            "Missing option {} in {}".format(miss_opt, root))
```
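The new `\??` suffix in RE_SCHEMA_ELEMENT and `_check_missing_options` together introduce optional keys: an element such as `int?` may be omitted, while any other missing key now raises `vol.Invalid`. A self-contained sketch of that rule, with an invented schema and helper name:

```python
import voluptuous as vol


def check_missing(schema, given, root):
    """Mirror the '?' rule: suffixed elements may be absent."""
    for miss in set(schema) - set(given):
        if isinstance(schema[miss], str) and schema[miss].endswith("?"):
            continue  # optional element, fine to omit
        raise vol.Invalid("Missing option {} in {}".format(miss, root))


schema = {"host": "str", "port": "int?"}
check_missing(schema, {"host": "example.local"}, "root")  # ok: port optional
# check_missing(schema, {"port": 1}, "root")  # raises: host is required
```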
```diff
@@ -123,22 +123,22 @@ def check_environment():
     return True
 
 
-def reg_signal(loop, hassio):
+def reg_signal(loop):
     """Register SIGTERM, SIGKILL to stop system."""
     try:
         loop.add_signal_handler(
-            signal.SIGTERM, lambda: loop.create_task(hassio.stop()))
+            signal.SIGTERM, lambda: loop.call_soon(loop.stop))
     except (ValueError, RuntimeError):
         _LOGGER.warning("Could not bind to SIGTERM")
 
     try:
         loop.add_signal_handler(
-            signal.SIGHUP, lambda: loop.create_task(hassio.stop()))
+            signal.SIGHUP, lambda: loop.call_soon(loop.stop))
     except (ValueError, RuntimeError):
         _LOGGER.warning("Could not bind to SIGHUP")
 
     try:
         loop.add_signal_handler(
-            signal.SIGINT, lambda: loop.create_task(hassio.stop()))
+            signal.SIGINT, lambda: loop.call_soon(loop.stop))
     except (ValueError, RuntimeError):
         _LOGGER.warning("Could not bind to SIGINT")
```
```diff
@@ -2,7 +2,7 @@
 from pathlib import Path
 from ipaddress import ip_network
 
-HASSIO_VERSION = '0.70'
+HASSIO_VERSION = '0.72'
 
 URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
                       'hassio/{}/version.json')
```
```diff
@@ -20,8 +20,6 @@ RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
 RUN_WATCHDOG_HOMEASSISTANT_API = 300
 RUN_CLEANUP_API_SESSIONS = 900
 
-RESTART_EXIT_CODE = 100
-
 FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
 FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
 FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
```
```diff
@@ -177,7 +177,7 @@ class HassIO(object):
         if self.homeassistant.version == 'landingpage':
             self.loop.create_task(self.homeassistant.install())
 
-    async def stop(self, exit_code=0):
+    async def stop(self):
         """Stop a running orchestration."""
         # don't process scheduler anymore
         self.scheduler.suspend = True
@@ -185,7 +185,6 @@ class HassIO(object):
         # process stop tasks
         self.websession.close()
         self.homeassistant.websession.close()
-        await asyncio.wait([self.api.stop(), self.dns.stop()], loop=self.loop)
 
-        self.exit_code = exit_code
-        self.loop.stop()
+        # process async stop tasks
+        await asyncio.wait([self.api.stop(), self.dns.stop()], loop=self.loop)
```
```diff
@@ -6,7 +6,6 @@ import docker
 
 from .interface import DockerInterface
 from .util import docker_process
-from ..const import RESTART_EXIT_CODE
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -52,7 +51,7 @@ class DockerSupervisor(DockerInterface):
         _LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)
 
         if await self.loop.run_in_executor(None, self._install, tag):
-            self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
+            self.loop.call_later(1, self.loop.stop)
             return True
 
         return False
```
setup.py
```diff
@@ -47,7 +47,6 @@ setup(
         'pyotp',
         'pyqrcode',
         'pytz',
-        'pyudev',
-        'deepmerge'
+        'pyudev'
     ]
 )
```
```diff
@@ -1,6 +1,6 @@
 {
-    "hassio": "0.70",
-    "homeassistant": "0.55",
+    "hassio": "0.72",
+    "homeassistant": "0.55.2",
     "resinos": "1.1",
     "resinhup": "0.3",
     "generic": "0.3",
```