Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-15 20:19:21 +00:00)

Compare commits (43 commits)
SHA1: 05554ccf7e, d6fc8892db, fa9b3b939e, 70685c41be, f3e60f6c28, 7798e7cde2, 4af92b9d25, eab958860c, a34806d4e2, f00b21dc28, 021946e181, 5999b48be4, 57f3178408, 14013ac923, 2f9f9c6165, 0119b52e11, 2eeb8bf388, 5af3040223, b06ce9b6b4, 38284e036d, 27a079742d, 7f33b3b5aa, c39d6357f3, d1b30a0e95, 6a74893a30, b61d5625fe, 8d468328f3, cd3b382902, 99cf44aacd, eaa489abec, 46f323791d, ec72d38220, f5b166a7f0, 8afde1e881, 3809f20c6a, 68390469df, 4c122a0630, d06696cd94, 8d094d5c70, 068c463c98, fc95933098, 630137a576, 857f346b35
API.md (39 changed lines)
@@ -203,16 +203,27 @@ Return QR-Code

- POST `/host/reboot`

- GET `/host/info`

See HostControl info command.

```json
{
    "type": "",
    "version": "",
    "last_version": "",
-    "features": ["shutdown", "reboot", "update", "network_info", "network_control"],
+    "features": ["shutdown", "reboot", "update", "hostname", "network_info", "network_control"],
    "hostname": "",
-    "os": ""
+    "os": "",
+    "audio": {
+        "input": "0,0",
+        "output": "0,0"
+    }
}
```

- POST `/host/options`

```json
{
    "audio_input": "0,0",
    "audio_output": "0,0"
}
```
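Since `/host/options` now accepts ALSA defaults, exercising the endpoint is a plain HTTP POST. The snippet below is only a sketch: the `http://hassio/` base URL assumes the call is made from inside an add-on with `hassio_api` access, and any authentication your installation needs is omitted.

```python
import requests

# Set the host-wide ALSA defaults (card 0, device 0 for input and output).
# Base URL and missing auth are assumptions; adjust for your installation.
resp = requests.post(
    "http://hassio/host/options",
    json={"audio_input": "0,0", "audio_output": "0,0"},
    timeout=10,
)
resp.raise_for_status()
print(resp.json())  # expected to be something like {"result": "ok", ...}
```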
@@ -259,11 +270,6 @@ Optional:

```json
{
    "hostname": "",
    "mode": "dhcp|fixed",
    "ssid": "",
    "ip": "",
    "netmask": "",
    "gateway": ""
}
```
@@ -297,6 +303,7 @@ Output is the raw Docker log.

- POST `/homeassistant/restart`
- POST `/homeassistant/options`
+- POST `/homeassistant/check`

```json
{
@@ -330,7 +337,9 @@ Get all available addons.

            "privileged": ["NET_ADMIN", "SYS_ADMIN"],
            "devices": ["/dev/xy"],
            "url": "null|url",
-            "logo": "bool"
+            "logo": "bool",
+            "audio": "bool",
+            "hassio_api": "bool"
        }
    ],
    "repositories": [
@@ -367,7 +376,11 @@ Get all available addons.

    "privileged": ["NET_ADMIN", "SYS_ADMIN"],
    "devices": ["/dev/xy"],
    "logo": "bool",
-    "webui": "null|http(s)://[HOST]:port/xy/zx"
+    "hassio_api": "bool",
+    "webui": "null|http(s)://[HOST]:port/xy/zx",
+    "audio": "bool",
+    "audio_input": "null|0,0",
+    "audio_output": "null|0,0"
}
```
@@ -383,10 +396,12 @@ Get all available addons.

        "CONTAINER": "port|[ip, port]"
    },
    "options": {},
+    "audio_output": "null|0,0",
+    "audio_input": "null|0,0"
}
```

-For reset custom network settings, set it `null`.
+For reset custom network/audio settings, set it `null`.

- POST `/addons/{addon}/start`
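Because a custom network or audio value is reset by sending `null`, clearing a per-add-on override is just another options call. A sketch only: the add-on slug and base URL are placeholders.

```python
import requests

# Clear the per-add-on ALSA overrides so the add-on falls back to the
# host defaults again; "local_example" is a placeholder slug.
requests.post(
    "http://hassio/addons/local_example/options",
    json={"audio_input": None, "audio_output": None},  # None serializes to null
    timeout=10,
).raise_for_status()
```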
@@ -1,11 +1,13 @@

# Hass.io

### First private cloud solution for home automation.

Hass.io is a Docker based system for managing your Home Assistant installation and related applications. The system is controlled via Home Assistant which communicates with the supervisor. The supervisor provides an API to manage the installation. This includes changing network settings or installing and updating software.



-[HassIO-Addons](https://github.com/home-assistant/hassio-addons) | [HassIO-Build](https://github.com/home-assistant/hassio-build)
+- [Hass.io Addons](https://github.com/home-assistant/hassio-addons)
+- [Hass.io Build](https://github.com/home-assistant/hassio-build)

## Installation
@@ -78,7 +78,7 @@ class AddonManager(object):

            # don't add built-in repository to config
            if url not in BUILTIN_REPOSITORIES:
-                self.config.addons_repositories = url
+                self.config.add_addon_repository(url)

        tasks = [_add_repository(url) for url in new_rep - old_rep]
        if tasks:
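The change above swaps an assignment to the `addons_repositories` property (a setter with a hidden append-and-save side effect) for an explicit `add_addon_repository()` call. A simplified stand-in for the pattern, assuming only that `save()` persists the JSON data:

```python
class RepoConfig:
    """Simplified stand-in for CoreConfig's repository handling."""

    def __init__(self):
        self._data = {"addons_custom_list": []}

    def add_addon_repository(self, repo):
        """Add a custom repository to the list (idempotent)."""
        if repo in self._data["addons_custom_list"]:
            return
        self._data["addons_custom_list"].append(repo)
        self.save()

    def drop_addon_repository(self, repo):
        """Remove a custom repository from the list."""
        if repo in self._data["addons_custom_list"]:
            self._data["addons_custom_list"].remove(repo)
            self.save()

    def save(self):
        """Persist the data (the real class writes JSON to disk)."""
        pass
```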
@@ -19,7 +19,8 @@ from ..const import (
|
||||
ATTR_URL, ATTR_ARCH, ATTR_LOCATON, ATTR_DEVICES, ATTR_ENVIRONMENT,
|
||||
ATTR_HOST_NETWORK, ATTR_TMPFS, ATTR_PRIVILEGED, ATTR_STARTUP,
|
||||
STATE_STARTED, STATE_STOPPED, STATE_NONE, ATTR_USER, ATTR_SYSTEM,
|
||||
ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI)
|
||||
ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
|
||||
ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT)
|
||||
from .util import check_installed
|
||||
from ..dock.addon import DockerAddon
|
||||
from ..tools import write_json_file, read_json_file
|
||||
@@ -40,12 +41,12 @@ class Addon(object):
|
||||
self.data = data
|
||||
self._id = slug
|
||||
|
||||
self.addon_docker = DockerAddon(config, loop, dock, self)
|
||||
self.docker = DockerAddon(config, loop, dock, self)
|
||||
|
||||
async def load(self):
|
||||
"""Async initialize of object."""
|
||||
if self.is_installed:
|
||||
await self.addon_docker.attach()
|
||||
await self.docker.attach()
|
||||
|
||||
@property
|
||||
def slug(self):
|
||||
@@ -244,6 +245,56 @@ class Addon(object):

        """Return list of privilege."""
        return self._mesh.get(ATTR_PRIVILEGED)

+    @property
+    def use_hassio_api(self):
+        """Return True if the add-on access to hassio api."""
+        return self._mesh[ATTR_HASSIO_API]
+
+    @property
+    def with_audio(self):
+        """Return True if the add-on access to audio."""
+        return self._mesh[ATTR_AUDIO]
+
+    @property
+    def audio_output(self):
+        """Return ALSA config for output or None."""
+        if not self.with_audio:
+            return
+
+        setting = self.config.audio_output
+        if self.is_installed and ATTR_AUDIO_OUTPUT in self.data.user[self._id]:
+            setting = self.data.user[self._id][ATTR_AUDIO_OUTPUT]
+        return setting
+
+    @audio_output.setter
+    def audio_output(self, value):
+        """Set/remove custom audio output settings."""
+        if value is None:
+            self.data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
+        else:
+            self.data.user[self._id][ATTR_AUDIO_OUTPUT] = value
+        self.data.save()
+
+    @property
+    def audio_input(self):
+        """Return ALSA config for input or None."""
+        if not self.with_audio:
+            return
+
+        setting = self.config.audio_input
+        if self.is_installed and ATTR_AUDIO_INPUT in self.data.user[self._id]:
+            setting = self.data.user[self._id][ATTR_AUDIO_INPUT]
+        return setting
+
+    @audio_input.setter
+    def audio_input(self, value):
+        """Set/remove custom audio input settings."""
+        if value is None:
+            self.data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
+        else:
+            self.data.user[self._id][ATTR_AUDIO_INPUT] = value
+        self.data.save()
+
    @property
    def url(self):
        """Return url of addon."""
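Both new audio properties follow the same lookup order: a per-add-on user override wins, otherwise the host-wide default from `CoreConfig` applies, and `None` is returned when the add-on has not requested audio at all. A compact, self-contained illustration of that precedence (names simplified):

```python
def resolve_audio_output(with_audio, user_override, host_default):
    """Mirror Addon.audio_output: override > host default > None."""
    if not with_audio:
        return None
    return user_override if user_override is not None else host_default


assert resolve_audio_output(False, "1,0", "0,0") is None   # audio not requested
assert resolve_audio_output(True, None, "0,0") == "0,0"    # host default wins
assert resolve_audio_output(True, "1,0", "0,0") == "1,0"   # user override wins
```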
@@ -383,7 +434,7 @@ class Addon(object):
|
||||
self.path_data.mkdir()
|
||||
|
||||
version = version or self.last_version
|
||||
if not await self.addon_docker.install(version):
|
||||
if not await self.docker.install(version):
|
||||
return False
|
||||
|
||||
self._set_install(version)
|
||||
@@ -392,7 +443,7 @@ class Addon(object):
|
||||
@check_installed
|
||||
async def uninstall(self):
|
||||
"""Remove a addon."""
|
||||
if not await self.addon_docker.remove():
|
||||
if not await self.docker.remove():
|
||||
return False
|
||||
|
||||
if self.path_data.is_dir():
|
||||
@@ -408,45 +459,61 @@ class Addon(object):
|
||||
if not self.is_installed:
|
||||
return STATE_NONE
|
||||
|
||||
if await self.addon_docker.is_running():
|
||||
if await self.docker.is_running():
|
||||
return STATE_STARTED
|
||||
return STATE_STOPPED
|
||||
|
||||
@check_installed
|
||||
async def start(self):
|
||||
"""Set options and start addon."""
|
||||
return await self.addon_docker.run()
|
||||
def start(self):
|
||||
"""Set options and start addon.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.run()
|
||||
|
||||
@check_installed
|
||||
async def stop(self):
|
||||
"""Stop addon."""
|
||||
return await self.addon_docker.stop()
|
||||
def stop(self):
|
||||
"""Stop addon.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.stop()
|
||||
|
||||
@check_installed
|
||||
async def update(self, version=None):
|
||||
"""Update addon."""
|
||||
version = version or self.last_version
|
||||
last_state = await self.state()
|
||||
|
||||
if version == self.version_installed:
|
||||
_LOGGER.warning(
|
||||
"Addon %s is already installed in %s", self._id, version)
|
||||
return True
|
||||
|
||||
if not await self.addon_docker.update(version):
|
||||
return False
|
||||
|
||||
if not await self.docker.update(version):
|
||||
return False
|
||||
self._set_update(version)
|
||||
|
||||
# restore state
|
||||
if last_state == STATE_STARTED:
|
||||
await self.docker.run()
|
||||
return True
|
||||
|
||||
@check_installed
|
||||
async def restart(self):
|
||||
"""Restart addon."""
|
||||
return await self.addon_docker.restart()
|
||||
def restart(self):
|
||||
"""Restart addon.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.restart()
|
||||
|
||||
@check_installed
|
||||
async def logs(self):
|
||||
"""Return addons log output."""
|
||||
return await self.addon_docker.logs()
|
||||
def logs(self):
|
||||
"""Return addons log output.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.docker.logs()
|
||||
|
||||
@check_installed
|
||||
async def snapshot(self, tar_file):
|
||||
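Several `async def` wrappers above were turned into plain methods that hand back a coroutine (hence the "Return a coroutine." docstrings). Callers can still `await` them, but they can also pass the un-started coroutine straight to `asyncio.shield()` or `asyncio.wait()` without an extra wrapper. A small sketch of the pattern with dummy classes:

```python
import asyncio


class DummyDocker:
    async def run(self):
        await asyncio.sleep(0)   # stand-in for the real Docker work
        return True


class DummyAddon:
    def __init__(self):
        self.docker = DummyDocker()

    def start(self):
        """Set options and start addon.

        Return a coroutine.
        """
        return self.docker.run()


async def main():
    addon = DummyAddon()
    # Both forms work because start() just returns the coroutine:
    assert await addon.start()
    assert await asyncio.shield(addon.start())


asyncio.get_event_loop().run_until_complete(main())
```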
@@ -454,7 +521,7 @@ class Addon(object):
|
||||
with TemporaryDirectory(dir=str(self.config.path_tmp)) as temp:
|
||||
# store local image
|
||||
if self.need_build and not await \
|
||||
self.addon_docker.export_image(Path(temp, "image.tar")):
|
||||
self.docker.export_image(Path(temp, "image.tar")):
|
||||
return False
|
||||
|
||||
data = {
|
||||
@@ -519,15 +586,15 @@ class Addon(object):
|
||||
|
||||
# check version / restore image
|
||||
version = data[ATTR_VERSION]
|
||||
if version != self.addon_docker.version:
|
||||
if version != self.docker.version:
|
||||
image_file = Path(temp, "image.tar")
|
||||
if image_file.is_file():
|
||||
await self.addon_docker.import_image(image_file, version)
|
||||
await self.docker.import_image(image_file, version)
|
||||
else:
|
||||
if await self.addon_docker.install(version):
|
||||
await self.addon_docker.cleanup()
|
||||
if await self.docker.install(version):
|
||||
await self.docker.cleanup()
|
||||
else:
|
||||
await self.addon_docker.stop()
|
||||
await self.docker.stop()
|
||||
|
||||
# restore data
|
||||
def _restore_data():
|
||||
|
@@ -118,7 +118,7 @@ class Data(JsonConfig):
|
||||
addon_config[ATTR_LOCATON] = str(addon.parent)
|
||||
self._cache[addon_slug] = addon_config
|
||||
|
||||
except OSError:
|
||||
except (OSError, json.JSONDecodeError):
|
||||
_LOGGER.warning("Can't read %s", addon)
|
||||
|
||||
except vol.Invalid as ex:
|
||||
|
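Catching `json.JSONDecodeError` alongside `OSError` means a syntactically broken add-on config file is logged and skipped instead of crashing the cache rebuild. A minimal reader with the same behaviour, independent of the surrounding class:

```python
import json
import logging
from pathlib import Path

_LOGGER = logging.getLogger(__name__)


def read_addon_config(path: Path):
    """Return the parsed config dict, or None if unreadable or invalid."""
    try:
        return json.loads(path.read_text())
    except (OSError, json.JSONDecodeError):
        _LOGGER.warning("Can't read %s", path)
        return None
```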
@@ -1,6 +1,7 @@

"""Init file for HassIO addons git."""
import asyncio
import logging
+import functools as ft
from pathlib import Path
import shutil

@@ -48,7 +49,9 @@ class GitRepo(object):

        try:
            _LOGGER.info("Clone addon %s repository", self.url)
            self.repo = await self.loop.run_in_executor(
-                None, git.Repo.clone_from, self.url, str(self.path))
+                None, ft.partial(
+                    git.Repo.clone_from, self.url, str(self.path),
+                    recursive=True))

        except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                git.GitCommandError) as err:
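`loop.run_in_executor()` only forwards positional arguments, so passing the keyword argument `recursive=True` to `git.Repo.clone_from` requires binding it with `functools.partial` first. The pattern in isolation, with a stand-in function instead of GitPython:

```python
import asyncio
import functools as ft


def clone_from(url, path, recursive=False):
    """Stand-in for git.Repo.clone_from."""
    return (url, path, recursive)


async def main():
    loop = asyncio.get_event_loop()
    # run_in_executor does not accept kwargs, so bind them up front:
    result = await loop.run_in_executor(
        None, ft.partial(clone_from, "https://example.com/repo.git",
                         "/tmp/repo", recursive=True))
    assert result == ("https://example.com/repo.git", "/tmp/repo", True)


asyncio.get_event_loop().run_until_complete(main())
```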
@@ -10,8 +10,9 @@ from ..const import (
|
||||
ARCH_AARCH64, ARCH_AMD64, ARCH_I386, ATTR_TMPFS, ATTR_PRIVILEGED,
|
||||
ATTR_USER, ATTR_STATE, ATTR_SYSTEM, STATE_STARTED, STATE_STOPPED,
|
||||
ATTR_LOCATON, ATTR_REPOSITORY, ATTR_TIMEOUT, ATTR_NETWORK,
|
||||
ATTR_AUTO_UPDATE, ATTR_WEBUI)
|
||||
from ..validate import NETWORK_PORT, DOCKER_PORTS
|
||||
ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT,
|
||||
ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API)
|
||||
from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL
|
||||
|
||||
|
||||
MAP_VOLUME = r"^(config|ssl|addons|backup|share)(?::(rw|:ro))?$"
|
||||
@@ -38,14 +39,12 @@ STARTUP_ALL = [
|
||||
PRIVILEGED_ALL = [
|
||||
"NET_ADMIN",
|
||||
"SYS_ADMIN",
|
||||
"SYS_RAWIO"
|
||||
]
|
||||
|
||||
|
||||
def _migrate_startup(value):
|
||||
"""Migrate startup schema.
|
||||
|
||||
REMOVE after 0.50-
|
||||
"""
|
||||
def _simple_startup(value):
|
||||
"""Simple startup schema."""
|
||||
if value == "before":
|
||||
return STARTUP_SERVICES
|
||||
if value == "after":
|
||||
@@ -62,7 +61,7 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
|
||||
vol.Optional(ATTR_URL): vol.Url(),
|
||||
vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
|
||||
vol.Required(ATTR_STARTUP):
|
||||
vol.All(_migrate_startup, vol.In(STARTUP_ALL)),
|
||||
vol.All(_simple_startup, vol.In(STARTUP_ALL)),
|
||||
vol.Required(ATTR_BOOT):
|
||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
|
||||
@@ -75,6 +74,8 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
|
||||
vol.Optional(ATTR_MAP, default=[]): [vol.Match(MAP_VOLUME)],
|
||||
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
|
||||
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
|
||||
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
|
||||
vol.Required(ATTR_OPTIONS): dict,
|
||||
vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
|
||||
vol.Coerce(str): vol.Any(ADDON_ELEMENT, [
|
||||
@@ -103,6 +104,8 @@ SCHEMA_ADDON_USER = vol.Schema({
|
||||
vol.Optional(ATTR_BOOT):
|
||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
|
||||
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
|
||||
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
|
||||
})
|
||||
|
||||
|
||||
|
@@ -37,6 +37,7 @@ class RestAPI(object):
|
||||
self.webapp.router.add_post('/host/reboot', api_host.reboot)
|
||||
self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
|
||||
self.webapp.router.add_post('/host/update', api_host.update)
|
||||
self.webapp.router.add_post('/host/options', api_host.options)
|
||||
|
||||
def register_network(self, host_control):
|
||||
"""Register network function."""
|
||||
@@ -46,11 +47,11 @@ class RestAPI(object):
|
||||
self.webapp.router.add_post('/network/options', api_net.options)
|
||||
|
||||
def register_supervisor(self, supervisor, snapshots, addons, host_control,
|
||||
websession):
|
||||
updater):
|
||||
"""Register supervisor function."""
|
||||
api_supervisor = APISupervisor(
|
||||
self.config, self.loop, supervisor, snapshots, addons,
|
||||
host_control, websession)
|
||||
host_control, updater)
|
||||
|
||||
self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
|
||||
self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
|
||||
@@ -67,10 +68,11 @@ class RestAPI(object):
|
||||
api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)
|
||||
|
||||
self.webapp.router.add_get('/homeassistant/info', api_hass.info)
|
||||
self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
|
||||
self.webapp.router.add_post('/homeassistant/options', api_hass.options)
|
||||
self.webapp.router.add_post('/homeassistant/update', api_hass.update)
|
||||
self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
|
||||
self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
|
||||
self.webapp.router.add_post('/homeassistant/check', api_hass.check)
|
||||
|
||||
def register_addons(self, addons):
|
||||
"""Register homeassistant function."""
|
||||
|
@@ -12,6 +12,7 @@ from ..const import (
|
||||
ATTR_BUILD, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_HOST_NETWORK, ATTR_SLUG,
|
||||
ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
|
||||
ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
|
||||
ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
|
||||
BOOT_AUTO, BOOT_MANUAL, CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY)
|
||||
from ..validate import DOCKER_PORTS
|
||||
|
||||
@@ -76,6 +77,8 @@ class APIAddons(object):
|
||||
ATTR_DEVICES: self._pretty_devices(addon),
|
||||
ATTR_URL: addon.url,
|
||||
ATTR_LOGO: addon.with_logo,
|
||||
ATTR_HASSIO_API: addon.use_hassio_api,
|
||||
ATTR_AUDIO: addon.with_audio,
|
||||
})
|
||||
|
||||
data_repositories = []
|
||||
@@ -123,6 +126,10 @@ class APIAddons(object):
|
||||
ATTR_DEVICES: self._pretty_devices(addon),
|
||||
ATTR_LOGO: addon.with_logo,
|
||||
ATTR_WEBUI: addon.webui,
|
||||
ATTR_HASSIO_API: addon.use_hassio_api,
|
||||
ATTR_AUDIO: addon.with_audio,
|
||||
ATTR_AUDIO_INPUT: addon.audio_input,
|
||||
ATTR_AUDIO_OUTPUT: addon.audio_output,
|
||||
}
|
||||
|
||||
@api_process
|
||||
@@ -144,6 +151,10 @@ class APIAddons(object):
|
||||
addon.auto_update = body[ATTR_AUTO_UPDATE]
|
||||
if ATTR_NETWORK in body:
|
||||
addon.ports = body[ATTR_NETWORK]
|
||||
if ATTR_AUDIO_INPUT in body:
|
||||
addon.audio_input = body[ATTR_AUDIO_INPUT]
|
||||
if ATTR_AUDIO_OUTPUT in body:
|
||||
addon.audio_output = body[ATTR_AUDIO_OUTPUT]
|
||||
|
||||
return True
|
||||
|
||||
@@ -152,19 +163,19 @@ class APIAddons(object):
|
||||
"""Install addon."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
version = body.get(ATTR_VERSION)
|
||||
version = body.get(ATTR_VERSION, addon.last_version)
|
||||
|
||||
return await asyncio.shield(
|
||||
addon.install(version=version), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def uninstall(self, request):
|
||||
def uninstall(self, request):
|
||||
"""Uninstall addon."""
|
||||
addon = self._extract_addon(request)
|
||||
return await asyncio.shield(addon.uninstall(), loop=self.loop)
|
||||
return asyncio.shield(addon.uninstall(), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def start(self, request):
|
||||
def start(self, request):
|
||||
"""Start addon."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
@@ -175,29 +186,32 @@ class APIAddons(object):
|
||||
except vol.Invalid as ex:
|
||||
raise RuntimeError(humanize_error(options, ex)) from None
|
||||
|
||||
return await asyncio.shield(addon.start(), loop=self.loop)
|
||||
return asyncio.shield(addon.start(), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def stop(self, request):
|
||||
def stop(self, request):
|
||||
"""Stop addon."""
|
||||
addon = self._extract_addon(request)
|
||||
return await asyncio.shield(addon.stop(), loop=self.loop)
|
||||
return asyncio.shield(addon.stop(), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
"""Update addon."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
addon = self._extract_addon(request)
|
||||
version = body.get(ATTR_VERSION)
|
||||
version = body.get(ATTR_VERSION, addon.last_version)
|
||||
|
||||
if version == addon.version_installed:
|
||||
raise RuntimeError("Version %s is already in use", version)
|
||||
|
||||
return await asyncio.shield(
|
||||
addon.update(version=version), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def restart(self, request):
|
||||
def restart(self, request):
|
||||
"""Restart addon."""
|
||||
addon = self._extract_addon(request)
|
||||
return await asyncio.shield(addon.restart(), loop=self.loop)
|
||||
return asyncio.shield(addon.restart(), loop=self.loop)
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request):
|
||||
|
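When add-on options fail schema validation, the handler above re-raises with `humanize_error(options, ex)`, which turns voluptuous' internal error into a readable message that includes the offending path. A standalone illustration, assuming only that the `voluptuous` package is installed:

```python
import voluptuous as vol
from voluptuous.humanize import humanize_error

schema = vol.Schema({vol.Required("port"): vol.Coerce(int)})
options = {"port": "not-a-number"}

try:
    schema(options)
except vol.Invalid as ex:
    # Prints something like:
    # "expected int for dictionary value @ data['port']. Got 'not-a-number'"
    print(humanize_error(options, ex))
    # The API handler instead re-raises:
    # raise RuntimeError(humanize_error(options, ex)) from None
```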
@@ -63,27 +63,29 @@ class APIHomeAssistant(object):
|
||||
async def update(self, request):
|
||||
"""Update homeassistant."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.config.last_homeassistant)
|
||||
version = body.get(ATTR_VERSION, self.homeassistant.last_version)
|
||||
|
||||
if self.homeassistant.in_progress:
|
||||
raise RuntimeError("Other task is in progress")
|
||||
if version == self.homeassistant.version:
|
||||
raise RuntimeError("Version {} is already in use".format(version))
|
||||
|
||||
return await asyncio.shield(
|
||||
self.homeassistant.update(version), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def restart(self, request):
|
||||
def restart(self, request):
|
||||
"""Restart homeassistant."""
|
||||
if self.homeassistant.in_progress:
|
||||
raise RuntimeError("Other task is in progress")
|
||||
|
||||
return await asyncio.shield(
|
||||
self.homeassistant.restart(), loop=self.loop)
|
||||
return asyncio.shield(self.homeassistant.restart(), loop=self.loop)
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request):
|
||||
"""Return homeassistant docker logs.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
"""Return homeassistant docker logs."""
|
||||
return self.homeassistant.logs()
|
||||
|
||||
@api_process
|
||||
async def check(self, request):
|
||||
"""Check config of homeassistant."""
|
||||
code, message = await self.homeassistant.check_config()
|
||||
if not code:
|
||||
raise RuntimeError(message)
|
||||
|
||||
return True
|
||||
|
@@ -7,7 +7,9 @@ import voluptuous as vol
|
||||
from .util import api_process_hostcontrol, api_process, api_validate
|
||||
from ..const import (
|
||||
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES,
|
||||
ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO)
|
||||
ATTR_OS, ATTR_SERIAL, ATTR_INPUT, ATTR_DISK, ATTR_AUDIO, ATTR_AUDIO_INPUT,
|
||||
ATTR_AUDIO_OUTPUT)
|
||||
from ..validate import ALSA_CHANNEL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -15,6 +17,11 @@ SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
|
||||
SCHEMA_OPTIONS = vol.Schema({
|
||||
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
|
||||
vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
|
||||
})
|
||||
|
||||
|
||||
class APIHost(object):
|
||||
"""Handle rest api for host functions."""
|
||||
@@ -38,6 +45,18 @@ class APIHost(object):
|
||||
ATTR_OS: self.host_control.os_info,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
"""Process host options."""
|
||||
body = await api_validate(SCHEMA_OPTIONS, request)
|
||||
|
||||
if ATTR_AUDIO_OUTPUT in body:
|
||||
self.config.audio_output = body[ATTR_AUDIO_OUTPUT]
|
||||
if ATTR_AUDIO_INPUT in body:
|
||||
self.config.audio_input = body[ATTR_AUDIO_INPUT]
|
||||
|
||||
return True
|
||||
|
||||
@api_process_hostcontrol
|
||||
def reboot(self, request):
|
||||
"""Reboot host."""
|
||||
@@ -55,7 +74,7 @@ class APIHost(object):
|
||||
version = body.get(ATTR_VERSION, self.host_control.last_version)
|
||||
|
||||
if version == self.host_control.version:
|
||||
raise RuntimeError("Version is already in use")
|
||||
raise RuntimeError("Version {} is already in use".format(version))
|
||||
|
||||
return await asyncio.shield(
|
||||
self.host_control.update(version=version), loop=self.loop)
|
||||
|
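The new host options are validated against `ALSA_CHANNEL` from `hassio.validate`. Its exact definition is not shown in this diff; a plausible sketch is a regex match on the `card,device` form used throughout (for example `"0,0"`), so treat the pattern below as an assumption:

```python
import voluptuous as vol

# Assumed shape of ALSA_CHANNEL: "<card>,<device>", e.g. "0,0" or "1,0".
ALSA_CHANNEL = vol.Match(r"\d+,\d+")

SCHEMA_OPTIONS = vol.Schema({
    vol.Optional("audio_output"): ALSA_CHANNEL,
    vol.Optional("audio_input"): ALSA_CHANNEL,
})

print(SCHEMA_OPTIONS({"audio_output": "0,0"}))  # passes validation
```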
@@ -98,5 +98,5 @@ class APISecurity(object):

        session = hashlib.sha256(os.urandom(54)).hexdigest()

        # store session
-        self.config.security_sessions = (session, valid_until)
+        self.config.add_security_session(session, valid_until)
        return {ATTR_SESSION: session}
@@ -111,10 +111,10 @@ class APISnapshots(object):
|
||||
self.snapshots.do_snapshot_partial(**body), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
async def restore_full(self, request):
|
||||
def restore_full(self, request):
|
||||
"""Full-Restore a snapshot."""
|
||||
snapshot = self._extract_snapshot(request)
|
||||
return await asyncio.shield(
|
||||
return asyncio.shield(
|
||||
self.snapshots.do_restore_full(snapshot), loop=self.loop)
|
||||
|
||||
@api_process
|
||||
@@ -125,7 +125,8 @@ class APISnapshots(object):
|
||||
|
||||
return await asyncio.shield(
|
||||
self.snapshots.do_restore_partial(snapshot, **body),
|
||||
loop=self.loop)
|
||||
loop=self.loop
|
||||
)
|
||||
|
||||
@api_process
|
||||
async def remove(self, request):
|
||||
|
@@ -10,7 +10,7 @@ from ..const import (
|
||||
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
|
||||
ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
|
||||
ATTR_STATE, CONTENT_TYPE_BINARY)
|
||||
from ..tools import validate_timezone
|
||||
from ..validate import validate_timezone
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -30,7 +30,7 @@ class APISupervisor(object):
|
||||
"""Handle rest api for supervisor functions."""
|
||||
|
||||
def __init__(self, config, loop, supervisor, snapshots, addons,
|
||||
host_control, websession):
|
||||
host_control, updater):
|
||||
"""Initialize supervisor rest api part."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
@@ -38,7 +38,7 @@ class APISupervisor(object):
|
||||
self.addons = addons
|
||||
self.snapshots = snapshots
|
||||
self.host_control = host_control
|
||||
self.websession = websession
|
||||
self.updater = updater
|
||||
|
||||
@api_process
|
||||
async def ping(self, request):
|
||||
@@ -64,8 +64,8 @@ class APISupervisor(object):
|
||||
|
||||
return {
|
||||
ATTR_VERSION: HASSIO_VERSION,
|
||||
ATTR_LAST_VERSION: self.config.last_hassio,
|
||||
ATTR_BETA_CHANNEL: self.config.upstream_beta,
|
||||
ATTR_LAST_VERSION: self.updater.version_hassio,
|
||||
ATTR_BETA_CHANNEL: self.updater.beta_channel,
|
||||
ATTR_ARCH: self.config.arch,
|
||||
ATTR_TIMEZONE: self.config.timezone,
|
||||
ATTR_ADDONS: list_addons,
|
||||
@@ -78,7 +78,7 @@ class APISupervisor(object):
|
||||
body = await api_validate(SCHEMA_OPTIONS, request)
|
||||
|
||||
if ATTR_BETA_CHANNEL in body:
|
||||
self.config.upstream_beta = body[ATTR_BETA_CHANNEL]
|
||||
self.updater.beta_channel = body[ATTR_BETA_CHANNEL]
|
||||
|
||||
if ATTR_TIMEZONE in body:
|
||||
self.config.timezone = body[ATTR_TIMEZONE]
|
||||
@@ -93,10 +93,10 @@ class APISupervisor(object):
|
||||
async def update(self, request):
|
||||
"""Update supervisor OS."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.config.last_hassio)
|
||||
version = body.get(ATTR_VERSION, self.updater.version_hassio)
|
||||
|
||||
if version == self.supervisor.version:
|
||||
raise RuntimeError("Version is already in use")
|
||||
raise RuntimeError("Version {} is already in use".format(version))
|
||||
|
||||
return await asyncio.shield(
|
||||
self.supervisor.update(version), loop=self.loop)
|
||||
@@ -107,7 +107,7 @@ class APISupervisor(object):
|
||||
tasks = [
|
||||
self.addons.reload(),
|
||||
self.snapshots.reload(),
|
||||
self.config.fetch_update_infos(self.websession),
|
||||
self.updater.fetch_data(),
|
||||
self.host_control.load()
|
||||
]
|
||||
results, _ = await asyncio.shield(
|
||||
@@ -121,8 +121,5 @@ class APISupervisor(object):
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request):
|
||||
"""Return supervisor docker logs.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
"""Return supervisor docker logs."""
|
||||
return self.supervisor.logs()
|
||||
|
@@ -87,9 +87,6 @@ def api_process_raw(content):

def api_return_error(message=None):
    """Return a API error message."""
    if message:
        _LOGGER.error(message)

    return web.json_response({
        JSON_RESULT: RESULT_ERROR,
        JSON_MESSAGE: message,
hassio/config.py (155 changed lines)
@@ -4,121 +4,60 @@ import logging
|
||||
import os
|
||||
from pathlib import Path, PurePath
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from .const import FILE_HASSIO_CONFIG, HASSIO_DATA
|
||||
from .tools import fetch_last_versions, JsonConfig, validate_timezone
|
||||
from .const import (
|
||||
FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_SECURITY, ATTR_SESSIONS,
|
||||
ATTR_PASSWORD, ATTR_TOTP, ATTR_TIMEZONE, ATTR_API_ENDPOINT,
|
||||
ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT)
|
||||
from .tools import JsonConfig
|
||||
from .validate import SCHEMA_HASSIO_CONFIG
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DATETIME_FORMAT = "%Y%m%d %H:%M:%S"
|
||||
|
||||
HOMEASSISTANT_CONFIG = PurePath("homeassistant")
|
||||
HOMEASSISTANT_LAST = 'homeassistant_last'
|
||||
|
||||
HASSIO_SSL = PurePath("ssl")
|
||||
HASSIO_LAST = 'hassio_last'
|
||||
|
||||
ADDONS_CORE = PurePath("addons/core")
|
||||
ADDONS_LOCAL = PurePath("addons/local")
|
||||
ADDONS_GIT = PurePath("addons/git")
|
||||
ADDONS_DATA = PurePath("addons/data")
|
||||
ADDONS_CUSTOM_LIST = 'addons_custom_list'
|
||||
|
||||
BACKUP_DATA = PurePath("backup")
|
||||
SHARE_DATA = PurePath("share")
|
||||
TMP_DATA = PurePath("tmp")
|
||||
|
||||
UPSTREAM_BETA = 'upstream_beta'
|
||||
API_ENDPOINT = 'api_endpoint'
|
||||
TIMEZONE = 'timezone'
|
||||
|
||||
SECURITY_INITIALIZE = 'security_initialize'
|
||||
SECURITY_TOTP = 'security_totp'
|
||||
SECURITY_PASSWORD = 'security_password'
|
||||
SECURITY_SESSIONS = 'security_sessions'
|
||||
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_CONFIG = vol.Schema({
|
||||
vol.Optional(UPSTREAM_BETA, default=False): vol.Boolean(),
|
||||
vol.Optional(API_ENDPOINT): vol.Coerce(str),
|
||||
vol.Optional(TIMEZONE, default='UTC'): validate_timezone,
|
||||
vol.Optional(HOMEASSISTANT_LAST): vol.Coerce(str),
|
||||
vol.Optional(HASSIO_LAST): vol.Coerce(str),
|
||||
vol.Optional(ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
|
||||
vol.Optional(SECURITY_INITIALIZE, default=False): vol.Boolean(),
|
||||
vol.Optional(SECURITY_TOTP): vol.Coerce(str),
|
||||
vol.Optional(SECURITY_PASSWORD): vol.Coerce(str),
|
||||
vol.Optional(SECURITY_SESSIONS, default={}):
|
||||
{vol.Coerce(str): vol.Coerce(str)},
|
||||
}, extra=vol.REMOVE_EXTRA)
|
||||
|
||||
|
||||
class CoreConfig(JsonConfig):
|
||||
"""Hold all core config data."""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize config object."""
|
||||
super().__init__(FILE_HASSIO_CONFIG, SCHEMA_CONFIG)
|
||||
super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)
|
||||
self.arch = None
|
||||
|
||||
async def fetch_update_infos(self, websession):
|
||||
"""Read current versions from web."""
|
||||
last = await fetch_last_versions(websession, beta=self.upstream_beta)
|
||||
|
||||
if last:
|
||||
self._data.update({
|
||||
HOMEASSISTANT_LAST: last.get('homeassistant'),
|
||||
HASSIO_LAST: last.get('hassio'),
|
||||
})
|
||||
self.save()
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@property
|
||||
def api_endpoint(self):
|
||||
"""Return IP address of api endpoint."""
|
||||
return self._data[API_ENDPOINT]
|
||||
return self._data[ATTR_API_ENDPOINT]
|
||||
|
||||
@api_endpoint.setter
|
||||
def api_endpoint(self, value):
|
||||
"""Store IP address of api endpoint."""
|
||||
self._data[API_ENDPOINT] = value
|
||||
|
||||
@property
|
||||
def upstream_beta(self):
|
||||
"""Return True if we run in beta upstream."""
|
||||
return self._data[UPSTREAM_BETA]
|
||||
|
||||
@upstream_beta.setter
|
||||
def upstream_beta(self, value):
|
||||
"""Set beta upstream mode."""
|
||||
self._data[UPSTREAM_BETA] = bool(value)
|
||||
self.save()
|
||||
self._data[ATTR_API_ENDPOINT] = value
|
||||
|
||||
@property
|
||||
def timezone(self):
|
||||
"""Return system timezone."""
|
||||
return self._data[TIMEZONE]
|
||||
return self._data[ATTR_TIMEZONE]
|
||||
|
||||
@timezone.setter
|
||||
def timezone(self, value):
|
||||
"""Set system timezone."""
|
||||
self._data[TIMEZONE] = value
|
||||
self._data[ATTR_TIMEZONE] = value
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def last_homeassistant(self):
|
||||
"""Actual version of homeassistant."""
|
||||
return self._data.get(HOMEASSISTANT_LAST)
|
||||
|
||||
@property
|
||||
def last_hassio(self):
|
||||
"""Actual version of hassio."""
|
||||
return self._data.get(HASSIO_LAST)
|
||||
|
||||
@property
|
||||
def path_hassio(self):
|
||||
"""Return hassio data path."""
|
||||
@@ -207,73 +146,95 @@ class CoreConfig(JsonConfig):
|
||||
@property
|
||||
def addons_repositories(self):
|
||||
"""Return list of addons custom repositories."""
|
||||
return self._data[ADDONS_CUSTOM_LIST]
|
||||
return self._data[ATTR_ADDONS_CUSTOM_LIST]
|
||||
|
||||
@addons_repositories.setter
|
||||
def addons_repositories(self, repo):
|
||||
def add_addon_repository(self, repo):
|
||||
"""Add a custom repository to list."""
|
||||
if repo in self._data[ADDONS_CUSTOM_LIST]:
|
||||
if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]:
|
||||
return
|
||||
|
||||
self._data[ADDONS_CUSTOM_LIST].append(repo)
|
||||
self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)
|
||||
self.save()
|
||||
|
||||
def drop_addon_repository(self, repo):
|
||||
"""Remove a custom repository from list."""
|
||||
if repo not in self._data[ADDONS_CUSTOM_LIST]:
|
||||
if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]:
|
||||
return
|
||||
|
||||
self._data[ADDONS_CUSTOM_LIST].remove(repo)
|
||||
self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def security_initialize(self):
|
||||
"""Return is security was initialize."""
|
||||
return self._data[SECURITY_INITIALIZE]
|
||||
return self._data[ATTR_SECURITY]
|
||||
|
||||
@security_initialize.setter
|
||||
def security_initialize(self, value):
|
||||
"""Set is security initialize."""
|
||||
self._data[SECURITY_INITIALIZE] = value
|
||||
self._data[ATTR_SECURITY] = value
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def security_totp(self):
|
||||
"""Return the TOTP key."""
|
||||
return self._data.get(SECURITY_TOTP)
|
||||
return self._data.get(ATTR_TOTP)
|
||||
|
||||
@security_totp.setter
|
||||
def security_totp(self, value):
|
||||
"""Set the TOTP key."""
|
||||
self._data[SECURITY_TOTP] = value
|
||||
self._data[ATTR_TOTP] = value
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def security_password(self):
|
||||
"""Return the password key."""
|
||||
return self._data.get(SECURITY_PASSWORD)
|
||||
return self._data.get(ATTR_PASSWORD)
|
||||
|
||||
@security_password.setter
|
||||
def security_password(self, value):
|
||||
"""Set the password key."""
|
||||
self._data[SECURITY_PASSWORD] = value
|
||||
self._data[ATTR_PASSWORD] = value
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def security_sessions(self):
|
||||
"""Return api sessions."""
|
||||
return {session: datetime.strptime(until, DATETIME_FORMAT) for
|
||||
session, until in self._data[SECURITY_SESSIONS].items()}
|
||||
return {
|
||||
session: datetime.strptime(until, DATETIME_FORMAT) for
|
||||
session, until in self._data[ATTR_SESSIONS].items()
|
||||
}
|
||||
|
||||
@security_sessions.setter
|
||||
def security_sessions(self, value):
|
||||
def add_security_session(self, session, valid):
|
||||
"""Set the a new session."""
|
||||
session, valid = value
|
||||
if valid is None:
|
||||
self._data[SECURITY_SESSIONS].pop(session, None)
|
||||
else:
|
||||
self._data[SECURITY_SESSIONS].update(
|
||||
{session: valid.strftime(DATETIME_FORMAT)}
|
||||
)
|
||||
|
||||
self._data[ATTR_SESSIONS].update(
|
||||
{session: valid.strftime(DATETIME_FORMAT)}
|
||||
)
|
||||
self.save()
|
||||
|
||||
def drop_security_session(self, session):
|
||||
"""Delete the a session."""
|
||||
self._data[ATTR_SESSIONS].pop(session, None)
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def audio_output(self):
|
||||
"""Return ALSA audio output card,dev."""
|
||||
return self._data.get(ATTR_AUDIO_OUTPUT)
|
||||
|
||||
@audio_output.setter
|
||||
def audio_output(self, value):
|
||||
"""Set ALSA audio output card,dev."""
|
||||
self._data[ATTR_AUDIO_OUTPUT] = value
|
||||
self.save()
|
||||
|
||||
@property
|
||||
def audio_input(self):
|
||||
"""Return ALSA audio input card,dev."""
|
||||
return self._data.get(ATTR_AUDIO_INPUT)
|
||||
|
||||
@audio_input.setter
|
||||
def audio_input(self, value):
|
||||
"""Set ALSA audio input card,dev."""
|
||||
self._data[ATTR_AUDIO_INPUT] = value
|
||||
self.save()
|
||||
|
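Security sessions are stored as strings in the config file and parsed back with `DATETIME_FORMAT = "%Y%m%d %H:%M:%S"`, so the write/read pair in `add_security_session()` and the `security_sessions` property is just a `strftime`/`strptime` round trip. For example:

```python
from datetime import datetime, timedelta

DATETIME_FORMAT = "%Y%m%d %H:%M:%S"

valid_until = datetime(2017, 8, 1, 12, 0, 0) + timedelta(days=1)

# What add_security_session() writes into the JSON file:
stored = valid_until.strftime(DATETIME_FORMAT)      # '20170802 12:00:00'
# What the security_sessions property reads back:
parsed = datetime.strptime(stored, DATETIME_FORMAT)

assert parsed == valid_until
```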
@@ -1,12 +1,10 @@

"""Const file for HassIO."""
from pathlib import Path

-HASSIO_VERSION = '0.49'
+HASSIO_VERSION = '0.57'

URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
-                      'hassio/master/version.json')
-URL_HASSIO_VERSION_BETA = ('https://raw.githubusercontent.com/home-assistant/'
-                           'hassio/dev/version.json')
+                      'hassio/{}/version.json')

URL_HASSIO_ADDONS = 'https://github.com/home-assistant/hassio-addons'
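Collapsing the two version URLs into one template means the channel segment (`master` for stable, `dev` for beta, taken from the two removed URLs) is filled in at request time. A sketch of how the updater might build the URL:

```python
URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
                      'hassio/{}/version.json')


def version_url(beta_channel):
    """Return the version.json URL for the selected channel."""
    channel = 'dev' if beta_channel else 'master'
    return URL_HASSIO_VERSION.format(channel)


assert version_url(False).endswith('hassio/master/version.json')
assert version_url(True).endswith('hassio/dev/version.json')
```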
@@ -25,6 +23,7 @@ RESTART_EXIT_CODE = 100
|
||||
FILE_HASSIO_ADDONS = Path(HASSIO_DATA, "addons.json")
|
||||
FILE_HASSIO_CONFIG = Path(HASSIO_DATA, "config.json")
|
||||
FILE_HASSIO_HOMEASSISTANT = Path(HASSIO_DATA, "homeassistant.json")
|
||||
FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json")
|
||||
|
||||
SOCKET_DOCKER = Path("/var/run/docker.sock")
|
||||
SOCKET_HC = Path("/var/run/hassio-hc.sock")
|
||||
@@ -83,6 +82,7 @@ ATTR_PASSWORD = 'password'
|
||||
ATTR_TOTP = 'totp'
|
||||
ATTR_INITIALIZE = 'initialize'
|
||||
ATTR_SESSION = 'session'
|
||||
ATTR_SESSIONS = 'sessions'
|
||||
ATTR_LOCATON = 'location'
|
||||
ATTR_BUILD = 'build'
|
||||
ATTR_DEVICES = 'devices'
|
||||
@@ -95,6 +95,8 @@ ATTR_USER = 'user'
|
||||
ATTR_SYSTEM = 'system'
|
||||
ATTR_SNAPSHOTS = 'snapshots'
|
||||
ATTR_HOMEASSISTANT = 'homeassistant'
|
||||
ATTR_HASSIO = 'hassio'
|
||||
ATTR_HASSIO_API = 'hassio_api'
|
||||
ATTR_FOLDERS = 'folders'
|
||||
ATTR_SIZE = 'size'
|
||||
ATTR_TYPE = 'type'
|
||||
@@ -102,9 +104,15 @@ ATTR_TIMEOUT = 'timeout'
|
||||
ATTR_AUTO_UPDATE = 'auto_update'
|
||||
ATTR_CUSTOM = 'custom'
|
||||
ATTR_AUDIO = 'audio'
|
||||
ATTR_AUDIO_INPUT = 'audio_input'
|
||||
ATTR_AUDIO_OUTPUT = 'audio_output'
|
||||
ATTR_INPUT = 'input'
|
||||
ATTR_OUTPUT = 'output'
|
||||
ATTR_DISK = 'disk'
|
||||
ATTR_SERIAL = 'serial'
|
||||
ATTR_SECURITY = 'security'
|
||||
ATTR_API_ENDPOINT = 'api_endpoint'
|
||||
ATTR_ADDONS_CUSTOM_LIST = 'addons_custom_list'
|
||||
|
||||
STARTUP_INITIALIZE = 'initialize'
|
||||
STARTUP_SYSTEM = 'system'
|
||||
|
@@ -19,6 +19,7 @@ from .homeassistant import HomeAssistant
|
||||
from .scheduler import Scheduler
|
||||
from .dock.supervisor import DockerSupervisor
|
||||
from .snapshots import SnapshotsManager
|
||||
from .updater import Updater
|
||||
from .tasks import (
|
||||
hassio_update, homeassistant_watchdog, api_sessions_cleanup, addons_update)
|
||||
from .tools import get_local_ip, fetch_timezone
|
||||
@@ -35,6 +36,7 @@ class HassIO(object):
|
||||
self.loop = loop
|
||||
self.config = config
|
||||
self.websession = aiohttp.ClientSession(loop=loop)
|
||||
self.updater = Updater(config, loop, self.websession)
|
||||
self.scheduler = Scheduler(loop)
|
||||
self.api = RestAPI(config, loop)
|
||||
self.hardware = Hardware()
|
||||
@@ -46,7 +48,7 @@ class HassIO(object):
|
||||
|
||||
# init homeassistant
|
||||
self.homeassistant = HomeAssistant(
|
||||
config, loop, self.dock, self.websession)
|
||||
config, loop, self.dock, self.updater)
|
||||
|
||||
# init HostControl
|
||||
self.host_control = HostControl(loop)
|
||||
@@ -87,7 +89,7 @@ class HassIO(object):
|
||||
self.api.register_network(self.host_control)
|
||||
self.api.register_supervisor(
|
||||
self.supervisor, self.snapshots, self.addons, self.host_control,
|
||||
self.websession)
|
||||
self.updater)
|
||||
self.api.register_homeassistant(self.homeassistant)
|
||||
self.api.register_addons(self.addons)
|
||||
self.api.register_security()
|
||||
@@ -113,7 +115,7 @@ class HassIO(object):
|
||||
|
||||
# schedule self update task
|
||||
self.scheduler.register_task(
|
||||
hassio_update(self.config, self.supervisor, self.websession),
|
||||
hassio_update(self.supervisor, self.updater),
|
||||
RUN_UPDATE_SUPERVISOR_TASKS)
|
||||
|
||||
# schedule snapshot update tasks
|
||||
@@ -128,7 +130,7 @@ class HassIO(object):
|
||||
# on release channel, try update itself
|
||||
# on beta channel, only read new versions
|
||||
await asyncio.wait(
|
||||
[hassio_update(self.config, self.supervisor, self.websession)()],
|
||||
[hassio_update(self.supervisor, self.updater)()],
|
||||
loop=self.loop
|
||||
)
|
||||
|
||||
@@ -136,15 +138,15 @@ class HassIO(object):
|
||||
await self.api.start()
|
||||
_LOGGER.info("Start hassio api on %s", self.config.api_endpoint)
|
||||
|
||||
# start addon mark as system
|
||||
await self.addons.auto_boot(STARTUP_SYSTEM)
|
||||
|
||||
try:
|
||||
# HomeAssistant is already running / supervisor have only reboot
|
||||
if await self.homeassistant.is_running():
|
||||
_LOGGER.info("HassIO reboot detected")
|
||||
return
|
||||
|
||||
# start addon mark as system
|
||||
await self.addons.auto_boot(STARTUP_SYSTEM)
|
||||
|
||||
# start addon mark as services
|
||||
await self.addons.auto_boot(STARTUP_SERVICES)
|
||||
|
||||
|
@@ -5,6 +5,7 @@ import logging
|
||||
|
||||
import docker
|
||||
|
||||
from .util import docker_process
|
||||
from ..const import LABEL_VERSION, LABEL_ARCH
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -52,14 +53,10 @@ class DockerBase(object):
|
||||
if need_arch and LABEL_ARCH in metadata['Config']['Labels']:
|
||||
self.arch = metadata['Config']['Labels'][LABEL_ARCH]
|
||||
|
||||
async def install(self, tag):
|
||||
@docker_process
|
||||
def install(self, tag):
|
||||
"""Pull docker image."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute install while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._install, tag)
|
||||
return self.loop.run_in_executor(None, self._install, tag)
|
||||
|
||||
def _install(self, tag):
|
||||
"""Pull docker image.
|
||||
@@ -80,10 +77,7 @@ class DockerBase(object):
|
||||
return True
|
||||
|
||||
def exists(self):
|
||||
"""Return True if docker image exists in local repo.
|
||||
|
||||
Return a Future.
|
||||
"""
|
||||
"""Return True if docker image exists in local repo."""
|
||||
return self.loop.run_in_executor(None, self._exists)
|
||||
|
||||
def _exists(self):
|
||||
@@ -126,14 +120,10 @@ class DockerBase(object):
|
||||
|
||||
return True
|
||||
|
||||
async def attach(self):
|
||||
@docker_process
|
||||
def attach(self):
|
||||
"""Attach to running docker container."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute attach while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._attach)
|
||||
return self.loop.run_in_executor(None, self._attach)
|
||||
|
||||
def _attach(self):
|
||||
"""Attach to running docker container.
|
||||
@@ -154,14 +144,10 @@ class DockerBase(object):
|
||||
|
||||
return True
|
||||
|
||||
async def run(self):
|
||||
@docker_process
|
||||
def run(self):
|
||||
"""Run docker image."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute run while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._run)
|
||||
return self.loop.run_in_executor(None, self._run)
|
||||
|
||||
def _run(self):
|
||||
"""Run docker image.
|
||||
@@ -170,15 +156,10 @@ class DockerBase(object):
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
async def stop(self):
|
||||
@docker_process
|
||||
def stop(self):
|
||||
"""Stop/remove docker container."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute stop while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
await self.loop.run_in_executor(None, self._stop)
|
||||
return True
|
||||
return self.loop.run_in_executor(None, self._stop)
|
||||
|
||||
def _stop(self):
|
||||
"""Stop/remove and remove docker container.
|
||||
@@ -188,7 +169,7 @@ class DockerBase(object):
|
||||
try:
|
||||
container = self.dock.containers.get(self.name)
|
||||
except docker.errors.DockerException:
|
||||
return
|
||||
return False
|
||||
|
||||
if container.status == 'running':
|
||||
_LOGGER.info("Stop %s docker application", self.image)
|
||||
@@ -199,14 +180,12 @@ class DockerBase(object):
|
||||
_LOGGER.info("Clean %s docker application", self.image)
|
||||
container.remove(force=True)
|
||||
|
||||
async def remove(self):
|
||||
"""Remove docker images."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute remove while a task is in progress")
|
||||
return False
|
||||
return True
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._remove)
|
||||
@docker_process
|
||||
def remove(self):
|
||||
"""Remove docker images."""
|
||||
return self.loop.run_in_executor(None, self._remove)
|
||||
|
||||
def _remove(self):
|
||||
"""remove docker images.
|
||||
@@ -235,24 +214,19 @@ class DockerBase(object):
|
||||
# clean metadata
|
||||
self.version = None
|
||||
self.arch = None
|
||||
|
||||
return True
|
||||
|
||||
async def update(self, tag):
|
||||
@docker_process
|
||||
def update(self, tag):
|
||||
"""Update a docker image."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute update while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._update, tag)
|
||||
return self.loop.run_in_executor(None, self._update, tag)
|
||||
|
||||
def _update(self, tag):
|
||||
"""Update a docker image.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
was_running = self._is_running()
|
||||
|
||||
_LOGGER.info(
|
||||
"Update docker %s with %s:%s", self.version, self.image, tag)
|
||||
|
||||
@@ -260,25 +234,16 @@ class DockerBase(object):
|
||||
if not self._install(tag):
|
||||
return False
|
||||
|
||||
# run or cleanup container
|
||||
if was_running:
|
||||
self._run()
|
||||
else:
|
||||
self._stop()
|
||||
|
||||
# cleanup images
|
||||
# stop container & cleanup
|
||||
self._stop()
|
||||
self._cleanup()
|
||||
|
||||
return True
|
||||
|
||||
async def logs(self):
|
||||
@docker_process
|
||||
def logs(self):
|
||||
"""Return docker logs of container."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute logs while a task is in progress")
|
||||
return b""
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._logs)
|
||||
return self.loop.run_in_executor(None, self._logs)
|
||||
|
||||
def _logs(self):
|
||||
"""Return docker logs of container.
|
||||
@@ -295,14 +260,10 @@ class DockerBase(object):
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.warning("Can't grap logs from %s -> %s", self.image, err)
|
||||
|
||||
async def restart(self):
|
||||
@docker_process
|
||||
def restart(self):
|
||||
"""Restart docker container."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute restart while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._restart)
|
||||
return self.loop.run_in_executor(None, self._restart)
|
||||
|
||||
def _restart(self):
|
||||
"""Restart docker container.
|
||||
@@ -324,14 +285,10 @@ class DockerBase(object):
|
||||
|
||||
return True
|
||||
|
||||
async def cleanup(self):
|
||||
@docker_process
|
||||
def cleanup(self):
|
||||
"""Check if old version exists and cleanup."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute cleanup while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
await self.loop.run_in_executor(None, self._cleanup)
|
||||
return self.loop.run_in_executor(None, self._cleanup)
|
||||
|
||||
def _cleanup(self):
|
||||
"""Check if old version exists and cleanup.
|
||||
@@ -342,7 +299,7 @@ class DockerBase(object):
|
||||
latest = self.dock.images.get(self.image)
|
||||
except docker.errors.DockerException:
|
||||
_LOGGER.warning("Can't find %s for cleanup", self.image)
|
||||
return
|
||||
return False
|
||||
|
||||
for image in self.dock.images.list(name=self.image):
|
||||
if latest.id == image.id:
|
||||
@@ -351,3 +308,17 @@ class DockerBase(object):
|
||||
with suppress(docker.errors.DockerException):
|
||||
_LOGGER.info("Cleanup docker images: %s", image.tags)
|
||||
self.dock.images.remove(image.id, force=True)
|
||||
|
||||
return True
|
||||
|
||||
@docker_process
|
||||
def execute_command(self, command):
|
||||
"""Create a temporary container and run command."""
|
||||
return self.loop.run_in_executor(None, self._execute_command, command)
|
||||
|
||||
def _execute_command(self, command):
|
||||
"""Create a temporary container and run command.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
@@ -7,12 +7,14 @@ import docker
|
||||
import requests
|
||||
|
||||
from . import DockerBase
|
||||
from .util import dockerfile_template
|
||||
from .util import dockerfile_template, docker_process
|
||||
from ..const import (
|
||||
META_ADDON, MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"
|
||||
|
||||
|
||||
class DockerAddon(DockerBase):
|
||||
"""Docker hassio wrapper for HomeAssistant."""
|
||||
@@ -28,16 +30,40 @@ class DockerAddon(DockerBase):
|
||||
"""Return name of docker container."""
|
||||
return "addon_{}".format(self.addon.slug)
|
||||
|
||||
@property
|
||||
def hostname(self):
|
||||
"""Return slug/id of addon."""
|
||||
return self.addon.slug.replace('_', '-')
|
||||
|
||||
@property
|
||||
def environment(self):
|
||||
"""Return environment for docker add-on."""
|
||||
addon_env = self.addon.environment or {}
|
||||
if self.addon.with_audio:
|
||||
addon_env.update({
|
||||
'ALSA_OUTPUT': self.addon.audio_output,
|
||||
'ALSA_INPUT': self.addon.audio_input,
|
||||
})
|
||||
|
||||
return {
|
||||
**addon_env,
|
||||
'TZ': self.config.timezone,
|
||||
}
|
||||
|
||||
@property
|
||||
def devices(self):
|
||||
"""Return needed devices."""
|
||||
devices = self.addon.devices or []
|
||||
|
||||
# use audio devices
|
||||
if self.addon.with_audio and AUDIO_DEVICE not in devices:
|
||||
devices.append(AUDIO_DEVICE)
|
||||
|
||||
# Return None if no devices is present
|
||||
if devices:
|
||||
return devices
|
||||
return None
|
||||
|
||||
@property
|
||||
def tmpfs(self):
|
||||
"""Return tmpfs for docker add-on."""
|
||||
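Putting the new `environment` and `devices` properties together: an add-on that declares audio support ends up with the ALSA device mapped into the container and the selected channels exported as environment variables, alongside the timezone. A condensed stand-alone version of that assembly (values are examples, not the project's real API):

```python
AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"


def addon_run_kwargs(with_audio, audio_input, audio_output, timezone,
                     extra_env=None, extra_devices=None):
    """Build the env/devices part of the docker run call for an add-on."""
    env = dict(extra_env or {})
    devices = list(extra_devices or [])

    if with_audio:
        env.update({'ALSA_INPUT': audio_input, 'ALSA_OUTPUT': audio_output})
        if AUDIO_DEVICE not in devices:
            devices.append(AUDIO_DEVICE)

    env['TZ'] = timezone
    return {'environment': env, 'devices': devices or None}


print(addon_run_kwargs(True, "0,0", "0,0", "UTC"))
# {'environment': {'ALSA_INPUT': '0,0', 'ALSA_OUTPUT': '0,0', 'TZ': 'UTC'},
#  'devices': ['/dev/snd:/dev/snd:rwm']}
```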
@@ -46,6 +72,16 @@ class DockerAddon(DockerBase):
|
||||
return {"/tmpfs": "{}".format(options)}
|
||||
return None
|
||||
|
||||
@property
|
||||
def mapping(self):
|
||||
"""Return hosts mapping."""
|
||||
if not self.addon.use_hassio_api:
|
||||
return None
|
||||
|
||||
return {
|
||||
'hassio': self.config.api_endpoint,
|
||||
}
|
||||
|
||||
@property
|
||||
def volumes(self):
|
||||
"""Generate volumes for mappings."""
|
||||
@@ -107,11 +143,12 @@ class DockerAddon(DockerBase):
|
||||
self.dock.containers.run(
|
||||
self.image,
|
||||
name=self.name,
|
||||
hostname=self.name,
|
||||
hostname=self.hostname,
|
||||
detach=True,
|
||||
network_mode=self.addon.network_mode,
|
||||
ports=self.addon.ports,
|
||||
devices=self.addon.devices,
|
||||
extra_hosts=self.mapping,
|
||||
devices=self.devices,
|
||||
cap_add=self.addon.privileged,
|
||||
environment=self.environment,
|
||||
volumes=self.volumes,
|
||||
@@ -181,15 +218,10 @@ class DockerAddon(DockerBase):
|
||||
finally:
|
||||
shutil.rmtree(str(build_dir), ignore_errors=True)
|
||||
|
||||
async def export_image(self, path):
|
||||
@docker_process
|
||||
def export_image(self, path):
|
||||
"""Export current images into a tar file."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute export while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(
|
||||
None, self._export_image, path)
|
||||
return self.loop.run_in_executor(None, self._export_image, path)
|
||||
|
||||
def _export_image(self, tar_file):
|
||||
"""Export current images into a tar file.
|
||||
@@ -213,15 +245,10 @@ class DockerAddon(DockerBase):
|
||||
_LOGGER.info("Export image %s to %s", self.image, tar_file)
|
||||
return True
|
||||
|
||||
async def import_image(self, path, tag):
|
||||
@docker_process
|
||||
def import_image(self, path, tag):
|
||||
"""Import a tar file as image."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute import while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(
|
||||
None, self._import_image, path, tag)
|
||||
return self.loop.run_in_executor(None, self._import_image, path, tag)
|
||||
|
||||
def _import_image(self, tar_file, tag):
|
||||
"""Import a tar file as image.
|
||||
|
@@ -1,4 +1,5 @@
|
||||
"""Init file for HassIO docker object."""
|
||||
from contextlib import suppress
|
||||
import logging
|
||||
|
||||
import docker
|
||||
@@ -66,7 +67,8 @@ class DockerHomeAssistant(DockerBase):
|
||||
{'bind': '/ssl', 'mode': 'ro'},
|
||||
str(self.config.path_extern_share):
|
||||
{'bind': '/share', 'mode': 'rw'},
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.error("Can't run %s -> %s", self.image, err)
|
||||
@@ -75,3 +77,41 @@ class DockerHomeAssistant(DockerBase):
|
||||
_LOGGER.info(
|
||||
"Start homeassistant %s with version %s", self.image, self.version)
|
||||
return True
|
||||
|
||||
def _execute_command(self, command):
|
||||
"""Create a temporary container and run command.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
_LOGGER.info("Run command '%s' on %s", command, self.image)
|
||||
try:
|
||||
container = self.dock.containers.run(
|
||||
self.image,
|
||||
command=command,
|
||||
detach=True,
|
||||
stdout=True,
|
||||
stderr=True,
|
||||
environment={
|
||||
'TZ': self.config.timezone,
|
||||
},
|
||||
volumes={
|
||||
str(self.config.path_extern_config):
|
||||
{'bind': '/config', 'mode': 'ro'},
|
||||
str(self.config.path_extern_ssl):
|
||||
{'bind': '/ssl', 'mode': 'ro'},
|
||||
}
|
||||
)
|
||||
|
||||
# wait until command is done
|
||||
exit_code = container.wait()
|
||||
output = container.logs()
|
||||
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.error("Can't execute command -> %s", err)
|
||||
return (None, b"")
|
||||
|
||||
# cleanup container
|
||||
with suppress(docker.errors.DockerException):
|
||||
container.remove(force=True)
|
||||
|
||||
return (exit_code, output)
|
||||
|
@@ -3,6 +3,7 @@ import logging
|
||||
import os
|
||||
|
||||
from . import DockerBase
|
||||
from .util import docker_process
|
||||
from ..const import RESTART_EXIT_CODE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -21,20 +22,16 @@ class DockerSupervisor(DockerBase):
|
||||
"""Return name of docker container."""
|
||||
return os.environ['SUPERVISOR_NAME']
|
||||
|
||||
@docker_process
|
||||
async def update(self, tag):
|
||||
"""Update a supervisor docker image."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute update while a task is in progress")
|
||||
return False
|
||||
|
||||
_LOGGER.info("Update supervisor docker to %s:%s", self.image, tag)
|
||||
|
||||
async with self._lock:
|
||||
if await self.loop.run_in_executor(None, self._install, tag):
|
||||
self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
|
||||
return True
|
||||
if await self.loop.run_in_executor(None, self._install, tag):
|
||||
self.loop.create_task(self.stop_callback(RESTART_EXIT_CODE))
|
||||
return True
|
||||
|
||||
return False
|
||||
return False
|
||||
|
||||
async def run(self):
|
||||
"""Run docker image."""
|
||||
|
@@ -1,8 +1,10 @@
|
||||
"""HassIO docker utilitys."""
|
||||
import logging
|
||||
import re
|
||||
|
||||
from ..const import ARCH_AARCH64, ARCH_ARMHF, ARCH_I386, ARCH_AMD64
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
HASSIO_BASE_IMAGE = {
|
||||
ARCH_ARMHF: "homeassistant/armhf-base:latest",
|
||||
@@ -40,3 +42,19 @@ def create_metadata(version, arch, meta_type):
    return ('LABEL io.hass.version="{}" '
            'io.hass.arch="{}" '
            'io.hass.type="{}"').format(version, arch, meta_type)


# pylint: disable=protected-access
def docker_process(method):
    """Wrap function with only run once."""
    async def wrap_api(api, *args, **kwargs):
        """Return api wrapper."""
        if api._lock.locked():
            _LOGGER.error(
                "Can't execute %s while a task is in progress", method.__name__)
            return False

        async with api._lock:
            return await method(api, *args, **kwargs)

    return wrap_api
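For illustration, the lock guard that `docker_process` adds can be reproduced in isolation. `FakeDocker` and its `update` method are made up; only the decorator logic mirrors the diff:

```python
"""Toy demonstration of the lock-guarding decorator pattern."""
import asyncio
import logging

logging.basicConfig(level=logging.INFO)
_LOGGER = logging.getLogger(__name__)


def docker_process(method):
    """Reject a call when another guarded task already holds the lock."""
    async def wrap_api(api, *args, **kwargs):
        if api._lock.locked():
            _LOGGER.error(
                "Can't execute %s while a task is in progress", method.__name__)
            return False
        async with api._lock:
            return await method(api, *args, **kwargs)
    return wrap_api


class FakeDocker:
    """Stand-in for a DockerBase object that owns an asyncio lock."""

    def __init__(self):
        self._lock = asyncio.Lock()

    @docker_process
    async def update(self, tag):
        await asyncio.sleep(0.1)  # pretend to pull an image
        return True


async def main():
    docker = FakeDocker()
    # The second call starts while the first still holds the lock.
    results = await asyncio.gather(docker.update("0.56"), docker.update("0.56"))
    print(results)  # [True, False]

asyncio.run(main())
```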
@@ -48,7 +48,7 @@ class Hardware(object):
        """Return all disk devices."""
        dev_list = set()
        for device in self.context.list_devices(subsystem='block'):
            if 'ID_VENDOR' in device:
            if device.device_node.startswith('/dev/sd'):
                dev_list.add(device.device_node)

        return list(dev_list)
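A hedged sketch of the same block-device scan using pyudev directly (Linux only, with `pyudev` installed; the output naturally depends on the machine):

```python
# Enumerate /dev/sd* block devices the way the Hardware helper does.
import pyudev

context = pyudev.Context()

dev_list = set()
for device in context.list_devices(subsystem='block'):
    # Keep only devices that expose a vendor ID and look like SCSI/USB disks.
    if 'ID_VENDOR' in device and device.device_node.startswith('/dev/sd'):
        dev_list.add(device.device_node)

print(sorted(dev_list))
```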
@@ -2,26 +2,29 @@
import asyncio
import logging
import os
import re

from .const import (
    FILE_HASSIO_HOMEASSISTANT, ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION,
    ATTR_VERSION)
from .dock.homeassistant import DockerHomeAssistant
from .tools import JsonConfig
from .tools import JsonConfig, convert_to_ascii
from .validate import SCHEMA_HASS_CONFIG

_LOGGER = logging.getLogger(__name__)

RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")


class HomeAssistant(JsonConfig):
    """Hass core object for handle it."""

    def __init__(self, config, loop, dock, websession):
    def __init__(self, config, loop, dock, updater):
        """Initialize hass object."""
        super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
        self.config = config
        self.loop = loop
        self.websession = websession
        self.updater = updater
        self.docker = DockerHomeAssistant(config, loop, dock, self)

    async def prepare(self):

@@ -45,7 +48,7 @@ class HomeAssistant(JsonConfig):
        """Return last available version of homeassistant."""
        if self.is_custom_image:
            return self._data.get(ATTR_LAST_VERSION)
        return self.config.last_homeassistant
        return self.updater.version_homeassistant

    @property
    def image(self):

@@ -101,7 +104,7 @@ class HomeAssistant(JsonConfig):
        while True:
            # read homeassistant tag and install it
            if not self.last_version:
                await self.config.fetch_update_infos(self.websession)
                await self.updater.fetch_data()

            tag = self.last_version
            if tag and await self.docker.install(tag):
@@ -113,13 +116,18 @@
        _LOGGER.info("HomeAssistant docker now installed")
        await self.docker.cleanup()

    def update(self, version=None):
        """Update HomeAssistant version.

        Return a coroutine.
        """
    async def update(self, version=None):
        """Update HomeAssistant version."""
        version = version or self.last_version
        return self.docker.update(version)

        if version == self.docker.version:
            _LOGGER.warning("Version %s is already installed", version)
            return False

        try:
            return await self.docker.update(version)
        finally:
            await self.docker.run()

    def run(self):
        """Run HomeAssistant docker.
@@ -160,3 +168,19 @@ class HomeAssistant(JsonConfig):
    def in_progress(self):
        """Return True if a task is in progress."""
        return self.docker.in_progress

    async def check_config(self):
        """Run homeassistant config check."""
        exit_code, log = await self.docker.execute_command(
            "python3 -m homeassistant -c /config --script check_config"
        )

        # if not valid
        if exit_code is None:
            return (False, "")

        # parse output
        log = convert_to_ascii(log)
        if exit_code != 0 or RE_YAML_ERROR.search(log):
            return (False, log)
        return (True, log)
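The `(valid, log)` tuple that `check_config` returns is easy to consume. The helper below is illustrative and is not the real API handler; it only mirrors the exit-code and YAML-error decision from the diff:

```python
# Sketch of summarizing a config-check result; field names are made up.
import re

RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")


def summarize_check(exit_code, log_text):
    """Mirror the decision in check_config: non-zero exit code or a YAML
    traceback in the log means the configuration is invalid."""
    if exit_code is None:
        return {"result": "error", "message": ""}
    if exit_code != 0 or RE_YAML_ERROR.search(log_text):
        return {"result": "error", "message": log_text}
    return {"result": "ok", "message": log_text}


print(summarize_check(0, "Config seems valid"))
print(summarize_check(1, "ERROR: homeassistant.util.yaml ..."))
```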
@@ -13,7 +13,7 @@ def api_sessions_cleanup(config):
        now = datetime.now()
        for session, until_valid in config.security_sessions.items():
            if now >= until_valid:
                config.security_sessions = (session, None)
                config.drop_security_session(session)

    return _api_sessions_cleanup
@@ -43,21 +43,21 @@ def addons_update(loop, addons):
    return _addons_update


def hassio_update(config, supervisor, websession):
def hassio_update(supervisor, updater):
    """Create scheduler task for update of supervisor hassio."""
    async def _hassio_update():
        """Check and run update of supervisor hassio."""
        await config.fetch_update_infos(websession)
        if config.last_hassio == supervisor.version:
        await updater.fetch_data()
        if updater.version_hassio == supervisor.version:
            return

        # don't perform a update on beta/dev channel
        if config.upstream_beta:
        if updater.beta_channel:
            _LOGGER.warning("Ignore Hass.IO update on beta upstream!")
            return

        _LOGGER.info("Found new HassIO version %s.", config.last_hassio)
        await supervisor.update(config.last_hassio)
        _LOGGER.info("Found new HassIO version %s.", updater.version_hassio)
        await supervisor.update(updater.version_hassio)

    return _hassio_update
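The factory-plus-closure pattern used by `hassio_update` can be demonstrated with stub objects standing in for the real Supervisor and Updater; the real code hands the returned coroutine function to its scheduler instead of calling it directly:

```python
"""Stub-based illustration of the scheduler-task factory pattern."""
import asyncio


class StubUpdater:
    version_hassio = "0.56"

    async def fetch_data(self):
        pass  # the real Updater fetches version.json here


class StubSupervisor:
    version = "0.55"

    async def update(self, version):
        print("would update supervisor to", version)


def hassio_update(supervisor, updater):
    """Create scheduler task for update of supervisor hassio."""
    async def _hassio_update():
        await updater.fetch_data()
        if updater.version_hassio == supervisor.version:
            return
        await supervisor.update(updater.version_hassio)
    return _hassio_update


async def main():
    task = hassio_update(StubSupervisor(), StubUpdater())
    await task()  # normally registered with the scheduler, not called directly

asyncio.run(main())
```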
@@ -1,39 +1,22 @@
"""Tools file for HassIO."""
import asyncio
from contextlib import suppress
from datetime import datetime
import json
import logging
import socket
import re

import aiohttp
import async_timeout
import pytz
import voluptuous as vol
from voluptuous.humanize import humanize_error

from .const import URL_HASSIO_VERSION, URL_HASSIO_VERSION_BETA

_LOGGER = logging.getLogger(__name__)

FREEGEOIP_URL = "https://freegeoip.io/json/"


async def fetch_last_versions(websession, beta=False):
    """Fetch current versions from github.

    Is a coroutine.
    """
    url = URL_HASSIO_VERSION_BETA if beta else URL_HASSIO_VERSION
    try:
        with async_timeout.timeout(10, loop=websession.loop):
            async with websession.get(url) as request:
                return await request.json(content_type=None)

    except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
        _LOGGER.warning("Can't fetch versions from %s! %s", url, err)

    except json.JSONDecodeError as err:
        _LOGGER.warning("Can't parse versions from %s! %s", url, err)
RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")


def get_local_ip(loop):
@@ -76,19 +59,6 @@ def read_json_file(jsonfile):
        return json.loads(cfile.read())


def validate_timezone(timezone):
    """Validate voluptuous timezone."""
    try:
        pytz.timezone(timezone)
    except pytz.exceptions.UnknownTimeZoneError:
        raise vol.Invalid(
            "Invalid time zone passed in. Valid options can be found here: "
            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
            from None

    return timezone


async def fetch_timezone(websession):
    """Read timezone from freegeoip."""
    data = {}
@@ -101,6 +71,11 @@ async def fetch_timezone(websession):
    return data.get('time_zone', 'UTC')


def convert_to_ascii(raw):
    """Convert binary to ascii and remove colors."""
    return RE_STRING.sub("", raw.decode())


class JsonConfig(object):
    """Hass core object for handle it."""
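A quick, self-contained demonstration of the ANSI-escape stripping that `convert_to_ascii` performs; the sample byte string is made up:

```python
# Strip terminal color codes from raw container log output.
import re

RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")


def convert_to_ascii(raw):
    """Convert binary to ascii and remove colors."""
    return RE_STRING.sub("", raw.decode())


colored = b"\x1b[31mERROR\x1b[0m: invalid config"
print(convert_to_ascii(colored))  # -> "ERROR: invalid config"
```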
@@ -140,3 +115,27 @@
            _LOGGER.error("Can't store config in %s", self._file)
            return False
        return True


class AsyncThrottle(object):
    """
    Decorator that prevents a function from being called more than once every
    time period.
    """
    def __init__(self, delta):
        """Initialize async throttle."""
        self.throttle_period = delta
        self.time_of_last_call = datetime.min

    def __call__(self, method):
        """Throttle function"""
        async def wrapper(*args, **kwargs):
            """Throttle function wrapper"""
            now = datetime.now()
            time_since_last_call = now - self.time_of_last_call

            if time_since_last_call > self.throttle_period:
                self.time_of_last_call = now
                return await method(*args, **kwargs)

        return wrapper
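A minimal usage example for `AsyncThrottle`: the second call inside the throttle period is skipped and implicitly returns `None`:

```python
"""Self-contained demo of the AsyncThrottle decorator shown above."""
import asyncio
from datetime import datetime, timedelta


class AsyncThrottle:
    """Prevent a coroutine from running more than once per time period."""

    def __init__(self, delta):
        self.throttle_period = delta
        self.time_of_last_call = datetime.min

    def __call__(self, method):
        async def wrapper(*args, **kwargs):
            now = datetime.now()
            if now - self.time_of_last_call > self.throttle_period:
                self.time_of_last_call = now
                return await method(*args, **kwargs)
        return wrapper


@AsyncThrottle(timedelta(seconds=60))
async def fetch_data():
    print("fetching...")
    return {"hassio": "0.56"}


async def main():
    print(await fetch_data())  # runs and returns the data
    print(await fetch_data())  # throttled -> None

asyncio.run(main())
```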
86
hassio/updater.py
Normal file
@@ -0,0 +1,86 @@
"""Fetch last versions from webserver."""
import asyncio
from datetime import timedelta
import json
import logging

import aiohttp
import async_timeout

from .const import (
    URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
    ATTR_BETA_CHANNEL)
from .tools import AsyncThrottle, JsonConfig
from .validate import SCHEMA_UPDATER_CONFIG

_LOGGER = logging.getLogger(__name__)


class Updater(JsonConfig):
    """Fetch last versions from version.json."""

    def __init__(self, config, loop, websession):
        """Initialize updater."""
        super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
        self.config = config
        self.loop = loop
        self.websession = websession

    @property
    def version_homeassistant(self):
        """Return last version of homeassistant."""
        return self._data.get(ATTR_HOMEASSISTANT)

    @property
    def version_hassio(self):
        """Return last version of hassio."""
        return self._data.get(ATTR_HASSIO)

    @property
    def upstream(self):
        """Return Upstream branch for version."""
        if self.beta_channel:
            return 'dev'
        return 'master'

    @property
    def beta_channel(self):
        """Return True if we run in beta upstream."""
        return self._data[ATTR_BETA_CHANNEL]

    @beta_channel.setter
    def beta_channel(self, value):
        """Set beta upstream mode."""
        self._data[ATTR_BETA_CHANNEL] = bool(value)
        self.save()

    @AsyncThrottle(timedelta(seconds=60))
    async def fetch_data(self):
        """Fetch current versions from github.

        Is a coroutine.
        """
        url = URL_HASSIO_VERSION.format(self.upstream)
        try:
            _LOGGER.info("Fetch update data from %s", url)
            with async_timeout.timeout(10, loop=self.loop):
                async with self.websession.get(url) as request:
                    data = await request.json(content_type=None)

        except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
            _LOGGER.warning("Can't fetch versions from %s -> %s", url, err)
            return

        except json.JSONDecodeError as err:
            _LOGGER.warning("Can't parse versions from %s -> %s", url, err)
            return

        # data valid?
        if not data:
            _LOGGER.warning("Invalid data from %s", url)
            return

        # update versions
        self._data[ATTR_HOMEASSISTANT] = data.get('homeassistant')
        self._data[ATTR_HASSIO] = data.get('hassio')
        self.save()
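A stand-alone sketch of the fetch that `Updater.fetch_data` performs. The URL is a placeholder rather than the real `URL_HASSIO_VERSION`, and a recent `async_timeout` is used as an async context manager without the `loop` argument:

```python
"""Fetch a version manifest with aiohttp, roughly like Updater.fetch_data."""
import asyncio
import json

import aiohttp
import async_timeout

VERSION_URL = "https://example.com/{}/version.json"  # placeholder URL


async def fetch_versions(channel="master"):
    url = VERSION_URL.format(channel)
    async with aiohttp.ClientSession() as session:
        try:
            async with async_timeout.timeout(10):
                async with session.get(url) as request:
                    return await request.json(content_type=None)
        except (aiohttp.ClientError, asyncio.TimeoutError) as err:
            print("Can't fetch versions from", url, "->", err)
        except json.JSONDecodeError as err:
            print("Can't parse versions from", url, "->", err)
    return None


# asyncio.run(fetch_versions())  # would return e.g. {"hassio": ..., "homeassistant": ...}
```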
@@ -1,11 +1,31 @@
"""Validate functions."""
import voluptuous as vol

from .const import ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION
import pytz

from .const import (
    ATTR_DEVICES, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_SESSIONS, ATTR_PASSWORD,
    ATTR_TOTP, ATTR_SECURITY, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
    ATTR_API_ENDPOINT, ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT,
    ATTR_AUDIO_INPUT, ATTR_HOMEASSISTANT, ATTR_HASSIO)


NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
HASS_DEVICES = [vol.Match(r"^[^/]*$")]
ALSA_CHANNEL = vol.Match(r"\d+,\d+")


def validate_timezone(timezone):
    """Validate voluptuous timezone."""
    try:
        pytz.timezone(timezone)
    except pytz.exceptions.UnknownTimeZoneError:
        raise vol.Invalid(
            "Invalid time zone passed in. Valid options can be found here: "
            "http://en.wikipedia.org/wiki/List_of_tz_database_time_zones") \
            from None

    return timezone


def convert_to_docker_ports(data):
@@ -40,3 +60,26 @@ SCHEMA_HASS_CONFIG = vol.Schema({
    vol.Inclusive(ATTR_IMAGE, 'custom_hass'): vol.Coerce(str),
    vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'): vol.Coerce(str),
})


# pylint: disable=no-value-for-parameter
SCHEMA_UPDATER_CONFIG = vol.Schema({
    vol.Optional(ATTR_BETA_CHANNEL, default=False): vol.Boolean(),
    vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
    vol.Optional(ATTR_HASSIO): vol.Coerce(str),
})


# pylint: disable=no-value-for-parameter
SCHEMA_HASSIO_CONFIG = vol.Schema({
    vol.Optional(ATTR_API_ENDPOINT): vol.Coerce(str),
    vol.Optional(ATTR_TIMEZONE, default='UTC'): validate_timezone,
    vol.Optional(ATTR_ADDONS_CUSTOM_LIST, default=[]): [vol.Url()],
    vol.Optional(ATTR_SECURITY, default=False): vol.Boolean(),
    vol.Optional(ATTR_TOTP): vol.Coerce(str),
    vol.Optional(ATTR_PASSWORD): vol.Coerce(str),
    vol.Optional(ATTR_SESSIONS, default={}):
        vol.Schema({vol.Coerce(str): vol.Coerce(str)}),
    vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_CHANNEL,
    vol.Optional(ATTR_AUDIO_INPUT): ALSA_CHANNEL,
}, extra=vol.REMOVE_EXTRA)
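To see the new validators in action, here is a small voluptuous schema combining `ALSA_CHANNEL` and `validate_timezone`; the key names are illustrative, not necessarily the supervisor's actual config keys:

```python
"""Sketch of validating audio/timezone options with voluptuous."""
import pytz
import voluptuous as vol

ALSA_CHANNEL = vol.Match(r"\d+,\d+")


def validate_timezone(timezone):
    try:
        pytz.timezone(timezone)
    except pytz.exceptions.UnknownTimeZoneError:
        raise vol.Invalid("Invalid time zone passed in.") from None
    return timezone


schema = vol.Schema({
    vol.Optional('timezone', default='UTC'): validate_timezone,
    vol.Optional('audio_output'): ALSA_CHANNEL,
})

print(schema({'timezone': 'Europe/Berlin', 'audio_output': '0,0'}))

try:
    schema({'audio_output': 'front'})   # not "card,device" -> rejected
except vol.Invalid as err:
    print("rejected:", err)
```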
@@ -1,6 +1,6 @@
{
    "hassio": "0.49",
    "homeassistant": "0.49.1",
    "hassio": "0.56",
    "homeassistant": "0.51.2",
    "resinos": "1.0",
    "resinhup": "0.3",
    "generic": "0.3",