Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-17 04:59:20 +00:00)
Compare commits
110 Commits
Commits in this comparison (SHA1):

a2d3ee0d67
d29fab69e8
6205f40298
6b169f3f17
0d4a5a7ffb
dac90d29dd
7e815633e7
f062f31ca2
1374f90433
b692b19a4d
92d5b14cf5
6a84829c16
7036ecbd0a
19b5059972
cebc377fa7
d36c3919d7
0684427373
8ff79e85bf
ee4b28a490
fddd5b8860
72279072ac
0b70448273
4eb24fcbc5
06edf59d14
36ca851bc2
a4e453bf83
d211eec66f
db8540d4ab
30e270e7c0
9734307551
c650f8d1e1
10005898f8
716389e0c1
658729feb5
ae7808eb2a
d8e0e9e0b0
a860a3c122
fe60d526b9
769904778f
a3a40c79d6
b44f613136
801be9c60b
b6db6a1287
4181174bcc
3be46e6011
98b93efc5c
6156019c2f
80d60148a9
8baf59a608
b546365aaa
0a68698912
45288a2491
f34a175e4f
6e7e145822
9abebe2d5d
b0c5884c3f
a79e6a8eea
c1f1aed9ca
65b0e17b5b
6947131b47
914dd53da0
58616ef686
563e0c1e0e
437070fd7a
baa9cf451c
c2918d4519
1efdcd4691
2a43087ed7
5716324934
ae267e0380
3918a2a228
e375fc36d3
f5e29b4651
524d875516
60bdc00ce9
073166190f
b80e4d7d70
cc434e27cf
8377e04b62
0a47fb9c83
a5d3c850e9
d6391f62be
c6f302e448
9706022c21
1d858f4920
e09ba30d46
38ec3d14ed
8ee9380cc7
6e74e4c008
5ebc58851b
16b09bbfc5
d4b5fc79f4
e51c044ccd
d3b1ba81f7
26f55f02c0
8050707ff9
46252030cf
681fa835ef
d6560eb976
3770b307af
0dacbb31be
bbdbd756a7
508e38e622
ffe45d0d02
9206d1acf8
da867ef8ef
4826201e51
463c97f9e7
3983928c6c
15e626027f
.gitattributes (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
+# Ignore version on merge
+version.json merge=ours
API.md (6 lines changed)
@@ -36,7 +36,7 @@ The addons from `addons` are only installed one.
     "version": "INSTALL_VERSION",
     "last_version": "LAST_VERSION",
     "arch": "armhf|aarch64|i386|amd64",
-    "beta_channel": "true|false",
+    "channel": "stable|beta|dev",
     "timezone": "TIMEZONE",
     "wait_boot": "int",
     "addons": [
@@ -72,7 +72,7 @@ Optional:
 
 ```json
 {
-    "beta_channel": "true|false",
+    "channel": "stable|beta|dev",
     "timezone": "TIMEZONE",
     "wait_boot": "int",
     "addons_repositories": [
@@ -427,6 +427,8 @@ Get all available addons.
     "host_ipc": "bool",
     "host_dbus": "bool",
     "privileged": ["NET_ADMIN", "SYS_ADMIN"],
+    "seccomp": "disable|default|profile",
+    "apparmor": "disable|default|profile",
     "devices": ["/dev/xy"],
     "auto_uart": "bool",
    "icon": "bool",
@@ -23,7 +23,9 @@ from ..const import (
     ATTR_STATE, ATTR_TIMEOUT, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_WEBUI,
     ATTR_HASSIO_API, ATTR_AUDIO, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
     ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY, ATTR_HOST_IPC,
-    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_DISCOVERY, ATTR_SERVICES)
+    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_DISCOVERY, ATTR_SERVICES,
+    ATTR_SECCOMP, ATTR_APPARMOR, SECURITY_PROFILE, SECURITY_DISABLE,
+    SECURITY_DEFAULT)
 from ..coresys import CoreSysAttributes
 from ..docker.addon import DockerAddon
 from ..utils.json import write_json_file, read_json_file
@@ -316,6 +318,24 @@ class Addon(CoreSysAttributes):
         """Return list of privilege."""
         return self._mesh.get(ATTR_PRIVILEGED)
 
+    @property
+    def seccomp(self):
+        """Return True if seccomp is enabled."""
+        if not self._mesh.get(ATTR_SECCOMP):
+            return SECURITY_DISABLE
+        elif self.path_seccomp.exists():
+            return SECURITY_PROFILE
+        return SECURITY_DEFAULT
+
+    @property
+    def apparmor(self):
+        """Return True if seccomp is enabled."""
+        if not self._mesh.get(ATTR_APPARMOR):
+            return SECURITY_DISABLE
+        elif self.path_apparmor.exists():
+            return SECURITY_PROFILE
+        return SECURITY_DEFAULT
+
     @property
     def legacy(self):
         """Return if the add-on don't support hass labels."""
@@ -474,6 +494,16 @@ class Addon(CoreSysAttributes):
         """Return path to addon changelog."""
         return Path(self.path_location, 'CHANGELOG.md')
 
+    @property
+    def path_seccomp(self):
+        """Return path to custom seccomp profile."""
+        return Path(self.path_location, 'seccomp.json')
+
+    @property
+    def path_apparmor(self):
+        """Return path to custom AppArmor profile."""
+        return Path(self.path_location, 'apparmor')
+
     def save_data(self):
         """Save data of addon."""
         self._addons.data.save_data()
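The two new properties above collapse an add-on's seccomp/AppArmor setting into one of three states: disabled in the add-on config, a custom profile shipped with the add-on, or the runtime default. A minimal standalone sketch of that resolution order, assuming only the constant names mirrored from const.py (the config flag and profile path here are illustrative, not the Supervisor API):

```python
from pathlib import Path

SECURITY_DISABLE = 'disable'
SECURITY_DEFAULT = 'default'
SECURITY_PROFILE = 'profile'


def resolve_security(config_enabled, profile_path):
    """Resolve a security option the same way the new properties do.

    config_enabled -- the boolean seccomp/apparmor flag from the add-on config
    profile_path   -- where a custom profile would live inside the add-on
    """
    if not config_enabled:        # author opted out entirely
        return SECURITY_DISABLE
    if profile_path.exists():     # add-on ships its own profile
        return SECURITY_PROFILE
    return SECURITY_DEFAULT       # fall back to the runtime default profile


# An add-on with seccomp enabled but no custom seccomp.json resolves to 'default'.
print(resolve_security(True, Path('/nonexistent/addon/seccomp.json')))
```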
@@ -55,8 +55,8 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
                 'io.hass.version': version,
                 'io.hass.arch': self._arch,
                 'io.hass.type': META_ADDON,
-                'io.hass.name': self.addon.name,
-                'io.hass.description': self.addon.description,
+                'io.hass.name': self._fix_label('name'),
+                'io.hass.description': self._fix_label('description'),
             },
             'buildargs': {
                 'BUILD_FROM': self.base_image,
@@ -70,3 +70,8 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
             args['labels']['io.hass.url'] = self.addon.url
 
         return args
+
+    def _fix_label(self, label_name):
+        """Remove characters they are not supported."""
+        label = getattr(self.addon, label_name, "")
+        return label.replace("'", "")
@@ -1,12 +1,12 @@
 {
     "local": {
-        "name": "Local Add-Ons",
+        "name": "Local add-ons",
         "url": "https://home-assistant.io/hassio",
         "maintainer": "you"
     },
     "core": {
-        "name": "Built-in Add-Ons",
+        "name": "Official add-ons",
         "url": "https://home-assistant.io/addons",
-        "maintainer": "Home Assistant authors"
+        "maintainer": "Home Assistant"
     }
 }
@@ -8,8 +8,9 @@ import shutil
 import git
 
 from .utils import get_hash_from_repository
-from ..const import URL_HASSIO_ADDONS
+from ..const import URL_HASSIO_ADDONS, ATTR_URL, ATTR_BRANCH
 from ..coresys import CoreSysAttributes
+from ..validate import RE_REPOSITORY
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -22,9 +23,20 @@ class GitRepo(CoreSysAttributes):
         self.coresys = coresys
         self.repo = None
         self.path = path
-        self.url = url
         self.lock = asyncio.Lock(loop=coresys.loop)
 
+        self._data = RE_REPOSITORY.match(url).groupdict()
+
+    @property
+    def url(self):
+        """Return repository URL."""
+        return self._data[ATTR_URL]
+
+    @property
+    def branch(self):
+        """Return repository branch."""
+        return self._data[ATTR_BRANCH]
+
     async def load(self):
         """Init git addon repo."""
         if not self.path.is_dir():
@@ -46,12 +58,20 @@
     async def clone(self):
         """Clone git addon repo."""
         async with self.lock:
+            git_args = {
+                attribute: value
+                for attribute, value in (
+                    ('recursive', True),
+                    ('branch', self.branch)
+                ) if value is not None
+            }
+
             try:
                 _LOGGER.info("Clone addon %s repository", self.url)
-                self.repo = await self._loop.run_in_executor(
-                    None, ft.partial(
-                        git.Repo.clone_from, self.url, str(self.path),
-                        recursive=True))
+                self.repo = await self._loop.run_in_executor(None, ft.partial(
+                    git.Repo.clone_from, self.url, str(self.path),
+                    **git_args
+                ))
 
             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
                     git.GitCommandError) as err:
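A repository string can now carry an optional branch suffix (`url#branch`); RE_REPOSITORY splits it into named groups, and the dict comprehension in `clone()` turns the result into keyword arguments for `git.Repo.clone_from`, dropping `branch` when it is absent. A small self-contained sketch of that parsing (the regex is the one added to validate.py; the example URLs are illustrative):

```python
import re

# Everything before '#' is the URL, an optional '#branch' suffix names the branch.
RE_REPOSITORY = re.compile(r"^(?P<url>[^#]+)(?:#(?P<branch>[\w\-]+))?$")


def clone_kwargs(repository):
    """Build the keyword arguments that would be passed to git.Repo.clone_from."""
    data = RE_REPOSITORY.match(repository).groupdict()
    return {
        attribute: value
        for attribute, value in (
            ('recursive', True),
            ('branch', data['branch']),
        ) if value is not None
    }


print(clone_kwargs("https://github.com/example/addons-repository"))
# {'recursive': True}
print(clone_kwargs("https://github.com/example/addons-repository#dev"))
# {'recursive': True, 'branch': 'dev'}
```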
@@ -17,7 +17,8 @@ from ..const import (
     ATTR_AUTO_UPDATE, ATTR_WEBUI, ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_HOST_IPC,
     ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API, ATTR_BUILD_FROM, ATTR_SQUASH,
     ATTR_ARGS, ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, ATTR_LEGACY,
-    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_SERVICES, ATTR_DISCOVERY)
+    ATTR_HOST_DBUS, ATTR_AUTO_UART, ATTR_SERVICES, ATTR_DISCOVERY,
+    ATTR_SECCOMP, ATTR_APPARMOR)
 from ..validate import NETWORK_PORT, DOCKER_PORTS, ALSA_CHANNEL
 
 _LOGGER = logging.getLogger(__name__)
@@ -60,6 +61,7 @@ PRIVILEGED_ALL = [
     "NET_ADMIN",
     "SYS_ADMIN",
     "SYS_RAWIO",
+    "IPC_LOCK",
     "SYS_TIME",
     "SYS_NICE"
 ]
@@ -106,6 +108,8 @@ SCHEMA_ADDON_CONFIG = vol.Schema({
     vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
     vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
     vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
+    vol.Optional(ATTR_SECCOMP, default=True): vol.Boolean(),
+    vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
     vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
     vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
     vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
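Both new options default to True, so existing add-ons keep seccomp and AppArmor enabled unless they opt out. A hedged sketch of how voluptuous applies those defaults (a tiny illustrative subset, not the full SCHEMA_ADDON_CONFIG):

```python
import voluptuous as vol

ATTR_SECCOMP = 'seccomp'
ATTR_APPARMOR = 'apparmor'

SCHEMA_SECURITY = vol.Schema({
    vol.Optional(ATTR_SECCOMP, default=True): vol.Boolean(),
    vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
}, extra=vol.REMOVE_EXTRA)

print(SCHEMA_SECURITY({}))                   # {'seccomp': True, 'apparmor': True}
print(SCHEMA_SECURITY({'apparmor': False}))  # {'seccomp': True, 'apparmor': False}
```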
@@ -13,7 +13,7 @@ from .proxy import APIProxy
 from .supervisor import APISupervisor
 from .snapshots import APISnapshots
 from .services import APIServices
-from .security import security_layer
+from .security import SecurityMiddleware
 from ..coresys import CoreSysAttributes
 
 _LOGGER = logging.getLogger(__name__)
@@ -25,16 +25,14 @@ class RestAPI(CoreSysAttributes):
     def __init__(self, coresys):
         """Initialize docker base wrapper."""
         self.coresys = coresys
+        self.security = SecurityMiddleware(coresys)
         self.webapp = web.Application(
-            middlewares=[security_layer], loop=self._loop)
+            middlewares=[self.security.token_validation], loop=self._loop)
 
         # service stuff
         self._handler = None
         self.server = None
 
-        # middleware
-        self.webapp['coresys'] = coresys
-
     async def load(self):
         """Register REST API Calls."""
         self._register_supervisor()
@@ -53,178 +51,163 @@ class RestAPI(CoreSysAttributes):
         api_host = APIHost()
         api_host.coresys = self.coresys
 
-        self.webapp.router.add_get('/host/info', api_host.info)
-        self.webapp.router.add_get('/host/hardware', api_host.hardware)
-        self.webapp.router.add_post('/host/reboot', api_host.reboot)
-        self.webapp.router.add_post('/host/shutdown', api_host.shutdown)
-        self.webapp.router.add_post('/host/update', api_host.update)
-        self.webapp.router.add_post('/host/options', api_host.options)
-        self.webapp.router.add_post('/host/reload', api_host.reload)
+        self.webapp.add_routes([
+            web.get('/host/info', api_host.info),
+            web.get('/host/hardware', api_host.hardware),
+            web.post('/host/reboot', api_host.reboot),
+            web.post('/host/shutdown', api_host.shutdown),
+            web.post('/host/update', api_host.update),
+            web.post('/host/options', api_host.options),
+            web.post('/host/reload', api_host.reload),
+        ])
 
     def _register_network(self):
         """Register network function."""
         api_net = APINetwork()
         api_net.coresys = self.coresys
 
-        self.webapp.router.add_get('/network/info', api_net.info)
-        self.webapp.router.add_post('/network/options', api_net.options)
+        self.webapp.add_routes([
+            web.get('/network/info', api_net.info),
+            web.post('/network/options', api_net.options),
+        ])
 
     def _register_supervisor(self):
         """Register supervisor function."""
         api_supervisor = APISupervisor()
         api_supervisor.coresys = self.coresys
 
-        self.webapp.router.add_get('/supervisor/ping', api_supervisor.ping)
-        self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
-        self.webapp.router.add_get('/supervisor/stats', api_supervisor.stats)
-        self.webapp.router.add_post(
-            '/supervisor/update', api_supervisor.update)
-        self.webapp.router.add_post(
-            '/supervisor/reload', api_supervisor.reload)
-        self.webapp.router.add_post(
-            '/supervisor/options', api_supervisor.options)
-        self.webapp.router.add_get('/supervisor/logs', api_supervisor.logs)
+        self.webapp.add_routes([
+            web.get('/supervisor/ping', api_supervisor.ping),
+            web.get('/supervisor/info', api_supervisor.info),
+            web.get('/supervisor/stats', api_supervisor.stats),
+            web.get('/supervisor/logs', api_supervisor.logs),
+            web.post('/supervisor/update', api_supervisor.update),
+            web.post('/supervisor/reload', api_supervisor.reload),
+            web.post('/supervisor/options', api_supervisor.options),
+        ])
 
     def _register_homeassistant(self):
         """Register homeassistant function."""
         api_hass = APIHomeAssistant()
         api_hass.coresys = self.coresys
 
-        self.webapp.router.add_get('/homeassistant/info', api_hass.info)
-        self.webapp.router.add_get('/homeassistant/logs', api_hass.logs)
-        self.webapp.router.add_get('/homeassistant/stats', api_hass.stats)
-        self.webapp.router.add_post('/homeassistant/options', api_hass.options)
-        self.webapp.router.add_post('/homeassistant/update', api_hass.update)
-        self.webapp.router.add_post('/homeassistant/restart', api_hass.restart)
-        self.webapp.router.add_post('/homeassistant/stop', api_hass.stop)
-        self.webapp.router.add_post('/homeassistant/start', api_hass.start)
-        self.webapp.router.add_post('/homeassistant/check', api_hass.check)
+        self.webapp.add_routes([
+            web.get('/homeassistant/info', api_hass.info),
+            web.get('/homeassistant/logs', api_hass.logs),
+            web.get('/homeassistant/stats', api_hass.stats),
+            web.post('/homeassistant/options', api_hass.options),
+            web.post('/homeassistant/update', api_hass.update),
+            web.post('/homeassistant/restart', api_hass.restart),
+            web.post('/homeassistant/stop', api_hass.stop),
+            web.post('/homeassistant/start', api_hass.start),
+            web.post('/homeassistant/check', api_hass.check),
+        ])
 
     def _register_proxy(self):
         """Register HomeAssistant API Proxy."""
         api_proxy = APIProxy()
         api_proxy.coresys = self.coresys
 
-        self.webapp.router.add_get(
-            '/homeassistant/api/websocket', api_proxy.websocket)
-        self.webapp.router.add_get(
-            '/homeassistant/websocket', api_proxy.websocket)
-        self.webapp.router.add_get(
-            '/homeassistant/api/stream', api_proxy.stream)
-        self.webapp.router.add_post(
-            '/homeassistant/api/{path:.+}', api_proxy.api)
-        self.webapp.router.add_get(
-            '/homeassistant/api/{path:.+}', api_proxy.api)
-        self.webapp.router.add_get(
-            '/homeassistant/api/', api_proxy.api)
+        self.webapp.add_routes([
+            web.get('/homeassistant/api/websocket', api_proxy.websocket),
+            web.get('/homeassistant/websocket', api_proxy.websocket),
+            web.get('/homeassistant/api/stream', api_proxy.stream),
+            web.post('/homeassistant/api/{path:.+}', api_proxy.api),
+            web.get('/homeassistant/api/{path:.+}', api_proxy.api),
+            web.get('/homeassistant/api/', api_proxy.api),
+        ])
 
     def _register_addons(self):
         """Register homeassistant function."""
         api_addons = APIAddons()
         api_addons.coresys = self.coresys
 
-        self.webapp.router.add_get('/addons', api_addons.list)
-        self.webapp.router.add_post('/addons/reload', api_addons.reload)
-        self.webapp.router.add_get('/addons/{addon}/info', api_addons.info)
-        self.webapp.router.add_post(
-            '/addons/{addon}/install', api_addons.install)
-        self.webapp.router.add_post(
-            '/addons/{addon}/uninstall', api_addons.uninstall)
-        self.webapp.router.add_post('/addons/{addon}/start', api_addons.start)
-        self.webapp.router.add_post('/addons/{addon}/stop', api_addons.stop)
-        self.webapp.router.add_post(
-            '/addons/{addon}/restart', api_addons.restart)
-        self.webapp.router.add_post(
-            '/addons/{addon}/update', api_addons.update)
-        self.webapp.router.add_post(
-            '/addons/{addon}/options', api_addons.options)
-        self.webapp.router.add_post(
-            '/addons/{addon}/rebuild', api_addons.rebuild)
-        self.webapp.router.add_get('/addons/{addon}/logs', api_addons.logs)
-        self.webapp.router.add_get('/addons/{addon}/icon', api_addons.icon)
-        self.webapp.router.add_get('/addons/{addon}/logo', api_addons.logo)
-        self.webapp.router.add_get(
-            '/addons/{addon}/changelog', api_addons.changelog)
-        self.webapp.router.add_post('/addons/{addon}/stdin', api_addons.stdin)
-        self.webapp.router.add_get('/addons/{addon}/stats', api_addons.stats)
+        self.webapp.add_routes([
+            web.get('/addons', api_addons.list),
+            web.post('/addons/reload', api_addons.reload),
+            web.get('/addons/{addon}/info', api_addons.info),
+            web.post('/addons/{addon}/install', api_addons.install),
+            web.post('/addons/{addon}/uninstall', api_addons.uninstall),
+            web.post('/addons/{addon}/start', api_addons.start),
+            web.post('/addons/{addon}/stop', api_addons.stop),
+            web.post('/addons/{addon}/restart', api_addons.restart),
+            web.post('/addons/{addon}/update', api_addons.update),
+            web.post('/addons/{addon}/options', api_addons.options),
+            web.post('/addons/{addon}/rebuild', api_addons.rebuild),
+            web.get('/addons/{addon}/logs', api_addons.logs),
+            web.get('/addons/{addon}/icon', api_addons.icon),
+            web.get('/addons/{addon}/logo', api_addons.logo),
+            web.get('/addons/{addon}/changelog', api_addons.changelog),
+            web.post('/addons/{addon}/stdin', api_addons.stdin),
+            web.get('/addons/{addon}/stats', api_addons.stats),
+        ])
 
     def _register_snapshots(self):
         """Register snapshots function."""
         api_snapshots = APISnapshots()
         api_snapshots.coresys = self.coresys
 
-        self.webapp.router.add_get('/snapshots', api_snapshots.list)
-        self.webapp.router.add_post('/snapshots/reload', api_snapshots.reload)
-
-        self.webapp.router.add_post(
-            '/snapshots/new/full', api_snapshots.snapshot_full)
-        self.webapp.router.add_post(
-            '/snapshots/new/partial', api_snapshots.snapshot_partial)
-        self.webapp.router.add_post(
-            '/snapshots/new/upload', api_snapshots.upload)
-
-        self.webapp.router.add_get(
-            '/snapshots/{snapshot}/info', api_snapshots.info)
-        self.webapp.router.add_post(
-            '/snapshots/{snapshot}/remove', api_snapshots.remove)
-        self.webapp.router.add_post(
-            '/snapshots/{snapshot}/restore/full', api_snapshots.restore_full)
-        self.webapp.router.add_post(
-            '/snapshots/{snapshot}/restore/partial',
-            api_snapshots.restore_partial)
-        self.webapp.router.add_get(
-            '/snapshots/{snapshot}/download',
-            api_snapshots.download)
+        self.webapp.add_routes([
+            web.get('/snapshots', api_snapshots.list),
+            web.post('/snapshots/reload', api_snapshots.reload),
+            web.post('/snapshots/new/full', api_snapshots.snapshot_full),
+            web.post('/snapshots/new/partial', api_snapshots.snapshot_partial),
+            web.post('/snapshots/new/upload', api_snapshots.upload),
+            web.get('/snapshots/{snapshot}/info', api_snapshots.info),
+            web.post('/snapshots/{snapshot}/remove', api_snapshots.remove),
+            web.post('/snapshots/{snapshot}/restore/full',
+                     api_snapshots.restore_full),
+            web.post('/snapshots/{snapshot}/restore/partial',
+                     api_snapshots.restore_partial),
+            web.get('/snapshots/{snapshot}/download', api_snapshots.download),
+        ])
 
     def _register_services(self):
         api_services = APIServices()
         api_services.coresys = self.coresys
 
-        self.webapp.router.add_get('/services', api_services.list)
-
-        self.webapp.router.add_get(
-            '/services/{service}', api_services.get_service)
-        self.webapp.router.add_post(
-            '/services/{service}', api_services.set_service)
-        self.webapp.router.add_delete(
-            '/services/{service}', api_services.del_service)
+        self.webapp.add_routes([
+            web.get('/services', api_services.list),
+            web.get('/services/{service}', api_services.get_service),
+            web.post('/services/{service}', api_services.set_service),
+            web.delete('/services/{service}', api_services.del_service),
+        ])
 
     def _register_discovery(self):
         api_discovery = APIDiscovery()
         api_discovery.coresys = self.coresys
 
-        self.webapp.router.add_get(
-            '/services/discovery', api_discovery.list)
-        self.webapp.router.add_get(
-            '/services/discovery/{uuid}', api_discovery.get_discovery)
-        self.webapp.router.add_delete(
-            '/services/discovery/{uuid}', api_discovery.del_discovery)
-        self.webapp.router.add_post(
-            '/services/discovery', api_discovery.set_discovery)
+        self.webapp.add_routes([
+            web.get('/services/discovery', api_discovery.list),
+            web.get('/services/discovery/{uuid}', api_discovery.get_discovery),
+            web.delete('/services/discovery/{uuid}',
+                       api_discovery.del_discovery),
+            web.post('/services/discovery', api_discovery.set_discovery),
+        ])
 
     def _register_panel(self):
         """Register panel for homeassistant."""
-        def create_panel_response(build_type):
+        def create_response(build_type):
             """Create a function to generate a response."""
             path = Path(__file__).parent.joinpath(
                 f"panel/{build_type}.html")
             return lambda request: web.FileResponse(path)
 
         # This route is for backwards compatibility with HA < 0.58
-        self.webapp.router.add_get(
-            '/panel', create_panel_response('hassio-main-es5'))
+        self.webapp.add_routes([
+            web.get('/panel', create_response('hassio-main-es5'))])
 
         # This route is for backwards compatibility with HA 0.58 - 0.61
-        self.webapp.router.add_get(
-            '/panel_es5', create_panel_response('hassio-main-es5'))
-        self.webapp.router.add_get(
-            '/panel_latest', create_panel_response('hassio-main-latest'))
+        self.webapp.add_routes([
+            web.get('/panel_es5', create_response('hassio-main-es5')),
+            web.get('/panel_latest', create_response('hassio-main-latest')),
+        ])
 
         # This route is for HA > 0.61
-        self.webapp.router.add_get(
-            '/app-es5/index.html', create_panel_response('index'))
-        self.webapp.router.add_get(
-            '/app-es5/hassio-app.html', create_panel_response('hassio-app'))
+        self.webapp.add_routes([
+            web.get('/app-es5/index.html', create_response('index')),
+            web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
+        ])
 
     async def start(self):
         """Run rest api webserver."""
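Route registration moves from one `router.add_get`/`add_post` call per endpoint to a single declarative `add_routes([...])` table per component, which aiohttp supports via `web.get`/`web.post` route definitions. A minimal sketch of the pattern with placeholder handlers and paths (not Supervisor endpoints):

```python
from aiohttp import web


async def ping(request):
    """Trivial GET handler."""
    return web.json_response({'result': 'ok'})


async def options(request):
    """Trivial POST handler that echoes the request body."""
    return web.json_response({'result': 'ok', 'data': await request.json()})


app = web.Application()

# One table instead of repeated app.router.add_get()/add_post() calls.
app.add_routes([
    web.get('/ping', ping),
    web.post('/options', options),
])

if __name__ == '__main__':
    web.run_app(app)
```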
@@ -17,7 +17,7 @@ from ..const import (
     ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
     ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
     ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
-    ATTR_DISCOVERY,
+    ATTR_DISCOVERY, ATTR_SECCOMP, ATTR_APPARMOR,
     CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT)
 from ..coresys import CoreSysAttributes
 from ..validate import DOCKER_PORTS
@@ -123,6 +123,8 @@ class APIAddons(CoreSysAttributes):
             ATTR_HOST_IPC: addon.host_ipc,
             ATTR_HOST_DBUS: addon.host_dbus,
             ATTR_PRIVILEGED: addon.privileged,
+            ATTR_SECCOMP: addon.seccomp,
+            ATTR_APPARMOR: addon.apparmor,
             ATTR_DEVICES: self._pretty_devices(addon),
             ATTR_ICON: addon.with_icon,
             ATTR_LOGO: addon.with_logo,
File diff suppressed because one or more lines are too long
Binary file not shown.
Binary file not shown.
@@ -83,7 +83,7 @@ class APIProxy(CoreSysAttributes):
                 if not data:
                     await response.write_eof()
                     break
-                response.write(data)
+                await response.write(data)
 
             except aiohttp.ClientError:
                 await response.write_eof()
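In aiohttp 3, `StreamResponse.write()` is a coroutine, so the proxy has to await it instead of calling it synchronously. A minimal streaming handler showing the awaited write (the chunk source is illustrative):

```python
from aiohttp import web


async def stream_numbers(request):
    """Stream a few chunks to the client, awaiting every write (aiohttp >= 3)."""
    response = web.StreamResponse()
    await response.prepare(request)
    for chunk in (b'1\n', b'2\n', b'3\n'):
        await response.write(chunk)  # bare response.write(chunk) was enough in aiohttp 2
    await response.write_eof()
    return response


app = web.Application()
app.add_routes([web.get('/numbers', stream_numbers)])
```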
@@ -6,6 +6,7 @@ from aiohttp.web import middleware
 from aiohttp.web_exceptions import HTTPUnauthorized
 
 from ..const import HEADER_TOKEN, REQUEST_FROM
+from ..coresys import CoreSysAttributes
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -16,35 +17,41 @@ NO_SECURITY_CHECK = set((
 ))
 
 
-@middleware
-async def security_layer(request, handler):
-    """Check security access of this layer."""
-    coresys = request.app['coresys']
-    hassio_token = request.headers.get(HEADER_TOKEN)
-
-    # Ignore security check
-    for rule in NO_SECURITY_CHECK:
-        if rule.match(request.path):
-            _LOGGER.debug("Passthrough %s", request.path)
-            return await handler(request)
-
-    # Need to be removed later
-    if not hassio_token:
-        _LOGGER.warning("Invalid token for access %s", request.path)
-        request[REQUEST_FROM] = 'UNKNOWN'
-        return await handler(request)
-
-    # Home-Assistant
-    if hassio_token == coresys.homeassistant.uuid:
-        _LOGGER.debug("%s access from Home-Assistant", request.path)
-        request[REQUEST_FROM] = 'homeassistant'
-        return await handler(request)
-
-    # Add-on
-    addon = coresys.addons.from_uuid(hassio_token)
-    if addon:
-        _LOGGER.info("%s access from %s", request.path, addon.slug)
-        request[REQUEST_FROM] = addon.slug
-        return await handler(request)
-
-    raise HTTPUnauthorized()
+class SecurityMiddleware(CoreSysAttributes):
+    """Security middleware functions."""
+
+    def __init__(self, coresys):
+        """Initialize security middleware."""
+        self.coresys = coresys
+
+    @middleware
+    async def token_validation(self, request, handler):
+        """Check security access of this layer."""
+        hassio_token = request.headers.get(HEADER_TOKEN)
+
+        # Ignore security check
+        for rule in NO_SECURITY_CHECK:
+            if rule.match(request.path):
+                _LOGGER.debug("Passthrough %s", request.path)
+                return await handler(request)
+
+        # Need to be removed later
+        if not hassio_token:
+            _LOGGER.warning("Invalid token for access %s", request.path)
+            request[REQUEST_FROM] = 'UNKNOWN'
+            return await handler(request)
+
+        # Home-Assistant
+        if hassio_token == self._homeassistant.uuid:
+            _LOGGER.debug("%s access from Home-Assistant", request.path)
+            request[REQUEST_FROM] = 'homeassistant'
+            return await handler(request)
+
+        # Add-on
+        addon = self._addons.from_uuid(hassio_token)
+        if addon:
+            _LOGGER.info("%s access from %s", request.path, addon.slug)
+            request[REQUEST_FROM] = addon.slug
+            return await handler(request)
+
+        raise HTTPUnauthorized()
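Turning the module-level `security_layer` function into a `SecurityMiddleware` class lets the middleware keep its dependencies on `self` instead of reading them from `request.app`, while `@middleware` still marks the bound method as a new-style aiohttp middleware. A small self-contained sketch of the same pattern (header name and token value are placeholders):

```python
from aiohttp import web
from aiohttp.web import middleware
from aiohttp.web_exceptions import HTTPUnauthorized


class TokenMiddleware:
    """Bound-method middleware that keeps its state on the instance."""

    def __init__(self, token):
        self.token = token

    @middleware
    async def token_validation(self, request, handler):
        """Reject requests that don't carry the expected token header."""
        if request.headers.get('X-Example-Key') != self.token:
            raise HTTPUnauthorized()
        return await handler(request)


security = TokenMiddleware('secret-token')
app = web.Application(middlewares=[security.token_validation])
```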
@@ -6,20 +6,19 @@ import voluptuous as vol
 
 from .utils import api_process, api_process_raw, api_validate
 from ..const import (
-    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_ARCH,
+    ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_ARCH,
     HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
     ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
     ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
     ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
     ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
 from ..coresys import CoreSysAttributes
-from ..validate import validate_timezone, WAIT_BOOT, REPOSITORIES
+from ..validate import validate_timezone, WAIT_BOOT, REPOSITORIES, CHANNELS
 
 _LOGGER = logging.getLogger(__name__)
 
 SCHEMA_OPTIONS = vol.Schema({
-    # pylint: disable=no-value-for-parameter
-    vol.Optional(ATTR_BETA_CHANNEL): vol.Boolean(),
+    vol.Optional(ATTR_CHANNEL): CHANNELS,
     vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
     vol.Optional(ATTR_TIMEZONE): validate_timezone,
     vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
@@ -59,7 +58,7 @@ class APISupervisor(CoreSysAttributes):
         return {
             ATTR_VERSION: HASSIO_VERSION,
             ATTR_LAST_VERSION: self._updater.version_hassio,
-            ATTR_BETA_CHANNEL: self._updater.beta_channel,
+            ATTR_CHANNEL: self._updater.channel,
             ATTR_ARCH: self._arch,
             ATTR_WAIT_BOOT: self._config.wait_boot,
             ATTR_TIMEZONE: self._config.timezone,
@@ -72,8 +71,8 @@ class APISupervisor(CoreSysAttributes):
         """Set supervisor options."""
         body = await api_validate(SCHEMA_OPTIONS, request)
 
-        if ATTR_BETA_CHANNEL in body:
-            self._updater.beta_channel = body[ATTR_BETA_CHANNEL]
+        if ATTR_CHANNEL in body:
+            self._updater.channel = body[ATTR_CHANNEL]
 
         if ATTR_TIMEZONE in body:
             self._config.timezone = body[ATTR_TIMEZONE]
@@ -2,7 +2,7 @@
 from pathlib import Path
 from ipaddress import ip_network
 
-HASSIO_VERSION = '0.93'
+HASSIO_VERSION = '0.100'
 
 URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
                       'hassio/{}/version.json')
@@ -70,7 +70,7 @@ ATTR_VERSION = 'version'
 ATTR_AUTO_UART = 'auto_uart'
 ATTR_LAST_BOOT = 'last_boot'
 ATTR_LAST_VERSION = 'last_version'
-ATTR_BETA_CHANNEL = 'beta_channel'
+ATTR_CHANNEL = 'channel'
 ATTR_NAME = 'name'
 ATTR_SLUG = 'slug'
 ATTR_DESCRIPTON = 'description'
@@ -158,6 +158,9 @@ ATTR_SERVICES = 'services'
 ATTR_DISCOVERY = 'discovery'
 ATTR_PROTECTED = 'protected'
 ATTR_CRYPTO = 'crypto'
+ATTR_BRANCH = 'branch'
+ATTR_SECCOMP = 'seccomp'
+ATTR_APPARMOR = 'apparmor'
 
 SERVICE_MQTT = 'mqtt'
 
@@ -185,6 +188,10 @@ ARCH_AARCH64 = 'aarch64'
 ARCH_AMD64 = 'amd64'
 ARCH_I386 = 'i386'
 
+CHANNEL_STABLE = 'stable'
+CHANNEL_BETA = 'beta'
+CHANNEL_DEV = 'dev'
+
 REPOSITORY_CORE = 'core'
 REPOSITORY_LOCAL = 'local'
 
@@ -197,3 +204,7 @@ SNAPSHOT_FULL = 'full'
 SNAPSHOT_PARTIAL = 'partial'
 
 CRYPTO_AES128 = 'aes128'
+
+SECURITY_PROFILE = 'profile'
+SECURITY_DEFAULT = 'default'
+SECURITY_DISABLE = 'disable'
@@ -56,12 +56,12 @@ class HassIO(CoreSysAttributes):
     async def start(self):
         """Start HassIO orchestration."""
         # on release channel, try update itself
-        # on beta channel, only read new versions
-        if not self._updater.beta_channel and self._supervisor.need_update:
+        # on dev mode, only read new versions
+        if not self._dev and self._supervisor.need_update:
             if await self._supervisor.update():
                 return
         else:
-            _LOGGER.info("Ignore Hass.io auto updates on beta mode")
+            _LOGGER.info("Ignore Hass.io auto updates on dev channel")
 
         # start api
         await self._api.start()
@@ -108,10 +108,10 @@ class HassIO(CoreSysAttributes):
         # don't process scheduler anymore
         self._scheduler.suspend = True
 
-        # process stop tasks
-        self._websession.close()
-        self._websession_ssl.close()
-
         # process async stop tasks
-        await asyncio.wait(
-            [self._api.stop(), self._dns.stop()], loop=self._loop)
+        await asyncio.wait([
+            self._api.stop(),
+            self._dns.stop(),
+            self._websession.close(),
+            self._websession_ssl.close()
+        ], loop=self._loop)
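The shutdown path changes because `ClientSession.close()` became a coroutine in aiohttp 3, so the sessions are awaited together with the other stop tasks instead of being closed synchronously. A rough sketch of the idea, using `asyncio.gather` because current Python no longer accepts bare coroutines in `asyncio.wait` (the stop functions are stand-ins):

```python
import asyncio
import aiohttp


async def shutdown():
    """Close several async resources concurrently."""
    websession = aiohttp.ClientSession()
    websession_ssl = aiohttp.ClientSession()

    async def stop_api():
        ...  # stand-in for the REST API stop coroutine

    async def stop_dns():
        ...  # stand-in for the DNS forwarder stop coroutine

    # ClientSession.close() is awaitable in aiohttp 3, so it can run
    # alongside the other stop tasks.
    await asyncio.gather(
        stop_api(),
        stop_dns(),
        websession.close(),
        websession_ssl.close(),
    )

asyncio.run(shutdown())
```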
@@ -2,6 +2,7 @@
 
 import aiohttp
 
+from .const import CHANNEL_DEV
 from .config import CoreConfig
 from .docker import DockerAPI
 from .misc.dns import DNSForward
@@ -49,6 +50,11 @@ class CoreSys(object):
             return self._supervisor.arch
         return None
 
+    @property
+    def dev(self):
+        """Return True if we run dev modus."""
+        return self._updater.channel == CHANNEL_DEV
+
     @property
     def loop(self):
         """Return loop object."""
@@ -6,11 +6,11 @@ import docker
 import requests
 
 from .interface import DockerInterface
-from .utils import docker_process
 from ..addons.build import AddonBuild
 from ..const import (
     MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE, ENV_TOKEN,
-    ENV_TIME)
+    ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
+from ..utils import process_lock
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -121,14 +121,21 @@ class DockerAddon(DockerInterface):
     @property
     def security_opt(self):
         """Controlling security opt."""
-        privileged = self.addon.privileged or []
+        security = []
 
-        # Disable AppArmor sinse it make troubles wit SYS_ADMIN
-        if 'SYS_ADMIN' in privileged:
-            return [
-                "apparmor:unconfined",
-            ]
-        return None
+        # AppArmor
+        if self.addon.apparmor == SECURITY_DISABLE:
+            security.append("apparmor:unconfined")
+        elif self.addon.apparmor == SECURITY_PROFILE:
+            security.append(f"apparmor={self.addon.slug}")
+
+        # Seccomp
+        if self.addon.seccomp == SECURITY_DISABLE:
+            security.append("seccomp=unconfined")
+        elif self.addon.seccomp == SECURITY_PROFILE:
+            security.append(f"seccomp={self.addon.path_seccomp}")
+
+        return security or None
 
     @property
     def tmpfs(self):
@@ -285,7 +292,7 @@ class DockerAddon(DockerInterface):
         _LOGGER.info("Build %s:%s done", self.image, tag)
         return True
 
-    @docker_process
+    @process_lock
     def export_image(self, path):
         """Export current images into a tar file."""
         return self._loop.run_in_executor(None, self._export_image, path)
@@ -313,7 +320,7 @@ class DockerAddon(DockerInterface):
         _LOGGER.info("Export image %s done", self.image)
         return True
 
-    @docker_process
+    @process_lock
     def import_image(self, path, tag):
         """Import a tar file as image."""
         return self._loop.run_in_executor(None, self._import_image, path, tag)
@@ -338,7 +345,7 @@ class DockerAddon(DockerInterface):
         self._cleanup()
         return True
 
-    @docker_process
+    @process_lock
     def write_stdin(self, data):
         """Write to add-on stdin."""
         return self._loop.run_in_executor(None, self._write_stdin, data)
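`security_opt` now emits regular Docker security options (AppArmor and seccomp can each be unconfined, a named profile, or left at the default) instead of only special-casing SYS_ADMIN. A hedged sketch of how such a list is consumed by the Docker SDK, assuming a local Docker daemon; the image and option values are illustrative:

```python
import docker

client = docker.from_env()

# What security_opt would return for an add-on that disables AppArmor
# but keeps the default seccomp profile.
security_opt = ["apparmor:unconfined"]

# The list is passed straight to the container run call; an empty list is
# normalized to None so Docker applies its defaults.
container = client.containers.run(
    "alpine:3.7",
    command="id",
    security_opt=security_opt or None,
    detach=True,
)
print(container.logs())
```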
@@ -5,10 +5,10 @@ import docker
 
 import docker
 
-from .utils import docker_process
 from .stats import DockerStats
 from ..const import LABEL_VERSION, LABEL_ARCH
 from ..coresys import CoreSysAttributes
+from ..utils import process_lock
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -20,7 +20,7 @@ class DockerInterface(CoreSysAttributes):
         """Initialize docker base wrapper."""
         self.coresys = coresys
         self._meta = None
-        self.lock = asyncio.Lock(loop=self._loop)
+        self.lock = asyncio.Lock(loop=coresys.loop)
 
     @property
     def timeout(self):
@@ -58,7 +58,7 @@ class DockerInterface(CoreSysAttributes):
         """Return True if a task is in progress."""
         return self.lock.locked()
 
-    @docker_process
+    @process_lock
     def install(self, tag):
         """Pull docker image."""
         return self._loop.run_in_executor(None, self._install, tag)
@@ -126,7 +126,7 @@ class DockerInterface(CoreSysAttributes):
 
         return True
 
-    @docker_process
+    @process_lock
     def attach(self):
         """Attach to running docker container."""
         return self._loop.run_in_executor(None, self._attach)
@@ -149,7 +149,7 @@ class DockerInterface(CoreSysAttributes):
 
         return True
 
-    @docker_process
+    @process_lock
     def run(self):
         """Run docker image."""
         return self._loop.run_in_executor(None, self._run)
@@ -161,7 +161,7 @@ class DockerInterface(CoreSysAttributes):
         """
         raise NotImplementedError()
 
-    @docker_process
+    @process_lock
     def stop(self):
         """Stop/remove docker container."""
         return self._loop.run_in_executor(None, self._stop)
@@ -187,7 +187,7 @@ class DockerInterface(CoreSysAttributes):
 
         return True
 
-    @docker_process
+    @process_lock
     def remove(self):
         """Remove docker images."""
         return self._loop.run_in_executor(None, self._remove)
@@ -219,7 +219,7 @@ class DockerInterface(CoreSysAttributes):
         self._meta = None
         return True
 
-    @docker_process
+    @process_lock
     def update(self, tag):
         """Update a docker image."""
         return self._loop.run_in_executor(None, self._update, tag)
@@ -264,32 +264,7 @@ class DockerInterface(CoreSysAttributes):
         except docker.errors.DockerException as err:
             _LOGGER.warning("Can't grap logs from %s: %s", self.image, err)
 
-    @docker_process
-    def restart(self):
-        """Restart docker container."""
-        return self._loop.run_in_executor(None, self._restart)
-
-    def _restart(self):
-        """Restart docker container.
-
-        Need run inside executor.
-        """
-        try:
-            container = self._docker.containers.get(self.name)
-        except docker.errors.DockerException:
-            return False
-
-        _LOGGER.info("Restart %s", self.image)
-
-        try:
-            container.restart(timeout=self.timeout)
-        except docker.errors.DockerException as err:
-            _LOGGER.warning("Can't restart %s: %s", self.image, err)
-            return False
-
-        return True
-
-    @docker_process
+    @process_lock
     def cleanup(self):
         """Check if old version exists and cleanup."""
         return self._loop.run_in_executor(None, self._cleanup)
@@ -315,7 +290,7 @@ class DockerInterface(CoreSysAttributes):
 
         return True
 
-    @docker_process
+    @process_lock
     def execute_command(self, command):
         """Create a temporary container and run command."""
         return self._loop.run_in_executor(None, self._execute_command, command)
@@ -1,20 +0,0 @@
-"""HassIO docker utilitys."""
-import logging
-
-_LOGGER = logging.getLogger(__name__)
-
-
-# pylint: disable=protected-access
-def docker_process(method):
-    """Wrap function with only run once."""
-    async def wrap_api(api, *args, **kwargs):
-        """Return api wrapper."""
-        if api.lock.locked():
-            _LOGGER.error(
-                "Can't excute %s while a task is in progress", method.__name__)
-            return False
-
-        async with api.lock:
-            return await method(api, *args, **kwargs)
-
-    return wrap_api
@@ -16,7 +16,7 @@ from .const import (
     ATTR_WAIT_BOOT, HEADER_HA_ACCESS, CONTENT_TYPE_JSON)
 from .coresys import CoreSysAttributes
 from .docker.homeassistant import DockerHomeAssistant
-from .utils import convert_to_ascii
+from .utils import convert_to_ascii, process_lock
 from .utils.json import JsonConfig
 from .validate import SCHEMA_HASS_CONFIG
 
@@ -35,6 +35,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
         self.coresys = coresys
         self.instance = DockerHomeAssistant(coresys)
+        self.lock = asyncio.Lock(loop=coresys.loop)
 
     async def load(self):
         """Prepare HomeAssistant object."""
@@ -162,6 +163,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         """Return a UUID of this HomeAssistant."""
         return self._data[ATTR_UUID]
 
+    @process_lock
     async def install_landingpage(self):
         """Install a landingpage."""
         _LOGGER.info("Setup HomeAssistant landingpage")
@@ -172,8 +174,9 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
             await asyncio.sleep(60, loop=self._loop)
 
         # Run landingpage after installation
-        await self.start()
+        await self._start()
 
+    @process_lock
     async def install(self):
         """Install a landingpage."""
         _LOGGER.info("Setup HomeAssistant")
@@ -191,9 +194,10 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         # finishing
         _LOGGER.info("HomeAssistant docker now installed")
         if self.boot:
-            await self.start()
+            await self._start()
         await self.instance.cleanup()
 
+    @process_lock
     async def update(self, version=None):
         """Update HomeAssistant version."""
         version = version or self.last_version
@@ -208,15 +212,23 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
             return await self.instance.update(version)
         finally:
             if running:
-                await self.start()
+                await self._start()
 
-    async def start(self):
-        """Run HomeAssistant docker."""
+    async def _start(self):
+        """Start HomeAssistant docker & wait."""
         if not await self.instance.run():
             return False
-
         return await self._block_till_run()
 
+    @process_lock
+    def start(self):
+        """Run HomeAssistant docker.
+
+        Return a coroutine.
+        """
+        return self._start()
+
+    @process_lock
     def stop(self):
         """Stop HomeAssistant docker.
 
@@ -224,12 +236,11 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         """
         return self.instance.stop()
 
+    @process_lock
     async def restart(self):
         """Restart HomeAssistant docker."""
-        if not await self.instance.restart():
-            return False
-
-        return await self._block_till_run()
+        await self.instance.stop()
+        return await self._start()
 
     def logs(self):
         """Get HomeAssistant docker logs.
@@ -262,7 +273,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
     @property
     def in_progress(self):
         """Return True if a task is in progress."""
-        return self.instance.in_progress
+        return self.instance.in_progress or self.lock.locked()
 
     async def check_config(self):
         """Run homeassistant config check."""
@@ -173,14 +173,17 @@ class SnapshotManager(CoreSysAttributes):
                 if addon and addon.is_installed:
                     addon_list.append(addon)
                     continue
-                _LOGGER.warning("Add-on %s not found", addon_slug)
+                _LOGGER.warning(
+                    "Add-on %s not found/installed", addon_slug)
 
-            _LOGGER.info("Snapshot %s store Add-ons", snapshot.slug)
-            await snapshot.store_addons(addon_list)
+            if addon_list:
+                _LOGGER.info("Snapshot %s store Add-ons", snapshot.slug)
+                await snapshot.store_addons(addon_list)
 
-            # snapshot folders
-            _LOGGER.info("Snapshot %s store folders", snapshot.slug)
-            await snapshot.store_folders(folders)
+            # Snapshot folders
+            if folders:
+                _LOGGER.info("Snapshot %s store folders", snapshot.slug)
+                await snapshot.store_folders(folders)
 
         except Exception:  # pylint: disable=broad-except
             _LOGGER.exception("Snapshot %s error", snapshot.slug)
@@ -151,7 +151,7 @@ class Snapshot(CoreSysAttributes):
 
     def _encrypt_data(self, data):
         """Make data secure."""
-        if not self._key:
+        if not self._key or data is None:
             return data
 
         return b64encode(
@@ -159,7 +159,7 @@ class Snapshot(CoreSysAttributes):
 
     def _decrypt_data(self, data):
         """Make data readable."""
-        if not self._key:
+        if not self._key or data is None:
             return data
 
         return Padding.unpad(
@@ -78,8 +78,8 @@ class Tasks(CoreSysAttributes):
             return
 
         # don't perform a update on beta/dev channel
-        if self._updater.beta_channel:
-            _LOGGER.warning("Ignore Hass.io update on beta upstream!")
+        if self._dev:
+            _LOGGER.warning("Ignore Hass.io update on dev channel!")
             return
 
         _LOGGER.info("Found new Hass.io version")
@@ -9,7 +9,7 @@ import async_timeout
 
 from .const import (
     URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
-    ATTR_BETA_CHANNEL)
+    ATTR_CHANNEL, CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV)
 from .coresys import CoreSysAttributes
 from .utils import AsyncThrottle
 from .utils.json import JsonConfig
@@ -17,6 +17,12 @@ from .validate import SCHEMA_UPDATER_CONFIG
 
 _LOGGER = logging.getLogger(__name__)
 
+CHANNEL_TO_BRANCH = {
+    CHANNEL_STABLE: 'master',
+    CHANNEL_BETA: 'rc',
+    CHANNEL_DEV: 'dev',
+}
+
 
 class Updater(JsonConfig, CoreSysAttributes):
     """Fetch last versions from version.json."""
@@ -44,21 +50,14 @@ class Updater(JsonConfig, CoreSysAttributes):
         return self._data.get(ATTR_HASSIO)
 
     @property
-    def upstream(self):
-        """Return Upstream branch for version."""
-        if self.beta_channel:
-            return 'dev'
-        return 'master'
+    def channel(self):
+        """Return upstream channel of hassio instance."""
+        return self._data[ATTR_CHANNEL]
 
-    @property
-    def beta_channel(self):
-        """Return True if we run in beta upstream."""
-        return self._data[ATTR_BETA_CHANNEL]
-
-    @beta_channel.setter
-    def beta_channel(self, value):
-        """Set beta upstream mode."""
-        self._data[ATTR_BETA_CHANNEL] = bool(value)
+    @channel.setter
+    def channel(self, value):
+        """Set upstream mode."""
+        self._data[ATTR_CHANNEL] = value
 
     @AsyncThrottle(timedelta(seconds=60))
     async def reload(self):
@@ -66,7 +65,7 @@ class Updater(JsonConfig, CoreSysAttributes):
 
         Is a coroutine.
         """
-        url = URL_HASSIO_VERSION.format(self.upstream)
+        url = URL_HASSIO_VERSION.format(CHANNEL_TO_BRANCH[self.channel])
         try:
             _LOGGER.info("Fetch update data from %s", url)
             with async_timeout.timeout(10, loop=self._loop):
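The single `beta_channel` boolean becomes a three-value channel that selects a branch of the version repository. A short sketch of how the update URL is derived (the mapping and URL template are the ones added in this diff):

```python
CHANNEL_TO_BRANCH = {
    'stable': 'master',
    'beta': 'rc',
    'dev': 'dev',
}

URL_HASSIO_VERSION = ('https://raw.githubusercontent.com/home-assistant/'
                      'hassio/{}/version.json')


def version_url(channel):
    """Return the version.json URL read by a given update channel."""
    return URL_HASSIO_VERSION.format(CHANNEL_TO_BRANCH[channel])


print(version_url('stable'))  # .../hassio/master/version.json
print(version_url('beta'))    # .../hassio/rc/version.json
```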
@@ -1,7 +1,9 @@
 """Tools file for HassIO."""
 from datetime import datetime
+import logging
 import re
 
+_LOGGER = logging.getLogger(__name__)
 RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")
 
 
@@ -10,6 +12,21 @@ def convert_to_ascii(raw):
     return RE_STRING.sub("", raw.decode())
 
 
+def process_lock(method):
+    """Wrap function with only run once."""
+    async def wrap_api(api, *args, **kwargs):
+        """Return api wrapper."""
+        if api.lock.locked():
+            _LOGGER.error(
+                "Can't excute %s while a task is in progress", method.__name__)
+            return False
+
+        async with api.lock:
+            return await method(api, *args, **kwargs)
+
+    return wrap_api
+
+
 class AsyncThrottle(object):
     """
     Decorator that prevents a function from being called more than once every
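`docker_process` is generalized into `process_lock` and moved into the shared utils module, so any object that exposes an asyncio `lock` attribute can guard its long-running coroutines with it. A self-contained sketch of the decorator in use (the Worker class is illustrative):

```python
import asyncio
import logging

_LOGGER = logging.getLogger(__name__)


def process_lock(method):
    """Allow only one guarded task per object at a time."""
    async def wrap_api(api, *args, **kwargs):
        if api.lock.locked():
            _LOGGER.error(
                "Can't execute %s while a task is in progress", method.__name__)
            return False
        async with api.lock:
            return await method(api, *args, **kwargs)
    return wrap_api


class Worker:
    """Illustrative object providing the `lock` attribute the decorator expects."""

    def __init__(self):
        self.lock = asyncio.Lock()

    @process_lock
    async def update(self):
        await asyncio.sleep(0.1)  # stand-in for a long Docker operation
        return True


async def main():
    worker = Worker()
    # The second call starts while the first still holds the lock, so it returns False.
    print(await asyncio.gather(worker.update(), worker.update()))  # [True, False]

asyncio.run(main())
```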
@@ -12,7 +12,7 @@ UTC = pytz.utc
 
 _LOGGER = logging.getLogger(__name__)
 
-FREEGEOIP_URL = "https://freegeoip.io/json/"
+FREEGEOIP_URL = "https://freegeoip.net/json/"
 
 # Copyright (c) Django Software Foundation and individual contributors.
 # All rights reserved.
@@ -1,23 +1,42 @@
 """Validate functions."""
 import uuid
+import re
 
 import voluptuous as vol
 import pytz
 
 from .const import (
-    ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_BETA_CHANNEL, ATTR_TIMEZONE,
+    ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_TIMEZONE,
     ATTR_ADDONS_CUSTOM_LIST, ATTR_AUDIO_OUTPUT, ATTR_AUDIO_INPUT,
     ATTR_PASSWORD, ATTR_HOMEASSISTANT, ATTR_HASSIO, ATTR_BOOT, ATTR_LAST_BOOT,
-    ATTR_SSL, ATTR_PORT, ATTR_WATCHDOG, ATTR_WAIT_BOOT, ATTR_UUID)
+    ATTR_SSL, ATTR_PORT, ATTR_WATCHDOG, ATTR_WAIT_BOOT, ATTR_UUID,
+    CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV)
 
 
+RE_REPOSITORY = re.compile(r"^(?P<url>[^#]+)(?:#(?P<branch>[\w\-]+))?$")
+
 NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
 ALSA_CHANNEL = vol.Match(r"\d+,\d+")
 WAIT_BOOT = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
 DOCKER_IMAGE = vol.Match(r"^[\w{}]+/[\-\w{}]+$")
+CHANNELS = vol.In([CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV])
 
+
+def validate_repository(repository):
+    """Validate a valide repository."""
+    data = RE_REPOSITORY.match(repository)
+    if not data:
+        raise vol.Invalid("No valid repository format!")
+
+    # Validate URL
+    # pylint: disable=no-value-for-parameter
+    vol.Url()(data.group('url'))
+
+    return repository
+
+
 # pylint: disable=no-value-for-parameter
-REPOSITORIES = vol.All([vol.Url()], vol.Unique())
+REPOSITORIES = vol.All([validate_repository], vol.Unique())
 
 
 def validate_timezone(timezone):
@@ -77,9 +96,8 @@ SCHEMA_HASS_CONFIG = vol.Schema({
 }, extra=vol.REMOVE_EXTRA)
 
 
-# pylint: disable=no-value-for-parameter
 SCHEMA_UPDATER_CONFIG = vol.Schema({
-    vol.Optional(ATTR_BETA_CHANNEL, default=False): vol.Boolean(),
+    vol.Optional(ATTR_CHANNEL, default=CHANNEL_STABLE): CHANNELS,
     vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
    vol.Optional(ATTR_HASSIO): vol.Coerce(str),
 }, extra=vol.REMOVE_EXTRA)
Submodule home-assistant-polymer updated: 5f5ac3834d...3b76238241
setup.py (6 lines changed)
@@ -40,9 +40,9 @@ setup(
     ],
     include_package_data=True,
     install_requires=[
-        'async_timeout==2.0.0',
-        'aiohttp==2.3.10',
-        'docker==3.1.0',
+        'async_timeout==2.0.1',
+        'aiohttp==3.1.2',
+        'docker==3.2.0',
         'colorlog==3.1.2',
         'voluptuous==0.11.1',
         'gitpython==2.1.8',
@@ -1,7 +1,7 @@
 {
-    "hassio": "0.93",
-    "homeassistant": "0.63.3",
-    "resinos": "1.1",
+    "hassio": "0.100",
+    "homeassistant": "0.66.1",
+    "resinos": "1.3",
     "resinhup": "0.3",
     "generic": "0.3",
     "cluster": "0.1"