Mirror of https://github.com/home-assistant/supervisor.git
Commit 28f295a1e2 ("Cleanup"), parent 55c2127baa
@@ -56,7 +56,7 @@ class AddonManager(CoreSysAttributes):

         # init hassio built-in repositories
         repositories = \
-            set(self._config.addons_repositories) | BUILTIN_REPOSITORIES
+            set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES

         # init custom repositories & load addons
         await self.load_repositories(repositories)
@@ -90,7 +90,7 @@ class AddonManager(CoreSysAttributes):

             # don't add built-in repository to config
             if url not in BUILTIN_REPOSITORIES:
-                self._config.add_addon_repository(url)
+                self.sys_config.add_addon_repository(url)

         tasks = [_add_repository(url) for url in new_rep - old_rep]
         if tasks:
@@ -99,7 +99,7 @@ class AddonManager(CoreSysAttributes):
         # del new repository
         for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
             self.repositories_obj.pop(url).remove()
-            self._config.drop_addon_repository(url)
+            self.sys_config.drop_addon_repository(url)

         # update data
         self.data.reload()
@@ -142,4 +142,4 @@ class AddonManager(CoreSysAttributes):
         _LOGGER.info("Startup %s run %d addons", stage, len(tasks))
         if tasks:
             await asyncio.wait(tasks)
-            await asyncio.sleep(self._config.wait_boot)
+            await asyncio.sleep(self.sys_config.wait_boot)
@@ -66,7 +66,7 @@ class Addon(CoreSysAttributes):
     @property
     def _data(self):
         """Return addons data storage."""
-        return self._addons.data
+        return self.sys_addons.data

     @property
     def is_installed(self):
@@ -376,7 +376,7 @@ class Addon(CoreSysAttributes):
         if self.is_installed and \
                 ATTR_AUDIO_OUTPUT in self._data.user[self._id]:
             return self._data.user[self._id][ATTR_AUDIO_OUTPUT]
-        return self._alsa.default.output
+        return self.sys_alsa.default.output

     @audio_output.setter
     def audio_output(self, value):
@@ -394,7 +394,7 @@ class Addon(CoreSysAttributes):

         if self.is_installed and ATTR_AUDIO_INPUT in self._data.user[self._id]:
             return self._data.user[self._id][ATTR_AUDIO_INPUT]
-        return self._alsa.default.input
+        return self.sys_alsa.default.input

     @audio_input.setter
     def audio_input(self, value):
@@ -436,11 +436,11 @@ class Addon(CoreSysAttributes):

         # Repository with dockerhub images
         if ATTR_IMAGE in addon_data:
-            return addon_data[ATTR_IMAGE].format(arch=self._arch)
+            return addon_data[ATTR_IMAGE].format(arch=self.sys_arch)

         # local build
         return "{}/{}-addon-{}".format(
-            addon_data[ATTR_REPOSITORY], self._arch,
+            addon_data[ATTR_REPOSITORY], self.sys_arch,
             addon_data[ATTR_SLUG])

     @property
@@ -461,12 +461,12 @@ class Addon(CoreSysAttributes):
     @property
     def path_data(self):
         """Return addon data path inside supervisor."""
-        return Path(self._config.path_addons_data, self._id)
+        return Path(self.sys_config.path_addons_data, self._id)

     @property
     def path_extern_data(self):
         """Return addon data path external for docker."""
-        return PurePath(self._config.path_extern_addons_data, self._id)
+        return PurePath(self.sys_config.path_extern_addons_data, self._id)

     @property
     def path_options(self):
@@ -506,16 +506,16 @@ class Addon(CoreSysAttributes):
     @property
     def path_asound(self):
         """Return path to asound config."""
-        return Path(self._config.path_tmp, f"{self.slug}_asound")
+        return Path(self.sys_config.path_tmp, f"{self.slug}_asound")

     @property
     def path_extern_asound(self):
         """Return path to asound config for docker."""
-        return Path(self._config.path_extern_tmp, f"{self.slug}_asound")
+        return Path(self.sys_config.path_extern_tmp, f"{self.slug}_asound")

     def save_data(self):
         """Save data of addon."""
-        self._addons.data.save_data()
+        self.sys_addons.data.save_data()

     def write_options(self):
         """Return True if addon options is written to data."""
@@ -537,7 +537,7 @@ class Addon(CoreSysAttributes):

     def write_asound(self):
         """Write asound config to file and return True on success."""
-        asound_config = self._alsa.asound(
+        asound_config = self.sys_alsa.asound(
             alsa_input=self.audio_input, alsa_output=self.audio_output)

         try:
@@ -590,9 +590,9 @@ class Addon(CoreSysAttributes):

     async def install(self):
         """Install a addon."""
-        if self._arch not in self.supported_arch:
+        if self.sys_arch not in self.supported_arch:
             _LOGGER.error(
-                "Addon %s not supported on %s", self._id, self._arch)
+                "Addon %s not supported on %s", self._id, self.sys_arch)
             return False

         if self.is_installed:
@@ -735,7 +735,7 @@ class Addon(CoreSysAttributes):
     @check_installed
     async def snapshot(self, tar_file):
         """Snapshot a state of a addon."""
-        with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
+        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
             # store local image
             if self.need_build and not await \
                     self.instance.export_image(Path(temp, "image.tar")):
@@ -764,7 +764,7 @@ class Addon(CoreSysAttributes):

             try:
                 _LOGGER.info("Build snapshot for addon %s", self._id)
-                await self._loop.run_in_executor(None, _write_tarfile)
+                await self.sys_run_in_executor(_write_tarfile)
             except (tarfile.TarError, OSError) as err:
                 _LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
                 return False
@@ -774,7 +774,7 @@ class Addon(CoreSysAttributes):

     async def restore(self, tar_file):
         """Restore a state of a addon."""
-        with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp:
+        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
             # extract snapshot
             def _extract_tarfile():
                 """Extract tar snapshot."""
@@ -782,7 +782,7 @@ class Addon(CoreSysAttributes):
                     snapshot.extractall(path=Path(temp))

             try:
-                await self._loop.run_in_executor(None, _extract_tarfile)
+                await self.sys_run_in_executor(_extract_tarfile)
             except tarfile.TarError as err:
                 _LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
                 return False
@@ -828,7 +828,7 @@ class Addon(CoreSysAttributes):

             try:
                 _LOGGER.info("Restore data for addon %s", self._id)
-                await self._loop.run_in_executor(None, _restore_data)
+                await self.sys_run_in_executor(_restore_data)
             except shutil.Error as err:
                 _LOGGER.error("Can't restore origin data: %s", err)
                 return False
@@ -25,13 +25,13 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
     @property
     def addon(self):
         """Return addon of build data."""
-        return self._addons.get(self._id)
+        return self.sys_addons.get(self._id)

     @property
     def base_image(self):
         """Base images for this addon."""
         return self._data[ATTR_BUILD_FROM].get(
-            self._arch, BASE_IMAGE[self._arch])
+            self.sys_arch, BASE_IMAGE[self.sys_arch])

     @property
     def squash(self):
@@ -53,7 +53,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
             'squash': self.squash,
             'labels': {
                 'io.hass.version': version,
-                'io.hass.arch': self._arch,
+                'io.hass.arch': self.sys_arch,
                 'io.hass.type': META_ADDON,
                 'io.hass.name': self._fix_label('name'),
                 'io.hass.description': self._fix_label('description'),
@@ -61,7 +61,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
             'buildargs': {
                 'BUILD_FROM': self.base_image,
                 'BUILD_VERSION': version,
-                'BUILD_ARCH': self._arch,
+                'BUILD_ARCH': self.sys_arch,
                 **self.additional_args,
             }
         }
@@ -56,17 +56,17 @@ class AddonsData(JsonConfig, CoreSysAttributes):

         # read core repository
         self._read_addons_folder(
-            self._config.path_addons_core, REPOSITORY_CORE)
+            self.sys_config.path_addons_core, REPOSITORY_CORE)

         # read local repository
         self._read_addons_folder(
-            self._config.path_addons_local, REPOSITORY_LOCAL)
+            self.sys_config.path_addons_local, REPOSITORY_LOCAL)

         # add built-in repositories information
         self._set_builtin_repositories()

         # read custom git repositories
-        for repository_element in self._config.path_addons_git.iterdir():
+        for repository_element in self.sys_config.path_addons_git.iterdir():
             if repository_element.is_dir():
                 self._read_git_repository(repository_element)

@@ -45,7 +45,7 @@ class GitRepo(CoreSysAttributes):
         async with self.lock:
             try:
                 _LOGGER.info("Load addon %s repository", self.path)
-                self.repo = await self._loop.run_in_executor(
+                self.repo = await self.sys_loop.run_in_executor(
                     None, git.Repo, str(self.path))

             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
@@ -68,7 +68,7 @@ class GitRepo(CoreSysAttributes):

             try:
                 _LOGGER.info("Clone addon %s repository", self.url)
-                self.repo = await self._loop.run_in_executor(None, ft.partial(
+                self.repo = await self.sys_run_in_executor(ft.partial(
                     git.Repo.clone_from, self.url, str(self.path),
                     **git_args
                 ))
@@ -89,7 +89,7 @@ class GitRepo(CoreSysAttributes):
         async with self.lock:
             try:
                 _LOGGER.info("Pull addon %s repository", self.url)
-                await self._loop.run_in_executor(
+                await self.sys_loop.run_in_executor(
                     None, self.repo.remotes.origin.pull)

             except (git.InvalidGitRepositoryError, git.NoSuchPathError,
@@ -30,7 +30,7 @@ class Repository(CoreSysAttributes):
     @property
     def _mesh(self):
         """Return data struct repository."""
-        return self._addons.data.repositories.get(self._id, {})
+        return self.sys_addons.data.repositories.get(self._id, {})

     @property
     def slug(self):
@@ -224,7 +224,7 @@ class RestAPI(CoreSysAttributes):
         self._handler = self.webapp.make_handler()

         try:
-            self.server = await self._loop.create_server(
+            self.server = await self.sys_loop.create_server(
                 self._handler, "0.0.0.0", "80")
         except OSError as err:
             _LOGGER.fatal(
@@ -43,7 +43,7 @@ class APIAddons(CoreSysAttributes):

     def _extract_addon(self, request, check_installed=True):
         """Return addon and if not exists trow a exception."""
-        addon = self._addons.get(request.match_info.get('addon'))
+        addon = self.sys_addons.get(request.match_info.get('addon'))
         if not addon:
             raise RuntimeError("Addon not exists")

@@ -64,7 +64,7 @@ class APIAddons(CoreSysAttributes):
     async def list(self, request):
         """Return all addons / repositories ."""
         data_addons = []
-        for addon in self._addons.list_addons:
+        for addon in self.sys_addons.list_addons:
             data_addons.append({
                 ATTR_NAME: addon.name,
                 ATTR_SLUG: addon.slug,
@@ -81,7 +81,7 @@ class APIAddons(CoreSysAttributes):
             })

         data_repositories = []
-        for repository in self._addons.list_repositories:
+        for repository in self.sys_addons.list_repositories:
             data_repositories.append({
                 ATTR_SLUG: repository.slug,
                 ATTR_NAME: repository.name,
@@ -98,7 +98,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def reload(self, request):
         """Reload all addons data."""
-        await asyncio.shield(self._addons.reload())
+        await asyncio.shield(self.sys_addons.reload())
         return True

     @api_process
@@ -21,7 +21,7 @@ class APIDiscovery(CoreSysAttributes):

     def _extract_message(self, request):
         """Extract discovery message from URL."""
-        message = self._services.discovery.get(request.match_info.get('uuid'))
+        message = self.sys_discovery.get(request.match_info.get('uuid'))
         if not message:
             raise RuntimeError("Discovery message not found")
         return message
@@ -30,7 +30,7 @@ class APIDiscovery(CoreSysAttributes):
     async def list(self, request):
         """Show register services."""
         discovery = []
-        for message in self._services.discovery.list_messages:
+        for message in self.sys_discovery.list_messages:
             discovery.append({
                 ATTR_PROVIDER: message.provider,
                 ATTR_UUID: message.uuid,
@@ -45,7 +45,7 @@ class APIDiscovery(CoreSysAttributes):
     async def set_discovery(self, request):
         """Write data into a discovery pipeline."""
         body = await api_validate(SCHEMA_DISCOVERY, request)
-        message = self._services.discovery.send(
+        message = self.sys_discovery.send(
             provider=request[REQUEST_FROM], **body)

         return {ATTR_UUID: message.uuid}
@@ -68,5 +68,5 @@ class APIDiscovery(CoreSysAttributes):
         """Delete data into a discovery message."""
         message = self._extract_message(request)

-        self._services.discovery.remove(message)
+        self.sys_discovery.remove(message)
         return True
@@ -16,11 +16,11 @@ class APIHardware(CoreSysAttributes):
     async def info(self, request):
         """Show hardware info."""
         return {
-            ATTR_SERIAL: list(self._hardware.serial_devices),
-            ATTR_INPUT: list(self._hardware.input_devices),
-            ATTR_DISK: list(self._hardware.disk_devices),
-            ATTR_GPIO: list(self._hardware.gpio_devices),
-            ATTR_AUDIO: self._hardware.audio_devices,
+            ATTR_SERIAL: list(self.sys_hardware.serial_devices),
+            ATTR_INPUT: list(self.sys_hardware.input_devices),
+            ATTR_DISK: list(self.sys_hardware.disk_devices),
+            ATTR_GPIO: list(self.sys_hardware.gpio_devices),
+            ATTR_AUDIO: self.sys_hardware.audio_devices,
         }

     @api_process
@@ -28,7 +28,7 @@ class APIHardware(CoreSysAttributes):
         """Show ALSA audio devices."""
         return {
             ATTR_AUDIO: {
-                ATTR_INPUT: self._alsa.input_devices,
-                ATTR_OUTPUT: self._alsa.output_devices,
+                ATTR_INPUT: self.sys_alsa.input_devices,
+                ATTR_OUTPUT: self.sys_alsa.output_devices,
             }
         }
@@ -43,15 +43,15 @@ class APIHomeAssistant(CoreSysAttributes):
     async def info(self, request):
         """Return host information."""
         return {
-            ATTR_VERSION: self._homeassistant.version,
-            ATTR_LAST_VERSION: self._homeassistant.last_version,
-            ATTR_IMAGE: self._homeassistant.image,
-            ATTR_CUSTOM: self._homeassistant.is_custom_image,
-            ATTR_BOOT: self._homeassistant.boot,
-            ATTR_PORT: self._homeassistant.api_port,
-            ATTR_SSL: self._homeassistant.api_ssl,
-            ATTR_WATCHDOG: self._homeassistant.watchdog,
-            ATTR_WAIT_BOOT: self._homeassistant.wait_boot,
+            ATTR_VERSION: self.sys_homeassistant.version,
+            ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
+            ATTR_IMAGE: self.sys_homeassistant.image,
+            ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
+            ATTR_BOOT: self.sys_homeassistant.boot,
+            ATTR_PORT: self.sys_homeassistant.api_port,
+            ATTR_SSL: self.sys_homeassistant.api_ssl,
+            ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
+            ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
         }

     @api_process
@@ -60,34 +60,34 @@ class APIHomeAssistant(CoreSysAttributes):
         body = await api_validate(SCHEMA_OPTIONS, request)

         if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
-            self._homeassistant.image = body[ATTR_IMAGE]
-            self._homeassistant.last_version = body[ATTR_LAST_VERSION]
+            self.sys_homeassistant.image = body[ATTR_IMAGE]
+            self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION]

         if ATTR_BOOT in body:
-            self._homeassistant.boot = body[ATTR_BOOT]
+            self.sys_homeassistant.boot = body[ATTR_BOOT]

         if ATTR_PORT in body:
-            self._homeassistant.api_port = body[ATTR_PORT]
+            self.sys_homeassistant.api_port = body[ATTR_PORT]

         if ATTR_PASSWORD in body:
-            self._homeassistant.api_password = body[ATTR_PASSWORD]
+            self.sys_homeassistant.api_password = body[ATTR_PASSWORD]

         if ATTR_SSL in body:
-            self._homeassistant.api_ssl = body[ATTR_SSL]
+            self.sys_homeassistant.api_ssl = body[ATTR_SSL]

         if ATTR_WATCHDOG in body:
-            self._homeassistant.watchdog = body[ATTR_WATCHDOG]
+            self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]

         if ATTR_WAIT_BOOT in body:
-            self._homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
+            self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]

-        self._homeassistant.save_data()
+        self.sys_homeassistant.save_data()
         return True

     @api_process
     async def stats(self, request):
         """Return resource information."""
-        stats = await self._homeassistant.stats()
+        stats = await self.sys_homeassistant.stats()
         if not stats:
             raise RuntimeError("No stats available")

@@ -105,38 +105,38 @@ class APIHomeAssistant(CoreSysAttributes):
     async def update(self, request):
         """Update homeassistant."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self._homeassistant.last_version)
+        version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version)

-        if version == self._homeassistant.version:
+        if version == self.sys_homeassistant.version:
             raise RuntimeError("Version {} is already in use".format(version))

         return await asyncio.shield(
-            self._homeassistant.update(version))
+            self.sys_homeassistant.update(version))

     @api_process
     def stop(self, request):
         """Stop homeassistant."""
-        return asyncio.shield(self._homeassistant.stop())
+        return asyncio.shield(self.sys_homeassistant.stop())

     @api_process
     def start(self, request):
         """Start homeassistant."""
-        return asyncio.shield(self._homeassistant.start())
+        return asyncio.shield(self.sys_homeassistant.start())

     @api_process
     def restart(self, request):
         """Restart homeassistant."""
-        return asyncio.shield(self._homeassistant.restart())
+        return asyncio.shield(self.sys_homeassistant.restart())

     @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
         """Return homeassistant docker logs."""
-        return self._homeassistant.logs()
+        return self.sys_homeassistant.logs()

     @api_process
     async def check(self, request):
         """Check config of homeassistant."""
-        result = await self._homeassistant.check_config()
+        result = await self.sys_homeassistant.check_config()
         if not result.valid:
             raise RuntimeError(result.log)

@@ -20,7 +20,7 @@ class APIProxy(CoreSysAttributes):
     def _check_access(self, request):
         """Check the Hass.io token."""
         hassio_token = request.headers.get(HEADER_HA_ACCESS)
-        addon = self._addons.from_uuid(hassio_token)
+        addon = self.sys_addons.from_uuid(hassio_token)

         if not addon:
             _LOGGER.warning("Unknown Home-Assistant API access!")
@@ -29,7 +29,7 @@ class APIProxy(CoreSysAttributes):

     async def _api_client(self, request, path, timeout=300):
         """Return a client request with proxy origin for Home-Assistant."""
-        url = f"{self._homeassistant.api_url}/api/{path}"
+        url = f"{self.sys_homeassistant.api_url}/api/{path}"

         try:
             data = None
@@ -45,8 +45,10 @@ class APIProxy(CoreSysAttributes):
                 headers.update({CONTENT_TYPE: request.content_type})

             # need api password?
-            if self._homeassistant.api_password:
-                headers = {HEADER_HA_ACCESS: self._homeassistant.api_password}
+            if self.sys_homeassistant.api_password:
+                headers = {
+                    HEADER_HA_ACCESS: self.sys_homeassistant.api_password,
+                }

             # reset headers
             if not headers:
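The hunk above only reshapes how the proxy builds its headers dict. For orientation, here is a hedged, self-contained sketch (assumed names, not code from this commit; the value of HEADER_HA_ACCESS is an assumption) of sending one proxied request with the optional API-password header:

import aiohttp

HEADER_HA_ACCESS = 'X-HA-Access'  # assumption: header constant used in the hunk above


async def forward_request(session: aiohttp.ClientSession, api_url, path,
                          method='GET', data=None, api_password=None):
    """Send one proxied request to the Home Assistant REST API."""
    headers = {}

    # need api password?
    if api_password:
        headers = {
            HEADER_HA_ACCESS: api_password,
        }

    # forward the call and hand back status plus raw body
    async with session.request(
            method, f"{api_url}/api/{path}",
            data=data, headers=headers) as resp:
        return resp.status, await resp.read()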
@@ -114,10 +116,10 @@ class APIProxy(CoreSysAttributes):

     async def _websocket_client(self):
         """Initialize a websocket api connection."""
-        url = f"{self._homeassistant.api_url}/api/websocket"
+        url = f"{self.sys_homeassistant.api_url}/api/websocket"

         try:
-            client = await self._websession_ssl.ws_connect(
+            client = await self.sys_websession_ssl.ws_connect(
                 url, heartbeat=60, verify_ssl=False)

             # handle authentication
@@ -128,7 +130,7 @@ class APIProxy(CoreSysAttributes):
                 elif data.get('type') == 'auth_required':
                     await client.send_json({
                         'type': 'auth',
-                        'api_password': self._homeassistant.api_password,
+                        'api_password': self.sys_homeassistant.api_password,
                     })

             _LOGGER.error("Authentication to Home-Assistant websocket")
@@ -150,13 +152,13 @@ class APIProxy(CoreSysAttributes):
         try:
             await server.send_json({
                 'type': 'auth_required',
-                'ha_version': self._homeassistant.version,
+                'ha_version': self.sys_homeassistant.version,
             })

             # Check API access
             response = await server.receive_json()
             hassio_token = response.get('api_password')
-            addon = self._addons.from_uuid(hassio_token)
+            addon = self.sys_addons.from_uuid(hassio_token)

             if not addon:
                 _LOGGER.warning("Unauthorized websocket access!")
@@ -165,7 +167,7 @@ class APIProxy(CoreSysAttributes):

             await server.send_json({
                 'type': 'auth_ok',
-                'ha_version': self._homeassistant.version,
+                'ha_version': self.sys_homeassistant.version,
             })
         except (RuntimeError, ValueError) as err:
             _LOGGER.error("Can't initialize handshake: %s", err)
@@ -180,10 +182,10 @@ class APIProxy(CoreSysAttributes):
         server_read = None
         while not server.closed and not client.closed:
             if not client_read:
-                client_read = asyncio.ensure_future(
+                client_read = self.sys_create_task(
                     client.receive_str())
             if not server_read:
-                server_read = asyncio.ensure_future(
+                server_read = self.sys_create_task(
                     server.receive_str())

             # wait until data need to be processed
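For context on the relay hunk above: the proxy keeps one pending read per side and forwards whichever frame arrives first. A minimal, hedged sketch of that loop (simplified and self-contained; close and error handling is omitted, and sys_create_task is stood in for by asyncio.ensure_future):

import asyncio


async def relay(server, client, create_task=asyncio.ensure_future):
    """Pump text frames between two aiohttp websockets until one side closes."""
    client_read = None
    server_read = None
    while not server.closed and not client.closed:
        if not client_read:
            client_read = create_task(client.receive_str())
        if not server_read:
            server_read = create_task(server.receive_str())

        # wait until data need to be processed
        await asyncio.wait(
            [client_read, server_read],
            return_when=asyncio.FIRST_COMPLETED)

        # forward whichever side produced a frame
        if client_read.done():
            await server.send_str(client_read.result())
            client_read = None
        if server_read.done():
            await client.send_str(server_read.result())
            server_read = None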
@@ -42,13 +42,13 @@ class SecurityMiddleware(CoreSysAttributes):
             return await handler(request)

         # Home-Assistant
-        if hassio_token == self._homeassistant.uuid:
+        if hassio_token == self.sys_homeassistant.uuid:
             _LOGGER.debug("%s access from Home-Assistant", request.path)
             request[REQUEST_FROM] = 'homeassistant'
             return await handler(request)

         # Add-on
-        addon = self._addons.from_uuid(hassio_token)
+        addon = self.sys_addons.from_uuid(hassio_token)
         if addon:
             _LOGGER.info("%s access from %s", request.path, addon.slug)
             request[REQUEST_FROM] = addon.slug
@@ -11,7 +11,7 @@ class APIServices(CoreSysAttributes):

     def _extract_service(self, request):
         """Return service and if not exists trow a exception."""
-        service = self._services.get(request.match_info.get('service'))
+        service = self.sys_services.get(request.match_info.get('service'))
         if not service:
             raise RuntimeError("Service not exists")

@@ -21,7 +21,7 @@ class APIServices(CoreSysAttributes):
     async def list(self, request):
         """Show register services."""
         services = []
-        for service in self._services.list_services:
+        for service in self.sys_services.list_services:
             services.append({
                 ATTR_SLUG: service.slug,
                 ATTR_AVAILABLE: service.enabled,
@@ -50,7 +50,7 @@ class APISnapshots(CoreSysAttributes):

     def _extract_snapshot(self, request):
         """Return addon and if not exists trow a exception."""
-        snapshot = self._snapshots.get(request.match_info.get('snapshot'))
+        snapshot = self.sys_snapshots.get(request.match_info.get('snapshot'))
         if not snapshot:
             raise RuntimeError("Snapshot not exists")
         return snapshot
@@ -59,7 +59,7 @@ class APISnapshots(CoreSysAttributes):
     async def list(self, request):
         """Return snapshot list."""
         data_snapshots = []
-        for snapshot in self._snapshots.list_snapshots:
+        for snapshot in self.sys_snapshots.list_snapshots:
             data_snapshots.append({
                 ATTR_SLUG: snapshot.slug,
                 ATTR_NAME: snapshot.name,
@@ -75,7 +75,7 @@ class APISnapshots(CoreSysAttributes):
     @api_process
     async def reload(self, request):
         """Reload snapshot list."""
-        await asyncio.shield(self._snapshots.reload())
+        await asyncio.shield(self.sys_snapshots.reload())
         return True

     @api_process
@@ -110,7 +110,7 @@ class APISnapshots(CoreSysAttributes):
         """Full-Snapshot a snapshot."""
         body = await api_validate(SCHEMA_SNAPSHOT_FULL, request)
         snapshot = await asyncio.shield(
-            self._snapshots.do_snapshot_full(**body))
+            self.sys_snapshots.do_snapshot_full(**body))

         if snapshot:
             return {ATTR_SLUG: snapshot.slug}
@@ -121,7 +121,7 @@ class APISnapshots(CoreSysAttributes):
         """Partial-Snapshot a snapshot."""
         body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request)
         snapshot = await asyncio.shield(
-            self._snapshots.do_snapshot_partial(**body))
+            self.sys_snapshots.do_snapshot_partial(**body))

         if snapshot:
             return {ATTR_SLUG: snapshot.slug}
@@ -134,7 +134,7 @@ class APISnapshots(CoreSysAttributes):
         body = await api_validate(SCHEMA_RESTORE_FULL, request)

         return await asyncio.shield(
-            self._snapshots.do_restore_full(snapshot, **body))
+            self.sys_snapshots.do_restore_full(snapshot, **body))

     @api_process
     async def restore_partial(self, request):
@@ -143,13 +143,13 @@ class APISnapshots(CoreSysAttributes):
         body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)

         return await asyncio.shield(
-            self._snapshots.do_restore_partial(snapshot, **body))
+            self.sys_snapshots.do_restore_partial(snapshot, **body))

     @api_process
     async def remove(self, request):
         """Remove a snapshot."""
         snapshot = self._extract_snapshot(request)
-        return self._snapshots.remove(snapshot)
+        return self.sys_snapshots.remove(snapshot)

     async def download(self, request):
         """Download a snapshot file."""
@@ -163,7 +163,7 @@ class APISnapshots(CoreSysAttributes):
     @api_process
     async def upload(self, request):
         """Upload a snapshot file."""
-        with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp_dir:
+        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
             tar_file = Path(temp_dir, f"snapshot.tar")

             try:
@@ -179,7 +179,7 @@ class APISnapshots(CoreSysAttributes):
                 return False

             snapshot = await asyncio.shield(
-                self._snapshots.import_snapshot(tar_file))
+                self.sys_snapshots.import_snapshot(tar_file))

             if snapshot:
                 return {ATTR_SLUG: snapshot.slug}
@@ -41,7 +41,7 @@ class APISupervisor(CoreSysAttributes):
     async def info(self, request):
         """Return host information."""
         list_addons = []
-        for addon in self._addons.list_addons:
+        for addon in self.sys_addons.list_addons:
         if addon.is_installed:
                 list_addons.append({
                     ATTR_NAME: addon.name,
@@ -57,13 +57,13 @@ class APISupervisor(CoreSysAttributes):

         return {
             ATTR_VERSION: HASSIO_VERSION,
-            ATTR_LAST_VERSION: self._updater.version_hassio,
-            ATTR_CHANNEL: self._updater.channel,
-            ATTR_ARCH: self._arch,
-            ATTR_WAIT_BOOT: self._config.wait_boot,
-            ATTR_TIMEZONE: self._config.timezone,
+            ATTR_LAST_VERSION: self.sys_updater.version_hassio,
+            ATTR_CHANNEL: self.sys_updater.channel,
+            ATTR_ARCH: self.sys_arch,
+            ATTR_WAIT_BOOT: self.sys_config.wait_boot,
+            ATTR_TIMEZONE: self.sys_config.timezone,
             ATTR_ADDONS: list_addons,
-            ATTR_ADDONS_REPOSITORIES: self._config.addons_repositories,
+            ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories,
         }

     @api_process
@@ -72,26 +72,26 @@ class APISupervisor(CoreSysAttributes):
         body = await api_validate(SCHEMA_OPTIONS, request)

         if ATTR_CHANNEL in body:
-            self._updater.channel = body[ATTR_CHANNEL]
+            self.sys_updater.channel = body[ATTR_CHANNEL]

         if ATTR_TIMEZONE in body:
-            self._config.timezone = body[ATTR_TIMEZONE]
+            self.sys_config.timezone = body[ATTR_TIMEZONE]

         if ATTR_WAIT_BOOT in body:
-            self._config.wait_boot = body[ATTR_WAIT_BOOT]
+            self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]

         if ATTR_ADDONS_REPOSITORIES in body:
             new = set(body[ATTR_ADDONS_REPOSITORIES])
-            await asyncio.shield(self._addons.load_repositories(new))
+            await asyncio.shield(self.sys_addons.load_repositories(new))

-        self._updater.save_data()
-        self._config.save_data()
+        self.sys_updater.save_data()
+        self.sys_config.save_data()
         return True

     @api_process
     async def stats(self, request):
         """Return resource information."""
-        stats = await self._supervisor.stats()
+        stats = await self.sys_supervisor.stats()
         if not stats:
             raise RuntimeError("No stats available")

@@ -109,19 +109,19 @@ class APISupervisor(CoreSysAttributes):
     async def update(self, request):
         """Update supervisor OS."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self._updater.version_hassio)
+        version = body.get(ATTR_VERSION, self.sys_updater.version_hassio)

-        if version == self._supervisor.version:
+        if version == self.sys_supervisor.version:
             raise RuntimeError("Version {} is already in use".format(version))

         return await asyncio.shield(
-            self._supervisor.update(version))
+            self.sys_supervisor.update(version))

     @api_process
     async def reload(self, request):
         """Reload addons, config ect."""
         tasks = [
-            self._updater.reload(),
+            self.sys_updater.reload(),
         ]
         results, _ = await asyncio.shield(
             asyncio.wait(tasks))
@@ -135,4 +135,4 @@ class APISupervisor(CoreSysAttributes):
     @api_process_raw(CONTENT_TYPE_BINARY)
     def logs(self, request):
         """Return supervisor docker logs."""
-        return self._supervisor.logs()
+        return self.sys_supervisor.logs()
@@ -17,6 +17,7 @@ from .snapshots import SnapshotManager
 from .tasks import Tasks
 from .updater import Updater
 from .services import ServiceManager
+from .services.discovery import Discovery
 from .host import AlsaAudio

 _LOGGER = logging.getLogger(__name__)
@@ -36,6 +37,7 @@ def initialize_coresys(loop):
     coresys.snapshots = SnapshotManager(coresys)
     coresys.tasks = Tasks(coresys)
     coresys.services = ServiceManager(coresys)
+    coresys.discovery = Discovery(coresys)

     # bootstrap config
     initialize_system_data(coresys)
@@ -20,98 +20,98 @@ class HassIO(CoreSysAttributes):
     async def setup(self):
         """Setup HassIO orchestration."""
         # update timezone
-        if self._config.timezone == 'UTC':
-            self._config.timezone = await fetch_timezone(self._websession)
+        if self.sys_config.timezone == 'UTC':
+            self.sys_config.timezone = await fetch_timezone(self._websession)

         # supervisor
-        await self._supervisor.load()
+        await self.sys_supervisor.load()

         # hostcontrol
         await self._host_control.load()

         # Load homeassistant
-        await self._homeassistant.load()
+        await self.sys_homeassistant.load()

         # Load addons
-        await self._addons.load()
+        await self.sys_addons.load()

         # rest api views
-        await self._api.load()
+        await self.sys_api.load()

         # load last available data
-        await self._updater.load()
+        await self.sys_updater.load()

         # load last available data
-        await self._snapshots.load()
+        await self.sys_snapshots.load()

         # load services
-        await self._services.load()
+        await self.sys_services.load()

         # start dns forwarding
-        self._loop.create_task(self._dns.start())
+        self.sys_create_task(self.sys_dns.start())

         # start addon mark as initialize
-        await self._addons.auto_boot(STARTUP_INITIALIZE)
+        await self.sys_addons.auto_boot(STARTUP_INITIALIZE)

     async def start(self):
         """Start HassIO orchestration."""
         # on release channel, try update itself
         # on dev mode, only read new versions
-        if not self._dev and self._supervisor.need_update:
-            if await self._supervisor.update():
+        if not self.sys_dev and self.sys_supervisor.need_update:
+            if await self.sys_supervisor.update():
                 return
         else:
             _LOGGER.info("Ignore Hass.io auto updates on dev channel")

         # start api
-        await self._api.start()
-        _LOGGER.info("Start API on %s", self._docker.network.supervisor)
+        await self.sys_api.start()
+        _LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)

         try:
             # HomeAssistant is already running / supervisor have only reboot
-            if self._hardware.last_boot == self._config.last_boot:
+            if self.sys_hardware.last_boot == self.sys_config.last_boot:
                 _LOGGER.info("Hass.io reboot detected")
                 return

             # reset register services / discovery
-            self._services.reset()
+            self.sys_services.reset()

             # start addon mark as system
-            await self._addons.auto_boot(STARTUP_SYSTEM)
+            await self.sys_addons.auto_boot(STARTUP_SYSTEM)

             # start addon mark as services
-            await self._addons.auto_boot(STARTUP_SERVICES)
+            await self.sys_addons.auto_boot(STARTUP_SERVICES)

             # run HomeAssistant
-            if self._homeassistant.boot:
-                await self._homeassistant.start()
+            if self.sys_homeassistant.boot:
+                await self.sys_homeassistant.start()

             # start addon mark as application
-            await self._addons.auto_boot(STARTUP_APPLICATION)
+            await self.sys_addons.auto_boot(STARTUP_APPLICATION)

             # store new last boot
-            self._config.last_boot = self._hardware.last_boot
-            self._config.save_data()
+            self.sys_config.last_boot = self.sys_hardware.last_boot
+            self.sys_config.save_data()

         finally:
             # Add core tasks into scheduler
-            await self._tasks.load()
+            await self.sys_tasks.load()

             # If landingpage / run upgrade in background
-            if self._homeassistant.version == 'landingpage':
-                self._loop.create_task(self._homeassistant.install())
+            if self.sys_homeassistant.version == 'landingpage':
+                self.sys_create_task(self.sys_homeassistant.install())

             _LOGGER.info("Hass.io is up and running")

     async def stop(self):
         """Stop a running orchestration."""
         # don't process scheduler anymore
-        self._scheduler.suspend = True
+        self.sys_scheduler.suspend = True

         # process async stop tasks
         await asyncio.wait([
-            self._api.stop(),
-            self._dns.stop(),
-            self._websession.close(),
-            self._websession_ssl.close()
+            self.sys_api.stop(),
+            self.sys_dns.stop(),
+            self.sys_websession.close(),
+            self.sys_websession_ssl.close()
         ])
@@ -7,8 +7,8 @@ from .config import CoreConfig
 from .docker import DockerAPI
 from .misc.dns import DNSForward
 from .misc.hardware import Hardware
-from .misc.host_control import HostControl
 from .misc.scheduler import Scheduler
+from .misc.systemd import Systemd


 class CoreSys(object):
@@ -29,9 +29,9 @@ class CoreSys(object):
         self._config = CoreConfig()
         self._hardware = Hardware()
         self._docker = DockerAPI()
+        self._systemd = Systemd()
         self._scheduler = Scheduler(loop=loop)
         self._dns = DNSForward(loop=loop)
-        self._host_control = HostControl(loop=loop)

         # Internal objects pointers
         self._homeassistant = None
@@ -42,6 +42,7 @@ class CoreSys(object):
         self._snapshots = None
         self._tasks = None
         self._services = None
+        self._discovery = None
         self._alsa = None

     @property
@@ -104,9 +105,9 @@ class CoreSys(object):
         return self._dns

     @property
-    def host_control(self):
-        """Return HostControl object."""
-        return self._host_control
+    def systemd(self):
+        """Return systemd object."""
+        return self._systemd

     @property
     def homeassistant(self):
@@ -204,6 +205,18 @@ class CoreSys(object):
             raise RuntimeError("Services already set!")
         self._services = value

+    @property
+    def discovery(self):
+        """Return ServiceManager object."""
+        return self._discovery
+
+    @discovery.setter
+    def discovery(self, value):
+        """Set a Discovery object."""
+        if self._discovery:
+            raise RuntimeError("Discovery already set!")
+        self._discovery = value
+
     @property
     def alsa(self):
         """Return ALSA Audio object."""
@@ -216,6 +229,14 @@ class CoreSys(object):
             raise RuntimeError("ALSA already set!")
         self._alsa = value

+    async def run_in_executor(self, funct, *args):
+        """Wrapper for executor pool."""
+        return self._loop.run_in_executor(None, funct, *args)
+
+    async def create_task(self, coroutine):
+        """Wrapper for async task."""
+        return self._loop.create_task(coroutine)
+

 class CoreSysAttributes(object):
     """Inheret basic CoreSysAttributes."""
@@ -224,6 +245,6 @@ class CoreSysAttributes(object):

     def __getattr__(self, name):
         """Mapping to coresys."""
-        if hasattr(self.coresys, name[1:]):
-            return getattr(self.coresys, name[1:])
-        raise AttributeError(f"Can't find {name} on {self.__class__}")
+        if name.startswith("_sys_") and hasattr(self.coresys, name[5:]):
+            return getattr(self.coresys, name[5:])
+        raise AttributeError()
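With the __getattr__ change above, a sys_-style attribute access on a CoreSysAttributes subclass is meant to resolve to the matching attribute on the shared coresys object. A hedged, self-contained sketch of that forwarding pattern (the prefix handling here is simplified for illustration and is not copied verbatim from the hunk):

class FakeCoreSys:
    """Stand-in for CoreSys with a couple of attributes."""

    def __init__(self):
        self.config = "config object"
        self.addons = "addon manager"


class Attributes:
    """Forward sys_-prefixed lookups to the shared coresys object."""

    def __init__(self, coresys):
        self.coresys = coresys

    def __getattr__(self, name):
        # e.g. self.sys_config -> self.coresys.config
        if name.startswith("sys_") and hasattr(self.coresys, name[4:]):
            return getattr(self.coresys, name[4:])
        raise AttributeError(name)


attrs = Attributes(FakeCoreSys())
print(attrs.sys_config)   # -> "config object"
print(attrs.sys_addons)   # -> "addon manager"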
|
@ -28,7 +28,7 @@ class DockerAddon(DockerInterface):
|
|||||||
@property
|
@property
|
||||||
def addon(self):
|
def addon(self):
|
||||||
"""Return addon of docker image."""
|
"""Return addon of docker image."""
|
||||||
return self._addons.get(self._id)
|
return self.sys_addons.get(self._id)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def image(self):
|
def image(self):
|
||||||
@ -52,7 +52,7 @@ class DockerAddon(DockerInterface):
|
|||||||
"""Return arch of docker image."""
|
"""Return arch of docker image."""
|
||||||
if not self.addon.legacy:
|
if not self.addon.legacy:
|
||||||
return super().arch
|
return super().arch
|
||||||
return self._arch
|
return self.sys_arch
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def name(self):
|
def name(self):
|
||||||
@ -85,7 +85,7 @@ class DockerAddon(DockerInterface):
|
|||||||
|
|
||||||
return {
|
return {
|
||||||
**addon_env,
|
**addon_env,
|
||||||
ENV_TIME: self._config.timezone,
|
ENV_TIME: self.sys_config.timezone,
|
||||||
ENV_TOKEN: self.addon.uuid,
|
ENV_TOKEN: self.addon.uuid,
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -100,7 +100,7 @@ class DockerAddon(DockerInterface):
|
|||||||
|
|
||||||
# Auto mapping UART devices
|
# Auto mapping UART devices
|
||||||
if self.addon.auto_uart:
|
if self.addon.auto_uart:
|
||||||
for device in self._hardware.serial_devices:
|
for device in self.sys_hardware.serial_devices:
|
||||||
devices.append(f"{device}:{device}:rwm")
|
devices.append(f"{device}:{device}:rwm")
|
||||||
|
|
||||||
# Return None if no devices is present
|
# Return None if no devices is present
|
||||||
@ -149,8 +149,8 @@ class DockerAddon(DockerInterface):
|
|||||||
def network_mapping(self):
|
def network_mapping(self):
|
||||||
"""Return hosts mapping."""
|
"""Return hosts mapping."""
|
||||||
return {
|
return {
|
||||||
'homeassistant': self._docker.network.gateway,
|
'homeassistant': self.sys_docker.network.gateway,
|
||||||
'hassio': self._docker.network.supervisor,
|
'hassio': self.sys_docker.network.supervisor,
|
||||||
}
|
}
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@ -173,31 +173,31 @@ class DockerAddon(DockerInterface):
|
|||||||
# setup config mappings
|
# setup config mappings
|
||||||
if MAP_CONFIG in addon_mapping:
|
if MAP_CONFIG in addon_mapping:
|
||||||
volumes.update({
|
volumes.update({
|
||||||
str(self._config.path_extern_config): {
|
str(self.sys_config.path_extern_config): {
|
||||||
'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
|
'bind': "/config", 'mode': addon_mapping[MAP_CONFIG]
|
||||||
}})
|
}})
|
||||||
|
|
||||||
if MAP_SSL in addon_mapping:
|
if MAP_SSL in addon_mapping:
|
||||||
volumes.update({
|
volumes.update({
|
||||||
str(self._config.path_extern_ssl): {
|
str(self.sys_config.path_extern_ssl): {
|
||||||
'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
|
'bind': "/ssl", 'mode': addon_mapping[MAP_SSL]
|
||||||
}})
|
}})
|
||||||
|
|
||||||
if MAP_ADDONS in addon_mapping:
|
if MAP_ADDONS in addon_mapping:
|
||||||
volumes.update({
|
volumes.update({
|
||||||
str(self._config.path_extern_addons_local): {
|
str(self.sys_config.path_extern_addons_local): {
|
||||||
'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
|
'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS]
|
||||||
}})
|
}})
|
||||||
|
|
||||||
if MAP_BACKUP in addon_mapping:
|
if MAP_BACKUP in addon_mapping:
|
||||||
volumes.update({
|
volumes.update({
|
||||||
str(self._config.path_extern_backup): {
|
str(self.sys_config.path_extern_backup): {
|
||||||
'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
|
'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP]
|
||||||
}})
|
}})
|
||||||
|
|
||||||
if MAP_SHARE in addon_mapping:
|
if MAP_SHARE in addon_mapping:
|
||||||
volumes.update({
|
volumes.update({
|
||||||
str(self._config.path_extern_share): {
|
str(self.sys_config.path_extern_share): {
|
||||||
'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
|
'bind': "/share", 'mode': addon_mapping[MAP_SHARE]
|
||||||
}})
|
}})
|
||||||
|
|
||||||
@ -239,7 +239,7 @@ class DockerAddon(DockerInterface):
|
|||||||
# cleanup
|
# cleanup
|
||||||
self._stop()
|
self._stop()
|
||||||
|
|
||||||
ret = self._docker.run(
|
ret = self.sys_docker.run(
|
||||||
self.image,
|
self.image,
|
||||||
name=self.name,
|
name=self.name,
|
||||||
hostname=self.hostname,
|
hostname=self.hostname,
|
||||||
@ -283,7 +283,7 @@ class DockerAddon(DockerInterface):
|
|||||||
|
|
||||||
_LOGGER.info("Start build %s:%s", self.image, tag)
|
_LOGGER.info("Start build %s:%s", self.image, tag)
|
||||||
try:
|
try:
|
||||||
image, log = self._docker.images.build(
|
image, log = self.sys_docker.images.build(
|
||||||
**build_env.get_docker_args(tag))
|
**build_env.get_docker_args(tag))
|
||||||
|
|
||||||
_LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
|
_LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
|
||||||
@ -302,7 +302,7 @@ class DockerAddon(DockerInterface):
|
|||||||
@process_lock
|
@process_lock
|
||||||
def export_image(self, path):
|
def export_image(self, path):
|
||||||
"""Export current images into a tar file."""
|
"""Export current images into a tar file."""
|
||||||
return self._loop.run_in_executor(None, self._export_image, path)
|
return self.sys_run_in_executor(self._export_image, path)
|
||||||
|
|
||||||
def _export_image(self, tar_file):
|
def _export_image(self, tar_file):
|
||||||
"""Export current images into a tar file.
|
"""Export current images into a tar file.
|
||||||
@ -310,7 +310,7 @@ class DockerAddon(DockerInterface):
|
|||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
image = self._docker.api.get_image(self.image)
|
image = self.sys_docker.api.get_image(self.image)
|
||||||
except docker.errors.DockerException as err:
|
except docker.errors.DockerException as err:
|
||||||
_LOGGER.error("Can't fetch image %s: %s", self.image, err)
|
_LOGGER.error("Can't fetch image %s: %s", self.image, err)
|
||||||
return False
|
return False
|
||||||
@ -330,7 +330,7 @@ class DockerAddon(DockerInterface):
|
|||||||
@process_lock
|
@process_lock
|
||||||
def import_image(self, path, tag):
|
def import_image(self, path, tag):
|
||||||
"""Import a tar file as image."""
|
"""Import a tar file as image."""
|
||||||
return self._loop.run_in_executor(None, self._import_image, path, tag)
|
return self.sys_run_in_executor(self._import_image, path, tag)
|
||||||
|
|
||||||
def _import_image(self, tar_file, tag):
|
def _import_image(self, tar_file, tag):
|
||||||
"""Import a tar file as image.
|
"""Import a tar file as image.
|
||||||
@ -339,9 +339,9 @@ class DockerAddon(DockerInterface):
|
|||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
with tar_file.open("rb") as read_tar:
|
with tar_file.open("rb") as read_tar:
|
||||||
self._docker.api.load_image(read_tar, quiet=True)
|
self.sys_docker.api.load_image(read_tar, quiet=True)
|
||||||
|
|
||||||
image = self._docker.images.get(self.image)
|
image = self.sys_docker.images.get(self.image)
|
||||||
image.tag(self.image, tag=tag)
|
image.tag(self.image, tag=tag)
|
||||||
except (docker.errors.DockerException, OSError) as err:
|
except (docker.errors.DockerException, OSError) as err:
|
||||||
_LOGGER.error("Can't import image %s: %s", self.image, err)
|
_LOGGER.error("Can't import image %s: %s", self.image, err)
|
||||||
@ -355,7 +355,7 @@ class DockerAddon(DockerInterface):
|
|||||||
@process_lock
|
@process_lock
|
||||||
def write_stdin(self, data):
|
def write_stdin(self, data):
|
||||||
"""Write to add-on stdin."""
|
"""Write to add-on stdin."""
|
||||||
return self._loop.run_in_executor(None, self._write_stdin, data)
|
return self.sys_run_in_executor(self._write_stdin, data)
|
||||||
|
|
||||||
def _write_stdin(self, data):
|
def _write_stdin(self, data):
|
||||||
"""Write to add-on stdin.
|
"""Write to add-on stdin.
|
||||||
@ -367,7 +367,7 @@ class DockerAddon(DockerInterface):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
# load needed docker objects
|
# load needed docker objects
|
||||||
container = self._docker.containers.get(self.name)
|
container = self.sys_docker.containers.get(self.name)
|
||||||
socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
|
socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
|
||||||
except docker.errors.DockerException as err:
|
except docker.errors.DockerException as err:
|
||||||
_LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
|
_LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
|
||||||
|
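The repeated change from self._loop.run_in_executor(None, func, *args) to self.sys_run_in_executor(func, *args) in the hunks above only moves the boilerplate into CoreSys; the behaviour stays "run the blocking helper in the default thread pool and hand back an awaitable". A rough, self-contained sketch of that wrapper, assuming it simply forwards to the loop (the class and names below are illustrative, not the real CoreSys):

    import asyncio


    class MiniCoreSys:
        """Illustrative executor wrapper around the event loop."""

        def __init__(self, loop):
            self._loop = loop

        def run_in_executor(self, funct, *args):
            # Schedule the blocking call on the default executor, return a Future.
            return self._loop.run_in_executor(None, funct, *args)


    def blocking_work(value):
        return value * 2  # stands in for docker/tarfile calls


    async def main(coresys):
        result = await coresys.run_in_executor(blocking_work, 21)
        print(result)  # 42


    loop = asyncio.get_event_loop()
    loop.run_until_complete(main(MiniCoreSys(loop)))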
@ -24,7 +24,7 @@ class DockerHomeAssistant(DockerInterface):
|
|||||||
@property
|
@property
|
||||||
def image(self):
|
def image(self):
|
||||||
"""Return name of docker image."""
|
"""Return name of docker image."""
|
||||||
return self._homeassistant.image
|
return self.sys_homeassistant.image
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def name(self):
|
def name(self):
|
||||||
@ -35,7 +35,7 @@ class DockerHomeAssistant(DockerInterface):
|
|||||||
def devices(self):
|
def devices(self):
|
||||||
"""Create list of special device to map into docker."""
|
"""Create list of special device to map into docker."""
|
||||||
devices = []
|
devices = []
|
||||||
for device in self._hardware.serial_devices:
|
for device in self.sys_hardware.serial_devices:
|
||||||
devices.append(f"{device}:{device}:rwm")
|
devices.append(f"{device}:{device}:rwm")
|
||||||
return devices or None
|
return devices or None
|
||||||
|
|
||||||
@ -50,7 +50,7 @@ class DockerHomeAssistant(DockerInterface):
|
|||||||
# cleanup
|
# cleanup
|
||||||
self._stop()
|
self._stop()
|
||||||
|
|
||||||
ret = self._docker.run(
|
ret = self.sys_docker.run(
|
||||||
self.image,
|
self.image,
|
||||||
name=self.name,
|
name=self.name,
|
||||||
hostname=self.name,
|
hostname=self.name,
|
||||||
@ -60,16 +60,16 @@ class DockerHomeAssistant(DockerInterface):
|
|||||||
devices=self.devices,
|
devices=self.devices,
|
||||||
network_mode='host',
|
network_mode='host',
|
||||||
environment={
|
environment={
|
||||||
'HASSIO': self._docker.network.supervisor,
|
'HASSIO': self.sys_docker.network.supervisor,
|
||||||
ENV_TIME: self._config.timezone,
|
ENV_TIME: self.sys_config.timezone,
|
||||||
ENV_TOKEN: self._homeassistant.uuid,
|
ENV_TOKEN: self.sys_homeassistant.uuid,
|
||||||
},
|
},
|
||||||
volumes={
|
volumes={
|
||||||
str(self._config.path_extern_config):
|
str(self.sys_config.path_extern_config):
|
||||||
{'bind': '/config', 'mode': 'rw'},
|
{'bind': '/config', 'mode': 'rw'},
|
||||||
str(self._config.path_extern_ssl):
|
str(self.sys_config.path_extern_ssl):
|
||||||
{'bind': '/ssl', 'mode': 'ro'},
|
{'bind': '/ssl', 'mode': 'ro'},
|
||||||
str(self._config.path_extern_share):
|
str(self.sys_config.path_extern_share):
|
||||||
{'bind': '/share', 'mode': 'rw'},
|
{'bind': '/share', 'mode': 'rw'},
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
@ -85,26 +85,26 @@ class DockerHomeAssistant(DockerInterface):
|
|||||||
|
|
||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
return self._docker.run_command(
|
return self.sys_docker.run_command(
|
||||||
self.image,
|
self.image,
|
||||||
command,
|
command,
|
||||||
detach=True,
|
detach=True,
|
||||||
stdout=True,
|
stdout=True,
|
||||||
stderr=True,
|
stderr=True,
|
||||||
environment={
|
environment={
|
||||||
ENV_TIME: self._config.timezone,
|
ENV_TIME: self.sys_config.timezone,
|
||||||
},
|
},
|
||||||
volumes={
|
volumes={
|
||||||
str(self._config.path_extern_config):
|
str(self.sys_config.path_extern_config):
|
||||||
{'bind': '/config', 'mode': 'ro'},
|
{'bind': '/config', 'mode': 'ro'},
|
||||||
str(self._config.path_extern_ssl):
|
str(self.sys_config.path_extern_ssl):
|
||||||
{'bind': '/ssl', 'mode': 'ro'},
|
{'bind': '/ssl', 'mode': 'ro'},
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
def is_initialize(self):
|
def is_initialize(self):
|
||||||
"""Return True if docker container exists."""
|
"""Return True if docker container exists."""
|
||||||
return self._loop.run_in_executor(None, self._is_initialize)
|
return self.sys_run_in_executor(self._is_initialize)
|
||||||
|
|
||||||
def _is_initialize(self):
|
def _is_initialize(self):
|
||||||
"""Return True if docker container exists.
|
"""Return True if docker container exists.
|
||||||
@ -112,7 +112,7 @@ class DockerHomeAssistant(DockerInterface):
|
|||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
self._docker.containers.get(self.name)
|
self.sys_docker.containers.get(self.name)
|
||||||
except docker.errors.DockerException:
|
except docker.errors.DockerException:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
@ -61,7 +61,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
@process_lock
|
@process_lock
|
||||||
def install(self, tag):
|
def install(self, tag):
|
||||||
"""Pull docker image."""
|
"""Pull docker image."""
|
||||||
return self._loop.run_in_executor(None, self._install, tag)
|
return self.sys_run_in_executor(self._install, tag)
|
||||||
|
|
||||||
def _install(self, tag):
|
def _install(self, tag):
|
||||||
"""Pull docker image.
|
"""Pull docker image.
|
||||||
@ -70,7 +70,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
_LOGGER.info("Pull image %s tag %s.", self.image, tag)
|
_LOGGER.info("Pull image %s tag %s.", self.image, tag)
|
||||||
image = self._docker.images.pull(f"{self.image}:{tag}")
|
image = self.sys_docker.images.pull(f"{self.image}:{tag}")
|
||||||
|
|
||||||
image.tag(self.image, tag='latest')
|
image.tag(self.image, tag='latest')
|
||||||
self._meta = image.attrs
|
self._meta = image.attrs
|
||||||
@ -83,7 +83,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
|
|
||||||
def exists(self):
|
def exists(self):
|
||||||
"""Return True if docker image exists in local repo."""
|
"""Return True if docker image exists in local repo."""
|
||||||
return self._loop.run_in_executor(None, self._exists)
|
return self.sys_run_in_executor(self._exists)
|
||||||
|
|
||||||
def _exists(self):
|
def _exists(self):
|
||||||
"""Return True if docker image exists in local repo.
|
"""Return True if docker image exists in local repo.
|
||||||
@ -91,7 +91,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
image = self._docker.images.get(self.image)
|
image = self.sys_docker.images.get(self.image)
|
||||||
assert f"{self.image}:{self.version}" in image.tags
|
assert f"{self.image}:{self.version}" in image.tags
|
||||||
except (docker.errors.DockerException, AssertionError):
|
except (docker.errors.DockerException, AssertionError):
|
||||||
return False
|
return False
|
||||||
@ -103,7 +103,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
|
|
||||||
Return a Future.
|
Return a Future.
|
||||||
"""
|
"""
|
||||||
return self._loop.run_in_executor(None, self._is_running)
|
return self.sys_run_in_executor(self._is_running)
|
||||||
|
|
||||||
def _is_running(self):
|
def _is_running(self):
|
||||||
"""Return True if docker is Running.
|
"""Return True if docker is Running.
|
||||||
@ -111,8 +111,8 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
container = self._docker.containers.get(self.name)
|
container = self.sys_docker.containers.get(self.name)
|
||||||
image = self._docker.images.get(self.image)
|
image = self.sys_docker.images.get(self.image)
|
||||||
except docker.errors.DockerException:
|
except docker.errors.DockerException:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -129,7 +129,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
@process_lock
|
@process_lock
|
||||||
def attach(self):
|
def attach(self):
|
||||||
"""Attach to running docker container."""
|
"""Attach to running docker container."""
|
||||||
return self._loop.run_in_executor(None, self._attach)
|
return self.sys_run_in_executor(self._attach)
|
||||||
|
|
||||||
def _attach(self):
|
def _attach(self):
|
||||||
"""Attach to running docker container.
|
"""Attach to running docker container.
|
||||||
@ -138,9 +138,9 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
if self.image:
|
if self.image:
|
||||||
self._meta = self._docker.images.get(self.image).attrs
|
self._meta = self.sys_docker.images.get(self.image).attrs
|
||||||
else:
|
else:
|
||||||
self._meta = self._docker.containers.get(self.name).attrs
|
self._meta = self.sys_docker.containers.get(self.name).attrs
|
||||||
except docker.errors.DockerException:
|
except docker.errors.DockerException:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -152,7 +152,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
@process_lock
|
@process_lock
|
||||||
def run(self):
|
def run(self):
|
||||||
"""Run docker image."""
|
"""Run docker image."""
|
||||||
return self._loop.run_in_executor(None, self._run)
|
return self.sys_run_in_executor(self._run)
|
||||||
|
|
||||||
def _run(self):
|
def _run(self):
|
||||||
"""Run docker image.
|
"""Run docker image.
|
||||||
@ -164,7 +164,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
@process_lock
|
@process_lock
|
||||||
def stop(self):
|
def stop(self):
|
||||||
"""Stop/remove docker container."""
|
"""Stop/remove docker container."""
|
||||||
return self._loop.run_in_executor(None, self._stop)
|
return self.sys_run_in_executor(self._stop)
|
||||||
|
|
||||||
def _stop(self):
|
def _stop(self):
|
||||||
"""Stop/remove and remove docker container.
|
"""Stop/remove and remove docker container.
|
||||||
@ -172,7 +172,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
container = self._docker.containers.get(self.name)
|
container = self.sys_docker.containers.get(self.name)
|
||||||
except docker.errors.DockerException:
|
except docker.errors.DockerException:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -190,7 +190,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
@process_lock
|
@process_lock
|
||||||
def remove(self):
|
def remove(self):
|
||||||
"""Remove docker images."""
|
"""Remove docker images."""
|
||||||
return self._loop.run_in_executor(None, self._remove)
|
return self.sys_run_in_executor(self._remove)
|
||||||
|
|
||||||
def _remove(self):
|
def _remove(self):
|
||||||
"""remove docker images.
|
"""remove docker images.
|
||||||
@ -205,11 +205,11 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
with suppress(docker.errors.ImageNotFound):
|
with suppress(docker.errors.ImageNotFound):
|
||||||
self._docker.images.remove(
|
self.sys_docker.images.remove(
|
||||||
image=f"{self.image}:latest", force=True)
|
image=f"{self.image}:latest", force=True)
|
||||||
|
|
||||||
with suppress(docker.errors.ImageNotFound):
|
with suppress(docker.errors.ImageNotFound):
|
||||||
self._docker.images.remove(
|
self.sys_docker.images.remove(
|
||||||
image=f"{self.image}:{self.version}", force=True)
|
image=f"{self.image}:{self.version}", force=True)
|
||||||
|
|
||||||
except docker.errors.DockerException as err:
|
except docker.errors.DockerException as err:
|
||||||
@ -222,7 +222,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
@process_lock
|
@process_lock
|
||||||
def update(self, tag):
|
def update(self, tag):
|
||||||
"""Update a docker image."""
|
"""Update a docker image."""
|
||||||
return self._loop.run_in_executor(None, self._update, tag)
|
return self.sys_run_in_executor(self._update, tag)
|
||||||
|
|
||||||
def _update(self, tag):
|
def _update(self, tag):
|
||||||
"""Update a docker image.
|
"""Update a docker image.
|
||||||
@ -247,7 +247,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
|
|
||||||
Return a Future.
|
Return a Future.
|
||||||
"""
|
"""
|
||||||
return self._loop.run_in_executor(None, self._logs)
|
return self.sys_run_in_executor(self._logs)
|
||||||
|
|
||||||
def _logs(self):
|
def _logs(self):
|
||||||
"""Return docker logs of container.
|
"""Return docker logs of container.
|
||||||
@ -255,7 +255,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
container = self._docker.containers.get(self.name)
|
container = self.sys_docker.containers.get(self.name)
|
||||||
except docker.errors.DockerException:
|
except docker.errors.DockerException:
|
||||||
return b""
|
return b""
|
||||||
|
|
||||||
@ -267,7 +267,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
@process_lock
|
@process_lock
|
||||||
def cleanup(self):
|
def cleanup(self):
|
||||||
"""Check if old version exists and cleanup."""
|
"""Check if old version exists and cleanup."""
|
||||||
return self._loop.run_in_executor(None, self._cleanup)
|
return self.sys_run_in_executor(self._cleanup)
|
||||||
|
|
||||||
def _cleanup(self):
|
def _cleanup(self):
|
||||||
"""Check if old version exists and cleanup.
|
"""Check if old version exists and cleanup.
|
||||||
@ -275,25 +275,25 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
latest = self._docker.images.get(self.image)
|
latest = self.sys_docker.images.get(self.image)
|
||||||
except docker.errors.DockerException:
|
except docker.errors.DockerException:
|
||||||
_LOGGER.warning("Can't find %s for cleanup", self.image)
|
_LOGGER.warning("Can't find %s for cleanup", self.image)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
for image in self._docker.images.list(name=self.image):
|
for image in self.sys_docker.images.list(name=self.image):
|
||||||
if latest.id == image.id:
|
if latest.id == image.id:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
with suppress(docker.errors.DockerException):
|
with suppress(docker.errors.DockerException):
|
||||||
_LOGGER.info("Cleanup docker images: %s", image.tags)
|
_LOGGER.info("Cleanup docker images: %s", image.tags)
|
||||||
self._docker.images.remove(image.id, force=True)
|
self.sys_docker.images.remove(image.id, force=True)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@process_lock
|
@process_lock
|
||||||
def execute_command(self, command):
|
def execute_command(self, command):
|
||||||
"""Create a temporary container and run command."""
|
"""Create a temporary container and run command."""
|
||||||
return self._loop.run_in_executor(None, self._execute_command, command)
|
return self.sys_run_in_executor(self._execute_command, command)
|
||||||
|
|
||||||
def _execute_command(self, command):
|
def _execute_command(self, command):
|
||||||
"""Create a temporary container and run command.
|
"""Create a temporary container and run command.
|
||||||
@ -304,7 +304,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
|
|
||||||
def stats(self):
|
def stats(self):
|
||||||
"""Read and return stats from container."""
|
"""Read and return stats from container."""
|
||||||
return self._loop.run_in_executor(None, self._stats)
|
return self.sys_run_in_executor(self._stats)
|
||||||
|
|
||||||
def _stats(self):
|
def _stats(self):
|
||||||
"""Create a temporary container and run command.
|
"""Create a temporary container and run command.
|
||||||
@ -312,7 +312,7 @@ class DockerInterface(CoreSysAttributes):
|
|||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
container = self._docker.containers.get(self.name)
|
container = self.sys_docker.containers.get(self.name)
|
||||||
except docker.errors.DockerException:
|
except docker.errors.DockerException:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@ -24,7 +24,7 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes):
|
|||||||
Need run inside executor.
|
Need run inside executor.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
container = self._docker.containers.get(self.name)
|
container = self.sys_docker.containers.get(self.name)
|
||||||
except docker.errors.DockerException:
|
except docker.errors.DockerException:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@ -33,9 +33,10 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes):
|
|||||||
self.image, self.version)
|
self.image, self.version)
|
||||||
|
|
||||||
# if already attached
|
# if already attached
|
||||||
if container in self._docker.network.containers:
|
if container in self.sys_docker.network.containers:
|
||||||
return True
|
return True
|
||||||
|
|
||||||
# attach to network
|
# attach to network
|
||||||
return self._docker.network.attach_container(
|
return self.sys_docker.network.attach_container(
|
||||||
container, alias=['hassio'], ipv4=self._docker.network.supervisor)
|
container, alias=['hassio'],
|
||||||
|
ipv4=self.sys_docker.network.supervisor)
|
||||||
|
15
hassio/exceptions.py
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
"""Core Exceptions."""
|
||||||
|
|
||||||
|
class HassioError(Exception):
|
||||||
|
"""Root exception."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class HassioInternalError(HassioError):
|
||||||
|
"""Internal Hass.io error they can't handle."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class HassioNotSupportedError(HassioError):
|
||||||
|
"""Function is not supported."""
|
||||||
|
pass
|
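The new hassio/exceptions.py above introduces a small exception tree, and later hunks in this commit (the Systemd D-Bus wrapper) raise HassioInternalError instead of letting DBusError escape. A hypothetical caller could then handle any Hass.io failure with the base class; reboot_host and its systemd argument below are illustrative and not part of the commit:

    from hassio.exceptions import HassioError, HassioInternalError


    async def reboot_host(systemd):
        """Sketch of a caller; `systemd` stands in for the D-Bus wrapper."""
        try:
            await systemd.reboot()
        except HassioInternalError:
            # D-Bus failures surface as this single, well-known type.
            return False
        except HassioError:
            # Anything else from the Hass.io exception tree.
            return False
        return True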
@ -54,7 +54,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
@property
|
@property
|
||||||
def api_ip(self):
|
def api_ip(self):
|
||||||
"""Return IP of HomeAssistant instance."""
|
"""Return IP of HomeAssistant instance."""
|
||||||
return self._docker.network.gateway
|
return self.sys_docker.network.gateway
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def api_port(self):
|
def api_port(self):
|
||||||
@ -123,7 +123,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
"""Return last available version of homeassistant."""
|
"""Return last available version of homeassistant."""
|
||||||
if self.is_custom_image:
|
if self.is_custom_image:
|
||||||
return self._data.get(ATTR_LAST_VERSION)
|
return self._data.get(ATTR_LAST_VERSION)
|
||||||
return self._updater.version_homeassistant
|
return self.sys_updater.version_homeassistant
|
||||||
|
|
||||||
@last_version.setter
|
@last_version.setter
|
||||||
def last_version(self, value):
|
def last_version(self, value):
|
||||||
@ -189,7 +189,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
while True:
|
while True:
|
||||||
# read homeassistant tag and install it
|
# read homeassistant tag and install it
|
||||||
if not self.last_version:
|
if not self.last_version:
|
||||||
await self._updater.reload()
|
await self.sys_updater.reload()
|
||||||
|
|
||||||
tag = self.last_version
|
tag = self.last_version
|
||||||
if tag and await self.instance.install(tag):
|
if tag and await self.instance.install(tag):
|
||||||
@ -307,7 +307,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
# pylint: disable=bad-continuation
|
# pylint: disable=bad-continuation
|
||||||
async with self._websession_ssl.get(
|
async with self.sys_websession_ssl.get(
|
||||||
url, headers=header, timeout=30) as request:
|
url, headers=header, timeout=30) as request:
|
||||||
status = request.status
|
status = request.status
|
||||||
|
|
||||||
@ -328,7 +328,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
# pylint: disable=bad-continuation
|
# pylint: disable=bad-continuation
|
||||||
async with self._websession_ssl.post(
|
async with self.sys_websession_ssl.post(
|
||||||
url, headers=header, timeout=30,
|
url, headers=header, timeout=30,
|
||||||
json=event_data) as request:
|
json=event_data) as request:
|
||||||
status = request.status
|
status = request.status
|
||||||
@ -361,7 +361,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|||||||
pass
|
pass
|
||||||
|
|
||||||
while time.monotonic() - start_time < self.wait_boot:
|
while time.monotonic() - start_time < self.wait_boot:
|
||||||
if await self._loop.run_in_executor(None, check_port):
|
if await self.sys_run_in_executor(check_port):
|
||||||
_LOGGER.info("Detect a running Home-Assistant instance")
|
_LOGGER.info("Detect a running Home-Assistant instance")
|
||||||
return True
|
return True
|
||||||
await asyncio.sleep(10)
|
await asyncio.sleep(10)
|
||||||
|
@ -42,7 +42,7 @@ class AlsaAudio(CoreSysAttributes):
|
|||||||
|
|
||||||
def _update_device(self):
|
def _update_device(self):
|
||||||
"""Update Internal device DB."""
|
"""Update Internal device DB."""
|
||||||
current_id = hash(frozenset(self._hardware.audio_devices))
|
current_id = hash(frozenset(self.sys_hardware.audio_devices))
|
||||||
|
|
||||||
# Need rebuild?
|
# Need rebuild?
|
||||||
if current_id == self._cache:
|
if current_id == self._cache:
|
||||||
@ -57,7 +57,7 @@ class AlsaAudio(CoreSysAttributes):
|
|||||||
database = self._audio_database()
|
database = self._audio_database()
|
||||||
|
|
||||||
# Process devices
|
# Process devices
|
||||||
for dev_id, dev_data in self._hardware.audio_devices.items():
|
for dev_id, dev_data in self.sys_hardware.audio_devices.items():
|
||||||
for chan_id, chan_type in dev_data[ATTR_DEVICES].items():
|
for chan_id, chan_type in dev_data[ATTR_DEVICES].items():
|
||||||
alsa_id = f"{dev_id},{chan_id}"
|
alsa_id = f"{dev_id},{chan_id}"
|
||||||
dev_name = dev_data[ATTR_NAME]
|
dev_name = dev_data[ATTR_NAME]
|
||||||
@ -73,7 +73,7 @@ class AlsaAudio(CoreSysAttributes):
|
|||||||
|
|
||||||
# Use name from DB or a generic name
|
# Use name from DB or a generic name
|
||||||
self._data[key][alsa_id] = database.get(
|
self._data[key][alsa_id] = database.get(
|
||||||
self._machine, {}).get(
|
self.sys_machine, {}).get(
|
||||||
dev_name, {}).get(alsa_id, f"{dev_name}: {chan_id}")
|
dev_name, {}).get(alsa_id, f"{dev_name}: {chan_id}")
|
||||||
|
|
||||||
self._cache = current_id
|
self._cache = current_id
|
||||||
@ -98,8 +98,8 @@ class AlsaAudio(CoreSysAttributes):
|
|||||||
# Init defaults
|
# Init defaults
|
||||||
if self._default is None:
|
if self._default is None:
|
||||||
database = self._audio_database()
|
database = self._audio_database()
|
||||||
alsa_input = database.get(self._machine, {}).get(ATTR_INPUT)
|
alsa_input = database.get(self.sys_machine, {}).get(ATTR_INPUT)
|
||||||
alsa_output = database.get(self._machine, {}).get(ATTR_OUTPUT)
|
alsa_output = database.get(self.sys_machine, {}).get(ATTR_OUTPUT)
|
||||||
|
|
||||||
self._default = DefaultConfig(alsa_input, alsa_output)
|
self._default = DefaultConfig(alsa_input, alsa_output)
|
||||||
|
|
||||||
|
@ -1,13 +1,16 @@
|
|||||||
"""Interface to Systemd over dbus."""
|
"""Interface to Systemd over dbus."""
|
||||||
|
import logging
|
||||||
|
|
||||||
|
from ..exceptions import HassioInternalError
|
||||||
from ..utils.gdbus import DBus, DBusError
|
from ..utils.gdbus import DBus, DBusError
|
||||||
|
|
||||||
|
_LOGGER = logging.getLogger(__name__)
|
||||||
|
|
||||||
DBUS_NAME = 'org.freedesktop.systemd1'
|
DBUS_NAME = 'org.freedesktop.systemd1'
|
||||||
DBUS_OBJECT = '/org/freedesktop/systemd1/Manager'
|
DBUS_OBJECT = '/org/freedesktop/systemd1/Manager'
|
||||||
|
|
||||||
|
|
||||||
class System(object):
|
class Systemd(object):
|
||||||
"""Systemd function handler."""
|
"""Systemd function handler."""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
@ -19,14 +22,21 @@ class System(object):
|
|||||||
try:
|
try:
|
||||||
self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
|
self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
|
||||||
except DBusError:
|
except DBusError:
|
||||||
|
_LOGGER.warning("Can't connect to systemd")
|
||||||
return
|
return
|
||||||
|
|
||||||
async def reboot():
|
async def reboot(self):
|
||||||
"""Reboot host computer."""
|
"""Reboot host computer."""
|
||||||
try:
|
try:
|
||||||
await self.dbus.Reboot()
|
await self.dbus.Reboot()
|
||||||
except DBusError:
|
except DBusError:
|
||||||
_LOGGER.error("Can't reboot host")
|
_LOGGER.error("Can't reboot host")
|
||||||
|
raise HassioInternalError() from None
|
||||||
|
|
||||||
async def shutdown():
|
async def shutdown(self):
|
||||||
"""Shutdown host computer."""
|
"""Shutdown host computer."""
|
||||||
|
try:
|
||||||
|
await self.dbus.PowerOff()
|
||||||
|
except DBusError:
|
||||||
|
_LOGGER.error("Can't PowerOff host")
|
||||||
|
raise HassioInternalError() from None
|
@ -2,7 +2,6 @@
|
|||||||
|
|
||||||
from .mqtt import MQTTService
|
from .mqtt import MQTTService
|
||||||
from .data import ServicesData
|
from .data import ServicesData
|
||||||
from .discovery import Discovery
|
|
||||||
from ..const import SERVICE_MQTT
|
from ..const import SERVICE_MQTT
|
||||||
from ..coresys import CoreSysAttributes
|
from ..coresys import CoreSysAttributes
|
||||||
|
|
||||||
@ -19,7 +18,6 @@ class ServiceManager(CoreSysAttributes):
|
|||||||
"""Initialize Services handler."""
|
"""Initialize Services handler."""
|
||||||
self.coresys = coresys
|
self.coresys = coresys
|
||||||
self.data = ServicesData()
|
self.data = ServicesData()
|
||||||
self.discovery = Discovery(coresys)
|
|
||||||
self.services_obj = {}
|
self.services_obj = {}
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@ -37,9 +35,9 @@ class ServiceManager(CoreSysAttributes):
|
|||||||
self.services_obj[slug] = service(self.coresys)
|
self.services_obj[slug] = service(self.coresys)
|
||||||
|
|
||||||
# Read existing discovery messages
|
# Read existing discovery messages
|
||||||
self.discovery.load()
|
self.sys_discovery.load()
|
||||||
|
|
||||||
def reset(self):
|
def reset(self):
|
||||||
"""Reset available data."""
|
"""Reset available data."""
|
||||||
self.data.reset_data()
|
self.data.reset_data()
|
||||||
self.discovery.load()
|
self.sys_discovery.load()
|
||||||
|
@ -36,7 +36,7 @@ class Discovery(CoreSysAttributes):
|
|||||||
|
|
||||||
self._data.clear()
|
self._data.clear()
|
||||||
self._data.extend(messages)
|
self._data.extend(messages)
|
||||||
self._services.data.save_data()
|
self.sys_services.data.save_data()
|
||||||
|
|
||||||
def get(self, uuid):
|
def get(self, uuid):
|
||||||
"""Return discovery message."""
|
"""Return discovery message."""
|
||||||
@ -45,7 +45,7 @@ class Discovery(CoreSysAttributes):
|
|||||||
@property
|
@property
|
||||||
def _data(self):
|
def _data(self):
|
||||||
"""Return discovery data."""
|
"""Return discovery data."""
|
||||||
return self._services.data.discovery
|
return self.sys_services.data.discovery
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def list_messages(self):
|
def list_messages(self):
|
||||||
@ -69,7 +69,7 @@ class Discovery(CoreSysAttributes):
|
|||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
# send event to Home-Assistant
|
# send event to Home-Assistant
|
||||||
self._loop.create_task(self._homeassistant.send_event(
|
self.sys_create_task(self.sys_homeassistant.send_event(
|
||||||
EVENT_DISCOVERY_ADD, {ATTR_UUID: message.uuid}))
|
EVENT_DISCOVERY_ADD, {ATTR_UUID: message.uuid}))
|
||||||
|
|
||||||
return message
|
return message
|
||||||
@ -80,7 +80,7 @@ class Discovery(CoreSysAttributes):
|
|||||||
self.save()
|
self.save()
|
||||||
|
|
||||||
# send event to Home-Assistant
|
# send event to Home-Assistant
|
||||||
self._loop.create_task(self._homeassistant.send_event(
|
self.sys_create_task(self.sys_homeassistant.send_event(
|
||||||
EVENT_DISCOVERY_DEL, {ATTR_UUID: message.uuid}))
|
EVENT_DISCOVERY_DEL, {ATTR_UUID: message.uuid}))
|
||||||
|
|
||||||
|
|
||||||
|
@ -37,7 +37,7 @@ class ServiceInterface(CoreSysAttributes):
|
|||||||
|
|
||||||
def save(self):
|
def save(self):
|
||||||
"""Save changes."""
|
"""Save changes."""
|
||||||
self._services.data.save_data()
|
self.sys_services.data.save_data()
|
||||||
|
|
||||||
def get_service_data(self):
|
def get_service_data(self):
|
||||||
"""Return the requested service data."""
|
"""Return the requested service data."""
|
||||||
|
@ -21,7 +21,7 @@ class MQTTService(ServiceInterface):
|
|||||||
@property
|
@property
|
||||||
def _data(self):
|
def _data(self):
|
||||||
"""Return data of this service."""
|
"""Return data of this service."""
|
||||||
return self._services.data.mqtt
|
return self.sys_services.data.mqtt
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def schema(self):
|
def schema(self):
|
||||||
@ -66,7 +66,7 @@ class MQTTService(ServiceInterface):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
# discover mqtt to homeassistant
|
# discover mqtt to homeassistant
|
||||||
message = self._services.discovery.send(
|
message = self.sys_discovery.send(
|
||||||
provider, SERVICE_MQTT, None, self.hass_config)
|
provider, SERVICE_MQTT, None, self.hass_config)
|
||||||
|
|
||||||
self._data[ATTR_DISCOVERY_ID] = message.uuid
|
self._data[ATTR_DISCOVERY_ID] = message.uuid
|
||||||
@ -81,8 +81,8 @@ class MQTTService(ServiceInterface):
|
|||||||
|
|
||||||
discovery_id = self._data.get(ATTR_DISCOVERY_ID)
|
discovery_id = self._data.get(ATTR_DISCOVERY_ID)
|
||||||
if discovery_id:
|
if discovery_id:
|
||||||
self._services.discovery.remove(
|
self.sys_discovery.remove(
|
||||||
self._services.discovery.get(discovery_id))
|
self.sys_discovery.get(discovery_id))
|
||||||
|
|
||||||
self._data.clear()
|
self._data.clear()
|
||||||
self.save()
|
self.save()
|
||||||
|
@ -35,7 +35,7 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
"""Initialize a new snapshot object from name."""
|
"""Initialize a new snapshot object from name."""
|
||||||
date_str = utcnow().isoformat()
|
date_str = utcnow().isoformat()
|
||||||
slug = create_slug(name, date_str)
|
slug = create_slug(name, date_str)
|
||||||
tar_file = Path(self._config.path_backup, f"{slug}.tar")
|
tar_file = Path(self.sys_config.path_backup, f"{slug}.tar")
|
||||||
|
|
||||||
# init object
|
# init object
|
||||||
snapshot = Snapshot(self.coresys, tar_file)
|
snapshot = Snapshot(self.coresys, tar_file)
|
||||||
@ -65,7 +65,7 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
self.snapshots_obj[snapshot.slug] = snapshot
|
self.snapshots_obj[snapshot.slug] = snapshot
|
||||||
|
|
||||||
tasks = [_load_snapshot(tar_file) for tar_file in
|
tasks = [_load_snapshot(tar_file) for tar_file in
|
||||||
self._config.path_backup.glob("*.tar")]
|
self.sys_config.path_backup.glob("*.tar")]
|
||||||
|
|
||||||
_LOGGER.info("Found %d snapshot files", len(tasks))
|
_LOGGER.info("Found %d snapshot files", len(tasks))
|
||||||
if tasks:
|
if tasks:
|
||||||
@ -98,7 +98,7 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
return None
|
return None
|
||||||
|
|
||||||
# Move snapshot to backup
|
# Move snapshot to backup
|
||||||
tar_origin = Path(self._config.path_backup, f"{snapshot.slug}.tar")
|
tar_origin = Path(self.sys_config.path_backup, f"{snapshot.slug}.tar")
|
||||||
try:
|
try:
|
||||||
snapshot.tarfile.rename(tar_origin)
|
snapshot.tarfile.rename(tar_origin)
|
||||||
|
|
||||||
@ -124,7 +124,7 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
snapshot = self._create_snapshot(name, SNAPSHOT_FULL, password)
|
snapshot = self._create_snapshot(name, SNAPSHOT_FULL, password)
|
||||||
_LOGGER.info("Full-Snapshot %s start", snapshot.slug)
|
_LOGGER.info("Full-Snapshot %s start", snapshot.slug)
|
||||||
try:
|
try:
|
||||||
self._scheduler.suspend = True
|
self.sys_scheduler.suspend = True
|
||||||
await self.lock.acquire()
|
await self.lock.acquire()
|
||||||
|
|
||||||
async with snapshot:
|
async with snapshot:
|
||||||
@ -146,7 +146,7 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
return snapshot
|
return snapshot
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
self._scheduler.suspend = False
|
self.sys_scheduler.suspend = False
|
||||||
self.lock.release()
|
self.lock.release()
|
||||||
|
|
||||||
async def do_snapshot_partial(self, name="", addons=None, folders=None,
|
async def do_snapshot_partial(self, name="", addons=None, folders=None,
|
||||||
@ -162,14 +162,14 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
|
|
||||||
_LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
|
_LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
|
||||||
try:
|
try:
|
||||||
self._scheduler.suspend = True
|
self.sys_scheduler.suspend = True
|
||||||
await self.lock.acquire()
|
await self.lock.acquire()
|
||||||
|
|
||||||
async with snapshot:
|
async with snapshot:
|
||||||
# Snapshot add-ons
|
# Snapshot add-ons
|
||||||
addon_list = []
|
addon_list = []
|
||||||
for addon_slug in addons:
|
for addon_slug in addons:
|
||||||
addon = self._addons.get(addon_slug)
|
addon = self.sys_addons.get(addon_slug)
|
||||||
if addon and addon.is_installed:
|
if addon and addon.is_installed:
|
||||||
addon_list.append(addon)
|
addon_list.append(addon)
|
||||||
continue
|
continue
|
||||||
@ -195,7 +195,7 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
return snapshot
|
return snapshot
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
self._scheduler.suspend = False
|
self.sys_scheduler.suspend = False
|
||||||
self.lock.release()
|
self.lock.release()
|
||||||
|
|
||||||
async def do_restore_full(self, snapshot, password=None):
|
async def do_restore_full(self, snapshot, password=None):
|
||||||
@ -215,15 +215,15 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
|
|
||||||
_LOGGER.info("Full-Restore %s start", snapshot.slug)
|
_LOGGER.info("Full-Restore %s start", snapshot.slug)
|
||||||
try:
|
try:
|
||||||
self._scheduler.suspend = True
|
self.sys_scheduler.suspend = True
|
||||||
await self.lock.acquire()
|
await self.lock.acquire()
|
||||||
|
|
||||||
async with snapshot:
|
async with snapshot:
|
||||||
tasks = []
|
tasks = []
|
||||||
|
|
||||||
# Stop Home-Assistant / Add-ons
|
# Stop Home-Assistant / Add-ons
|
||||||
tasks.append(self._homeassistant.stop())
|
tasks.append(self.sys_homeassistant.stop())
|
||||||
for addon in self._addons.list_addons:
|
for addon in self.sys_addons.list_addons:
|
||||||
if addon.is_installed:
|
if addon.is_installed:
|
||||||
tasks.append(addon.stop())
|
tasks.append(addon.stop())
|
||||||
|
|
||||||
@ -238,8 +238,8 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
# Start homeassistant restore
|
# Start homeassistant restore
|
||||||
_LOGGER.info("Restore %s run Home-Assistant", snapshot.slug)
|
_LOGGER.info("Restore %s run Home-Assistant", snapshot.slug)
|
||||||
snapshot.restore_homeassistant()
|
snapshot.restore_homeassistant()
|
||||||
task_hass = self._loop.create_task(
|
task_hass = self.sys_create_task(self.sys_homeassistant.update(
|
||||||
self._homeassistant.update(snapshot.homeassistant_version))
|
snapshot.homeassistant_version))
|
||||||
|
|
||||||
# Restore repositories
|
# Restore repositories
|
||||||
_LOGGER.info("Restore %s run Repositories", snapshot.slug)
|
_LOGGER.info("Restore %s run Repositories", snapshot.slug)
|
||||||
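The restore path above starts the slow Home Assistant image update as a background task first, performs the rest of the restore, and only awaits that task right before the instance is started again. The ordering trick in isolation (the sleep() calls stand in for the real work):

    import asyncio


    async def update_homeassistant():
        await asyncio.sleep(2)      # stands in for pulling the new image


    async def restore(loop):
        task_hass = loop.create_task(update_homeassistant())  # kick off early
        await asyncio.sleep(1)      # stands in for repositories/add-on restore
        await task_hass             # make sure the update finished...
        # ...before Home Assistant would be started again


    loop = asyncio.get_event_loop()
    loop.run_until_complete(restore(loop))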
@ -247,7 +247,7 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
|
|
||||||
# Delete delta add-ons
|
# Delete delta add-ons
|
||||||
tasks.clear()
|
tasks.clear()
|
||||||
for addon in self._addons.list_installed:
|
for addon in self.sys_addons.list_installed:
|
||||||
if addon.slug not in snapshot.addon_list:
|
if addon.slug not in snapshot.addon_list:
|
||||||
tasks.append(addon.uninstall())
|
tasks.append(addon.uninstall())
|
||||||
|
|
||||||
@ -263,7 +263,7 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
_LOGGER.info("Restore %s wait until homeassistant ready",
|
_LOGGER.info("Restore %s wait until homeassistant ready",
|
||||||
snapshot.slug)
|
snapshot.slug)
|
||||||
await task_hass
|
await task_hass
|
||||||
await self._homeassistant.start()
|
await self.sys_homeassistant.start()
|
||||||
|
|
||||||
except Exception: # pylint: disable=broad-except
|
except Exception: # pylint: disable=broad-except
|
||||||
_LOGGER.exception("Restore %s error", snapshot.slug)
|
_LOGGER.exception("Restore %s error", snapshot.slug)
|
||||||
@ -274,7 +274,7 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
self._scheduler.suspend = False
|
self.sys_scheduler.suspend = False
|
||||||
self.lock.release()
|
self.lock.release()
|
||||||
|
|
||||||
async def do_restore_partial(self, snapshot, homeassistant=False,
|
async def do_restore_partial(self, snapshot, homeassistant=False,
|
||||||
@ -293,13 +293,13 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
|
|
||||||
_LOGGER.info("Partial-Restore %s start", snapshot.slug)
|
_LOGGER.info("Partial-Restore %s start", snapshot.slug)
|
||||||
try:
|
try:
|
||||||
self._scheduler.suspend = True
|
self.sys_scheduler.suspend = True
|
||||||
await self.lock.acquire()
|
await self.lock.acquire()
|
||||||
|
|
||||||
async with snapshot:
|
async with snapshot:
|
||||||
# Stop Home-Assistant if it will be restored later
|
# Stop Home-Assistant if it will be restored later
|
||||||
if homeassistant and FOLDER_HOMEASSISTANT in folders:
|
if homeassistant and FOLDER_HOMEASSISTANT in folders:
|
||||||
await self._homeassistant.stop()
|
await self.sys_homeassistant.stop()
|
||||||
|
|
||||||
# Process folders
|
# Process folders
|
||||||
if folders:
|
if folders:
|
||||||
@ -312,14 +312,14 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
_LOGGER.info("Restore %s run Home-Assistant",
|
_LOGGER.info("Restore %s run Home-Assistant",
|
||||||
snapshot.slug)
|
snapshot.slug)
|
||||||
snapshot.restore_homeassistant()
|
snapshot.restore_homeassistant()
|
||||||
task_hass = self._loop.create_task(
|
task_hass = self.sys_create_task(
|
||||||
self._homeassistant.update(
|
self.sys_homeassistant.update(
|
||||||
snapshot.homeassistant_version))
|
snapshot.homeassistant_version))
|
||||||
|
|
||||||
# Process Add-ons
|
# Process Add-ons
|
||||||
addon_list = []
|
addon_list = []
|
||||||
for slug in addons:
|
for slug in addons:
|
||||||
addon = self._addons.get(slug)
|
addon = self.sys_addons.get(slug)
|
||||||
if addon:
|
if addon:
|
||||||
addon_list.append(addon)
|
addon_list.append(addon)
|
||||||
continue
|
continue
|
||||||
@ -334,7 +334,7 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
_LOGGER.info("Restore %s wait for Home-Assistant",
|
_LOGGER.info("Restore %s wait for Home-Assistant",
|
||||||
snapshot.slug)
|
snapshot.slug)
|
||||||
await task_hass
|
await task_hass
|
||||||
await self._homeassistant.start()
|
await self.sys_homeassistant.start()
|
||||||
|
|
||||||
except Exception: # pylint: disable=broad-except
|
except Exception: # pylint: disable=broad-except
|
||||||
_LOGGER.exception("Restore %s error", snapshot.slug)
|
_LOGGER.exception("Restore %s error", snapshot.slug)
|
||||||
@ -345,5 +345,5 @@ class SnapshotManager(CoreSysAttributes):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
self._scheduler.suspend = False
|
self.sys_scheduler.suspend = False
|
||||||
self.lock.release()
|
self.lock.release()
|
||||||
|
@ -179,7 +179,7 @@ class Snapshot(CoreSysAttributes):
|
|||||||
|
|
||||||
# read snapshot.json
|
# read snapshot.json
|
||||||
try:
|
try:
|
||||||
raw = await self._loop.run_in_executor(None, _load_file)
|
raw = await self.sys_run_in_executor(_load_file)
|
||||||
except (tarfile.TarError, KeyError) as err:
|
except (tarfile.TarError, KeyError) as err:
|
||||||
_LOGGER.error(
|
_LOGGER.error(
|
||||||
"Can't read snapshot tarfile %s: %s", self.tarfile, err)
|
"Can't read snapshot tarfile %s: %s", self.tarfile, err)
|
||||||
@ -204,7 +204,7 @@ class Snapshot(CoreSysAttributes):
|
|||||||
|
|
||||||
async def __aenter__(self):
|
async def __aenter__(self):
|
||||||
"""Async context to open a snapshot."""
|
"""Async context to open a snapshot."""
|
||||||
self._tmp = TemporaryDirectory(dir=str(self._config.path_tmp))
|
self._tmp = TemporaryDirectory(dir=str(self.sys_config.path_tmp))
|
||||||
|
|
||||||
# create a snapshot
|
# create a snapshot
|
||||||
if not self.tarfile.is_file():
|
if not self.tarfile.is_file():
|
||||||
@ -216,7 +216,7 @@ class Snapshot(CoreSysAttributes):
|
|||||||
with tarfile.open(self.tarfile, "r:") as tar:
|
with tarfile.open(self.tarfile, "r:") as tar:
|
||||||
tar.extractall(path=self._tmp.name)
|
tar.extractall(path=self._tmp.name)
|
||||||
|
|
||||||
await self._loop.run_in_executor(None, _extract_snapshot)
|
await self.sys_run_in_executor(_extract_snapshot)
|
||||||
|
|
||||||
async def __aexit__(self, exception_type, exception_value, traceback):
|
async def __aexit__(self, exception_type, exception_value, traceback):
|
||||||
"""Async context to close a snapshot."""
|
"""Async context to close a snapshot."""
|
||||||
@ -241,7 +241,7 @@ class Snapshot(CoreSysAttributes):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
write_json_file(Path(self._tmp.name, "snapshot.json"), self._data)
|
write_json_file(Path(self._tmp.name, "snapshot.json"), self._data)
|
||||||
await self._loop.run_in_executor(None, _create_snapshot)
|
await self.sys_run_in_executor(_create_snapshot)
|
||||||
except (OSError, json.JSONDecodeError) as err:
|
except (OSError, json.JSONDecodeError) as err:
|
||||||
_LOGGER.error("Can't write snapshot: %s", err)
|
_LOGGER.error("Can't write snapshot: %s", err)
|
||||||
finally:
|
finally:
|
||||||
@ -249,7 +249,7 @@ class Snapshot(CoreSysAttributes):
|
|||||||
|
|
||||||
async def store_addons(self, addon_list=None):
|
async def store_addons(self, addon_list=None):
|
||||||
"""Add a list of add-ons into snapshot."""
|
"""Add a list of add-ons into snapshot."""
|
||||||
addon_list = addon_list or self._addons.list_installed
|
addon_list = addon_list or self.sys_addons.list_installed
|
||||||
|
|
||||||
async def _addon_save(addon):
|
async def _addon_save(addon):
|
||||||
"""Task to store a add-on into snapshot."""
|
"""Task to store a add-on into snapshot."""
|
||||||
@ -280,7 +280,7 @@ class Snapshot(CoreSysAttributes):
|
|||||||
if not addon_list:
|
if not addon_list:
|
||||||
addon_list = []
|
addon_list = []
|
||||||
for addon_slug in self.addon_list:
|
for addon_slug in self.addon_list:
|
||||||
addon = self._addons.get(addon_slug)
|
addon = self.sys_addons.get(addon_slug)
|
||||||
if addon:
|
if addon:
|
||||||
addon_list.append(addon)
|
addon_list.append(addon)
|
||||||
|
|
||||||
@ -313,7 +313,7 @@ class Snapshot(CoreSysAttributes):
|
|||||||
"""Intenal function to snapshot a folder."""
|
"""Intenal function to snapshot a folder."""
|
||||||
slug_name = name.replace("/", "_")
|
slug_name = name.replace("/", "_")
|
||||||
tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
|
tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
|
||||||
origin_dir = Path(self._config.path_hassio, name)
|
origin_dir = Path(self.sys_config.path_hassio, name)
|
||||||
|
|
||||||
# Check if exists
|
# Check if exists
|
||||||
if not origin_dir.is_dir():
|
if not origin_dir.is_dir():
|
||||||
@ -332,7 +332,7 @@ class Snapshot(CoreSysAttributes):
|
|||||||
_LOGGER.warning("Can't snapshot folder %s: %s", name, err)
|
_LOGGER.warning("Can't snapshot folder %s: %s", name, err)
|
||||||
|
|
||||||
# Run tasks
|
# Run tasks
|
||||||
tasks = [self._loop.run_in_executor(None, _folder_save, folder)
|
tasks = [self.sys_run_in_executor(_folder_save, folder)
|
||||||
for folder in folder_list]
|
for folder in folder_list]
|
||||||
if tasks:
|
if tasks:
|
||||||
await asyncio.wait(tasks)
|
await asyncio.wait(tasks)
|
||||||
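store_folders() above fans the blocking tarfile work out to the executor, one job per folder, and then waits on all of them with asyncio.wait. The same shape in isolation (the paths and the gzip compression choice are illustrative):

    import asyncio
    import tarfile
    from pathlib import Path


    def _folder_save(origin_dir, tar_name):
        """Blocking helper: pack one folder (runs inside the executor)."""
        with tarfile.open(str(tar_name), "w:gz") as tar:
            tar.add(str(origin_dir), arcname=".")


    async def save_folders(loop, folders, tmp_dir):
        # One executor job per existing folder, awaited together.
        tasks = [
            loop.run_in_executor(
                None, _folder_save, folder, Path(tmp_dir, f"{folder.name}.tar.gz"))
            for folder in folders if folder.is_dir()
        ]
        if tasks:
            await asyncio.wait(tasks)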
@ -345,7 +345,7 @@ class Snapshot(CoreSysAttributes):
|
|||||||
"""Intenal function to restore a folder."""
|
"""Intenal function to restore a folder."""
|
||||||
slug_name = name.replace("/", "_")
|
slug_name = name.replace("/", "_")
|
||||||
tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
|
tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
|
||||||
origin_dir = Path(self._config.path_hassio, name)
|
origin_dir = Path(self.sys_config.path_hassio, name)
|
||||||
|
|
||||||
# Check if exists inside snapshot
|
# Check if exists inside snapshot
|
||||||
if not tar_name.exists():
|
if not tar_name.exists():
|
||||||
@ -366,58 +366,58 @@ class Snapshot(CoreSysAttributes):
|
|||||||
_LOGGER.warning("Can't restore folder %s: %s", name, err)
|
_LOGGER.warning("Can't restore folder %s: %s", name, err)
|
||||||
|
|
||||||
# Run tasks
|
# Run tasks
|
||||||
tasks = [self._loop.run_in_executor(None, _folder_restore, folder)
|
tasks = [self.sys_run_in_executor(_folder_restore, folder)
|
||||||
for folder in folder_list]
|
for folder in folder_list]
|
||||||
if tasks:
|
if tasks:
|
||||||
await asyncio.wait(tasks)
|
await asyncio.wait(tasks)
|
||||||
|
|
||||||
def store_homeassistant(self):
|
def store_homeassistant(self):
|
||||||
"""Read all data from homeassistant object."""
|
"""Read all data from homeassistant object."""
|
||||||
self.homeassistant[ATTR_VERSION] = self._homeassistant.version
|
self.homeassistant[ATTR_VERSION] = self.sys_homeassistant.version
|
||||||
self.homeassistant[ATTR_WATCHDOG] = self._homeassistant.watchdog
|
self.homeassistant[ATTR_WATCHDOG] = self.sys_homeassistant.watchdog
|
||||||
self.homeassistant[ATTR_BOOT] = self._homeassistant.boot
|
self.homeassistant[ATTR_BOOT] = self.sys_homeassistant.boot
|
||||||
self.homeassistant[ATTR_WAIT_BOOT] = self._homeassistant.wait_boot
|
self.homeassistant[ATTR_WAIT_BOOT] = self.sys_homeassistant.wait_boot
|
||||||
|
|
||||||
# Custom image
|
# Custom image
|
||||||
if self._homeassistant.is_custom_image:
|
if self.sys_homeassistant.is_custom_image:
|
||||||
self.homeassistant[ATTR_IMAGE] = self._homeassistant.image
|
self.homeassistant[ATTR_IMAGE] = self.sys_homeassistant.image
|
||||||
self.homeassistant[ATTR_LAST_VERSION] = \
|
self.homeassistant[ATTR_LAST_VERSION] = \
|
||||||
self._homeassistant.last_version
|
self.sys_homeassistant.last_version
|
||||||
|
|
||||||
# API/Proxy
|
# API/Proxy
|
||||||
self.homeassistant[ATTR_PORT] = self._homeassistant.api_port
|
self.homeassistant[ATTR_PORT] = self.sys_homeassistant.api_port
|
||||||
self.homeassistant[ATTR_SSL] = self._homeassistant.api_ssl
|
self.homeassistant[ATTR_SSL] = self.sys_homeassistant.api_ssl
|
||||||
self.homeassistant[ATTR_PASSWORD] = \
|
self.homeassistant[ATTR_PASSWORD] = \
|
||||||
self._encrypt_data(self._homeassistant.api_password)
|
self._encrypt_data(self.sys_homeassistant.api_password)
|
||||||
|
|
||||||
def restore_homeassistant(self):
|
def restore_homeassistant(self):
|
||||||
"""Write all data to homeassistant object."""
|
"""Write all data to homeassistant object."""
|
||||||
self._homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG]
|
self.sys_homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG]
|
||||||
self._homeassistant.boot = self.homeassistant[ATTR_BOOT]
|
self.sys_homeassistant.boot = self.homeassistant[ATTR_BOOT]
|
||||||
self._homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT]
|
self.sys_homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT]
|
||||||
|
|
||||||
# Custom image
|
# Custom image
|
||||||
if self.homeassistant.get(ATTR_IMAGE):
|
if self.homeassistant.get(ATTR_IMAGE):
|
||||||
self._homeassistant.image = self.homeassistant[ATTR_IMAGE]
|
self.sys_homeassistant.image = self.homeassistant[ATTR_IMAGE]
|
||||||
self._homeassistant.last_version = \
|
self.sys_homeassistant.last_version = \
|
||||||
self.homeassistant[ATTR_LAST_VERSION]
|
self.homeassistant[ATTR_LAST_VERSION]
|
||||||
|
|
||||||
# API/Proxy
|
# API/Proxy
|
||||||
self._homeassistant.api_port = self.homeassistant[ATTR_PORT]
|
self.sys_homeassistant.api_port = self.homeassistant[ATTR_PORT]
|
||||||
self._homeassistant.api_ssl = self.homeassistant[ATTR_SSL]
|
self.sys_homeassistant.api_ssl = self.homeassistant[ATTR_SSL]
|
||||||
self._homeassistant.api_password = \
|
self.sys_homeassistant.api_password = \
|
||||||
self._decrypt_data(self.homeassistant[ATTR_PASSWORD])
|
self._decrypt_data(self.homeassistant[ATTR_PASSWORD])
|
||||||
|
|
||||||
# save
|
# save
|
||||||
self._homeassistant.save_data()
|
self.sys_homeassistant.save_data()
|
||||||
|
|
||||||
def store_repositories(self):
|
def store_repositories(self):
|
||||||
"""Store repository list into snapshot."""
|
"""Store repository list into snapshot."""
|
||||||
self.repositories = self._config.addons_repositories
|
self.repositories = self.sys_config.addons_repositories
|
||||||
|
|
||||||
def restore_repositories(self):
|
def restore_repositories(self):
|
||||||
"""Restore repositories from snapshot.
|
"""Restore repositories from snapshot.
|
||||||
|
|
||||||
Return a coroutine.
|
Return a coroutine.
|
||||||
"""
|
"""
|
||||||
return self._addons.load_repositories(self.repositories)
|
return self.sys_addons.load_repositories(self.repositories)
|
||||||
|
@ -34,7 +34,7 @@ class Supervisor(CoreSysAttributes):
|
|||||||
@property
|
@property
|
||||||
def last_version(self):
|
def last_version(self):
|
||||||
"""Return last available version of homeassistant."""
|
"""Return last available version of homeassistant."""
|
||||||
return self._updater.version_hassio
|
return self.sys_updater.version_hassio
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def image(self):
|
def image(self):
|
||||||
@ -50,13 +50,13 @@ class Supervisor(CoreSysAttributes):
|
|||||||
"""Update HomeAssistant version."""
|
"""Update HomeAssistant version."""
|
||||||
version = version or self.last_version
|
version = version or self.last_version
|
||||||
|
|
||||||
if version == self._supervisor.version:
|
if version == self.sys_supervisor.version:
|
||||||
_LOGGER.warning("Version %s is already installed", version)
|
_LOGGER.warning("Version %s is already installed", version)
|
||||||
return
|
return
|
||||||
|
|
||||||
_LOGGER.info("Update supervisor to version %s", version)
|
_LOGGER.info("Update supervisor to version %s", version)
|
||||||
if await self.instance.install(version):
|
if await self.instance.install(version):
|
||||||
self._loop.call_later(1, self._loop.stop)
|
self.sys_loop.call_later(1, self.sys_loop.stop)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
_LOGGER.error("Update of hass.io fails!")
|
_LOGGER.error("Update of hass.io fails!")
|
||||||
|
@@ -29,24 +29,24 @@ class Tasks(CoreSysAttributes):

     async def load(self):
         """Add Tasks to scheduler."""
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
             self._update_addons, self.RUN_UPDATE_ADDONS))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
             self._update_supervisor, self.RUN_UPDATE_SUPERVISOR))

-        self.jobs.add(self._scheduler.register_task(
-            self._addons.reload, self.RUN_RELOAD_ADDONS))
-        self.jobs.add(self._scheduler.register_task(
-            self._updater.reload, self.RUN_RELOAD_UPDATER))
-        self.jobs.add(self._scheduler.register_task(
-            self._snapshots.reload, self.RUN_RELOAD_SNAPSHOTS))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_addons.reload, self.RUN_RELOAD_ADDONS))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_updater.reload, self.RUN_RELOAD_UPDATER))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_snapshots.reload, self.RUN_RELOAD_SNAPSHOTS))
+        self.jobs.add(self.sys_scheduler.register_task(
             self._host_control.load, self.RUN_RELOAD_HOST_CONTROL))

-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
             self._watchdog_homeassistant_docker,
             self.RUN_WATCHDOG_HOMEASSISTANT_DOCKER))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
             self._watchdog_homeassistant_api,
             self.RUN_WATCHDOG_HOMEASSISTANT_API))

@@ -55,7 +55,7 @@ class Tasks(CoreSysAttributes):
     async def _update_addons(self):
         """Check if a update is available of a addon and update it."""
         tasks = []
-        for addon in self._addons.list_addons:
+        for addon in self.sys_addons.list_addons:
             if not addon.is_installed or not addon.auto_update:
                 continue

@@ -74,31 +74,31 @@ class Tasks(CoreSysAttributes):

     async def _update_supervisor(self):
         """Check and run update of supervisor hassio."""
-        if not self._supervisor.need_update:
+        if not self.sys_supervisor.need_update:
             return

         # don't perform a update on beta/dev channel
-        if self._dev:
+        if self.sys_dev:
             _LOGGER.warning("Ignore Hass.io update on dev channel!")
             return

         _LOGGER.info("Found new Hass.io version")
-        await self._supervisor.update()
+        await self.sys_supervisor.update()

     async def _watchdog_homeassistant_docker(self):
         """Check running state of docker and start if they is close."""
         # if Home-Assistant is active
-        if not await self._homeassistant.is_initialize() or \
-                not self._homeassistant.watchdog:
+        if not await self.sys_homeassistant.is_initialize() or \
+                not self.sys_homeassistant.watchdog:
             return

         # if Home-Assistant is running
-        if self._homeassistant.in_progress or \
-                await self._homeassistant.is_running():
+        if self.sys_homeassistant.in_progress or \
+                await self.sys_homeassistant.is_running():
             return

         _LOGGER.warning("Watchdog found a problem with Home-Assistant docker!")
-        await self._homeassistant.start()
+        await self.sys_homeassistant.start()

     async def _watchdog_homeassistant_api(self):
         """Create scheduler task for montoring running state of API.
@@ -109,13 +109,13 @@ class Tasks(CoreSysAttributes):
         retry_scan = self._data.get('HASS_WATCHDOG_API', 0)

         # If Home-Assistant is active
-        if not await self._homeassistant.is_initialize() or \
-                not self._homeassistant.watchdog:
+        if not await self.sys_homeassistant.is_initialize() or \
+                not self.sys_homeassistant.watchdog:
             return

         # If Home-Assistant API is up
-        if self._homeassistant.in_progress or \
-                await self._homeassistant.check_api_state():
+        if self.sys_homeassistant.in_progress or \
+                await self.sys_homeassistant.check_api_state():
             return

         # Look like we run into a problem
@@ -126,5 +126,5 @@ class Tasks(CoreSysAttributes):
             return

         _LOGGER.error("Watchdog found a problem with Home-Assistant API!")
-        await self._homeassistant.restart()
+        await self.sys_homeassistant.restart()
         self._data['HASS_WATCHDOG_API'] = 0
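
The API watchdog hunks above show only the guard checks, the retry counter read, and the final restart; the increment and threshold logic between them lies outside this diff. A minimal, self-contained sketch of that retry-counter idea follows. It is an assumption, not the real Supervisor code: MAX_API_RETRIES, watchdog_api, check_api_state and restart are illustrative names, and only the 'HASS_WATCHDOG_API' key comes from the diff.

# Hedged sketch of a watchdog that restarts Home Assistant only after the
# API has failed more than one scan in a row.
import asyncio

MAX_API_RETRIES = 1  # assumed threshold, not taken from the diff


async def watchdog_api(state, check_api_state, restart):
    """Restart Home Assistant only after repeated failed API scans."""
    if await check_api_state():
        state['HASS_WATCHDOG_API'] = 0  # API answered: reset the counter
        return

    retry_scan = state.get('HASS_WATCHDOG_API', 0) + 1
    state['HASS_WATCHDOG_API'] = retry_scan
    if retry_scan <= MAX_API_RETRIES:
        return  # give Home Assistant one more scan interval

    # Repeated failures: force a restart and start counting again.
    await restart()
    state['HASS_WATCHDOG_API'] = 0


if __name__ == "__main__":
    async def _demo():
        state = {}
        restarts = []

        async def check_api_state():
            return False  # simulate an API that never answers

        async def restart():
            restarts.append(1)

        for _ in range(2):
            await watchdog_api(state, check_api_state, restart)
        print(state, len(restarts))  # {'HASS_WATCHDOG_API': 0} 1

    asyncio.run(_demo())

Running the demo shows the intended behaviour: the first failed scan only bumps the counter, the second triggers one restart and resets it.
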
@@ -69,7 +69,7 @@ class Updater(JsonConfig, CoreSysAttributes):
         try:
             _LOGGER.info("Fetch update data from %s", url)
             with async_timeout.timeout(10):
-                async with self._websession.get(url) as request:
+                async with self.sys_websession.get(url) as request:
                     data = await request.json(content_type=None)

         except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err:
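
Every hunk in this commit replaces a private self._* handle with a sys_* accessor. The CoreSysAttributes mixin itself is not part of the diff, so the sketch below is only an assumed shape: read-only sys_* properties that proxy to one shared coresys container, which is what lets Supervisor, Tasks, Updater and the snapshot code above share a single config, loop, scheduler and HTTP session. Class and attribute names here are guesses based on the call sites, not the real hassio module.

# Sketch only: illustrative stand-in for the shared-core pattern the renamed
# call sites rely on.

class CoreSys:
    """Hypothetical container for the shared Supervisor singletons."""

    def __init__(self, config, loop, scheduler, homeassistant, websession):
        self.config = config
        self.loop = loop
        self.scheduler = scheduler
        self.homeassistant = homeassistant
        self.websession = websession


class CoreSysAttributes:
    """Mixin that exposes the container through read-only sys_* properties."""

    coresys = None  # assigned by whoever constructs the object

    @property
    def sys_config(self):
        """Global configuration, e.g. addons_repositories or wait_boot."""
        return self.coresys.config

    @property
    def sys_loop(self):
        """Shared asyncio event loop."""
        return self.coresys.loop

    @property
    def sys_scheduler(self):
        """Task scheduler used by Tasks.load() above."""
        return self.coresys.scheduler

    @property
    def sys_homeassistant(self):
        """Home Assistant handler used by the watchdog tasks."""
        return self.coresys.homeassistant

    @property
    def sys_websession(self):
        """aiohttp client session used in the Updater hunk above."""
        return self.coresys.websession

Under that assumption, a class only has to store coresys once and every renamed call site above (self.sys_config, self.sys_scheduler, self.sys_websession, and so on) resolves through the same container.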