diff --git a/API.md b/API.md index 58e5d136c..9b78563d9 100644 --- a/API.md +++ b/API.md @@ -217,8 +217,11 @@ return: ### Host - POST `/host/reload` + - POST `/host/shutdown` + - POST `/host/reboot` + - GET `/host/info` ```json @@ -228,14 +231,21 @@ return: "last_version": "", "features": ["shutdown", "reboot", "update", "hostname", "network_info", "network_control"], "hostname": "", - "os": "", - "audio": { - "input": "0,0", - "output": "0,0" - } + "operating_system": "", + "kernel": "", + "chassis": "" } ``` +- POST `/host/options` + +```json +{ + "hostname": "", +} +``` + + - POST `/host/update` Optional: @@ -284,24 +294,6 @@ Optional: } ``` -### Network - -- GET `/network/info` - -```json -{ - "hostname": "" -} -``` - -- POST `/network/options` - -```json -{ - "hostname": "", -} -``` - ### Home Assistant - GET `/homeassistant/info` @@ -613,46 +605,3 @@ This service perform a auto discovery to Home-Assistant. ``` - DEL `/services/mqtt` - -## Host Control - -Communicate over UNIX socket with a host daemon. - -- commands - -``` -# info --> {'type', 'version', 'last_version', 'features', 'hostname'} -# reboot -# shutdown -# host-update [v] - -# hostname xy - -# network info --> {} -# network wlan ssd xy -# network wlan password xy -# network int ip xy -# network int netmask xy -# network int route xy -``` - -Features: - -- shutdown -- reboot -- update -- hostname -- network_info -- network_control - -Answer: -``` -{}|OK|ERROR|WRONG -``` - -- {}: json -- OK: call was successfully -- ERROR: error on call -- WRONG: not supported diff --git a/Dockerfile b/Dockerfile index df893aafb..426c0f7ee 100644 --- a/Dockerfile +++ b/Dockerfile @@ -9,6 +9,7 @@ RUN apk add --no-cache \ python3 \ git \ socat \ + glib \ libstdc++ \ && apk add --no-cache --virtual .build-dependencies \ make \ diff --git a/hassio/__main__.py b/hassio/__main__.py index 4d6f0d6b9..b64bad447 100644 --- a/hassio/__main__.py +++ b/hassio/__main__.py @@ -5,7 +5,6 @@ import logging import sys import hassio.bootstrap as bootstrap -import hassio.core as core _LOGGER = logging.getLogger(__name__) @@ -34,14 +33,13 @@ if __name__ == "__main__": _LOGGER.info("Initialize Hassio setup") coresys = bootstrap.initialize_coresys(loop) - hassio = core.HassIO(coresys) bootstrap.migrate_system_env(coresys) _LOGGER.info("Setup HassIO") - loop.run_until_complete(hassio.setup()) + loop.run_until_complete(coresys.core.setup()) - loop.call_soon_threadsafe(loop.create_task, hassio.start()) + loop.call_soon_threadsafe(loop.create_task, coresys.core.start()) loop.call_soon_threadsafe(bootstrap.reg_signal, loop) try: @@ -49,7 +47,7 @@ if __name__ == "__main__": loop.run_forever() finally: _LOGGER.info("Stopping HassIO") - loop.run_until_complete(hassio.stop()) + loop.run_until_complete(coresys.core.stop()) executor.shutdown(wait=False) loop.close() diff --git a/hassio/addons/__init__.py b/hassio/addons/__init__.py index d75dfab96..58c728af1 100644 --- a/hassio/addons/__init__.py +++ b/hassio/addons/__init__.py @@ -5,7 +5,7 @@ import logging from .addon import Addon from .repository import Repository from .data import AddonsData -from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO +from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED from ..coresys import CoreSysAttributes _LOGGER = logging.getLogger(__name__) @@ -56,7 +56,7 @@ class AddonManager(CoreSysAttributes): # init hassio built-in repositories repositories = \ - set(self._config.addons_repositories) | BUILTIN_REPOSITORIES + 
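Reviewer note on the API.md hunk above: the old `/network/info` and `/network/options` endpoints are folded into the host API (`/host/info`, `/host/options`, `/host/shutdown`, `/host/reboot`). A minimal client sketch of the new calls follows; the `http://hassio` base URL, the `X-HASSIO-KEY` token header and the `result`/`data` envelope are assumptions about the surrounding API, not something this diff defines:

```python
import os

import requests

SUPERVISOR = "http://hassio"  # assumed internal URL add-ons use to reach the supervisor
HEADERS = {"X-HASSIO-KEY": os.environ.get("HASSIO_TOKEN", "")}  # assumed token header

# /host/info now reports operating_system, kernel and chassis instead of "os"/"audio".
info = requests.get(f"{SUPERVISOR}/host/info", headers=HEADERS).json()
print(info["data"]["hostname"], info["data"]["operating_system"])

# Hostname changes move from /network/options to the new /host/options endpoint.
requests.post(f"{SUPERVISOR}/host/options", headers=HEADERS,
              json={"hostname": "homeassistant"})
```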
set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES # init custom repositories & load addons await self.load_repositories(repositories) @@ -66,7 +66,7 @@ class AddonManager(CoreSysAttributes): tasks = [repository.update() for repository in self.repositories_obj.values()] if tasks: - await asyncio.wait(tasks, loop=self._loop) + await asyncio.wait(tasks) # read data from repositories self.data.reload() @@ -90,16 +90,16 @@ class AddonManager(CoreSysAttributes): # don't add built-in repository to config if url not in BUILTIN_REPOSITORIES: - self._config.add_addon_repository(url) + self.sys_config.add_addon_repository(url) tasks = [_add_repository(url) for url in new_rep - old_rep] if tasks: - await asyncio.wait(tasks, loop=self._loop) + await asyncio.wait(tasks) # del new repository for url in old_rep - new_rep - BUILTIN_REPOSITORIES: self.repositories_obj.pop(url).remove() - self._config.drop_addon_repository(url) + self.sys_config.drop_addon_repository(url) # update data self.data.reload() @@ -125,13 +125,13 @@ class AddonManager(CoreSysAttributes): self.addons_obj[addon_slug] = addon if tasks: - await asyncio.wait(tasks, loop=self._loop) + await asyncio.wait(tasks) # remove for addon_slug in del_addons: self.addons_obj.pop(addon_slug) - async def auto_boot(self, stage): + async def boot(self, stage): """Boot addons with mode auto.""" tasks = [] for addon in self.addons_obj.values(): @@ -141,5 +141,18 @@ class AddonManager(CoreSysAttributes): _LOGGER.info("Startup %s run %d addons", stage, len(tasks)) if tasks: - await asyncio.wait(tasks, loop=self._loop) - await asyncio.sleep(self._config.wait_boot, loop=self._loop) + await asyncio.wait(tasks) + await asyncio.sleep(self.sys_config.wait_boot) + + async def shutdown(self, stage): + """Shutdown addons.""" + tasks = [] + for addon in self.addons_obj.values(): + if addon.is_installed and \ + await addon.state() == STATE_STARTED and \ + addon.startup == stage: + tasks.append(addon.stop()) + + _LOGGER.info("Shutdown %s stop %d addons", stage, len(tasks)) + if tasks: + await asyncio.wait(tasks) diff --git a/hassio/addons/addon.py b/hassio/addons/addon.py index e17f3f7a5..4bebda235 100644 --- a/hassio/addons/addon.py +++ b/hassio/addons/addon.py @@ -66,7 +66,7 @@ class Addon(CoreSysAttributes): @property def _data(self): """Return addons data storage.""" - return self._addons.data + return self.sys_addons.data @property def is_installed(self): @@ -376,7 +376,7 @@ class Addon(CoreSysAttributes): if self.is_installed and \ ATTR_AUDIO_OUTPUT in self._data.user[self._id]: return self._data.user[self._id][ATTR_AUDIO_OUTPUT] - return self._alsa.default.output + return self.sys_host.alsa.default.output @audio_output.setter def audio_output(self, value): @@ -394,7 +394,7 @@ class Addon(CoreSysAttributes): if self.is_installed and ATTR_AUDIO_INPUT in self._data.user[self._id]: return self._data.user[self._id][ATTR_AUDIO_INPUT] - return self._alsa.default.input + return self.sys_host.alsa.default.input @audio_input.setter def audio_input(self, value): @@ -436,11 +436,11 @@ class Addon(CoreSysAttributes): # Repository with dockerhub images if ATTR_IMAGE in addon_data: - return addon_data[ATTR_IMAGE].format(arch=self._arch) + return addon_data[ATTR_IMAGE].format(arch=self.sys_arch) # local build return "{}/{}-addon-{}".format( - addon_data[ATTR_REPOSITORY], self._arch, + addon_data[ATTR_REPOSITORY], self.sys_arch, addon_data[ATTR_SLUG]) @property @@ -461,12 +461,12 @@ class Addon(CoreSysAttributes): @property def path_data(self): """Return addon 
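Side note on the asyncio changes running through this and the following hunks: the explicit `loop=self._loop` keyword is dropped from `asyncio.wait()`, `asyncio.sleep()` and `asyncio.shield()` because, inside a coroutine, asyncio already operates on the running loop (the keyword was later deprecated upstream). A minimal sketch of the stage-driven start pattern used by the renamed `boot()` helper, assuming each add-on object exposes `startup` and an async `start()` as in the manager code above:

```python
import asyncio

async def boot_stage(addons, stage, wait_boot):
    """Start every add-on of one startup stage, then give it time to settle."""
    tasks = [asyncio.ensure_future(addon.start())
             for addon in addons if addon.startup == stage]
    if tasks:
        # No loop= argument: asyncio picks up the currently running loop.
        await asyncio.wait(tasks)
    await asyncio.sleep(wait_boot)
```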
data path inside supervisor.""" - return Path(self._config.path_addons_data, self._id) + return Path(self.sys_config.path_addons_data, self._id) @property def path_extern_data(self): """Return addon data path external for docker.""" - return PurePath(self._config.path_extern_addons_data, self._id) + return PurePath(self.sys_config.path_extern_addons_data, self._id) @property def path_options(self): @@ -506,16 +506,16 @@ class Addon(CoreSysAttributes): @property def path_asound(self): """Return path to asound config.""" - return Path(self._config.path_tmp, f"{self.slug}_asound") + return Path(self.sys_config.path_tmp, f"{self.slug}_asound") @property def path_extern_asound(self): """Return path to asound config for docker.""" - return Path(self._config.path_extern_tmp, f"{self.slug}_asound") + return Path(self.sys_config.path_extern_tmp, f"{self.slug}_asound") def save_data(self): """Save data of addon.""" - self._addons.data.save_data() + self.sys_addons.data.save_data() def write_options(self): """Return True if addon options is written to data.""" @@ -537,7 +537,7 @@ class Addon(CoreSysAttributes): def write_asound(self): """Write asound config to file and return True on success.""" - asound_config = self._alsa.asound( + asound_config = self.sys_host.alsa.asound( alsa_input=self.audio_input, alsa_output=self.audio_output) try: @@ -590,9 +590,9 @@ class Addon(CoreSysAttributes): async def install(self): """Install a addon.""" - if self._arch not in self.supported_arch: + if self.sys_arch not in self.supported_arch: _LOGGER.error( - "Addon %s not supported on %s", self._id, self._arch) + "Addon %s not supported on %s", self._id, self.sys_arch) return False if self.is_installed: @@ -735,7 +735,7 @@ class Addon(CoreSysAttributes): @check_installed async def snapshot(self, tar_file): """Snapshot a state of a addon.""" - with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp: + with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp: # store local image if self.need_build and not await \ self.instance.export_image(Path(temp, "image.tar")): @@ -764,7 +764,7 @@ class Addon(CoreSysAttributes): try: _LOGGER.info("Build snapshot for addon %s", self._id) - await self._loop.run_in_executor(None, _write_tarfile) + await self.sys_run_in_executor(_write_tarfile) except (tarfile.TarError, OSError) as err: _LOGGER.error("Can't write tarfile %s: %s", tar_file, err) return False @@ -774,7 +774,7 @@ class Addon(CoreSysAttributes): async def restore(self, tar_file): """Restore a state of a addon.""" - with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp: + with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp: # extract snapshot def _extract_tarfile(): """Extract tar snapshot.""" @@ -782,7 +782,7 @@ class Addon(CoreSysAttributes): snapshot.extractall(path=Path(temp)) try: - await self._loop.run_in_executor(None, _extract_tarfile) + await self.sys_run_in_executor(_extract_tarfile) except tarfile.TarError as err: _LOGGER.error("Can't read tarfile %s: %s", tar_file, err) return False @@ -828,7 +828,7 @@ class Addon(CoreSysAttributes): try: _LOGGER.info("Restore data for addon %s", self._id) - await self._loop.run_in_executor(None, _restore_data) + await self.sys_run_in_executor(_restore_data) except shutil.Error as err: _LOGGER.error("Can't restore origin data: %s", err) return False diff --git a/hassio/addons/build.py b/hassio/addons/build.py index d98c9597a..4d9a37618 100644 --- a/hassio/addons/build.py +++ b/hassio/addons/build.py @@ -25,13 +25,13 @@ class 
AddonBuild(JsonConfig, CoreSysAttributes): @property def addon(self): """Return addon of build data.""" - return self._addons.get(self._id) + return self.sys_addons.get(self._id) @property def base_image(self): """Base images for this addon.""" return self._data[ATTR_BUILD_FROM].get( - self._arch, BASE_IMAGE[self._arch]) + self.sys_arch, BASE_IMAGE[self.sys_arch]) @property def squash(self): @@ -53,7 +53,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes): 'squash': self.squash, 'labels': { 'io.hass.version': version, - 'io.hass.arch': self._arch, + 'io.hass.arch': self.sys_arch, 'io.hass.type': META_ADDON, 'io.hass.name': self._fix_label('name'), 'io.hass.description': self._fix_label('description'), @@ -61,7 +61,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes): 'buildargs': { 'BUILD_FROM': self.base_image, 'BUILD_VERSION': version, - 'BUILD_ARCH': self._arch, + 'BUILD_ARCH': self.sys_arch, **self.additional_args, } } diff --git a/hassio/addons/data.py b/hassio/addons/data.py index 777d2b025..ade770c54 100644 --- a/hassio/addons/data.py +++ b/hassio/addons/data.py @@ -56,17 +56,17 @@ class AddonsData(JsonConfig, CoreSysAttributes): # read core repository self._read_addons_folder( - self._config.path_addons_core, REPOSITORY_CORE) + self.sys_config.path_addons_core, REPOSITORY_CORE) # read local repository self._read_addons_folder( - self._config.path_addons_local, REPOSITORY_LOCAL) + self.sys_config.path_addons_local, REPOSITORY_LOCAL) # add built-in repositories information self._set_builtin_repositories() # read custom git repositories - for repository_element in self._config.path_addons_git.iterdir(): + for repository_element in self.sys_config.path_addons_git.iterdir(): if repository_element.is_dir(): self._read_git_repository(repository_element) diff --git a/hassio/addons/git.py b/hassio/addons/git.py index a00521946..6f487d579 100644 --- a/hassio/addons/git.py +++ b/hassio/addons/git.py @@ -45,7 +45,7 @@ class GitRepo(CoreSysAttributes): async with self.lock: try: _LOGGER.info("Load addon %s repository", self.path) - self.repo = await self._loop.run_in_executor( + self.repo = await self.sys_loop.run_in_executor( None, git.Repo, str(self.path)) except (git.InvalidGitRepositoryError, git.NoSuchPathError, @@ -68,7 +68,7 @@ class GitRepo(CoreSysAttributes): try: _LOGGER.info("Clone addon %s repository", self.url) - self.repo = await self._loop.run_in_executor(None, ft.partial( + self.repo = await self.sys_run_in_executor(ft.partial( git.Repo.clone_from, self.url, str(self.path), **git_args )) @@ -89,7 +89,7 @@ class GitRepo(CoreSysAttributes): async with self.lock: try: _LOGGER.info("Pull addon %s repository", self.url) - await self._loop.run_in_executor( + await self.sys_loop.run_in_executor( None, self.repo.remotes.origin.pull) except (git.InvalidGitRepositoryError, git.NoSuchPathError, diff --git a/hassio/addons/repository.py b/hassio/addons/repository.py index 851e9e037..37d75ea75 100644 --- a/hassio/addons/repository.py +++ b/hassio/addons/repository.py @@ -30,7 +30,7 @@ class Repository(CoreSysAttributes): @property def _mesh(self): """Return data struct repository.""" - return self._addons.data.repositories.get(self._id, {}) + return self.sys_addons.data.repositories.get(self._id, {}) @property def slug(self): diff --git a/hassio/api/__init__.py b/hassio/api/__init__.py index b91d79485..20195d21c 100644 --- a/hassio/api/__init__.py +++ b/hassio/api/__init__.py @@ -9,7 +9,6 @@ from .discovery import APIDiscovery from .homeassistant import APIHomeAssistant from .hardware import 
APIHardware from .host import APIHost -from .network import APINetwork from .proxy import APIProxy from .supervisor import APISupervisor from .snapshots import APISnapshots @@ -28,7 +27,7 @@ class RestAPI(CoreSysAttributes): self.coresys = coresys self.security = SecurityMiddleware(coresys) self.webapp = web.Application( - middlewares=[self.security.token_validation], loop=self._loop) + middlewares=[self.security.token_validation], loop=coresys.loop) # service stuff self._handler = None @@ -44,7 +43,6 @@ class RestAPI(CoreSysAttributes): self._register_panel() self._register_addons() self._register_snapshots() - self._register_network() self._register_discovery() self._register_services() @@ -61,16 +59,6 @@ class RestAPI(CoreSysAttributes): web.post('/host/reload', api_host.reload), ]) - def _register_network(self): - """Register network function.""" - api_net = APINetwork() - api_net.coresys = self.coresys - - self.webapp.add_routes([ - web.get('/network/info', api_net.info), - web.post('/network/options', api_net.options), - ]) - def _register_hardware(self): """Register hardware function.""" api_hardware = APIHardware() @@ -221,10 +209,10 @@ class RestAPI(CoreSysAttributes): async def start(self): """Run rest api webserver.""" - self._handler = self.webapp.make_handler(loop=self._loop) + self._handler = self.webapp.make_handler() try: - self.server = await self._loop.create_server( + self.server = await self.sys_loop.create_server( self._handler, "0.0.0.0", "80") except OSError as err: _LOGGER.fatal( diff --git a/hassio/api/addons.py b/hassio/api/addons.py index a4d5b4593..f382c9116 100644 --- a/hassio/api/addons.py +++ b/hassio/api/addons.py @@ -43,7 +43,7 @@ class APIAddons(CoreSysAttributes): def _extract_addon(self, request, check_installed=True): """Return addon and if not exists trow a exception.""" - addon = self._addons.get(request.match_info.get('addon')) + addon = self.sys_addons.get(request.match_info.get('addon')) if not addon: raise RuntimeError("Addon not exists") @@ -64,7 +64,7 @@ class APIAddons(CoreSysAttributes): async def list(self, request): """Return all addons / repositories .""" data_addons = [] - for addon in self._addons.list_addons: + for addon in self.sys_addons.list_addons: data_addons.append({ ATTR_NAME: addon.name, ATTR_SLUG: addon.slug, @@ -81,7 +81,7 @@ class APIAddons(CoreSysAttributes): }) data_repositories = [] - for repository in self._addons.list_repositories: + for repository in self.sys_addons.list_repositories: data_repositories.append({ ATTR_SLUG: repository.slug, ATTR_NAME: repository.name, @@ -98,7 +98,7 @@ class APIAddons(CoreSysAttributes): @api_process async def reload(self, request): """Reload all addons data.""" - await asyncio.shield(self._addons.reload(), loop=self._loop) + await asyncio.shield(self.sys_addons.reload()) return True @api_process @@ -194,13 +194,13 @@ class APIAddons(CoreSysAttributes): def install(self, request): """Install addon.""" addon = self._extract_addon(request, check_installed=False) - return asyncio.shield(addon.install(), loop=self._loop) + return asyncio.shield(addon.install()) @api_process def uninstall(self, request): """Uninstall addon.""" addon = self._extract_addon(request) - return asyncio.shield(addon.uninstall(), loop=self._loop) + return asyncio.shield(addon.uninstall()) @api_process def start(self, request): @@ -214,13 +214,13 @@ class APIAddons(CoreSysAttributes): except vol.Invalid as ex: raise RuntimeError(humanize_error(options, ex)) from None - return asyncio.shield(addon.start(), 
loop=self._loop) + return asyncio.shield(addon.start()) @api_process def stop(self, request): """Stop addon.""" addon = self._extract_addon(request) - return asyncio.shield(addon.stop(), loop=self._loop) + return asyncio.shield(addon.stop()) @api_process def update(self, request): @@ -230,13 +230,13 @@ class APIAddons(CoreSysAttributes): if addon.last_version == addon.version_installed: raise RuntimeError("No update available!") - return asyncio.shield(addon.update(), loop=self._loop) + return asyncio.shield(addon.update()) @api_process def restart(self, request): """Restart addon.""" addon = self._extract_addon(request) - return asyncio.shield(addon.restart(), loop=self._loop) + return asyncio.shield(addon.restart()) @api_process def rebuild(self, request): @@ -245,7 +245,7 @@ class APIAddons(CoreSysAttributes): if not addon.need_build: raise RuntimeError("Only local build addons are supported") - return asyncio.shield(addon.rebuild(), loop=self._loop) + return asyncio.shield(addon.rebuild()) @api_process_raw(CONTENT_TYPE_BINARY) def logs(self, request): @@ -291,4 +291,4 @@ class APIAddons(CoreSysAttributes): raise RuntimeError("STDIN not supported by addon") data = await request.read() - return await asyncio.shield(addon.write_stdin(data), loop=self._loop) + return await asyncio.shield(addon.write_stdin(data)) diff --git a/hassio/api/discovery.py b/hassio/api/discovery.py index fb028fcd9..7ab5f8d2a 100644 --- a/hassio/api/discovery.py +++ b/hassio/api/discovery.py @@ -21,7 +21,7 @@ class APIDiscovery(CoreSysAttributes): def _extract_message(self, request): """Extract discovery message from URL.""" - message = self._services.discovery.get(request.match_info.get('uuid')) + message = self.sys_discovery.get(request.match_info.get('uuid')) if not message: raise RuntimeError("Discovery message not found") return message @@ -30,7 +30,7 @@ class APIDiscovery(CoreSysAttributes): async def list(self, request): """Show register services.""" discovery = [] - for message in self._services.discovery.list_messages: + for message in self.sys_discovery.list_messages: discovery.append({ ATTR_PROVIDER: message.provider, ATTR_UUID: message.uuid, @@ -45,7 +45,7 @@ class APIDiscovery(CoreSysAttributes): async def set_discovery(self, request): """Write data into a discovery pipeline.""" body = await api_validate(SCHEMA_DISCOVERY, request) - message = self._services.discovery.send( + message = self.sys_discovery.send( provider=request[REQUEST_FROM], **body) return {ATTR_UUID: message.uuid} @@ -68,5 +68,5 @@ class APIDiscovery(CoreSysAttributes): """Delete data into a discovery message.""" message = self._extract_message(request) - self._services.discovery.remove(message) + self.sys_discovery.remove(message) return True diff --git a/hassio/api/hardware.py b/hassio/api/hardware.py index e5e22a35f..7830b9675 100644 --- a/hassio/api/hardware.py +++ b/hassio/api/hardware.py @@ -16,11 +16,11 @@ class APIHardware(CoreSysAttributes): async def info(self, request): """Show hardware info.""" return { - ATTR_SERIAL: list(self._hardware.serial_devices), - ATTR_INPUT: list(self._hardware.input_devices), - ATTR_DISK: list(self._hardware.disk_devices), - ATTR_GPIO: list(self._hardware.gpio_devices), - ATTR_AUDIO: self._hardware.audio_devices, + ATTR_SERIAL: list(self.sys_hardware.serial_devices), + ATTR_INPUT: list(self.sys_hardware.input_devices), + ATTR_DISK: list(self.sys_hardware.disk_devices), + ATTR_GPIO: list(self.sys_hardware.gpio_devices), + ATTR_AUDIO: self.sys_hardware.audio_devices, } @api_process @@ -28,7 +28,7 
@@ class APIHardware(CoreSysAttributes): """Show ALSA audio devices.""" return { ATTR_AUDIO: { - ATTR_INPUT: self._alsa.input_devices, - ATTR_OUTPUT: self._alsa.output_devices, + ATTR_INPUT: self.sys_host.alsa.input_devices, + ATTR_OUTPUT: self.sys_host.alsa.output_devices, } } diff --git a/hassio/api/homeassistant.py b/hassio/api/homeassistant.py index 86bfaf296..8e9c07359 100644 --- a/hassio/api/homeassistant.py +++ b/hassio/api/homeassistant.py @@ -43,15 +43,15 @@ class APIHomeAssistant(CoreSysAttributes): async def info(self, request): """Return host information.""" return { - ATTR_VERSION: self._homeassistant.version, - ATTR_LAST_VERSION: self._homeassistant.last_version, - ATTR_IMAGE: self._homeassistant.image, - ATTR_CUSTOM: self._homeassistant.is_custom_image, - ATTR_BOOT: self._homeassistant.boot, - ATTR_PORT: self._homeassistant.api_port, - ATTR_SSL: self._homeassistant.api_ssl, - ATTR_WATCHDOG: self._homeassistant.watchdog, - ATTR_WAIT_BOOT: self._homeassistant.wait_boot, + ATTR_VERSION: self.sys_homeassistant.version, + ATTR_LAST_VERSION: self.sys_homeassistant.last_version, + ATTR_IMAGE: self.sys_homeassistant.image, + ATTR_CUSTOM: self.sys_homeassistant.is_custom_image, + ATTR_BOOT: self.sys_homeassistant.boot, + ATTR_PORT: self.sys_homeassistant.api_port, + ATTR_SSL: self.sys_homeassistant.api_ssl, + ATTR_WATCHDOG: self.sys_homeassistant.watchdog, + ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot, } @api_process @@ -60,34 +60,34 @@ class APIHomeAssistant(CoreSysAttributes): body = await api_validate(SCHEMA_OPTIONS, request) if ATTR_IMAGE in body and ATTR_LAST_VERSION in body: - self._homeassistant.image = body[ATTR_IMAGE] - self._homeassistant.last_version = body[ATTR_LAST_VERSION] + self.sys_homeassistant.image = body[ATTR_IMAGE] + self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION] if ATTR_BOOT in body: - self._homeassistant.boot = body[ATTR_BOOT] + self.sys_homeassistant.boot = body[ATTR_BOOT] if ATTR_PORT in body: - self._homeassistant.api_port = body[ATTR_PORT] + self.sys_homeassistant.api_port = body[ATTR_PORT] if ATTR_PASSWORD in body: - self._homeassistant.api_password = body[ATTR_PASSWORD] + self.sys_homeassistant.api_password = body[ATTR_PASSWORD] if ATTR_SSL in body: - self._homeassistant.api_ssl = body[ATTR_SSL] + self.sys_homeassistant.api_ssl = body[ATTR_SSL] if ATTR_WATCHDOG in body: - self._homeassistant.watchdog = body[ATTR_WATCHDOG] + self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG] if ATTR_WAIT_BOOT in body: - self._homeassistant.wait_boot = body[ATTR_WAIT_BOOT] + self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT] - self._homeassistant.save_data() + self.sys_homeassistant.save_data() return True @api_process async def stats(self, request): """Return resource information.""" - stats = await self._homeassistant.stats() + stats = await self.sys_homeassistant.stats() if not stats: raise RuntimeError("No stats available") @@ -105,38 +105,38 @@ class APIHomeAssistant(CoreSysAttributes): async def update(self, request): """Update homeassistant.""" body = await api_validate(SCHEMA_VERSION, request) - version = body.get(ATTR_VERSION, self._homeassistant.last_version) + version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version) - if version == self._homeassistant.version: + if version == self.sys_homeassistant.version: raise RuntimeError("Version {} is already in use".format(version)) return await asyncio.shield( - self._homeassistant.update(version), loop=self._loop) + self.sys_homeassistant.update(version)) @api_process def 
stop(self, request): """Stop homeassistant.""" - return asyncio.shield(self._homeassistant.stop(), loop=self._loop) + return asyncio.shield(self.sys_homeassistant.stop()) @api_process def start(self, request): """Start homeassistant.""" - return asyncio.shield(self._homeassistant.start(), loop=self._loop) + return asyncio.shield(self.sys_homeassistant.start()) @api_process def restart(self, request): """Restart homeassistant.""" - return asyncio.shield(self._homeassistant.restart(), loop=self._loop) + return asyncio.shield(self.sys_homeassistant.restart()) @api_process_raw(CONTENT_TYPE_BINARY) def logs(self, request): """Return homeassistant docker logs.""" - return self._homeassistant.logs() + return self.sys_homeassistant.logs() @api_process async def check(self, request): """Check config of homeassistant.""" - result = await self._homeassistant.check_config() + result = await self.sys_homeassistant.check_config() if not result.valid: raise RuntimeError(result.log) diff --git a/hassio/api/host.py b/hassio/api/host.py index a0b63f2c9..8e1676ef1 100644 --- a/hassio/api/host.py +++ b/hassio/api/host.py @@ -4,10 +4,10 @@ import logging import voluptuous as vol -from .utils import api_process_hostcontrol, api_process, api_validate +from .utils import api_process, api_validate from ..const import ( ATTR_VERSION, ATTR_LAST_VERSION, ATTR_TYPE, ATTR_HOSTNAME, ATTR_FEATURES, - ATTR_OS) + ATTR_OPERATING_SYSTEM, ATTR_KERNEL, ATTR_CHASSIS) from ..coresys import CoreSysAttributes _LOGGER = logging.getLogger(__name__) @@ -16,6 +16,10 @@ SCHEMA_VERSION = vol.Schema({ vol.Optional(ATTR_VERSION): vol.Coerce(str), }) +SCHEMA_OPTIONS = vol.Schema({ + vol.Optional(ATTR_HOSTNAME): vol.Coerce(str), +}) + class APIHost(CoreSysAttributes): """Handle rest api for host functions.""" @@ -24,31 +28,41 @@ class APIHost(CoreSysAttributes): async def info(self, request): """Return host information.""" return { - ATTR_TYPE: self._host_control.type, - ATTR_VERSION: self._host_control.version, - ATTR_LAST_VERSION: self._host_control.last_version, - ATTR_FEATURES: self._host_control.features, - ATTR_HOSTNAME: self._host_control.hostname, - ATTR_OS: self._host_control.os_info, + ATTR_TYPE: None, + ATTR_CHASSIS: self.sys_host.local.chassis, + ATTR_VERSION: None, + ATTR_LAST_VERSION: None, + ATTR_FEATURES: self.sys_host.supperted_features, + ATTR_HOSTNAME: self.sys_host.local.hostname, + ATTR_OPERATING_SYSTEM: self.sys_host.local.operating_system, + ATTR_KERNEL: self.sys_host.local.kernel, } - @api_process_hostcontrol + @api_process + async def options(self, request): + """Edit host settings.""" + body = await api_validate(SCHEMA_OPTIONS, request) + + # hostname + if ATTR_HOSTNAME in body: + await self.sys_host.local.set_hostname(body[ATTR_HOSTNAME]) + + @api_process def reboot(self, request): """Reboot host.""" - return self._host_control.reboot() + return self.sys_host.power.reboot() - @api_process_hostcontrol + @api_process def shutdown(self, request): """Poweroff host.""" - return self._host_control.shutdown() + return self.sys_host.power.shutdown() - @api_process_hostcontrol - async def reload(self, request): + @api_process + def reload(self, request): """Reload host data.""" - await self._host_control.load() - return True + return self._host_control.load() - @api_process_hostcontrol + @api_process async def update(self, request): """Update host OS.""" body = await api_validate(SCHEMA_VERSION, request) @@ -58,4 +72,4 @@ class APIHost(CoreSysAttributes): raise RuntimeError(f"Version {version} is already in use") return 
await asyncio.shield( - self._host_control.update(version=version), loop=self._loop) + self._host_control.update(version=version)) diff --git a/hassio/api/network.py b/hassio/api/network.py deleted file mode 100644 index c5c647066..000000000 --- a/hassio/api/network.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Init file for HassIO network rest api.""" -import logging - -import voluptuous as vol - -from .utils import api_process, api_process_hostcontrol, api_validate -from ..const import ATTR_HOSTNAME -from ..coresys import CoreSysAttributes - -_LOGGER = logging.getLogger(__name__) - - -SCHEMA_OPTIONS = vol.Schema({ - vol.Optional(ATTR_HOSTNAME): vol.Coerce(str), -}) - - -class APINetwork(CoreSysAttributes): - """Handle rest api for network functions.""" - - @api_process - async def info(self, request): - """Show network settings.""" - return { - ATTR_HOSTNAME: self._host_control.hostname, - } - - @api_process_hostcontrol - async def options(self, request): - """Edit network settings.""" - body = await api_validate(SCHEMA_OPTIONS, request) - - # hostname - if ATTR_HOSTNAME in body: - if self._host_control.hostname != body[ATTR_HOSTNAME]: - await self._host_control.set_hostname(body[ATTR_HOSTNAME]) - - return True diff --git a/hassio/api/proxy.py b/hassio/api/proxy.py index 62bb22dc8..89336ce53 100644 --- a/hassio/api/proxy.py +++ b/hassio/api/proxy.py @@ -20,7 +20,7 @@ class APIProxy(CoreSysAttributes): def _check_access(self, request): """Check the Hass.io token.""" hassio_token = request.headers.get(HEADER_HA_ACCESS) - addon = self._addons.from_uuid(hassio_token) + addon = self.sys_addons.from_uuid(hassio_token) if not addon: _LOGGER.warning("Unknown Home-Assistant API access!") @@ -29,7 +29,7 @@ class APIProxy(CoreSysAttributes): async def _api_client(self, request, path, timeout=300): """Return a client request with proxy origin for Home-Assistant.""" - url = f"{self._homeassistant.api_url}/api/{path}" + url = f"{self.sys_homeassistant.api_url}/api/{path}" try: data = None @@ -38,15 +38,17 @@ class APIProxy(CoreSysAttributes): params = request.query or None # read data - with async_timeout.timeout(30, loop=self._loop): + with async_timeout.timeout(30): data = await request.read() if data: headers.update({CONTENT_TYPE: request.content_type}) # need api password? 
- if self._homeassistant.api_password: - headers = {HEADER_HA_ACCESS: self._homeassistant.api_password} + if self.sys_homeassistant.api_password: + headers = { + HEADER_HA_ACCESS: self.sys_homeassistant.api_password, + } # reset headers if not headers: @@ -114,10 +116,10 @@ class APIProxy(CoreSysAttributes): async def _websocket_client(self): """Initialize a websocket api connection.""" - url = f"{self._homeassistant.api_url}/api/websocket" + url = f"{self.sys_homeassistant.api_url}/api/websocket" try: - client = await self._websession_ssl.ws_connect( + client = await self.sys_websession_ssl.ws_connect( url, heartbeat=60, verify_ssl=False) # handle authentication @@ -128,7 +130,7 @@ class APIProxy(CoreSysAttributes): elif data.get('type') == 'auth_required': await client.send_json({ 'type': 'auth', - 'api_password': self._homeassistant.api_password, + 'api_password': self.sys_homeassistant.api_password, }) _LOGGER.error("Authentication to Home-Assistant websocket") @@ -150,13 +152,13 @@ class APIProxy(CoreSysAttributes): try: await server.send_json({ 'type': 'auth_required', - 'ha_version': self._homeassistant.version, + 'ha_version': self.sys_homeassistant.version, }) # Check API access response = await server.receive_json() hassio_token = response.get('api_password') - addon = self._addons.from_uuid(hassio_token) + addon = self.sys_addons.from_uuid(hassio_token) if not addon: _LOGGER.warning("Unauthorized websocket access!") @@ -165,7 +167,7 @@ class APIProxy(CoreSysAttributes): await server.send_json({ 'type': 'auth_ok', - 'ha_version': self._homeassistant.version, + 'ha_version': self.sys_homeassistant.version, }) except (RuntimeError, ValueError) as err: _LOGGER.error("Can't initialize handshake: %s", err) @@ -180,16 +182,16 @@ class APIProxy(CoreSysAttributes): server_read = None while not server.closed and not client.closed: if not client_read: - client_read = asyncio.ensure_future( - client.receive_str(), loop=self._loop) + client_read = self.sys_create_task( + client.receive_str()) if not server_read: - server_read = asyncio.ensure_future( - server.receive_str(), loop=self._loop) + server_read = self.sys_create_task( + server.receive_str()) # wait until data need to be processed await asyncio.wait( [client_read, server_read], - loop=self._loop, return_when=asyncio.FIRST_COMPLETED + return_when=asyncio.FIRST_COMPLETED ) # server diff --git a/hassio/api/security.py b/hassio/api/security.py index de29f227e..a07ae99ef 100644 --- a/hassio/api/security.py +++ b/hassio/api/security.py @@ -42,13 +42,13 @@ class SecurityMiddleware(CoreSysAttributes): return await handler(request) # Home-Assistant - if hassio_token == self._homeassistant.uuid: + if hassio_token == self.sys_homeassistant.uuid: _LOGGER.debug("%s access from Home-Assistant", request.path) request[REQUEST_FROM] = 'homeassistant' return await handler(request) # Add-on - addon = self._addons.from_uuid(hassio_token) + addon = self.sys_addons.from_uuid(hassio_token) if addon: _LOGGER.info("%s access from %s", request.path, addon.slug) request[REQUEST_FROM] = addon.slug diff --git a/hassio/api/services.py b/hassio/api/services.py index 9d3e0b651..b14e96fee 100644 --- a/hassio/api/services.py +++ b/hassio/api/services.py @@ -11,7 +11,7 @@ class APIServices(CoreSysAttributes): def _extract_service(self, request): """Return service and if not exists trow a exception.""" - service = self._services.get(request.match_info.get('service')) + service = self.sys_services.get(request.match_info.get('service')) if not service: raise 
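The websocket proxy above now schedules its pending reads with `sys_create_task()` and waits on them without an explicit loop. The underlying relay pattern, reduced to a self-contained sketch (the real handler also performs the auth handshake, error handling and task cleanup):

```python
import asyncio

async def relay(server_ws, client_ws):
    """Pump messages both ways until either peer closes (reduced sketch)."""
    client_read = server_read = None
    while not server_ws.closed and not client_ws.closed:
        if client_read is None:
            client_read = asyncio.ensure_future(client_ws.receive_str())
        if server_read is None:
            server_read = asyncio.ensure_future(server_ws.receive_str())

        # Wake up as soon as either side has data.
        await asyncio.wait([client_read, server_read],
                           return_when=asyncio.FIRST_COMPLETED)

        if client_read.done():
            await server_ws.send_str(client_read.result())
            client_read = None
        if server_read.done():
            await client_ws.send_str(server_read.result())
            server_read = None
```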
RuntimeError("Service not exists") @@ -21,7 +21,7 @@ class APIServices(CoreSysAttributes): async def list(self, request): """Show register services.""" services = [] - for service in self._services.list_services: + for service in self.sys_services.list_services: services.append({ ATTR_SLUG: service.slug, ATTR_AVAILABLE: service.enabled, diff --git a/hassio/api/snapshots.py b/hassio/api/snapshots.py index 46396adbf..ccada9a4a 100644 --- a/hassio/api/snapshots.py +++ b/hassio/api/snapshots.py @@ -50,7 +50,7 @@ class APISnapshots(CoreSysAttributes): def _extract_snapshot(self, request): """Return addon and if not exists trow a exception.""" - snapshot = self._snapshots.get(request.match_info.get('snapshot')) + snapshot = self.sys_snapshots.get(request.match_info.get('snapshot')) if not snapshot: raise RuntimeError("Snapshot not exists") return snapshot @@ -59,7 +59,7 @@ class APISnapshots(CoreSysAttributes): async def list(self, request): """Return snapshot list.""" data_snapshots = [] - for snapshot in self._snapshots.list_snapshots: + for snapshot in self.sys_snapshots.list_snapshots: data_snapshots.append({ ATTR_SLUG: snapshot.slug, ATTR_NAME: snapshot.name, @@ -75,7 +75,7 @@ class APISnapshots(CoreSysAttributes): @api_process async def reload(self, request): """Reload snapshot list.""" - await asyncio.shield(self._snapshots.reload(), loop=self._loop) + await asyncio.shield(self.sys_snapshots.reload()) return True @api_process @@ -110,7 +110,7 @@ class APISnapshots(CoreSysAttributes): """Full-Snapshot a snapshot.""" body = await api_validate(SCHEMA_SNAPSHOT_FULL, request) snapshot = await asyncio.shield( - self._snapshots.do_snapshot_full(**body), loop=self._loop) + self.sys_snapshots.do_snapshot_full(**body)) if snapshot: return {ATTR_SLUG: snapshot.slug} @@ -121,7 +121,7 @@ class APISnapshots(CoreSysAttributes): """Partial-Snapshot a snapshot.""" body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request) snapshot = await asyncio.shield( - self._snapshots.do_snapshot_partial(**body), loop=self._loop) + self.sys_snapshots.do_snapshot_partial(**body)) if snapshot: return {ATTR_SLUG: snapshot.slug} @@ -134,9 +134,7 @@ class APISnapshots(CoreSysAttributes): body = await api_validate(SCHEMA_RESTORE_FULL, request) return await asyncio.shield( - self._snapshots.do_restore_full(snapshot, **body), - loop=self._loop - ) + self.sys_snapshots.do_restore_full(snapshot, **body)) @api_process async def restore_partial(self, request): @@ -145,15 +143,13 @@ class APISnapshots(CoreSysAttributes): body = await api_validate(SCHEMA_RESTORE_PARTIAL, request) return await asyncio.shield( - self._snapshots.do_restore_partial(snapshot, **body), - loop=self._loop - ) + self.sys_snapshots.do_restore_partial(snapshot, **body)) @api_process async def remove(self, request): """Remove a snapshot.""" snapshot = self._extract_snapshot(request) - return self._snapshots.remove(snapshot) + return self.sys_snapshots.remove(snapshot) async def download(self, request): """Download a snapshot file.""" @@ -167,7 +163,7 @@ class APISnapshots(CoreSysAttributes): @api_process async def upload(self, request): """Upload a snapshot file.""" - with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp_dir: + with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir: tar_file = Path(temp_dir, f"snapshot.tar") try: @@ -183,7 +179,7 @@ class APISnapshots(CoreSysAttributes): return False snapshot = await asyncio.shield( - self._snapshots.import_snapshot(tar_file), loop=self._loop) + 
self.sys_snapshots.import_snapshot(tar_file)) if snapshot: return {ATTR_SLUG: snapshot.slug} diff --git a/hassio/api/supervisor.py b/hassio/api/supervisor.py index 10d35701f..176401106 100644 --- a/hassio/api/supervisor.py +++ b/hassio/api/supervisor.py @@ -41,7 +41,7 @@ class APISupervisor(CoreSysAttributes): async def info(self, request): """Return host information.""" list_addons = [] - for addon in self._addons.list_addons: + for addon in self.sys_addons.list_addons: if addon.is_installed: list_addons.append({ ATTR_NAME: addon.name, @@ -57,13 +57,13 @@ class APISupervisor(CoreSysAttributes): return { ATTR_VERSION: HASSIO_VERSION, - ATTR_LAST_VERSION: self._updater.version_hassio, - ATTR_CHANNEL: self._updater.channel, - ATTR_ARCH: self._arch, - ATTR_WAIT_BOOT: self._config.wait_boot, - ATTR_TIMEZONE: self._config.timezone, + ATTR_LAST_VERSION: self.sys_updater.version_hassio, + ATTR_CHANNEL: self.sys_updater.channel, + ATTR_ARCH: self.sys_arch, + ATTR_WAIT_BOOT: self.sys_config.wait_boot, + ATTR_TIMEZONE: self.sys_config.timezone, ATTR_ADDONS: list_addons, - ATTR_ADDONS_REPOSITORIES: self._config.addons_repositories, + ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories, } @api_process @@ -72,26 +72,26 @@ class APISupervisor(CoreSysAttributes): body = await api_validate(SCHEMA_OPTIONS, request) if ATTR_CHANNEL in body: - self._updater.channel = body[ATTR_CHANNEL] + self.sys_updater.channel = body[ATTR_CHANNEL] if ATTR_TIMEZONE in body: - self._config.timezone = body[ATTR_TIMEZONE] + self.sys_config.timezone = body[ATTR_TIMEZONE] if ATTR_WAIT_BOOT in body: - self._config.wait_boot = body[ATTR_WAIT_BOOT] + self.sys_config.wait_boot = body[ATTR_WAIT_BOOT] if ATTR_ADDONS_REPOSITORIES in body: new = set(body[ATTR_ADDONS_REPOSITORIES]) - await asyncio.shield(self._addons.load_repositories(new)) + await asyncio.shield(self.sys_addons.load_repositories(new)) - self._updater.save_data() - self._config.save_data() + self.sys_updater.save_data() + self.sys_config.save_data() return True @api_process async def stats(self, request): """Return resource information.""" - stats = await self._supervisor.stats() + stats = await self.sys_supervisor.stats() if not stats: raise RuntimeError("No stats available") @@ -109,22 +109,22 @@ class APISupervisor(CoreSysAttributes): async def update(self, request): """Update supervisor OS.""" body = await api_validate(SCHEMA_VERSION, request) - version = body.get(ATTR_VERSION, self._updater.version_hassio) + version = body.get(ATTR_VERSION, self.sys_updater.version_hassio) - if version == self._supervisor.version: + if version == self.sys_supervisor.version: raise RuntimeError("Version {} is already in use".format(version)) return await asyncio.shield( - self._supervisor.update(version), loop=self._loop) + self.sys_supervisor.update(version)) @api_process async def reload(self, request): """Reload addons, config ect.""" tasks = [ - self._updater.reload(), + self.sys_updater.reload(), ] results, _ = await asyncio.shield( - asyncio.wait(tasks, loop=self._loop), loop=self._loop) + asyncio.wait(tasks)) for result in results: if result.exception() is not None: @@ -135,4 +135,4 @@ class APISupervisor(CoreSysAttributes): @api_process_raw(CONTENT_TYPE_BINARY) def logs(self, request): """Return supervisor docker logs.""" - return self._supervisor.logs() + return self.sys_supervisor.logs() diff --git a/hassio/api/utils.py b/hassio/api/utils.py index 88ea1ec6c..ae5921313 100644 --- a/hassio/api/utils.py +++ b/hassio/api/utils.py @@ -4,13 +4,13 @@ import hashlib 
import logging from aiohttp import web -from aiohttp.web_exceptions import HTTPServiceUnavailable import voluptuous as vol from voluptuous.humanize import humanize_error from ..const import ( JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR, CONTENT_TYPE_BINARY) +from ..exceptions import HassioError _LOGGER = logging.getLogger(__name__) @@ -33,42 +33,21 @@ def api_process(method): answer = await method(api, *args, **kwargs) except RuntimeError as err: return api_return_error(message=str(err)) + except HassioError: + _LOGGER.exception("Hassio error") + return api_return_error() if isinstance(answer, dict): return api_return_ok(data=answer) if isinstance(answer, web.Response): return answer - elif answer: - return api_return_ok() - return api_return_error() + elif isinstance(answer, bool) and not answer: + return api_return_error() + return api_return_ok() return wrap_api -def api_process_hostcontrol(method): - """Wrap HostControl calls to rest api.""" - async def wrap_hostcontrol(api, *args, **kwargs): - """Return host information.""" - # pylint: disable=protected-access - if not api._host_control.active: - raise HTTPServiceUnavailable() - - try: - answer = await method(api, *args, **kwargs) - except RuntimeError as err: - return api_return_error(message=str(err)) - - if isinstance(answer, dict): - return api_return_ok(data=answer) - elif answer is None: - return api_return_error("Function is not supported") - elif answer: - return api_return_ok() - return api_return_error() - - return wrap_hostcontrol - - def api_process_raw(content): """Wrap content_type into function.""" def wrap_method(method): @@ -81,6 +60,9 @@ def api_process_raw(content): except RuntimeError as err: msg_data = str(err).encode() msg_type = CONTENT_TYPE_BINARY + except HassioError: + msg_data = b'' + msg_type = CONTENT_TYPE_BINARY return web.Response(body=msg_data, content_type=msg_type) diff --git a/hassio/bootstrap.py b/hassio/bootstrap.py index 8ce5f2f41..a2fcda3cc 100644 --- a/hassio/bootstrap.py +++ b/hassio/bootstrap.py @@ -7,6 +7,7 @@ from pathlib import Path from colorlog import ColoredFormatter +from .core import HassIO from .addons import AddonManager from .api import RestAPI from .const import SOCKET_DOCKER @@ -17,7 +18,9 @@ from .snapshots import SnapshotManager from .tasks import Tasks from .updater import Updater from .services import ServiceManager -from .host import AlsaAudio +from .services import Discovery +from .host import HostManager +from .dbus import DBusManager _LOGGER = logging.getLogger(__name__) @@ -27,15 +30,18 @@ def initialize_coresys(loop): coresys = CoreSys(loop) # Initialize core objects + coresys.core = HassIO(coresys) coresys.updater = Updater(coresys) coresys.api = RestAPI(coresys) - coresys.alsa = AlsaAudio(coresys) coresys.supervisor = Supervisor(coresys) coresys.homeassistant = HomeAssistant(coresys) coresys.addons = AddonManager(coresys) coresys.snapshots = SnapshotManager(coresys) + coresys.host = HostManager(coresys) coresys.tasks = Tasks(coresys) coresys.services = ServiceManager(coresys) + coresys.discovery = Discovery(coresys) + coresys.dbus = DBusManager(coresys) # bootstrap config initialize_system_data(coresys) @@ -148,7 +154,12 @@ def check_environment(): # check socat exec if not shutil.which('socat'): - _LOGGER.fatal("Can0t find socat program!") + _LOGGER.fatal("Can't find socat program!") + return False + + # check socat exec + if not shutil.which('gdbus'): + _LOGGER.fatal("Can't find gdbus program!") return False return True diff --git a/hassio/const.py 
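Note on the reworked `api_process` decorator above: the old `api_process_hostcontrol` wrapper is gone, unexpected `HassioError`s are logged and converted to a generic error, and only an explicit `False` return is treated as a failure. A condensed view of that return-value mapping, assuming the usual `result`/`data`/`message` envelope keys (the real wrapper builds `web.json_response` objects via `api_return_ok`/`api_return_error`):

```python
from aiohttp import web

def classify(answer):
    """Condensed sketch of the new api_process return-value mapping."""
    if isinstance(answer, dict):
        return {'result': 'ok', 'data': answer}        # payload answers
    if isinstance(answer, web.Response):
        return answer                                   # raw responses pass through
    if isinstance(answer, bool) and not answer:
        return {'result': 'error', 'message': None}     # only explicit False is an error
    return {'result': 'ok', 'data': {}}                 # True/None now count as success
```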
b/hassio/const.py index 3901c5c57..ad5b26cb4 100644 --- a/hassio/const.py +++ b/hassio/const.py @@ -18,7 +18,6 @@ FILE_HASSIO_UPDATER = Path(HASSIO_DATA, "updater.json") FILE_HASSIO_SERVICES = Path(HASSIO_DATA, "services.json") SOCKET_DOCKER = Path("/var/run/docker.sock") -SOCKET_HC = Path("/var/run/hassio-hc.sock") DOCKER_NETWORK = 'hassio' DOCKER_NETWORK_MASK = ip_network('172.30.32.0/23') @@ -62,7 +61,8 @@ ATTR_LONG_DESCRIPTION = 'long_description' ATTR_HOSTNAME = 'hostname' ATTR_TIMEZONE = 'timezone' ATTR_ARGS = 'args' -ATTR_OS = 'os' +ATTR_OPERATING_SYSTEM = 'operating_system' +ATTR_CHASSIS = 'chassis' ATTR_TYPE = 'type' ATTR_SOURCE = 'source' ATTR_FEATURES = 'features' @@ -160,6 +160,7 @@ ATTR_DISCOVERY = 'discovery' ATTR_PROTECTED = 'protected' ATTR_CRYPTO = 'crypto' ATTR_BRANCH = 'branch' +ATTR_KERNEL = 'kernel' ATTR_SECCOMP = 'seccomp' ATTR_APPARMOR = 'apparmor' @@ -209,3 +210,8 @@ CRYPTO_AES128 = 'aes128' SECURITY_PROFILE = 'profile' SECURITY_DEFAULT = 'default' SECURITY_DISABLE = 'disable' + +FEATURES_SHUTDOWN = 'shutdown' +FEATURES_REBOOT = 'reboot' +FEATURES_UPDATE = 'update' +FEATURES_HOSTNAME = 'hostname' diff --git a/hassio/core.py b/hassio/core.py index 6acea8879..d32431ae9 100644 --- a/hassio/core.py +++ b/hassio/core.py @@ -1,10 +1,12 @@ """Main file for HassIO.""" +from contextlib import suppress import asyncio import logging from .coresys import CoreSysAttributes from .const import ( STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION, STARTUP_INITIALIZE) +from .exceptions import HassioError from .utils.dt import fetch_timezone _LOGGER = logging.getLogger(__name__) @@ -20,98 +22,113 @@ class HassIO(CoreSysAttributes): async def setup(self): """Setup HassIO orchestration.""" # update timezone - if self._config.timezone == 'UTC': - self._config.timezone = await fetch_timezone(self._websession) + if self.sys_config.timezone == 'UTC': + self.sys_config.timezone = await fetch_timezone(self._websession) - # supervisor - await self._supervisor.load() + # Load DBus + await self.sys_dbus.load() - # hostcontrol - await self._host_control.load() + # Load Host + await self.sys_host.load() - # Load homeassistant - await self._homeassistant.load() + # Load Supervisor + await self.sys_supervisor.load() - # Load addons - await self._addons.load() + # Load Home Assistant + await self.sys_homeassistant.load() + + # Load Add-ons + await self.sys_addons.load() # rest api views - await self._api.load() + await self.sys_api.load() # load last available data - await self._updater.load() + await self.sys_updater.load() # load last available data - await self._snapshots.load() + await self.sys_snapshots.load() # load services - await self._services.load() + await self.sys_services.load() # start dns forwarding - self._loop.create_task(self._dns.start()) - - # start addon mark as initialize - await self._addons.auto_boot(STARTUP_INITIALIZE) + self.sys_create_task(self.sys_dns.start()) async def start(self): """Start HassIO orchestration.""" # on release channel, try update itself # on dev mode, only read new versions - if not self._dev and self._supervisor.need_update: - if await self._supervisor.update(): + if not self.sys_dev and self.sys_supervisor.need_update: + if await self.sys_supervisor.update(): return else: _LOGGER.info("Ignore Hass.io auto updates on dev channel") # start api - await self._api.start() - _LOGGER.info("Start API on %s", self._docker.network.supervisor) + await self.sys_api.start() + _LOGGER.info("Start API on %s", self.sys_docker.network.supervisor) + + # start addon 
mark as initialize + await self.sys_addons.boot(STARTUP_INITIALIZE) try: # HomeAssistant is already running / supervisor have only reboot - if self._hardware.last_boot == self._config.last_boot: + if self.sys_hardware.last_boot == self.sys_config.last_boot: _LOGGER.info("Hass.io reboot detected") return # reset register services / discovery - self._services.reset() + self.sys_services.reset() # start addon mark as system - await self._addons.auto_boot(STARTUP_SYSTEM) + await self.sys_addons.boot(STARTUP_SYSTEM) # start addon mark as services - await self._addons.auto_boot(STARTUP_SERVICES) + await self.sys_addons.boot(STARTUP_SERVICES) # run HomeAssistant - if self._homeassistant.boot: - await self._homeassistant.start() + if self.sys_homeassistant.boot: + await self.sys_homeassistant.start() # start addon mark as application - await self._addons.auto_boot(STARTUP_APPLICATION) + await self.sys_addons.boot(STARTUP_APPLICATION) # store new last boot - self._config.last_boot = self._hardware.last_boot - self._config.save_data() + self.sys_config.last_boot = self.sys_hardware.last_boot + self.sys_config.save_data() finally: # Add core tasks into scheduler - await self._tasks.load() + await self.sys_tasks.load() # If landingpage / run upgrade in background - if self._homeassistant.version == 'landingpage': - self._loop.create_task(self._homeassistant.install()) + if self.sys_homeassistant.version == 'landingpage': + self.sys_create_task(self.sys_homeassistant.install()) _LOGGER.info("Hass.io is up and running") async def stop(self): """Stop a running orchestration.""" # don't process scheduler anymore - self._scheduler.suspend = True + self.sys_scheduler.suspend = True # process async stop tasks await asyncio.wait([ - self._api.stop(), - self._dns.stop(), - self._websession.close(), - self._websession_ssl.close() - ], loop=self._loop) + self.sys_api.stop(), + self.sys_dns.stop(), + self.sys_websession.close(), + self.sys_websession_ssl.close() + ]) + + async def shutdown(self): + """Shutdown all running containers in correct order.""" + await self.sys_addons.shutdown(STARTUP_APPLICATION) + + # Close Home Assistant + with suppress(HassioError): + await self.sys_homeassistant.stop() + + await self.sys_addons.shutdown(STARTUP_SERVICES) + await self.sys_addons.shutdown(STARTUP_SYSTEM) + await self.sys_addons.shutdown(STARTUP_INITIALIZE) diff --git a/hassio/coresys.py b/hassio/coresys.py index f17889d3a..e8ec6ddd9 100644 --- a/hassio/coresys.py +++ b/hassio/coresys.py @@ -7,11 +7,10 @@ from .config import CoreConfig from .docker import DockerAPI from .misc.dns import DNSForward from .misc.hardware import Hardware -from .misc.host_control import HostControl from .misc.scheduler import Scheduler -class CoreSys(object): +class CoreSys: """Class that handle all shared data.""" def __init__(self, loop): @@ -31,9 +30,9 @@ class CoreSys(object): self._docker = DockerAPI() self._scheduler = Scheduler(loop=loop) self._dns = DNSForward(loop=loop) - self._host_control = HostControl(loop=loop) # Internal objects pointers + self._core = None self._homeassistant = None self._supervisor = None self._addons = None @@ -41,8 +40,10 @@ class CoreSys(object): self._updater = None self._snapshots = None self._tasks = None + self._host = None + self._dbus = None self._services = None - self._alsa = None + self._discovery = None @property def arch(self): @@ -104,9 +105,16 @@ class CoreSys(object): return self._dns @property - def host_control(self): - """Return HostControl object.""" - return self._host_control + def 
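The new `HassIO.shutdown()` above stops containers in reverse startup order, taking Home Assistant down right after the application stage so that add-ons which depend on it are already gone. A compact sketch of that ordering, using the stage constants and exception type from the codebase:

```python
from contextlib import suppress

from hassio.const import (
    STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION)
from hassio.exceptions import HassioError

# Add-on stages in boot order; shutdown walks them in reverse.
BOOT_ORDER = (
    STARTUP_INITIALIZE, STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION)

async def shutdown_everything(coresys):
    """Sketch mirroring HassIO.shutdown() with the stage list above."""
    for stage in reversed(BOOT_ORDER):
        await coresys.addons.shutdown(stage)
        if stage == STARTUP_APPLICATION:
            # Home Assistant errors must not block the rest of the shutdown.
            with suppress(HassioError):
                await coresys.homeassistant.stop()
```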
core(self): + """Return HassIO object.""" + return self._core + + @core.setter + def core(self, value): + """Set a HassIO object.""" + if self._core: + raise RuntimeError("HassIO already set!") + self._core = value @property def homeassistant(self): @@ -205,25 +213,57 @@ class CoreSys(object): self._services = value @property - def alsa(self): - """Return ALSA Audio object.""" - return self._alsa + def discovery(self): + """Return ServiceManager object.""" + return self._discovery - @alsa.setter - def alsa(self, value): - """Set a ALSA Audio object.""" - if self._alsa: - raise RuntimeError("ALSA already set!") - self._alsa = value + @discovery.setter + def discovery(self, value): + """Set a Discovery object.""" + if self._discovery: + raise RuntimeError("Discovery already set!") + self._discovery = value + + @property + def dbus(self): + """Return DBusManager object.""" + return self._dbus + + @dbus.setter + def dbus(self, value): + """Set a DBusManager object.""" + if self._dbus: + raise RuntimeError("DBusManager already set!") + self._dbus = value + + @property + def host(self): + """Return HostManager object.""" + return self._host + + @host.setter + def host(self, value): + """Set a HostManager object.""" + if self._host: + raise RuntimeError("HostManager already set!") + self._host = value + + def run_in_executor(self, funct, *args): + """Wrapper for executor pool.""" + return self._loop.run_in_executor(None, funct, *args) + + def create_task(self, coroutine): + """Wrapper for async task.""" + return self._loop.create_task(coroutine) -class CoreSysAttributes(object): +class CoreSysAttributes: """Inheret basic CoreSysAttributes.""" coresys = None def __getattr__(self, name): """Mapping to coresys.""" - if hasattr(self.coresys, name[1:]): - return getattr(self.coresys, name[1:]) - raise AttributeError(f"Can't find {name} on {self.__class__}") + if name.startswith("sys_") and hasattr(self.coresys, name[4:]): + return getattr(self.coresys, name[4:]) + raise AttributeError() diff --git a/hassio/dbus/__init__.py b/hassio/dbus/__init__.py new file mode 100644 index 000000000..8aa4250b2 --- /dev/null +++ b/hassio/dbus/__init__.py @@ -0,0 +1,30 @@ +"""DBus interface objects.""" + +from .systemd import Systemd +from .hostname import Hostname +from ..coresys import CoreSysAttributes + + +class DBusManager(CoreSysAttributes): + """DBus Interface handler.""" + + def __init__(self, coresys): + """Initialize DBus Interface.""" + self.coresys = coresys + self._systemd = Systemd() + self._hostname = Hostname() + + @property + def systemd(self): + """Return Systemd Interface.""" + return self._systemd + + @property + def hostname(self): + """Return hostname Interface.""" + return self._hostname + + async def load(self): + """Connect interfaces to dbus.""" + await self.systemd.connect() + await self.hostname.connect() diff --git a/hassio/dbus/hostname.py b/hassio/dbus/hostname.py new file mode 100644 index 000000000..793abf7ed --- /dev/null +++ b/hassio/dbus/hostname.py @@ -0,0 +1,39 @@ +"""DBus interface for hostname.""" +import logging + +from .interface import DBusInterface +from .utils import dbus_connected +from ..exceptions import DBusError +from ..utils.gdbus import DBus + +_LOGGER = logging.getLogger(__name__) + +DBUS_NAME = 'org.freedesktop.hostname1' +DBUS_OBJECT = '/org/freedesktop/hostname1' + + +class Hostname(DBusInterface): + """Handle DBus interface for hostname/system.""" + + async def connect(self): + """Connect do bus.""" + try: + self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT) 
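The `CoreSysAttributes.__getattr__` change above is what drives the mass rename in this diff: any attribute beginning with `sys_` is looked up transparently on the shared `coresys` object, and the new `run_in_executor`/`create_task` helpers replace the scattered `self._loop.run_in_executor(None, ...)` and `loop.create_task(...)` calls. A stripped-down sketch of the mechanism:

```python
class CoreSys:
    """Shared-state container (heavily reduced sketch)."""

    def __init__(self, loop):
        self.loop = loop
        self.config = None              # CoreConfig, Updater, Docker, ... live here

    def run_in_executor(self, funct, *args):
        """Run a blocking callable in the default executor pool."""
        return self.loop.run_in_executor(None, funct, *args)

    def create_task(self, coroutine):
        """Schedule a fire-and-forget coroutine on the loop."""
        return self.loop.create_task(coroutine)


class CoreSysAttributes:
    """Mixin that exposes every coresys attribute as self.sys_<name>."""

    coresys = None

    def __getattr__(self, name):
        if name.startswith("sys_") and hasattr(self.coresys, name[4:]):
            return getattr(self.coresys, name[4:])
        raise AttributeError(name)


class Example(CoreSysAttributes):
    def __init__(self, coresys):
        self.coresys = coresys

    def show(self):
        # Resolves to coresys.config through __getattr__:
        return self.sys_config
```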
+ except DBusError: + _LOGGER.warning("Can't connect to hostname") + + @dbus_connected + def set_hostname(self, hostname): + """Change local hostname. + + Return a coroutine. + """ + return self.dbus.SetHostname(hostname) + + @dbus_connected + def get_properties(self): + """Return local host information. + + Return a coroutine. + """ + return self.dbus.get_properties(DBUS_NAME) diff --git a/hassio/dbus/interface.py b/hassio/dbus/interface.py new file mode 100644 index 000000000..fcdf8de25 --- /dev/null +++ b/hassio/dbus/interface.py @@ -0,0 +1,18 @@ +"""Interface class for dbus wrappers.""" + + +class DBusInterface: + """Base class for DBus interface wrappers.""" + + def __init__(self): + """Initialize DBus interface.""" + self.dbus = None + + @property + def is_connected(self): + """Return True if connected to DBus.""" + return self.dbus is not None + + async def connect(self): + """Connect to DBus.""" + raise NotImplementedError() diff --git a/hassio/dbus/systemd.py b/hassio/dbus/systemd.py new file mode 100644 index 000000000..d1aff0268 --- /dev/null +++ b/hassio/dbus/systemd.py @@ -0,0 +1,39 @@ +"""Interface to Systemd over dbus.""" +import logging + +from .interface import DBusInterface +from .utils import dbus_connected +from ..exceptions import DBusError +from ..utils.gdbus import DBus + +_LOGGER = logging.getLogger(__name__) + +DBUS_NAME = 'org.freedesktop.systemd1' +DBUS_OBJECT = '/org/freedesktop/systemd1' + + +class Systemd(DBusInterface): + """Systemd function handler.""" + + async def connect(self): + """Connect to DBus.""" + try: + self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT) + except DBusError: + _LOGGER.warning("Can't connect to systemd") + + @dbus_connected + def reboot(self): + """Reboot host computer. + + Return a coroutine. + """ + return self.dbus.Manager.Reboot() + + @dbus_connected + def power_off(self): + """Power off host computer. + + Return a coroutine. + """ + return self.dbus.Manager.PowerOff() diff --git a/hassio/dbus/utils.py b/hassio/dbus/utils.py new file mode 100644 index 000000000..b8656a235 --- /dev/null +++ b/hassio/dbus/utils.py @@ -0,0 +1,14 @@ +"""Utils for dbus.""" + +from ..exceptions import DBusNotConnectedError + + +def dbus_connected(method): + """Wrapper to check that DBus is connected.""" + def wrap_dbus(api, *args, **kwargs): + """Check that DBus is connected before calling a method.""" + if api.dbus is None: + raise DBusNotConnectedError(f"{api!s} not connected to dbus!") + return method(api, *args, **kwargs) + + return wrap_dbus diff --git a/hassio/docker/__init__.py b/hassio/docker/__init__.py index a5733ddbc..e91da35f4 100644 --- a/hassio/docker/__init__.py +++ b/hassio/docker/__init__.py @@ -14,7 +14,7 @@ _LOGGER = logging.getLogger(__name__) CommandReturn = attr.make_class('CommandReturn', ['exit_code', 'output']) -class DockerAPI(object): +class DockerAPI: """Docker hassio wrapper. This class is not AsyncIO safe!
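The new `hassio.dbus` package replaces the old `hassio-hc` UNIX-socket host control: `Hostname` and `Systemd` wrap `org.freedesktop.hostname1` and `org.freedesktop.systemd1` through the gdbus helper, and `@dbus_connected` raises `DBusNotConnectedError` when no bus connection was established. A caller-side sketch, assuming a `coresys` whose `DBusManager` has been loaded as wired up in `bootstrap.initialize_coresys()`:

```python
from hassio.exceptions import DBusNotConnectedError

async def rename_and_reboot(coresys, hostname):
    """Sketch: drive the new D-Bus wrappers from supervisor code."""
    try:
        # hostname1 SetHostname via gdbus; the wrapper returns a coroutine.
        await coresys.dbus.hostname.set_hostname(hostname)
        # systemd1 Manager.Reboot via gdbus.
        await coresys.dbus.systemd.reboot()
    except DBusNotConnectedError:
        # Raised by @dbus_connected if the host bus could not be reached,
        # e.g. when running without D-Bus access in the container.
        pass
```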
diff --git a/hassio/docker/addon.py b/hassio/docker/addon.py index 9dc363235..960923c75 100644 --- a/hassio/docker/addon.py +++ b/hassio/docker/addon.py @@ -28,7 +28,7 @@ class DockerAddon(DockerInterface): @property def addon(self): """Return addon of docker image.""" - return self._addons.get(self._id) + return self.sys_addons.get(self._id) @property def image(self): @@ -52,7 +52,7 @@ class DockerAddon(DockerInterface): """Return arch of docker image.""" if not self.addon.legacy: return super().arch - return self._arch + return self.sys_arch @property def name(self): @@ -85,7 +85,7 @@ class DockerAddon(DockerInterface): return { **addon_env, - ENV_TIME: self._config.timezone, + ENV_TIME: self.sys_config.timezone, ENV_TOKEN: self.addon.uuid, } @@ -100,7 +100,7 @@ class DockerAddon(DockerInterface): # Auto mapping UART devices if self.addon.auto_uart: - for device in self._hardware.serial_devices: + for device in self.sys_hardware.serial_devices: devices.append(f"{device}:{device}:rwm") # Return None if no devices is present @@ -149,8 +149,8 @@ class DockerAddon(DockerInterface): def network_mapping(self): """Return hosts mapping.""" return { - 'homeassistant': self._docker.network.gateway, - 'hassio': self._docker.network.supervisor, + 'homeassistant': self.sys_docker.network.gateway, + 'hassio': self.sys_docker.network.supervisor, } @property @@ -173,31 +173,31 @@ class DockerAddon(DockerInterface): # setup config mappings if MAP_CONFIG in addon_mapping: volumes.update({ - str(self._config.path_extern_config): { + str(self.sys_config.path_extern_config): { 'bind': "/config", 'mode': addon_mapping[MAP_CONFIG] }}) if MAP_SSL in addon_mapping: volumes.update({ - str(self._config.path_extern_ssl): { + str(self.sys_config.path_extern_ssl): { 'bind': "/ssl", 'mode': addon_mapping[MAP_SSL] }}) if MAP_ADDONS in addon_mapping: volumes.update({ - str(self._config.path_extern_addons_local): { + str(self.sys_config.path_extern_addons_local): { 'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS] }}) if MAP_BACKUP in addon_mapping: volumes.update({ - str(self._config.path_extern_backup): { + str(self.sys_config.path_extern_backup): { 'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP] }}) if MAP_SHARE in addon_mapping: volumes.update({ - str(self._config.path_extern_share): { + str(self.sys_config.path_extern_share): { 'bind': "/share", 'mode': addon_mapping[MAP_SHARE] }}) @@ -239,7 +239,7 @@ class DockerAddon(DockerInterface): # cleanup self._stop() - ret = self._docker.run( + ret = self.sys_docker.run( self.image, name=self.name, hostname=self.hostname, @@ -283,7 +283,7 @@ class DockerAddon(DockerInterface): _LOGGER.info("Start build %s:%s", self.image, tag) try: - image, log = self._docker.images.build( + image, log = self.sys_docker.images.build( **build_env.get_docker_args(tag)) _LOGGER.debug("Build %s:%s done: %s", self.image, tag, log) @@ -302,7 +302,7 @@ class DockerAddon(DockerInterface): @process_lock def export_image(self, path): """Export current images into a tar file.""" - return self._loop.run_in_executor(None, self._export_image, path) + return self.sys_run_in_executor(self._export_image, path) def _export_image(self, tar_file): """Export current images into a tar file. @@ -310,7 +310,7 @@ class DockerAddon(DockerInterface): Need run inside executor. 
""" try: - image = self._docker.api.get_image(self.image) + image = self.sys_docker.api.get_image(self.image) except docker.errors.DockerException as err: _LOGGER.error("Can't fetch image %s: %s", self.image, err) return False @@ -330,7 +330,7 @@ class DockerAddon(DockerInterface): @process_lock def import_image(self, path, tag): """Import a tar file as image.""" - return self._loop.run_in_executor(None, self._import_image, path, tag) + return self.sys_run_in_executor(self._import_image, path, tag) def _import_image(self, tar_file, tag): """Import a tar file as image. @@ -339,9 +339,9 @@ class DockerAddon(DockerInterface): """ try: with tar_file.open("rb") as read_tar: - self._docker.api.load_image(read_tar, quiet=True) + self.sys_docker.api.load_image(read_tar, quiet=True) - image = self._docker.images.get(self.image) + image = self.sys_docker.images.get(self.image) image.tag(self.image, tag=tag) except (docker.errors.DockerException, OSError) as err: _LOGGER.error("Can't import image %s: %s", self.image, err) @@ -355,7 +355,7 @@ class DockerAddon(DockerInterface): @process_lock def write_stdin(self, data): """Write to add-on stdin.""" - return self._loop.run_in_executor(None, self._write_stdin, data) + return self.sys_run_in_executor(self._write_stdin, data) def _write_stdin(self, data): """Write to add-on stdin. @@ -367,7 +367,7 @@ class DockerAddon(DockerInterface): try: # load needed docker objects - container = self._docker.containers.get(self.name) + container = self.sys_docker.containers.get(self.name) socket = container.attach_socket(params={'stdin': 1, 'stream': 1}) except docker.errors.DockerException as err: _LOGGER.error("Can't attach to %s stdin: %s", self.name, err) diff --git a/hassio/docker/homeassistant.py b/hassio/docker/homeassistant.py index 1ac05b18c..036727bc6 100644 --- a/hassio/docker/homeassistant.py +++ b/hassio/docker/homeassistant.py @@ -24,7 +24,7 @@ class DockerHomeAssistant(DockerInterface): @property def image(self): """Return name of docker image.""" - return self._homeassistant.image + return self.sys_homeassistant.image @property def name(self): @@ -35,7 +35,7 @@ class DockerHomeAssistant(DockerInterface): def devices(self): """Create list of special device to map into docker.""" devices = [] - for device in self._hardware.serial_devices: + for device in self.sys_hardware.serial_devices: devices.append(f"{device}:{device}:rwm") return devices or None @@ -50,7 +50,7 @@ class DockerHomeAssistant(DockerInterface): # cleanup self._stop() - ret = self._docker.run( + ret = self.sys_docker.run( self.image, name=self.name, hostname=self.name, @@ -60,16 +60,16 @@ class DockerHomeAssistant(DockerInterface): devices=self.devices, network_mode='host', environment={ - 'HASSIO': self._docker.network.supervisor, - ENV_TIME: self._config.timezone, - ENV_TOKEN: self._homeassistant.uuid, + 'HASSIO': self.sys_docker.network.supervisor, + ENV_TIME: self.sys_config.timezone, + ENV_TOKEN: self.sys_homeassistant.uuid, }, volumes={ - str(self._config.path_extern_config): + str(self.sys_config.path_extern_config): {'bind': '/config', 'mode': 'rw'}, - str(self._config.path_extern_ssl): + str(self.sys_config.path_extern_ssl): {'bind': '/ssl', 'mode': 'ro'}, - str(self._config.path_extern_share): + str(self.sys_config.path_extern_share): {'bind': '/share', 'mode': 'rw'}, } ) @@ -85,26 +85,26 @@ class DockerHomeAssistant(DockerInterface): Need run inside executor. 
""" - return self._docker.run_command( + return self.sys_docker.run_command( self.image, command, detach=True, stdout=True, stderr=True, environment={ - ENV_TIME: self._config.timezone, + ENV_TIME: self.sys_config.timezone, }, volumes={ - str(self._config.path_extern_config): + str(self.sys_config.path_extern_config): {'bind': '/config', 'mode': 'ro'}, - str(self._config.path_extern_ssl): + str(self.sys_config.path_extern_ssl): {'bind': '/ssl', 'mode': 'ro'}, } ) def is_initialize(self): """Return True if docker container exists.""" - return self._loop.run_in_executor(None, self._is_initialize) + return self.sys_run_in_executor(self._is_initialize) def _is_initialize(self): """Return True if docker container exists. @@ -112,7 +112,7 @@ class DockerHomeAssistant(DockerInterface): Need run inside executor. """ try: - self._docker.containers.get(self.name) + self.sys_docker.containers.get(self.name) except docker.errors.DockerException: return False diff --git a/hassio/docker/interface.py b/hassio/docker/interface.py index f14ddae46..4f063f0bf 100644 --- a/hassio/docker/interface.py +++ b/hassio/docker/interface.py @@ -61,7 +61,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def install(self, tag): """Pull docker image.""" - return self._loop.run_in_executor(None, self._install, tag) + return self.sys_run_in_executor(self._install, tag) def _install(self, tag): """Pull docker image. @@ -70,7 +70,7 @@ class DockerInterface(CoreSysAttributes): """ try: _LOGGER.info("Pull image %s tag %s.", self.image, tag) - image = self._docker.images.pull(f"{self.image}:{tag}") + image = self.sys_docker.images.pull(f"{self.image}:{tag}") image.tag(self.image, tag='latest') self._meta = image.attrs @@ -83,7 +83,7 @@ class DockerInterface(CoreSysAttributes): def exists(self): """Return True if docker image exists in local repo.""" - return self._loop.run_in_executor(None, self._exists) + return self.sys_run_in_executor(self._exists) def _exists(self): """Return True if docker image exists in local repo. @@ -91,7 +91,7 @@ class DockerInterface(CoreSysAttributes): Need run inside executor. """ try: - image = self._docker.images.get(self.image) + image = self.sys_docker.images.get(self.image) assert f"{self.image}:{self.version}" in image.tags except (docker.errors.DockerException, AssertionError): return False @@ -103,7 +103,7 @@ class DockerInterface(CoreSysAttributes): Return a Future. """ - return self._loop.run_in_executor(None, self._is_running) + return self.sys_run_in_executor(self._is_running) def _is_running(self): """Return True if docker is Running. @@ -111,8 +111,8 @@ class DockerInterface(CoreSysAttributes): Need run inside executor. """ try: - container = self._docker.containers.get(self.name) - image = self._docker.images.get(self.image) + container = self.sys_docker.containers.get(self.name) + image = self.sys_docker.images.get(self.image) except docker.errors.DockerException: return False @@ -129,7 +129,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def attach(self): """Attach to running docker container.""" - return self._loop.run_in_executor(None, self._attach) + return self.sys_run_in_executor(self._attach) def _attach(self): """Attach to running docker container. 
@@ -138,9 +138,9 @@ class DockerInterface(CoreSysAttributes): """ try: if self.image: - self._meta = self._docker.images.get(self.image).attrs + self._meta = self.sys_docker.images.get(self.image).attrs else: - self._meta = self._docker.containers.get(self.name).attrs + self._meta = self.sys_docker.containers.get(self.name).attrs except docker.errors.DockerException: return False @@ -152,7 +152,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def run(self): """Run docker image.""" - return self._loop.run_in_executor(None, self._run) + return self.sys_run_in_executor(self._run) def _run(self): """Run docker image. @@ -164,7 +164,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def stop(self): """Stop/remove docker container.""" - return self._loop.run_in_executor(None, self._stop) + return self.sys_run_in_executor(self._stop) def _stop(self): """Stop/remove and remove docker container. @@ -172,7 +172,7 @@ class DockerInterface(CoreSysAttributes): Need run inside executor. """ try: - container = self._docker.containers.get(self.name) + container = self.sys_docker.containers.get(self.name) except docker.errors.DockerException: return False @@ -190,7 +190,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def remove(self): """Remove docker images.""" - return self._loop.run_in_executor(None, self._remove) + return self.sys_run_in_executor(self._remove) def _remove(self): """remove docker images. @@ -205,11 +205,11 @@ class DockerInterface(CoreSysAttributes): try: with suppress(docker.errors.ImageNotFound): - self._docker.images.remove( + self.sys_docker.images.remove( image=f"{self.image}:latest", force=True) with suppress(docker.errors.ImageNotFound): - self._docker.images.remove( + self.sys_docker.images.remove( image=f"{self.image}:{self.version}", force=True) except docker.errors.DockerException as err: @@ -222,7 +222,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def update(self, tag): """Update a docker image.""" - return self._loop.run_in_executor(None, self._update, tag) + return self.sys_run_in_executor(self._update, tag) def _update(self, tag): """Update a docker image. @@ -247,7 +247,7 @@ class DockerInterface(CoreSysAttributes): Return a Future. """ - return self._loop.run_in_executor(None, self._logs) + return self.sys_run_in_executor(self._logs) def _logs(self): """Return docker logs of container. @@ -255,7 +255,7 @@ class DockerInterface(CoreSysAttributes): Need run inside executor. """ try: - container = self._docker.containers.get(self.name) + container = self.sys_docker.containers.get(self.name) except docker.errors.DockerException: return b"" @@ -267,7 +267,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def cleanup(self): """Check if old version exists and cleanup.""" - return self._loop.run_in_executor(None, self._cleanup) + return self.sys_run_in_executor(self._cleanup) def _cleanup(self): """Check if old version exists and cleanup. @@ -275,25 +275,25 @@ class DockerInterface(CoreSysAttributes): Need run inside executor. 
""" try: - latest = self._docker.images.get(self.image) + latest = self.sys_docker.images.get(self.image) except docker.errors.DockerException: _LOGGER.warning("Can't find %s for cleanup", self.image) return False - for image in self._docker.images.list(name=self.image): + for image in self.sys_docker.images.list(name=self.image): if latest.id == image.id: continue with suppress(docker.errors.DockerException): _LOGGER.info("Cleanup docker images: %s", image.tags) - self._docker.images.remove(image.id, force=True) + self.sys_docker.images.remove(image.id, force=True) return True @process_lock def execute_command(self, command): """Create a temporary container and run command.""" - return self._loop.run_in_executor(None, self._execute_command, command) + return self.sys_run_in_executor(self._execute_command, command) def _execute_command(self, command): """Create a temporary container and run command. @@ -304,7 +304,7 @@ class DockerInterface(CoreSysAttributes): def stats(self): """Read and return stats from container.""" - return self._loop.run_in_executor(None, self._stats) + return self.sys_run_in_executor(self._stats) def _stats(self): """Create a temporary container and run command. @@ -312,7 +312,7 @@ class DockerInterface(CoreSysAttributes): Need run inside executor. """ try: - container = self._docker.containers.get(self.name) + container = self.sys_docker.containers.get(self.name) except docker.errors.DockerException: return None diff --git a/hassio/docker/network.py b/hassio/docker/network.py index ee5a8b767..08120c3ce 100644 --- a/hassio/docker/network.py +++ b/hassio/docker/network.py @@ -8,7 +8,7 @@ from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE _LOGGER = logging.getLogger(__name__) -class DockerNetwork(object): +class DockerNetwork: """Internal HassIO Network. This class is not AsyncIO safe! diff --git a/hassio/docker/stats.py b/hassio/docker/stats.py index be6bb4b7a..300cfccd0 100644 --- a/hassio/docker/stats.py +++ b/hassio/docker/stats.py @@ -2,7 +2,7 @@ from contextlib import suppress -class DockerStats(object): +class DockerStats: """Hold stats data from container inside.""" def __init__(self, stats): diff --git a/hassio/docker/supervisor.py b/hassio/docker/supervisor.py index ae8c06ac7..1a93015e7 100644 --- a/hassio/docker/supervisor.py +++ b/hassio/docker/supervisor.py @@ -24,7 +24,7 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes): Need run inside executor. 
""" try: - container = self._docker.containers.get(self.name) + container = self.sys_docker.containers.get(self.name) except docker.errors.DockerException: return False @@ -33,9 +33,10 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes): self.image, self.version) # if already attach - if container in self._docker.network.containers: + if container in self.sys_docker.network.containers: return True # attach to network - return self._docker.network.attach_container( - container, alias=['hassio'], ipv4=self._docker.network.supervisor) + return self.sys_docker.network.attach_container( + container, alias=['hassio'], + ipv4=self.sys_docker.network.supervisor) diff --git a/hassio/exceptions.py b/hassio/exceptions.py new file mode 100644 index 000000000..4c4f66a36 --- /dev/null +++ b/hassio/exceptions.py @@ -0,0 +1,49 @@ +"""Core Exceptions.""" + + +class HassioError(Exception): + """Root exception.""" + pass + + +class HassioInternalError(HassioError): + """Internal Hass.io error they can't handle.""" + pass + + +class HassioNotSupportedError(HassioError): + """Function is not supported.""" + pass + + +# Host + +class HostError(HassioError): + """Internal Host error.""" + pass + + +class HostNotSupportedError(HassioNotSupportedError): + """Host function is not supprted.""" + pass + + +# utils/gdbus + +class DBusError(HassioError): + """DBus generic error.""" + pass + + +class DBusNotConnectedError(HassioNotSupportedError): + """DBus is not connected and call a method.""" + + +class DBusFatalError(DBusError): + """DBus call going wrong.""" + pass + + +class DBusParseError(DBusError): + """DBus parse error.""" + pass diff --git a/hassio/homeassistant.py b/hassio/homeassistant.py index 899050b83..c95b644bd 100644 --- a/hassio/homeassistant.py +++ b/hassio/homeassistant.py @@ -54,7 +54,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes): @property def api_ip(self): """Return IP of HomeAssistant instance.""" - return self._docker.network.gateway + return self.sys_docker.network.gateway @property def api_port(self): @@ -123,7 +123,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes): """Return last available version of homeassistant.""" if self.is_custom_image: return self._data.get(ATTR_LAST_VERSION) - return self._updater.version_homeassistant + return self.sys_updater.version_homeassistant @last_version.setter def last_version(self, value): @@ -177,7 +177,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes): if await self.instance.install('landingpage'): break _LOGGER.warning("Fails install landingpage, retry after 60sec") - await asyncio.sleep(60, loop=self._loop) + await asyncio.sleep(60) # Run landingpage after installation await self._start() @@ -189,13 +189,13 @@ class HomeAssistant(JsonConfig, CoreSysAttributes): while True: # read homeassistant tag and install it if not self.last_version: - await self._updater.reload() + await self.sys_updater.reload() tag = self.last_version if tag and await self.instance.install(tag): break _LOGGER.warning("Error on install HomeAssistant. 
Retry in 60sec") - await asyncio.sleep(60, loop=self._loop) + await asyncio.sleep(60) # finishing _LOGGER.info("HomeAssistant docker now installed") @@ -307,7 +307,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes): try: # pylint: disable=bad-continuation - async with self._websession_ssl.get( + async with self.sys_websession_ssl.get( url, headers=header, timeout=30) as request: status = request.status @@ -328,7 +328,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes): try: # pylint: disable=bad-continuation - async with self._websession_ssl.post( + async with self.sys_websession_ssl.post( url, headers=header, timeout=30, json=event_data) as request: status = request.status @@ -361,10 +361,10 @@ class HomeAssistant(JsonConfig, CoreSysAttributes): pass while time.monotonic() - start_time < self.wait_boot: - if await self._loop.run_in_executor(None, check_port): + if await self.sys_run_in_executor(check_port): _LOGGER.info("Detect a running Home-Assistant instance") return True - await asyncio.sleep(10, loop=self._loop) + await asyncio.sleep(10) _LOGGER.warning("Don't wait anymore of Home-Assistant startup!") return False diff --git a/hassio/host/__init__.py b/hassio/host/__init__.py index 16b30ad18..659b4cdf5 100644 --- a/hassio/host/__init__.py +++ b/hassio/host/__init__.py @@ -1,2 +1,58 @@ """Host function like audio/dbus/systemd.""" -from .alsa import AlsaAudio # noqa + +from .alsa import AlsaAudio +from .power import PowerControl +from .local import LocalCenter +from ..const import FEATURES_REBOOT, FEATURES_SHUTDOWN, FEATURES_HOSTNAME +from ..coresys import CoreSysAttributes + + +class HostManager(CoreSysAttributes): + """Manage supported function from host.""" + + def __init__(self, coresys): + """Initialize Host manager.""" + self.coresys = coresys + self._alsa = AlsaAudio(coresys) + self._power = PowerControl(coresys) + self._local = LocalCenter(coresys) + + @property + def alsa(self): + """Return host ALSA handler.""" + return self._alsa + + @property + def power(self): + """Return host power handler.""" + return self._power + + @property + def local(self): + """Return host local handler.""" + return self._local + + @property + def supperted_features(self): + """Return a list of supported host features.""" + features = [] + + if self.sys_dbus.systemd.is_connected: + features.extend([ + FEATURES_REBOOT, + FEATURES_SHUTDOWN, + ]) + + if self.sys_dbus.hostname.is_connected: + features.append(FEATURES_HOSTNAME) + + return features + + async def load(self): + """Load host functions.""" + if self.sys_dbus.hostname.is_connected: + await self.local.update() + + def reload(self): + """Reload host information.""" + return self.load() diff --git a/hassio/host/alsa.py b/hassio/host/alsa.py index 6baca457c..75a935192 100644 --- a/hassio/host/alsa.py +++ b/hassio/host/alsa.py @@ -42,7 +42,7 @@ class AlsaAudio(CoreSysAttributes): def _update_device(self): """Update Internal device DB.""" - current_id = hash(frozenset(self._hardware.audio_devices)) + current_id = hash(frozenset(self.sys_hardware.audio_devices)) # Need rebuild? 
if current_id == self._cache: @@ -57,7 +57,7 @@ class AlsaAudio(CoreSysAttributes): database = self._audio_database() # Process devices - for dev_id, dev_data in self._hardware.audio_devices.items(): + for dev_id, dev_data in self.sys_hardware.audio_devices.items(): for chan_id, chan_type in dev_data[ATTR_DEVICES].items(): alsa_id = f"{dev_id},{chan_id}" dev_name = dev_data[ATTR_NAME] @@ -73,7 +73,7 @@ class AlsaAudio(CoreSysAttributes): # Use name from DB or a generic name self._data[key][alsa_id] = database.get( - self._machine, {}).get( + self.sys_machine, {}).get( dev_name, {}).get(alsa_id, f"{dev_name}: {chan_id}") self._cache = current_id @@ -98,8 +98,8 @@ class AlsaAudio(CoreSysAttributes): # Init defaults if self._default is None: database = self._audio_database() - alsa_input = database.get(self._machine, {}).get(ATTR_INPUT) - alsa_output = database.get(self._machine, {}).get(ATTR_OUTPUT) + alsa_input = database.get(self.sys_machine, {}).get(ATTR_INPUT) + alsa_output = database.get(self.sys_machine, {}).get(ATTR_OUTPUT) self._default = DefaultConfig(alsa_input, alsa_output) diff --git a/hassio/host/local.py b/hassio/host/local.py new file mode 100644 index 000000000..ab41be753 --- /dev/null +++ b/hassio/host/local.py @@ -0,0 +1,67 @@ +"""Local host information control.""" +import logging + +from ..coresys import CoreSysAttributes +from ..exceptions import HassioError, HostNotSupportedError + +_LOGGER = logging.getLogger(__name__) + +UNKNOWN = 'Unknown' + + +class LocalCenter(CoreSysAttributes): + """Handle local system information controls.""" + + def __init__(self, coresys): + """Initialize system center handling.""" + self.coresys = coresys + self._data = {} + + @property + def hostname(self): + """Return local hostname.""" + return self._data.get('Hostname', UNKNOWN) + + @property + def chassis(self): + """Return local chassis type.""" + return self._data.get('Chassis', UNKNOWN) + + @property + def kernel(self): + """Return local kernel version.""" + return self._data.get('KernelRelease', UNKNOWN) + + @property + def operating_system(self): + """Return local operating system.""" + return self._data.get('OperatingSystemPrettyName', UNKNOWN) + + @property + def cpe(self): + """Return local CPE.""" + return self._data.get('OperatingSystemCPEName', UNKNOWN) + + def _check_dbus(self): + """Check if hostname DBus is connected or raise an error.""" + if not self.sys_dbus.hostname.is_connected: + _LOGGER.error("No hostname dbus connection available") + raise HostNotSupportedError() + + async def update(self): + """Update properties over dbus.""" + self._check_dbus() + + _LOGGER.info("Update local host information") + try: + self._data = await self.sys_dbus.hostname.get_properties() + except HassioError: + _LOGGER.warning("Can't update host system information!") + + async def set_hostname(self, hostname): + """Set a new local hostname.""" + self._check_dbus() + + _LOGGER.info("Set Hostname %s", hostname) + await self.sys_dbus.hostname.set_hostname(hostname) + await self.update() diff --git a/hassio/host/power.py b/hassio/host/power.py new file mode 100644 index 000000000..f620e15b2 --- /dev/null +++ b/hassio/host/power.py @@ -0,0 +1,41 @@ +"""Power control for host.""" +import logging + +from ..coresys import CoreSysAttributes +from ..exceptions import HostNotSupportedError + +_LOGGER = logging.getLogger(__name__) + + +class PowerControl(CoreSysAttributes): + """Handle host power controls.""" + + def __init__(self, coresys): + """Initialize host power handling.""" + self.coresys = coresys + + def 
_check_dbus(self): + """Check if systemd is connect or raise error.""" + if not self.sys_dbus.systemd.is_connected: + _LOGGER.error("No systemd dbus connection available") + raise HostNotSupportedError() + + async def reboot(self): + """Reboot host system.""" + self._check_dbus() + + _LOGGER.info("Initialize host reboot over systemd") + try: + await self.sys_core.shutdown() + finally: + await self.sys_dbus.systemd.reboot() + + async def shutdown(self): + """Shutdown host system.""" + self._check_dbus() + + _LOGGER.info("Initialize host power off over systemd") + try: + await self.sys_core.shutdown() + finally: + await self.sys_dbus.systemd.power_off() diff --git a/hassio/misc/dns.py b/hassio/misc/dns.py index 6b116b849..c7ea30b50 100644 --- a/hassio/misc/dns.py +++ b/hassio/misc/dns.py @@ -8,7 +8,7 @@ _LOGGER = logging.getLogger(__name__) COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53" -class DNSForward(object): +class DNSForward: """Manage DNS forwarding to internal DNS.""" def __init__(self, loop): diff --git a/hassio/misc/hardware.py b/hassio/misc/hardware.py index 66060d0c4..62e045223 100644 --- a/hassio/misc/hardware.py +++ b/hassio/misc/hardware.py @@ -23,7 +23,7 @@ GPIO_DEVICES = Path("/sys/class/gpio") RE_TTY = re.compile(r"tty[A-Z]+") -class Hardware(object): +class Hardware: """Represent a interface to procfs, sysfs and udev.""" def __init__(self): diff --git a/hassio/misc/host_control.py b/hassio/misc/host_control.py deleted file mode 100644 index 848a7e0aa..000000000 --- a/hassio/misc/host_control.py +++ /dev/null @@ -1,124 +0,0 @@ -"""Host control for HassIO.""" -import asyncio -import json -import logging - -import async_timeout - -from ..const import ( - SOCKET_HC, ATTR_LAST_VERSION, ATTR_VERSION, ATTR_TYPE, ATTR_FEATURES, - ATTR_HOSTNAME, ATTR_OS) - -_LOGGER = logging.getLogger(__name__) - -TIMEOUT = 15 -UNKNOWN = 'unknown' - -FEATURES_SHUTDOWN = 'shutdown' -FEATURES_REBOOT = 'reboot' -FEATURES_UPDATE = 'update' -FEATURES_HOSTNAME = 'hostname' -FEATURES_NETWORK_INFO = 'network_info' -FEATURES_NETWORK_CONTROL = 'network_control' - - -class HostControl(object): - """Client for host control.""" - - def __init__(self, loop): - """Initialize HostControl socket client.""" - self.loop = loop - self.active = False - self.version = UNKNOWN - self.last_version = UNKNOWN - self.type = UNKNOWN - self.features = [] - self.hostname = UNKNOWN - self.os_info = UNKNOWN - - if SOCKET_HC.is_socket(): - self.active = True - - async def _send_command(self, command): - """Send command to host. - - Is a coroutine. - """ - if not self.active: - return - - reader, writer = await asyncio.open_unix_connection( - str(SOCKET_HC), loop=self.loop) - - try: - # send - _LOGGER.info("Send '%s' to HostControl.", command) - - with async_timeout.timeout(TIMEOUT, loop=self.loop): - writer.write("{}\n".format(command).encode()) - data = await reader.readline() - - response = data.decode().rstrip() - _LOGGER.info("Receive from HostControl: %s.", response) - - if response == "OK": - return True - elif response == "ERROR": - return False - elif response == "WRONG": - return None - else: - try: - return json.loads(response) - except json.JSONDecodeError: - _LOGGER.warning("Json parse error from HostControl '%s'.", - response) - - except asyncio.TimeoutError: - _LOGGER.error("Timeout from HostControl!") - - finally: - writer.close() - - async def load(self): - """Load Info from host. - - Return a coroutine. 
- """ - info = await self._send_command("info") - if not info: - return - - self.version = info.get(ATTR_VERSION, UNKNOWN) - self.last_version = info.get(ATTR_LAST_VERSION, UNKNOWN) - self.type = info.get(ATTR_TYPE, UNKNOWN) - self.features = info.get(ATTR_FEATURES, []) - self.hostname = info.get(ATTR_HOSTNAME, UNKNOWN) - self.os_info = info.get(ATTR_OS, UNKNOWN) - - def reboot(self): - """Reboot the host system. - - Return a coroutine. - """ - return self._send_command("reboot") - - def shutdown(self): - """Shutdown the host system. - - Return a coroutine. - """ - return self._send_command("shutdown") - - def update(self, version=None): - """Update the host system. - - Return a coroutine. - """ - if version: - return self._send_command("update {}".format(version)) - return self._send_command("update") - - def set_hostname(self, hostname): - """Update hostname on host.""" - return self._send_command("hostname {}".format(hostname)) diff --git a/hassio/misc/scheduler.py b/hassio/misc/scheduler.py index 65cb4be62..44401422c 100644 --- a/hassio/misc/scheduler.py +++ b/hassio/misc/scheduler.py @@ -10,7 +10,7 @@ CALL = 'callback' TASK = 'task' -class Scheduler(object): +class Scheduler: """Schedule task inside HassIO.""" def __init__(self, loop): diff --git a/hassio/services/__init__.py b/hassio/services/__init__.py index 5f15940a1..9d5abe4be 100644 --- a/hassio/services/__init__.py +++ b/hassio/services/__init__.py @@ -1,8 +1,8 @@ """Handle internal services discovery.""" +from .discovery import Discovery # noqa from .mqtt import MQTTService from .data import ServicesData -from .discovery import Discovery from ..const import SERVICE_MQTT from ..coresys import CoreSysAttributes @@ -19,7 +19,6 @@ class ServiceManager(CoreSysAttributes): """Initialize Services handler.""" self.coresys = coresys self.data = ServicesData() - self.discovery = Discovery(coresys) self.services_obj = {} @property @@ -37,9 +36,9 @@ class ServiceManager(CoreSysAttributes): self.services_obj[slug] = service(self.coresys) # Read exists discovery messages - self.discovery.load() + self.sys_discovery.load() def reset(self): """Reset available data.""" self.data.reset_data() - self.discovery.load() + self.sys_discovery.load() diff --git a/hassio/services/discovery.py b/hassio/services/discovery.py index f0166589a..4914e7c44 100644 --- a/hassio/services/discovery.py +++ b/hassio/services/discovery.py @@ -36,7 +36,7 @@ class Discovery(CoreSysAttributes): self._data.clear() self._data.extend(messages) - self._services.data.save_data() + self.sys_services.data.save_data() def get(self, uuid): """Return discovery message.""" @@ -45,7 +45,7 @@ class Discovery(CoreSysAttributes): @property def _data(self): """Return discovery data.""" - return self._services.data.discovery + return self.sys_services.data.discovery @property def list_messages(self): @@ -69,7 +69,7 @@ class Discovery(CoreSysAttributes): self.save() # send event to Home-Assistant - self._loop.create_task(self._homeassistant.send_event( + self.sys_create_task(self.sys_homeassistant.send_event( EVENT_DISCOVERY_ADD, {ATTR_UUID: message.uuid})) return message @@ -80,11 +80,11 @@ class Discovery(CoreSysAttributes): self.save() # send event to Home-Assistant - self._loop.create_task(self._homeassistant.send_event( + self.sys_create_task(self.sys_homeassistant.send_event( EVENT_DISCOVERY_DEL, {ATTR_UUID: message.uuid})) -class Message(object): +class Message: """Represent a single Discovery message.""" def __init__(self, provider, component, platform, config, uuid=None): diff 
--git a/hassio/services/interface.py b/hassio/services/interface.py index a3e13a387..4d4e79da0 100644 --- a/hassio/services/interface.py +++ b/hassio/services/interface.py @@ -37,7 +37,7 @@ class ServiceInterface(CoreSysAttributes): def save(self): """Save changes.""" - self._services.data.save_data() + self.sys_services.data.save_data() def get_service_data(self): """Return the requested service data.""" diff --git a/hassio/services/mqtt.py b/hassio/services/mqtt.py index fa4436622..6e2e519c8 100644 --- a/hassio/services/mqtt.py +++ b/hassio/services/mqtt.py @@ -21,7 +21,7 @@ class MQTTService(ServiceInterface): @property def _data(self): """Return data of this service.""" - return self._services.data.mqtt + return self.sys_services.data.mqtt @property def schema(self): @@ -66,7 +66,7 @@ class MQTTService(ServiceInterface): return True # discover mqtt to homeassistant - message = self._services.discovery.send( + message = self.sys_discovery.send( provider, SERVICE_MQTT, None, self.hass_config) self._data[ATTR_DISCOVERY_ID] = message.uuid @@ -81,8 +81,8 @@ class MQTTService(ServiceInterface): discovery_id = self._data.get(ATTR_DISCOVERY_ID) if discovery_id: - self._services.discovery.remove( - self._services.discovery.get(discovery_id)) + self.sys_discovery.remove( + self.sys_discovery.get(discovery_id)) self._data.clear() self.save() diff --git a/hassio/snapshots/__init__.py b/hassio/snapshots/__init__.py index ade6e355a..9b5370df7 100644 --- a/hassio/snapshots/__init__.py +++ b/hassio/snapshots/__init__.py @@ -35,7 +35,7 @@ class SnapshotManager(CoreSysAttributes): """Initialize a new snapshot object from name.""" date_str = utcnow().isoformat() slug = create_slug(name, date_str) - tar_file = Path(self._config.path_backup, f"{slug}.tar") + tar_file = Path(self.sys_config.path_backup, f"{slug}.tar") # init object snapshot = Snapshot(self.coresys, tar_file) @@ -65,11 +65,11 @@ class SnapshotManager(CoreSysAttributes): self.snapshots_obj[snapshot.slug] = snapshot tasks = [_load_snapshot(tar_file) for tar_file in - self._config.path_backup.glob("*.tar")] + self.sys_config.path_backup.glob("*.tar")] _LOGGER.info("Found %d snapshot files", len(tasks)) if tasks: - await asyncio.wait(tasks, loop=self._loop) + await asyncio.wait(tasks) def remove(self, snapshot): """Remove a snapshot.""" @@ -98,7 +98,7 @@ class SnapshotManager(CoreSysAttributes): return None # Move snapshot to backup - tar_origin = Path(self._config.path_backup, f"{snapshot.slug}.tar") + tar_origin = Path(self.sys_config.path_backup, f"{snapshot.slug}.tar") try: snapshot.tarfile.rename(tar_origin) @@ -124,7 +124,7 @@ class SnapshotManager(CoreSysAttributes): snapshot = self._create_snapshot(name, SNAPSHOT_FULL, password) _LOGGER.info("Full-Snapshot %s start", snapshot.slug) try: - self._scheduler.suspend = True + self.sys_scheduler.suspend = True await self.lock.acquire() async with snapshot: @@ -146,7 +146,7 @@ class SnapshotManager(CoreSysAttributes): return snapshot finally: - self._scheduler.suspend = False + self.sys_scheduler.suspend = False self.lock.release() async def do_snapshot_partial(self, name="", addons=None, folders=None, @@ -162,14 +162,14 @@ class SnapshotManager(CoreSysAttributes): _LOGGER.info("Partial-Snapshot %s start", snapshot.slug) try: - self._scheduler.suspend = True + self.sys_scheduler.suspend = True await self.lock.acquire() async with snapshot: # Snapshot add-ons addon_list = [] for addon_slug in addons: - addon = self._addons.get(addon_slug) + addon = self.sys_addons.get(addon_slug) if addon and 
addon.is_installed: addon_list.append(addon) continue @@ -195,7 +195,7 @@ class SnapshotManager(CoreSysAttributes): return snapshot finally: - self._scheduler.suspend = False + self.sys_scheduler.suspend = False self.lock.release() async def do_restore_full(self, snapshot, password=None): @@ -215,21 +215,14 @@ class SnapshotManager(CoreSysAttributes): _LOGGER.info("Full-Restore %s start", snapshot.slug) try: - self._scheduler.suspend = True + self.sys_scheduler.suspend = True await self.lock.acquire() async with snapshot: tasks = [] # Stop Home-Assistant / Add-ons - tasks.append(self._homeassistant.stop()) - for addon in self._addons.list_addons: - if addon.is_installed: - tasks.append(addon.stop()) - - if tasks: - _LOGGER.info("Restore %s stop tasks", snapshot.slug) - await asyncio.wait(tasks, loop=self._loop) + await self.sys_core.shutdown() # Restore folders _LOGGER.info("Restore %s run folders", snapshot.slug) @@ -238,8 +231,8 @@ class SnapshotManager(CoreSysAttributes): # Start homeassistant restore _LOGGER.info("Restore %s run Home-Assistant", snapshot.slug) snapshot.restore_homeassistant() - task_hass = self._loop.create_task( - self._homeassistant.update(snapshot.homeassistant_version)) + task_hass = self.sys_create_task(self.sys_homeassistant.update( + snapshot.homeassistant_version)) # Restore repositories _LOGGER.info("Restore %s run Repositories", snapshot.slug) @@ -247,13 +240,13 @@ class SnapshotManager(CoreSysAttributes): # Delete delta add-ons tasks.clear() - for addon in self._addons.list_installed: + for addon in self.sys_addons.list_installed: if addon.slug not in snapshot.addon_list: tasks.append(addon.uninstall()) if tasks: _LOGGER.info("Restore %s remove add-ons", snapshot.slug) - await asyncio.wait(tasks, loop=self._loop) + await asyncio.wait(tasks) # Restore add-ons _LOGGER.info("Restore %s old add-ons", snapshot.slug) @@ -263,7 +256,7 @@ class SnapshotManager(CoreSysAttributes): _LOGGER.info("Restore %s wait until homeassistant ready", snapshot.slug) await task_hass - await self._homeassistant.start() + await self.sys_homeassistant.start() except Exception: # pylint: disable=broad-except _LOGGER.exception("Restore %s error", snapshot.slug) @@ -274,7 +267,7 @@ class SnapshotManager(CoreSysAttributes): return True finally: - self._scheduler.suspend = False + self.sys_scheduler.suspend = False self.lock.release() async def do_restore_partial(self, snapshot, homeassistant=False, @@ -293,13 +286,13 @@ class SnapshotManager(CoreSysAttributes): _LOGGER.info("Partial-Restore %s start", snapshot.slug) try: - self._scheduler.suspend = True + self.sys_scheduler.suspend = True await self.lock.acquire() async with snapshot: # Stop Home-Assistant if they will be restored later if homeassistant and FOLDER_HOMEASSISTANT in folders: - await self._homeassistant.stop() + await self.sys_homeassistant.stop() # Process folders if folders: @@ -312,14 +305,14 @@ class SnapshotManager(CoreSysAttributes): _LOGGER.info("Restore %s run Home-Assistant", snapshot.slug) snapshot.restore_homeassistant() - task_hass = self._loop.create_task( - self._homeassistant.update( + task_hass = self.sys_create_task( + self.sys_homeassistant.update( snapshot.homeassistant_version)) # Process Add-ons addon_list = [] for slug in addons: - addon = self._addons.get(slug) + addon = self.sys_addons.get(slug) if addon: addon_list.append(addon) continue @@ -334,7 +327,7 @@ class SnapshotManager(CoreSysAttributes): _LOGGER.info("Restore %s wait for Home-Assistant", snapshot.slug) await task_hass - await 
self._homeassistant.start() + await self.sys_homeassistant.start() except Exception: # pylint: disable=broad-except _LOGGER.exception("Restore %s error", snapshot.slug) @@ -345,5 +338,5 @@ class SnapshotManager(CoreSysAttributes): return True finally: - self._scheduler.suspend = False + self.sys_scheduler.suspend = False self.lock.release() diff --git a/hassio/snapshots/snapshot.py b/hassio/snapshots/snapshot.py index 7077e9c4e..0888ad177 100644 --- a/hassio/snapshots/snapshot.py +++ b/hassio/snapshots/snapshot.py @@ -179,7 +179,7 @@ class Snapshot(CoreSysAttributes): # read snapshot.json try: - raw = await self._loop.run_in_executor(None, _load_file) + raw = await self.sys_run_in_executor(_load_file) except (tarfile.TarError, KeyError) as err: _LOGGER.error( "Can't read snapshot tarfile %s: %s", self.tarfile, err) @@ -204,7 +204,7 @@ class Snapshot(CoreSysAttributes): async def __aenter__(self): """Async context to open a snapshot.""" - self._tmp = TemporaryDirectory(dir=str(self._config.path_tmp)) + self._tmp = TemporaryDirectory(dir=str(self.sys_config.path_tmp)) # create a snapshot if not self.tarfile.is_file(): @@ -216,7 +216,7 @@ class Snapshot(CoreSysAttributes): with tarfile.open(self.tarfile, "r:") as tar: tar.extractall(path=self._tmp.name) - await self._loop.run_in_executor(None, _extract_snapshot) + await self.sys_run_in_executor(_extract_snapshot) async def __aexit__(self, exception_type, exception_value, traceback): """Async context to close a snapshot.""" @@ -241,7 +241,7 @@ class Snapshot(CoreSysAttributes): try: write_json_file(Path(self._tmp.name, "snapshot.json"), self._data) - await self._loop.run_in_executor(None, _create_snapshot) + await self.sys_run_in_executor(_create_snapshot) except (OSError, json.JSONDecodeError) as err: _LOGGER.error("Can't write snapshot: %s", err) finally: @@ -249,7 +249,7 @@ class Snapshot(CoreSysAttributes): async def store_addons(self, addon_list=None): """Add a list of add-ons into snapshot.""" - addon_list = addon_list or self._addons.list_installed + addon_list = addon_list or self.sys_addons.list_installed async def _addon_save(addon): """Task to store a add-on into snapshot.""" @@ -273,14 +273,14 @@ class Snapshot(CoreSysAttributes): # Run tasks tasks = [_addon_save(addon) for addon in addon_list] if tasks: - await asyncio.wait(tasks, loop=self._loop) + await asyncio.wait(tasks) async def restore_addons(self, addon_list=None): """Restore a list add-on from snapshot.""" if not addon_list: addon_list = [] for addon_slug in self.addon_list: - addon = self._addons.get(addon_slug) + addon = self.sys_addons.get(addon_slug) if addon: addon_list.append(addon) @@ -303,7 +303,7 @@ class Snapshot(CoreSysAttributes): # Run tasks tasks = [_addon_restore(addon) for addon in addon_list] if tasks: - await asyncio.wait(tasks, loop=self._loop) + await asyncio.wait(tasks) async def store_folders(self, folder_list=None): """Backup hassio data into snapshot.""" @@ -313,7 +313,7 @@ class Snapshot(CoreSysAttributes): """Intenal function to snapshot a folder.""" slug_name = name.replace("/", "_") tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz") - origin_dir = Path(self._config.path_hassio, name) + origin_dir = Path(self.sys_config.path_hassio, name) # Check if exsits if not origin_dir.is_dir(): @@ -332,10 +332,10 @@ class Snapshot(CoreSysAttributes): _LOGGER.warning("Can't snapshot folder %s: %s", name, err) # Run tasks - tasks = [self._loop.run_in_executor(None, _folder_save, folder) + tasks = [self.sys_run_in_executor(_folder_save, folder) for folder 
in folder_list] if tasks: - await asyncio.wait(tasks, loop=self._loop) + await asyncio.wait(tasks) async def restore_folders(self, folder_list=None): """Backup hassio data into snapshot.""" @@ -345,7 +345,7 @@ class Snapshot(CoreSysAttributes): """Intenal function to restore a folder.""" slug_name = name.replace("/", "_") tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz") - origin_dir = Path(self._config.path_hassio, name) + origin_dir = Path(self.sys_config.path_hassio, name) # Check if exists inside snapshot if not tar_name.exists(): @@ -366,58 +366,58 @@ class Snapshot(CoreSysAttributes): _LOGGER.warning("Can't restore folder %s: %s", name, err) # Run tasks - tasks = [self._loop.run_in_executor(None, _folder_restore, folder) + tasks = [self.sys_run_in_executor(_folder_restore, folder) for folder in folder_list] if tasks: - await asyncio.wait(tasks, loop=self._loop) + await asyncio.wait(tasks) def store_homeassistant(self): """Read all data from homeassistant object.""" - self.homeassistant[ATTR_VERSION] = self._homeassistant.version - self.homeassistant[ATTR_WATCHDOG] = self._homeassistant.watchdog - self.homeassistant[ATTR_BOOT] = self._homeassistant.boot - self.homeassistant[ATTR_WAIT_BOOT] = self._homeassistant.wait_boot + self.homeassistant[ATTR_VERSION] = self.sys_homeassistant.version + self.homeassistant[ATTR_WATCHDOG] = self.sys_homeassistant.watchdog + self.homeassistant[ATTR_BOOT] = self.sys_homeassistant.boot + self.homeassistant[ATTR_WAIT_BOOT] = self.sys_homeassistant.wait_boot # Custom image - if self._homeassistant.is_custom_image: - self.homeassistant[ATTR_IMAGE] = self._homeassistant.image + if self.sys_homeassistant.is_custom_image: + self.homeassistant[ATTR_IMAGE] = self.sys_homeassistant.image self.homeassistant[ATTR_LAST_VERSION] = \ - self._homeassistant.last_version + self.sys_homeassistant.last_version # API/Proxy - self.homeassistant[ATTR_PORT] = self._homeassistant.api_port - self.homeassistant[ATTR_SSL] = self._homeassistant.api_ssl + self.homeassistant[ATTR_PORT] = self.sys_homeassistant.api_port + self.homeassistant[ATTR_SSL] = self.sys_homeassistant.api_ssl self.homeassistant[ATTR_PASSWORD] = \ - self._encrypt_data(self._homeassistant.api_password) + self._encrypt_data(self.sys_homeassistant.api_password) def restore_homeassistant(self): """Write all data to homeassistant object.""" - self._homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG] - self._homeassistant.boot = self.homeassistant[ATTR_BOOT] - self._homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT] + self.sys_homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG] + self.sys_homeassistant.boot = self.homeassistant[ATTR_BOOT] + self.sys_homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT] # Custom image if self.homeassistant.get(ATTR_IMAGE): - self._homeassistant.image = self.homeassistant[ATTR_IMAGE] - self._homeassistant.last_version = \ + self.sys_homeassistant.image = self.homeassistant[ATTR_IMAGE] + self.sys_homeassistant.last_version = \ self.homeassistant[ATTR_LAST_VERSION] # API/Proxy - self._homeassistant.api_port = self.homeassistant[ATTR_PORT] - self._homeassistant.api_ssl = self.homeassistant[ATTR_SSL] - self._homeassistant.api_password = \ + self.sys_homeassistant.api_port = self.homeassistant[ATTR_PORT] + self.sys_homeassistant.api_ssl = self.homeassistant[ATTR_SSL] + self.sys_homeassistant.api_password = \ self._decrypt_data(self.homeassistant[ATTR_PASSWORD]) # save - self._homeassistant.save_data() + self.sys_homeassistant.save_data() def 
store_repositories(self): """Store repository list into snapshot.""" - self.repositories = self._config.addons_repositories + self.repositories = self.sys_config.addons_repositories def restore_repositories(self): """Restore repositories from snapshot. Return a coroutine. """ - return self._addons.load_repositories(self.repositories) + return self.sys_addons.load_repositories(self.repositories) diff --git a/hassio/supervisor.py b/hassio/supervisor.py index f2740b45a..905ff4c46 100644 --- a/hassio/supervisor.py +++ b/hassio/supervisor.py @@ -34,7 +34,7 @@ class Supervisor(CoreSysAttributes): @property def last_version(self): """Return last available version of homeassistant.""" - return self._updater.version_hassio + return self.sys_updater.version_hassio @property def image(self): @@ -50,13 +50,13 @@ class Supervisor(CoreSysAttributes): """Update HomeAssistant version.""" version = version or self.last_version - if version == self._supervisor.version: + if version == self.sys_supervisor.version: _LOGGER.warning("Version %s is already installed", version) return _LOGGER.info("Update supervisor to version %s", version) if await self.instance.install(version): - self._loop.call_later(1, self._loop.stop) + self.sys_loop.call_later(1, self.sys_loop.stop) return True _LOGGER.error("Update of hass.io fails!") diff --git a/hassio/tasks.py b/hassio/tasks.py index 7e8ea6b08..c68fdd904 100644 --- a/hassio/tasks.py +++ b/hassio/tasks.py @@ -15,7 +15,7 @@ class Tasks(CoreSysAttributes): RUN_RELOAD_ADDONS = 21600 RUN_RELOAD_SNAPSHOTS = 72000 - RUN_RELOAD_HOST_CONTROL = 72000 + RUN_RELOAD_HOST = 72000 RUN_RELOAD_UPDATER = 21600 RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15 @@ -29,24 +29,24 @@ class Tasks(CoreSysAttributes): async def load(self): """Add Tasks to scheduler.""" - self.jobs.add(self._scheduler.register_task( + self.jobs.add(self.sys_scheduler.register_task( self._update_addons, self.RUN_UPDATE_ADDONS)) - self.jobs.add(self._scheduler.register_task( + self.jobs.add(self.sys_scheduler.register_task( self._update_supervisor, self.RUN_UPDATE_SUPERVISOR)) - self.jobs.add(self._scheduler.register_task( - self._addons.reload, self.RUN_RELOAD_ADDONS)) - self.jobs.add(self._scheduler.register_task( - self._updater.reload, self.RUN_RELOAD_UPDATER)) - self.jobs.add(self._scheduler.register_task( - self._snapshots.reload, self.RUN_RELOAD_SNAPSHOTS)) - self.jobs.add(self._scheduler.register_task( - self._host_control.load, self.RUN_RELOAD_HOST_CONTROL)) + self.jobs.add(self.sys_scheduler.register_task( + self.sys_addons.reload, self.RUN_RELOAD_ADDONS)) + self.jobs.add(self.sys_scheduler.register_task( + self.sys_updater.reload, self.RUN_RELOAD_UPDATER)) + self.jobs.add(self.sys_scheduler.register_task( + self.sys_snapshots.reload, self.RUN_RELOAD_SNAPSHOTS)) + self.jobs.add(self.sys_scheduler.register_task( + self.sys_host.load, self.RUN_RELOAD_HOST)) - self.jobs.add(self._scheduler.register_task( + self.jobs.add(self.sys_scheduler.register_task( self._watchdog_homeassistant_docker, self.RUN_WATCHDOG_HOMEASSISTANT_DOCKER)) - self.jobs.add(self._scheduler.register_task( + self.jobs.add(self.sys_scheduler.register_task( self._watchdog_homeassistant_api, self.RUN_WATCHDOG_HOMEASSISTANT_API)) @@ -55,7 +55,7 @@ class Tasks(CoreSysAttributes): async def _update_addons(self): """Check if a update is available of a addon and update it.""" tasks = [] - for addon in self._addons.list_addons: + for addon in self.sys_addons.list_addons: if not addon.is_installed or not addon.auto_update: continue @@ -70,35 +70,35 @@ class 
Tasks(CoreSysAttributes): if tasks: _LOGGER.info("Addon auto update process %d tasks", len(tasks)) - await asyncio.wait(tasks, loop=self._loop) + await asyncio.wait(tasks) async def _update_supervisor(self): """Check and run update of supervisor hassio.""" - if not self._supervisor.need_update: + if not self.sys_supervisor.need_update: return # don't perform a update on beta/dev channel - if self._dev: + if self.sys_dev: _LOGGER.warning("Ignore Hass.io update on dev channel!") return _LOGGER.info("Found new Hass.io version") - await self._supervisor.update() + await self.sys_supervisor.update() async def _watchdog_homeassistant_docker(self): """Check running state of docker and start if they is close.""" # if Home-Assistant is active - if not await self._homeassistant.is_initialize() or \ - not self._homeassistant.watchdog: + if not await self.sys_homeassistant.is_initialize() or \ + not self.sys_homeassistant.watchdog: return # if Home-Assistant is running - if self._homeassistant.in_progress or \ - await self._homeassistant.is_running(): + if self.sys_homeassistant.in_progress or \ + await self.sys_homeassistant.is_running(): return _LOGGER.warning("Watchdog found a problem with Home-Assistant docker!") - await self._homeassistant.start() + await self.sys_homeassistant.start() async def _watchdog_homeassistant_api(self): """Create scheduler task for montoring running state of API. @@ -109,13 +109,13 @@ class Tasks(CoreSysAttributes): retry_scan = self._data.get('HASS_WATCHDOG_API', 0) # If Home-Assistant is active - if not await self._homeassistant.is_initialize() or \ - not self._homeassistant.watchdog: + if not await self.sys_homeassistant.is_initialize() or \ + not self.sys_homeassistant.watchdog: return # If Home-Assistant API is up - if self._homeassistant.in_progress or \ - await self._homeassistant.check_api_state(): + if self.sys_homeassistant.in_progress or \ + await self.sys_homeassistant.check_api_state(): return # Look like we run into a problem @@ -126,5 +126,5 @@ class Tasks(CoreSysAttributes): return _LOGGER.error("Watchdog found a problem with Home-Assistant API!") - await self._homeassistant.restart() + await self.sys_homeassistant.restart() self._data['HASS_WATCHDOG_API'] = 0 diff --git a/hassio/updater.py b/hassio/updater.py index ea65a0503..14ac30328 100644 --- a/hassio/updater.py +++ b/hassio/updater.py @@ -68,8 +68,8 @@ class Updater(JsonConfig, CoreSysAttributes): url = URL_HASSIO_VERSION.format(CHANNEL_TO_BRANCH[self.channel]) try: _LOGGER.info("Fetch update data from %s", url) - with async_timeout.timeout(10, loop=self._loop): - async with self._websession.get(url) as request: + with async_timeout.timeout(10): + async with self.sys_websession.get(url) as request: data = await request.json(content_type=None) except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err: diff --git a/hassio/utils/__init__.py b/hassio/utils/__init__.py index 0e960af4e..57220352d 100644 --- a/hassio/utils/__init__.py +++ b/hassio/utils/__init__.py @@ -27,7 +27,7 @@ def process_lock(method): return wrap_api -class AsyncThrottle(object): +class AsyncThrottle: """ Decorator that prevents a function from being called more than once every time period. 
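Most of the mechanical changes above (docker, snapshot, task and updater modules) swap direct `self._loop.run_in_executor(None, ...)` and `self._loop.create_task(...)` calls for the two new `CoreSys` wrappers exposed as `sys_run_in_executor` and `sys_create_task`. The sketch below shows that pattern in isolation; the reduced `CoreSys`, `blocking_call`, `notify` and `main` are invented placeholders, not code from the patch.

```python
"""Illustrative sketch of the CoreSys executor/task wrappers."""
import asyncio


class CoreSys:
    """Reduced CoreSys carrying only the two loop wrappers from the patch."""

    def __init__(self, loop):
        self._loop = loop

    def run_in_executor(self, funct, *args):
        """Wrapper for executor pool."""
        return self._loop.run_in_executor(None, funct, *args)

    def create_task(self, coroutine):
        """Wrapper for async task."""
        return self._loop.create_task(coroutine)


def blocking_call():
    """Invented stand-in for a blocking call, e.g. a docker SDK request."""
    return 42


async def notify():
    """Invented stand-in for a fire-and-forget coroutine."""
    print("event sent")


async def main():
    coresys = CoreSys(asyncio.get_running_loop())

    # sys_run_in_executor(...) in the patch boils down to this call:
    result = await coresys.run_in_executor(blocking_call)
    assert result == 42

    # sys_create_task(...) schedules a coroutine without awaiting it inline:
    task = coresys.create_task(notify())
    await task


asyncio.run(main())
```

Routing loop access through `CoreSys` keeps direct event-loop handling in one place; the dropped `loop=` keyword arguments on `asyncio.wait()` and `asyncio.sleep()` simply rely on asyncio picking up the running loop by itself.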
diff --git a/hassio/utils/dt.py b/hassio/utils/dt.py index f8f9601d7..c529c07fc 100644 --- a/hassio/utils/dt.py +++ b/hassio/utils/dt.py @@ -29,7 +29,7 @@ async def fetch_timezone(websession): """Read timezone from freegeoip.""" data = {} try: - with async_timeout.timeout(10, loop=websession.loop): + with async_timeout.timeout(10): async with websession.get(FREEGEOIP_URL) as request: data = await request.json() diff --git a/hassio/utils/gdbus.py b/hassio/utils/gdbus.py new file mode 100644 index 000000000..8673ebe9d --- /dev/null +++ b/hassio/utils/gdbus.py @@ -0,0 +1,168 @@ +"""DBus implementation with glib.""" +import asyncio +import logging +import json +import shlex +import re +import xml.etree.ElementTree as ET + +from ..exceptions import DBusFatalError, DBusParseError + +_LOGGER = logging.getLogger(__name__) + +# Use to convert GVariant into json +RE_GVARIANT_TYPE = re.compile( + r"(?:boolean|byte|int16|uint16|int32|uint32|handle|int64|uint64|double|" + r"string|objectpath|signature) ") +RE_GVARIANT_TULPE = re.compile(r"^\((.*),\)$") +RE_GVARIANT_VARIANT = re.compile( + r"(?<=(?: |{|\[))<((?:'|\").*?(?:'|\")|\d+(?:\.\d+)?)>(?=(?:|]|}|,))") +RE_GVARIANT_STRING = re.compile(r"(?<=(?: |{|\[))'(.*?)'(?=(?:|]|}|,))") + +# Commands for dbus +INTROSPECT = ("gdbus introspect --system --dest {bus} " + "--object-path {object} --xml") +CALL = ("gdbus call --system --dest {bus} --object-path {object} " + "--method {method} {args}") + +DBUS_METHOD_GETALL = 'org.freedesktop.DBus.Properties.GetAll' + + +class DBus: + """DBus handler.""" + + def __init__(self, bus_name, object_path): + """Initialize dbus object.""" + self.bus_name = bus_name + self.object_path = object_path + self.methods = set() + + @staticmethod + async def connect(bus_name, object_path): + """Read object data.""" + self = DBus(bus_name, object_path) + self._init_proxy() # pylint: disable=protected-access + + _LOGGER.info("Connect to dbus: %s - %s", bus_name, object_path) + return self + + async def _init_proxy(self): + """Read interface data.""" + command = shlex.split(INTROSPECT.format( + bus=self.bus_name, + object=self.object_path + )) + + # Ask data + _LOGGER.info("Introspect %s no %s", self.bus_name, self.object_path) + data = await self._send(command) + + # Parse XML + try: + xml = ET.fromstring(data) + except ET.ParseError as err: + _LOGGER.error("Can't parse introspect data: %s", err) + raise DBusParseError() from None + + # Read available methods + for interface in xml.findall("/node/interface"): + interface_name = interface.get('name') + for method in interface.findall("/method"): + method_name = method.get('name') + self.methods.add(f"{interface_name}.{method_name}") + + @staticmethod + def _gvariant(raw): + """Parse GVariant input to python.""" + raw = RE_GVARIANT_TYPE.sub("", raw) + raw = RE_GVARIANT_TULPE.sub(r"[\1]", raw) + raw = RE_GVARIANT_VARIANT.sub(r"\1", raw) + raw = RE_GVARIANT_STRING.sub(r'"\1"', raw) + + try: + return json.loads(raw) + except json.JSONDecodeError as err: + _LOGGER.error("Can't parse '%s': %s", raw, err) + raise DBusParseError() from None + + async def call_dbus(self, method, *args): + """Call a dbus method.""" + command = shlex.split(CALL.format( + bus=self.bus_name, + object=self.object_path, + method=method, + args=" ".join(map(str, args)) + )) + + # Run command + _LOGGER.info("Call %s on %s", method, self.object_path) + data = await self._send(command) + + # Parse and return data + return self._gvariant(data) + + async def get_properties(self, interface): + """Read all properties from 
interface.""" + try: + return (await self.call_dbus(DBUS_METHOD_GETALL, interface))[0] + except IndexError: + _LOGGER.error("No attributes returned for %s", interface) + raise DBusFatalError from None + + async def _send(self, command): + """Send command over dbus.""" + # Run command + try: + proc = await asyncio.create_subprocess_exec( + *command, + stdin=asyncio.subprocess.DEVNULL, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.DEVNULL + ) + + data, _ = await proc.communicate() + except OSError as err: + _LOGGER.error("DBus fatal error: %s", err) + raise DBusFatalError() from None + + # Success? + if proc.returncode != 0: + _LOGGER.error("DBus return error: %s", data) + raise DBusFatalError() + + # End + return data.decode() + + def __getattr__(self, name): + """Mapping to dbus method.""" + return getattr(DBusCallWrapper(self, self.object_path), name) + + +class DBusCallWrapper: + """Wrapper a DBus interface for a call.""" + + def __init__(self, dbus, interface): + """Initialize wrapper.""" + self.dbus = dbus + self.interface = interface + + def __call__(self): + """Should never be called.""" + _LOGGER.error("DBus method %s not exists!", self.interface) + raise DBusFatalError() + + def __getattr__(self, name): + """Mapping to dbus method.""" + interface = f"{self.interface}.{name}" + + if interface not in self.dbus.methods: + return DBusCallWrapper(self.dbus, interface) + + def _method_wrapper(*args): + """Wrap method. + + Return a coroutine + """ + return self.dbus.call_dbus(self.interface, *args) + + return _method_wrapper diff --git a/hassio/utils/json.py b/hassio/utils/json.py index fa29f8172..93eb482c2 100644 --- a/hassio/utils/json.py +++ b/hassio/utils/json.py @@ -21,7 +21,7 @@ def read_json_file(jsonfile): return json.loads(cfile.read()) -class JsonConfig(object): +class JsonConfig: """Hass core object for handle it.""" def __init__(self, json_file, schema): diff --git a/hassio/utils/tar.py b/hassio/utils/tar.py index a1f23537c..9d13abe0c 100644 --- a/hassio/utils/tar.py +++ b/hassio/utils/tar.py @@ -12,7 +12,7 @@ MOD_READ = 'r' MOD_WRITE = 'w' -class SecureTarFile(object): +class SecureTarFile: """Handle encrypted files for tarfile library.""" def __init__(self, name, mode, key=None, gzip=True):