diff --git a/hassio/addons/__init__.py b/hassio/addons/__init__.py index 7d3f0db17..6b21290ce 100644 --- a/hassio/addons/__init__.py +++ b/hassio/addons/__init__.py @@ -56,7 +56,7 @@ class AddonManager(CoreSysAttributes): # init hassio built-in repositories repositories = \ - set(self._config.addons_repositories) | BUILTIN_REPOSITORIES + set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES # init custom repositories & load addons await self.load_repositories(repositories) @@ -90,7 +90,7 @@ class AddonManager(CoreSysAttributes): # don't add built-in repository to config if url not in BUILTIN_REPOSITORIES: - self._config.add_addon_repository(url) + self.sys_config.add_addon_repository(url) tasks = [_add_repository(url) for url in new_rep - old_rep] if tasks: @@ -99,7 +99,7 @@ class AddonManager(CoreSysAttributes): # del new repository for url in old_rep - new_rep - BUILTIN_REPOSITORIES: self.repositories_obj.pop(url).remove() - self._config.drop_addon_repository(url) + self.sys_config.drop_addon_repository(url) # update data self.data.reload() @@ -142,4 +142,4 @@ class AddonManager(CoreSysAttributes): _LOGGER.info("Startup %s run %d addons", stage, len(tasks)) if tasks: await asyncio.wait(tasks) - await asyncio.sleep(self._config.wait_boot) + await asyncio.sleep(self.sys_config.wait_boot) diff --git a/hassio/addons/addon.py b/hassio/addons/addon.py index e17f3f7a5..cfe28e6f4 100644 --- a/hassio/addons/addon.py +++ b/hassio/addons/addon.py @@ -66,7 +66,7 @@ class Addon(CoreSysAttributes): @property def _data(self): """Return addons data storage.""" - return self._addons.data + return self.sys_addons.data @property def is_installed(self): @@ -376,7 +376,7 @@ class Addon(CoreSysAttributes): if self.is_installed and \ ATTR_AUDIO_OUTPUT in self._data.user[self._id]: return self._data.user[self._id][ATTR_AUDIO_OUTPUT] - return self._alsa.default.output + return self.sys_alsa.default.output @audio_output.setter def audio_output(self, value): @@ -394,7 +394,7 @@ class Addon(CoreSysAttributes): if self.is_installed and ATTR_AUDIO_INPUT in self._data.user[self._id]: return self._data.user[self._id][ATTR_AUDIO_INPUT] - return self._alsa.default.input + return self.sys_alsa.default.input @audio_input.setter def audio_input(self, value): @@ -436,11 +436,11 @@ class Addon(CoreSysAttributes): # Repository with dockerhub images if ATTR_IMAGE in addon_data: - return addon_data[ATTR_IMAGE].format(arch=self._arch) + return addon_data[ATTR_IMAGE].format(arch=self.sys_arch) # local build return "{}/{}-addon-{}".format( - addon_data[ATTR_REPOSITORY], self._arch, + addon_data[ATTR_REPOSITORY], self.sys_arch, addon_data[ATTR_SLUG]) @property @@ -461,12 +461,12 @@ class Addon(CoreSysAttributes): @property def path_data(self): """Return addon data path inside supervisor.""" - return Path(self._config.path_addons_data, self._id) + return Path(self.sys_config.path_addons_data, self._id) @property def path_extern_data(self): """Return addon data path external for docker.""" - return PurePath(self._config.path_extern_addons_data, self._id) + return PurePath(self.sys_config.path_extern_addons_data, self._id) @property def path_options(self): @@ -506,16 +506,16 @@ class Addon(CoreSysAttributes): @property def path_asound(self): """Return path to asound config.""" - return Path(self._config.path_tmp, f"{self.slug}_asound") + return Path(self.sys_config.path_tmp, f"{self.slug}_asound") @property def path_extern_asound(self): """Return path to asound config for docker.""" - return Path(self._config.path_extern_tmp, 
f"{self.slug}_asound") + return Path(self.sys_config.path_extern_tmp, f"{self.slug}_asound") def save_data(self): """Save data of addon.""" - self._addons.data.save_data() + self.sys_addons.data.save_data() def write_options(self): """Return True if addon options is written to data.""" @@ -537,7 +537,7 @@ class Addon(CoreSysAttributes): def write_asound(self): """Write asound config to file and return True on success.""" - asound_config = self._alsa.asound( + asound_config = self.sys_alsa.asound( alsa_input=self.audio_input, alsa_output=self.audio_output) try: @@ -590,9 +590,9 @@ class Addon(CoreSysAttributes): async def install(self): """Install a addon.""" - if self._arch not in self.supported_arch: + if self.sys_arch not in self.supported_arch: _LOGGER.error( - "Addon %s not supported on %s", self._id, self._arch) + "Addon %s not supported on %s", self._id, self.sys_arch) return False if self.is_installed: @@ -735,7 +735,7 @@ class Addon(CoreSysAttributes): @check_installed async def snapshot(self, tar_file): """Snapshot a state of a addon.""" - with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp: + with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp: # store local image if self.need_build and not await \ self.instance.export_image(Path(temp, "image.tar")): @@ -764,7 +764,7 @@ class Addon(CoreSysAttributes): try: _LOGGER.info("Build snapshot for addon %s", self._id) - await self._loop.run_in_executor(None, _write_tarfile) + await self.sys_run_in_executor(_write_tarfile) except (tarfile.TarError, OSError) as err: _LOGGER.error("Can't write tarfile %s: %s", tar_file, err) return False @@ -774,7 +774,7 @@ class Addon(CoreSysAttributes): async def restore(self, tar_file): """Restore a state of a addon.""" - with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp: + with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp: # extract snapshot def _extract_tarfile(): """Extract tar snapshot.""" @@ -782,7 +782,7 @@ class Addon(CoreSysAttributes): snapshot.extractall(path=Path(temp)) try: - await self._loop.run_in_executor(None, _extract_tarfile) + await self.sys_run_in_executor(_extract_tarfile) except tarfile.TarError as err: _LOGGER.error("Can't read tarfile %s: %s", tar_file, err) return False @@ -828,7 +828,7 @@ class Addon(CoreSysAttributes): try: _LOGGER.info("Restore data for addon %s", self._id) - await self._loop.run_in_executor(None, _restore_data) + await self.sys_run_in_executor(_restore_data) except shutil.Error as err: _LOGGER.error("Can't restore origin data: %s", err) return False diff --git a/hassio/addons/build.py b/hassio/addons/build.py index d98c9597a..4d9a37618 100644 --- a/hassio/addons/build.py +++ b/hassio/addons/build.py @@ -25,13 +25,13 @@ class AddonBuild(JsonConfig, CoreSysAttributes): @property def addon(self): """Return addon of build data.""" - return self._addons.get(self._id) + return self.sys_addons.get(self._id) @property def base_image(self): """Base images for this addon.""" return self._data[ATTR_BUILD_FROM].get( - self._arch, BASE_IMAGE[self._arch]) + self.sys_arch, BASE_IMAGE[self.sys_arch]) @property def squash(self): @@ -53,7 +53,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes): 'squash': self.squash, 'labels': { 'io.hass.version': version, - 'io.hass.arch': self._arch, + 'io.hass.arch': self.sys_arch, 'io.hass.type': META_ADDON, 'io.hass.name': self._fix_label('name'), 'io.hass.description': self._fix_label('description'), @@ -61,7 +61,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes): 'buildargs': { 
'BUILD_FROM': self.base_image, 'BUILD_VERSION': version, - 'BUILD_ARCH': self._arch, + 'BUILD_ARCH': self.sys_arch, **self.additional_args, } } diff --git a/hassio/addons/data.py b/hassio/addons/data.py index 777d2b025..ade770c54 100644 --- a/hassio/addons/data.py +++ b/hassio/addons/data.py @@ -56,17 +56,17 @@ class AddonsData(JsonConfig, CoreSysAttributes): # read core repository self._read_addons_folder( - self._config.path_addons_core, REPOSITORY_CORE) + self.sys_config.path_addons_core, REPOSITORY_CORE) # read local repository self._read_addons_folder( - self._config.path_addons_local, REPOSITORY_LOCAL) + self.sys_config.path_addons_local, REPOSITORY_LOCAL) # add built-in repositories information self._set_builtin_repositories() # read custom git repositories - for repository_element in self._config.path_addons_git.iterdir(): + for repository_element in self.sys_config.path_addons_git.iterdir(): if repository_element.is_dir(): self._read_git_repository(repository_element) diff --git a/hassio/addons/git.py b/hassio/addons/git.py index a00521946..6f487d579 100644 --- a/hassio/addons/git.py +++ b/hassio/addons/git.py @@ -45,7 +45,7 @@ class GitRepo(CoreSysAttributes): async with self.lock: try: _LOGGER.info("Load addon %s repository", self.path) - self.repo = await self._loop.run_in_executor( + self.repo = await self.sys_loop.run_in_executor( None, git.Repo, str(self.path)) except (git.InvalidGitRepositoryError, git.NoSuchPathError, @@ -68,7 +68,7 @@ class GitRepo(CoreSysAttributes): try: _LOGGER.info("Clone addon %s repository", self.url) - self.repo = await self._loop.run_in_executor(None, ft.partial( + self.repo = await self.sys_run_in_executor(ft.partial( git.Repo.clone_from, self.url, str(self.path), **git_args )) @@ -89,7 +89,7 @@ class GitRepo(CoreSysAttributes): async with self.lock: try: _LOGGER.info("Pull addon %s repository", self.url) - await self._loop.run_in_executor( + await self.sys_loop.run_in_executor( None, self.repo.remotes.origin.pull) except (git.InvalidGitRepositoryError, git.NoSuchPathError, diff --git a/hassio/addons/repository.py b/hassio/addons/repository.py index 851e9e037..37d75ea75 100644 --- a/hassio/addons/repository.py +++ b/hassio/addons/repository.py @@ -30,7 +30,7 @@ class Repository(CoreSysAttributes): @property def _mesh(self): """Return data struct repository.""" - return self._addons.data.repositories.get(self._id, {}) + return self.sys_addons.data.repositories.get(self._id, {}) @property def slug(self): diff --git a/hassio/api/__init__.py b/hassio/api/__init__.py index e0727cffe..2fbf7143b 100644 --- a/hassio/api/__init__.py +++ b/hassio/api/__init__.py @@ -224,7 +224,7 @@ class RestAPI(CoreSysAttributes): self._handler = self.webapp.make_handler() try: - self.server = await self._loop.create_server( + self.server = await self.sys_loop.create_server( self._handler, "0.0.0.0", "80") except OSError as err: _LOGGER.fatal( diff --git a/hassio/api/addons.py b/hassio/api/addons.py index 1aba6266c..f382c9116 100644 --- a/hassio/api/addons.py +++ b/hassio/api/addons.py @@ -43,7 +43,7 @@ class APIAddons(CoreSysAttributes): def _extract_addon(self, request, check_installed=True): """Return addon and if not exists trow a exception.""" - addon = self._addons.get(request.match_info.get('addon')) + addon = self.sys_addons.get(request.match_info.get('addon')) if not addon: raise RuntimeError("Addon not exists") @@ -64,7 +64,7 @@ class APIAddons(CoreSysAttributes): async def list(self, request): """Return all addons / repositories .""" data_addons = [] - for 
addon in self._addons.list_addons: + for addon in self.sys_addons.list_addons: data_addons.append({ ATTR_NAME: addon.name, ATTR_SLUG: addon.slug, @@ -81,7 +81,7 @@ class APIAddons(CoreSysAttributes): }) data_repositories = [] - for repository in self._addons.list_repositories: + for repository in self.sys_addons.list_repositories: data_repositories.append({ ATTR_SLUG: repository.slug, ATTR_NAME: repository.name, @@ -98,7 +98,7 @@ class APIAddons(CoreSysAttributes): @api_process async def reload(self, request): """Reload all addons data.""" - await asyncio.shield(self._addons.reload()) + await asyncio.shield(self.sys_addons.reload()) return True @api_process diff --git a/hassio/api/discovery.py b/hassio/api/discovery.py index fb028fcd9..7ab5f8d2a 100644 --- a/hassio/api/discovery.py +++ b/hassio/api/discovery.py @@ -21,7 +21,7 @@ class APIDiscovery(CoreSysAttributes): def _extract_message(self, request): """Extract discovery message from URL.""" - message = self._services.discovery.get(request.match_info.get('uuid')) + message = self.sys_discovery.get(request.match_info.get('uuid')) if not message: raise RuntimeError("Discovery message not found") return message @@ -30,7 +30,7 @@ class APIDiscovery(CoreSysAttributes): async def list(self, request): """Show register services.""" discovery = [] - for message in self._services.discovery.list_messages: + for message in self.sys_discovery.list_messages: discovery.append({ ATTR_PROVIDER: message.provider, ATTR_UUID: message.uuid, @@ -45,7 +45,7 @@ class APIDiscovery(CoreSysAttributes): async def set_discovery(self, request): """Write data into a discovery pipeline.""" body = await api_validate(SCHEMA_DISCOVERY, request) - message = self._services.discovery.send( + message = self.sys_discovery.send( provider=request[REQUEST_FROM], **body) return {ATTR_UUID: message.uuid} @@ -68,5 +68,5 @@ class APIDiscovery(CoreSysAttributes): """Delete data into a discovery message.""" message = self._extract_message(request) - self._services.discovery.remove(message) + self.sys_discovery.remove(message) return True diff --git a/hassio/api/hardware.py b/hassio/api/hardware.py index e5e22a35f..626f6d434 100644 --- a/hassio/api/hardware.py +++ b/hassio/api/hardware.py @@ -16,11 +16,11 @@ class APIHardware(CoreSysAttributes): async def info(self, request): """Show hardware info.""" return { - ATTR_SERIAL: list(self._hardware.serial_devices), - ATTR_INPUT: list(self._hardware.input_devices), - ATTR_DISK: list(self._hardware.disk_devices), - ATTR_GPIO: list(self._hardware.gpio_devices), - ATTR_AUDIO: self._hardware.audio_devices, + ATTR_SERIAL: list(self.sys_hardware.serial_devices), + ATTR_INPUT: list(self.sys_hardware.input_devices), + ATTR_DISK: list(self.sys_hardware.disk_devices), + ATTR_GPIO: list(self.sys_hardware.gpio_devices), + ATTR_AUDIO: self.sys_hardware.audio_devices, } @api_process @@ -28,7 +28,7 @@ class APIHardware(CoreSysAttributes): """Show ALSA audio devices.""" return { ATTR_AUDIO: { - ATTR_INPUT: self._alsa.input_devices, - ATTR_OUTPUT: self._alsa.output_devices, + ATTR_INPUT: self.sys_alsa.input_devices, + ATTR_OUTPUT: self.sys_alsa.output_devices, } } diff --git a/hassio/api/homeassistant.py b/hassio/api/homeassistant.py index 2bbe54a68..8e9c07359 100644 --- a/hassio/api/homeassistant.py +++ b/hassio/api/homeassistant.py @@ -43,15 +43,15 @@ class APIHomeAssistant(CoreSysAttributes): async def info(self, request): """Return host information.""" return { - ATTR_VERSION: self._homeassistant.version, - ATTR_LAST_VERSION: 
self._homeassistant.last_version, - ATTR_IMAGE: self._homeassistant.image, - ATTR_CUSTOM: self._homeassistant.is_custom_image, - ATTR_BOOT: self._homeassistant.boot, - ATTR_PORT: self._homeassistant.api_port, - ATTR_SSL: self._homeassistant.api_ssl, - ATTR_WATCHDOG: self._homeassistant.watchdog, - ATTR_WAIT_BOOT: self._homeassistant.wait_boot, + ATTR_VERSION: self.sys_homeassistant.version, + ATTR_LAST_VERSION: self.sys_homeassistant.last_version, + ATTR_IMAGE: self.sys_homeassistant.image, + ATTR_CUSTOM: self.sys_homeassistant.is_custom_image, + ATTR_BOOT: self.sys_homeassistant.boot, + ATTR_PORT: self.sys_homeassistant.api_port, + ATTR_SSL: self.sys_homeassistant.api_ssl, + ATTR_WATCHDOG: self.sys_homeassistant.watchdog, + ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot, } @api_process @@ -60,34 +60,34 @@ class APIHomeAssistant(CoreSysAttributes): body = await api_validate(SCHEMA_OPTIONS, request) if ATTR_IMAGE in body and ATTR_LAST_VERSION in body: - self._homeassistant.image = body[ATTR_IMAGE] - self._homeassistant.last_version = body[ATTR_LAST_VERSION] + self.sys_homeassistant.image = body[ATTR_IMAGE] + self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION] if ATTR_BOOT in body: - self._homeassistant.boot = body[ATTR_BOOT] + self.sys_homeassistant.boot = body[ATTR_BOOT] if ATTR_PORT in body: - self._homeassistant.api_port = body[ATTR_PORT] + self.sys_homeassistant.api_port = body[ATTR_PORT] if ATTR_PASSWORD in body: - self._homeassistant.api_password = body[ATTR_PASSWORD] + self.sys_homeassistant.api_password = body[ATTR_PASSWORD] if ATTR_SSL in body: - self._homeassistant.api_ssl = body[ATTR_SSL] + self.sys_homeassistant.api_ssl = body[ATTR_SSL] if ATTR_WATCHDOG in body: - self._homeassistant.watchdog = body[ATTR_WATCHDOG] + self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG] if ATTR_WAIT_BOOT in body: - self._homeassistant.wait_boot = body[ATTR_WAIT_BOOT] + self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT] - self._homeassistant.save_data() + self.sys_homeassistant.save_data() return True @api_process async def stats(self, request): """Return resource information.""" - stats = await self._homeassistant.stats() + stats = await self.sys_homeassistant.stats() if not stats: raise RuntimeError("No stats available") @@ -105,38 +105,38 @@ class APIHomeAssistant(CoreSysAttributes): async def update(self, request): """Update homeassistant.""" body = await api_validate(SCHEMA_VERSION, request) - version = body.get(ATTR_VERSION, self._homeassistant.last_version) + version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version) - if version == self._homeassistant.version: + if version == self.sys_homeassistant.version: raise RuntimeError("Version {} is already in use".format(version)) return await asyncio.shield( - self._homeassistant.update(version)) + self.sys_homeassistant.update(version)) @api_process def stop(self, request): """Stop homeassistant.""" - return asyncio.shield(self._homeassistant.stop()) + return asyncio.shield(self.sys_homeassistant.stop()) @api_process def start(self, request): """Start homeassistant.""" - return asyncio.shield(self._homeassistant.start()) + return asyncio.shield(self.sys_homeassistant.start()) @api_process def restart(self, request): """Restart homeassistant.""" - return asyncio.shield(self._homeassistant.restart()) + return asyncio.shield(self.sys_homeassistant.restart()) @api_process_raw(CONTENT_TYPE_BINARY) def logs(self, request): """Return homeassistant docker logs.""" - return self._homeassistant.logs() + return 
self.sys_homeassistant.logs() @api_process async def check(self, request): """Check config of homeassistant.""" - result = await self._homeassistant.check_config() + result = await self.sys_homeassistant.check_config() if not result.valid: raise RuntimeError(result.log) diff --git a/hassio/api/proxy.py b/hassio/api/proxy.py index 0701d260b..89336ce53 100644 --- a/hassio/api/proxy.py +++ b/hassio/api/proxy.py @@ -20,7 +20,7 @@ class APIProxy(CoreSysAttributes): def _check_access(self, request): """Check the Hass.io token.""" hassio_token = request.headers.get(HEADER_HA_ACCESS) - addon = self._addons.from_uuid(hassio_token) + addon = self.sys_addons.from_uuid(hassio_token) if not addon: _LOGGER.warning("Unknown Home-Assistant API access!") @@ -29,7 +29,7 @@ class APIProxy(CoreSysAttributes): async def _api_client(self, request, path, timeout=300): """Return a client request with proxy origin for Home-Assistant.""" - url = f"{self._homeassistant.api_url}/api/{path}" + url = f"{self.sys_homeassistant.api_url}/api/{path}" try: data = None @@ -45,8 +45,10 @@ class APIProxy(CoreSysAttributes): headers.update({CONTENT_TYPE: request.content_type}) # need api password? - if self._homeassistant.api_password: - headers = {HEADER_HA_ACCESS: self._homeassistant.api_password} + if self.sys_homeassistant.api_password: + headers = { + HEADER_HA_ACCESS: self.sys_homeassistant.api_password, + } # reset headers if not headers: @@ -114,10 +116,10 @@ class APIProxy(CoreSysAttributes): async def _websocket_client(self): """Initialize a websocket api connection.""" - url = f"{self._homeassistant.api_url}/api/websocket" + url = f"{self.sys_homeassistant.api_url}/api/websocket" try: - client = await self._websession_ssl.ws_connect( + client = await self.sys_websession_ssl.ws_connect( url, heartbeat=60, verify_ssl=False) # handle authentication @@ -128,7 +130,7 @@ class APIProxy(CoreSysAttributes): elif data.get('type') == 'auth_required': await client.send_json({ 'type': 'auth', - 'api_password': self._homeassistant.api_password, + 'api_password': self.sys_homeassistant.api_password, }) _LOGGER.error("Authentication to Home-Assistant websocket") @@ -150,13 +152,13 @@ class APIProxy(CoreSysAttributes): try: await server.send_json({ 'type': 'auth_required', - 'ha_version': self._homeassistant.version, + 'ha_version': self.sys_homeassistant.version, }) # Check API access response = await server.receive_json() hassio_token = response.get('api_password') - addon = self._addons.from_uuid(hassio_token) + addon = self.sys_addons.from_uuid(hassio_token) if not addon: _LOGGER.warning("Unauthorized websocket access!") @@ -165,7 +167,7 @@ class APIProxy(CoreSysAttributes): await server.send_json({ 'type': 'auth_ok', - 'ha_version': self._homeassistant.version, + 'ha_version': self.sys_homeassistant.version, }) except (RuntimeError, ValueError) as err: _LOGGER.error("Can't initialize handshake: %s", err) @@ -180,10 +182,10 @@ class APIProxy(CoreSysAttributes): server_read = None while not server.closed and not client.closed: if not client_read: - client_read = asyncio.ensure_future( + client_read = self.sys_create_task( client.receive_str()) if not server_read: - server_read = asyncio.ensure_future( + server_read = self.sys_create_task( server.receive_str()) # wait until data need to be processed diff --git a/hassio/api/security.py b/hassio/api/security.py index de29f227e..a07ae99ef 100644 --- a/hassio/api/security.py +++ b/hassio/api/security.py @@ -42,13 +42,13 @@ class SecurityMiddleware(CoreSysAttributes): return await 
handler(request) # Home-Assistant - if hassio_token == self._homeassistant.uuid: + if hassio_token == self.sys_homeassistant.uuid: _LOGGER.debug("%s access from Home-Assistant", request.path) request[REQUEST_FROM] = 'homeassistant' return await handler(request) # Add-on - addon = self._addons.from_uuid(hassio_token) + addon = self.sys_addons.from_uuid(hassio_token) if addon: _LOGGER.info("%s access from %s", request.path, addon.slug) request[REQUEST_FROM] = addon.slug diff --git a/hassio/api/services.py b/hassio/api/services.py index 9d3e0b651..b14e96fee 100644 --- a/hassio/api/services.py +++ b/hassio/api/services.py @@ -11,7 +11,7 @@ class APIServices(CoreSysAttributes): def _extract_service(self, request): """Return service and if not exists trow a exception.""" - service = self._services.get(request.match_info.get('service')) + service = self.sys_services.get(request.match_info.get('service')) if not service: raise RuntimeError("Service not exists") @@ -21,7 +21,7 @@ class APIServices(CoreSysAttributes): async def list(self, request): """Show register services.""" services = [] - for service in self._services.list_services: + for service in self.sys_services.list_services: services.append({ ATTR_SLUG: service.slug, ATTR_AVAILABLE: service.enabled, diff --git a/hassio/api/snapshots.py b/hassio/api/snapshots.py index 9f4922649..ccada9a4a 100644 --- a/hassio/api/snapshots.py +++ b/hassio/api/snapshots.py @@ -50,7 +50,7 @@ class APISnapshots(CoreSysAttributes): def _extract_snapshot(self, request): """Return addon and if not exists trow a exception.""" - snapshot = self._snapshots.get(request.match_info.get('snapshot')) + snapshot = self.sys_snapshots.get(request.match_info.get('snapshot')) if not snapshot: raise RuntimeError("Snapshot not exists") return snapshot @@ -59,7 +59,7 @@ class APISnapshots(CoreSysAttributes): async def list(self, request): """Return snapshot list.""" data_snapshots = [] - for snapshot in self._snapshots.list_snapshots: + for snapshot in self.sys_snapshots.list_snapshots: data_snapshots.append({ ATTR_SLUG: snapshot.slug, ATTR_NAME: snapshot.name, @@ -75,7 +75,7 @@ class APISnapshots(CoreSysAttributes): @api_process async def reload(self, request): """Reload snapshot list.""" - await asyncio.shield(self._snapshots.reload()) + await asyncio.shield(self.sys_snapshots.reload()) return True @api_process @@ -110,7 +110,7 @@ class APISnapshots(CoreSysAttributes): """Full-Snapshot a snapshot.""" body = await api_validate(SCHEMA_SNAPSHOT_FULL, request) snapshot = await asyncio.shield( - self._snapshots.do_snapshot_full(**body)) + self.sys_snapshots.do_snapshot_full(**body)) if snapshot: return {ATTR_SLUG: snapshot.slug} @@ -121,7 +121,7 @@ class APISnapshots(CoreSysAttributes): """Partial-Snapshot a snapshot.""" body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request) snapshot = await asyncio.shield( - self._snapshots.do_snapshot_partial(**body)) + self.sys_snapshots.do_snapshot_partial(**body)) if snapshot: return {ATTR_SLUG: snapshot.slug} @@ -134,7 +134,7 @@ class APISnapshots(CoreSysAttributes): body = await api_validate(SCHEMA_RESTORE_FULL, request) return await asyncio.shield( - self._snapshots.do_restore_full(snapshot, **body)) + self.sys_snapshots.do_restore_full(snapshot, **body)) @api_process async def restore_partial(self, request): @@ -143,13 +143,13 @@ class APISnapshots(CoreSysAttributes): body = await api_validate(SCHEMA_RESTORE_PARTIAL, request) return await asyncio.shield( - self._snapshots.do_restore_partial(snapshot, **body)) + 
self.sys_snapshots.do_restore_partial(snapshot, **body)) @api_process async def remove(self, request): """Remove a snapshot.""" snapshot = self._extract_snapshot(request) - return self._snapshots.remove(snapshot) + return self.sys_snapshots.remove(snapshot) async def download(self, request): """Download a snapshot file.""" @@ -163,7 +163,7 @@ class APISnapshots(CoreSysAttributes): @api_process async def upload(self, request): """Upload a snapshot file.""" - with TemporaryDirectory(dir=str(self._config.path_tmp)) as temp_dir: + with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir: tar_file = Path(temp_dir, f"snapshot.tar") try: @@ -179,7 +179,7 @@ class APISnapshots(CoreSysAttributes): return False snapshot = await asyncio.shield( - self._snapshots.import_snapshot(tar_file)) + self.sys_snapshots.import_snapshot(tar_file)) if snapshot: return {ATTR_SLUG: snapshot.slug} diff --git a/hassio/api/supervisor.py b/hassio/api/supervisor.py index 788401aae..176401106 100644 --- a/hassio/api/supervisor.py +++ b/hassio/api/supervisor.py @@ -41,7 +41,7 @@ class APISupervisor(CoreSysAttributes): async def info(self, request): """Return host information.""" list_addons = [] - for addon in self._addons.list_addons: + for addon in self.sys_addons.list_addons: if addon.is_installed: list_addons.append({ ATTR_NAME: addon.name, @@ -57,13 +57,13 @@ class APISupervisor(CoreSysAttributes): return { ATTR_VERSION: HASSIO_VERSION, - ATTR_LAST_VERSION: self._updater.version_hassio, - ATTR_CHANNEL: self._updater.channel, - ATTR_ARCH: self._arch, - ATTR_WAIT_BOOT: self._config.wait_boot, - ATTR_TIMEZONE: self._config.timezone, + ATTR_LAST_VERSION: self.sys_updater.version_hassio, + ATTR_CHANNEL: self.sys_updater.channel, + ATTR_ARCH: self.sys_arch, + ATTR_WAIT_BOOT: self.sys_config.wait_boot, + ATTR_TIMEZONE: self.sys_config.timezone, ATTR_ADDONS: list_addons, - ATTR_ADDONS_REPOSITORIES: self._config.addons_repositories, + ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories, } @api_process @@ -72,26 +72,26 @@ class APISupervisor(CoreSysAttributes): body = await api_validate(SCHEMA_OPTIONS, request) if ATTR_CHANNEL in body: - self._updater.channel = body[ATTR_CHANNEL] + self.sys_updater.channel = body[ATTR_CHANNEL] if ATTR_TIMEZONE in body: - self._config.timezone = body[ATTR_TIMEZONE] + self.sys_config.timezone = body[ATTR_TIMEZONE] if ATTR_WAIT_BOOT in body: - self._config.wait_boot = body[ATTR_WAIT_BOOT] + self.sys_config.wait_boot = body[ATTR_WAIT_BOOT] if ATTR_ADDONS_REPOSITORIES in body: new = set(body[ATTR_ADDONS_REPOSITORIES]) - await asyncio.shield(self._addons.load_repositories(new)) + await asyncio.shield(self.sys_addons.load_repositories(new)) - self._updater.save_data() - self._config.save_data() + self.sys_updater.save_data() + self.sys_config.save_data() return True @api_process async def stats(self, request): """Return resource information.""" - stats = await self._supervisor.stats() + stats = await self.sys_supervisor.stats() if not stats: raise RuntimeError("No stats available") @@ -109,19 +109,19 @@ class APISupervisor(CoreSysAttributes): async def update(self, request): """Update supervisor OS.""" body = await api_validate(SCHEMA_VERSION, request) - version = body.get(ATTR_VERSION, self._updater.version_hassio) + version = body.get(ATTR_VERSION, self.sys_updater.version_hassio) - if version == self._supervisor.version: + if version == self.sys_supervisor.version: raise RuntimeError("Version {} is already in use".format(version)) return await asyncio.shield( - 
self._supervisor.update(version)) + self.sys_supervisor.update(version)) @api_process async def reload(self, request): """Reload addons, config ect.""" tasks = [ - self._updater.reload(), + self.sys_updater.reload(), ] results, _ = await asyncio.shield( asyncio.wait(tasks)) @@ -135,4 +135,4 @@ class APISupervisor(CoreSysAttributes): @api_process_raw(CONTENT_TYPE_BINARY) def logs(self, request): """Return supervisor docker logs.""" - return self._supervisor.logs() + return self.sys_supervisor.logs() diff --git a/hassio/bootstrap.py b/hassio/bootstrap.py index 8ce5f2f41..96afa4cc9 100644 --- a/hassio/bootstrap.py +++ b/hassio/bootstrap.py @@ -17,6 +17,7 @@ from .snapshots import SnapshotManager from .tasks import Tasks from .updater import Updater from .services import ServiceManager +from .services.discovery import Discovery from .host import AlsaAudio _LOGGER = logging.getLogger(__name__) @@ -36,6 +37,7 @@ def initialize_coresys(loop): coresys.snapshots = SnapshotManager(coresys) coresys.tasks = Tasks(coresys) coresys.services = ServiceManager(coresys) + coresys.discovery = Discovery(coresys) # bootstrap config initialize_system_data(coresys) diff --git a/hassio/core.py b/hassio/core.py index 06be7708d..ff04545f9 100644 --- a/hassio/core.py +++ b/hassio/core.py @@ -20,98 +20,98 @@ class HassIO(CoreSysAttributes): async def setup(self): """Setup HassIO orchestration.""" # update timezone - if self._config.timezone == 'UTC': - self._config.timezone = await fetch_timezone(self._websession) + if self.sys_config.timezone == 'UTC': + self.sys_config.timezone = await fetch_timezone(self._websession) # supervisor - await self._supervisor.load() + await self.sys_supervisor.load() # hostcontrol await self._host_control.load() # Load homeassistant - await self._homeassistant.load() + await self.sys_homeassistant.load() # Load addons - await self._addons.load() + await self.sys_addons.load() # rest api views - await self._api.load() + await self.sys_api.load() # load last available data - await self._updater.load() + await self.sys_updater.load() # load last available data - await self._snapshots.load() + await self.sys_snapshots.load() # load services - await self._services.load() + await self.sys_services.load() # start dns forwarding - self._loop.create_task(self._dns.start()) + self.sys_create_task(self.sys_dns.start()) # start addon mark as initialize - await self._addons.auto_boot(STARTUP_INITIALIZE) + await self.sys_addons.auto_boot(STARTUP_INITIALIZE) async def start(self): """Start HassIO orchestration.""" # on release channel, try update itself # on dev mode, only read new versions - if not self._dev and self._supervisor.need_update: - if await self._supervisor.update(): + if not self.sys_dev and self.sys_supervisor.need_update: + if await self.sys_supervisor.update(): return else: _LOGGER.info("Ignore Hass.io auto updates on dev channel") # start api - await self._api.start() - _LOGGER.info("Start API on %s", self._docker.network.supervisor) + await self.sys_api.start() + _LOGGER.info("Start API on %s", self.sys_docker.network.supervisor) try: # HomeAssistant is already running / supervisor have only reboot - if self._hardware.last_boot == self._config.last_boot: + if self.sys_hardware.last_boot == self.sys_config.last_boot: _LOGGER.info("Hass.io reboot detected") return # reset register services / discovery - self._services.reset() + self.sys_services.reset() # start addon mark as system - await self._addons.auto_boot(STARTUP_SYSTEM) + await self.sys_addons.auto_boot(STARTUP_SYSTEM) # 
start addon mark as services - await self._addons.auto_boot(STARTUP_SERVICES) + await self.sys_addons.auto_boot(STARTUP_SERVICES) # run HomeAssistant - if self._homeassistant.boot: - await self._homeassistant.start() + if self.sys_homeassistant.boot: + await self.sys_homeassistant.start() # start addon mark as application - await self._addons.auto_boot(STARTUP_APPLICATION) + await self.sys_addons.auto_boot(STARTUP_APPLICATION) # store new last boot - self._config.last_boot = self._hardware.last_boot - self._config.save_data() + self.sys_config.last_boot = self.sys_hardware.last_boot + self.sys_config.save_data() finally: # Add core tasks into scheduler - await self._tasks.load() + await self.sys_tasks.load() # If landingpage / run upgrade in background - if self._homeassistant.version == 'landingpage': - self._loop.create_task(self._homeassistant.install()) + if self.sys_homeassistant.version == 'landingpage': + self.sys_create_task(self.sys_homeassistant.install()) _LOGGER.info("Hass.io is up and running") async def stop(self): """Stop a running orchestration.""" # don't process scheduler anymore - self._scheduler.suspend = True + self.sys_scheduler.suspend = True # process async stop tasks await asyncio.wait([ - self._api.stop(), - self._dns.stop(), - self._websession.close(), - self._websession_ssl.close() + self.sys_api.stop(), + self.sys_dns.stop(), + self.sys_websession.close(), + self.sys_websession_ssl.close() ]) diff --git a/hassio/coresys.py b/hassio/coresys.py index f17889d3a..70e239e43 100644 --- a/hassio/coresys.py +++ b/hassio/coresys.py @@ -7,8 +7,8 @@ from .config import CoreConfig from .docker import DockerAPI from .misc.dns import DNSForward from .misc.hardware import Hardware -from .misc.host_control import HostControl from .misc.scheduler import Scheduler +from .misc.systemd import Systemd class CoreSys(object): @@ -29,9 +29,9 @@ class CoreSys(object): self._config = CoreConfig() self._hardware = Hardware() self._docker = DockerAPI() + self._systemd = Systemd() self._scheduler = Scheduler(loop=loop) self._dns = DNSForward(loop=loop) - self._host_control = HostControl(loop=loop) # Internal objects pointers self._homeassistant = None @@ -42,6 +42,7 @@ class CoreSys(object): self._snapshots = None self._tasks = None self._services = None + self._discovery = None self._alsa = None @property @@ -104,9 +105,9 @@ class CoreSys(object): return self._dns @property - def host_control(self): - """Return HostControl object.""" - return self._host_control + def systemd(self): + """Return systemd object.""" + return self._systemd @property def homeassistant(self): @@ -204,6 +205,18 @@ class CoreSys(object): raise RuntimeError("Services already set!") self._services = value + @property + def discovery(self): + """Return ServiceManager object.""" + return self._discovery + + @discovery.setter + def discovery(self, value): + """Set a Discovery object.""" + if self._discovery: + raise RuntimeError("Discovery already set!") + self._discovery = value + @property def alsa(self): """Return ALSA Audio object.""" @@ -216,6 +229,14 @@ class CoreSys(object): raise RuntimeError("ALSA already set!") self._alsa = value + async def run_in_executor(self, funct, *args): + """Wrapper for executor pool.""" + return self._loop.run_in_executor(None, funct, *args) + + async def create_task(self, coroutine): + """Wrapper for async task.""" + return self._loop.create_task(coroutine) + class CoreSysAttributes(object): """Inheret basic CoreSysAttributes.""" @@ -224,6 +245,6 @@ class CoreSysAttributes(object): 
def __getattr__(self, name): """Mapping to coresys.""" - if hasattr(self.coresys, name[1:]): - return getattr(self.coresys, name[1:]) - raise AttributeError(f"Can't find {name} on {self.__class__}") + if name.startswith("_sys_") and hasattr(self.coresys, name[5:]): + return getattr(self.coresys, name[5:]) + raise AttributeError() diff --git a/hassio/docker/addon.py b/hassio/docker/addon.py index 9dc363235..960923c75 100644 --- a/hassio/docker/addon.py +++ b/hassio/docker/addon.py @@ -28,7 +28,7 @@ class DockerAddon(DockerInterface): @property def addon(self): """Return addon of docker image.""" - return self._addons.get(self._id) + return self.sys_addons.get(self._id) @property def image(self): @@ -52,7 +52,7 @@ class DockerAddon(DockerInterface): """Return arch of docker image.""" if not self.addon.legacy: return super().arch - return self._arch + return self.sys_arch @property def name(self): @@ -85,7 +85,7 @@ class DockerAddon(DockerInterface): return { **addon_env, - ENV_TIME: self._config.timezone, + ENV_TIME: self.sys_config.timezone, ENV_TOKEN: self.addon.uuid, } @@ -100,7 +100,7 @@ class DockerAddon(DockerInterface): # Auto mapping UART devices if self.addon.auto_uart: - for device in self._hardware.serial_devices: + for device in self.sys_hardware.serial_devices: devices.append(f"{device}:{device}:rwm") # Return None if no devices is present @@ -149,8 +149,8 @@ class DockerAddon(DockerInterface): def network_mapping(self): """Return hosts mapping.""" return { - 'homeassistant': self._docker.network.gateway, - 'hassio': self._docker.network.supervisor, + 'homeassistant': self.sys_docker.network.gateway, + 'hassio': self.sys_docker.network.supervisor, } @property @@ -173,31 +173,31 @@ class DockerAddon(DockerInterface): # setup config mappings if MAP_CONFIG in addon_mapping: volumes.update({ - str(self._config.path_extern_config): { + str(self.sys_config.path_extern_config): { 'bind': "/config", 'mode': addon_mapping[MAP_CONFIG] }}) if MAP_SSL in addon_mapping: volumes.update({ - str(self._config.path_extern_ssl): { + str(self.sys_config.path_extern_ssl): { 'bind': "/ssl", 'mode': addon_mapping[MAP_SSL] }}) if MAP_ADDONS in addon_mapping: volumes.update({ - str(self._config.path_extern_addons_local): { + str(self.sys_config.path_extern_addons_local): { 'bind': "/addons", 'mode': addon_mapping[MAP_ADDONS] }}) if MAP_BACKUP in addon_mapping: volumes.update({ - str(self._config.path_extern_backup): { + str(self.sys_config.path_extern_backup): { 'bind': "/backup", 'mode': addon_mapping[MAP_BACKUP] }}) if MAP_SHARE in addon_mapping: volumes.update({ - str(self._config.path_extern_share): { + str(self.sys_config.path_extern_share): { 'bind': "/share", 'mode': addon_mapping[MAP_SHARE] }}) @@ -239,7 +239,7 @@ class DockerAddon(DockerInterface): # cleanup self._stop() - ret = self._docker.run( + ret = self.sys_docker.run( self.image, name=self.name, hostname=self.hostname, @@ -283,7 +283,7 @@ class DockerAddon(DockerInterface): _LOGGER.info("Start build %s:%s", self.image, tag) try: - image, log = self._docker.images.build( + image, log = self.sys_docker.images.build( **build_env.get_docker_args(tag)) _LOGGER.debug("Build %s:%s done: %s", self.image, tag, log) @@ -302,7 +302,7 @@ class DockerAddon(DockerInterface): @process_lock def export_image(self, path): """Export current images into a tar file.""" - return self._loop.run_in_executor(None, self._export_image, path) + return self.sys_run_in_executor(self._export_image, path) def _export_image(self, tar_file): """Export current images into 
a tar file. @@ -310,7 +310,7 @@ class DockerAddon(DockerInterface): Need run inside executor. """ try: - image = self._docker.api.get_image(self.image) + image = self.sys_docker.api.get_image(self.image) except docker.errors.DockerException as err: _LOGGER.error("Can't fetch image %s: %s", self.image, err) return False @@ -330,7 +330,7 @@ class DockerAddon(DockerInterface): @process_lock def import_image(self, path, tag): """Import a tar file as image.""" - return self._loop.run_in_executor(None, self._import_image, path, tag) + return self.sys_run_in_executor(self._import_image, path, tag) def _import_image(self, tar_file, tag): """Import a tar file as image. @@ -339,9 +339,9 @@ class DockerAddon(DockerInterface): """ try: with tar_file.open("rb") as read_tar: - self._docker.api.load_image(read_tar, quiet=True) + self.sys_docker.api.load_image(read_tar, quiet=True) - image = self._docker.images.get(self.image) + image = self.sys_docker.images.get(self.image) image.tag(self.image, tag=tag) except (docker.errors.DockerException, OSError) as err: _LOGGER.error("Can't import image %s: %s", self.image, err) @@ -355,7 +355,7 @@ class DockerAddon(DockerInterface): @process_lock def write_stdin(self, data): """Write to add-on stdin.""" - return self._loop.run_in_executor(None, self._write_stdin, data) + return self.sys_run_in_executor(self._write_stdin, data) def _write_stdin(self, data): """Write to add-on stdin. @@ -367,7 +367,7 @@ class DockerAddon(DockerInterface): try: # load needed docker objects - container = self._docker.containers.get(self.name) + container = self.sys_docker.containers.get(self.name) socket = container.attach_socket(params={'stdin': 1, 'stream': 1}) except docker.errors.DockerException as err: _LOGGER.error("Can't attach to %s stdin: %s", self.name, err) diff --git a/hassio/docker/homeassistant.py b/hassio/docker/homeassistant.py index 1ac05b18c..036727bc6 100644 --- a/hassio/docker/homeassistant.py +++ b/hassio/docker/homeassistant.py @@ -24,7 +24,7 @@ class DockerHomeAssistant(DockerInterface): @property def image(self): """Return name of docker image.""" - return self._homeassistant.image + return self.sys_homeassistant.image @property def name(self): @@ -35,7 +35,7 @@ class DockerHomeAssistant(DockerInterface): def devices(self): """Create list of special device to map into docker.""" devices = [] - for device in self._hardware.serial_devices: + for device in self.sys_hardware.serial_devices: devices.append(f"{device}:{device}:rwm") return devices or None @@ -50,7 +50,7 @@ class DockerHomeAssistant(DockerInterface): # cleanup self._stop() - ret = self._docker.run( + ret = self.sys_docker.run( self.image, name=self.name, hostname=self.name, @@ -60,16 +60,16 @@ class DockerHomeAssistant(DockerInterface): devices=self.devices, network_mode='host', environment={ - 'HASSIO': self._docker.network.supervisor, - ENV_TIME: self._config.timezone, - ENV_TOKEN: self._homeassistant.uuid, + 'HASSIO': self.sys_docker.network.supervisor, + ENV_TIME: self.sys_config.timezone, + ENV_TOKEN: self.sys_homeassistant.uuid, }, volumes={ - str(self._config.path_extern_config): + str(self.sys_config.path_extern_config): {'bind': '/config', 'mode': 'rw'}, - str(self._config.path_extern_ssl): + str(self.sys_config.path_extern_ssl): {'bind': '/ssl', 'mode': 'ro'}, - str(self._config.path_extern_share): + str(self.sys_config.path_extern_share): {'bind': '/share', 'mode': 'rw'}, } ) @@ -85,26 +85,26 @@ class DockerHomeAssistant(DockerInterface): Need run inside executor. 
""" - return self._docker.run_command( + return self.sys_docker.run_command( self.image, command, detach=True, stdout=True, stderr=True, environment={ - ENV_TIME: self._config.timezone, + ENV_TIME: self.sys_config.timezone, }, volumes={ - str(self._config.path_extern_config): + str(self.sys_config.path_extern_config): {'bind': '/config', 'mode': 'ro'}, - str(self._config.path_extern_ssl): + str(self.sys_config.path_extern_ssl): {'bind': '/ssl', 'mode': 'ro'}, } ) def is_initialize(self): """Return True if docker container exists.""" - return self._loop.run_in_executor(None, self._is_initialize) + return self.sys_run_in_executor(self._is_initialize) def _is_initialize(self): """Return True if docker container exists. @@ -112,7 +112,7 @@ class DockerHomeAssistant(DockerInterface): Need run inside executor. """ try: - self._docker.containers.get(self.name) + self.sys_docker.containers.get(self.name) except docker.errors.DockerException: return False diff --git a/hassio/docker/interface.py b/hassio/docker/interface.py index f14ddae46..4f063f0bf 100644 --- a/hassio/docker/interface.py +++ b/hassio/docker/interface.py @@ -61,7 +61,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def install(self, tag): """Pull docker image.""" - return self._loop.run_in_executor(None, self._install, tag) + return self.sys_run_in_executor(self._install, tag) def _install(self, tag): """Pull docker image. @@ -70,7 +70,7 @@ class DockerInterface(CoreSysAttributes): """ try: _LOGGER.info("Pull image %s tag %s.", self.image, tag) - image = self._docker.images.pull(f"{self.image}:{tag}") + image = self.sys_docker.images.pull(f"{self.image}:{tag}") image.tag(self.image, tag='latest') self._meta = image.attrs @@ -83,7 +83,7 @@ class DockerInterface(CoreSysAttributes): def exists(self): """Return True if docker image exists in local repo.""" - return self._loop.run_in_executor(None, self._exists) + return self.sys_run_in_executor(self._exists) def _exists(self): """Return True if docker image exists in local repo. @@ -91,7 +91,7 @@ class DockerInterface(CoreSysAttributes): Need run inside executor. """ try: - image = self._docker.images.get(self.image) + image = self.sys_docker.images.get(self.image) assert f"{self.image}:{self.version}" in image.tags except (docker.errors.DockerException, AssertionError): return False @@ -103,7 +103,7 @@ class DockerInterface(CoreSysAttributes): Return a Future. """ - return self._loop.run_in_executor(None, self._is_running) + return self.sys_run_in_executor(self._is_running) def _is_running(self): """Return True if docker is Running. @@ -111,8 +111,8 @@ class DockerInterface(CoreSysAttributes): Need run inside executor. """ try: - container = self._docker.containers.get(self.name) - image = self._docker.images.get(self.image) + container = self.sys_docker.containers.get(self.name) + image = self.sys_docker.images.get(self.image) except docker.errors.DockerException: return False @@ -129,7 +129,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def attach(self): """Attach to running docker container.""" - return self._loop.run_in_executor(None, self._attach) + return self.sys_run_in_executor(self._attach) def _attach(self): """Attach to running docker container. 
@@ -138,9 +138,9 @@ class DockerInterface(CoreSysAttributes): """ try: if self.image: - self._meta = self._docker.images.get(self.image).attrs + self._meta = self.sys_docker.images.get(self.image).attrs else: - self._meta = self._docker.containers.get(self.name).attrs + self._meta = self.sys_docker.containers.get(self.name).attrs except docker.errors.DockerException: return False @@ -152,7 +152,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def run(self): """Run docker image.""" - return self._loop.run_in_executor(None, self._run) + return self.sys_run_in_executor(self._run) def _run(self): """Run docker image. @@ -164,7 +164,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def stop(self): """Stop/remove docker container.""" - return self._loop.run_in_executor(None, self._stop) + return self.sys_run_in_executor(self._stop) def _stop(self): """Stop/remove and remove docker container. @@ -172,7 +172,7 @@ class DockerInterface(CoreSysAttributes): Need run inside executor. """ try: - container = self._docker.containers.get(self.name) + container = self.sys_docker.containers.get(self.name) except docker.errors.DockerException: return False @@ -190,7 +190,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def remove(self): """Remove docker images.""" - return self._loop.run_in_executor(None, self._remove) + return self.sys_run_in_executor(self._remove) def _remove(self): """remove docker images. @@ -205,11 +205,11 @@ class DockerInterface(CoreSysAttributes): try: with suppress(docker.errors.ImageNotFound): - self._docker.images.remove( + self.sys_docker.images.remove( image=f"{self.image}:latest", force=True) with suppress(docker.errors.ImageNotFound): - self._docker.images.remove( + self.sys_docker.images.remove( image=f"{self.image}:{self.version}", force=True) except docker.errors.DockerException as err: @@ -222,7 +222,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def update(self, tag): """Update a docker image.""" - return self._loop.run_in_executor(None, self._update, tag) + return self.sys_run_in_executor(self._update, tag) def _update(self, tag): """Update a docker image. @@ -247,7 +247,7 @@ class DockerInterface(CoreSysAttributes): Return a Future. """ - return self._loop.run_in_executor(None, self._logs) + return self.sys_run_in_executor(self._logs) def _logs(self): """Return docker logs of container. @@ -255,7 +255,7 @@ class DockerInterface(CoreSysAttributes): Need run inside executor. """ try: - container = self._docker.containers.get(self.name) + container = self.sys_docker.containers.get(self.name) except docker.errors.DockerException: return b"" @@ -267,7 +267,7 @@ class DockerInterface(CoreSysAttributes): @process_lock def cleanup(self): """Check if old version exists and cleanup.""" - return self._loop.run_in_executor(None, self._cleanup) + return self.sys_run_in_executor(self._cleanup) def _cleanup(self): """Check if old version exists and cleanup. @@ -275,25 +275,25 @@ class DockerInterface(CoreSysAttributes): Need run inside executor. 
""" try: - latest = self._docker.images.get(self.image) + latest = self.sys_docker.images.get(self.image) except docker.errors.DockerException: _LOGGER.warning("Can't find %s for cleanup", self.image) return False - for image in self._docker.images.list(name=self.image): + for image in self.sys_docker.images.list(name=self.image): if latest.id == image.id: continue with suppress(docker.errors.DockerException): _LOGGER.info("Cleanup docker images: %s", image.tags) - self._docker.images.remove(image.id, force=True) + self.sys_docker.images.remove(image.id, force=True) return True @process_lock def execute_command(self, command): """Create a temporary container and run command.""" - return self._loop.run_in_executor(None, self._execute_command, command) + return self.sys_run_in_executor(self._execute_command, command) def _execute_command(self, command): """Create a temporary container and run command. @@ -304,7 +304,7 @@ class DockerInterface(CoreSysAttributes): def stats(self): """Read and return stats from container.""" - return self._loop.run_in_executor(None, self._stats) + return self.sys_run_in_executor(self._stats) def _stats(self): """Create a temporary container and run command. @@ -312,7 +312,7 @@ class DockerInterface(CoreSysAttributes): Need run inside executor. """ try: - container = self._docker.containers.get(self.name) + container = self.sys_docker.containers.get(self.name) except docker.errors.DockerException: return None diff --git a/hassio/docker/supervisor.py b/hassio/docker/supervisor.py index ae8c06ac7..1a93015e7 100644 --- a/hassio/docker/supervisor.py +++ b/hassio/docker/supervisor.py @@ -24,7 +24,7 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes): Need run inside executor. """ try: - container = self._docker.containers.get(self.name) + container = self.sys_docker.containers.get(self.name) except docker.errors.DockerException: return False @@ -33,9 +33,10 @@ class DockerSupervisor(DockerInterface, CoreSysAttributes): self.image, self.version) # if already attach - if container in self._docker.network.containers: + if container in self.sys_docker.network.containers: return True # attach to network - return self._docker.network.attach_container( - container, alias=['hassio'], ipv4=self._docker.network.supervisor) + return self.sys_docker.network.attach_container( + container, alias=['hassio'], + ipv4=self.sys_docker.network.supervisor) diff --git a/hassio/exceptions.py b/hassio/exceptions.py new file mode 100644 index 000000000..f4d4bea61 --- /dev/null +++ b/hassio/exceptions.py @@ -0,0 +1,15 @@ +"""Core Exceptions.""" + +class HassioError(Exception): + """Root exception.""" + pass + + +class HassioInternalError(HassioError): + """Internal Hass.io error they can't handle.""" + pass + + +class HassioNotSupportedError(HassioError): + """Function is not supported.""" + pass diff --git a/hassio/homeassistant.py b/hassio/homeassistant.py index ff49bfd35..c95b644bd 100644 --- a/hassio/homeassistant.py +++ b/hassio/homeassistant.py @@ -54,7 +54,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes): @property def api_ip(self): """Return IP of HomeAssistant instance.""" - return self._docker.network.gateway + return self.sys_docker.network.gateway @property def api_port(self): @@ -123,7 +123,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes): """Return last available version of homeassistant.""" if self.is_custom_image: return self._data.get(ATTR_LAST_VERSION) - return self._updater.version_homeassistant + return self.sys_updater.version_homeassistant 
     @last_version.setter
     def last_version(self, value):
@@ -189,7 +189,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         while True:
             # read homeassistant tag and install it
             if not self.last_version:
-                await self._updater.reload()
+                await self.sys_updater.reload()
 
             tag = self.last_version
             if tag and await self.instance.install(tag):
@@ -307,7 +307,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         try:
             # pylint: disable=bad-continuation
-            async with self._websession_ssl.get(
+            async with self.sys_websession_ssl.get(
                     url, headers=header, timeout=30) as request:
                 status = request.status
@@ -328,7 +328,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
         try:
             # pylint: disable=bad-continuation
-            async with self._websession_ssl.post(
+            async with self.sys_websession_ssl.post(
                     url, headers=header, timeout=30,
                     json=event_data) as request:
                 status = request.status
@@ -361,7 +361,7 @@ class HomeAssistant(JsonConfig, CoreSysAttributes):
                 pass
 
         while time.monotonic() - start_time < self.wait_boot:
-            if await self._loop.run_in_executor(None, check_port):
+            if await self.sys_run_in_executor(check_port):
                 _LOGGER.info("Detect a running Home-Assistant instance")
                 return True
             await asyncio.sleep(10)
diff --git a/hassio/host/alsa.py b/hassio/host/alsa.py
index 6baca457c..75a935192 100644
--- a/hassio/host/alsa.py
+++ b/hassio/host/alsa.py
@@ -42,7 +42,7 @@ class AlsaAudio(CoreSysAttributes):
 
     def _update_device(self):
         """Update Internal device DB."""
-        current_id = hash(frozenset(self._hardware.audio_devices))
+        current_id = hash(frozenset(self.sys_hardware.audio_devices))
 
         # Need rebuild?
         if current_id == self._cache:
@@ -57,7 +57,7 @@ class AlsaAudio(CoreSysAttributes):
         database = self._audio_database()
 
         # Process devices
-        for dev_id, dev_data in self._hardware.audio_devices.items():
+        for dev_id, dev_data in self.sys_hardware.audio_devices.items():
             for chan_id, chan_type in dev_data[ATTR_DEVICES].items():
                 alsa_id = f"{dev_id},{chan_id}"
                 dev_name = dev_data[ATTR_NAME]
@@ -73,7 +73,7 @@ class AlsaAudio(CoreSysAttributes):
 
                 # Use name from DB or a generic name
                 self._data[key][alsa_id] = database.get(
-                    self._machine, {}).get(
+                    self.sys_machine, {}).get(
                         dev_name, {}).get(alsa_id, f"{dev_name}: {chan_id}")
 
         self._cache = current_id
@@ -98,8 +98,8 @@ class AlsaAudio(CoreSysAttributes):
         # Init defaults
         if self._default is None:
             database = self._audio_database()
-            alsa_input = database.get(self._machine, {}).get(ATTR_INPUT)
-            alsa_output = database.get(self._machine, {}).get(ATTR_OUTPUT)
+            alsa_input = database.get(self.sys_machine, {}).get(ATTR_INPUT)
+            alsa_output = database.get(self.sys_machine, {}).get(ATTR_OUTPUT)
 
             self._default = DefaultConfig(alsa_input, alsa_output)
diff --git a/hassio/host/network.py b/hassio/misc/networkmanager.py
similarity index 100%
rename from hassio/host/network.py
rename to hassio/misc/networkmanager.py
diff --git a/hassio/host/rauc.py b/hassio/misc/rauc.py
similarity index 100%
rename from hassio/host/rauc.py
rename to hassio/misc/rauc.py
diff --git a/hassio/host/system.py b/hassio/misc/systemd.py
similarity index 59%
rename from hassio/host/system.py
rename to hassio/misc/systemd.py
index d4176992e..5be0a700f 100644
--- a/hassio/host/system.py
+++ b/hassio/misc/systemd.py
@@ -1,13 +1,16 @@
 """Interface to Systemd over dbus."""
+import logging
-
+from ..exceptions import HassioInternalError
 from ..utils.gdbus import DBus, DBusError
 
+_LOGGER = logging.getLogger(__name__)
+
 DBUS_NAME = 'org.freedesktop.systemd1'
 DBUS_OBJECT = '/org/freedesktop/systemd1/Manager'
 
 
-class System(object):
+class Systemd(object):
     """Systemd function handler."""
 
     def __init__(self):
@@ -19,14 +22,21 @@ class System(object):
         try:
             self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
         except DBusError:
+            _LOGGER.warning("Can't connect to systemd")
             return
 
-    async def reboot():
+    async def reboot(self):
         """Reboot host computer."""
         try:
             await self.dbus.Reboot()
         except DBusError:
             _LOGGER.error("Can't reboot host")
+            raise HassioInternalError() from None
 
-    async def shutdown():
+    async def shutdown(self):
         """Shutdown host computer."""
+        try:
+            await self.dbus.PowerOff()
+        except DBusError:
+            _LOGGER.error("Can't PowerOff host")
+            raise HassioInternalError() from None
diff --git a/hassio/services/__init__.py b/hassio/services/__init__.py
index 5f15940a1..addd172fa 100644
--- a/hassio/services/__init__.py
+++ b/hassio/services/__init__.py
@@ -2,7 +2,6 @@
 from .mqtt import MQTTService
 from .data import ServicesData
-from .discovery import Discovery
 from ..const import SERVICE_MQTT
 from ..coresys import CoreSysAttributes
 
@@ -19,7 +18,6 @@ class ServiceManager(CoreSysAttributes):
         """Initialize Services handler."""
         self.coresys = coresys
         self.data = ServicesData()
-        self.discovery = Discovery(coresys)
         self.services_obj = {}
 
     @property
@@ -37,9 +35,9 @@ class ServiceManager(CoreSysAttributes):
             self.services_obj[slug] = service(self.coresys)
 
         # Read exists discovery messages
-        self.discovery.load()
+        self.sys_discovery.load()
 
     def reset(self):
         """Reset available data."""
         self.data.reset_data()
-        self.discovery.load()
+        self.sys_discovery.load()
diff --git a/hassio/services/discovery.py b/hassio/services/discovery.py
index f0166589a..ac48f88d1 100644
--- a/hassio/services/discovery.py
+++ b/hassio/services/discovery.py
@@ -36,7 +36,7 @@ class Discovery(CoreSysAttributes):
         self._data.clear()
         self._data.extend(messages)
 
-        self._services.data.save_data()
+        self.sys_services.data.save_data()
 
     def get(self, uuid):
         """Return discovery message."""
@@ -45,7 +45,7 @@ class Discovery(CoreSysAttributes):
     @property
     def _data(self):
         """Return discovery data."""
-        return self._services.data.discovery
+        return self.sys_services.data.discovery
 
     @property
     def list_messages(self):
@@ -69,7 +69,7 @@ class Discovery(CoreSysAttributes):
         self.save()
 
         # send event to Home-Assistant
-        self._loop.create_task(self._homeassistant.send_event(
+        self.sys_create_task(self.sys_homeassistant.send_event(
             EVENT_DISCOVERY_ADD, {ATTR_UUID: message.uuid}))
 
         return message
@@ -80,7 +80,7 @@ class Discovery(CoreSysAttributes):
         self.save()
 
         # send event to Home-Assistant
-        self._loop.create_task(self._homeassistant.send_event(
+        self.sys_create_task(self.sys_homeassistant.send_event(
             EVENT_DISCOVERY_DEL, {ATTR_UUID: message.uuid}))
diff --git a/hassio/services/interface.py b/hassio/services/interface.py
index a3e13a387..4d4e79da0 100644
--- a/hassio/services/interface.py
+++ b/hassio/services/interface.py
@@ -37,7 +37,7 @@ class ServiceInterface(CoreSysAttributes):
 
     def save(self):
         """Save changes."""
-        self._services.data.save_data()
+        self.sys_services.data.save_data()
 
     def get_service_data(self):
         """Return the requested service data."""
diff --git a/hassio/services/mqtt.py b/hassio/services/mqtt.py
index fa4436622..6e2e519c8 100644
--- a/hassio/services/mqtt.py
+++ b/hassio/services/mqtt.py
@@ -21,7 +21,7 @@ class MQTTService(ServiceInterface):
     @property
     def _data(self):
         """Return data of this service."""
-        return self._services.data.mqtt
+        return self.sys_services.data.mqtt
 
     @property
     def schema(self):
@@ -66,7 +66,7 @@ class MQTTService(ServiceInterface):
             return True
 
         # discover mqtt to homeassistant
-        message = self._services.discovery.send(
+        message = self.sys_discovery.send(
             provider, SERVICE_MQTT, None, self.hass_config)
 
         self._data[ATTR_DISCOVERY_ID] = message.uuid
@@ -81,8 +81,8 @@ class MQTTService(ServiceInterface):
 
         discovery_id = self._data.get(ATTR_DISCOVERY_ID)
         if discovery_id:
-            self._services.discovery.remove(
-                self._services.discovery.get(discovery_id))
+            self.sys_discovery.remove(
+                self.sys_discovery.get(discovery_id))
 
         self._data.clear()
         self.save()
diff --git a/hassio/snapshots/__init__.py b/hassio/snapshots/__init__.py
index d9ec14769..bb346167f 100644
--- a/hassio/snapshots/__init__.py
+++ b/hassio/snapshots/__init__.py
@@ -35,7 +35,7 @@ class SnapshotManager(CoreSysAttributes):
         """Initialize a new snapshot object from name."""
         date_str = utcnow().isoformat()
         slug = create_slug(name, date_str)
-        tar_file = Path(self._config.path_backup, f"{slug}.tar")
+        tar_file = Path(self.sys_config.path_backup, f"{slug}.tar")
 
         # init object
         snapshot = Snapshot(self.coresys, tar_file)
@@ -65,7 +65,7 @@ class SnapshotManager(CoreSysAttributes):
                 self.snapshots_obj[snapshot.slug] = snapshot
 
         tasks = [_load_snapshot(tar_file) for tar_file in
-                 self._config.path_backup.glob("*.tar")]
+                 self.sys_config.path_backup.glob("*.tar")]
 
         _LOGGER.info("Found %d snapshot files", len(tasks))
         if tasks:
@@ -98,7 +98,7 @@ class SnapshotManager(CoreSysAttributes):
             return None
 
         # Move snapshot to backup
-        tar_origin = Path(self._config.path_backup, f"{snapshot.slug}.tar")
+        tar_origin = Path(self.sys_config.path_backup, f"{snapshot.slug}.tar")
         try:
             snapshot.tarfile.rename(tar_origin)
@@ -124,7 +124,7 @@ class SnapshotManager(CoreSysAttributes):
         snapshot = self._create_snapshot(name, SNAPSHOT_FULL, password)
         _LOGGER.info("Full-Snapshot %s start", snapshot.slug)
         try:
-            self._scheduler.suspend = True
+            self.sys_scheduler.suspend = True
             await self.lock.acquire()
 
             async with snapshot:
@@ -146,7 +146,7 @@ class SnapshotManager(CoreSysAttributes):
             return snapshot
 
         finally:
-            self._scheduler.suspend = False
+            self.sys_scheduler.suspend = False
             self.lock.release()
 
     async def do_snapshot_partial(self, name="", addons=None, folders=None,
@@ -162,14 +162,14 @@ class SnapshotManager(CoreSysAttributes):
 
         _LOGGER.info("Partial-Snapshot %s start", snapshot.slug)
         try:
-            self._scheduler.suspend = True
+            self.sys_scheduler.suspend = True
             await self.lock.acquire()
 
             async with snapshot:
                 # Snapshot add-ons
                 addon_list = []
                 for addon_slug in addons:
-                    addon = self._addons.get(addon_slug)
+                    addon = self.sys_addons.get(addon_slug)
                     if addon and addon.is_installed:
                         addon_list.append(addon)
                         continue
@@ -195,7 +195,7 @@ class SnapshotManager(CoreSysAttributes):
             return snapshot
 
         finally:
-            self._scheduler.suspend = False
+            self.sys_scheduler.suspend = False
             self.lock.release()
 
     async def do_restore_full(self, snapshot, password=None):
@@ -215,15 +215,15 @@ class SnapshotManager(CoreSysAttributes):
 
         _LOGGER.info("Full-Restore %s start", snapshot.slug)
         try:
-            self._scheduler.suspend = True
+            self.sys_scheduler.suspend = True
             await self.lock.acquire()
 
             async with snapshot:
                 tasks = []
 
                 # Stop Home-Assistant / Add-ons
-                tasks.append(self._homeassistant.stop())
-                for addon in self._addons.list_addons:
+                tasks.append(self.sys_homeassistant.stop())
+                for addon in self.sys_addons.list_addons:
                     if addon.is_installed:
                         tasks.append(addon.stop())
@@ -238,8 +238,8 @@ class SnapshotManager(CoreSysAttributes):
 
                 # Start homeassistant restore
                 _LOGGER.info("Restore %s run Home-Assistant", snapshot.slug)
                 snapshot.restore_homeassistant()
-                task_hass = self._loop.create_task(
-                    self._homeassistant.update(snapshot.homeassistant_version))
+                task_hass = self.sys_create_task(self.sys_homeassistant.update(
+                    snapshot.homeassistant_version))
 
                 # Restore repositories
                 _LOGGER.info("Restore %s run Repositories", snapshot.slug)
@@ -247,7 +247,7 @@ class SnapshotManager(CoreSysAttributes):
 
                 # Delete delta add-ons
                 tasks.clear()
-                for addon in self._addons.list_installed:
+                for addon in self.sys_addons.list_installed:
                     if addon.slug not in snapshot.addon_list:
                         tasks.append(addon.uninstall())
@@ -263,7 +263,7 @@ class SnapshotManager(CoreSysAttributes):
                 _LOGGER.info("Restore %s wait until homeassistant ready",
                              snapshot.slug)
                 await task_hass
-                await self._homeassistant.start()
+                await self.sys_homeassistant.start()
 
         except Exception:  # pylint: disable=broad-except
             _LOGGER.exception("Restore %s error", snapshot.slug)
@@ -274,7 +274,7 @@ class SnapshotManager(CoreSysAttributes):
             return True
 
         finally:
-            self._scheduler.suspend = False
+            self.sys_scheduler.suspend = False
             self.lock.release()
 
     async def do_restore_partial(self, snapshot, homeassistant=False,
@@ -293,13 +293,13 @@ class SnapshotManager(CoreSysAttributes):
 
         _LOGGER.info("Partial-Restore %s start", snapshot.slug)
         try:
-            self._scheduler.suspend = True
+            self.sys_scheduler.suspend = True
             await self.lock.acquire()
 
             async with snapshot:
                 # Stop Home-Assistant if they will be restored later
                 if homeassistant and FOLDER_HOMEASSISTANT in folders:
-                    await self._homeassistant.stop()
+                    await self.sys_homeassistant.stop()
 
                 # Process folders
                 if folders:
@@ -312,14 +312,14 @@ class SnapshotManager(CoreSysAttributes):
                     _LOGGER.info("Restore %s run Home-Assistant",
                                  snapshot.slug)
                     snapshot.restore_homeassistant()
-                    task_hass = self._loop.create_task(
-                        self._homeassistant.update(
+                    task_hass = self.sys_create_task(
+                        self.sys_homeassistant.update(
                             snapshot.homeassistant_version))
 
                 # Process Add-ons
                 addon_list = []
                 for slug in addons:
-                    addon = self._addons.get(slug)
+                    addon = self.sys_addons.get(slug)
                     if addon:
                         addon_list.append(addon)
                         continue
@@ -334,7 +334,7 @@ class SnapshotManager(CoreSysAttributes):
                     _LOGGER.info("Restore %s wait for Home-Assistant",
                                  snapshot.slug)
                     await task_hass
-                    await self._homeassistant.start()
+                    await self.sys_homeassistant.start()
 
         except Exception:  # pylint: disable=broad-except
             _LOGGER.exception("Restore %s error", snapshot.slug)
@@ -345,5 +345,5 @@ class SnapshotManager(CoreSysAttributes):
             return True
 
         finally:
-            self._scheduler.suspend = False
+            self.sys_scheduler.suspend = False
             self.lock.release()
diff --git a/hassio/snapshots/snapshot.py b/hassio/snapshots/snapshot.py
index 4ffd4345d..0888ad177 100644
--- a/hassio/snapshots/snapshot.py
+++ b/hassio/snapshots/snapshot.py
@@ -179,7 +179,7 @@ class Snapshot(CoreSysAttributes):
 
         # read snapshot.json
         try:
-            raw = await self._loop.run_in_executor(None, _load_file)
+            raw = await self.sys_run_in_executor(_load_file)
         except (tarfile.TarError, KeyError) as err:
             _LOGGER.error(
                 "Can't read snapshot tarfile %s: %s", self.tarfile, err)
@@ -204,7 +204,7 @@ class Snapshot(CoreSysAttributes):
 
     async def __aenter__(self):
         """Async context to open a snapshot."""
-        self._tmp = TemporaryDirectory(dir=str(self._config.path_tmp))
+        self._tmp = TemporaryDirectory(dir=str(self.sys_config.path_tmp))
 
         # create a snapshot
         if not self.tarfile.is_file():
@@ -216,7 +216,7 @@ class Snapshot(CoreSysAttributes):
             with tarfile.open(self.tarfile, "r:") as tar:
                 tar.extractall(path=self._tmp.name)
 
-        await self._loop.run_in_executor(None, _extract_snapshot)
+        await self.sys_run_in_executor(_extract_snapshot)
 
     async def __aexit__(self, exception_type, exception_value, traceback):
         """Async context to close a snapshot."""
@@ -241,7 +241,7 @@ class Snapshot(CoreSysAttributes):
         try:
             write_json_file(Path(self._tmp.name, "snapshot.json"), self._data)
-            await self._loop.run_in_executor(None, _create_snapshot)
+            await self.sys_run_in_executor(_create_snapshot)
         except (OSError, json.JSONDecodeError) as err:
             _LOGGER.error("Can't write snapshot: %s", err)
         finally:
@@ -249,7 +249,7 @@ class Snapshot(CoreSysAttributes):
 
     async def store_addons(self, addon_list=None):
         """Add a list of add-ons into snapshot."""
-        addon_list = addon_list or self._addons.list_installed
+        addon_list = addon_list or self.sys_addons.list_installed
 
         async def _addon_save(addon):
             """Task to store a add-on into snapshot."""
@@ -280,7 +280,7 @@ class Snapshot(CoreSysAttributes):
         if not addon_list:
             addon_list = []
             for addon_slug in self.addon_list:
-                addon = self._addons.get(addon_slug)
+                addon = self.sys_addons.get(addon_slug)
                 if addon:
                     addon_list.append(addon)
@@ -313,7 +313,7 @@ class Snapshot(CoreSysAttributes):
             """Intenal function to snapshot a folder."""
             slug_name = name.replace("/", "_")
             tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
-            origin_dir = Path(self._config.path_hassio, name)
+            origin_dir = Path(self.sys_config.path_hassio, name)
 
             # Check if exsits
             if not origin_dir.is_dir():
@@ -332,7 +332,7 @@ class Snapshot(CoreSysAttributes):
                 _LOGGER.warning("Can't snapshot folder %s: %s", name, err)
 
         # Run tasks
-        tasks = [self._loop.run_in_executor(None, _folder_save, folder)
+        tasks = [self.sys_run_in_executor(_folder_save, folder)
                  for folder in folder_list]
         if tasks:
             await asyncio.wait(tasks)
@@ -345,7 +345,7 @@ class Snapshot(CoreSysAttributes):
             """Intenal function to restore a folder."""
             slug_name = name.replace("/", "_")
             tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz")
-            origin_dir = Path(self._config.path_hassio, name)
+            origin_dir = Path(self.sys_config.path_hassio, name)
 
             # Check if exists inside snapshot
             if not tar_name.exists():
@@ -366,58 +366,58 @@ class Snapshot(CoreSysAttributes):
                 _LOGGER.warning("Can't restore folder %s: %s", name, err)
 
         # Run tasks
-        tasks = [self._loop.run_in_executor(None, _folder_restore, folder)
+        tasks = [self.sys_run_in_executor(_folder_restore, folder)
                  for folder in folder_list]
         if tasks:
             await asyncio.wait(tasks)
 
     def store_homeassistant(self):
         """Read all data from homeassistant object."""
-        self.homeassistant[ATTR_VERSION] = self._homeassistant.version
-        self.homeassistant[ATTR_WATCHDOG] = self._homeassistant.watchdog
-        self.homeassistant[ATTR_BOOT] = self._homeassistant.boot
-        self.homeassistant[ATTR_WAIT_BOOT] = self._homeassistant.wait_boot
+        self.homeassistant[ATTR_VERSION] = self.sys_homeassistant.version
+        self.homeassistant[ATTR_WATCHDOG] = self.sys_homeassistant.watchdog
+        self.homeassistant[ATTR_BOOT] = self.sys_homeassistant.boot
+        self.homeassistant[ATTR_WAIT_BOOT] = self.sys_homeassistant.wait_boot
 
         # Custom image
-        if self._homeassistant.is_custom_image:
-            self.homeassistant[ATTR_IMAGE] = self._homeassistant.image
+        if self.sys_homeassistant.is_custom_image:
+            self.homeassistant[ATTR_IMAGE] = self.sys_homeassistant.image
             self.homeassistant[ATTR_LAST_VERSION] = \
-                self._homeassistant.last_version
+                self.sys_homeassistant.last_version
 
         # API/Proxy
-        self.homeassistant[ATTR_PORT] = self._homeassistant.api_port
-        self.homeassistant[ATTR_SSL] = self._homeassistant.api_ssl
+        self.homeassistant[ATTR_PORT] = self.sys_homeassistant.api_port
+        self.homeassistant[ATTR_SSL] = self.sys_homeassistant.api_ssl
         self.homeassistant[ATTR_PASSWORD] = \
-            self._encrypt_data(self._homeassistant.api_password)
+            self._encrypt_data(self.sys_homeassistant.api_password)
 
     def restore_homeassistant(self):
         """Write all data to homeassistant object."""
-        self._homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG]
-        self._homeassistant.boot = self.homeassistant[ATTR_BOOT]
-        self._homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT]
+        self.sys_homeassistant.watchdog = self.homeassistant[ATTR_WATCHDOG]
+        self.sys_homeassistant.boot = self.homeassistant[ATTR_BOOT]
+        self.sys_homeassistant.wait_boot = self.homeassistant[ATTR_WAIT_BOOT]
 
         # Custom image
         if self.homeassistant.get(ATTR_IMAGE):
-            self._homeassistant.image = self.homeassistant[ATTR_IMAGE]
-            self._homeassistant.last_version = \
+            self.sys_homeassistant.image = self.homeassistant[ATTR_IMAGE]
+            self.sys_homeassistant.last_version = \
                 self.homeassistant[ATTR_LAST_VERSION]
 
         # API/Proxy
-        self._homeassistant.api_port = self.homeassistant[ATTR_PORT]
-        self._homeassistant.api_ssl = self.homeassistant[ATTR_SSL]
-        self._homeassistant.api_password = \
+        self.sys_homeassistant.api_port = self.homeassistant[ATTR_PORT]
+        self.sys_homeassistant.api_ssl = self.homeassistant[ATTR_SSL]
+        self.sys_homeassistant.api_password = \
             self._decrypt_data(self.homeassistant[ATTR_PASSWORD])
 
         # save
-        self._homeassistant.save_data()
+        self.sys_homeassistant.save_data()
 
     def store_repositories(self):
         """Store repository list into snapshot."""
-        self.repositories = self._config.addons_repositories
+        self.repositories = self.sys_config.addons_repositories
 
     def restore_repositories(self):
         """Restore repositories from snapshot.
 
         Return a coroutine.
         """
-        return self._addons.load_repositories(self.repositories)
+        return self.sys_addons.load_repositories(self.repositories)
diff --git a/hassio/supervisor.py b/hassio/supervisor.py
index f2740b45a..905ff4c46 100644
--- a/hassio/supervisor.py
+++ b/hassio/supervisor.py
@@ -34,7 +34,7 @@ class Supervisor(CoreSysAttributes):
     @property
     def last_version(self):
         """Return last available version of homeassistant."""
-        return self._updater.version_hassio
+        return self.sys_updater.version_hassio
 
     @property
     def image(self):
@@ -50,13 +50,13 @@ class Supervisor(CoreSysAttributes):
         """Update HomeAssistant version."""
         version = version or self.last_version
 
-        if version == self._supervisor.version:
+        if version == self.sys_supervisor.version:
             _LOGGER.warning("Version %s is already installed", version)
             return
 
         _LOGGER.info("Update supervisor to version %s", version)
         if await self.instance.install(version):
-            self._loop.call_later(1, self._loop.stop)
+            self.sys_loop.call_later(1, self.sys_loop.stop)
             return True
 
         _LOGGER.error("Update of hass.io fails!")
diff --git a/hassio/tasks.py b/hassio/tasks.py
index 2858be578..81e9ce2e7 100644
--- a/hassio/tasks.py
+++ b/hassio/tasks.py
@@ -29,24 +29,24 @@ class Tasks(CoreSysAttributes):
 
     async def load(self):
         """Add Tasks to scheduler."""
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
             self._update_addons, self.RUN_UPDATE_ADDONS))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
             self._update_supervisor, self.RUN_UPDATE_SUPERVISOR))
-        self.jobs.add(self._scheduler.register_task(
-            self._addons.reload, self.RUN_RELOAD_ADDONS))
-        self.jobs.add(self._scheduler.register_task(
-            self._updater.reload, self.RUN_RELOAD_UPDATER))
-        self.jobs.add(self._scheduler.register_task(
-            self._snapshots.reload, self.RUN_RELOAD_SNAPSHOTS))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_addons.reload, self.RUN_RELOAD_ADDONS))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_updater.reload, self.RUN_RELOAD_UPDATER))
+        self.jobs.add(self.sys_scheduler.register_task(
+            self.sys_snapshots.reload, self.RUN_RELOAD_SNAPSHOTS))
+        self.jobs.add(self.sys_scheduler.register_task(
             self._host_control.load, self.RUN_RELOAD_HOST_CONTROL))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
             self._watchdog_homeassistant_docker,
             self.RUN_WATCHDOG_HOMEASSISTANT_DOCKER))
-        self.jobs.add(self._scheduler.register_task(
+        self.jobs.add(self.sys_scheduler.register_task(
             self._watchdog_homeassistant_api,
             self.RUN_WATCHDOG_HOMEASSISTANT_API))
@@ -55,7 +55,7 @@ class Tasks(CoreSysAttributes):
     async def _update_addons(self):
         """Check if a update is available of a addon and update it."""
         tasks = []
-        for addon in self._addons.list_addons:
+        for addon in self.sys_addons.list_addons:
             if not addon.is_installed or not addon.auto_update:
                 continue
@@ -74,31 +74,31 @@ class Tasks(CoreSysAttributes):
 
     async def _update_supervisor(self):
         """Check and run update of supervisor hassio."""
-        if not self._supervisor.need_update:
+        if not self.sys_supervisor.need_update:
             return
 
         # don't perform a update on beta/dev channel
-        if self._dev:
+        if self.sys_dev:
             _LOGGER.warning("Ignore Hass.io update on dev channel!")
             return
 
         _LOGGER.info("Found new Hass.io version")
-        await self._supervisor.update()
+        await self.sys_supervisor.update()
 
     async def _watchdog_homeassistant_docker(self):
         """Check running state of docker and start if they is close."""
         # if Home-Assistant is active
-        if not await self._homeassistant.is_initialize() or \
-                not self._homeassistant.watchdog:
+        if not await self.sys_homeassistant.is_initialize() or \
+                not self.sys_homeassistant.watchdog:
             return
 
         # if Home-Assistant is running
-        if self._homeassistant.in_progress or \
-                await self._homeassistant.is_running():
+        if self.sys_homeassistant.in_progress or \
+                await self.sys_homeassistant.is_running():
             return
 
         _LOGGER.warning("Watchdog found a problem with Home-Assistant docker!")
-        await self._homeassistant.start()
+        await self.sys_homeassistant.start()
 
     async def _watchdog_homeassistant_api(self):
         """Create scheduler task for montoring running state of API.
@@ -109,13 +109,13 @@ class Tasks(CoreSysAttributes):
         retry_scan = self._data.get('HASS_WATCHDOG_API', 0)
 
         # If Home-Assistant is active
-        if not await self._homeassistant.is_initialize() or \
-                not self._homeassistant.watchdog:
+        if not await self.sys_homeassistant.is_initialize() or \
+                not self.sys_homeassistant.watchdog:
             return
 
         # If Home-Assistant API is up
-        if self._homeassistant.in_progress or \
-                await self._homeassistant.check_api_state():
+        if self.sys_homeassistant.in_progress or \
+                await self.sys_homeassistant.check_api_state():
             return
 
         # Look like we run into a problem
@@ -126,5 +126,5 @@ class Tasks(CoreSysAttributes):
             return
 
         _LOGGER.error("Watchdog found a problem with Home-Assistant API!")
-        await self._homeassistant.restart()
+        await self.sys_homeassistant.restart()
         self._data['HASS_WATCHDOG_API'] = 0
diff --git a/hassio/updater.py b/hassio/updater.py
index 47007c140..14ac30328 100644
--- a/hassio/updater.py
+++ b/hassio/updater.py
@@ -69,7 +69,7 @@ class Updater(JsonConfig, CoreSysAttributes):
         try:
             _LOGGER.info("Fetch update data from %s", url)
             with async_timeout.timeout(10):
-                async with self._websession.get(url) as request:
+                async with self.sys_websession.get(url) as request:
                     data = await request.json(content_type=None)
 
         except (aiohttp.ClientError, asyncio.TimeoutError, KeyError) as err: