diff --git a/setup.py b/setup.py index ac4e8396d..f0a8d8b75 100644 --- a/setup.py +++ b/setup.py @@ -33,6 +33,7 @@ setup( packages=[ "supervisor.addons", "supervisor.api", + "supervisor.backups", "supervisor.dbus.network", "supervisor.dbus.payloads", "supervisor.dbus", @@ -50,7 +51,6 @@ setup( "supervisor.resolution", "supervisor.services.modules", "supervisor.services", - "supervisor.snapshots", "supervisor.store", "supervisor.utils", "supervisor", diff --git a/supervisor/addons/addon.py b/supervisor/addons/addon.py index f033c7b60..918dc9c09 100644 --- a/supervisor/addons/addon.py +++ b/supervisor/addons/addon.py @@ -65,11 +65,11 @@ from ..utils import check_port from ..utils.apparmor import adjust_profile from ..utils.json import read_json_file, write_json_file from ..utils.tar import atomic_contents_add, secure_path -from .const import SnapshotAddonMode +from .const import AddonBackupMode from .model import AddonModel, Data from .options import AddonOptions from .utils import remove_data -from .validate import SCHEMA_ADDON_SNAPSHOT +from .validate import SCHEMA_ADDON_BACKUP _LOGGER: logging.Logger = logging.getLogger(__name__) @@ -679,23 +679,23 @@ class Addon(AddonModel): except DockerError as err: raise AddonsError() from err - async def _snapshot_command(self, command: str) -> None: + async def _backup_command(self, command: str) -> None: try: command_return = await self.instance.run_inside(command) if command_return.exit_code != 0: _LOGGER.error( - "Pre-/Post-Snapshot command returned error code: %s", + "Pre-/Post backup command returned error code: %s", command_return.exit_code, ) raise AddonsError() except DockerError as err: _LOGGER.error( - "Failed running pre-/post-snapshot command %s: %s", command, err + "Failed running pre-/post backup command %s: %s", command, err ) raise AddonsError() from err - async def snapshot(self, tar_file: tarfile.TarFile) -> None: - """Snapshot state of an add-on.""" + async def backup(self, tar_file: tarfile.TarFile) -> None: + """Backup state of an add-on.""" is_running = await self.is_running() with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp: @@ -734,31 +734,31 @@ class Addon(AddonModel): # write into tarfile def _write_tarfile(): """Write tar inside loop.""" - with tar_file as snapshot: - # Snapshot system + with tar_file as backup: + # Backup system - snapshot.add(temp, arcname=".") + backup.add(temp, arcname=".") - # Snapshot data + # Backup data atomic_contents_add( - snapshot, + backup, self.path_data, - excludes=self.snapshot_exclude, + excludes=self.backup_exclude, arcname="data", ) if ( is_running - and self.snapshot_mode == SnapshotAddonMode.HOT - and self.snapshot_pre is not None + and self.backup_mode == AddonBackupMode.HOT + and self.backup_pre is not None ): - await self._snapshot_command(self.snapshot_pre) - elif is_running and self.snapshot_mode == SnapshotAddonMode.COLD: - _LOGGER.info("Shutdown add-on %s for cold snapshot", self.slug) + await self._backup_command(self.backup_pre) + elif is_running and self.backup_mode == AddonBackupMode.COLD: + _LOGGER.info("Shutdown add-on %s for cold backup", self.slug) await self.instance.stop() try: - _LOGGER.info("Building snapshot for add-on %s", self.slug) + _LOGGER.info("Building backup for add-on %s", self.slug) await self.sys_run_in_executor(_write_tarfile) except (tarfile.TarError, OSError) as err: _LOGGER.error("Can't write tarfile %s: %s", tar_file, err) @@ -766,24 +766,24 @@ class Addon(AddonModel): finally: if ( is_running - and self.snapshot_mode == 
SnapshotAddonMode.HOT - and self.snapshot_post is not None + and self.backup_mode == AddonBackupMode.HOT + and self.backup_post is not None ): - await self._snapshot_command(self.snapshot_post) - elif is_running and self.snapshot_mode is SnapshotAddonMode.COLD: + await self._backup_command(self.backup_post) + elif is_running and self.backup_mode is AddonBackupMode.COLD: _LOGGER.info("Starting add-on %s again", self.slug) await self.start() - _LOGGER.info("Finish snapshot for addon %s", self.slug) + _LOGGER.info("Finish backup for addon %s", self.slug) async def restore(self, tar_file: tarfile.TarFile) -> None: """Restore state of an add-on.""" with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp: - # extract snapshot + # extract backup def _extract_tarfile(): - """Extract tar snapshot.""" - with tar_file as snapshot: - snapshot.extractall(path=Path(temp), members=secure_path(snapshot)) + """Extract tar backup.""" + with tar_file as backup: + backup.extractall(path=Path(temp), members=secure_path(backup)) try: await self.sys_run_in_executor(_extract_tarfile) @@ -791,7 +791,7 @@ class Addon(AddonModel): _LOGGER.error("Can't read tarfile %s: %s", tar_file, err) raise AddonsError() from err - # Read snapshot data + # Read backup data try: data = read_json_file(Path(temp, "addon.json")) except ConfigurationFileError as err: @@ -799,10 +799,10 @@ class Addon(AddonModel): # Validate try: - data = SCHEMA_ADDON_SNAPSHOT(data) + data = SCHEMA_ADDON_BACKUP(data) except vol.Invalid as err: _LOGGER.error( - "Can't validate %s, snapshot data: %s", + "Can't validate %s, backup data: %s", self.slug, humanize_error(data, err), ) diff --git a/supervisor/addons/const.py b/supervisor/addons/const.py index b82b226d1..b0fd3aa29 100644 --- a/supervisor/addons/const.py +++ b/supervisor/addons/const.py @@ -2,11 +2,11 @@ from enum import Enum -class SnapshotAddonMode(str, Enum): - """Snapshot mode of an Add-on.""" +class AddonBackupMode(str, Enum): + """Backup mode of an Add-on.""" HOT = "hot" COLD = "cold" -ATTR_SNAPSHOT = "snapshot" +ATTR_BACKUP = "backup" diff --git a/supervisor/addons/model.py b/supervisor/addons/model.py index 5167ea908..8ce54b7dd 100644 --- a/supervisor/addons/model.py +++ b/supervisor/addons/model.py @@ -5,7 +5,7 @@ from typing import Any, Awaitable, Dict, List, Optional from awesomeversion import AwesomeVersion, AwesomeVersionException -from supervisor.addons.const import SnapshotAddonMode +from supervisor.addons.const import AddonBackupMode from ..const import ( ATTR_ADVANCED, @@ -13,6 +13,9 @@ from ..const import ( ATTR_ARCH, ATTR_AUDIO, ATTR_AUTH_API, + ATTR_BACKUP_EXCLUDE, + ATTR_BACKUP_POST, + ATTR_BACKUP_PRE, ATTR_BOOT, ATTR_DESCRIPTON, ATTR_DEVICES, @@ -53,9 +56,6 @@ from ..const import ( ATTR_SCHEMA, ATTR_SERVICES, ATTR_SLUG, - ATTR_SNAPSHOT_EXCLUDE, - ATTR_SNAPSHOT_POST, - ATTR_SNAPSHOT_PRE, ATTR_STAGE, ATTR_STARTUP, ATTR_STDIN, @@ -79,7 +79,7 @@ from ..const import ( ) from ..coresys import CoreSys, CoreSysAttributes from ..docker.const import Capabilities -from .const import ATTR_SNAPSHOT +from .const import ATTR_BACKUP from .options import AddonOptions, UiOptions from .validate import RE_SERVICE, RE_VOLUME @@ -360,24 +360,24 @@ class AddonModel(CoreSysAttributes, ABC): return self.data[ATTR_HASSIO_ROLE] @property - def snapshot_exclude(self) -> List[str]: - """Return Exclude list for snapshot.""" - return self.data.get(ATTR_SNAPSHOT_EXCLUDE, []) + def backup_exclude(self) -> List[str]: + """Return Exclude list for backup.""" + return self.data.get(ATTR_BACKUP_EXCLUDE, 
[]) @property - def snapshot_pre(self) -> Optional[str]: - """Return pre-snapshot command.""" - return self.data.get(ATTR_SNAPSHOT_PRE) + def backup_pre(self) -> Optional[str]: + """Return pre-backup command.""" + return self.data.get(ATTR_BACKUP_PRE) @property - def snapshot_post(self) -> Optional[str]: - """Return post-snapshot command.""" - return self.data.get(ATTR_SNAPSHOT_POST) + def backup_post(self) -> Optional[str]: + """Return post-backup command.""" + return self.data.get(ATTR_BACKUP_POST) @property - def snapshot_mode(self) -> SnapshotAddonMode: - """Return if snapshot is hot/cold.""" - return self.data[ATTR_SNAPSHOT] + def backup_mode(self) -> AddonBackupMode: + """Return if backup is hot/cold.""" + return self.data[ATTR_BACKUP] @property def default_init(self) -> bool: diff --git a/supervisor/addons/validate.py b/supervisor/addons/validate.py index 918fc4335..cbf7ec2c4 100644 --- a/supervisor/addons/validate.py +++ b/supervisor/addons/validate.py @@ -7,7 +7,7 @@ import uuid import voluptuous as vol -from supervisor.addons.const import SnapshotAddonMode +from supervisor.addons.const import AddonBackupMode from ..const import ( ARCH_ALL, @@ -21,6 +21,9 @@ from ..const import ( ATTR_AUDIO_OUTPUT, ATTR_AUTH_API, ATTR_AUTO_UPDATE, + ATTR_BACKUP_EXCLUDE, + ATTR_BACKUP_POST, + ATTR_BACKUP_PRE, ATTR_BOOT, ATTR_BUILD_FROM, ATTR_CONFIGURATION, @@ -70,9 +73,6 @@ from ..const import ( ATTR_SCHEMA, ATTR_SERVICES, ATTR_SLUG, - ATTR_SNAPSHOT_EXCLUDE, - ATTR_SNAPSHOT_POST, - ATTR_SNAPSHOT_PRE, ATTR_SQUASH, ATTR_STAGE, ATTR_STARTUP, @@ -110,7 +110,7 @@ from ..validate import ( uuid_match, version_tag, ) -from .const import ATTR_SNAPSHOT +from .const import ATTR_BACKUP from .options import RE_SCHEMA_ELEMENT _LOGGER: logging.Logger = logging.getLogger(__name__) @@ -165,8 +165,8 @@ def _warn_addon_config(config: Dict[str, Any]): name, ) - if config.get(ATTR_SNAPSHOT, SnapshotAddonMode.HOT) == SnapshotAddonMode.COLD and ( - config.get(ATTR_SNAPSHOT_POST) or config.get(ATTR_SNAPSHOT_PRE) + if config.get(ATTR_BACKUP, AddonBackupMode.HOT) == AddonBackupMode.COLD and ( + config.get(ATTR_BACKUP_POST) or config.get(ATTR_BACKUP_PRE) ): _LOGGER.warning( "Add-on which only support COLD backups trying to use post/pre commands. Please report this to the maintainer of %s", @@ -225,6 +225,23 @@ def _migrate_addon_config(protocol=False): ) config[ATTR_TMPFS] = True + # 2021-06 "snapshot" renamed to "backup" + for entry in ( + "snapshot_exclude", + "snapshot_post", + "snapshot_pre", + "snapshot", + ): + if entry in config: + new_entry = entry.replace("snapshot", "backup") + config[new_entry] = config.pop(entry) + _LOGGER.warning( + "Add-on config '%s' is deprecated, '%s' should be used instead. 
Please report this to the maintainer of %s", + entry, + new_entry, + name, + ) + return config return _migrate @@ -294,11 +311,11 @@ _SCHEMA_ADDON_CONFIG = vol.Schema( vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(), vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)], vol.Optional(ATTR_DISCOVERY): [valid_discovery_service], - vol.Optional(ATTR_SNAPSHOT_EXCLUDE): [str], - vol.Optional(ATTR_SNAPSHOT_PRE): str, - vol.Optional(ATTR_SNAPSHOT_POST): str, - vol.Optional(ATTR_SNAPSHOT, default=SnapshotAddonMode.HOT): vol.Coerce( - SnapshotAddonMode + vol.Optional(ATTR_BACKUP_EXCLUDE): [str], + vol.Optional(ATTR_BACKUP_PRE): str, + vol.Optional(ATTR_BACKUP_POST): str, + vol.Optional(ATTR_BACKUP, default=AddonBackupMode.HOT): vol.Coerce( + AddonBackupMode ), vol.Optional(ATTR_OPTIONS, default={}): dict, vol.Optional(ATTR_SCHEMA, default={}): vol.Any( @@ -407,7 +424,7 @@ SCHEMA_ADDONS_FILE = vol.Schema( ) -SCHEMA_ADDON_SNAPSHOT = vol.Schema( +SCHEMA_ADDON_BACKUP = vol.Schema( { vol.Required(ATTR_USER): SCHEMA_ADDON_USER, vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM, diff --git a/supervisor/api/__init__.py b/supervisor/api/__init__.py index 6faca29b1..e698f20e0 100644 --- a/supervisor/api/__init__.py +++ b/supervisor/api/__init__.py @@ -9,6 +9,7 @@ from ..coresys import CoreSys, CoreSysAttributes from .addons import APIAddons from .audio import APIAudio from .auth import APIAuth +from .backups import APIBackups from .cli import APICli from .discovery import APIDiscovery from .dns import APICoreDNS @@ -28,7 +29,6 @@ from .proxy import APIProxy from .resolution import APIResoulution from .security import APISecurity from .services import APIServices -from .snapshots import APISnapshots from .store import APIStore from .supervisor import APISupervisor @@ -62,6 +62,7 @@ class RestAPI(CoreSysAttributes): self._register_addons() self._register_audio() self._register_auth() + self._register_backups() self._register_cli() self._register_discovery() self._register_dns() @@ -80,7 +81,6 @@ class RestAPI(CoreSysAttributes): self._register_proxy() self._register_resolution() self._register_services() - self._register_snapshots() self._register_supervisor() self._register_store() self._register_security() @@ -393,30 +393,41 @@ class RestAPI(CoreSysAttributes): ] ) - def _register_snapshots(self) -> None: - """Register snapshots functions.""" - api_snapshots = APISnapshots() - api_snapshots.coresys = self.coresys + def _register_backups(self) -> None: + """Register backups functions.""" + api_backups = APIBackups() + api_backups.coresys = self.coresys self.webapp.add_routes( [ - web.get("/snapshots", api_snapshots.list), - web.post("/snapshots/reload", api_snapshots.reload), - web.post("/snapshots/new/full", api_snapshots.snapshot_full), - web.post("/snapshots/new/partial", api_snapshots.snapshot_partial), - web.post("/snapshots/new/upload", api_snapshots.upload), - web.get("/snapshots/{snapshot}/info", api_snapshots.info), - web.delete("/snapshots/{snapshot}", api_snapshots.remove), + web.get("/snapshots", api_backups.list), + web.post("/snapshots/reload", api_backups.reload), + web.post("/snapshots/new/full", api_backups.backup_full), + web.post("/snapshots/new/partial", api_backups.backup_partial), + web.post("/snapshots/new/upload", api_backups.upload), + web.get("/snapshots/{slug}/info", api_backups.info), + web.delete("/snapshots/{slug}", api_backups.remove), + web.post("/snapshots/{slug}/restore/full", api_backups.restore_full), web.post( - "/snapshots/{snapshot}/restore/full", 
api_snapshots.restore_full + "/snapshots/{slug}/restore/partial", + api_backups.restore_partial, ), + web.get("/snapshots/{slug}/download", api_backups.download), + web.post("/snapshots/{slug}/remove", api_backups.remove), + # June 2021: /snapshots was renamed to /backups + web.get("/backups", api_backups.list), + web.post("/backups/reload", api_backups.reload), + web.post("/backups/new/full", api_backups.backup_full), + web.post("/backups/new/partial", api_backups.backup_partial), + web.post("/backups/new/upload", api_backups.upload), + web.get("/backups/{slug}/info", api_backups.info), + web.delete("/backups/{slug}", api_backups.remove), + web.post("/backups/{slug}/restore/full", api_backups.restore_full), web.post( - "/snapshots/{snapshot}/restore/partial", - api_snapshots.restore_partial, + "/backups/{slug}/restore/partial", + api_backups.restore_partial, ), - web.get("/snapshots/{snapshot}/download", api_snapshots.download), - # Old, remove at end of 2020 - web.post("/snapshots/{snapshot}/remove", api_snapshots.remove), + web.get("/backups/{slug}/download", api_backups.download), ] ) diff --git a/supervisor/api/backups.py b/supervisor/api/backups.py new file mode 100644 index 000000000..abca25d3a --- /dev/null +++ b/supervisor/api/backups.py @@ -0,0 +1,219 @@ +"""Backups RESTful API.""" +import asyncio +import logging +from pathlib import Path +import re +from tempfile import TemporaryDirectory + +from aiohttp import web +from aiohttp.hdrs import CONTENT_DISPOSITION +import voluptuous as vol + +from ..backups.validate import ALL_FOLDERS +from ..const import ( + ATTR_ADDONS, + ATTR_BACKUPS, + ATTR_CONTENT, + ATTR_DATE, + ATTR_FOLDERS, + ATTR_HOMEASSISTANT, + ATTR_NAME, + ATTR_PASSWORD, + ATTR_PROTECTED, + ATTR_REPOSITORIES, + ATTR_SIZE, + ATTR_SLUG, + ATTR_TYPE, + ATTR_VERSION, + CONTENT_TYPE_TAR, +) +from ..coresys import CoreSysAttributes +from ..exceptions import APIError +from .utils import api_process, api_validate + +_LOGGER: logging.Logger = logging.getLogger(__name__) + +RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+") + +# pylint: disable=no-value-for-parameter +SCHEMA_RESTORE_PARTIAL = vol.Schema( + { + vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)), + vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(), + vol.Optional(ATTR_ADDONS): vol.All([vol.Coerce(str)], vol.Unique()), + vol.Optional(ATTR_FOLDERS): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()), + } +) + +SCHEMA_RESTORE_FULL = vol.Schema( + {vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str))} +) + +SCHEMA_BACKUP_FULL = vol.Schema( + { + vol.Optional(ATTR_NAME): vol.Coerce(str), + vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)), + } +) + +SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend( + { + vol.Optional(ATTR_ADDONS): vol.All([vol.Coerce(str)], vol.Unique()), + vol.Optional(ATTR_FOLDERS): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()), + vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(), + } +) + + +class APIBackups(CoreSysAttributes): + """Handle RESTful API for backups functions.""" + + def _extract_slug(self, request): + """Return backup, throw an exception if it doesn't exist.""" + backup = self.sys_backups.get(request.match_info.get("slug")) + if not backup: + raise APIError("Backup does not exist") + return backup + + @api_process + async def list(self, request): + """Return backup list.""" + data_backups = [] + for backup in self.sys_backups.list_backups: + data_backups.append( + { + ATTR_SLUG: backup.slug, + ATTR_NAME: backup.name, + ATTR_DATE: backup.date, + ATTR_TYPE: backup.sys_type, + 
ATTR_PROTECTED: backup.protected, + ATTR_CONTENT: { + ATTR_HOMEASSISTANT: backup.homeassistant_version is not None, + ATTR_ADDONS: backup.addon_list, + ATTR_FOLDERS: backup.folders, + }, + } + ) + + if request.path == "/snapshots": + # Kept for backwards compatibility + return {"snapshots": data_backups} + + return {ATTR_BACKUPS: data_backups} + + @api_process + async def reload(self, request): + """Reload backup list.""" + await asyncio.shield(self.sys_backups.reload()) + return True + + @api_process + async def info(self, request): + """Return backup info.""" + backup = self._extract_slug(request) + + data_addons = [] + for addon_data in backup.addons: + data_addons.append( + { + ATTR_SLUG: addon_data[ATTR_SLUG], + ATTR_NAME: addon_data[ATTR_NAME], + ATTR_VERSION: addon_data[ATTR_VERSION], + ATTR_SIZE: addon_data[ATTR_SIZE], + } + ) + + return { + ATTR_SLUG: backup.slug, + ATTR_TYPE: backup.sys_type, + ATTR_NAME: backup.name, + ATTR_DATE: backup.date, + ATTR_SIZE: backup.size, + ATTR_PROTECTED: backup.protected, + ATTR_HOMEASSISTANT: backup.homeassistant_version, + ATTR_ADDONS: data_addons, + ATTR_REPOSITORIES: backup.repositories, + ATTR_FOLDERS: backup.folders, + } + + @api_process + async def backup_full(self, request): + """Create full backup.""" + body = await api_validate(SCHEMA_BACKUP_FULL, request) + backup = await asyncio.shield(self.sys_backups.do_backup_full(**body)) + + if backup: + return {ATTR_SLUG: backup.slug} + return False + + @api_process + async def backup_partial(self, request): + """Create a partial backup.""" + body = await api_validate(SCHEMA_BACKUP_PARTIAL, request) + backup = await asyncio.shield(self.sys_backups.do_backup_partial(**body)) + + if backup: + return {ATTR_SLUG: backup.slug} + return False + + @api_process + async def restore_full(self, request): + """Full restore of a backup.""" + backup = self._extract_slug(request) + body = await api_validate(SCHEMA_RESTORE_FULL, request) + + return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body)) + + @api_process + async def restore_partial(self, request): + """Partially restore a backup.""" + backup = self._extract_slug(request) + body = await api_validate(SCHEMA_RESTORE_PARTIAL, request) + + return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body)) + + @api_process + async def remove(self, request): + """Remove a backup.""" + backup = self._extract_slug(request) + return self.sys_backups.remove(backup) + + async def download(self, request): + """Download a backup file.""" + backup = self._extract_slug(request) + + _LOGGER.info("Downloading backup %s", backup.slug) + response = web.FileResponse(backup.tarfile) + response.content_type = CONTENT_TYPE_TAR + response.headers[ + CONTENT_DISPOSITION + ] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar" + return response + + @api_process + async def upload(self, request): + """Upload a backup file.""" + with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir: + tar_file = Path(temp_dir, "backup.tar") + reader = await request.multipart() + contents = await reader.next() + try: + with tar_file.open("wb") as backup: + while True: + chunk = await contents.read_chunk() + if not chunk: + break + backup.write(chunk) + + except OSError as err: + _LOGGER.error("Can't write new backup file: %s", err) + return False + + except asyncio.CancelledError: + return False + + backup = await asyncio.shield(self.sys_backups.import_backup(tar_file)) + + if backup: + return {ATTR_SLUG: backup.slug} + return False
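Both route tables registered above map to the same APIBackups handlers, so clients still calling the /snapshots paths keep working while new clients move to /backups; the list handler even keeps answering with the old "snapshots" key when it is reached via the legacy path. A minimal client sketch follows. Only the route paths and the "backups"/"snapshots" list keys come from the handlers above; the http://supervisor base URL, the SUPERVISOR_TOKEN bearer header, and the {"result": ..., "data": ...} response envelope are assumptions about the surrounding Supervisor API, not something introduced in this change.

```python
"""Sketch of a client for the renamed backup endpoints (assumptions noted below)."""
import asyncio
import os

import aiohttp

# Assumed deployment details, not part of this diff:
SUPERVISOR_URL = "http://supervisor"
TOKEN = os.environ.get("SUPERVISOR_TOKEN", "")


async def list_backups() -> list:
    """Return backup entries, preferring the new /backups route."""
    headers = {"Authorization": f"Bearer {TOKEN}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        # New route registered in _register_backups()
        async with session.get(f"{SUPERVISOR_URL}/backups") as resp:
            if resp.status == 200:
                body = await resp.json()
                return body["data"]["backups"]  # response envelope assumed
        # Legacy route, kept for backwards compatibility; same entries,
        # but the list handler returns them under the old "snapshots" key.
        async with session.get(f"{SUPERVISOR_URL}/snapshots") as resp:
            body = await resp.json()
            return body["data"]["snapshots"]


if __name__ == "__main__":
    for entry in asyncio.run(list_backups()):
        print(entry["slug"], entry["name"])
```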
diff --git a/supervisor/api/middleware/security.py b/supervisor/api/middleware/security.py index d11efc920..bc781cfbc 100644 --- a/supervisor/api/middleware/security.py +++ b/supervisor/api/middleware/security.py @@ -76,6 +76,7 @@ ADDONS_ROLE_ACCESS = { ROLE_BACKUP: re.compile( r"^(?:" r"|/.+/info" + r"|/backups.*" r"|/snapshots.*" r")$" ), @@ -99,6 +100,7 @@ ADDONS_ROLE_ACCESS = { r"|/observer/.+" r"|/os/.+" r"|/resolution/.+" + r"|/backups.*" r"|/snapshots.*" r"|/store.*" r"|/supervisor/.+" diff --git a/supervisor/api/snapshots.py b/supervisor/api/snapshots.py deleted file mode 100644 index c4fbde124..000000000 --- a/supervisor/api/snapshots.py +++ /dev/null @@ -1,221 +0,0 @@ -"""Init file for Supervisor snapshot RESTful API.""" -import asyncio -import logging -from pathlib import Path -import re -from tempfile import TemporaryDirectory - -from aiohttp import web -from aiohttp.hdrs import CONTENT_DISPOSITION -import voluptuous as vol - -from ..const import ( - ATTR_ADDONS, - ATTR_CONTENT, - ATTR_DATE, - ATTR_FOLDERS, - ATTR_HOMEASSISTANT, - ATTR_NAME, - ATTR_PASSWORD, - ATTR_PROTECTED, - ATTR_REPOSITORIES, - ATTR_SIZE, - ATTR_SLUG, - ATTR_SNAPSHOTS, - ATTR_TYPE, - ATTR_VERSION, - CONTENT_TYPE_TAR, -) -from ..coresys import CoreSysAttributes -from ..exceptions import APIError -from ..snapshots.validate import ALL_FOLDERS -from .utils import api_process, api_validate - -_LOGGER: logging.Logger = logging.getLogger(__name__) - -RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+") - -# pylint: disable=no-value-for-parameter -SCHEMA_RESTORE_PARTIAL = vol.Schema( - { - vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)), - vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(), - vol.Optional(ATTR_ADDONS): vol.All([vol.Coerce(str)], vol.Unique()), - vol.Optional(ATTR_FOLDERS): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()), - } -) - -SCHEMA_RESTORE_FULL = vol.Schema( - {vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str))} -) - -SCHEMA_SNAPSHOT_FULL = vol.Schema( - { - vol.Optional(ATTR_NAME): vol.Coerce(str), - vol.Optional(ATTR_PASSWORD): vol.Any(None, vol.Coerce(str)), - } -) - -SCHEMA_SNAPSHOT_PARTIAL = SCHEMA_SNAPSHOT_FULL.extend( - { - vol.Optional(ATTR_ADDONS): vol.All([vol.Coerce(str)], vol.Unique()), - vol.Optional(ATTR_FOLDERS): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()), - vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(), - } -) - - -class APISnapshots(CoreSysAttributes): - """Handle RESTful API for snapshot functions.""" - - def _extract_snapshot(self, request): - """Return snapshot, throw an exception if it doesn't exist.""" - snapshot = self.sys_snapshots.get(request.match_info.get("snapshot")) - if not snapshot: - raise APIError("Snapshot does not exist") - return snapshot - - @api_process - async def list(self, request): - """Return snapshot list.""" - data_snapshots = [] - for snapshot in self.sys_snapshots.list_snapshots: - data_snapshots.append( - { - ATTR_SLUG: snapshot.slug, - ATTR_NAME: snapshot.name, - ATTR_DATE: snapshot.date, - ATTR_TYPE: snapshot.sys_type, - ATTR_PROTECTED: snapshot.protected, - ATTR_CONTENT: { - ATTR_HOMEASSISTANT: snapshot.homeassistant_version is not None, - ATTR_ADDONS: snapshot.addon_list, - ATTR_FOLDERS: snapshot.folders, - }, - } - ) - - return {ATTR_SNAPSHOTS: data_snapshots} - - @api_process - async def reload(self, request): - """Reload snapshot list.""" - await asyncio.shield(self.sys_snapshots.reload()) - return True - - @api_process - async def info(self, request): - """Return snapshot info.""" - snapshot = self._extract_snapshot(request) 
- - data_addons = [] - for addon_data in snapshot.addons: - data_addons.append( - { - ATTR_SLUG: addon_data[ATTR_SLUG], - ATTR_NAME: addon_data[ATTR_NAME], - ATTR_VERSION: addon_data[ATTR_VERSION], - ATTR_SIZE: addon_data[ATTR_SIZE], - } - ) - - return { - ATTR_SLUG: snapshot.slug, - ATTR_TYPE: snapshot.sys_type, - ATTR_NAME: snapshot.name, - ATTR_DATE: snapshot.date, - ATTR_SIZE: snapshot.size, - ATTR_PROTECTED: snapshot.protected, - ATTR_HOMEASSISTANT: snapshot.homeassistant_version, - ATTR_ADDONS: data_addons, - ATTR_REPOSITORIES: snapshot.repositories, - ATTR_FOLDERS: snapshot.folders, - } - - @api_process - async def snapshot_full(self, request): - """Full-Snapshot a snapshot.""" - body = await api_validate(SCHEMA_SNAPSHOT_FULL, request) - snapshot = await asyncio.shield(self.sys_snapshots.do_snapshot_full(**body)) - - if snapshot: - return {ATTR_SLUG: snapshot.slug} - return False - - @api_process - async def snapshot_partial(self, request): - """Partial-Snapshot a snapshot.""" - body = await api_validate(SCHEMA_SNAPSHOT_PARTIAL, request) - snapshot = await asyncio.shield(self.sys_snapshots.do_snapshot_partial(**body)) - - if snapshot: - return {ATTR_SLUG: snapshot.slug} - return False - - @api_process - async def restore_full(self, request): - """Full-Restore a snapshot.""" - snapshot = self._extract_snapshot(request) - body = await api_validate(SCHEMA_RESTORE_FULL, request) - - return await asyncio.shield( - self.sys_snapshots.do_restore_full(snapshot, **body) - ) - - @api_process - async def restore_partial(self, request): - """Partial-Restore a snapshot.""" - snapshot = self._extract_snapshot(request) - body = await api_validate(SCHEMA_RESTORE_PARTIAL, request) - - return await asyncio.shield( - self.sys_snapshots.do_restore_partial(snapshot, **body) - ) - - @api_process - async def remove(self, request): - """Remove a snapshot.""" - snapshot = self._extract_snapshot(request) - return self.sys_snapshots.remove(snapshot) - - async def download(self, request): - """Download a snapshot file.""" - snapshot = self._extract_snapshot(request) - - _LOGGER.info("Downloading snapshot %s", snapshot.slug) - response = web.FileResponse(snapshot.tarfile) - response.content_type = CONTENT_TYPE_TAR - response.headers[ - CONTENT_DISPOSITION - ] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', snapshot.name)}.tar" - return response - - @api_process - async def upload(self, request): - """Upload a snapshot file.""" - with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir: - tar_file = Path(temp_dir, "snapshot.tar") - reader = await request.multipart() - contents = await reader.next() - try: - with tar_file.open("wb") as snapshot: - while True: - chunk = await contents.read_chunk() - if not chunk: - break - snapshot.write(chunk) - - except OSError as err: - _LOGGER.error("Can't write new snapshot file: %s", err) - return False - - except asyncio.CancelledError: - return False - - snapshot = await asyncio.shield( - self.sys_snapshots.import_snapshot(tar_file) - ) - - if snapshot: - return {ATTR_SLUG: snapshot.slug} - return False diff --git a/supervisor/backups/__init__.py b/supervisor/backups/__init__.py new file mode 100644 index 000000000..37bbde0cc --- /dev/null +++ b/supervisor/backups/__init__.py @@ -0,0 +1 @@ +"""Backup system control.""" diff --git a/supervisor/snapshots/snapshot.py b/supervisor/backups/backup.py similarity index 82% rename from supervisor/snapshots/snapshot.py rename to supervisor/backups/backup.py index 983a73ffd..8603967e8 100644 --- 
a/supervisor/snapshots/snapshot.py +++ b/supervisor/backups/backup.py @@ -1,4 +1,4 @@ -"""Representation of a snapshot file.""" +"""Representation of a backup file.""" from base64 import b64decode, b64encode import json import logging @@ -48,7 +48,7 @@ from ..exceptions import AddonsError from ..utils.json import write_json_file from ..utils.tar import SecureTarFile, atomic_contents_add, secure_path from .utils import key_to_iv, password_for_validating, password_to_key, remove_folder -from .validate import ALL_FOLDERS, SCHEMA_SNAPSHOT +from .validate import ALL_FOLDERS, SCHEMA_BACKUP _LOGGER: logging.Logger = logging.getLogger(__name__) @@ -63,11 +63,11 @@ MAP_FOLDER_EXCLUDE = { } -class Snapshot(CoreSysAttributes): - """A single Supervisor snapshot.""" +class Backup(CoreSysAttributes): + """A single Supervisor backup.""" def __init__(self, coresys: CoreSys, tar_file: Path): - """Initialize a snapshot.""" + """Initialize a backup.""" self.coresys: CoreSys = coresys self._tarfile: Path = tar_file self._data: Dict[str, Any] = {} @@ -77,32 +77,32 @@ class Snapshot(CoreSysAttributes): @property def slug(self): - """Return snapshot slug.""" + """Return backup slug.""" return self._data.get(ATTR_SLUG) @property def sys_type(self): - """Return snapshot type.""" + """Return backup type.""" return self._data.get(ATTR_TYPE) @property def name(self): - """Return snapshot name.""" + """Return backup name.""" return self._data[ATTR_NAME] @property def date(self): - """Return snapshot date.""" + """Return backup date.""" return self._data[ATTR_DATE] @property def protected(self): - """Return snapshot date.""" + """Return whether the backup is password protected.""" return self._data.get(ATTR_PROTECTED) is not None @property def addons(self): - """Return snapshot date.""" + """Return backup add-on data.""" return self._data[ATTR_ADDONS] @property @@ -117,27 +117,27 @@ class Snapshot(CoreSysAttributes): @property def repositories(self): - """Return snapshot date.""" + """Return backup repository list.""" return self._data[ATTR_REPOSITORIES] @repositories.setter def repositories(self, value): - """Set snapshot date.""" + """Set backup repository list.""" self._data[ATTR_REPOSITORIES] = value @property def homeassistant_version(self): - """Return snapshot Home Assistant version.""" + """Return backup Home Assistant version.""" return self._data[ATTR_HOMEASSISTANT].get(ATTR_VERSION) @property def homeassistant(self): - """Return snapshot Home Assistant data.""" + """Return backup Home Assistant data.""" return self._data[ATTR_HOMEASSISTANT] @property def docker(self): - """Return snapshot Docker config data.""" + """Return backup Docker config data.""" return self._data.get(ATTR_DOCKER, {}) @docker.setter @@ -147,7 +147,7 @@ @property def size(self): - """Return snapshot size.""" + """Return backup size.""" if not self.tarfile.is_file(): return 0 return round(self.tarfile.stat().st_size / 1048576, 2) # calc mbyte @@ -159,11 +159,11 @@ @property def tarfile(self): - """Return path to Snapshot tarfile.""" + """Return path to backup tarfile.""" return self._tarfile def new(self, slug, name, date, sys_type, password=None): - """Initialize a new snapshot.""" + """Initialize a new backup.""" # Init metadata self._data[ATTR_SLUG] = slug self._data[ATTR_NAME] = name @@ -171,7 +171,7 @@ self._data[ATTR_TYPE] = sys_type # Add defaults - self._data = SCHEMA_SNAPSHOT(self._data) + self._data = SCHEMA_BACKUP(self._data) # Set password if password: @@ -180,7 +180,7 @@ 
class Snapshot(CoreSysAttributes): self._data[ATTR_CRYPTO] = CRYPTO_AES128 def set_password(self, password: str) -> bool: - """Set the password for an existing snapshot.""" + """Set the password for an existing backup.""" if not password: return False @@ -223,22 +223,26 @@ class Snapshot(CoreSysAttributes): return data.decode() async def load(self): - """Read snapshot.json from tar file.""" + """Read backup.json from tar file.""" if not self.tarfile.is_file(): _LOGGER.error("No tarfile located at %s", self.tarfile) return False def _load_file(): - """Read snapshot.json.""" - with tarfile.open(self.tarfile, "r:") as snapshot: - json_file = snapshot.extractfile("./snapshot.json") + """Read backup.json.""" + with tarfile.open(self.tarfile, "r:") as backup: + if "./snapshot.json" in [entry.name for entry in backup.getmembers()]: + # Old backups still use "snapshot.json", we need to support that forever + json_file = backup.extractfile("./snapshot.json") + else: + json_file = backup.extractfile("./backup.json") return json_file.read() - # read snapshot.json + # read backup.json try: raw = await self.sys_run_in_executor(_load_file) except (tarfile.TarError, KeyError) as err: - _LOGGER.error("Can't read snapshot tarfile %s: %s", self.tarfile, err) + _LOGGER.error("Can't read backup tarfile %s: %s", self.tarfile, err) return False # parse data @@ -250,7 +254,7 @@ class Snapshot(CoreSysAttributes): # validate try: - self._data = SCHEMA_SNAPSHOT(raw_dict) + self._data = SCHEMA_BACKUP(raw_dict) except vol.Invalid as err: _LOGGER.error( "Can't validate data for %s: %s", @@ -262,66 +266,66 @@ class Snapshot(CoreSysAttributes): return True async def __aenter__(self): - """Async context to open a snapshot.""" + """Async context to open a backup.""" self._tmp = TemporaryDirectory(dir=str(self.sys_config.path_tmp)) - # create a snapshot + # create a backup if not self.tarfile.is_file(): return self - # extract an existing snapshot - def _extract_snapshot(): - """Extract a snapshot.""" + # extract an existing backup + def _extract_backup(): + """Extract a backup.""" with tarfile.open(self.tarfile, "r:") as tar: tar.extractall(path=self._tmp.name, members=secure_path(tar)) - await self.sys_run_in_executor(_extract_snapshot) + await self.sys_run_in_executor(_extract_backup) async def __aexit__(self, exception_type, exception_value, traceback): - """Async context to close a snapshot.""" - # exists snapshot or exception on build + """Async context to close a backup.""" + # existing backup or exception on build if self.tarfile.is_file() or exception_type is not None: self._tmp.cleanup() return # validate data try: - self._data = SCHEMA_SNAPSHOT(self._data) + self._data = SCHEMA_BACKUP(self._data) except vol.Invalid as err: _LOGGER.error( "Invalid data for %s: %s", self.tarfile, humanize_error(self._data, err) ) raise ValueError("Invalid config") from None - # new snapshot, build it - def _create_snapshot(): - """Create a new snapshot.""" + # new backup, build it + def _create_backup(): + """Create a new backup.""" with tarfile.open(self.tarfile, "w:") as tar: tar.add(self._tmp.name, arcname=".") try: - write_json_file(Path(self._tmp.name, "snapshot.json"), self._data) - await self.sys_run_in_executor(_create_snapshot) + write_json_file(Path(self._tmp.name, "backup.json"), self._data) + await self.sys_run_in_executor(_create_backup) except (OSError, json.JSONDecodeError) as err: - _LOGGER.error("Can't write snapshot: %s", err) + _LOGGER.error("Can't write backup: %s", err) finally: self._tmp.cleanup() async def 
store_addons(self, addon_list: Optional[List[Addon]] = None): - """Add a list of add-ons into snapshot.""" + """Add a list of add-ons into backup.""" addon_list: List[Addon] = addon_list or self.sys_addons.installed async def _addon_save(addon: Addon): - """Task to store an add-on into snapshot.""" + """Task to store an add-on into backup.""" addon_file = SecureTarFile( Path(self._tmp.name, f"{addon.slug}.tar.gz"), "w", key=self._key ) - # Take snapshot + # Take backup try: - await addon.snapshot(addon_file) + await addon.backup(addon_file) except AddonsError: - _LOGGER.error("Can't create snapshot for %s", addon.slug) + _LOGGER.error("Can't create backup for %s", addon.slug) return # Store to config @@ -343,25 +347,25 @@ class Snapshot(CoreSysAttributes): _LOGGER.warning("Can't save Add-on %s: %s", addon.slug, err) async def restore_addons(self, addon_list: Optional[List[str]] = None): - """Restore a list add-on from snapshot.""" + """Restore a list of add-ons from backup.""" addon_list: List[str] = addon_list or self.addon_list async def _addon_restore(addon_slug: str): - """Task to restore an add-on into snapshot.""" + """Task to restore an add-on from backup.""" addon_file = SecureTarFile( Path(self._tmp.name, f"{addon_slug}.tar.gz"), "r", key=self._key ) - # If exists inside snapshot + # If exists inside backup if not addon_file.path.exists(): - _LOGGER.error("Can't find snapshot %s", addon_slug) + _LOGGER.error("Can't find backup %s", addon_slug) return # Perform a restore try: await self.sys_addons.restore(addon_slug, addon_file) except AddonsError: - _LOGGER.error("Can't restore snapshot %s", addon_slug) + _LOGGER.error("Can't restore backup %s", addon_slug) # Save Add-ons sequential # avoid issue on slow IO @@ -372,23 +376,23 @@ class Snapshot(CoreSysAttributes): _LOGGER.warning("Can't restore Add-on %s: %s", slug, err) async def store_folders(self, folder_list: Optional[List[str]] = None): - """Backup Supervisor data into snapshot.""" + """Backup Supervisor data folders into the backup.""" folder_list: Set[str] = set(folder_list or ALL_FOLDERS) def _folder_save(name: str): - """Take snapshot of a folder.""" + """Take backup of a folder.""" slug_name = name.replace("/", "_") tar_name = Path(self._tmp.name, f"{slug_name}.tar.gz") origin_dir = Path(self.sys_config.path_supervisor, name) # Check if exists if not origin_dir.is_dir(): - _LOGGER.warning("Can't find snapshot folder %s", name) + _LOGGER.warning("Can't find backup folder %s", name) return - # Take snapshot + # Take backup try: - _LOGGER.info("Snapshot folder %s", name) + _LOGGER.info("Backing up folder %s", name) with SecureTarFile(tar_name, "w", key=self._key) as tar_file: atomic_contents_add( tar_file, @@ -397,10 +401,10 @@ class Snapshot(CoreSysAttributes): arcname=".", ) - _LOGGER.info("Snapshot folder %s done", name) + _LOGGER.info("Backup folder %s done", name) self._data[ATTR_FOLDERS].append(name) except (tarfile.TarError, OSError) as err: - _LOGGER.warning("Can't snapshot folder %s: %s", name, err) + _LOGGER.warning("Can't backup folder %s: %s", name, err) # Save folder sequential # avoid issue on slow IO @@ -411,7 +415,7 @@ class Snapshot(CoreSysAttributes): _LOGGER.warning("Can't save folder %s: %s", folder, err) async def restore_folders(self, folder_list: Optional[List[str]] = None): - """Backup Supervisor data into snapshot.""" + """Restore Supervisor data folders from the backup.""" folder_list: Set[str] = set(folder_list or self.folders) def _folder_restore(name: str): @@ -420,7 +424,7 @@ class Snapshot(CoreSysAttributes): tar_name = 
Path(self._tmp.name, f"{slug_name}.tar.gz") origin_dir = Path(self.sys_config.path_supervisor, name) - # Check if exists inside snapshot + # Check if exists inside backup if not tar_name.exists(): _LOGGER.warning("Can't find restore folder %s", name) return @@ -486,11 +490,11 @@ class Snapshot(CoreSysAttributes): self.sys_homeassistant.save_data() def store_repositories(self): - """Store repository list into snapshot.""" + """Store repository list into backup.""" self.repositories = self.sys_config.addons_repositories def restore_repositories(self): - """Restore repositories from snapshot. + """Restore repositories from backup. Return a coroutine. """ diff --git a/supervisor/backups/const.py b/supervisor/backups/const.py new file mode 100644 index 000000000..ff6e37347 --- /dev/null +++ b/supervisor/backups/const.py @@ -0,0 +1,9 @@ +"""Backup consts.""" +from enum import Enum + + +class BackupType(str, Enum): + """Backup type enum.""" + + FULL = "full" + PARTIAL = "partial" diff --git a/supervisor/backups/manager.py b/supervisor/backups/manager.py new file mode 100644 index 000000000..185fa0db9 --- /dev/null +++ b/supervisor/backups/manager.py @@ -0,0 +1,400 @@ +"""Backup manager.""" +import asyncio +import logging +from pathlib import Path +from typing import Awaitable, Set + +from awesomeversion.awesomeversion import AwesomeVersion +from awesomeversion.exceptions import AwesomeVersionCompare + +from ..const import FOLDER_HOMEASSISTANT, CoreState +from ..coresys import CoreSysAttributes +from ..exceptions import AddonsError +from ..jobs.decorator import Job, JobCondition +from ..utils.dt import utcnow +from .backup import Backup +from .const import BackupType +from .utils import create_slug + +_LOGGER: logging.Logger = logging.getLogger(__name__) + + +class BackupManager(CoreSysAttributes): + """Manage backups.""" + + def __init__(self, coresys): + """Initialize a backup manager.""" + self.coresys = coresys + self._backups = {} + self.lock = asyncio.Lock() + + @property + def list_backups(self) -> Set[Backup]: + """Return a list of all backup objects.""" + return set(self._backups.values()) + + def get(self, slug): + """Return backup object.""" + return self._backups.get(slug) + + def _create_backup(self, name, sys_type, password, homeassistant=True): + """Initialize a new backup object from name.""" + date_str = utcnow().isoformat() + slug = create_slug(name, date_str) + tar_file = Path(self.sys_config.path_backup, f"{slug}.tar") + + # init object + backup = Backup(self.coresys, tar_file) + backup.new(slug, name, date_str, sys_type, password) + + # set general data + if homeassistant: + backup.store_homeassistant() + + backup.store_repositories() + backup.store_dockerconfig() + + return backup + + def load(self): + """Load exists backups data. + + Return a coroutine. 
+ """ + return self.reload() + + async def reload(self): + """Load existing backups.""" + self._backups = {} + + async def _load_backup(tar_file): + """Load the backup.""" + backup = Backup(self.coresys, tar_file) + if await backup.load(): + self._backups[backup.slug] = backup + + tasks = [ + _load_backup(tar_file) + for tar_file in self.sys_config.path_backup.glob("*.tar") + ] + + _LOGGER.info("Found %d backup files", len(tasks)) + if tasks: + await asyncio.wait(tasks) + + def remove(self, backup): + """Remove a backup.""" + try: + backup.tarfile.unlink() + self._backups.pop(backup.slug, None) + _LOGGER.info("Removed backup file %s", backup.slug) + + except OSError as err: + _LOGGER.error("Can't remove backup %s: %s", backup.slug, err) + return False + + return True + + async def import_backup(self, tar_file): + """Check backup tarfile and import it.""" + backup = Backup(self.coresys, tar_file) + + # Read metadata + if not await backup.load(): + return None + + # Already exists? + if backup.slug in self._backups: + _LOGGER.warning("Backup %s already exists! overwriting", backup.slug) + self.remove(self.get(backup.slug)) + + # Move the tarfile into the backup storage folder + tar_origin = Path(self.sys_config.path_backup, f"{backup.slug}.tar") + try: + backup.tarfile.rename(tar_origin) + + except OSError as err: + _LOGGER.error("Can't move backup file to storage: %s", err) + return None + + # Load new backup + backup = Backup(self.coresys, tar_origin) + if not await backup.load(): + return None + _LOGGER.info("Successfully imported %s", backup.slug) + + self._backups[backup.slug] = backup + return backup + + @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.RUNNING]) + async def do_backup_full(self, name="", password=None): + """Create a full backup.""" + if self.lock.locked(): + _LOGGER.error("A backup/restore process is already running") + return None + + backup = self._create_backup(name, BackupType.FULL, password) + _LOGGER.info("Creating new full backup with slug %s", backup.slug) + try: + self.sys_core.state = CoreState.FREEZE + await self.lock.acquire() + + async with backup: + # Backup add-ons + _LOGGER.info("Backing up %s store Add-ons", backup.slug) + await backup.store_addons() + + # Backup folders + _LOGGER.info("Backing up %s store folders", backup.slug) + await backup.store_folders() + + except Exception as err: # pylint: disable=broad-except + _LOGGER.exception("Backup %s error", backup.slug) + self.sys_capture_exception(err) + return None + + else: + _LOGGER.info("Creating full backup with slug %s completed", backup.slug) + self._backups[backup.slug] = backup + return backup + + finally: + self.sys_core.state = CoreState.RUNNING + self.lock.release() + + @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.RUNNING]) + async def do_backup_partial( + self, name="", addons=None, folders=None, password=None, homeassistant=True + ): + """Create a partial backup.""" + if self.lock.locked(): + _LOGGER.error("A backup/restore process is already running") + return None + + addons = addons or [] + folders = folders or [] + + if len(addons) == 0 and len(folders) == 0 and not homeassistant: + _LOGGER.error("Nothing to create backup for") + return + + backup = self._create_backup(name, BackupType.PARTIAL, password, homeassistant) + + _LOGGER.info("Creating new partial backup with slug %s", backup.slug) + try: + self.sys_core.state = CoreState.FREEZE + await self.lock.acquire() + + async with backup: + # Backup add-ons + addon_list = [] + for addon_slug in addons: + addon = self.sys_addons.get(addon_slug) + 
if addon and addon.is_installed: + addon_list.append(addon) + continue + _LOGGER.warning("Add-on %s not found/installed", addon_slug) + + if addon_list: + _LOGGER.info("Backing up %s store Add-ons", backup.slug) + await backup.store_addons(addon_list) + + # Backup folders + if folders: + _LOGGER.info("Backing up %s store folders", backup.slug) + await backup.store_folders(folders) + + except Exception as err: # pylint: disable=broad-except + _LOGGER.exception("Backup %s error", backup.slug) + self.sys_capture_exception(err) + return None + + else: + _LOGGER.info("Creating partial backup with slug %s completed", backup.slug) + self._backups[backup.slug] = backup + return backup + + finally: + self.sys_core.state = CoreState.RUNNING + self.lock.release() + + @Job( + conditions=[ + JobCondition.FREE_SPACE, + JobCondition.HEALTHY, + JobCondition.INTERNET_HOST, + JobCondition.INTERNET_SYSTEM, + JobCondition.RUNNING, + ] + ) + async def do_restore_full(self, backup, password=None): + """Restore a backup.""" + if self.lock.locked(): + _LOGGER.error("A backup/restore process is already running") + return False + + if backup.sys_type != BackupType.FULL: + _LOGGER.error("%s is only a partial backup!", backup.slug) + return False + + if backup.protected and not backup.set_password(password): + _LOGGER.error("Invalid password for backup %s", backup.slug) + return False + + _LOGGER.info("Full-Restore %s start", backup.slug) + try: + self.sys_core.state = CoreState.FREEZE + await self.lock.acquire() + + async with backup: + # Stop Home-Assistant / Add-ons + await self.sys_core.shutdown() + + # Restore folders + _LOGGER.info("Restoring %s folders", backup.slug) + await backup.restore_folders() + + # Restore docker config + _LOGGER.info("Restoring %s Docker Config", backup.slug) + backup.restore_dockerconfig() + + # Start homeassistant restore + _LOGGER.info("Restoring %s Home-Assistant", backup.slug) + backup.restore_homeassistant() + task_hass = self._update_core_task(backup.homeassistant_version) + + # Restore repositories + _LOGGER.info("Restoring %s Repositories", backup.slug) + await backup.restore_repositories() + + # Delete delta add-ons + _LOGGER.info("Removing add-ons not in the backup %s", backup.slug) + for addon in self.sys_addons.installed: + if addon.slug in backup.addon_list: + continue + + # Remove Add-on because it's not a part of the new env + # Do it sequential avoid issue on slow IO + try: + await addon.uninstall() + except AddonsError: + _LOGGER.warning("Can't uninstall Add-on %s", addon.slug) + + # Restore add-ons + _LOGGER.info("Restore %s old add-ons", backup.slug) + await backup.restore_addons() + + # finish homeassistant task + _LOGGER.info("Restore %s wait until homeassistant ready", backup.slug) + await task_hass + await self.sys_homeassistant.core.start() + + except Exception as err: # pylint: disable=broad-except + _LOGGER.exception("Restore %s error", backup.slug) + self.sys_capture_exception(err) + return False + + else: + _LOGGER.info("Full-Restore %s done", backup.slug) + return True + + finally: + self.sys_core.state = CoreState.RUNNING + self.lock.release() + + @Job( + conditions=[ + JobCondition.FREE_SPACE, + JobCondition.HEALTHY, + JobCondition.INTERNET_HOST, + JobCondition.INTERNET_SYSTEM, + JobCondition.RUNNING, + ] + ) + async def do_restore_partial( + self, backup, homeassistant=False, addons=None, folders=None, password=None + ): + """Restore a backup.""" + if self.lock.locked(): + _LOGGER.error("A backup/restore process is already running") + return False + + 
if backup.protected and not backup.set_password(password): + _LOGGER.error("Invalid password for backup %s", backup.slug) + return False + + addons = addons or [] + folders = folders or [] + + _LOGGER.info("Partial-Restore %s start", backup.slug) + try: + self.sys_core.state = CoreState.FREEZE + await self.lock.acquire() + + async with backup: + # Restore docker config + _LOGGER.info("Restoring %s Docker Config", backup.slug) + backup.restore_dockerconfig() + + # Stop Home-Assistant for config restore + if FOLDER_HOMEASSISTANT in folders: + await self.sys_homeassistant.core.stop() + backup.restore_homeassistant() + + # Process folders + if folders: + _LOGGER.info("Restoring %s folders", backup.slug) + await backup.restore_folders(folders) + + # Process Home-Assistant + task_hass = None + if homeassistant: + _LOGGER.info("Restoring %s Home-Assistant", backup.slug) + task_hass = self._update_core_task(backup.homeassistant_version) + + if addons: + _LOGGER.info("Restoring %s Repositories", backup.slug) + await backup.restore_repositories() + + _LOGGER.info("Restoring %s old add-ons", backup.slug) + await backup.restore_addons(addons) + + # Make sure Home Assistant runs again + if task_hass: + _LOGGER.info("Restore %s wait for Home-Assistant", backup.slug) + await task_hass + + # Do we need to start Home Assistant? + if not await self.sys_homeassistant.core.is_running(): + await self.sys_homeassistant.core.start() + + # Check if we can access the API, otherwise restart + if not await self.sys_homeassistant.api.check_api_state(): + _LOGGER.warning("Need to restart Home Assistant for API access") + await self.sys_homeassistant.core.restart() + + except Exception as err: # pylint: disable=broad-except + _LOGGER.exception("Restore %s error", backup.slug) + self.sys_capture_exception(err) + return False + + else: + _LOGGER.info("Partial-Restore %s done", backup.slug) + return True + + finally: + self.sys_core.state = CoreState.RUNNING + self.lock.release() + + def _update_core_task(self, version: AwesomeVersion) -> Awaitable[None]: + """Process core update if needed and make awaitable object.""" + + async def _core_update(): + try: + if version == self.sys_homeassistant.version: + return + except (AwesomeVersionCompare, TypeError): + pass + await self.sys_homeassistant.core.update(version) + + return self.sys_create_task(_core_update()) diff --git a/supervisor/snapshots/utils.py b/supervisor/backups/utils.py similarity index 100% rename from supervisor/snapshots/utils.py rename to supervisor/backups/utils.py diff --git a/supervisor/snapshots/validate.py b/supervisor/backups/validate.py similarity index 93% rename from supervisor/snapshots/validate.py rename to supervisor/backups/validate.py index 5e6bf9ce5..f30de4cfd 100644 --- a/supervisor/snapshots/validate.py +++ b/supervisor/backups/validate.py @@ -1,6 +1,7 @@ """Validate some things around restore.""" import voluptuous as vol +from ..backups.const import BackupType from ..const import ( ATTR_ADDONS, ATTR_AUDIO_INPUT, @@ -30,8 +31,6 @@ from ..const import ( FOLDER_MEDIA, FOLDER_SHARE, FOLDER_SSL, - SNAPSHOT_FULL, - SNAPSHOT_PARTIAL, ) from ..validate import ( SCHEMA_DOCKER_CONFIG, @@ -55,15 +54,15 @@ def unique_addons(addons_list): single = {addon[ATTR_SLUG] for addon in addons_list} if len(single) != len(addons_list): - raise vol.Invalid("Invalid addon list on snapshot!") from None + raise vol.Invalid("Invalid addon list in backup!") from None return addons_list # pylint: disable=no-value-for-parameter -SCHEMA_SNAPSHOT = vol.Schema( +SCHEMA_BACKUP = vol.Schema( 
{ vol.Required(ATTR_SLUG): vol.Coerce(str), - vol.Required(ATTR_TYPE): vol.In([SNAPSHOT_FULL, SNAPSHOT_PARTIAL]), + vol.Required(ATTR_TYPE): vol.Coerce(BackupType), vol.Required(ATTR_NAME): vol.Coerce(str), vol.Required(ATTR_DATE): vol.Coerce(str), vol.Inclusive(ATTR_PROTECTED, "encrypted"): vol.All( diff --git a/supervisor/bootstrap.py b/supervisor/bootstrap.py index 582c5556a..30041c827 100644 --- a/supervisor/bootstrap.py +++ b/supervisor/bootstrap.py @@ -20,6 +20,7 @@ from .addons import AddonManager from .api import RestAPI from .arch import CpuArch from .auth import Auth +from .backups.manager import BackupManager from .const import ( ENV_HOMEASSISTANT_REPOSITORY, ENV_SUPERVISOR_MACHINE, @@ -47,7 +48,6 @@ from .plugins import PluginManager from .resolution.module import ResolutionManager from .security import Security from .services import ServiceManager -from .snapshots import SnapshotManager from .store import StoreManager from .supervisor import Supervisor from .updater import Updater @@ -71,7 +71,7 @@ async def initialize_coresys() -> CoreSys: coresys.supervisor = Supervisor(coresys) coresys.homeassistant = HomeAssistant(coresys) coresys.addons = AddonManager(coresys) - coresys.snapshots = SnapshotManager(coresys) + coresys.backups = BackupManager(coresys) coresys.host = HostManager(coresys) coresys.hardware = HardwareManager(coresys) coresys.ingress = Ingress(coresys) diff --git a/supervisor/const.py b/supervisor/const.py index 901e5f13e..8997e3854 100644 --- a/supervisor/const.py +++ b/supervisor/const.py @@ -110,6 +110,10 @@ ATTR_AUTH = "auth" ATTR_AUTH_API = "auth_api" ATTR_AUTO_UPDATE = "auto_update" ATTR_AVAILABLE = "available" +ATTR_BACKUP_EXCLUDE = "backup_exclude" +ATTR_BACKUP_POST = "backup_post" +ATTR_BACKUP_PRE = "backup_pre" +ATTR_BACKUPS = "backups" ATTR_BLK_READ = "blk_read" ATTR_BLK_WRITE = "blk_write" ATTR_BOARD = "board" @@ -270,10 +274,6 @@ ATTR_SESSION = "session" ATTR_SIGNAL = "signal" ATTR_SIZE = "size" ATTR_SLUG = "slug" -ATTR_SNAPSHOT_EXCLUDE = "snapshot_exclude" -ATTR_SNAPSHOT_PRE = "snapshot_pre" -ATTR_SNAPSHOT_POST = "snapshot_post" -ATTR_SNAPSHOTS = "snapshots" ATTR_SOURCE = "source" ATTR_SQUASH = "squash" ATTR_SSD = "ssid" @@ -352,9 +352,6 @@ FOLDER_ADDONS = "addons/local" FOLDER_SSL = "ssl" FOLDER_MEDIA = "media" -SNAPSHOT_FULL = "full" -SNAPSHOT_PARTIAL = "partial" - CRYPTO_AES128 = "aes128" SECURITY_PROFILE = "profile" diff --git a/supervisor/core.py b/supervisor/core.py index d55d031ac..5ffdfb71e 100644 --- a/supervisor/core.py +++ b/supervisor/core.py @@ -131,7 +131,7 @@ class Core(CoreSysAttributes): # Load Add-ons self.sys_addons.load(), # load last available data - self.sys_snapshots.load(), + self.sys_backups.load(), # load services self.sys_services.load(), # Load discovery diff --git a/supervisor/coresys.py b/supervisor/coresys.py index fabc49e43..253d3dcda 100644 --- a/supervisor/coresys.py +++ b/supervisor/coresys.py @@ -20,6 +20,7 @@ if TYPE_CHECKING: from .api import RestAPI from .arch import CpuArch from .auth import Auth + from .backups.manager import BackupManager from .core import Core from .dbus.manager import DBusManager from .discovery import Discovery @@ -33,12 +34,11 @@ if TYPE_CHECKING: from .misc.tasks import Tasks from .plugins import PluginManager from .resolution.module import ResolutionManager + from .security import Security from .services import ServiceManager - from .snapshots import SnapshotManager from .store import StoreManager from .supervisor import Supervisor from .updater import Updater - from .security import 
Security T = TypeVar("T") @@ -72,7 +72,7 @@ class CoreSys: self._addons: Optional[AddonManager] = None self._api: Optional[RestAPI] = None self._updater: Optional[Updater] = None - self._snapshots: Optional[SnapshotManager] = None + self._backups: Optional[BackupManager] = None self._tasks: Optional[Tasks] = None self._host: Optional[HostManager] = None self._ingress: Optional[Ingress] = None @@ -277,18 +277,18 @@ class CoreSys: self._store = value @property - def snapshots(self) -> SnapshotManager: - """Return SnapshotManager object.""" - if self._snapshots is None: - raise RuntimeError("SnapshotManager not set!") - return self._snapshots + def backups(self) -> BackupManager: + """Return BackupManager object.""" + if self._backups is None: + raise RuntimeError("BackupManager not set!") + return self._backups - @snapshots.setter - def snapshots(self, value: SnapshotManager) -> None: - """Set a SnapshotManager object.""" - if self._snapshots: - raise RuntimeError("SnapshotsManager already set!") - self._snapshots = value + @backups.setter + def backups(self, value: BackupManager) -> None: + """Set a BackupManager object.""" + if self._backups: + raise RuntimeError("BackupsManager already set!") + self._backups = value @property def tasks(self) -> Tasks: @@ -583,9 +583,9 @@ class CoreSysAttributes: return self.coresys.store @property - def sys_snapshots(self) -> SnapshotManager: - """Return SnapshotManager object.""" - return self.coresys.snapshots + def sys_backups(self) -> BackupManager: + """Return BackupManager object.""" + return self.coresys.backups @property def sys_tasks(self) -> Tasks: diff --git a/supervisor/misc/tasks.py b/supervisor/misc/tasks.py index 7a515f448..f5623a5da 100644 --- a/supervisor/misc/tasks.py +++ b/supervisor/misc/tasks.py @@ -28,7 +28,7 @@ RUN_UPDATE_MULTICAST = 30300 RUN_UPDATE_OBSERVER = 30400 RUN_RELOAD_ADDONS = 10800 -RUN_RELOAD_SNAPSHOTS = 72000 +RUN_RELOAD_BACKUPS = 72000 RUN_RELOAD_HOST = 7600 RUN_RELOAD_UPDATER = 7200 RUN_RELOAD_INGRESS = 930 @@ -73,9 +73,7 @@ class Tasks(CoreSysAttributes): # Reload self.sys_scheduler.register_task(self.sys_store.reload, RUN_RELOAD_ADDONS) self.sys_scheduler.register_task(self.sys_updater.reload, RUN_RELOAD_UPDATER) - self.sys_scheduler.register_task( - self.sys_snapshots.reload, RUN_RELOAD_SNAPSHOTS - ) + self.sys_scheduler.register_task(self.sys_backups.reload, RUN_RELOAD_BACKUPS) self.sys_scheduler.register_task(self.sys_host.reload, RUN_RELOAD_HOST) self.sys_scheduler.register_task(self.sys_ingress.reload, RUN_RELOAD_INGRESS) diff --git a/supervisor/resolution/checks/free_space.py b/supervisor/resolution/checks/free_space.py index 73a982a95..b46f2a64c 100644 --- a/supervisor/resolution/checks/free_space.py +++ b/supervisor/resolution/checks/free_space.py @@ -1,11 +1,12 @@ """Helpers to check and fix issues with free space.""" from typing import List, Optional -from ...const import SNAPSHOT_FULL, CoreState +from ...backups.const import BackupType +from ...const import CoreState from ...coresys import CoreSys from ..const import ( MINIMUM_FREE_SPACE_THRESHOLD, - MINIMUM_FULL_SNAPSHOTS, + MINIMUM_FULL_BACKUPS, ContextType, IssueType, SuggestionType, @@ -25,10 +26,10 @@ class CheckFreeSpace(CheckBase): async def run_check(self) -> None: """Run check if not affected by issue.""" if self.sys_host.info.free_space > MINIMUM_FREE_SPACE_THRESHOLD: - if len(self.sys_snapshots.list_snapshots) == 0: - # No snapshots, let's suggest the user to create one! 
+            if len(self.sys_backups.list_backups) == 0:
+                # No backups, let's suggest the user to create one!
                 self.sys_resolution.suggestions = Suggestion(
-                    SuggestionType.CREATE_FULL_SNAPSHOT, ContextType.SYSTEM
+                    SuggestionType.CREATE_FULL_BACKUP, ContextType.SYSTEM
                 )
             return
 
@@ -37,13 +38,13 @@ class CheckFreeSpace(CheckBase):
             len(
                 [
                     x
-                    for x in self.sys_snapshots.list_snapshots
-                    if x.sys_type == SNAPSHOT_FULL
+                    for x in self.sys_backups.list_backups
+                    if x.sys_type == BackupType.FULL
                 ]
             )
-            >= MINIMUM_FULL_SNAPSHOTS
+            >= MINIMUM_FULL_BACKUPS
         ):
-            suggestions.append(SuggestionType.CLEAR_FULL_SNAPSHOT)
+            suggestions.append(SuggestionType.CLEAR_FULL_BACKUP)
 
         self.sys_resolution.create_issue(
             IssueType.FREE_SPACE, ContextType.SYSTEM, suggestions=suggestions
diff --git a/supervisor/resolution/const.py b/supervisor/resolution/const.py
index 184465472..bd4e7dc8c 100644
--- a/supervisor/resolution/const.py
+++ b/supervisor/resolution/const.py
@@ -9,7 +9,7 @@ FILE_CONFIG_RESOLUTION = Path(SUPERVISOR_DATA, "resolution.json")
 SCHEDULED_HEALTHCHECK = 3600
 
 MINIMUM_FREE_SPACE_THRESHOLD = 1
-MINIMUM_FULL_SNAPSHOTS = 2
+MINIMUM_FULL_BACKUPS = 2
 
 
 class ContextType(str, Enum):
@@ -71,8 +71,8 @@ class IssueType(str, Enum):
 class SuggestionType(str, Enum):
     """Sugestion type."""
 
-    CLEAR_FULL_SNAPSHOT = "clear_full_snapshot"
-    CREATE_FULL_SNAPSHOT = "create_full_snapshot"
+    CLEAR_FULL_BACKUP = "clear_full_backup"
+    CREATE_FULL_BACKUP = "create_full_backup"
     EXECUTE_UPDATE = "execute_update"
     EXECUTE_REPAIR = "execute_repair"
     EXECUTE_RESET = "execute_reset"
diff --git a/supervisor/resolution/fixup.py b/supervisor/resolution/fixup.py
index 164ca830d..71788ef6a 100644
--- a/supervisor/resolution/fixup.py
+++ b/supervisor/resolution/fixup.py
@@ -7,8 +7,8 @@ from ..jobs.const import JobCondition
 from ..jobs.decorator import Job
 from .data import Suggestion
 from .fixups.base import FixupBase
-from .fixups.clear_full_snapshot import FixupClearFullSnapshot
-from .fixups.create_full_snapshot import FixupCreateFullSnapshot
+from .fixups.clear_full_backup import FixupClearFullBackup
+from .fixups.create_full_backup import FixupCreateFullBackup
 from .fixups.store_execute_reload import FixupStoreExecuteReload
 from .fixups.store_execute_remove import FixupStoreExecuteRemove
 from .fixups.store_execute_reset import FixupStoreExecuteReset
@@ -23,8 +23,8 @@ class ResolutionFixup(CoreSysAttributes):
         """Initialize the suggestion class."""
         self.coresys = coresys
 
-        self._create_full_snapshot = FixupCreateFullSnapshot(coresys)
-        self._clear_full_snapshot = FixupClearFullSnapshot(coresys)
+        self._create_full_backup = FixupCreateFullBackup(coresys)
+        self._clear_full_backup = FixupClearFullBackup(coresys)
         self._store_execute_reset = FixupStoreExecuteReset(coresys)
         self._store_execute_reload = FixupStoreExecuteReload(coresys)
         self._store_execute_remove = FixupStoreExecuteRemove(coresys)
@@ -36,8 +36,8 @@ class ResolutionFixup(CoreSysAttributes):
         Order can be important!
""" return [ - self._create_full_snapshot, - self._clear_full_snapshot, + self._create_full_backup, + self._clear_full_backup, self._store_execute_reload, self._store_execute_reset, self._store_execute_remove, diff --git a/supervisor/resolution/fixups/clear_full_snapshot.py b/supervisor/resolution/fixups/clear_full_backup.py similarity index 56% rename from supervisor/resolution/fixups/clear_full_snapshot.py rename to supervisor/resolution/fixups/clear_full_backup.py index 72b5e4652..4754dce2b 100644 --- a/supervisor/resolution/fixups/clear_full_snapshot.py +++ b/supervisor/resolution/fixups/clear_full_backup.py @@ -2,33 +2,33 @@ import logging from typing import List, Optional -from ...const import SNAPSHOT_FULL -from ..const import MINIMUM_FULL_SNAPSHOTS, ContextType, IssueType, SuggestionType +from ...backups.const import BackupType +from ..const import MINIMUM_FULL_BACKUPS, ContextType, IssueType, SuggestionType from .base import FixupBase _LOGGER: logging.Logger = logging.getLogger(__name__) -class FixupClearFullSnapshot(FixupBase): +class FixupClearFullBackup(FixupBase): """Storage class for fixup.""" async def process_fixup(self, reference: Optional[str] = None) -> None: """Initialize the fixup class.""" - full_snapshots = [ - x for x in self.sys_snapshots.list_snapshots if x.sys_type == SNAPSHOT_FULL + full_backups = [ + x for x in self.sys_backups.list_backups if x.sys_type == BackupType.FULL ] - if len(full_snapshots) < MINIMUM_FULL_SNAPSHOTS: + if len(full_backups) < MINIMUM_FULL_BACKUPS: return - _LOGGER.info("Starting removal of old full snapshots") - for snapshot in sorted(full_snapshots, key=lambda x: x.date)[:-1]: - self.sys_snapshots.remove(snapshot) + _LOGGER.info("Starting removal of old full backups") + for backup in sorted(full_backups, key=lambda x: x.date)[:-1]: + self.sys_backups.remove(backup) @property def suggestion(self) -> SuggestionType: """Return a SuggestionType enum.""" - return SuggestionType.CLEAR_FULL_SNAPSHOT + return SuggestionType.CLEAR_FULL_BACKUP @property def context(self) -> ContextType: diff --git a/supervisor/resolution/fixups/create_full_snapshot.py b/supervisor/resolution/fixups/create_full_backup.py similarity index 75% rename from supervisor/resolution/fixups/create_full_snapshot.py rename to supervisor/resolution/fixups/create_full_backup.py index fb1a47b83..0a8a9c32b 100644 --- a/supervisor/resolution/fixups/create_full_snapshot.py +++ b/supervisor/resolution/fixups/create_full_backup.py @@ -8,18 +8,18 @@ from .base import FixupBase _LOGGER: logging.Logger = logging.getLogger(__name__) -class FixupCreateFullSnapshot(FixupBase): +class FixupCreateFullBackup(FixupBase): """Storage class for fixup.""" async def process_fixup(self, reference: Optional[str] = None) -> None: """Initialize the fixup class.""" - _LOGGER.info("Create a full snapshot as backup") - await self.sys_snapshots.do_snapshot_full() + _LOGGER.info("Creating a full backup") + await self.sys_backups.do_backup_full() @property def suggestion(self) -> SuggestionType: """Return a SuggestionType enum.""" - return SuggestionType.CREATE_FULL_SNAPSHOT + return SuggestionType.CREATE_FULL_BACKUP @property def context(self) -> ContextType: diff --git a/supervisor/snapshots/__init__.py b/supervisor/snapshots/__init__.py deleted file mode 100644 index 150e49660..000000000 --- a/supervisor/snapshots/__init__.py +++ /dev/null @@ -1,405 +0,0 @@ -"""Snapshot system control.""" -import asyncio -import logging -from pathlib import Path -from typing import Awaitable, Set - -from 
awesomeversion.awesomeversion import AwesomeVersion -from awesomeversion.exceptions import AwesomeVersionCompare - -from ..const import FOLDER_HOMEASSISTANT, SNAPSHOT_FULL, SNAPSHOT_PARTIAL, CoreState -from ..coresys import CoreSysAttributes -from ..exceptions import AddonsError -from ..jobs.decorator import Job, JobCondition -from ..utils.dt import utcnow -from .snapshot import Snapshot -from .utils import create_slug - -_LOGGER: logging.Logger = logging.getLogger(__name__) - - -class SnapshotManager(CoreSysAttributes): - """Manage snapshots.""" - - def __init__(self, coresys): - """Initialize a snapshot manager.""" - self.coresys = coresys - self.snapshots_obj = {} - self.lock = asyncio.Lock() - - @property - def list_snapshots(self) -> Set[Snapshot]: - """Return a list of all snapshot object.""" - return set(self.snapshots_obj.values()) - - def get(self, slug): - """Return snapshot object.""" - return self.snapshots_obj.get(slug) - - def _create_snapshot(self, name, sys_type, password, homeassistant=True): - """Initialize a new snapshot object from name.""" - date_str = utcnow().isoformat() - slug = create_slug(name, date_str) - tar_file = Path(self.sys_config.path_backup, f"{slug}.tar") - - # init object - snapshot = Snapshot(self.coresys, tar_file) - snapshot.new(slug, name, date_str, sys_type, password) - - # set general data - if homeassistant: - snapshot.store_homeassistant() - - snapshot.store_repositories() - snapshot.store_dockerconfig() - - return snapshot - - def load(self): - """Load exists snapshots data. - - Return a coroutine. - """ - return self.reload() - - async def reload(self): - """Load exists backups.""" - self.snapshots_obj = {} - - async def _load_snapshot(tar_file): - """Load the snapshot.""" - snapshot = Snapshot(self.coresys, tar_file) - if await snapshot.load(): - self.snapshots_obj[snapshot.slug] = snapshot - - tasks = [ - _load_snapshot(tar_file) - for tar_file in self.sys_config.path_backup.glob("*.tar") - ] - - _LOGGER.info("Found %d snapshot files", len(tasks)) - if tasks: - await asyncio.wait(tasks) - - def remove(self, snapshot): - """Remove a snapshot.""" - try: - snapshot.tarfile.unlink() - self.snapshots_obj.pop(snapshot.slug, None) - _LOGGER.info("Removed snapshot file %s", snapshot.slug) - - except OSError as err: - _LOGGER.error("Can't remove snapshot %s: %s", snapshot.slug, err) - return False - - return True - - async def import_snapshot(self, tar_file): - """Check snapshot tarfile and import it.""" - snapshot = Snapshot(self.coresys, tar_file) - - # Read meta data - if not await snapshot.load(): - return None - - # Already exists? - if snapshot.slug in self.snapshots_obj: - _LOGGER.warning( - "Snapshot %s already exists! 
overwriting snapshot", snapshot.slug - ) - self.remove(self.get(snapshot.slug)) - - # Move snapshot to backup - tar_origin = Path(self.sys_config.path_backup, f"{snapshot.slug}.tar") - try: - snapshot.tarfile.rename(tar_origin) - - except OSError as err: - _LOGGER.error("Can't move snapshot file to storage: %s", err) - return None - - # Load new snapshot - snapshot = Snapshot(self.coresys, tar_origin) - if not await snapshot.load(): - return None - _LOGGER.info("Successfully imported %s", snapshot.slug) - - self.snapshots_obj[snapshot.slug] = snapshot - return snapshot - - @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.RUNNING]) - async def do_snapshot_full(self, name="", password=None): - """Create a full snapshot.""" - if self.lock.locked(): - _LOGGER.error("A snapshot/restore process is already running") - return None - - snapshot = self._create_snapshot(name, SNAPSHOT_FULL, password) - _LOGGER.info("Creating new full-snapshot with slug %s", snapshot.slug) - try: - self.sys_core.state = CoreState.FREEZE - await self.lock.acquire() - - async with snapshot: - # Snapshot add-ons - _LOGGER.info("Snapshotting %s store Add-ons", snapshot.slug) - await snapshot.store_addons() - - # Snapshot folders - _LOGGER.info("Snapshotting %s store folders", snapshot.slug) - await snapshot.store_folders() - - except Exception as err: # pylint: disable=broad-except - _LOGGER.exception("Snapshot %s error", snapshot.slug) - self.sys_capture_exception(err) - return None - - else: - _LOGGER.info("Creating full-snapshot with slug %s completed", snapshot.slug) - self.snapshots_obj[snapshot.slug] = snapshot - return snapshot - - finally: - self.sys_core.state = CoreState.RUNNING - self.lock.release() - - @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.RUNNING]) - async def do_snapshot_partial( - self, name="", addons=None, folders=None, password=None, homeassistant=True - ): - """Create a partial snapshot.""" - if self.lock.locked(): - _LOGGER.error("A snapshot/restore process is already running") - return None - - addons = addons or [] - folders = folders or [] - - if len(addons) == 0 and len(folders) == 0 and not homeassistant: - _LOGGER.error("Nothing to create snapshot for") - return - - snapshot = self._create_snapshot( - name, SNAPSHOT_PARTIAL, password, homeassistant - ) - - _LOGGER.info("Creating new partial-snapshot with slug %s", snapshot.slug) - try: - self.sys_core.state = CoreState.FREEZE - await self.lock.acquire() - - async with snapshot: - # Snapshot add-ons - addon_list = [] - for addon_slug in addons: - addon = self.sys_addons.get(addon_slug) - if addon and addon.is_installed: - addon_list.append(addon) - continue - _LOGGER.warning("Add-on %s not found/installed", addon_slug) - - if addon_list: - _LOGGER.info("Snapshotting %s store Add-ons", snapshot.slug) - await snapshot.store_addons(addon_list) - - # Snapshot folders - if folders: - _LOGGER.info("Snapshotting %s store folders", snapshot.slug) - await snapshot.store_folders(folders) - - except Exception as err: # pylint: disable=broad-except - _LOGGER.exception("Snapshot %s error", snapshot.slug) - self.sys_capture_exception(err) - return None - - else: - _LOGGER.info( - "Creating partial-snapshot with slug %s completed", snapshot.slug - ) - self.snapshots_obj[snapshot.slug] = snapshot - return snapshot - - finally: - self.sys_core.state = CoreState.RUNNING - self.lock.release() - - @Job( - conditions=[ - JobCondition.FREE_SPACE, - JobCondition.HEALTHY, - JobCondition.INTERNET_HOST, - JobCondition.INTERNET_SYSTEM, - 
JobCondition.RUNNING, - ] - ) - async def do_restore_full(self, snapshot, password=None): - """Restore a snapshot.""" - if self.lock.locked(): - _LOGGER.error("A snapshot/restore process is already running") - return False - - if snapshot.sys_type != SNAPSHOT_FULL: - _LOGGER.error("%s is only a partial snapshot!", snapshot.slug) - return False - - if snapshot.protected and not snapshot.set_password(password): - _LOGGER.error("Invalid password for snapshot %s", snapshot.slug) - return False - - _LOGGER.info("Full-Restore %s start", snapshot.slug) - try: - self.sys_core.state = CoreState.FREEZE - await self.lock.acquire() - - async with snapshot: - # Stop Home-Assistant / Add-ons - await self.sys_core.shutdown() - - # Restore folders - _LOGGER.info("Restoring %s folders", snapshot.slug) - await snapshot.restore_folders() - - # Restore docker config - _LOGGER.info("Restoring %s Docker Config", snapshot.slug) - snapshot.restore_dockerconfig() - - # Start homeassistant restore - _LOGGER.info("Restoring %s Home-Assistant", snapshot.slug) - snapshot.restore_homeassistant() - task_hass = self._update_core_task(snapshot.homeassistant_version) - - # Restore repositories - _LOGGER.info("Restoring %s Repositories", snapshot.slug) - await snapshot.restore_repositories() - - # Delete delta add-ons - _LOGGER.info("Removing add-ons not in the snapshot %s", snapshot.slug) - for addon in self.sys_addons.installed: - if addon.slug in snapshot.addon_list: - continue - - # Remove Add-on because it's not a part of the new env - # Do it sequential avoid issue on slow IO - try: - await addon.uninstall() - except AddonsError: - _LOGGER.warning("Can't uninstall Add-on %s", addon.slug) - - # Restore add-ons - _LOGGER.info("Restore %s old add-ons", snapshot.slug) - await snapshot.restore_addons() - - # finish homeassistant task - _LOGGER.info("Restore %s wait until homeassistant ready", snapshot.slug) - await task_hass - await self.sys_homeassistant.core.start() - - except Exception as err: # pylint: disable=broad-except - _LOGGER.exception("Restore %s error", snapshot.slug) - self.sys_capture_exception(err) - return False - - else: - _LOGGER.info("Full-Restore %s done", snapshot.slug) - return True - - finally: - self.sys_core.state = CoreState.RUNNING - self.lock.release() - - @Job( - conditions=[ - JobCondition.FREE_SPACE, - JobCondition.HEALTHY, - JobCondition.INTERNET_HOST, - JobCondition.INTERNET_SYSTEM, - JobCondition.RUNNING, - ] - ) - async def do_restore_partial( - self, snapshot, homeassistant=False, addons=None, folders=None, password=None - ): - """Restore a snapshot.""" - if self.lock.locked(): - _LOGGER.error("A snapshot/restore process is already running") - return False - - if snapshot.protected and not snapshot.set_password(password): - _LOGGER.error("Invalid password for snapshot %s", snapshot.slug) - return False - - addons = addons or [] - folders = folders or [] - - _LOGGER.info("Partial-Restore %s start", snapshot.slug) - try: - self.sys_core.state = CoreState.FREEZE - await self.lock.acquire() - - async with snapshot: - # Restore docker config - _LOGGER.info("Restoring %s Docker Config", snapshot.slug) - snapshot.restore_dockerconfig() - - # Stop Home-Assistant for config restore - if FOLDER_HOMEASSISTANT in folders: - await self.sys_homeassistant.core.stop() - snapshot.restore_homeassistant() - - # Process folders - if folders: - _LOGGER.info("Restoring %s folders", snapshot.slug) - await snapshot.restore_folders(folders) - - # Process Home-Assistant - task_hass = None - if homeassistant: - 
_LOGGER.info("Restoring %s Home-Assistant", snapshot.slug) - task_hass = self._update_core_task(snapshot.homeassistant_version) - - if addons: - _LOGGER.info("Restoring %s Repositories", snapshot.slug) - await snapshot.restore_repositories() - - _LOGGER.info("Restoring %s old add-ons", snapshot.slug) - await snapshot.restore_addons(addons) - - # Make sure homeassistant run agen - if task_hass: - _LOGGER.info("Restore %s wait for Home-Assistant", snapshot.slug) - await task_hass - - # Do we need start HomeAssistant? - if not await self.sys_homeassistant.core.is_running(): - await self.sys_homeassistant.core.start() - - # Check If we can access to API / otherwise restart - if not await self.sys_homeassistant.api.check_api_state(): - _LOGGER.warning("Need restart HomeAssistant for API") - await self.sys_homeassistant.core.restart() - - except Exception as err: # pylint: disable=broad-except - _LOGGER.exception("Restore %s error", snapshot.slug) - self.sys_capture_exception(err) - return False - - else: - _LOGGER.info("Partial-Restore %s done", snapshot.slug) - return True - - finally: - self.sys_core.state = CoreState.RUNNING - self.lock.release() - - def _update_core_task(self, version: AwesomeVersion) -> Awaitable[None]: - """Process core update if needed and make awaitable object.""" - - async def _core_update(): - try: - if version == self.sys_homeassistant.version: - return - except (AwesomeVersionCompare, TypeError): - pass - await self.sys_homeassistant.core.update(version) - - return self.sys_create_task(_core_update()) diff --git a/supervisor/utils/tar.py b/supervisor/utils/tar.py index 531c96aa2..a371ec04f 100644 --- a/supervisor/utils/tar.py +++ b/supervisor/utils/tar.py @@ -110,7 +110,7 @@ class SecureTarFile: @property def size(self) -> float: - """Return snapshot size.""" + """Return backup size.""" if not self._name.is_file(): return 0 return round(self._name.stat().st_size / 1_048_576, 2) # calc mbyte diff --git a/tests/addons/test_config.py b/tests/addons/test_config.py index 47635a0db..75bf01376 100644 --- a/tests/addons/test_config.py +++ b/tests/addons/test_config.py @@ -4,6 +4,7 @@ import pytest import voluptuous as vol from supervisor.addons import validate as vd +from supervisor.addons.const import AddonBackupMode from ..common import load_json_fixture @@ -79,6 +80,28 @@ def test_migration_tmpfs(): assert valid_config["tmpfs"] +def test_migration_backup(): + """Migrate snapshot to backup.""" + config = load_json_fixture("basic-addon-config.json") + + config["snapshot"] = AddonBackupMode.HOT + config["snapshot_pre"] = "pre_command" + config["snapshot_post"] = "post_command" + config["snapshot_exclude"] = ["excludeed"] + + valid_config = vd.SCHEMA_ADDON_CONFIG(config) + + assert valid_config.get("snapshot") is None + assert valid_config.get("snapshot_pre") is None + assert valid_config.get("snapshot_post") is None + assert valid_config.get("snapshot_exclude") is None + + assert valid_config["backup"] == AddonBackupMode.HOT + assert valid_config["backup_pre"] == "pre_command" + assert valid_config["backup_post"] == "post_command" + assert valid_config["backup_exclude"] == ["excludeed"] + + def test_invalid_repository(): """Validate basic config with invalid repositories.""" config = load_json_fixture("basic-addon-config.json") diff --git a/tests/api/test_resolution.py b/tests/api/test_resolution.py index 66dcfaea6..cd841b77f 100644 --- a/tests/api/test_resolution.py +++ b/tests/api/test_resolution.py @@ -27,7 +27,7 @@ async def test_api_resolution_base(coresys: CoreSys, 
api_client): """Test resolution manager api.""" coresys.resolution.unsupported = UnsupportedReason.OS coresys.resolution.suggestions = Suggestion( - SuggestionType.CLEAR_FULL_SNAPSHOT, ContextType.SYSTEM + SuggestionType.CLEAR_FULL_BACKUP, ContextType.SYSTEM ) coresys.resolution.create_issue(IssueType.FREE_SPACE, ContextType.SYSTEM) @@ -35,8 +35,7 @@ async def test_api_resolution_base(coresys: CoreSys, api_client): result = await resp.json() assert UnsupportedReason.OS in result["data"][ATTR_UNSUPPORTED] assert ( - SuggestionType.CLEAR_FULL_SNAPSHOT - == result["data"][ATTR_SUGGESTIONS][-1]["type"] + SuggestionType.CLEAR_FULL_BACKUP == result["data"][ATTR_SUGGESTIONS][-1]["type"] ) assert IssueType.FREE_SPACE == result["data"][ATTR_ISSUES][-1]["type"] @@ -44,41 +43,41 @@ async def test_api_resolution_base(coresys: CoreSys, api_client): @pytest.mark.asyncio async def test_api_resolution_dismiss_suggestion(coresys: CoreSys, api_client): """Test resolution manager suggestion apply api.""" - coresys.resolution.suggestions = clear_snapshot = Suggestion( - SuggestionType.CLEAR_FULL_SNAPSHOT, ContextType.SYSTEM + coresys.resolution.suggestions = clear_backup = Suggestion( + SuggestionType.CLEAR_FULL_BACKUP, ContextType.SYSTEM ) - assert SuggestionType.CLEAR_FULL_SNAPSHOT == coresys.resolution.suggestions[-1].type - await api_client.delete(f"/resolution/suggestion/{clear_snapshot.uuid}") - assert clear_snapshot not in coresys.resolution.suggestions + assert SuggestionType.CLEAR_FULL_BACKUP == coresys.resolution.suggestions[-1].type + await api_client.delete(f"/resolution/suggestion/{clear_backup.uuid}") + assert clear_backup not in coresys.resolution.suggestions @pytest.mark.asyncio async def test_api_resolution_apply_suggestion(coresys: CoreSys, api_client): """Test resolution manager suggestion apply api.""" - coresys.resolution.suggestions = clear_snapshot = Suggestion( - SuggestionType.CLEAR_FULL_SNAPSHOT, ContextType.SYSTEM + coresys.resolution.suggestions = clear_backup = Suggestion( + SuggestionType.CLEAR_FULL_BACKUP, ContextType.SYSTEM ) - coresys.resolution.suggestions = create_snapshot = Suggestion( - SuggestionType.CREATE_FULL_SNAPSHOT, ContextType.SYSTEM + coresys.resolution.suggestions = create_backup = Suggestion( + SuggestionType.CREATE_FULL_BACKUP, ContextType.SYSTEM ) - mock_snapshots = AsyncMock() + mock_backups = AsyncMock() mock_health = AsyncMock() - coresys.snapshots.do_snapshot_full = mock_snapshots + coresys.backups.do_backup_full = mock_backups coresys.resolution.healthcheck = mock_health - await api_client.post(f"/resolution/suggestion/{clear_snapshot.uuid}") - await api_client.post(f"/resolution/suggestion/{create_snapshot.uuid}") + await api_client.post(f"/resolution/suggestion/{clear_backup.uuid}") + await api_client.post(f"/resolution/suggestion/{create_backup.uuid}") - assert clear_snapshot not in coresys.resolution.suggestions - assert create_snapshot not in coresys.resolution.suggestions + assert clear_backup not in coresys.resolution.suggestions + assert create_backup not in coresys.resolution.suggestions - assert mock_snapshots.called + assert mock_backups.called assert mock_health.called with pytest.raises(ResolutionError): - await coresys.resolution.apply_suggestion(clear_snapshot) + await coresys.resolution.apply_suggestion(clear_backup) @pytest.mark.asyncio diff --git a/tests/resolution/fixup/test_clear_full_backup.py b/tests/resolution/fixup/test_clear_full_backup.py new file mode 100644 index 000000000..283d23c02 --- /dev/null +++ 
b/tests/resolution/fixup/test_clear_full_backup.py @@ -0,0 +1,55 @@ +"""Test evaluation base.""" +# pylint: disable=import-error,protected-access +from pathlib import Path + +from supervisor.backups.backup import Backup +from supervisor.backups.const import BackupType +from supervisor.const import ATTR_DATE, ATTR_SLUG, ATTR_TYPE +from supervisor.coresys import CoreSys +from supervisor.resolution.const import ContextType, SuggestionType +from supervisor.resolution.data import Suggestion +from supervisor.resolution.fixups.clear_full_backup import FixupClearFullBackup +from supervisor.utils.dt import utcnow +from supervisor.utils.tar import SecureTarFile + + +async def test_fixup(coresys: CoreSys, tmp_path): + """Test fixup.""" + clear_full_backup = FixupClearFullBackup(coresys) + + assert not clear_full_backup.auto + + coresys.resolution.suggestions = Suggestion( + SuggestionType.CLEAR_FULL_BACKUP, ContextType.SYSTEM + ) + + for slug in ["sn1", "sn2", "sn3", "sn4", "sn5"]: + temp_tar = Path(tmp_path, f"{slug}.tar") + with SecureTarFile(temp_tar, "w"): + pass + backup = Backup(coresys, temp_tar) + backup._data = { # pylint: disable=protected-access + ATTR_SLUG: slug, + ATTR_DATE: utcnow().isoformat(), + ATTR_TYPE: BackupType.PARTIAL + if "1" in slug or "5" in slug + else BackupType.FULL, + } + coresys.backups._backups[backup.slug] = backup + + newest_full_backup = coresys.backups._backups["sn4"] + + assert newest_full_backup in coresys.backups.list_backups + assert ( + len([x for x in coresys.backups.list_backups if x.sys_type == BackupType.FULL]) + == 3 + ) + + await clear_full_backup() + assert newest_full_backup in coresys.backups.list_backups + assert ( + len([x for x in coresys.backups.list_backups if x.sys_type == BackupType.FULL]) + == 1 + ) + + assert len(coresys.resolution.suggestions) == 0 diff --git a/tests/resolution/fixup/test_clear_full_snapshot.py b/tests/resolution/fixup/test_clear_full_snapshot.py deleted file mode 100644 index a3bc323f0..000000000 --- a/tests/resolution/fixup/test_clear_full_snapshot.py +++ /dev/null @@ -1,64 +0,0 @@ -"""Test evaluation base.""" -# pylint: disable=import-error,protected-access -from pathlib import Path - -from supervisor.const import ( - ATTR_DATE, - ATTR_SLUG, - ATTR_TYPE, - SNAPSHOT_FULL, - SNAPSHOT_PARTIAL, -) -from supervisor.coresys import CoreSys -from supervisor.resolution.const import ContextType, SuggestionType -from supervisor.resolution.data import Suggestion -from supervisor.resolution.fixups.clear_full_snapshot import FixupClearFullSnapshot -from supervisor.snapshots.snapshot import Snapshot -from supervisor.utils.dt import utcnow -from supervisor.utils.tar import SecureTarFile - - -async def test_fixup(coresys: CoreSys, tmp_path): - """Test fixup.""" - clear_full_snapshot = FixupClearFullSnapshot(coresys) - - assert not clear_full_snapshot.auto - - coresys.resolution.suggestions = Suggestion( - SuggestionType.CLEAR_FULL_SNAPSHOT, ContextType.SYSTEM - ) - - for slug in ["sn1", "sn2", "sn3", "sn4", "sn5"]: - temp_tar = Path(tmp_path, f"{slug}.tar") - with SecureTarFile(temp_tar, "w"): - pass - snapshot = Snapshot(coresys, temp_tar) - snapshot._data = { # pylint: disable=protected-access - ATTR_SLUG: slug, - ATTR_DATE: utcnow().isoformat(), - ATTR_TYPE: SNAPSHOT_PARTIAL - if "1" in slug or "5" in slug - else SNAPSHOT_FULL, - } - coresys.snapshots.snapshots_obj[snapshot.slug] = snapshot - - newest_full_snapshot = coresys.snapshots.snapshots_obj["sn4"] - - assert newest_full_snapshot in coresys.snapshots.list_snapshots - assert ( - 
len( - [x for x in coresys.snapshots.list_snapshots if x.sys_type == SNAPSHOT_FULL] - ) - == 3 - ) - - await clear_full_snapshot() - assert newest_full_snapshot in coresys.snapshots.list_snapshots - assert ( - len( - [x for x in coresys.snapshots.list_snapshots if x.sys_type == SNAPSHOT_FULL] - ) - == 1 - ) - - assert len(coresys.resolution.suggestions) == 0 diff --git a/tests/resolution/fixup/test_create_full_snapshot.py b/tests/resolution/fixup/test_create_full_backup.py similarity index 52% rename from tests/resolution/fixup/test_create_full_snapshot.py rename to tests/resolution/fixup/test_create_full_backup.py index e5b5728f8..54a525802 100644 --- a/tests/resolution/fixup/test_create_full_snapshot.py +++ b/tests/resolution/fixup/test_create_full_backup.py @@ -5,23 +5,23 @@ from unittest.mock import AsyncMock from supervisor.coresys import CoreSys from supervisor.resolution.const import ContextType, SuggestionType from supervisor.resolution.data import Suggestion -from supervisor.resolution.fixups.create_full_snapshot import FixupCreateFullSnapshot +from supervisor.resolution.fixups.create_full_backup import FixupCreateFullBackup async def test_fixup(coresys: CoreSys): """Test fixup.""" - create_full_snapshot = FixupCreateFullSnapshot(coresys) + create_full_backup = FixupCreateFullBackup(coresys) - assert not create_full_snapshot.auto + assert not create_full_backup.auto coresys.resolution.suggestions = Suggestion( - SuggestionType.CREATE_FULL_SNAPSHOT, ContextType.SYSTEM + SuggestionType.CREATE_FULL_BACKUP, ContextType.SYSTEM ) - mock_snapshots = AsyncMock() - coresys.snapshots.do_snapshot_full = mock_snapshots + mock_backups = AsyncMock() + coresys.backups.do_backup_full = mock_backups - await create_full_snapshot() + await create_full_backup() - mock_snapshots.assert_called() + mock_backups.assert_called() assert len(coresys.resolution.suggestions) == 0 diff --git a/tests/resolution/fixup/test_fixup.py b/tests/resolution/fixup/test_fixup.py index 02f2b9069..c92ae4b1f 100644 --- a/tests/resolution/fixup/test_fixup.py +++ b/tests/resolution/fixup/test_fixup.py @@ -12,24 +12,24 @@ async def test_check_autofix(coresys: CoreSys): """Test check for setup.""" coresys.core.state = CoreState.RUNNING - coresys.resolution.fixup._create_full_snapshot.process_fixup = AsyncMock() + coresys.resolution.fixup._create_full_backup.process_fixup = AsyncMock() with patch( - "supervisor.resolution.fixups.create_full_snapshot.FixupCreateFullSnapshot.auto", + "supervisor.resolution.fixups.create_full_backup.FixupCreateFullBackup.auto", return_value=True, ): await coresys.resolution.fixup.run_autofix() - coresys.resolution.fixup._create_full_snapshot.process_fixup.assert_not_called() + coresys.resolution.fixup._create_full_backup.process_fixup.assert_not_called() coresys.resolution.suggestions = Suggestion( - SuggestionType.CREATE_FULL_SNAPSHOT, ContextType.SYSTEM + SuggestionType.CREATE_FULL_BACKUP, ContextType.SYSTEM ) with patch( - "supervisor.resolution.fixups.create_full_snapshot.FixupCreateFullSnapshot.auto", + "supervisor.resolution.fixups.create_full_backup.FixupCreateFullBackup.auto", return_value=True, ): await coresys.resolution.fixup.run_autofix() - coresys.resolution.fixup._create_full_snapshot.process_fixup.assert_called_once() + coresys.resolution.fixup._create_full_backup.process_fixup.assert_called_once() assert len(coresys.resolution.suggestions) == 0 diff --git a/tests/resolution/test_resolution_manager.py b/tests/resolution/test_resolution_manager.py index 40056360e..1f7234b3d 100644 --- 
a/tests/resolution/test_resolution_manager.py +++ b/tests/resolution/test_resolution_manager.py @@ -36,44 +36,44 @@ def test_properies_unhealthy(coresys: CoreSys): @pytest.mark.asyncio async def test_resolution_dismiss_suggestion(coresys: CoreSys): """Test resolution manager suggestion apply api.""" - coresys.resolution.suggestions = clear_snapshot = Suggestion( - SuggestionType.CLEAR_FULL_SNAPSHOT, ContextType.SYSTEM + coresys.resolution.suggestions = clear_backup = Suggestion( + SuggestionType.CLEAR_FULL_BACKUP, ContextType.SYSTEM ) - assert SuggestionType.CLEAR_FULL_SNAPSHOT == coresys.resolution.suggestions[-1].type - coresys.resolution.dismiss_suggestion(clear_snapshot) - assert clear_snapshot not in coresys.resolution.suggestions + assert SuggestionType.CLEAR_FULL_BACKUP == coresys.resolution.suggestions[-1].type + coresys.resolution.dismiss_suggestion(clear_backup) + assert clear_backup not in coresys.resolution.suggestions with pytest.raises(ResolutionError): - coresys.resolution.dismiss_suggestion(clear_snapshot) + coresys.resolution.dismiss_suggestion(clear_backup) @pytest.mark.asyncio async def test_resolution_apply_suggestion(coresys: CoreSys): """Test resolution manager suggestion apply api.""" - coresys.resolution.suggestions = clear_snapshot = Suggestion( - SuggestionType.CLEAR_FULL_SNAPSHOT, ContextType.SYSTEM + coresys.resolution.suggestions = clear_backup = Suggestion( + SuggestionType.CLEAR_FULL_BACKUP, ContextType.SYSTEM ) - coresys.resolution.suggestions = create_snapshot = Suggestion( - SuggestionType.CREATE_FULL_SNAPSHOT, ContextType.SYSTEM + coresys.resolution.suggestions = create_backup = Suggestion( + SuggestionType.CREATE_FULL_BACKUP, ContextType.SYSTEM ) - mock_snapshots = AsyncMock() + mock_backups = AsyncMock() mock_health = AsyncMock() - coresys.snapshots.do_snapshot_full = mock_snapshots + coresys.backups.do_backup_full = mock_backups coresys.resolution.healthcheck = mock_health - await coresys.resolution.apply_suggestion(clear_snapshot) - await coresys.resolution.apply_suggestion(create_snapshot) + await coresys.resolution.apply_suggestion(clear_backup) + await coresys.resolution.apply_suggestion(create_backup) - assert mock_snapshots.called + assert mock_backups.called assert mock_health.called - assert clear_snapshot not in coresys.resolution.suggestions - assert create_snapshot not in coresys.resolution.suggestions + assert clear_backup not in coresys.resolution.suggestions + assert create_backup not in coresys.resolution.suggestions with pytest.raises(ResolutionError): - await coresys.resolution.apply_suggestion(clear_snapshot) + await coresys.resolution.apply_suggestion(clear_backup) @pytest.mark.asyncio
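
The new supervisor/backups/const.py module that these hunks import from (vol.Coerce(BackupType) in the API schema, x.sys_type == BackupType.FULL in the free-space check and fixups) is not included in this section. A minimal sketch of what such a module would contain, assuming it simply mirrors the removed SNAPSHOT_FULL / SNAPSHOT_PARTIAL string constants as a str Enum; treat the exact contents as an assumption, not part of this diff:

"""Hypothetical sketch of supervisor/backups/const.py (assumed; not shown in this diff)."""
from enum import Enum


class BackupType(str, Enum):
    """Backup type, standing in for the removed SNAPSHOT_FULL / SNAPSHOT_PARTIAL constants."""

    FULL = "full"
    PARTIAL = "partial"


# vol.Coerce(BackupType) simply calls BackupType(value), so the raw "full" /
# "partial" strings found in existing backup metadata coerce cleanly, and
# because BackupType subclasses str, comparisons such as backup.sys_type == "full"
# keep working for callers that still pass plain strings.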
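
The retention rule in FixupClearFullBackup, exercised by the new tests/resolution/fixup/test_clear_full_backup.py, keeps only the newest full backup once MINIMUM_FULL_BACKUPS (2) is reached: the full backups are sorted by date and everything except the last entry is removed. A standalone illustration of that slice logic, using made-up slugs and dates rather than real Backup objects:

"""Standalone illustration of the FixupClearFullBackup retention rule (not Supervisor code)."""
from datetime import datetime

MINIMUM_FULL_BACKUPS = 2

# Pretend these are the full backups found on disk, oldest first (made-up slugs/dates).
full_backups = [
    {"slug": "sn2", "date": datetime(2021, 5, 1)},
    {"slug": "sn3", "date": datetime(2021, 5, 2)},
    {"slug": "sn4", "date": datetime(2021, 5, 3)},
]

if len(full_backups) >= MINIMUM_FULL_BACKUPS:
    # sorted(...)[:-1] keeps the newest entry out of the removal list, so only
    # sn2 and sn3 are deleted and sn4 survives -- the same 3 -> 1 outcome that
    # test_clear_full_backup.py asserts.
    to_remove = sorted(full_backups, key=lambda x: x["date"])[:-1]
    print([b["slug"] for b in to_remove])  # ['sn2', 'sn3']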
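
tests/addons/test_config.py::test_migration_backup pins down the behaviour of the add-on option migration: each legacy snapshot* key must disappear from the validated config and its value must reappear under the matching backup* key. The real migration lives in supervisor/addons/validate.py, which is not part of the hunks shown here; the sketch below (the function name migrate_backup_keys and the _RENAMES table are illustrative, not the actual implementation) only captures the key mapping the test asserts:

"""Hypothetical stand-in for the snapshot -> backup option migration exercised by test_migration_backup."""
from typing import Any, Dict

_RENAMES = {
    "snapshot": "backup",
    "snapshot_pre": "backup_pre",
    "snapshot_post": "backup_post",
    "snapshot_exclude": "backup_exclude",
}


def migrate_backup_keys(config: Dict[str, Any]) -> Dict[str, Any]:
    """Move legacy snapshot_* options to their backup_* equivalents."""
    migrated = dict(config)
    for old_key, new_key in _RENAMES.items():
        if old_key in migrated:
            # An explicit backup_* value wins over a migrated legacy value.
            migrated.setdefault(new_key, migrated.pop(old_key))
    return migrated


# Mirrors the test's expectation: the legacy key disappears and its value moves.
assert migrate_backup_keys({"snapshot_pre": "pre_command"}) == {"backup_pre": "pre_command"}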