Compare commits

..

1 Commit

8 changed files with 193 additions and 222 deletions

View File

@ -28,22 +28,22 @@ RUN \
\
&& curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
&& chmod a+x /usr/bin/cosign \
&& pip3 install uv==0.6.1
&& pip3 install uv==0.2.21
# Install requirements
COPY requirements.txt .
RUN \
if [ "${BUILD_ARCH}" = "i386" ]; then \
linux32 uv pip install --compile-bytecode --no-build -r requirements.txt; \
linux32 uv pip install --no-build -r requirements.txt; \
else \
uv pip install --compile-bytecode --no-build -r requirements.txt; \
uv pip install --no-build -r requirements.txt; \
fi \
&& rm -f requirements.txt
# Install Home Assistant Supervisor
COPY . supervisor
RUN \
uv pip install -e ./supervisor \
pip3 install -e ./supervisor \
&& python3 -m compileall ./supervisor/supervisor

View File

@ -94,7 +94,7 @@ class Backup(JobGroup):
coresys, JOB_GROUP_BACKUP.format_map(defaultdict(str, slug=slug)), slug
)
self._data: dict[str, Any] = data or {ATTR_SLUG: slug}
self._tmp: TemporaryDirectory = None
self._tmp = None
self._outer_secure_tarfile: SecureTarFile | None = None
self._key: bytes | None = None
self._aes: Cipher | None = None
@ -463,31 +463,23 @@ class Backup(JobGroup):
@asynccontextmanager
async def create(self) -> AsyncGenerator[None]:
"""Create new backup file."""
def _open_outer_tarfile():
"""Create and open outer tarfile."""
if self.tarfile.is_file():
raise BackupError(
f"Cannot make new backup at {self.tarfile.as_posix()}, file already exists!",
_LOGGER.error,
)
outer_secure_tarfile = SecureTarFile(
self.tarfile,
"w",
gzip=False,
bufsize=BUF_SIZE,
if self.tarfile.is_file():
raise BackupError(
f"Cannot make new backup at {self.tarfile.as_posix()}, file already exists!",
_LOGGER.error,
)
return outer_secure_tarfile, outer_secure_tarfile.open()
self._outer_secure_tarfile, outer_tarfile = await self.sys_run_in_executor(
_open_outer_tarfile
self._outer_secure_tarfile = SecureTarFile(
self.tarfile,
"w",
gzip=False,
bufsize=BUF_SIZE,
)
try:
yield
with self._outer_secure_tarfile as outer_tarfile:
yield
await self._create_cleanup(outer_tarfile)
finally:
await self._create_cleanup(outer_tarfile)
await self.sys_run_in_executor(self._outer_secure_tarfile.close)
self._outer_secure_tarfile = None
@asynccontextmanager
@ -504,34 +496,28 @@ class Backup(JobGroup):
if location == DEFAULT
else self.all_locations[location][ATTR_PATH]
)
if not backup_tarfile.is_file():
self.sys_create_task(self.sys_backups.reload(location))
raise BackupFileNotFoundError(
f"Cannot open backup at {backup_tarfile.as_posix()}, file does not exist!",
_LOGGER.error,
)
# extract an existing backup
def _extract_backup():
if not backup_tarfile.is_file():
raise BackupFileNotFoundError(
f"Cannot open backup at {backup_tarfile.as_posix()}, file does not exist!",
_LOGGER.error,
)
tmp = TemporaryDirectory(dir=str(backup_tarfile.parent))
self._tmp = TemporaryDirectory(dir=str(backup_tarfile.parent))
def _extract_backup():
"""Extract a backup."""
with tarfile.open(backup_tarfile, "r:") as tar:
tar.extractall(
path=tmp.name,
path=self._tmp.name,
members=secure_path(tar),
filter="fully_trusted",
)
return tmp
try:
self._tmp = await self.sys_run_in_executor(_extract_backup)
with self._tmp:
await self.sys_run_in_executor(_extract_backup)
yield
except BackupFileNotFoundError as err:
self.sys_create_task(self.sys_backups.reload(location))
raise err
finally:
if self._tmp:
self._tmp.cleanup()
async def _create_cleanup(self, outer_tarfile: TarFile) -> None:
"""Cleanup after backup creation.
@ -683,16 +669,17 @@ class Backup(JobGroup):
async def _folder_save(self, name: str):
"""Take backup of a folder."""
self.sys_jobs.current.reference = name
slug_name = name.replace("/", "_")
tar_name = f"{slug_name}.tar{'.gz' if self.compressed else ''}"
origin_dir = Path(self.sys_config.path_supervisor, name)
def _save() -> bool:
# Check if exists
if not origin_dir.is_dir():
_LOGGER.warning("Can't find backup folder %s", name)
return False
# Check if exists
if not origin_dir.is_dir():
_LOGGER.warning("Can't find backup folder %s", name)
return
def _save() -> None:
# Take backup
_LOGGER.info("Backing up folder %s", name)
@ -725,16 +712,16 @@ class Backup(JobGroup):
)
_LOGGER.info("Backup folder %s done", name)
return True
try:
if await self.sys_run_in_executor(_save):
self._data[ATTR_FOLDERS].append(name)
await self.sys_run_in_executor(_save)
except (tarfile.TarError, OSError) as err:
raise BackupError(
f"Can't backup folder {name}: {str(err)}", _LOGGER.error
) from err
self._data[ATTR_FOLDERS].append(name)
@Job(name="backup_store_folders", cleanup=False)
async def store_folders(self, folder_list: list[str]):
"""Backup Supervisor data into backup."""
@ -753,18 +740,28 @@ class Backup(JobGroup):
)
origin_dir = Path(self.sys_config.path_supervisor, name)
# Check if exists inside backup
if not tar_name.exists():
raise BackupInvalidError(
f"Can't find restore folder {name}", _LOGGER.warning
)
# Unmount any mounts within folder
bind_mounts = [
bound.bind_mount
for bound in self.sys_mounts.bound_mounts
if bound.bind_mount.local_where
and bound.bind_mount.local_where.is_relative_to(origin_dir)
]
if bind_mounts:
await asyncio.gather(*[bind_mount.unmount() for bind_mount in bind_mounts])
# Clean old stuff
if origin_dir.is_dir():
await remove_folder(origin_dir, content_only=True)
# Perform a restore
def _restore() -> bool:
# Check if exists inside backup
if not tar_name.exists():
raise BackupInvalidError(
f"Can't find restore folder {name}", _LOGGER.warning
)
# Clean old stuff
if origin_dir.is_dir():
remove_folder(origin_dir, content_only=True)
try:
_LOGGER.info("Restore folder %s", name)
with SecureTarFile(
@ -784,16 +781,6 @@ class Backup(JobGroup):
) from err
return True
# Unmount any mounts within folder
bind_mounts = [
bound.bind_mount
for bound in self.sys_mounts.bound_mounts
if bound.bind_mount.local_where
and bound.bind_mount.local_where.is_relative_to(origin_dir)
]
if bind_mounts:
await asyncio.gather(*[bind_mount.unmount() for bind_mount in bind_mounts])
try:
return await self.sys_run_in_executor(_restore)
finally:

View File

@ -118,24 +118,14 @@ class BackupManager(FileConfiguration, JobGroup):
location = self.sys_mounts.default_backup_mount
if location:
location_mount: Mount = location
return location_mount.local_where
if not location.local_where.is_mount():
raise BackupMountDownError(
f"{location.name} is down, cannot back-up to it", _LOGGER.error
)
return location.local_where
return self.sys_config.path_backup
async def _check_location(self, location: LOCATION_TYPE | type[DEFAULT] = DEFAULT):
"""Check if backup location is accessible."""
if location == DEFAULT and self.sys_mounts.default_backup_mount:
location = self.sys_mounts.default_backup_mount
if location not in (DEFAULT, LOCATION_CLOUD_BACKUP, None):
location_mount: Mount = location
if not await location_mount.is_mounted():
raise BackupMountDownError(
f"{location_mount.name} is down, cannot back-up to it",
_LOGGER.error,
)
def _get_location_name(
self,
location: LOCATION_TYPE | type[DEFAULT] = DEFAULT,
@ -362,14 +352,8 @@ class BackupManager(FileConfiguration, JobGroup):
copy(backup.tarfile, self.sys_config.path_core_backup)
)
elif location:
location_mount: Mount = location
if not location_mount.local_where.is_mount():
raise BackupMountDownError(
f"{location_mount.name} is down, cannot copy to it",
_LOGGER.error,
)
all_locations[location_mount.name] = Path(
copy(backup.tarfile, location_mount.local_where)
all_locations[location.name] = Path(
copy(backup.tarfile, location.local_where)
)
else:
all_locations[None] = Path(
@ -411,8 +395,6 @@ class BackupManager(FileConfiguration, JobGroup):
additional_locations: list[LOCATION_TYPE] | None = None,
) -> Backup | None:
"""Check backup tarfile and import it."""
await self._check_location(location)
backup = Backup(self.coresys, tar_file, "temp", None)
# Read meta data
@ -560,8 +542,6 @@ class BackupManager(FileConfiguration, JobGroup):
additional_locations: list[LOCATION_TYPE] | None = None,
) -> Backup | None:
"""Create a full backup."""
await self._check_location(location)
if self._get_base_path(location) in {
self.sys_config.path_backup,
self.sys_config.path_core_backup,
@ -610,8 +590,6 @@ class BackupManager(FileConfiguration, JobGroup):
additional_locations: list[LOCATION_TYPE] | None = None,
) -> Backup | None:
"""Create a partial backup."""
await self._check_location(location)
if self._get_base_path(location) in {
self.sys_config.path_backup,
self.sys_config.path_core_backup,

View File

@ -9,7 +9,6 @@ from pathlib import Path, PurePath
import shutil
import tarfile
from tempfile import TemporaryDirectory
from typing import Any
from uuid import UUID
from awesomeversion import AwesomeVersion, AwesomeVersionException
@ -47,7 +46,7 @@ from ..hardware.const import PolicyGroup
from ..hardware.data import Device
from ..jobs.decorator import Job, JobExecutionLimit
from ..resolution.const import UnhealthyReason
from ..utils import remove_folder, remove_folder_with_excludes
from ..utils import remove_folder
from ..utils.common import FileConfiguration
from ..utils.json import read_json_file, write_json_file
from .api import HomeAssistantAPI
@ -458,94 +457,91 @@ class HomeAssistant(FileConfiguration, CoreSysAttributes):
self, tar_file: tarfile.TarFile, exclude_database: bool = False
) -> None:
"""Restore Home Assistant Core config/ directory."""
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
temp_path = Path(temp)
temp_data = temp_path.joinpath("data")
temp_meta = temp_path.joinpath("homeassistant.json")
def _restore_home_assistant() -> Any:
"""Restores data and reads metadata from backup.
Returns: Home Assistant metadata
"""
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
temp_path = Path(temp)
temp_data = temp_path.joinpath("data")
temp_meta = temp_path.joinpath("homeassistant.json")
# extract backup
try:
with tar_file as backup:
backup.extractall(
path=temp_path,
members=secure_path(backup),
filter="fully_trusted",
)
except tarfile.TarError as err:
raise HomeAssistantError(
f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
) from err
# Check old backup format v1
if not temp_data.exists():
temp_data = temp_path
_LOGGER.info("Restore Home Assistant Core config folder")
if exclude_database:
remove_folder_with_excludes(
self.sys_config.path_homeassistant,
excludes=HOMEASSISTANT_BACKUP_EXCLUDE_DATABASE,
tmp_dir=self.sys_config.path_tmp,
# extract backup
def _extract_tarfile():
"""Extract tar backup."""
with tar_file as backup:
backup.extractall(
path=temp_path,
members=secure_path(backup),
filter="fully_trusted",
)
else:
remove_folder(self.sys_config.path_homeassistant)
try:
shutil.copytree(
temp_data,
self.sys_config.path_homeassistant,
symlinks=True,
dirs_exist_ok=True,
)
except shutil.Error as err:
raise HomeAssistantError(
f"Can't restore origin data: {err}", _LOGGER.error
) from err
try:
await self.sys_run_in_executor(_extract_tarfile)
except tarfile.TarError as err:
raise HomeAssistantError(
f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
) from err
_LOGGER.info("Restore Home Assistant Core config folder done")
# Check old backup format v1
if not temp_data.exists():
temp_data = temp_path
if not temp_meta.exists():
return None
_LOGGER.info("Restore Home Assistant Core metadata")
# Restore data
def _restore_data():
"""Restore data."""
shutil.copytree(
temp_data,
self.sys_config.path_homeassistant,
symlinks=True,
dirs_exist_ok=True,
)
# Read backup data
try:
data = read_json_file(temp_meta)
except ConfigurationFileError as err:
raise HomeAssistantError() from err
_LOGGER.info("Restore Home Assistant Core config folder")
excludes = (
HOMEASSISTANT_BACKUP_EXCLUDE_DATABASE if exclude_database else None
)
await remove_folder(
self.sys_config.path_homeassistant,
content_only=True,
excludes=excludes,
tmp_dir=self.sys_config.path_tmp,
)
try:
await self.sys_run_in_executor(_restore_data)
except shutil.Error as err:
raise HomeAssistantError(
f"Can't restore origin data: {err}", _LOGGER.error
) from err
return data
_LOGGER.info("Restore Home Assistant Core config folder done")
data = await self.sys_run_in_executor(_restore_home_assistant)
if data is None:
return
if not temp_meta.exists():
return
_LOGGER.info("Restore Home Assistant Core metadata")
# Validate metadata
try:
data = SCHEMA_HASS_CONFIG(data)
except vol.Invalid as err:
raise HomeAssistantError(
f"Can't validate backup data: {humanize_error(data, err)}",
_LOGGER.error,
) from err
# Read backup data
try:
data = read_json_file(temp_meta)
except ConfigurationFileError as err:
raise HomeAssistantError() from err
# Restore metadata
for attr in (
ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT,
ATTR_PORT,
ATTR_SSL,
ATTR_REFRESH_TOKEN,
ATTR_WATCHDOG,
):
if attr in data:
self._data[attr] = data[attr]
# Validate
try:
data = SCHEMA_HASS_CONFIG(data)
except vol.Invalid as err:
raise HomeAssistantError(
f"Can't validate backup data: {humanize_error(data, err)}",
_LOGGER.err,
) from err
# Restore metadata
for attr in (
ATTR_AUDIO_INPUT,
ATTR_AUDIO_OUTPUT,
ATTR_PORT,
ATTR_SSL,
ATTR_REFRESH_TOKEN,
ATTR_WATCHDOG,
):
if attr in data:
self._data[attr] = data[attr]
@Job(
name="home_assistant_get_users",

View File

@ -40,7 +40,7 @@ class FixupStoreExecuteReset(FixupBase):
_LOGGER.warning("Can't find store %s for fixup", reference)
return
await self.sys_run_in_executor(remove_folder, repository.git.path)
await remove_folder(repository.git.path)
# Load data again
try:

View File

@ -189,13 +189,9 @@ class GitRepo(CoreSysAttributes):
_LOGGER.warning("There is already a task in progress")
return
def _remove_git_dir(path: Path) -> None:
if not path.is_dir():
return
remove_folder(path)
async with self.lock:
await self.sys_run_in_executor(_remove_git_dir, self.path)
if not self.path.is_dir():
return
await remove_folder(self.path)
class GitRepoHassIO(GitRepo):

View File

@ -8,7 +8,6 @@ import os
from pathlib import Path
import re
import socket
import subprocess
from tempfile import TemporaryDirectory
from typing import Any
@ -81,9 +80,11 @@ def get_message_from_exception_chain(err: Exception) -> str:
return get_message_from_exception_chain(err.__context__)
def remove_folder(
async def remove_folder(
folder: Path,
content_only: bool = False,
excludes: list[str] | None = None,
tmp_dir: Path | None = None,
) -> None:
"""Remove folder and reset privileged.
@ -91,40 +92,48 @@ def remove_folder(
- CAP_DAC_OVERRIDE
- CAP_DAC_READ_SEARCH
"""
find_args = []
if content_only:
find_args.extend(["-mindepth", "1"])
try:
proc = subprocess.run(
["/usr/bin/find", str(folder), "-xdev", *find_args, "-delete"],
stdout=subprocess.DEVNULL,
stderr=subprocess.PIPE,
env=clean_env(),
text=True,
check=True,
)
if proc.returncode != 0:
_LOGGER.error("Can't remove folder %s: %s", folder, proc.stderr.strip())
except OSError as err:
_LOGGER.exception("Can't remove folder %s: %s", folder, err)
if excludes:
if not tmp_dir:
raise ValueError("tmp_dir is required if excludes are provided")
if not content_only:
raise ValueError("Cannot delete the folder if excludes are provided")
def remove_folder_with_excludes(
folder: Path,
excludes: list[str],
tmp_dir: Path | None = None,
) -> None:
"""Remove folder with excludes."""
with TemporaryDirectory(dir=tmp_dir) as temp_path:
temp_path = Path(temp_path)
temp = TemporaryDirectory(dir=tmp_dir)
temp_path = Path(temp.name)
moved_files: list[Path] = []
for item in folder.iterdir():
if any(item.match(exclude) for exclude in excludes):
moved_files.append(item.rename(temp_path / item.name))
remove_folder(folder, content_only=True)
for item in moved_files:
item.rename(folder / item.name)
find_args = []
if content_only:
find_args.extend(["-mindepth", "1"])
try:
proc = await asyncio.create_subprocess_exec(
"/usr/bin/find",
folder,
"-xdev",
*find_args,
"-delete",
stdout=asyncio.subprocess.DEVNULL,
stderr=asyncio.subprocess.PIPE,
env=clean_env(),
)
_, error_msg = await proc.communicate()
except OSError as err:
_LOGGER.exception("Can't remove folder %s: %s", folder, err)
else:
if proc.returncode == 0:
return
_LOGGER.error(
"Can't remove folder %s: %s", folder, error_msg.decode("utf-8").strip()
)
finally:
if excludes:
for item in moved_files:
item.rename(folder / item.name)
temp.cleanup()
def clean_env() -> dict[str, str]:

View File

@ -3,10 +3,13 @@
from pathlib import Path
import shutil
import pytest
from supervisor.utils import remove_folder
def test_remove_all(tmp_path):
@pytest.mark.asyncio
async def test_remove_all(tmp_path):
"""Test remove folder."""
# Prepare test folder
temp_orig = tmp_path.joinpath("orig")
@ -14,11 +17,12 @@ def test_remove_all(tmp_path):
shutil.copytree(fixture_data, temp_orig, symlinks=True)
assert temp_orig.exists()
remove_folder(temp_orig)
await remove_folder(temp_orig)
assert not temp_orig.exists()
def test_remove_content(tmp_path):
@pytest.mark.asyncio
async def test_remove_content(tmp_path):
"""Test remove content of folder."""
# Prepare test folder
temp_orig = tmp_path.joinpath("orig")
@ -34,7 +38,8 @@ def test_remove_content(tmp_path):
assert test_folder.exists()
assert test_file.exists()
assert test_hidden.exists()
remove_folder(temp_orig, content_only=True)
await remove_folder(temp_orig, content_only=True)
assert not test_folder.exists()
assert not test_file.exists()