Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-04-19 10:47:15 +00:00)
WIP: Fix Docker config restore in corner case

Store the Docker config per location to fix a corner case in restore: when an encrypted and an unencrypted copy of the same backup get consolidated, a single shared Docker config can no longer represent both.
This commit is contained in:
parent 9164d35615
commit aa0313f1b9
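The change in a nutshell, as a minimal self-contained sketch (class and attribute names mirror the diff below, but this is an illustration, not the Supervisor implementation): every location a backup exists in keeps its own metadata entry, and the docker property reads from and writes to the entry of the currently selected location instead of one shared _data field.

    from pathlib import Path
    from typing import Any

    # Metadata keys as used in the diff (Supervisor imports these from ..const).
    ATTR_PATH = "path"
    ATTR_PROTECTED = "protected"
    ATTR_DOCKER = "docker"


    class BackupSketch:
        """Illustration only: one metadata entry per storage location."""

        def __init__(self, location: str | None, path: Path) -> None:
            self.location = location  # currently selected location
            self.all_locations: dict[str | None, dict[str, Any]] = {
                location: {ATTR_PATH: path, ATTR_PROTECTED: False, ATTR_DOCKER: {}}
            }

        @property
        def docker(self) -> dict[str, Any]:
            """Return the Docker config stored for the selected location."""
            return self.all_locations[self.location].get(ATTR_DOCKER, {})

        @docker.setter
        def docker(self, value: dict[str, Any]) -> None:
            """Store the Docker config under the selected location."""
            self.all_locations[self.location][ATTR_DOCKER] = value


    backup = BackupSketch(None, Path("/backup/7fed74c8.tar"))
    backup.docker = {"registries": {}}
    # A consolidated, differently protected copy keeps its own entry:
    backup.all_locations[".cloud_backup"] = {
        ATTR_PATH: Path("/cloud_backup/7fed74c8.tar"),
        ATTR_PROTECTED: True,
        ATTR_DOCKER: {},
    }
    assert backup.docker == {"registries": {}}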
@@ -202,12 +202,12 @@ class Backup(JobGroup):
     @property
     def docker(self) -> dict[str, Any]:
         """Return backup Docker config data."""
-        return self._data.get(ATTR_DOCKER, {})
+        return self._locations[self.location].get(ATTR_DOCKER, {})

     @docker.setter
     def docker(self, value: dict[str, Any]) -> None:
         """Set the Docker config data."""
-        self._data[ATTR_DOCKER] = value
+        self._locations[self.location][ATTR_DOCKER] = value

     @property
     def location(self) -> str | None:
@@ -389,7 +389,7 @@ class Backup(JobGroup):
         decrypt = self._aes.decryptor()
         padder = padding.PKCS7(128).unpadder()

+        _LOGGER.info("Decrypting data: %s", data)
         data = padder.update(decrypt.update(b64decode(data))) + padder.finalize()
         return data.decode()
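For orientation, the helper touched above base64-decodes, AES-decrypts, then PKCS7-unpads. A standalone round-trip sketch of the same flow using the cryptography package (the key, IV, and CBC mode here are assumptions for the illustration; the log line above looks like WIP debug output, since it would print sensitive payloads):

    from base64 import b64decode, b64encode
    import os

    from cryptography.hazmat.primitives import padding
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    key, iv = os.urandom(32), os.urandom(16)  # made up for the example


    def encrypt(text: str) -> str:
        padder = padding.PKCS7(128).padder()
        enc = Cipher(algorithms.AES(key), modes.CBC(iv)).encryptor()
        data = padder.update(text.encode()) + padder.finalize()
        return b64encode(enc.update(data) + enc.finalize()).decode()


    def decrypt(data: str) -> str:
        # Same order as the hunk above: base64 -> AES decrypt -> PKCS7 unpad.
        dec = Cipher(algorithms.AES(key), modes.CBC(iv)).decryptor()
        unpadder = padding.PKCS7(128).unpadder()
        raw = unpadder.update(dec.update(b64decode(data)) + dec.finalize())
        return (raw + unpadder.finalize()).decode()


    assert decrypt(encrypt("registry-password")) == "registry-password"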
@@ -486,6 +486,8 @@ class Backup(JobGroup):
         if self._data[ATTR_PROTECTED]:
             self._locations[self.location][ATTR_PROTECTED] = True
+        if self._data[ATTR_DOCKER]:
+            self._locations[self.location][ATTR_DOCKER] = self._data[ATTR_DOCKER]

         return True
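This runs while a backup archive's metadata is parsed: the protected flag and any Docker config found in the archive are recorded under the location the archive was loaded from. A hedged sketch of that bookkeeping (the function name and dict shapes are illustrative, not Supervisor API):

    from typing import Any


    def record_location_metadata(
        parsed: dict[str, Any],
        all_locations: dict[str | None, dict[str, Any]],
        location: str | None,
    ) -> None:
        """Mirror the conditional copy in the hunk above (illustrative)."""
        if parsed.get("protected"):
            all_locations[location]["protected"] = True
        if parsed.get("docker"):
            all_locations[location]["docker"] = parsed["docker"]


    all_locations = {None: {"path": "backup.tar", "protected": False, "docker": {}}}
    record_location_metadata(
        {"protected": False, "docker": {"registries": {}}}, all_locations, None
    )
    assert all_locations[None]["docker"] == {"registries": {}}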
@@ -12,6 +12,7 @@ from shutil import copy
 from ..addons.addon import Addon
 from ..const import (
     ATTR_DAYS_UNTIL_STALE,
+    ATTR_DOCKER,
     ATTR_PATH,
     ATTR_PROTECTED,
     FILE_HASSIO_BACKUPS,
@@ -368,7 +369,11 @@ class BackupManager(FileConfiguration, JobGroup):
         backup.all_locations.update(
             {
-                loc: {ATTR_PATH: path, ATTR_PROTECTED: backup.protected}
+                loc: {
+                    ATTR_PATH: path,
+                    ATTR_PROTECTED: backup.protected,
+                    ATTR_DOCKER: backup.docker,
+                }
                 for loc, path in all_new_locations.items()
             }
         )
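The manager-side counterpart: when a backup lands in additional locations, each new location gets a complete metadata entry seeded from the backup's current state. A runnable sketch of the comprehension with made-up sample data:

    from pathlib import Path

    # Made-up sample data standing in for the manager's state.
    all_new_locations = {
        None: Path("/backup/7fed74c8.tar"),
        ".cloud_backup": Path("/cloud_backup/7fed74c8.tar"),
    }
    protected = False
    docker = {"registries": {}}

    all_locations = {
        loc: {"path": path, "protected": protected, "docker": docker}
        for loc, path in all_new_locations.items()
    }
    assert all_locations[".cloud_backup"]["docker"] == {"registries": {}}

Note that at copy time every new location is seeded with the same config (indeed, as in the diff, the same dict object); the per-location split only pays off later, when copies with different encryption are consolidated.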
@@ -486,7 +486,9 @@ async def test_restore_immediate_errors(
     with (
         patch.object(
-            Backup, "all_locations", new={None: {"path": None, "protected": True}}
+            Backup,
+            "all_locations",
+            new={None: {"path": None, "protected": True, "docker": {}}},
         ),
         patch.object(Backup, "validate_password", return_value=False),
     ):
@@ -638,8 +640,12 @@ async def test_backup_to_multiple_locations(
     assert orig_backup.exists()
     assert copy_backup.exists()
     assert coresys.backups.get(slug).all_locations == {
-        None: {"path": orig_backup, "protected": False},
-        ".cloud_backup": {"path": copy_backup, "protected": False},
+        None: {"path": orig_backup, "protected": False, "docker": {"registries": {}}},
+        ".cloud_backup": {
+            "path": copy_backup,
+            "protected": False,
+            "docker": {"registries": {}},
+        },
     }
     assert coresys.backups.get(slug).location is None
@@ -699,8 +705,16 @@ async def test_upload_to_multiple_locations(
     assert orig_backup.exists()
     assert copy_backup.exists()
     assert coresys.backups.get("7fed74c8").all_locations == {
-        None: {"path": orig_backup, "protected": False},
-        ".cloud_backup": {"path": copy_backup, "protected": False},
+        None: {
+            "path": orig_backup,
+            "protected": False,
+            "docker": {"registries": {}},
+        },
+        ".cloud_backup": {
+            "path": copy_backup,
+            "protected": False,
+            "docker": {"registries": {}},
+        },
     }
     assert coresys.backups.get("7fed74c8").location is None
@@ -714,7 +728,7 @@ async def test_upload_duplicate_backup_new_location(
     orig_backup = Path(copy(backup_file, coresys.config.path_backup))
     await coresys.backups.reload()
     assert coresys.backups.get("7fed74c8").all_locations == {
-        None: {"path": orig_backup, "protected": False}
+        None: {"path": orig_backup, "protected": False, "docker": {"registries": {}}}
     }

     with backup_file.open("rb") as file, MultipartWriter("form-data") as mp:
@@ -731,8 +745,12 @@ async def test_upload_duplicate_backup_new_location(
     assert orig_backup.exists()
     assert copy_backup.exists()
     assert coresys.backups.get("7fed74c8").all_locations == {
-        None: {"path": orig_backup, "protected": False},
-        ".cloud_backup": {"path": copy_backup, "protected": False},
+        None: {"path": orig_backup, "protected": False, "docker": {"registries": {}}},
+        ".cloud_backup": {
+            "path": copy_backup,
+            "protected": False,
+            "docker": {"registries": {}},
+        },
     }
     assert coresys.backups.get("7fed74c8").location is None
@@ -769,7 +787,7 @@ async def test_upload_with_filename(
     orig_backup = coresys.config.path_backup / filename
     assert orig_backup.exists()
     assert coresys.backups.get("7fed74c8").all_locations == {
-        None: {"path": orig_backup, "protected": False}
+        None: {"path": orig_backup, "protected": False, "docker": {"registries": {}}}
     }
     assert coresys.backups.get("7fed74c8").location is None
@@ -802,8 +820,12 @@ async def test_remove_backup_from_location(api_client: TestClient, coresys: Core
     await coresys.backups.reload()
     assert (backup := coresys.backups.get("7fed74c8"))
     assert backup.all_locations == {
-        None: {"path": location_1, "protected": False},
-        ".cloud_backup": {"path": location_2, "protected": False},
+        None: {"path": location_1, "protected": False, "docker": {"registries": {}}},
+        ".cloud_backup": {
+            "path": location_2,
+            "protected": False,
+            "docker": {"registries": {}},
+        },
     }

     resp = await api_client.delete(
@@ -814,7 +836,9 @@ async def test_remove_backup_from_location(api_client: TestClient, coresys: Core
     assert location_1.exists()
     assert not location_2.exists()
     assert coresys.backups.get("7fed74c8")
-    assert backup.all_locations == {None: {"path": location_1, "protected": False}}
+    assert backup.all_locations == {
+        None: {"path": location_1, "protected": False, "docker": {"registries": {}}}
+    }


 @pytest.mark.usefixtures("tmp_supervisor_data")
@@ -973,11 +997,6 @@ async def test_restore_backup_unencrypted_after_encrypted(
         ".cloud_backup": {"path": Path(unc_tar), "protected": False},
     }

-    # pylint: disable=fixme
-    # TODO: There is a bug in the restore code that causes the restore to fail
-    # if the backup contains a Docker registry configuration and one location
-    # is encrypted and the other is not (just like our test fixture).
-    # We punt the ball on this one for this PR since this is a rare edge case.
     backup.restore_dockerconfig = MagicMock()

     coresys.core.state = CoreState.RUNNING
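The comments removed above describe exactly the corner case this commit targets: with one shared Docker config, an encrypted and an unencrypted copy of the same backup cannot both be represented, since (as the removed TODO suggests) the encrypted copy's registry credentials are only readable with the backup password. The `restore_dockerconfig` stub is still in place in this WIP state, but per-location storage lets restore use the config belonging to the location actually being restored. A toy illustration of why one shared field is not enough (all data made up):

    # Two views of the same backup's Docker config, one per archive copy.
    parsed_from_encrypted = {"registries": {"registry.example": {"password": "<ciphertext>"}}}
    parsed_from_unencrypted = {"registries": {"registry.example": {"password": "hunter2"}}}

    shared: dict = {}
    shared["docker"] = parsed_from_encrypted
    shared["docker"] = parsed_from_unencrypted  # last parse wins, first view is lost

    per_location = {
        None: {"docker": parsed_from_unencrypted},
        ".cloud_backup": {"docker": parsed_from_encrypted},
    }
    # Restore can now pick the config matching the location being restored:
    assert per_location[None]["docker"]["registries"]["registry.example"]["password"] == "hunter2"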
@@ -584,7 +584,6 @@ async def mock_full_backup(coresys: CoreSys, tmp_path) -> Backup:
     mock_backup = Backup(coresys, Path(tmp_path, "test_backup.tar"), "test", None)
     mock_backup.new("Test", utcnow().isoformat(), BackupType.FULL)
     mock_backup.repositories = ["https://github.com/awesome-developer/awesome-repo"]
-    mock_backup.docker = {}
     mock_backup._data[ATTR_ADDONS] = [
         {
             ATTR_SLUG: "local_ssh",
@@ -609,7 +608,6 @@ async def mock_partial_backup(coresys: CoreSys, tmp_path) -> Backup:
     mock_backup = Backup(coresys, Path(tmp_path, "test_backup.tar"), "test", None)
     mock_backup.new("Test", utcnow().isoformat(), BackupType.PARTIAL)
     mock_backup.repositories = ["https://github.com/awesome-developer/awesome-repo"]
-    mock_backup.docker = {}
     mock_backup._data[ATTR_ADDONS] = [
         {
             ATTR_SLUG: "local_ssh",
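Both fixtures drop `mock_backup.docker = {}`. With the new accessors the line is redundant, presumably because the getter already falls back to an empty dict (and the setter now writes into the per-location table rather than `_data`). A quick illustration of the fallback, reusing the shape sketched at the top:

    locations: dict[str | None, dict] = {
        None: {"path": "test_backup.tar", "protected": False}
    }


    def get_docker(location: str | None) -> dict:
        # Mirrors the new getter: a location without stored config yields {}.
        return locations[location].get("docker", {})


    assert get_docker(None) == {}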