Don't pollute config dir with deleted duplicated statistics (#62489)

Author: Erik Montnemery, 2021-12-21 14:27:35 +01:00 (committed by GitHub)
parent cbcd6d458e
commit cab2a74b5f
2 changed files with 5 additions and 2 deletions

File 1 of 2:

@@ -9,6 +9,7 @@ from datetime import datetime, timedelta
 from itertools import chain, groupby
 import json
 import logging
+import os
 import re
 from statistics import mean
 from typing import TYPE_CHECKING, Any, Literal
@@ -365,8 +366,10 @@ def delete_duplicates(instance: Recorder, session: scoped_session) -> None:
     if non_identical_duplicates:
         isotime = dt_util.utcnow().isoformat()
-        backup_file_name = f"deleted_statistics.{isotime}.json"
+        backup_file_name = f".deleted_statistics/deleted_statistics.{isotime}.json"
         backup_path = instance.hass.config.path(backup_file_name)
+
+        os.makedirs(os.path.dirname(backup_path), exist_ok=True)
         with open(backup_path, "w", encoding="utf8") as backup_file:
             json.dump(
                 non_identical_duplicates,
                 backup_file,
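The fix hinges on creating the parent directory before opening the backup file: open(path, "w") raises FileNotFoundError when intermediate directories are missing, and the config dir itself stays uncluttered because the backups now land under the hidden .deleted_statistics/ subdirectory. A minimal standalone sketch of the same pattern, assuming a plain config-dir path (the write_backup helper and its config_dir parameter are hypothetical, not part of this commit):

import json
import os
from datetime import datetime, timezone


def write_backup(config_dir: str, data: list) -> str:
    # Build the target path first, mirroring hass.config.path() in the diff.
    isotime = datetime.now(timezone.utc).isoformat()
    backup_path = os.path.join(
        config_dir, ".deleted_statistics", f"deleted_statistics.{isotime}.json"
    )
    # Create the hidden subdirectory before writing; exist_ok=True keeps
    # repeated runs from failing once the directory already exists.
    os.makedirs(os.path.dirname(backup_path), exist_ok=True)
    with open(backup_path, "w", encoding="utf8") as backup_file:
        json.dump(data, backup_file, indent=4)
    return backup_path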

File 2 of 2:

@@ -993,7 +993,7 @@ def test_delete_duplicates_non_identical(caplog, tmpdir):
     assert "Found duplicated" not in caplog.text
     isotime = dt_util.utcnow().isoformat()
-    backup_file_name = f"deleted_statistics.{isotime}.json"
+    backup_file_name = f".deleted_statistics/deleted_statistics.{isotime}.json"
     with open(hass.config.path(backup_file_name)) as backup_file:
         backup = json.load(backup_file)
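The test only needs to mirror the new relative path, since hass.config.path() resolves it against the tmpdir-backed test config dir. As an illustration only, a round-trip check for the write_backup sketch above, using pytest's built-in tmp_path fixture (hypothetical, not the commit's actual test):

def test_write_backup_roundtrip(tmp_path):
    # Round-trip: write through the helper, then read the JSON back.
    rows = [{"statistic_id": "sensor.energy", "sum": 1.5}]
    backup_path = write_backup(str(tmp_path), rows)
    # The file must land inside the hidden subdirectory, not the config root.
    assert os.path.basename(os.path.dirname(backup_path)) == ".deleted_statistics"
    with open(backup_path, encoding="utf8") as backup_file:
        assert json.load(backup_file) == rows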