diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py
index 5310c8ed9f3..49b1f890a74 100644
--- a/homeassistant/components/recorder/statistics.py
+++ b/homeassistant/components/recorder/statistics.py
@@ -9,6 +9,7 @@ from datetime import datetime, timedelta
 from itertools import chain, groupby
 import json
 import logging
+import os
 import re
 from statistics import mean
 from typing import TYPE_CHECKING, Any, Literal
@@ -365,8 +366,10 @@ def delete_duplicates(instance: Recorder, session: scoped_session) -> None:
 
     if non_identical_duplicates:
         isotime = dt_util.utcnow().isoformat()
-        backup_file_name = f"deleted_statistics.{isotime}.json"
+        backup_file_name = f".deleted_statistics/deleted_statistics.{isotime}.json"
         backup_path = instance.hass.config.path(backup_file_name)
+
+        os.makedirs(os.path.dirname(backup_path), exist_ok=True)
         with open(backup_path, "w", encoding="utf8") as backup_file:
             json.dump(
                 non_identical_duplicates,
diff --git a/tests/components/recorder/test_statistics.py b/tests/components/recorder/test_statistics.py
index bd10d1e9612..77055b172ce 100644
--- a/tests/components/recorder/test_statistics.py
+++ b/tests/components/recorder/test_statistics.py
@@ -993,7 +993,7 @@ def test_delete_duplicates_non_identical(caplog, tmpdir):
     assert "Found duplicated" not in caplog.text
 
     isotime = dt_util.utcnow().isoformat()
-    backup_file_name = f"deleted_statistics.{isotime}.json"
+    backup_file_name = f".deleted_statistics/deleted_statistics.{isotime}.json"
     with open(hass.config.path(backup_file_name)) as backup_file:
         backup = json.load(backup_file)
 
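
For reference, the backup write after this change behaves roughly like the standalone sketch below. It is a minimal illustration only, not the recorder's actual code: write_duplicates_backup and config_dir are hypothetical names, and Home Assistant's dt_util and JSONEncoder helpers are replaced with stdlib equivalents. The key point is that os.makedirs(..., exist_ok=True) creates the hidden .deleted_statistics/ directory before the JSON backup is written, and is a no-op if the directory already exists.

    import json
    import os
    from datetime import datetime, timezone

    def write_duplicates_backup(config_dir: str, non_identical_duplicates: list) -> str:
        """Dump non-identical duplicate rows to a JSON file under .deleted_statistics/."""
        isotime = datetime.now(timezone.utc).isoformat()
        backup_path = os.path.join(
            config_dir, ".deleted_statistics", f"deleted_statistics.{isotime}.json"
        )
        # Create the hidden backup directory on first use; harmless if it already exists.
        os.makedirs(os.path.dirname(backup_path), exist_ok=True)
        with open(backup_path, "w", encoding="utf8") as backup_file:
            json.dump(
                non_identical_duplicates, backup_file, indent=4, sort_keys=True, default=str
            )
        return backup_path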