Create FileConfiguration baseclass (#2651)

Authored by Joakim Sørensen on 2021-03-01 12:26:43 +01:00; committed by GitHub
parent 7a542aeb38
commit bee55d08fb
23 changed files with 197 additions and 150 deletions

View File

@@ -54,10 +54,10 @@ from ..exceptions import (
     AddonConfigurationError,
     AddonsError,
     AddonsNotSupportedError,
+    ConfigurationFileError,
     DockerError,
     DockerRequestError,
     HostAppArmorError,
-    JsonFileError,
 )
 from ..hardware.data import Device
 from ..homeassistant.const import WSEvent, WSType

@@ -511,7 +511,7 @@ class Addon(AddonModel):
                 self.slug,
                 humanize_error(self.options, ex),
             )
-        except JsonFileError:
+        except ConfigurationFileError:
            _LOGGER.error("Add-on %s can't write options", self.slug)
         else:
             _LOGGER.debug("Add-on %s write options: %s", self.slug, options)

@@ -710,7 +710,7 @@ class Addon(AddonModel):
             # Store local configs/state
             try:
                 write_json_file(temp_path.joinpath("addon.json"), data)
-            except JsonFileError as err:
+            except ConfigurationFileError as err:
                 _LOGGER.error("Can't save meta for %s", self.slug)
                 raise AddonsError() from err

@@ -766,7 +766,7 @@ class Addon(AddonModel):
             # Read snapshot data
             try:
                 data = read_json_file(Path(temp, "addon.json"))
-            except JsonFileError as err:
+            except ConfigurationFileError as err:
                 raise AddonsError() from err

             # Validate

View File

@@ -6,16 +6,22 @@ from typing import TYPE_CHECKING, Dict
 from awesomeversion import AwesomeVersion

-from ..const import ATTR_ARGS, ATTR_BUILD_FROM, ATTR_SQUASH, META_ADDON
+from ..const import (
+    ATTR_ARGS,
+    ATTR_BUILD_FROM,
+    ATTR_SQUASH,
+    FILE_SUFFIX_CONFIGURATION,
+    META_ADDON,
+)
 from ..coresys import CoreSys, CoreSysAttributes
-from ..utils.json import JsonConfig
+from ..utils.common import FileConfiguration, find_one_filetype
 from .validate import SCHEMA_BUILD_CONFIG

 if TYPE_CHECKING:
     from . import AnyAddon


-class AddonBuild(JsonConfig, CoreSysAttributes):
+class AddonBuild(FileConfiguration, CoreSysAttributes):
     """Handle build options for add-ons."""

@@ -24,7 +30,10 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
         self.addon = addon

         super().__init__(
-            Path(self.addon.path_location, "build.json"), SCHEMA_BUILD_CONFIG
+            find_one_filetype(
+                self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION
+            ),
+            SCHEMA_BUILD_CONFIG,
         )

     def save_data(self):
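For orientation, here is a minimal sketch (not part of this commit) of what the new lookup in AddonBuild.__init__ resolves; the add-on directory path below is hypothetical and only for illustration.

# Hypothetical illustration of the build-file lookup used above.
from pathlib import Path

from supervisor.const import FILE_SUFFIX_CONFIGURATION
from supervisor.utils.common import find_one_filetype

addon_dir = Path("/data/addons/local/example")  # made-up add-on location
# Returns the first build.* file under addon_dir whose suffix is an allowed
# configuration suffix (e.g. build.json, build.yml or build.yaml), or None
# if the add-on ships no build configuration at all.
build_file = find_one_filetype(addon_dir, "build", FILE_SUFFIX_CONFIGURATION)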

View File

@@ -13,7 +13,7 @@ from ..const import (
 )
 from ..coresys import CoreSys, CoreSysAttributes
 from ..store.addon import AddonStore
-from ..utils.json import JsonConfig
+from ..utils.common import FileConfiguration
 from .addon import Addon
 from .validate import SCHEMA_ADDONS_FILE

@@ -22,7 +22,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 Config = Dict[str, Any]


-class AddonsData(JsonConfig, CoreSysAttributes):
+class AddonsData(FileConfiguration, CoreSysAttributes):
     """Hold data for installed Add-ons inside Supervisor."""

     def __init__(self, coresys: CoreSys):

View File

@@ -5,7 +5,7 @@ import platform
 from typing import List

 from .coresys import CoreSys, CoreSysAttributes
-from .exceptions import HassioArchNotFound, JsonFileError
+from .exceptions import ConfigurationFileError, HassioArchNotFound
 from .utils.json import read_json_file

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -50,7 +50,7 @@ class CpuArch(CoreSysAttributes):
         """Load data and initialize default arch."""
         try:
             arch_data = read_json_file(ARCH_JSON)
-        except JsonFileError:
+        except ConfigurationFileError:
             _LOGGER.warning("Can't read arch json file from %s", ARCH_JSON)
             return

View File

@@ -8,13 +8,13 @@ from .addons.addon import Addon
 from .const import ATTR_ADDON, ATTR_PASSWORD, ATTR_USERNAME, FILE_HASSIO_AUTH
 from .coresys import CoreSys, CoreSysAttributes
 from .exceptions import AuthError, AuthPasswordResetError, HomeAssistantAPIError
-from .utils.json import JsonConfig
+from .utils.common import FileConfiguration
 from .validate import SCHEMA_AUTH_CONFIG

 _LOGGER: logging.Logger = logging.getLogger(__name__)


-class Auth(JsonConfig, CoreSysAttributes):
+class Auth(FileConfiguration, CoreSysAttributes):
     """Manage SSO for Add-ons with Home Assistant user."""

     def __init__(self, coresys: CoreSys) -> None:

View File

@@ -22,8 +22,8 @@ from .const import (
     SUPERVISOR_DATA,
     LogLevel,
 )
+from .utils.common import FileConfiguration
 from .utils.dt import parse_datetime
-from .utils.json import JsonConfig
 from .validate import SCHEMA_SUPERVISOR_CONFIG

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -48,7 +48,7 @@ MEDIA_DATA = PurePath("media")
 DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()


-class CoreConfig(JsonConfig):
+class CoreConfig(FileConfiguration):
     """Hold all core config data."""

     def __init__(self):

View File

@@ -13,7 +13,7 @@ from voluptuous.humanize import humanize_error
 from ..const import ATTR_CONFIG, ATTR_DISCOVERY, FILE_HASSIO_DISCOVERY
 from ..coresys import CoreSys, CoreSysAttributes
 from ..exceptions import DiscoveryError, HomeAssistantAPIError
-from ..utils.json import JsonConfig
+from ..utils.common import FileConfiguration
 from .validate import SCHEMA_DISCOVERY_CONFIG, valid_discovery_config

 if TYPE_CHECKING:

@@ -35,7 +35,7 @@ class Message:
     uuid: UUID = attr.ib(factory=lambda: uuid4().hex, eq=False)


-class Discovery(CoreSysAttributes, JsonConfig):
+class Discovery(CoreSysAttributes, FileConfiguration):
     """Home Assistant Discovery handler."""

     def __init__(self, coresys: CoreSys):

View File

@@ -20,7 +20,7 @@ from ..const import (
     SOCKET_DOCKER,
 )
 from ..exceptions import DockerAPIError, DockerError, DockerNotFound, DockerRequestError
-from ..utils.json import JsonConfig
+from ..utils.common import FileConfiguration
 from ..validate import SCHEMA_DOCKER_CONFIG
 from .network import DockerNetwork

@@ -66,7 +66,7 @@ class DockerInfo:
         return bool(os.environ.get(ENV_SUPERVISOR_CPU_RT, 0))


-class DockerConfig(JsonConfig):
+class DockerConfig(FileConfiguration):
     """Home Assistant core object for Docker configuration."""

     def __init__(self):

View File

@@ -289,6 +289,13 @@ class YamlFileError(HassioError):
     """Invalid YAML file."""


+# util/common
+
+
+class ConfigurationFileError(JsonFileError, YamlFileError):
+    """Invalid JSON or YAML file."""
+
+
 # util/pwned
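In plain terms, the new exception subclasses both existing file errors, so handlers written against either of the old exception types still catch it. A quick illustration (not part of the commit):

from supervisor.exceptions import (
    ConfigurationFileError,
    JsonFileError,
    YamlFileError,
)

# ConfigurationFileError inherits from both old exceptions, so an existing
# `except JsonFileError` or `except YamlFileError` block still catches the
# new combined error whenever it is raised.
err = ConfigurationFileError()
assert isinstance(err, JsonFileError)
assert isinstance(err, YamlFileError)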

View File

@@ -25,7 +25,7 @@ from ..const import (
     FILE_HASSIO_HOMEASSISTANT,
 )
 from ..coresys import CoreSys, CoreSysAttributes
-from ..utils.json import JsonConfig
+from ..utils.common import FileConfiguration
 from ..validate import SCHEMA_HASS_CONFIG
 from .api import HomeAssistantAPI
 from .core import HomeAssistantCore

@@ -35,7 +35,7 @@ from .websocket import HomeAssistantWebSocket
 _LOGGER: logging.Logger = logging.getLogger(__name__)


-class HomeAssistant(JsonConfig, CoreSysAttributes):
+class HomeAssistant(FileConfiguration, CoreSysAttributes):
     """Home Assistant core object for handle it."""

     def __init__(self, coresys: CoreSys):

View File

@@ -9,14 +9,14 @@ from .addons.addon import Addon
 from .const import ATTR_PORTS, ATTR_SESSION, FILE_HASSIO_INGRESS
 from .coresys import CoreSys, CoreSysAttributes
 from .utils import check_port
+from .utils.common import FileConfiguration
 from .utils.dt import utc_from_timestamp, utcnow
-from .utils.json import JsonConfig
 from .validate import SCHEMA_INGRESS_CONFIG

 _LOGGER: logging.Logger = logging.getLogger(__name__)


-class Ingress(JsonConfig, CoreSysAttributes):
+class Ingress(FileConfiguration, CoreSysAttributes):
     """Fetch last versions from version.json."""

     def __init__(self, coresys: CoreSys):

View File

@@ -3,7 +3,7 @@ import logging
 from typing import Dict, List, Optional

 from ..coresys import CoreSys, CoreSysAttributes
-from ..utils.json import JsonConfig
+from ..utils.common import FileConfiguration
 from .const import ATTR_IGNORE_CONDITIONS, FILE_CONFIG_JOBS, JobCondition
 from .validate import SCHEMA_JOBS_CONFIG

@@ -49,7 +49,7 @@ class SupervisorJob(CoreSysAttributes):
     )


-class JobManager(JsonConfig, CoreSysAttributes):
+class JobManager(FileConfiguration, CoreSysAttributes):
     """Job class."""

     def __init__(self, coresys: CoreSys):

View File

@@ -6,10 +6,10 @@ from awesomeversion import AwesomeVersion, AwesomeVersionException
 from ..const import ATTR_IMAGE, ATTR_VERSION
 from ..coresys import CoreSysAttributes
-from ..utils.json import JsonConfig
+from ..utils.common import FileConfiguration


-class PluginBase(ABC, JsonConfig, CoreSysAttributes):
+class PluginBase(ABC, FileConfiguration, CoreSysAttributes):
     """Base class for plugins."""

     slug: str = ""

View File

@@ -18,7 +18,12 @@ from ..const import ATTR_SERVERS, DNS_SUFFIX, LogLevel
 from ..coresys import CoreSys
 from ..docker.dns import DockerDNS
 from ..docker.stats import DockerStats
-from ..exceptions import CoreDNSError, CoreDNSUpdateError, DockerError, JsonFileError
+from ..exceptions import (
+    ConfigurationFileError,
+    CoreDNSError,
+    CoreDNSUpdateError,
+    DockerError,
+)
 from ..resolution.const import ContextType, IssueType, SuggestionType
 from ..utils.json import write_json_file
 from ..validate import dns_url

@@ -286,7 +291,7 @@ class PluginDns(PluginBase):
                     "debug": debug,
                 },
             )
-        except JsonFileError as err:
+        except ConfigurationFileError as err:
            _LOGGER.error("Can't update coredns config: %s", err)
            raise CoreDNSError() from err

View File

@@ -2,12 +2,12 @@
 from typing import Any, Dict

 from ..const import FILE_HASSIO_SERVICES
-from ..utils.json import JsonConfig
+from ..utils.common import FileConfiguration
 from .const import SERVICE_MQTT, SERVICE_MYSQL
 from .validate import SCHEMA_SERVICES_CONFIG


-class ServicesData(JsonConfig):
+class ServicesData(FileConfiguration):
     """Class to handle services data."""

     def __init__(self):

View File

@@ -17,9 +17,9 @@ from ..const import (
     REPOSITORY_LOCAL,
 )
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import JsonFileError, YamlFileError
+from ..exceptions import ConfigurationFileError
 from ..resolution.const import ContextType, IssueType, SuggestionType
-from ..utils import find_one_filetype, read_json_or_yaml_file
+from ..utils.common import find_one_filetype, read_json_or_yaml_file
 from ..utils.json import read_json_file
 from .const import StoreType
 from .utils import extract_hash_from_path

@@ -69,7 +69,7 @@ class StoreData(CoreSysAttributes):
                 repository_info = SCHEMA_REPOSITORY_CONFIG(
                     read_json_or_yaml_file(repository_file)
                 )
-            except (JsonFileError, YamlFileError):
+            except ConfigurationFileError:
                 _LOGGER.warning(
                     "Can't read repository information from %s", repository_file
                 )

@@ -111,7 +111,7 @@ class StoreData(CoreSysAttributes):
         for addon in addon_list:
             try:
                 addon_config = read_json_or_yaml_file(addon)
-            except JsonFileError:
+            except ConfigurationFileError:
                 _LOGGER.warning("Can't read %s from repository %s", addon, repository)
                 continue

@@ -138,7 +138,7 @@ class StoreData(CoreSysAttributes):
         try:
             builtin_file = Path(__file__).parent.joinpath("built-in.json")
             builtin_data = read_json_file(builtin_file)
-        except JsonFileError:
+        except ConfigurationFileError:
             _LOGGER.warning("Can't read built-in json")
             return

@@ -168,7 +168,7 @@ class StoreData(CoreSysAttributes):
                     read_json_or_yaml_file(translation)
                 )
-            except (JsonFileError, YamlFileError, vol.Invalid):
+            except (ConfigurationFileError, vol.Invalid):
                 _LOGGER.warning("Can't read translations from %s", translation)
                 continue

View File

@@ -7,8 +7,8 @@ import voluptuous as vol
 from ..const import ATTR_MAINTAINER, ATTR_NAME, ATTR_URL, FILE_SUFFIX_CONFIGURATION
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import JsonFileError, StoreError, YamlFileError
-from ..utils import read_json_or_yaml_file
+from ..exceptions import ConfigurationFileError, StoreError
+from ..utils.common import read_json_or_yaml_file
 from .const import StoreType
 from .git import GitRepoCustom, GitRepoHassIO
 from .utils import get_hash_from_repository

@@ -91,7 +91,7 @@ class Repository(CoreSysAttributes):
         # If valid?
         try:
             SCHEMA_REPOSITORY_CONFIG(read_json_or_yaml_file(repository_file))
-        except (JsonFileError, YamlFileError, vol.Invalid):
+        except (ConfigurationFileError, vol.Invalid):
             return False

         return True

View File

@@ -30,13 +30,13 @@ from .const import (
 from .coresys import CoreSysAttributes
 from .exceptions import UpdaterError, UpdaterJobError
 from .jobs.decorator import Job, JobCondition
-from .utils.json import JsonConfig
+from .utils.common import FileConfiguration
 from .validate import SCHEMA_UPDATER_CONFIG

 _LOGGER: logging.Logger = logging.getLogger(__name__)


-class Updater(JsonConfig, CoreSysAttributes):
+class Updater(FileConfiguration, CoreSysAttributes):
     """Fetch last versions from version.json."""

     def __init__(self, coresys):

View File

@@ -5,38 +5,13 @@ import logging
 from pathlib import Path
 import re
 import socket
-from typing import Any, List, Optional
+from typing import Any

-from ..exceptions import HassioError
-from .json import read_json_file
-from .yaml import read_yaml_file

 _LOGGER: logging.Logger = logging.getLogger(__name__)

 RE_STRING: re.Pattern = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")


-def find_one_filetype(
-    path: Path, filename: str, filetypes: List[str]
-) -> Optional[Path]:
-    """Find first file matching filetypes."""
-    for file in path.glob(f"**/{filename}.*"):
-        if file.suffix in filetypes:
-            return file
-    return None
-
-
-def read_json_or_yaml_file(path: Path) -> dict:
-    """Read JSON or YAML file."""
-    if path.suffix == ".json":
-        return read_json_file(path)
-
-    if path.suffix in [".yaml", ".yml"]:
-        return read_yaml_file(path)
-
-    raise HassioError(f"{path} is not JSON or YAML")
-
-
 def convert_to_ascii(raw: bytes) -> str:
     """Convert binary to ascii and remove colors."""
     return RE_STRING.sub("", raw.decode())

supervisor/utils/common.py (new file, 107 additions)
View File

@@ -0,0 +1,107 @@
+"""Common utils."""
+import logging
+from pathlib import Path
+from typing import Any, Dict, List, Optional
+
+import voluptuous as vol
+from voluptuous.humanize import humanize_error
+
+from ..exceptions import ConfigurationFileError, HassioError
+from .json import read_json_file, write_json_file
+from .yaml import read_yaml_file, write_yaml_file
+
+_LOGGER: logging.Logger = logging.getLogger(__name__)
+
+_DEFAULT: Dict[str, Any] = {}
+
+
+def find_one_filetype(
+    path: Path, filename: str, filetypes: List[str]
+) -> Optional[Path]:
+    """Find first file matching filetypes."""
+    for file in path.glob(f"**/{filename}.*"):
+        if file.suffix in filetypes:
+            return file
+    return None
+
+
+def read_json_or_yaml_file(path: Path) -> dict:
+    """Read JSON or YAML file."""
+    if path.suffix == ".json":
+        return read_json_file(path)
+
+    if path.suffix in [".yaml", ".yml"]:
+        return read_yaml_file(path)
+
+    raise HassioError(f"{path} is not JSON or YAML")
+
+
+def write_json_or_yaml_file(path: Path, data: dict) -> None:
+    """Write JSON or YAML file."""
+    if path.suffix == ".json":
+        return write_json_file(path, data)
+
+    if path.suffix in [".yaml", ".yml"]:
+        return write_yaml_file(path, data)
+
+    raise HassioError(f"{path} is not JSON or YAML")
+
+
+class FileConfiguration:
+    """Baseclass for classes that uses configuration files, the files can be JSON/YAML."""
+
+    def __init__(self, file_path: Path, schema: vol.Schema):
+        """Initialize hass object."""
+        self._file: Path = file_path
+        self._schema: vol.Schema = schema
+        self._data: Dict[str, Any] = _DEFAULT
+
+        self.read_data()
+
+    def reset_data(self) -> None:
+        """Reset configuration to default."""
+        try:
+            self._data = self._schema(_DEFAULT)
+        except vol.Invalid as ex:
+            _LOGGER.error(
+                "Can't reset %s: %s", self._file, humanize_error(self._data, ex)
+            )
+
+    def read_data(self) -> None:
+        """Read configuration file."""
+        if self._file.is_file():
+            try:
+                self._data = read_json_or_yaml_file(self._file)
+            except ConfigurationFileError:
+                self._data = _DEFAULT
+
+        # Validate
+        try:
+            self._data = self._schema(self._data)
+        except vol.Invalid as ex:
+            _LOGGER.critical(
+                "Can't parse %s: %s", self._file, humanize_error(self._data, ex)
+            )
+
+            # Reset data to default
+            _LOGGER.warning("Resetting %s to default", self._file)
+            self._data = self._schema(_DEFAULT)
+
+    def save_data(self) -> None:
+        """Store data to configuration file."""
+        # Validate
+        try:
+            self._data = self._schema(self._data)
+        except vol.Invalid as ex:
+            _LOGGER.critical("Can't parse data: %s", humanize_error(self._data, ex))
+
+            # Load last valid data
+            _LOGGER.warning("Resetting %s to last version", self._file)
+            self._data = _DEFAULT
+            self.read_data()
+        else:
+            # write
+            try:
+                write_json_or_yaml_file(self._file, self._data)
+            except ConfigurationFileError:
+                pass
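As a usage sketch of the new base class (not part of the commit): a hypothetical MyConfig object with a made-up schema and file name, wired up the same way the AddonBuild change above does it. It assumes a matching configuration file exists, since FileConfiguration reads it during __init__.

from pathlib import Path

import voluptuous as vol

from supervisor.const import FILE_SUFFIX_CONFIGURATION
from supervisor.utils.common import FileConfiguration, find_one_filetype

# Hypothetical schema with defaults, so the schema(_DEFAULT) fallback in
# read_data()/reset_data() produces a valid empty configuration.
SCHEMA_MY_CONFIG = vol.Schema(
    {vol.Optional("enabled", default=True): bool}, extra=vol.REMOVE_EXTRA
)


class MyConfig(FileConfiguration):
    """Hypothetical configuration backed by my-config.json or my-config.yaml."""

    def __init__(self, base_path: Path):
        # find_one_filetype picks whichever suffix is present; a matching
        # file is assumed to exist here, otherwise it returns None.
        super().__init__(
            find_one_filetype(base_path, "my-config", FILE_SUFFIX_CONFIGURATION),
            SCHEMA_MY_CONFIG,
        )

    @property
    def enabled(self) -> bool:
        # Subclasses read validated values from self._data and call
        # save_data() after mutating it.
        return self._data["enabled"]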

View File

@@ -3,18 +3,14 @@ from datetime import datetime
 import json
 import logging
 from pathlib import Path
-from typing import Any, Dict
+from typing import Any

 from atomicwrites import atomic_write
-import voluptuous as vol
-from voluptuous.humanize import humanize_error

 from ..exceptions import JsonFileError

 _LOGGER: logging.Logger = logging.getLogger(__name__)

-_DEFAULT: Dict[str, Any] = {}


 class JSONEncoder(json.JSONEncoder):
     """JSONEncoder that supports Supervisor objects."""

@@ -54,63 +50,3 @@ def read_json_file(jsonfile: Path) -> Any:
     except (OSError, ValueError, TypeError, UnicodeDecodeError) as err:
         _LOGGER.error("Can't read json from %s: %s", jsonfile, err)
         raise JsonFileError() from err
-
-
-class JsonConfig:
-    """Hass core object for handle it."""
-
-    def __init__(self, json_file: Path, schema: vol.Schema):
-        """Initialize hass object."""
-        self._file: Path = json_file
-        self._schema: vol.Schema = schema
-        self._data: Dict[str, Any] = _DEFAULT
-
-        self.read_data()
-
-    def reset_data(self) -> None:
-        """Reset JSON file to default."""
-        try:
-            self._data = self._schema({})
-        except vol.Invalid as ex:
-            _LOGGER.error(
-                "Can't reset %s: %s", self._file, humanize_error(self._data, ex)
-            )
-
-    def read_data(self) -> None:
-        """Read JSON file & validate."""
-        if self._file.is_file():
-            try:
-                self._data = read_json_file(self._file)
-            except JsonFileError:
-                self._data = {}
-
-        # Validate
-        try:
-            self._data = self._schema(self._data)
-        except vol.Invalid as ex:
-            _LOGGER.critical(
-                "Can't parse %s: %s", self._file, humanize_error(self._data, ex)
-            )
-
-            # Reset data to default
-            _LOGGER.warning("Resetting %s to default", self._file)
-            self._data = self._schema(_DEFAULT)
-
-    def save_data(self) -> None:
-        """Store data to configuration file."""
-        # Validate
-        try:
-            self._data = self._schema(self._data)
-        except vol.Invalid as ex:
-            _LOGGER.critical("Can't parse data: %s", humanize_error(self._data, ex))
-
-            # Load last valid data
-            _LOGGER.warning("Resetting %s to last version", self._file)
-            self._data = _DEFAULT
-            self.read_data()
-        else:
-            # write
-            try:
-                write_json_file(self._file, self._data)
-            except JsonFileError:
-                pass

View File

@@ -2,6 +2,7 @@
 import logging
 from pathlib import Path

+from atomicwrites import atomic_write
 from ruamel.yaml import YAML, YAMLError

 from ..exceptions import YamlFileError

@@ -20,3 +21,14 @@ def read_yaml_file(path: Path) -> dict:
     except (YAMLError, AttributeError) as err:
         _LOGGER.error("Can't read YAML file %s - %s", path, err)
         raise YamlFileError() from err
+
+
+def write_yaml_file(path: Path, data: dict) -> None:
+    """Write a YAML file."""
+    try:
+        with atomic_write(path, overwrite=True) as fp:
+            _YAML.dump(data, fp)
+        path.chmod(0o600)
+    except (YAMLError, OSError, ValueError, TypeError) as err:
+        _LOGGER.error("Can't write %s: %s", path, err)
+        raise YamlFileError() from err

View File

@@ -1,40 +1,37 @@
 """test yaml."""
-import json
-
-from ruamel.yaml import YAML as _YAML
-
 from supervisor.const import FILE_SUFFIX_CONFIGURATION
-from supervisor.utils import find_one_filetype, read_json_or_yaml_file, yaml
-
-YAML = _YAML()
+from supervisor.utils.common import find_one_filetype, read_json_or_yaml_file
+from supervisor.utils.json import write_json_file
+from supervisor.utils.yaml import read_yaml_file, write_yaml_file


 def test_reading_yaml(tmp_path):
     """Test reading YAML file."""
     tempfile = tmp_path / "test.yaml"
-    YAML.dump({"test": "test"}, tempfile)
+    write_yaml_file(tempfile, {"test": "test"})

-    yaml.read_yaml_file(tempfile)
+    read = read_yaml_file(tempfile)
+    assert read["test"] == "test"


 def test_get_file_from_type(tmp_path):
     """Test get file from type."""
     tempfile = tmp_path / "test1.yaml"
-    YAML.dump({"test": "test"}, tempfile)
+    write_yaml_file(tempfile, {"test": "test"})
     found = find_one_filetype(tmp_path, "test1", FILE_SUFFIX_CONFIGURATION)
     assert found.parts[-1] == "test1.yaml"

     tempfile = tmp_path / "test2.yml"
-    YAML.dump({"test": "test"}, tempfile)
+    write_yaml_file(tempfile, {"test": "test"})
     found = find_one_filetype(tmp_path, "test2", FILE_SUFFIX_CONFIGURATION)
     assert found.parts[-1] == "test2.yml"

     tempfile = tmp_path / "test3.json"
-    YAML.dump({"test": "test"}, tempfile)
+    write_yaml_file(tempfile, {"test": "test"})
     found = find_one_filetype(tmp_path, "test3", FILE_SUFFIX_CONFIGURATION)
     assert found.parts[-1] == "test3.json"

     tempfile = tmp_path / "test.config"
-    YAML.dump({"test": "test"}, tempfile)
+    write_yaml_file(tempfile, {"test": "test"})
     found = find_one_filetype(tmp_path, "test4", FILE_SUFFIX_CONFIGURATION)
     assert not found

@@ -42,12 +39,11 @@ def test_get_file_from_type(tmp_path):
 def test_read_json_or_yaml_file(tmp_path):
     """Read JSON or YAML file."""
     tempfile = tmp_path / "test.json"
-    with open(tempfile, "w") as outfile:
-        json.dump({"test": "test"}, outfile)
+    write_json_file(tempfile, {"test": "test"})

     read = read_json_or_yaml_file(tempfile)
     assert read["test"] == "test"

     tempfile = tmp_path / "test.yaml"
-    YAML.dump({"test": "test"}, tempfile)
+    write_yaml_file(tempfile, {"test": "test"})

     read = read_json_or_yaml_file(tempfile)
     assert read["test"] == "test"