Improve error messages from translation script (#102098)
Co-authored-by: Robert Resch <robert@resch.dev>

commit 164872e1af
parent bc45de627a
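
Summary of the change, as reflected in the diff below: the translation scripts previously parsed every JSON file with json.loads(path.read_text()), so a malformed strings.json or downloaded language file surfaced as a bare json.JSONDecodeError that never said which file was at fault. This commit adds a load_json_from_path() helper to the .util module that re-raises the decode error as a new JSONDecodeErrorWithPath (defined in .error) whose message includes the offending path, line, and column, and switches every json.loads call site in the translation scripts over to the helper.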
@@ -1,11 +1,10 @@
 """Find translation keys that are in Lokalise but no longer defined in source."""
 import argparse
-import json
 
 from .const import CORE_PROJECT_ID, FRONTEND_DIR, FRONTEND_PROJECT_ID, INTEGRATIONS_DIR
 from .error import ExitApp
 from .lokalise import get_api
-from .util import get_base_arg_parser
+from .util import get_base_arg_parser, load_json_from_path
 
 
 def get_arguments() -> argparse.Namespace:
@@ -46,9 +45,9 @@ def find_core():
 
         translations = int_dir / "translations" / "en.json"
 
-        strings_json = json.loads(strings.read_text())
+        strings_json = load_json_from_path(strings)
         if translations.is_file():
-            translations_json = json.loads(translations.read_text())
+            translations_json = load_json_from_path(translations)
         else:
             translations_json = {}
 
@@ -69,8 +68,8 @@ def find_frontend():
 
     missing_keys = []
     find_extra(
-        json.loads(source.read_text()),
-        json.loads(translated.read_text()),
+        load_json_from_path(source),
+        load_json_from_path(translated),
         "",
         missing_keys,
     )
@@ -9,7 +9,7 @@ from homeassistant.const import Platform
 
 from . import upload
 from .develop import flatten_translations
-from .util import get_base_arg_parser
+from .util import get_base_arg_parser, load_json_from_path
 
 
 def get_arguments() -> argparse.Namespace:
@@ -101,7 +101,7 @@ def run():
 
     for component in components:
         comp_strings_path = Path(STRINGS_PATH.format(component))
-        strings[component] = json.loads(comp_strings_path.read_text(encoding="utf-8"))
+        strings[component] = load_json_from_path(comp_strings_path)
 
     for path, value in update_keys.items():
         parts = path.split("::")
@@ -10,7 +10,7 @@ import subprocess
 
 from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR
 from .error import ExitApp
-from .util import get_lokalise_token
+from .util import get_lokalise_token, load_json_from_path
 
 FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
 DOWNLOAD_DIR = pathlib.Path("build/translations-download").absolute()
@@ -122,7 +122,7 @@ def write_integration_translations():
     """Write integration translations."""
     for lang_file in DOWNLOAD_DIR.glob("*.json"):
         lang = lang_file.stem
-        translations = json.loads(lang_file.read_text())
+        translations = load_json_from_path(lang_file)
         save_language_translations(lang, translations)
 
 
@@ -1,4 +1,5 @@
 """Errors for translations."""
+import json
 
 
 class ExitApp(Exception):
@@ -8,3 +9,28 @@ class ExitApp(Exception):
         """Initialize the exit app exception."""
         self.reason = reason
         self.exit_code = exit_code
+
+
+class JSONDecodeErrorWithPath(json.JSONDecodeError):
+    """Subclass of JSONDecodeError with additional properties.
+
+    Additional properties:
+    path: Path to the JSON document being parsed
+    """
+
+    def __init__(self, msg, doc, pos, path):
+        """Initialize."""
+        lineno = doc.count("\n", 0, pos) + 1
+        colno = pos - doc.rfind("\n", 0, pos)
+        errmsg = f"{msg}: file: {path} line {lineno} column {colno} (char {pos})"
+        ValueError.__init__(self, errmsg)
+        self.msg = msg
+        self.doc = doc
+        self.pos = pos
+        self.lineno = lineno
+        self.colno = colno
+        self.path = path
+
+    def __reduce__(self):
+        """Reduce."""
+        return self.__class__, (self.msg, self.doc, self.pos, self.path)
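
For illustration only, a minimal sketch of the message JSONDecodeErrorWithPath.__init__ builds; the document, position, and path below are hypothetical, and only the line/column arithmetic mirrors the code above:

doc = '{"title": "Example",}'  # trailing comma makes this invalid JSON
msg = "Expecting property name enclosed in double quotes"
pos = 20  # index of the offending "}" character
path = "homeassistant/components/demo/strings.json"  # example path, not from the diff

lineno = doc.count("\n", 0, pos) + 1   # 1: no newlines before pos
colno = pos - doc.rfind("\n", 0, pos)  # 21: rfind returns -1, so colno == pos + 1
print(f"{msg}: file: {path} line {lineno} column {colno} (char {pos})")
# -> Expecting property name enclosed in double quotes: file: homeassistant/components/demo/strings.json line 1 column 21 (char 20)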
@@ -4,7 +4,7 @@ import json
 
 from .const import FRONTEND_DIR
 from .download import DOWNLOAD_DIR, run_download_docker
-from .util import get_base_arg_parser
+from .util import get_base_arg_parser, load_json_from_path
 
 FRONTEND_BACKEND_TRANSLATIONS = FRONTEND_DIR / "translations/backend"
 
@@ -29,7 +29,7 @@ def run():
     run_download_docker()
 
     for lang_file in DOWNLOAD_DIR.glob("*.json"):
-        translations = json.loads(lang_file.read_text())
+        translations = load_json_from_path(lang_file)
 
         to_write_translations = {"component": {}}
 
@@ -6,6 +6,7 @@ import re
 
 from .const import CORE_PROJECT_ID, FRONTEND_PROJECT_ID, INTEGRATIONS_DIR
 from .lokalise import get_api
+from .util import load_json_from_path
 
 FRONTEND_REPO = pathlib.Path("../frontend/")
 
@@ -164,7 +165,7 @@ def find_and_rename_keys():
         if not strings_file.is_file():
             continue
 
-        strings = json.loads(strings_file.read_text())
+        strings = load_json_from_path(strings_file)
 
         if "title" in strings.get("config", {}):
             from_key = f"component::{integration.name}::config::title"
@@ -194,12 +195,12 @@ def interactive_update():
         if not strings_file.is_file():
             continue
 
-        strings = json.loads(strings_file.read_text())
+        strings = load_json_from_path(strings_file)
 
         if "title" not in strings:
             continue
 
-        manifest = json.loads((integration / "manifest.json").read_text())
+        manifest = load_json_from_path(integration / "manifest.json")
 
         print("Processing", manifest["name"])
         print("Translation title", strings["title"])
@@ -247,9 +248,8 @@ def find_frontend_states():
     Source key -> target key
     Add key to integrations strings.json
     """
-    frontend_states = json.loads(
-        (FRONTEND_REPO / "src/translations/en.json").read_text()
-    )["state"]
+    path = FRONTEND_REPO / "src/translations/en.json"
+    frontend_states = load_json_from_path(path)["state"]
 
     # domain => state object
     to_write = {}
@@ -307,7 +307,7 @@ def find_frontend_states():
     for domain, state in to_write.items():
         strings = INTEGRATIONS_DIR / domain / "strings.json"
         if strings.is_file():
-            content = json.loads(strings.read_text())
+            content = load_json_from_path(strings)
         else:
             content = {}
 
@@ -326,7 +326,7 @@ def find_frontend_states():
 def apply_data_references(to_migrate):
     """Apply references."""
     for strings_file in INTEGRATIONS_DIR.glob("*/strings.json"):
-        strings = json.loads(strings_file.read_text())
+        strings = load_json_from_path(strings_file)
         steps = strings.get("config", {}).get("step")
 
         if not steps:
@@ -8,7 +8,7 @@ import subprocess
 
 from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR
 from .error import ExitApp
-from .util import get_current_branch, get_lokalise_token
+from .util import get_current_branch, get_lokalise_token, load_json_from_path
 
 FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json")
 LOCAL_FILE = pathlib.Path("build/translations-upload.json").absolute()
@@ -52,7 +52,7 @@ def run_upload_docker():
 
 def generate_upload_data():
     """Generate the data for uploading."""
-    translations = json.loads((INTEGRATIONS_DIR.parent / "strings.json").read_text())
+    translations = load_json_from_path(INTEGRATIONS_DIR.parent / "strings.json")
     translations["component"] = {}
 
     for path in INTEGRATIONS_DIR.glob(f"*{os.sep}strings*.json"):
@@ -66,7 +66,7 @@ def generate_upload_data():
             platforms = parent.setdefault("platform", {})
             parent = platforms.setdefault(platform, {})
 
-        parent.update(json.loads(path.read_text()))
+        parent.update(load_json_from_path(path))
 
     return translations
 
@@ -1,10 +1,12 @@
 """Translation utils."""
 import argparse
+import json
 import os
 import pathlib
 import subprocess
+from typing import Any
 
-from .error import ExitApp
+from .error import ExitApp, JSONDecodeErrorWithPath
 
 
 def get_base_arg_parser() -> argparse.ArgumentParser:
@@ -55,3 +57,11 @@ def get_current_branch():
         .stdout.decode()
         .strip()
     )
+
+
+def load_json_from_path(path: pathlib.Path) -> Any:
+    """Load JSON from path."""
+    try:
+        return json.loads(path.read_text())
+    except json.JSONDecodeError as err:
+        raise JSONDecodeErrorWithPath(err.msg, err.doc, err.pos, path) from err
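
A hypothetical usage sketch, not part of the diff; it assumes the repository root is on the import path so the package resolves as script.translations, and the strings.json path is only an example:

import pathlib

from script.translations.error import JSONDecodeErrorWithPath
from script.translations.util import load_json_from_path

strings_file = pathlib.Path("homeassistant/components/demo/strings.json")  # example path

try:
    strings = load_json_from_path(strings_file)
except JSONDecodeErrorWithPath as err:
    # err.path names the broken file; lineno/colno locate the syntax error inside it.
    print(f"Invalid JSON in {err.path} (line {err.lineno}, column {err.colno}): {err.msg}")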