diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4bc8a956f1c..5ec14e9ab6a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -88,4 +88,4 @@ repos: pass_filenames: false language: script types: [json] - files: ^homeassistant/.+/manifest\.json$ + files: ^homeassistant/.+/(manifest|strings)\.json$ diff --git a/homeassistant/components/deconz/strings.json b/homeassistant/components/deconz/strings.json index 2293be4b247..ab2d21195fb 100644 --- a/homeassistant/components/deconz/strings.json +++ b/homeassistant/components/deconz/strings.json @@ -84,7 +84,7 @@ "close": "Close", "both_buttons": "Both buttons", "top_buttons": "Top buttons", - "bottom_buttons" : "Bottom buttons", + "bottom_buttons": "Bottom buttons", "button_1": "First button", "button_2": "Second button", "button_3": "Third button", diff --git a/homeassistant/components/unifi/strings.json b/homeassistant/components/unifi/strings.json index cb7767eba12..50f29de9104 100644 --- a/homeassistant/components/unifi/strings.json +++ b/homeassistant/components/unifi/strings.json @@ -57,8 +57,5 @@ "title": "UniFi options 3/3" } } - }, - "error": { - "unknown_client_mac": "No client available in UniFi on that MAC address" } } diff --git a/script/hassfest/__main__.py b/script/hassfest/__main__.py index a1541ef68c9..7c86a1ca6c4 100644 --- a/script/hassfest/__main__.py +++ b/script/hassfest/__main__.py @@ -8,16 +8,15 @@ from . 
import ( config_flow, coverage, dependencies, - json, manifest, services, ssdp, + translations, zeroconf, ) from .model import Config, Integration PLUGINS = [ - json, codeowners, config_flow, coverage, @@ -25,6 +24,7 @@ PLUGINS = [ manifest, services, ssdp, + translations, zeroconf, ] diff --git a/script/hassfest/json.py b/script/hassfest/json.py deleted file mode 100644 index 73b6c372b4f..00000000000 --- a/script/hassfest/json.py +++ /dev/null @@ -1,29 +0,0 @@ -"""Validate integration JSON files.""" -import json -from typing import Dict - -from .model import Integration - - -def validate_json_files(integration: Integration): - """Validate JSON files for integration.""" - for json_file in integration.path.glob("**/*.json"): - if not json_file.is_file(): - continue - - try: - json.loads(json_file.read_text()) - except json.JSONDecodeError: - relative_path = json_file.relative_to(integration.path) - integration.add_error("json", f"Invalid JSON file {relative_path}") - - return - - -def validate(integrations: Dict[str, Integration], config): - """Handle JSON files inside integrations.""" - for integration in integrations.values(): - if not integration.manifest: - continue - - validate_json_files(integration) diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py new file mode 100644 index 00000000000..f55e793ad92 --- /dev/null +++ b/script/hassfest/translations.py @@ -0,0 +1,90 @@ +"""Validate integration translation files.""" +import json +from typing import Dict + +import voluptuous as vol +from voluptuous.humanize import humanize_error + +from .model import Integration + + +def data_entry_schema(*, require_title: bool, require_step_title: bool): + """Generate a data entry schema.""" + step_title_class = vol.Required if require_step_title else vol.Optional + data_entry_schema = { + vol.Optional("flow_title"): str, + vol.Required("step"): { + str: { + step_title_class("title"): str, + vol.Optional("description"): str, + vol.Optional("data"): 
{str: str}, + } + }, + vol.Optional("error"): {str: str}, + vol.Optional("abort"): {str: str}, + vol.Optional("create_entry"): {str: str}, + } + if require_title: + data_entry_schema[vol.Required("title")] = str + + return data_entry_schema + + +STRINGS_SCHEMA = vol.Schema( + { + vol.Optional("title"): str, + vol.Optional("config"): data_entry_schema( + require_title=False, require_step_title=True + ), + vol.Optional("options"): data_entry_schema( + require_title=False, require_step_title=False + ), + vol.Optional("device_automation"): { + vol.Optional("action_type"): {str: str}, + vol.Optional("condition_type"): {str: str}, + vol.Optional("trigger_type"): {str: str}, + vol.Optional("trigger_subtype"): {str: str}, + }, + vol.Optional("state"): {str: str}, + } +) + + +AUTH_SCHEMA = vol.Schema( + { + vol.Optional("mfa_setup"): { + str: data_entry_schema(require_title=True, require_step_title=True) + } + } +) + + +ONBOARDING_SCHEMA = vol.Schema({vol.Required("area"): {str: str}}) + + +def validate_translation_file(integration: Integration): + """Validate translation files for integration.""" + strings_file = integration.path / "strings.json" + + if not strings_file.is_file(): + return + + strings = json.loads(strings_file.read_text()) + + if integration.domain == "auth": + schema = AUTH_SCHEMA + elif integration.domain == "onboarding": + schema = ONBOARDING_SCHEMA + else: + schema = STRINGS_SCHEMA + + try: + schema(strings) + except vol.Invalid as err: + integration.add_error( + "translations", f"Invalid strings.json: {humanize_error(strings, err)}" + ) + + +def validate(integrations: Dict[str, Integration], config): + """Handle translation files inside integrations.""" + for integration in integrations.values(): + validate_translation_file(integration) diff --git a/script/translations/__main__.py b/script/translations/__main__.py index 3b64b4168c2..52a39038107 100644 --- a/script/translations/__main__.py +++ b/script/translations/__main__.py @@ -21,9 +21,7 @@ def main(): args = 
get_arguments() module = importlib.import_module(f".{args.action}", "script.translations") - module.run() - - return 0 + return module.run() if __name__ == "__main__": diff --git a/script/translations/clean.py b/script/translations/clean.py index 348b8ca6c4d..57e23bee9df 100644 --- a/script/translations/clean.py +++ b/script/translations/clean.py @@ -68,3 +68,5 @@ def run(): print("Deleting keys:", ", ".join(map(str, to_delete))) print(lokalise.keys_delete_multiple(to_delete)) + + return 0 diff --git a/script/translations/develop.py b/script/translations/develop.py index 27f3a884335..8886debd555 100644 --- a/script/translations/develop.py +++ b/script/translations/develop.py @@ -61,3 +61,5 @@ def run(): ) download.write_integration_translations() + + return 0 diff --git a/script/translations/download.py b/script/translations/download.py index 2191a8195a8..e6e4415f16d 100755 --- a/script/translations/download.py +++ b/script/translations/download.py @@ -149,3 +149,5 @@ def run(): run_download_docker() write_integration_translations() + + return 0 diff --git a/script/translations/migrate.py b/script/translations/migrate.py index 644bfec5b30..7026aef2840 100644 --- a/script/translations/migrate.py +++ b/script/translations/migrate.py @@ -48,3 +48,5 @@ def run(): print() print("Updating keys") pprint(lokalise.keys_bulk_update(updates).json()) + + return 0 diff --git a/script/translations/upload.py b/script/translations/upload.py index cf14ffa3cf9..ecd9ec405df 100755 --- a/script/translations/upload.py +++ b/script/translations/upload.py @@ -82,3 +82,5 @@ def run(): LOCAL_FILE.write_text(json.dumps(translations, indent=4, sort_keys=True)) run_upload_docker() + + return 0 diff --git a/script/translations/util.py b/script/translations/util.py index 02a7d577e66..ad415481e8e 100644 --- a/script/translations/util.py +++ b/script/translations/util.py @@ -13,7 +13,7 @@ def get_base_arg_parser(): parser.add_argument( "action", type=str, - choices=["download", "clean", "upload", 
"develop", "migrate"], + choices=["clean", "develop", "download", "migrate", "upload"], ) parser.add_argument("--debug", action="store_true", help="Enable log output") return parser