Import state strings (#34451)

This commit is contained in:
Paulus Schoutsen 2020-04-20 16:25:35 -07:00 committed by GitHub
parent 095d2718f3
commit d560e8245a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
30 changed files with 617 additions and 51 deletions

View File

@ -22,5 +22,19 @@
"armed_away": "{entity_name} armed away",
"armed_night": "{entity_name} armed night"
}
},
"state": {
"_": {
"armed": "Armed",
"disarmed": "Disarmed",
"armed_home": "Armed home",
"armed_away": "Armed away",
"armed_night": "Armed night",
"armed_custom_bypass": "Armed custom bypass",
"pending": "Pending",
"arming": "Arming",
"disarming": "Disarming",
"triggered": "Triggered"
}
}
}

View File

@ -1 +1,9 @@
{ "title": "Automation" }
{
"title": "Automation",
"state": {
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
}

View File

@ -89,5 +89,87 @@
"turned_on": "{entity_name} turned on",
"turned_off": "{entity_name} turned off"
}
},
"state": {
"battery": {
"off": "Normal",
"on": "Low"
},
"cold": {
"off": "[%key:component::binary_sensor::state::battery::off%]",
"on": "Cold"
},
"connectivity": {
"off": "[%key:common::state::disconnected%]",
"on": "[%key:common::state::connected%]"
},
"door": {
"off": "[%key:common::state::closed%]",
"on": "[%key:common::state::open%]"
},
"garage_door": {
"off": "[%key:common::state::closed%]",
"on": "[%key:common::state::open%]"
},
"gas": {
"off": "Clear",
"on": "Detected"
},
"heat": {
"off": "[%key:component::binary_sensor::state::battery::off%]",
"on": "Hot"
},
"lock": {
"off": "[%key:common::state::locked%]",
"on": "[%key:common::state::unlocked%]"
},
"moisture": {
"off": "Dry",
"on": "Wet"
},
"motion": {
"off": "[%key:component::binary_sensor::state::gas::off%]",
"on": "[%key:component::binary_sensor::state::gas::on%]"
},
"occupancy": {
"off": "[%key:component::binary_sensor::state::gas::off%]",
"on": "[%key:component::binary_sensor::state::gas::on%]"
},
"opening": {
"off": "[%key:common::state::closed%]",
"on": "[%key:common::state::open%]"
},
"presence": {
"off": "[%key:component::device_tracker::state::not_home%]",
"on": "[%key:component::device_tracker::state::home%]"
},
"problem": {
"off": "OK",
"on": "Problem"
},
"safety": {
"off": "Safe",
"on": "Unsafe"
},
"smoke": {
"off": "[%key:component::binary_sensor::state::gas::off%]",
"on": "[%key:component::binary_sensor::state::gas::on%]"
},
"sound": {
"off": "[%key:component::binary_sensor::state::gas::off%]",
"on": "[%key:component::binary_sensor::state::gas::on%]"
},
"vibration": {
"off": "[%key:component::binary_sensor::state::gas::off%]",
"on": "[%key:component::binary_sensor::state::gas::on%]"
},
"window": {
"off": "[%key:common::state::closed%]",
"on": "[%key:common::state::open%]"
},
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
}

View File

@ -1 +1,9 @@
{ "title": "Calendar" }
{
"title": "Calendar",
"state": {
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
}

View File

@ -1 +1,10 @@
{ "title": "Camera" }
{
"title": "Camera",
"state": {
"_": {
"recording": "Recording",
"streaming": "Streaming",
"idle": "[%key:common::state::idle%]"
}
}
}

View File

@ -14,5 +14,16 @@
"set_hvac_mode": "Change HVAC mode on {entity_name}",
"set_preset_mode": "Change preset on {entity_name}"
}
},
"state": {
"_": {
"off": "[%key:common::state::off%]",
"heat": "Heat",
"cool": "Cool",
"heat_cool": "Heat/Cool",
"auto": "Auto",
"dry": "Dry",
"fan_only": "Fan only"
}
}
}

View File

@ -1 +1,9 @@
{ "title": "Configurator" }
{
"title": "Configurator",
"state": {
"_": {
"configure": "Configure",
"configured": "Configured"
}
}
}

View File

@ -25,5 +25,14 @@
"position": "{entity_name} position changes",
"tilt_position": "{entity_name} tilt position changes"
}
},
"state": {
"_": {
"open": "[%key:common::state::open%]",
"opening": "Opening",
"closed": "[%key:common::state::closed%]",
"closing": "Closing",
"stopped": "Stopped"
}
}
}

View File

@ -5,5 +5,11 @@
"is_home": "{entity_name} is home",
"is_not_home": "{entity_name} is not home"
}
},
"state": {
"_": {
"home": "[%key:common::state::home%]",
"not_home": "[%key:common::state::not_home%]"
}
}
}

View File

@ -13,5 +13,11 @@
"turn_on": "Turn on {entity_name}",
"turn_off": "Turn off {entity_name}"
}
},
"state": {
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
}

View File

@ -1 +1,17 @@
{ "title": "Group" }
{
"title": "Group",
"state": {
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]",
"home": "[%key:component::device_tracker::state::home%]",
"not_home": "[%key:component::device_tracker::state::not_home%]",
"open": "[%key:common::state::open%]",
"closed": "[%key:common::state::closed%]",
"locked": "[%key:common::state::locked%]",
"unlocked": "[%key:common::state::unlocked%]",
"ok": "[%key:component::binary_sensor::state::problem::off%]",
"problem": "[%key:component::binary_sensor::state::problem::on%]"
}
}
}

View File

@ -1 +1,9 @@
{ "title": "Input boolean" }
{
"title": "Input boolean",
"state": {
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
}

View File

@ -17,5 +17,11 @@
"turned_on": "{entity_name} turned on",
"turned_off": "{entity_name} turned off"
}
},
"state": {
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
}

View File

@ -14,5 +14,11 @@
"locked": "{entity_name} locked",
"unlocked": "{entity_name} unlocked"
}
},
"state": {
"_": {
"locked": "[%key:common::state::locked%]",
"unlocked": "[%key:common::state::unlocked%]"
}
}
}

View File

@ -8,5 +8,15 @@
"is_paused": "{entity_name} is paused",
"is_playing": "{entity_name} is playing"
}
},
"state": {
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]",
"playing": "Playing",
"paused": "[%key:common::state::paused%]",
"idle": "[%key:common::state::idle%]",
"standby": "[%key:common::state::standby%]"
}
}
}

View File

@ -1 +1,9 @@
{ "title": "Person" }
{
"title": "Person",
"state": {
"_": {
"home": "[%key:common::state::home%]",
"not_home": "[%key:common::state::not_home%]"
}
}
}

View File

@ -1 +1,9 @@
{ "title": "Plant Monitor" }
{
"title": "Plant Monitor",
"state": {
"_": {
"ok": "[%key:component::binary_sensor::state::problem::off%]",
"problem": "[%key:component::binary_sensor::state::problem::on%]"
}
}
}

View File

@ -1 +1,9 @@
{ "title": "Remote" }
{
"title": "Remote",
"state": {
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
}

View File

@ -1 +1,9 @@
{ "title": "Script" }
{
"title": "Script",
"state": {
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
}

View File

@ -23,5 +23,11 @@
"timestamp": "{entity_name} timestamp changes",
"value": "{entity_name} value changes"
}
},
"state": {
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
}

View File

@ -1 +1,9 @@
{ "title": "Sun" }
{
"title": "Sun",
"state": {
"_": {
"above_horizon": "Above horizon",
"below_horizon": "Below horizon"
}
}
}

View File

@ -14,5 +14,11 @@
"turned_on": "{entity_name} turned on",
"turned_off": "{entity_name} turned off"
}
},
"state": {
"_": {
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]"
}
}
}

View File

@ -0,0 +1,9 @@
{
"state": {
"_": {
"active": "[%key:common::state::active%]",
"idle": "[%key:common::state::idle%]",
"paused": "[%key:common::state::paused%]"
}
}
}

View File

@ -13,5 +13,17 @@
"clean": "Let {entity_name} clean",
"dock": "Let {entity_name} return to the dock"
}
},
"state": {
"_": {
"cleaning": "Cleaning",
"docked": "Docked",
"error": "Error",
"idle": "[%key:common::state::idle%]",
"off": "[%key:common::state::off%]",
"on": "[%key:common::state::on%]",
"paused": "[%key:common::state::paused%]",
"returning": "Returning to dock"
}
}
}

View File

@ -0,0 +1,21 @@
{
"state": {
"_": {
"clear-night": "Clear, night",
"cloudy": "Cloudy",
"exceptional": "Exceptional",
"fog": "Fog",
"hail": "Hail",
"lightning": "Lightning",
"lightning-rainy": "Lightning, rainy",
"partlycloudy": "Partly cloudy",
"pouring": "Pouring",
"rainy": "Rainy",
"snowy": "Snowy",
"snowy-rainy": "Snowy, rainy",
"sunny": "Sunny",
"windy": "Windy",
"windy-variant": "Windy"
}
}
}

View File

@ -17,5 +17,17 @@
"already_configured": "Z-Wave is already configured",
"one_instance_only": "Component only supports one Z-Wave instance"
}
},
"state": {
"query_stage": {
"initializing": "[%key:component::zwave::state::_::initializing%]",
"dead": "[%key:component::zwave::state::_::dead%]"
},
"_": {
"initializing": "Initializing",
"dead": "Dead",
"sleeping": "Sleeping",
"ready": "Ready"
}
}
}

View File

@ -0,0 +1,20 @@
{
"common": {
"state": {
"off": "Off",
"on": "On",
"open": "Open",
"closed": "Closed",
"connected": "Connected",
"disconnected": "Disconnected",
"locked": "Locked",
"unlocked": "Unlocked",
"active": "Active",
"idle": "Idle",
"standby": "Standby",
"paused": "Paused",
"home": "Home",
"not_home": "Away"
}
}
}

View File

@ -2,8 +2,10 @@
from functools import partial
import json
import logging
import re
from typing import Dict
from script.translations import upload
import voluptuous as vol
from voluptuous.humanize import humanize_error
@ -18,6 +20,8 @@ UNDEFINED = 0
REQUIRED = 1
REMOVED = 2
RE_REFERENCE = r"\[\%key:(.+)\%\]"
REMOVED_TITLE_MSG = (
"config.title key has been moved out of config and into the root of strings.json. "
"Starting Home Assistant 0.109 you only need to define this key in the root "
@ -26,6 +30,19 @@ REMOVED_TITLE_MSG = (
)
def find_references(strings, prefix, found):
    """Find references.

    Recursively walk a translation dict and collect every leaf value that
    matches RE_REFERENCE into ``found`` as
    ``{"source": <dotted key path>, "ref": <referenced key>}``.
    """
    for key, value in strings.items():
        path = f"{prefix}::{key}"
        if isinstance(value, dict):
            # Nested section: descend with the extended key path.
            find_references(value, path, found)
        else:
            match = re.match(RE_REFERENCE, value)
            if match is not None:
                found.append({"source": path, "ref": match.groups()[0]})
def removed_title_validator(config, integration, value):
"""Mark removed title."""
if not config.specific_integrations:
@ -36,6 +53,14 @@ def removed_title_validator(config, integration, value):
return value
def lowercase_validator(value):
    """Validate value is lowercase.

    Returns the value unchanged when it is already all-lowercase,
    otherwise raises ``vol.Invalid``.
    """
    lowered = value.lower()
    if lowered != value:
        raise vol.Invalid("Needs to be lowercase")
    return value
def gen_data_entry_schema(
*,
config: Config,
@ -92,7 +117,8 @@ def gen_strings_schema(config: Config, integration: Integration):
vol.Optional("trigger_subtype"): {str: str},
},
vol.Optional("state"): cv.schema_with_slug_keys(
cv.schema_with_slug_keys(str)
cv.schema_with_slug_keys(str, slug_validator=lowercase_validator),
slug_validator=vol.Any("_", cv.slug),
),
}
)
@ -115,10 +141,23 @@ def gen_auth_schema(config: Config, integration: Integration):
def gen_platform_strings_schema(config: Config, integration: Integration):
"""Generate platform strings schema like strings.sensor.json."""
"""Generate platform strings schema like strings.sensor.json.
Example of valid data:
{
"state": {
"moon__phase": {
"full": "Full"
}
}
}
"""
def device_class_validator(value):
"""Key validator."""
"""Key validator for platform states.
Platform states are only allowed to provide states for device classes they prefix.
"""
if not value.startswith(f"{integration.domain}__"):
raise vol.Invalid(
f"Device class need to start with '{integration.domain}__'. Key {value} is invalid"
@ -128,14 +167,17 @@ def gen_platform_strings_schema(config: Config, integration: Integration):
slugged = slugify(slug_friendly)
if slug_friendly != slugged:
raise vol.Invalid(f"invalid device class {value}")
raise vol.Invalid(
f"invalid device class {value}. After domain__, needs to be all lowercase, no spaces."
)
return value
return vol.Schema(
{
vol.Optional("state"): cv.schema_with_slug_keys(
cv.schema_with_slug_keys(str), slug_validator=device_class_validator
cv.schema_with_slug_keys(str, slug_validator=lowercase_validator),
slug_validator=device_class_validator,
)
}
)
@ -144,9 +186,10 @@ def gen_platform_strings_schema(config: Config, integration: Integration):
ONBOARDING_SCHEMA = vol.Schema({vol.Required("area"): {str: str}})
def validate_translation_file(config: Config, integration: Integration):
def validate_translation_file(config: Config, integration: Integration, all_strings):
"""Validate translation files for integration."""
strings_file = integration.path / "strings.json"
references = []
if strings_file.is_file():
strings = json.loads(strings_file.read_text())
@ -164,6 +207,8 @@ def validate_translation_file(config: Config, integration: Integration):
integration.add_error(
"translations", f"Invalid strings.json: {humanize_error(strings, err)}"
)
else:
find_references(strings, "strings.json", references)
for path in integration.path.glob("strings.*.json"):
strings = json.loads(path.read_text())
@ -177,9 +222,35 @@ def validate_translation_file(config: Config, integration: Integration):
integration.add_warning("translations", msg)
else:
integration.add_error("translations", msg)
else:
find_references(strings, path.name, references)
if config.specific_integrations:
return
# Validate references
for reference in references:
parts = reference["ref"].split("::")
search = all_strings
key = parts.pop(0)
while parts and key in search:
search = search[key]
key = parts.pop(0)
if parts:
print(key, list(search))
integration.add_error(
"translations",
f"{reference['source']} contains invalid reference {reference['ref']}: Could not find {key}",
)
def validate(integrations: Dict[str, Integration], config: Config):
    """Handle JSON files inside integrations.

    Loads the aggregated upload data once (unless running against specific
    integrations, where cross-file reference validation is skipped) and
    validates every integration's translation files against it.
    """
    # Diff artifact fixed: the stale 2-arg call to validate_translation_file
    # that preceded the 3-arg call has been removed — only the new signature
    # (which receives all_strings for reference validation) is invoked.
    if config.specific_integrations:
        # Partial runs cannot resolve cross-integration references,
        # so no aggregated string data is built.
        all_strings = None
    else:
        all_strings = upload.generate_upload_data()

    for integration in integrations.values():
        validate_translation_file(config, integration, all_strings)

View File

@ -1,10 +1,14 @@
"""Migrate things."""
import json
import pathlib
from pprint import pprint
import re
from .const import CORE_PROJECT_ID, FRONTEND_PROJECT_ID, INTEGRATIONS_DIR
from .lokalise import get_api
FRONTEND_REPO = pathlib.Path("../frontend/")
def create_lookup(results):
"""Create a lookup table by key name."""
@ -47,30 +51,53 @@ def rename_keys(project_id, to_migrate):
pprint(lokalise.keys_bulk_update(updates))
def list_keys_helper(lokalise, keys, params=None, *, validate=True):
    """List keys in chunks so it doesn't exceed max URL length.

    Queries the Lokalise API in batches of 100 keys. When ``validate`` is
    set, each batch must return exactly one result per requested key;
    otherwise a ValueError is raised after printing which keys are missing.

    Fixes:
    - ``params`` used a mutable default argument (``{}``); replaced with the
      ``None`` sentinel idiom (backward compatible — callers may still pass
      a dict positionally or omit it).
    - The mismatch diagnostic reported ``len(keys)`` (the full key list)
      although the comparison is against ``len(filter_keys)`` (the current
      chunk); it now reports the chunk size, and the "Lookin" typo is fixed.
    """
    if params is None:
        params = {}
    results = []

    for i in range(0, len(keys), 100):
        filter_keys = keys[i : i + 100]
        from_key_data = lokalise.keys_list(
            {
                **params,
                "filter_keys": ",".join(filter_keys),
                # limit is one more than requested — presumably so an
                # over-full response is detectable; TODO confirm.
                "limit": len(filter_keys) + 1,
            }
        )
        if len(from_key_data) == len(filter_keys) or not validate:
            results.extend(from_key_data)
            continue

        print(
            f"Looking up keys in Lokalise returns {len(from_key_data)} results, expected {len(filter_keys)}"
        )
        searched = set(filter_keys)
        returned = set(create_lookup(from_key_data))
        print("Not found:", ", ".join(searched - returned))
        raise ValueError

    return results
def migrate_project_keys_translations(from_project_id, to_project_id, to_migrate):
"""Migrate keys and translations from one project to another.
to_migrate is Dict[from_key] = to_key.
"""
from_lokalise = get_api(from_project_id)
to_lokalise = get_api(to_project_id, True)
from_key_data = from_lokalise.keys_list(
{"filter_keys": ",".join(to_migrate), "include_translations": 1}
)
if len(from_key_data) != len(to_migrate):
print(
f"Lookin up keys in Lokalise returns {len(from_key_data)} results, expected {len(to_migrate)}"
)
return
from_key_lookup = create_lookup(from_key_data)
to_lokalise = get_api(to_project_id)
# Fetch keys in target
# We are going to skip migrating existing keys
to_key_data = to_lokalise.keys_list(
{"filter_keys": ",".join(to_migrate.values()), "include_translations": 1}
)
print("Checking which target keys exist..")
try:
to_key_data = list_keys_helper(
to_lokalise, list(to_migrate.values()), validate=False
)
except ValueError:
return
existing = set(create_lookup(to_key_data))
missing = [key for key in to_migrate.values() if key not in existing]
@ -79,6 +106,19 @@ def migrate_project_keys_translations(from_project_id, to_project_id, to_migrate
print("All keys to migrate exist already, nothing to do")
return
# Fetch keys whose translations we're importing
print("Fetch translations that we're importing..")
try:
from_key_data = list_keys_helper(
from_lokalise,
[key for key, value in to_migrate.items() if value not in existing],
{"include_translations": 1},
)
except ValueError:
return
from_key_lookup = create_lookup(from_key_data)
print("Creating", ", ".join(missing))
to_key_lookup = create_lookup(
to_lokalise.keys_create(
@ -169,24 +209,145 @@ def interactive_update():
print()
STATE_REWRITE = {
"Off": "[%key:common::state::off%]",
"On": "[%key:common::state::on%]",
"Unknown": "[%key:common::state::unknown%]",
"Unavailable": "[%key:common::state::unavailable%]",
"Open": "[%key:common::state::open%]",
"Closed": "[%key:common::state::closed%]",
"Connected": "[%key:common::state::connected%]",
"Disconnected": "[%key:common::state::disconnected%]",
"Locked": "[%key:common::state::locked%]",
"Unlocked": "[%key:common::state::unlocked%]",
"Active": "[%key:common::state::active%]",
"active": "[%key:common::state::active%]",
"Standby": "[%key:common::state::standby%]",
"Idle": "[%key:common::state::idle%]",
"idle": "[%key:common::state::idle%]",
"Paused": "[%key:common::state::paused%]",
"paused": "[%key:common::state::paused%]",
"Home": "[%key:common::state::home%]",
"Away": "[%key:common::state::not_home%]",
"[%key:state::default::off%]": "[%key:common::state::off%]",
"[%key:state::default::on%]": "[%key:common::state::on%]",
"[%key:state::cover::open%]": "[%key:common::state::open%]",
"[%key:state::cover::closed%]": "[%key:common::state::closed%]",
"[%key:state::lock::locked%]": "[%key:common::state::locked%]",
"[%key:state::lock::unlocked%]": "[%key:common::state::unlocked%]",
}
SKIP_DOMAIN = {"default", "scene"}
STATES_WITH_DEV_CLASS = {"binary_sensor", "zwave"}
GROUP_DELETE = {"opening", "closing", "stopped"} # They don't exist
def find_frontend_states():
    """Find frontend states.

    Source key -> target key
    Add key to integrations strings.json

    Reads the frontend's en.json state translations, writes each domain's
    states into that integration's strings.json, and builds a
    ``to_migrate`` mapping (frontend Lokalise key -> core Lokalise key)
    which is migrated after interactive confirmation.
    """
    frontend_states = json.loads(
        (FRONTEND_REPO / "src/translations/en.json").read_text()
    )["state"]

    # domain => state object
    to_write = {}
    to_migrate = {}

    for domain, states in frontend_states.items():
        if domain in SKIP_DOMAIN:
            continue

        to_key_base = f"component::{domain}::state"
        from_key_base = f"state::{domain}"

        if domain in STATES_WITH_DEV_CLASS:
            # States are nested one level deeper, keyed by device class.
            domain_to_write = dict(states)

            for device_class, dev_class_states in domain_to_write.items():
                # "default" device class maps to the catch-all "_" key.
                to_device_class = "_" if device_class == "default" else device_class
                for key in dev_class_states:
                    to_migrate[
                        f"{from_key_base}::{device_class}::{key}"
                    ] = f"{to_key_base}::{to_device_class}::{key}"

            # Rewrite "default" device class to _
            if "default" in domain_to_write:
                domain_to_write["_"] = domain_to_write.pop("default")
        else:
            if domain == "group":
                # GROUP_DELETE states don't exist; drop them before writing.
                for key in GROUP_DELETE:
                    states.pop(key)

            # Flat state dicts all go under the "_" catch-all device class.
            domain_to_write = {"_": states}

            for key in states:
                to_migrate[f"{from_key_base}::{key}"] = f"{to_key_base}::_::{key}"

        # Map out common values with
        # references: literal values found in STATE_REWRITE become
        # [%key:common::state::...%] references, and old-style
        # [%key:state::<domain>::<key>%] references are rewritten to the
        # new component-based reference format.
        for dev_class_states in domain_to_write.values():
            for key, value in dev_class_states.copy().items():
                if value in STATE_REWRITE:
                    dev_class_states[key] = STATE_REWRITE[value]
                    continue

                match = re.match(r"\[\%key:state::(\w+)::(.+)\%\]", value)
                if not match:
                    continue
                dev_class_states[key] = "[%key:component::{}::state::{}%]".format(
                    *match.groups()
                )

        to_write[domain] = domain_to_write

    # Merge the collected state blocks into each integration's strings.json.
    for domain, state in to_write.items():
        strings = INTEGRATIONS_DIR / domain / "strings.json"
        if strings.is_file():
            content = json.loads(strings.read_text())
        else:
            content = {}
        content["state"] = state
        strings.write_text(json.dumps(content, indent=2) + "\n")

    # Show the full key migration plan and require explicit confirmation
    # before touching Lokalise.
    pprint(to_migrate)
    print()
    while input("Type YES to confirm: ") != "YES":
        pass

    migrate_project_keys_translations(FRONTEND_PROJECT_ID, CORE_PROJECT_ID, to_migrate)
def run():
    """Migrate translations.

    One-off migration playbook: renames moon/season sensor state keys into
    their device-class-prefixed form, then migrates frontend state
    translations into integration strings.json files.
    """
    # Move moon/season sensor state keys under a "<domain>__<device class>"
    # prefix so they pass the platform-strings device-class validation.
    rename_keys(
        CORE_PROJECT_ID,
        {
            "component::moon::platform::sensor::state::new_moon": "component::moon::platform::sensor::state::moon__phase::new_moon",
            "component::moon::platform::sensor::state::waxing_crescent": "component::moon::platform::sensor::state::moon__phase::waxing_crescent",
            "component::moon::platform::sensor::state::first_quarter": "component::moon::platform::sensor::state::moon__phase::first_quarter",
            "component::moon::platform::sensor::state::waxing_gibbous": "component::moon::platform::sensor::state::moon__phase::waxing_gibbous",
            "component::moon::platform::sensor::state::full_moon": "component::moon::platform::sensor::state::moon__phase::full_moon",
            "component::moon::platform::sensor::state::waning_gibbous": "component::moon::platform::sensor::state::moon__phase::waning_gibbous",
            "component::moon::platform::sensor::state::last_quarter": "component::moon::platform::sensor::state::moon__phase::last_quarter",
            "component::moon::platform::sensor::state::waning_crescent": "component::moon::platform::sensor::state::moon__phase::waning_crescent",
            "component::season::platform::sensor::state::spring": "component::season::platform::sensor::state::season__season__::spring",
            "component::season::platform::sensor::state::summer": "component::season::platform::sensor::state::season__season__::summer",
            "component::season::platform::sensor::state::autumn": "component::season::platform::sensor::state::season__season__::autumn",
            "component::season::platform::sensor::state::winter": "component::season::platform::sensor::state::season__season__::winter",
        },
    )

    # Import new common keys
    # NOTE(review): commented out — presumably this step was already executed
    # once against Lokalise; confirm before re-enabling, as re-running would
    # attempt to migrate the common::state keys again.
    # migrate_project_keys_translations(
    #     FRONTEND_PROJECT_ID,
    #     CORE_PROJECT_ID,
    #     {
    #         "state::default::off": "common::state::off",
    #         "state::default::on": "common::state::on",
    #         "state::cover::open": "common::state::open",
    #         "state::cover::closed": "common::state::closed",
    #         "state::binary_sensor::connectivity::on": "common::state::connected",
    #         "state::binary_sensor::connectivity::off": "common::state::disconnected",
    #         "state::lock::locked": "common::state::locked",
    #         "state::lock::unlocked": "common::state::unlocked",
    #         "state::timer::active": "common::state::active",
    #         "state::camera::idle": "common::state::idle",
    #         "state::media_player::standby": "common::state::standby",
    #         "state::media_player::paused": "common::state::paused",
    #         "state::device_tracker::home": "common::state::home",
    #         "state::device_tracker::not_home": "common::state::not_home",
    #     },
    # )

    find_frontend_states()

    # Exit code 0: success.
    return 0

View File

@ -51,7 +51,8 @@ def run_upload_docker():
def generate_upload_data():
"""Generate the data for uploading."""
translations = {"component": {}}
translations = json.loads((INTEGRATIONS_DIR.parent / "strings.json").read_text())
translations["component"] = {}
for path in INTEGRATIONS_DIR.glob(f"*{os.sep}strings*.json"):
component = path.parent.name