From 1ea534e40032a7d74e3fdb7e85e3c3112c2d22f9 Mon Sep 17 00:00:00 2001 From: Artur Pragacz <49985303+arturpragacz@users.noreply.github.com> Date: Wed, 29 Oct 2025 14:16:57 +0100 Subject: [PATCH] Remove legacy platform support from translations (#155178) --- script/translations/develop.py | 2 +- script/translations/download.py | 129 +++++++++++--------------------- script/translations/upload.py | 19 +---- 3 files changed, 48 insertions(+), 102 deletions(-) diff --git a/script/translations/develop.py b/script/translations/develop.py index 00ac7bf98ac..a4f3c122b7e 100644 --- a/script/translations/develop.py +++ b/script/translations/develop.py @@ -91,7 +91,7 @@ def run_single(translations, flattened_translations, integration): json.dumps({"component": {integration: translations["component"][integration]}}) ) - download.write_integration_translations() + download.save_integrations_translations() def run(): diff --git a/script/translations/download.py b/script/translations/download.py index 0c9504f44cd..27d2af3f3ef 100755 --- a/script/translations/download.py +++ b/script/translations/download.py @@ -5,15 +5,13 @@ from __future__ import annotations import json from pathlib import Path -import re import subprocess from typing import Any from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR from .error import ExitApp -from .util import flatten_translations, get_lokalise_token, load_json_from_path +from .util import get_lokalise_token, load_json_from_path -FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json") DOWNLOAD_DIR = Path("build/translations-download").absolute() @@ -61,73 +59,58 @@ def save_json(filename: Path, data: list | dict) -> None: filename.write_text(json.dumps(data, sort_keys=True, indent=4), encoding="utf-8") -def get_component_path(lang, component) -> Path | None: - """Get the component translation path.""" - if (Path("homeassistant") / "components" / component).is_dir(): - return ( - Path("homeassistant") - / "components" - / 
component - / "translations" - / f"{lang}.json" - ) - return None +def filter_translations(translations: dict[str, Any], strings: dict[str, Any]) -> None: + """Remove translations that are not in the original strings.""" + for key in list(translations.keys()): + if key not in strings: + translations.pop(key) + continue - -def get_platform_path(lang, component, platform) -> Path: - """Get the platform translation path.""" - return ( - Path("homeassistant") - / "components" - / component - / "translations" - / f"{platform}.{lang}.json" - ) - - -def get_component_translations(translations): - """Get the component level translations.""" - translations = translations.copy() - translations.pop("platform", None) - - return translations + if isinstance(translations[key], dict): + if not isinstance(strings[key], dict): + translations.pop(key) + continue + filter_translations(translations[key], strings[key]) + if not translations[key]: + translations.pop(key) + continue def save_language_translations(lang, translations): - """Distribute the translations for this language.""" + """Save translations for a single language.""" components = translations.get("component", {}) for component, component_translations in components.items(): - base_translations = get_component_translations(component_translations) - if base_translations: - if (path := get_component_path(lang, component)) is None: - print( - f"Skipping {lang} for {component}, as the integration doesn't seem to exist." - ) - continue - if not ( - Path("homeassistant") / "components" / component / "strings.json" - ).exists(): - print( - f"Skipping {lang} for {component}, as the integration doesn't have a strings.json file." 
- ) - continue - path.parent.mkdir(parents=True, exist_ok=True) - base_translations = pick_keys(component, base_translations) - save_json(path, base_translations) + # Remove legacy platform translations + component_translations.pop("platform", None) - if "platform" not in component_translations: + if not component_translations: continue - for platform, platform_translations in component_translations[ - "platform" - ].items(): - path = get_platform_path(lang, component, platform) - path.parent.mkdir(parents=True, exist_ok=True) - save_json(path, platform_translations) + component_path = Path("homeassistant") / "components" / component + if not component_path.is_dir(): + print( + f"Skipping {lang} for {component}, as the integration doesn't seem to exist." + ) + continue + + strings_path = component_path / "strings.json" + if not strings_path.exists(): + print( + f"Skipping {lang} for {component}, as the integration doesn't have a strings.json file." + ) + continue + strings = load_json_from_path(strings_path) + + path = component_path / "translations" / f"{lang}.json" + path.parent.mkdir(parents=True, exist_ok=True) + + filter_translations(component_translations, strings) + + save_json(path, component_translations) -def write_integration_translations(): - """Write integration translations.""" +def save_integrations_translations(): + """Save integrations translations.""" for lang_file in DOWNLOAD_DIR.glob("*.json"): lang = lang_file.stem translations = load_json_from_path(lang_file) @@ -140,32 +123,6 @@ def delete_old_translations(): fil.unlink() -def get_current_keys(component: str) -> dict[str, Any]: - """Get the current keys for a component.""" - strings_path = Path("homeassistant") / "components" / component / "strings.json" - return load_json_from_path(strings_path) - - -def pick_keys(component: str, translations: dict[str, Any]) -> dict[str, Any]: - """Pick the keys that are in the current strings.""" - flat_translations = flatten_translations(translations) - 
flat_current_keys = flatten_translations(get_current_keys(component)) - flatten_result = {} - for key in flat_current_keys: - if key in flat_translations: - flatten_result[key] = flat_translations[key] - result = {} - for key, value in flatten_result.items(): - parts = key.split("::") - d = result - for part in parts[:-1]: - if part not in d: - d[part] = {} - d = d[part] - d[parts[-1]] = value - return result - - def run(): """Run the script.""" DOWNLOAD_DIR.mkdir(parents=True, exist_ok=True) @@ -174,6 +131,6 @@ def run(): delete_old_translations() - write_integration_translations() + save_integrations_translations() return 0 diff --git a/script/translations/upload.py b/script/translations/upload.py index ee4a57bc00a..be1e8fc76ca 100755 --- a/script/translations/upload.py +++ b/script/translations/upload.py @@ -4,14 +4,12 @@ import json import os import pathlib -import re import subprocess from .const import CLI_2_DOCKER_IMAGE, CORE_PROJECT_ID, INTEGRATIONS_DIR from .error import ExitApp from .util import get_current_branch, get_lokalise_token, load_json_from_path -FILENAME_FORMAT = re.compile(r"strings\.(?P<suffix>\w+)\.json") LOCAL_FILE = pathlib.Path("build/translations-upload.json").absolute() CONTAINER_FILE = "/opt/src/build/translations-upload.json" LANG_ISO = "en" @@ -54,20 +52,11 @@ def run_upload_docker(): def generate_upload_data(): """Generate the data for uploading.""" translations = load_json_from_path(INTEGRATIONS_DIR.parent / "strings.json") - translations["component"] = {} - for path in INTEGRATIONS_DIR.glob(f"*{os.sep}strings*.json"): - component = path.parent.name - match = FILENAME_FORMAT.search(path.name) - platform = match.group("suffix") if match else None - - parent = translations["component"].setdefault(component, {}) - - if platform: - platforms = parent.setdefault("platform", {}) - parent = platforms.setdefault(platform, {}) - - parent.update(load_json_from_path(path)) + translations["component"] = { + path.parent.name: load_json_from_path(path) + 
for path in INTEGRATIONS_DIR.glob(f"*{os.sep}strings.json") + } return translations