From 3e25eec020882521abf05e458acd6e5bd0263248 Mon Sep 17 00:00:00 2001
From: Olli Meier
Date: Wed, 23 Oct 2024 11:53:52 +0200
Subject: [PATCH] Adding argparse to make it possible to skip WIP rows

---
 scripts/rebuild_languages.py | 198 +++++++++++++++++++----------
 1 file changed, 108 insertions(+), 90 deletions(-)

diff --git a/scripts/rebuild_languages.py b/scripts/rebuild_languages.py
index 962b7c1eb..3fe7b8a9b 100755
--- a/scripts/rebuild_languages.py
+++ b/scripts/rebuild_languages.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python

+import argparse
 import csv
 import io
 import json
@@ -9,7 +10,8 @@
 from urllib.request import urlopen


-def prettier(path):
+def prettier(path, repoDir):
+
     subprocess.run(
         [
             os.fspath(repoDir / "node_modules" / ".bin" / "prettier"),
@@ -33,104 +35,120 @@ def downloadSheet(url):
     return list(reader)


-languageSpreadsheetURL = (
-    "https://docs.google.com/"
-    "spreadsheets/d/1woTU8dZCHJh7yvdk-N1kgQBUj4Sn3SdRsbKgn6ltJQs/"
-)
+def main(wip):
+    languageSpreadsheetURL = (
+        "https://docs.google.com/"
+        "spreadsheets/d/1woTU8dZCHJh7yvdk-N1kgQBUj4Sn3SdRsbKgn6ltJQs/"
+    )

-rows = downloadSheet(languageSpreadsheetURL + "export?format=csv")
+    rows = downloadSheet(languageSpreadsheetURL + "export?format=csv")

-numHeaders = 5
-headers = rows[:numHeaders]
-assert headers[0][0] == "Go to Documentation", headers[0][0]
-assert headers[1][2] == "English", headers[1][2]
-assert headers[2][2] == "English", headers[2][2]
-assert headers[3][2] == "en", headers[3][2]
+    numHeaders = 5
+    headers = rows[:numHeaders]
+    assert headers[0][0] == "Go to Documentation", headers[0][0]
+    assert headers[1][2] == "English", headers[1][2]
+    assert headers[2][2] == "English", headers[2][2]
+    assert headers[3][2] == "en", headers[3][2]

-rows = rows[numHeaders:]
+    rows = rows[numHeaders:]

-startColumn = 2
+    startColumn = 2

-languages = []
-languageStrings = {}
+    languages = []
+    languageStrings = {}

-for columnIndex in range(startColumn, len(headers[1])):
-    languageInEnglish = headers[1][columnIndex]
-    languageInLanguage = headers[2][columnIndex]
-    languageCode = headers[3][columnIndex]
-    languageStatus = headers[4][columnIndex]
-    assert languageCode
-    if not languageStatus.strip():
-        continue
+    for columnIndex in range(startColumn, len(headers[1])):
+        languageInEnglish = headers[1][columnIndex]
+        languageInLanguage = headers[2][columnIndex]
+        languageCode = headers[3][columnIndex]
+        languageStatus = headers[4][columnIndex]
+        assert languageCode
+        if not languageStatus.strip():
+            continue

-    languages.append(
-        dict(
-            code=languageCode,
-            langEn=languageInEnglish,
-            langLang=languageInLanguage,
-            status=languageStatus,
+        languages.append(
+            dict(
+                code=languageCode,
+                langEn=languageInEnglish,
+                langLang=languageInLanguage,
+                status=languageStatus,
+            )
         )
-    )

-    languageStrings[languageCode] = strings = {}
+        languageStrings[languageCode] = strings = {}

-    for row in rows:
-        key = row[1]
-        if not key.strip():
-            continue
-        string = row[columnIndex]
-        if not string or string == "-":
-            string = languageStrings["en"].get(key, "!missing!")
-        strings[key] = string
-
-
-repoDir = pathlib.Path(__file__).resolve().parent.parent
-langDir = repoDir / "src" / "fontra" / "client" / "lang"
-assert langDir.is_dir()
-localizationJSPath = repoDir / "src" / "fontra" / "client" / "core" / "localization.js"
-assert localizationJSPath.is_file()
+        for row in rows:
+            if wip and row[0] == "WIP":
+                # Skip rows marked as work in progress
+                continue
+
+            key = row[1]
+            if not key.strip():
+                continue
+            string = row[columnIndex]
+            if not string or string == "-":
+                string = languageStrings["en"].get(key, "!missing!")
+            strings[key] = string
+
+    repoDir = pathlib.Path(__file__).resolve().parent.parent
+    langDir = repoDir / "src" / "fontra" / "client" / "lang"
+    assert langDir.is_dir()
+    localizationJSPath = (
+        repoDir / "src" / "fontra" / "client" / "core" / "localization.js"
+    )
+    assert localizationJSPath.is_file()

-localizationJSSource = localizationJSPath.read_text(encoding="utf-8")
+    localizationJSSource = localizationJSPath.read_text(encoding="utf-8")

-languagesList = "\n".join(f"  {jsonDump(langs)}," for langs in languages)
-languagesBlock = f"""// Don't edit this block, see scripts/rebuild_languages.py
-export const languages = [
-{languagesList}
-];
-"""
+    languagesList = "\n".join(f"  {jsonDump(langs)}," for langs in languages)
+    languagesBlock = f"""// Don't edit this block, see scripts/rebuild_languages.py
+    export const languages = [
+    {languagesList}
+    ];
+    """

-indexStart = localizationJSSource.find(languagesBlock.splitlines()[0])
-assert indexStart > 0
-indexEnd = localizationJSSource.find("];\n", indexStart)
-assert indexEnd > indexStart
+    indexStart = localizationJSSource.find(languagesBlock.splitlines()[0])
+    assert indexStart > 0
+    indexEnd = localizationJSSource.find("];\n", indexStart)
+    assert indexEnd > indexStart

-localizationJSSource = (
-    localizationJSSource[:indexStart]
-    + languagesBlock
-    + localizationJSSource[indexEnd + 3 :]
-)
+    localizationJSSource = (
+        localizationJSSource[:indexStart]
+        + languagesBlock
+        + localizationJSSource[indexEnd + 3 :]
+    )

-localizationJSPath.write_text(localizationJSSource, encoding="utf-8")
-prettier(localizationJSPath)
-
-
-languageSourceTemplate = """\
-// Don't edit this file: it is generated by scripts/rebuild_languages.py
-// The strings are maintained here: {url}
-export const strings = {{
-{stringsBlock}
-}};
-"""
+    localizationJSPath.write_text(localizationJSSource, encoding="utf-8")
+    prettier(localizationJSPath, repoDir)
+
+    languageSourceTemplate = """\
+    // Don't edit this file: it is generated by scripts/rebuild_languages.py
+    // The strings are maintained here: {url}
+    export const strings = {{
+    {stringsBlock}
+    }};
+    """

-for languageCode, strings in languageStrings.items():
-    languagePath = langDir / f"{languageCode.strip()}.js"
-    print("writing", languagePath)
-    lines = []
-    for k, v in sorted(strings.items()):
-        lines.append(f"  {jsonDump(k.strip())}: {jsonDump(v)},")
-    stringsBlock = "\n".join(lines)
-    languageSource = languageSourceTemplate.format(
-        stringsBlock=stringsBlock, url=languageSpreadsheetURL
-    )
-    languagePath.write_text(languageSource, encoding="utf-8")
-    prettier(languagePath)
+    for languageCode, strings in languageStrings.items():
+        languagePath = langDir / f"{languageCode.strip()}.js"
+        print("writing", languagePath)
+        lines = []
+        for k, v in sorted(strings.items()):
+            lines.append(f"  {jsonDump(k.strip())}: {jsonDump(v)},")
+        stringsBlock = "\n".join(lines)
+        languageSource = languageSourceTemplate.format(
+            stringsBlock=stringsBlock, url=languageSpreadsheetURL
+        )
+        languagePath.write_text(languageSource, encoding="utf-8")
+        prettier(languagePath, repoDir)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Script to rebuild the language files from the Google Sheet"
+    )
+    parser.add_argument(
+        "--wip", action="store_true", required=False, help="Flag to skip WIP rows"
+    )
+    args = parser.parse_args()
+    main(args.wip)
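
For reviewers, a brief usage sketch of the new flag, based on the argparse setup in the patch (the exact invocation below is a hypothetical example; the patch itself only adds --wip):

    python scripts/rebuild_languages.py          # default: wip is False, WIP rows are kept as before
    python scripts/rebuild_languages.py --wip    # wip is True, rows whose first column is "WIP" are skipped

Because the argument uses action="store_true", it takes no value and defaults to False, so running the script without the flag preserves the previous behaviour.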