
Commit

Adding argparse to make it possible to skip WIP rows
ollimeier committed Oct 23, 2024
1 parent 74551cc · commit 3e25eec
Showing 1 changed file with 108 additions and 90 deletions.
scripts/rebuild_languages.py (198 changes: 108 additions & 90 deletions)

@@ -1,5 +1,6 @@
 #!/usr/bin/env python

+import argparse
 import csv
 import io
 import json
@@ -9,7 +10,8 @@
 from urllib.request import urlopen


-def prettier(path):
+def prettier(path, repoDir):
+
     subprocess.run(
         [
             os.fspath(repoDir / "node_modules" / ".bin" / "prettier"),
@@ -33,104 +35,120 @@ def downloadSheet(url):
     return list(reader)


-languageSpreadsheetURL = (
-    "https://docs.google.com/"
-    "spreadsheets/d/1woTU8dZCHJh7yvdk-N1kgQBUj4Sn3SdRsbKgn6ltJQs/"
-)
-
-rows = downloadSheet(languageSpreadsheetURL + "export?format=csv")
-
-numHeaders = 5
-headers = rows[:numHeaders]
-assert headers[0][0] == "Go to Documentation", headers[0][0]
-assert headers[1][2] == "English", headers[1][2]
-assert headers[2][2] == "English", headers[2][2]
-assert headers[3][2] == "en", headers[3][2]
-
-rows = rows[numHeaders:]
-
-startColumn = 2
-
-languages = []
-languageStrings = {}
-
-for columnIndex in range(startColumn, len(headers[1])):
-    languageInEnglish = headers[1][columnIndex]
-    languageInLanguage = headers[2][columnIndex]
-    languageCode = headers[3][columnIndex]
-    languageStatus = headers[4][columnIndex]
-    assert languageCode
-    if not languageStatus.strip():
-        continue
-
-    languages.append(
-        dict(
-            code=languageCode,
-            langEn=languageInEnglish,
-            langLang=languageInLanguage,
-            status=languageStatus,
+def main(wip):
+    languageSpreadsheetURL = (
+        "https://docs.google.com/"
+        "spreadsheets/d/1woTU8dZCHJh7yvdk-N1kgQBUj4Sn3SdRsbKgn6ltJQs/"
+    )
+
+    rows = downloadSheet(languageSpreadsheetURL + "export?format=csv")
+
+    numHeaders = 5
+    headers = rows[:numHeaders]
+    assert headers[0][0] == "Go to Documentation", headers[0][0]
+    assert headers[1][2] == "English", headers[1][2]
+    assert headers[2][2] == "English", headers[2][2]
+    assert headers[3][2] == "en", headers[3][2]
+
+    rows = rows[numHeaders:]
+
+    startColumn = 2
+
+    languages = []
+    languageStrings = {}
+
+    for columnIndex in range(startColumn, len(headers[1])):
+        languageInEnglish = headers[1][columnIndex]
+        languageInLanguage = headers[2][columnIndex]
+        languageCode = headers[3][columnIndex]
+        languageStatus = headers[4][columnIndex]
+        assert languageCode
+        if not languageStatus.strip():
+            continue
+
+        languages.append(
+            dict(
+                code=languageCode,
+                langEn=languageInEnglish,
+                langLang=languageInLanguage,
+                status=languageStatus,
+            )
         )
-    )
-
-    languageStrings[languageCode] = strings = {}
-
-    for row in rows:
-        key = row[1]
-        if not key.strip():
-            continue
-        string = row[columnIndex]
-        if not string or string == "-":
-            string = languageStrings["en"].get(key, "!missing!")
-        strings[key] = string
-
-
-repoDir = pathlib.Path(__file__).resolve().parent.parent
-langDir = repoDir / "src" / "fontra" / "client" / "lang"
-assert langDir.is_dir()
-localizationJSPath = repoDir / "src" / "fontra" / "client" / "core" / "localization.js"
-assert localizationJSPath.is_file()
-
-localizationJSSource = localizationJSPath.read_text(encoding="utf-8")
-
-languagesList = "\n".join(f" {jsonDump(langs)}," for langs in languages)
-languagesBlock = f"""// Don't edit this block, see scripts/rebuild_languages.py
+
+        languageStrings[languageCode] = strings = {}
+
+        for row in rows:
+            if wip and row[0] == "WIP":
+                # Skip rows marked as work in progress
+                continue
+
+            key = row[1]
+            if not key.strip():
+                continue
+            string = row[columnIndex]
+            if not string or string == "-":
+                string = languageStrings["en"].get(key, "!missing!")
+            strings[key] = string
+
+    repoDir = pathlib.Path(__file__).resolve().parent.parent
+    langDir = repoDir / "src" / "fontra" / "client" / "lang"
+    assert langDir.is_dir()
+    localizationJSPath = (
+        repoDir / "src" / "fontra" / "client" / "core" / "localization.js"
+    )
+    assert localizationJSPath.is_file()
+
+    localizationJSSource = localizationJSPath.read_text(encoding="utf-8")
+
+    languagesList = "\n".join(f" {jsonDump(langs)}," for langs in languages)
+    languagesBlock = f"""// Don't edit this block, see scripts/rebuild_languages.py
 export const languages = [
 {languagesList}
 ];
 """
-
-indexStart = localizationJSSource.find(languagesBlock.splitlines()[0])
-assert indexStart > 0
-indexEnd = localizationJSSource.find("];\n", indexStart)
-assert indexEnd > indexStart
-
-localizationJSSource = (
-    localizationJSSource[:indexStart]
-    + languagesBlock
-    + localizationJSSource[indexEnd + 3 :]
-)
-
-localizationJSPath.write_text(localizationJSSource, encoding="utf-8")
-prettier(localizationJSPath)
-
-
-languageSourceTemplate = """\
+
+    indexStart = localizationJSSource.find(languagesBlock.splitlines()[0])
+    assert indexStart > 0
+    indexEnd = localizationJSSource.find("];\n", indexStart)
+    assert indexEnd > indexStart
+
+    localizationJSSource = (
+        localizationJSSource[:indexStart]
+        + languagesBlock
+        + localizationJSSource[indexEnd + 3 :]
+    )
+
+    localizationJSPath.write_text(localizationJSSource, encoding="utf-8")
+    prettier(localizationJSPath, repoDir)
+
+    languageSourceTemplate = """\
 // Don't edit this file: it is generated by scripts/rebuild_languages.py
 // The strings are maintained here: {url}
 export const strings = {{
 {stringsBlock}
 }};
 """
-
-for languageCode, strings in languageStrings.items():
-    languagePath = langDir / f"{languageCode.strip()}.js"
-    print("writing", languagePath)
-    lines = []
-    for k, v in sorted(strings.items()):
-        lines.append(f" {jsonDump(k.strip())}: {jsonDump(v)},")
-    stringsBlock = "\n".join(lines)
-    languageSource = languageSourceTemplate.format(
-        stringsBlock=stringsBlock, url=languageSpreadsheetURL
+
+    for languageCode, strings in languageStrings.items():
+        languagePath = langDir / f"{languageCode.strip()}.js"
+        print("writing", languagePath)
+        lines = []
+        for k, v in sorted(strings.items()):
+            lines.append(f" {jsonDump(k.strip())}: {jsonDump(v)},")
+        stringsBlock = "\n".join(lines)
+        languageSource = languageSourceTemplate.format(
+            stringsBlock=stringsBlock, url=languageSpreadsheetURL
+        )
+        languagePath.write_text(languageSource, encoding="utf-8")
+        prettier(languagePath, repoDir)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(
+        description="Script to rebuild the language files from the Google Sheet"
+    )
+    parser.add_argument(
+        "--wip", action="store_true", required=False, help="Flag to skip WIP rows"
     )
-    languagePath.write_text(languageSource, encoding="utf-8")
-    prettier(languagePath)
+    args = parser.parse_args()
+    main(args.wip)
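Usage note (not part of the commit): a minimal, self-contained sketch of how the new flag behaves. Because the argument uses action="store_true", args.wip defaults to False, so rows whose first column is "WIP" are skipped only when the script is run with --wip, e.g. python scripts/rebuild_languages.py --wip.

# Illustration only, mirroring the argparse setup added in this commit.
import argparse

parser = argparse.ArgumentParser(
    description="Script to rebuild the language files from the Google Sheet"
)
parser.add_argument(
    "--wip", action="store_true", required=False, help="Flag to skip WIP rows"
)

print(parser.parse_args([]).wip)  # False: WIP rows are included
print(parser.parse_args(["--wip"]).wip)  # True: WIP rows are skipped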
