2023-08-11 15:57:32 +00:00
|
|
|
from __future__ import annotations
|
|
|
|
|
|
|
|
import datetime
|
|
|
|
import json
|
|
|
|
import logging
|
|
|
|
import typing as t
|
|
|
|
import subprocess
|
|
|
|
from pathlib import Path
|
|
|
|
|
|
|
|
import click
|
|
|
|
|
|
|
|
from trezorlib import cosi, models, merkle_tree
|
|
|
|
from trezorlib._internal import translations
|
2024-03-25 15:18:17 +00:00
|
|
|
from trezorlib._internal.translations import VersionTuple
|
2023-08-11 15:57:32 +00:00
|
|
|
|
|
|
|
# Directory containing this script; all inputs/outputs are resolved against it.
HERE = Path(__file__).parent.resolve()

LOG = logging.getLogger(__name__)

# All Trezor models for which translation blobs are generated.
ALL_MODELS = {models.T2B1, models.T2T1, models.T3T1, models.T3B1}

# Development CoSi signing keys (byte 0xdd/0xde/0xdf repeated 32x); used by
# `gen` to sign blobs when no production signature is requested.
PRIVATE_KEYS_DEV = [byte * 32 for byte in (b"\xdd", b"\xde", b"\xdf")]

# Production CoSi public keys; `sign` verifies incoming signatures against these.
PUBLIC_KEYS_PROD = [
    bytes.fromhex(key)
    for key in (
        "62cea8257b0bca15b33a76405a79c4881eb20ee82346813181a50291d6caec67",
        "594ef09e51139372e528c6c5b8742ee1806f9114caeaeedb04be6a98e1ce3020",
    )
]

# Firmware version header that the blob version is kept in sync with.
VERSION_H = HERE.parent / "embed" / "firmware" / "version.h"
# Stores the current (unsigned) Merkle root and the history of signed roots.
SIGNATURES_JSON = HERE / "signatures.json"
|
|
|
|
|
|
|
|
|
2024-03-25 14:35:19 +00:00
|
|
|
class SignedInfo(t.TypedDict):
    """Entry of the "history" list in signatures.json: a Merkle root together
    with the production signature that covers it."""

    merkle_root: str  # hex-encoded Merkle root of all blob headers
    signature: str  # hex: one sigmask byte followed by the CoSi signature
    datetime: str  # ISO-8601 timestamp from when the root was generated
    commit: str  # git HEAD commit hash at generation time
    version: str  # dotted firmware version string (see _version_str)
|
2024-03-25 14:35:19 +00:00
|
|
|
|
|
|
|
|
|
|
|
class UnsignedInfo(t.TypedDict):
    """The "current" entry of signatures.json: a Merkle root that has not
    (yet) received a production signature."""

    merkle_root: str  # hex-encoded Merkle root of all blob headers
    datetime: str  # ISO-8601 timestamp from when the root was generated
    commit: str  # git HEAD commit hash at generation time
|
|
|
|
|
|
|
|
|
|
|
|
class SignatureFile(t.TypedDict):
    """Top-level structure of signatures.json."""

    current: UnsignedInfo  # the Merkle root awaiting a signature
    history: list[SignedInfo]  # previously signed roots, newest first
|
2023-08-11 15:57:32 +00:00
|
|
|
|
|
|
|
|
2024-03-25 15:18:17 +00:00
|
|
|
def _version_from_version_h() -> VersionTuple:
    """Parse the firmware version from the ``#define`` lines of version.h.

    Returns:
        (major, minor, patch, build) read from the VERSION_MAJOR,
        VERSION_MINOR, VERSION_PATCH and VERSION_BUILD defines.

    Raises:
        KeyError: if any of the four symbols is missing from the file.
    """
    defines: dict[str, int] = {}
    with open(VERSION_H) as f:
        for line in f:
            # Accept only lines of the exact shape "#define SYMBOL <int>";
            # everything else in the header is silently skipped.
            # (Previously this used `assert` inside a blanket `except`, which
            # would misbehave under `python -O` where asserts are stripped.)
            parts = line.rstrip().split()
            if len(parts) != 3:
                continue
            define, symbol, number = parts
            if define != "#define":
                continue
            try:
                defines[symbol] = int(number)
            except ValueError:
                # third token is not a plain integer (e.g. a macro) -- skip
                continue

    return (
        defines["VERSION_MAJOR"],
        defines["VERSION_MINOR"],
        defines["VERSION_PATCH"],
        defines["VERSION_BUILD"],
    )
|
|
|
|
|
|
|
|
|
2024-06-12 09:21:34 +00:00
|
|
|
def _version_str(version: tuple[int, ...]) -> str:
|
2024-03-25 15:18:17 +00:00
|
|
|
return ".".join(str(v) for v in version)
|
|
|
|
|
|
|
|
|
2024-03-25 14:35:19 +00:00
|
|
|
def make_tree_info(merkle_root: bytes) -> UnsignedInfo:
    """Create a fresh "current" entry for signatures.json.

    Records the hex Merkle root together with the current UTC timestamp and
    the commit hash that git HEAD points at.
    """
    # NOTE(review): datetime.utcnow() is naive and deprecated since 3.12; kept
    # so the serialized timestamp format stays byte-identical.
    timestamp = datetime.datetime.utcnow().isoformat()
    head = subprocess.check_output(["git", "rev-parse", "HEAD"], cwd=HERE)
    commit = head.decode("ascii").strip()
    return UnsignedInfo(
        merkle_root=merkle_root.hex(), datetime=timestamp, commit=commit
    )
|
|
|
|
|
|
|
|
|
2024-03-25 15:18:17 +00:00
|
|
|
def sign_info(
    info: UnsignedInfo, signature: bytes, version: VersionTuple
) -> SignedInfo:
    """Turn an unsigned tree-info entry into a signed history entry.

    Key order (signature, version, then the original fields) is preserved so
    the JSON serialization of signatures.json stays stable.
    """
    signed: SignedInfo = {
        "signature": signature.hex(),
        "version": _version_str(version),
        **info,
    }
    return signed
|
2024-03-25 14:35:19 +00:00
|
|
|
|
|
|
|
|
2023-08-11 15:57:32 +00:00
|
|
|
def update_merkle_root(signature_file: SignatureFile, merkle_root: bytes) -> bool:
    """Update signatures.json with the new Merkle root.

    Returns True if the signature file was updated, False if it was already up-to-date.
    """
    new_root_hex = merkle_root.hex()
    if signature_file["current"]["merkle_root"] == new_root_hex:
        # stored root already matches -- nothing to do
        return False

    # replace the "current" entry with a fresh one and persist the whole file
    # (NOTE(review): the caller in `gen` writes SIGNATURES_JSON again, with a
    # trailing newline -- confirm whether this inner write is still needed)
    signature_file["current"] = make_tree_info(merkle_root)
    SIGNATURES_JSON.write_text(json.dumps(signature_file, indent=2))
    return True
|
|
|
|
|
|
|
|
|
2024-03-25 15:18:17 +00:00
|
|
|
class TranslationsDir:
    """Accessor for the translations directory: per-language JSON definitions,
    the shared order.json, and the fonts subdirectory."""

    def __init__(self, path: Path = HERE):
        self.path = path
        # language ordering shared by all blobs, loaded from order.json
        self.order = translations.order_from_json(
            json.loads((self.path / "order.json").read_text())
        )

    @property
    def fonts_dir(self) -> Path:
        """Directory with font files referenced by the JSON definitions."""
        return self.path / "fonts"

    def _lang_path(self, lang: str) -> Path:
        """Path of the JSON definition file for *lang* (e.g. "cs" -> cs.json)."""
        return self.path / f"{lang}.json"

    def load_lang(self, lang: str) -> translations.JsonDef:
        """Load the JSON definition for *lang*, expanding the ##Safe3 fonts
        entry into the per-model T2B1/T3B1 entries."""
        json_def = json.loads(self._lang_path(lang).read_text())
        # special-case for T2B1 and T3B1, so that we keep the info in one place instead
        # of duplicating it in two entries, risking a desync
        if (fonts_safe3 := json_def.get("fonts", {}).get("##Safe3")) is not None:
            json_def["fonts"]["T2B1"] = fonts_safe3
            json_def["fonts"]["T3B1"] = fonts_safe3
        return json_def

    def save_lang(self, lang: str, data: translations.JsonDef) -> None:
        """Serialize *data* back to the language's JSON file (pretty-printed,
        sorted keys, trailing newline)."""
        self._lang_path(lang).write_text(
            json.dumps(
                data,
                indent=2,
                ensure_ascii=False,
                sort_keys=True,
            )
            + "\n"
        )

    def all_languages(self) -> t.Iterable[str]:
        """Yield the two-letter codes of all languages present (??.json files)."""
        return (lang_file.stem for lang_file in self.path.glob("??.json"))

    def update_version_from_h(self) -> VersionTuple:
        """Sync every language JSON's header version with version.h.

        Returns the version parsed from version.h; rewrites only files whose
        stored version differs.
        """
        version = _version_from_version_h()
        for lang in self.all_languages():
            blob_json = self.load_lang(lang)
            blob_version = translations.version_from_json(
                blob_json["header"]["version"]
            )
            if blob_version != version:
                # only major.minor.patch is stored in the JSON header
                blob_json["header"]["version"] = _version_str(version[:3])
                self.save_lang(lang, blob_json)
        return version

    def generate_single_blob(
        self,
        lang: str,
        model: models.TrezorModel,
        version: VersionTuple | None,
    ) -> translations.TranslationsBlob:
        """Build the translations blob for one language/model pair.

        If *version* is None, the version stored in the language JSON is used.
        """
        blob_json = self.load_lang(lang)
        blob_version = translations.version_from_json(blob_json["header"]["version"])
        return translations.blob_from_defs(
            blob_json, self.order, model, version or blob_version, self.fonts_dir
        )

    def generate_all_blobs(
        self, version: VersionTuple | None
    ) -> list[translations.TranslationsBlob]:
        """Build blobs for every language (except English) and every model.

        Per-blob build failures are logged and skipped; a version mismatch
        between successfully built blobs raises ValueError (wrapped by the
        same best-effort handler, so it is logged, not propagated).
        """
        common_version = None

        all_blobs: list[translations.TranslationsBlob] = []
        for lang in self.all_languages():
            if lang == "en":
                # English is built into the firmware, no blob needed
                continue

            for model in ALL_MODELS:
                try:
                    blob = self.generate_single_blob(lang, model, version)
                    blob_version = blob.header.firmware_version
                    if common_version is None:
                        common_version = blob_version
                    elif blob_version != common_version:
                        raise ValueError(
                            f"Language {lang} has version {blob_version} but expected {common_version}"
                        )
                    all_blobs.append(blob)
                except Exception as e:
                    import traceback

                    traceback.print_exc()
                    LOG.warning(
                        f"Failed to build {lang} for {model.internal_name}: {e}"
                    )
                    continue

                LOG.info(f"Built {lang} for {model.internal_name}")

        return all_blobs
|
2023-08-11 15:57:32 +00:00
|
|
|
|
|
|
|
|
|
|
|
def build_all_blobs(
    all_blobs: list[translations.TranslationsBlob],
    merkle_tree: merkle_tree.MerkleTree,
    sigmask: int,
    signature: bytes,
    production: bool = False,
) -> None:
    """Attach proofs to all blobs and write them out as .bin files.

    Each blob receives a Merkle proof for its header plus the given signature
    and sigmask. Output files are written next to this script as
    ``translation-<model>-<lang>-<version><suffix>.bin``, where the suffix is
    "-unsigned" unless *production* is True.
    """
    # NOTE: the `merkle_tree` parameter shadows the module-level import of the
    # same name; kept as-is since renaming would change the public signature.
    for blob in all_blobs:
        proof = translations.Proof(
            merkle_proof=merkle_tree.get_proof(blob.header_bytes),
            signature=signature,
            sigmask=sigmask,
        )
        blob.proof = proof
        header = blob.header
        model = header.model.value.decode("ascii")
        version = _version_str(header.firmware_version[:3])
        if production:
            suffix = ""
        else:
            suffix = "-unsigned"
        filename = f"translation-{model}-{header.language}-{version}{suffix}.bin"
        (HERE / filename).write_bytes(blob.build())
        # fix: log the actual output filename (the placeholder had been lost)
        LOG.info(f"Wrote {header.language} for {model} v{version}: {filename}")
|
|
|
|
|
|
|
|
|
|
|
|
@click.group()
def cli() -> None:
    # Root click group; the subcommands below (gen, merkle_root, sign, merge,
    # characters, lowercase) attach to it. No docstring: click would show it
    # as help text.
    pass
|
|
|
|
|
|
|
|
|
|
|
|
@cli.command()
@click.option("--signed", is_flag=True, help="Generate signed blobs.")
@click.option(
    "--version", "version_str", help="Set the blob version independent of JSON data."
)
def gen(signed: bool | None, version_str: str | None) -> None:
    """Generate all language blobs for all models.

    The generated blobs will be signed with the development keys.
    """
    tdir = TranslationsDir()

    # No explicit version: sync language JSONs from version.h and use that.
    if version_str is None:
        version = tdir.update_version_from_h()
    else:
        version = translations.version_from_json(version_str)

    all_blobs = tdir.generate_all_blobs(version)
    # Merkle tree is built over the header bytes of every blob.
    tree = merkle_tree.MerkleTree(b.header_bytes for b in all_blobs)
    root = tree.get_root_hash()

    signature_file: SignatureFile = json.loads(SIGNATURES_JSON.read_text())

    if signed:
        # Look up a previously recorded production signature for this root.
        for entry in signature_file["history"]:
            if entry["merkle_root"] == root.hex():
                signature_hex = entry["signature"]
                # first byte is the sigmask, the rest is the CoSi signature
                signature_bytes = bytes.fromhex(signature_hex)
                sigmask, signature = signature_bytes[0], signature_bytes[1:]
                build_all_blobs(all_blobs, tree, sigmask, signature, production=True)
                return
        # for/else: reached only when no history entry matched the root
        else:
            raise click.ClickException(
                "No matching signature found in signatures.json. Run `cli.py sign` first."
            )

    # Development path: sign with the dev keys (all three, hence sigmask 0b111).
    signature = cosi.sign_with_privkeys(root, PRIVATE_KEYS_DEV)
    sigmask = 0b111
    build_all_blobs(all_blobs, tree, sigmask, signature)

    if version_str is not None:
        click.echo("Skipping Merkle root update because of explicit version.")
    elif update_merkle_root(signature_file, root):
        SIGNATURES_JSON.write_text(json.dumps(signature_file, indent=2) + "\n")
        click.echo("Updated signatures.json")
    else:
        click.echo("signatures.json is already up-to-date")
|
|
|
|
|
|
|
|
|
|
|
|
@cli.command()
@click.option(
    "--version", "version_str", help="Set the blob version independent of JSON data."
)
def merkle_root(version_str: str | None) -> None:
    """Print the Merkle root of all language blobs."""
    if version_str is None:
        # None = use whatever version each language JSON declares
        version = None
    else:
        version = translations.version_from_json(version_str)

    tdir = TranslationsDir()
    all_blobs = tdir.generate_all_blobs(version)
    tree = merkle_tree.MerkleTree(b.header_bytes for b in all_blobs)
    root = tree.get_root_hash()

    if version_str is not None:
        # short-circuit: just print the Merkle root
        click.echo(root.hex())
        return

    # we are using in-tree version. check in-tree merkle root
    signature_file: SignatureFile = json.loads(SIGNATURES_JSON.read_text())
    if signature_file["current"]["merkle_root"] != root.hex():
        raise click.ClickException(
            f"Merkle root mismatch!\n"
            f"Expected: {root.hex()}\n"
            f"Stored in signatures.json: {signature_file['current']['merkle_root']}\n"
            "Run `cli.py gen` to update the stored Merkle root."
        )

    click.echo(root.hex())
|
|
|
|
|
|
|
|
|
|
|
|
@cli.command()
@click.argument("signature_hex")
@click.option("--force", is_flag=True, help="Write even if the signature is invalid.")
@click.option(
    "--version", "version_str", help="Set the blob version independent of JSON data."
)
def sign(signature_hex: str, force: bool | None, version_str: str | None) -> None:
    """Insert a signature into language blobs."""
    if version_str is None:
        version = None
    else:
        version = translations.version_from_json(version_str)

    tdir = TranslationsDir()
    all_blobs = tdir.generate_all_blobs(version)
    tree = merkle_tree.MerkleTree(b.header_bytes for b in all_blobs)
    root = tree.get_root_hash()

    # generate_all_blobs enforces a common version, so the first blob's is
    # representative. (NOTE(review): raises IndexError if no blob was built.)
    blob_version = all_blobs[0].header.firmware_version
    signature_file: SignatureFile = json.loads(SIGNATURES_JSON.read_text())

    if version_str is None:
        # we are using in-tree version. check in-tree merkle root
        if signature_file["current"]["merkle_root"] != root.hex():
            raise click.ClickException(
                f"Merkle root mismatch!\n"
                f"Expected: {root.hex()}\n"
                f"Stored in signatures.json: {signature_file['current']['merkle_root']}"
            )
        # else, proceed with the calculated Merkle root

    # Update signature file data. It will be written only if the signature verifies.
    tree_info = make_tree_info(root)
    signed_info = sign_info(tree_info, bytes.fromhex(signature_hex), blob_version)
    signature_file["history"].insert(0, signed_info)

    # first byte of the hex signature is the sigmask, the rest is the CoSi signature
    signature_bytes = bytes.fromhex(signature_hex)
    sigmask, signature = signature_bytes[0], signature_bytes[1:]

    try:
        # presumably 2 = required number of signers -- verify against cosi.verify
        cosi.verify(signature, root, 2, PUBLIC_KEYS_PROD, sigmask)
    except Exception as e:
        if force:
            LOG.warning(f"Invalid signature: {e}. --force is provided, writing anyway.")
            pass
        else:
            raise click.ClickException(f"Invalid signature: {e}") from e

    SIGNATURES_JSON.write_text(json.dumps(signature_file, indent=2) + "\n")
    build_all_blobs(all_blobs, tree, sigmask, signature, production=True)
|
|
|
|
|
|
|
|
|
2024-07-09 08:17:36 +00:00
|
|
|
def _dict_merge(a: dict, b: dict) -> None:
|
|
|
|
for k, v in b.items():
|
|
|
|
if k in a and isinstance(a[k], dict) and isinstance(v, dict):
|
|
|
|
_dict_merge(a[k], v)
|
|
|
|
else:
|
|
|
|
a[k] = v
|
|
|
|
|
|
|
|
|
|
|
|
@cli.command()
@click.argument("update_json", type=click.File("r"), nargs=-1)
def merge(update_json: t.Tuple[t.TextIO, ...]) -> None:
    """Update translations from JSON files."""
    tdir = TranslationsDir()
    for f in update_json:
        new_data = json.load(f)
        # first two chars of the header language field, e.g. "cs_CZ" -> "cs"
        lang = new_data["header"]["language"][:2]
        orig_data = tdir.load_lang(lang)
        # deep-merge the incoming data over the stored definitions
        _dict_merge(orig_data, new_data)
        tdir.save_lang(lang, orig_data)
        click.echo(f"Updated {lang}")
|
|
|
|
|
|
|
|
|
2024-07-21 18:54:23 +00:00
|
|
|
@cli.command()
@click.argument("lang_file", type=click.File("r"), nargs=1)
def characters(lang_file: t.TextIO) -> None:
    """Extract all non-ASCII characters."""
    # collect every distinct character outside the 7-bit ASCII range
    non_ascii = {c for c in lang_file.read() if ord(c) > 127}
    # print as a Python tuple literal, one quoted character per line
    print("(")
    for ch in sorted(non_ascii):
        print(f" \"{ch}\",")
    print(")")
|
|
|
|
|
|
|
|
|
|
|
|
@cli.command()
@click.argument("lang_file", type=click.Path(), nargs=1)
def lowercase(lang_file: Path) -> None:
    """Convert strings to lowercase."""
    with open(lang_file, "r") as fh:
        data = json.load(fh)
    new_translations = {}

    def _lower(s: str) -> str:
        # Only transform strings that are fully uppercase (ignoring
        # non-alphabetic characters); mixed-case strings stay untouched.
        if not s:
            # fix: empty translation values previously crashed on s[0] below,
            # because all(...) over an empty string is True
            return s
        if not all(c.isupper() or not c.isalpha() for c in s):
            return s
        # keep the first character as-is, lowercase the rest
        return s[0] + s[1:].lower()

    for key, value in data["translations"].items():
        new_translations[key] = _lower(value)

    data["translations"] = new_translations
    # write back in place; matches json.dump settings of the original
    with open(lang_file, "w") as fh:
        json.dump(data, fh, indent=2, ensure_ascii=False)
|
|
|
|
|
2023-08-11 15:57:32 +00:00
|
|
|
# Invoke the click group when executed as a script.
if __name__ == "__main__":
    cli()
|