Merge branch 'master' into master

Ben V. Brown, committed by GitHub on 2021-04-13 21:43:08 +10:00
9 changed files with 909 additions and 115 deletions

@@ -5,13 +5,14 @@ import functools
import json
import logging
import os
import pickle
import re
import subprocess
import sys
from datetime import datetime
from itertools import chain
from pathlib import Path
from typing import Dict, List, Optional, TextIO, Tuple, Union
from typing import BinaryIO, Dict, List, Optional, TextIO, Tuple, Union
from dataclasses import dataclass
from bdflib import reader as bdfreader
@@ -79,7 +80,7 @@ def write_start(f: TextIO):
f.write('#include "Translation.h"\n')
def get_constants() -> List[Tuple[str, str]]:
def get_constants(build_version: str) -> List[Tuple[str, str]]:
# Extra constants that are used in the firmware that are shared across all languages
return [
("SymbolPlus", "+"),
@@ -94,7 +95,7 @@ def get_constants() -> List[Tuple[str, str]]:
("SymbolVolts", "V"),
("SymbolDC", "DC"),
("SymbolCellCount", "S"),
("SymbolVersionNumber", buildVersion),
("SymbolVersionNumber", build_version),
]
@@ -116,7 +117,7 @@ def get_debug_menu() -> List[str]:
]
def get_letter_counts(defs: dict, lang: dict) -> List[str]:
def get_letter_counts(defs: dict, lang: dict, build_version: str) -> List[str]:
text_list = []
# iterate over all strings
obj = lang["menuOptions"]
@@ -169,7 +170,7 @@ def get_letter_counts(defs: dict, lang: dict) -> List[str]:
for mod in defs["menuGroups"]:
eid = mod["id"]
text_list.append(obj[eid]["desc"])
constants = get_constants()
constants = get_constants(build_version)
for x in constants:
text_list.append(x[1])
text_list.extend(get_debug_menu())
@@ -290,7 +291,15 @@ def bytes_to_escaped(b: bytes) -> str:
return "".join((f"\\x{i:02X}" for i in b))
def get_font_map_and_table(text_list: List[str]) -> Tuple[str, Dict[str, bytes]]:
@dataclass
class FontMap:
font12: Dict[str, str]
font06: Dict[str, str]
def get_font_map_and_table(
text_list: List[str],
) -> Tuple[List[str], FontMap, Dict[str, bytes]]:
# the text list is sorted
# allocate out these in their order as number codes
symbol_map: Dict[str, bytes] = {"\n": bytes([1])}
@@ -324,56 +333,56 @@ def get_font_map_and_table(text_list: List[str]) -> Tuple[str, Dict[str, bytes]]
logging.info(f"Generating fonts for {total_symbol_count} symbols")
for sym in chain(ordered_normal_sym_list, ordered_cjk_sym_list):
sym_list = ordered_normal_sym_list + ordered_cjk_sym_list
for sym in sym_list:
if sym in symbol_map:
raise ValueError("Symbol not found in symbol map")
symbol_map[sym] = get_bytes_from_font_index(index)
index += 1
font_table_strings = []
font_small_table_strings = []
font12_map: Dict[str, str] = {}
font06_map: Dict[str, str] = {}
for sym in ordered_normal_sym_list:
if sym not in font_table:
logging.error(f"Missing Large font element for {sym}")
sys.exit(1)
font_line: str = font_table[sym]
font_table_strings.append(
f"{font_line}//{bytes_to_escaped(symbol_map[sym])} -> {sym}"
)
font12_map[sym] = font_table[sym]
if sym not in font_small_table:
logging.error(f"Missing Small font element for {sym}")
sys.exit(1)
font_line = font_small_table[sym]
font_small_table_strings.append(
f"{font_line}//{bytes_to_escaped(symbol_map[sym])} -> {sym}"
)
font06_map[sym] = font_small_table[sym]
for sym in ordered_cjk_sym_list:
if sym in font_table:
raise ValueError("Symbol already exists in font_table")
font_line = get_cjk_glyph(sym)
font_line: Optional[str] = get_cjk_glyph(sym)
if font_line is None:
logging.error(f"Missing Large font element for {sym}")
sys.exit(1)
font_table_strings.append(
f"{font_line}//{bytes_to_escaped(symbol_map[sym])} -> {sym}"
)
font12_map[sym] = font_line
# No data to add to the small font table
font_small_table_strings.append(
f"// {bytes_to_escaped(symbol_map[sym])} -> {sym}"
)
font06_map[sym] = "// " # placeholder
return sym_list, FontMap(font12_map, font06_map), symbol_map
def make_font_table_cpp(
sym_list: List[str], font_map: FontMap, symbol_map: Dict[str, bytes]
) -> str:
output_table = "const uint8_t USER_FONT_12[] = {\n"
for line in font_table_strings:
# join font table into one large string
output_table += line + "\n"
for sym in sym_list:
output_table += (
f"{font_map.font12[sym]}//{bytes_to_escaped(symbol_map[sym])} -> {sym}\n"
)
output_table += "};\n"
output_table += "const uint8_t USER_FONT_6x8[] = {\n"
for line in font_small_table_strings:
# join font table into one large string
output_table += line + "\n"
for sym in sym_list:
output_table += (
f"{font_map.font06[sym]}//{bytes_to_escaped(symbol_map[sym])} -> {sym}\n"
)
output_table += "};\n"
return output_table, symbol_map
return output_table
def convert_string_bytes(symbol_conversion_table: Dict[str, bytes], text: str) -> bytes:
@@ -397,29 +406,132 @@ def escape(string: str) -> str:
return json.dumps(string, ensure_ascii=False)
def write_bytes_as_c_array(
f: TextIO, name: str, data: bytes, indent: int = 2, bytes_per_line: int = 16
) -> None:
f.write(f"const uint8_t {name}[] = {{\n")
for i in range(0, len(data), bytes_per_line):
f.write(" " * indent)
f.write(", ".join((f"0x{b:02X}" for b in data[i : i + bytes_per_line])))
f.write(",\n")
f.write(f"}}; // {name}\n\n")
@dataclass
class LanguageData:
lang: dict
defs: dict
build_version: str
sym_list: List[str]
font_map: FontMap
symbol_conversion_table: Dict[str, bytes]
def write_language(lang: dict, defs: dict, f: TextIO) -> None:
def prepare_language(lang: dict, defs: dict, build_version: str) -> LanguageData:
language_code: str = lang["languageCode"]
logging.info(f"Preparing language data for {language_code}")
# Iterate over all of the text to build up the symbols & counts
text_list = get_letter_counts(defs, lang, build_version)
# From the letter counts, need to make a symbol translator & write out the font
sym_list, font_map, symbol_conversion_table = get_font_map_and_table(text_list)
return LanguageData(
lang, defs, build_version, sym_list, font_map, symbol_conversion_table
)
def write_language(
data: LanguageData, f: TextIO, lzfx_strings: Optional[bytes] = None
) -> None:
lang = data.lang
defs = data.defs
build_version = data.build_version
sym_list = data.sym_list
font_map = data.font_map
symbol_conversion_table = data.symbol_conversion_table
language_code: str = lang["languageCode"]
logging.info(f"Generating block for {language_code}")
# Iterate over all of the text to build up the symbols & counts
text_list = get_letter_counts(defs, lang)
# From the letter counts, need to make a symbol translator & write out the font
font_table_text, symbol_conversion_table = get_font_map_and_table(text_list)
font_table_text = make_font_table_cpp(sym_list, font_map, symbol_conversion_table)
try:
lang_name = lang["languageLocalName"]
except KeyError:
lang_name = language_code
if lzfx_strings:
f.write('#include "lzfx.h"\n')
f.write(f"\n// ---- {lang_name} ----\n\n")
f.write(font_table_text)
f.write(f"\n// ---- {lang_name} ----\n\n")
translation_common_text = get_translation_common_text(
defs, symbol_conversion_table, build_version
)
f.write(translation_common_text)
f.write(
f"const bool HasFahrenheit = {('true' if lang.get('tempUnitFahrenheit', True) else 'false')};\n\n"
"extern const uint8_t *const Font_12x16 = USER_FONT_12;\n"
"extern const uint8_t *const Font_6x8 = USER_FONT_6x8;\n\n"
)
if not lzfx_strings:
translation_strings_and_indices_text = get_translation_strings_and_indices_text(
lang, defs, symbol_conversion_table
)
f.write(translation_strings_and_indices_text)
f.write(
"const TranslationIndexTable *const Tr = &TranslationIndices;\n"
"const char *const TranslationStrings = TranslationStringsData;\n\n"
"void prepareTranslations() {}\n\n"
)
else:
write_bytes_as_c_array(f, "translation_data_lzfx", lzfx_strings)
f.write(
"static uint8_t translation_data_out_buffer[4096] __attribute__((__aligned__(2)));\n\n"
"const TranslationIndexTable *const Tr = reinterpret_cast<const TranslationIndexTable *>(translation_data_out_buffer);\n"
"const char *const TranslationStrings = reinterpret_cast<const char *>(translation_data_out_buffer) + sizeof(TranslationIndexTable);\n\n"
"void prepareTranslations() {\n"
" unsigned int outsize = sizeof(translation_data_out_buffer);\n"
" lzfx_decompress(translation_data_lzfx, sizeof(translation_data_lzfx), translation_data_out_buffer, &outsize);\n"
"}\n\n"
)
sanity_checks_text = get_translation_sanity_checks_text(defs)
f.write(sanity_checks_text)
def get_translation_common_text(
defs: dict, symbol_conversion_table: Dict[str, bytes], build_version
) -> str:
translation_common_text = ""
# Write out firmware constant options
constants = get_constants(build_version)
for x in constants:
translation_common_text += f'const char* {x[0]} = "{convert_string(symbol_conversion_table, x[1])}";//{x[1]} \n'
translation_common_text += "\n"
# Debug Menu
translation_common_text += "const char* DebugMenu[] = {\n"
for c in get_debug_menu():
translation_common_text += (
f'\t "{convert_string(symbol_conversion_table, c)}",//{c} \n'
)
translation_common_text += "};\n\n"
return translation_common_text
@dataclass
class TranslationItem:
info: str
str_index: int
def get_translation_strings_and_indices_text(
lang: dict, defs: dict, symbol_conversion_table: Dict[str, bytes]
) -> str:
str_table: List[str] = []
str_group_messages: List[TranslationItem] = []
str_group_messageswarn: List[TranslationItem] = []
@@ -478,21 +590,6 @@ def write_language(lang: dict, defs: dict, f: TextIO) -> None:
str_group_characters.append(TranslationItem(eid, len(str_table)))
str_table.append(obj[eid])
# Write out firmware constant options
constants = get_constants()
for x in constants:
f.write(
f'const char* {x[0]} = "{convert_string(symbol_conversion_table, x[1])}";//{x[1]} \n'
)
f.write("\n")
# Debug Menu
f.write("const char* DebugMenu[] = {\n")
for c in get_debug_menu():
f.write(f'\t "{convert_string(symbol_conversion_table, c)}",//{c} \n')
f.write("};\n\n")
# ----- Reading SettingsDescriptions
obj = lang["menuOptions"]
@@ -537,8 +634,6 @@ def write_language(lang: dict, defs: dict, f: TextIO) -> None:
)
str_table.append(obj[eid]["desc"])
f.write("\n")
@dataclass
class RemappedTranslationItem:
str_index: int
@@ -573,14 +668,13 @@ def write_language(lang: dict, defs: dict, f: TextIO) -> None:
str_offsets = [-1] * len(str_table)
offset = 0
write_null = False
f.write("const char TranslationStringsData[] = {\n")
translation_strings_text = "const char TranslationStringsData[] = {\n"
for i, source_str in enumerate(str_table):
if write_null:
f.write(' "\\0"\n')
write_null = True
if str_remapping[i] is not None:
write_null = False
continue
if write_null:
translation_strings_text += ' "\\0"\n'
write_null = True
# Find what items use this string
str_used_by = [i] + [
j for j, r in enumerate(str_remapping) if r and r.str_index == i
@@ -597,40 +691,35 @@ def write_language(lang: dict, defs: dict, f: TextIO) -> None:
]:
for item in group:
if item.str_index == j:
f.write(f" // - {pre_info} {item.info}\n")
translation_strings_text += (
f" // - {pre_info} {item.info}\n"
)
if j == i:
f.write(f" // {offset: >4}: {escape(source_str)}\n")
translation_strings_text += f" // {offset: >4}: {escape(source_str)}\n"
str_offsets[j] = offset
else:
remapped = str_remapping[j]
assert remapped is not None
f.write(
f" // {offset + remapped.str_start_offset: >4}: {escape(str_table[j])}\n"
)
translation_strings_text += f" // {offset + remapped.str_start_offset: >4}: {escape(str_table[j])}\n"
str_offsets[j] = offset + remapped.str_start_offset
converted_str = convert_string(symbol_conversion_table, source_str)
f.write(f' "{converted_str}"')
converted_bytes = convert_string_bytes(symbol_conversion_table, source_str)
translation_strings_text += f' "{bytes_to_escaped(converted_bytes)}"'
str_offsets[i] = offset
# Sanity check: Each "char" in `converted_str` should be in format
# `\xFF`, so the length should be divisible by 4.
assert len(converted_str) % 4 == 0
# Add the length and the null terminator
offset += len(converted_str) // 4 + 1
f.write("\n};\n\n")
offset += len(converted_bytes) + 1
translation_strings_text += "\n}; // TranslationStringsData\n\n"
def get_offset(idx: int) -> int:
assert str_offsets[idx] >= 0
return str_offsets[idx]
f.write("const TranslationIndexTable TranslationIndices = {\n")
translation_indices_text = "const TranslationIndexTable TranslationIndices = {\n"
# ----- Write the messages string indices:
for group in [str_group_messages, str_group_messageswarn, str_group_characters]:
for item in group:
f.write(
f" .{item.info} = {get_offset(item.str_index)}, // {escape(str_table[item.str_index])}\n"
)
f.write("\n")
translation_indices_text += f" .{item.info} = {get_offset(item.str_index)}, // {escape(str_table[item.str_index])}\n"
translation_indices_text += "\n"
# ----- Write the settings index tables:
for group, name in [
@@ -640,30 +729,25 @@ def write_language(lang: dict, defs: dict, f: TextIO) -> None:
(str_group_settingmenuentriesdesc, "SettingsMenuEntriesDescriptions"),
]:
max_len = 30
f.write(f" .{name} = {{\n")
translation_indices_text += f" .{name} = {{\n"
for item in group:
f.write(
f" /* {item.info.ljust(max_len)[:max_len]} */ {get_offset(item.str_index)}, // {escape(str_table[item.str_index])}\n"
)
f.write(f" }}, // {name}\n\n")
translation_indices_text += f" /* {item.info.ljust(max_len)[:max_len]} */ {get_offset(item.str_index)}, // {escape(str_table[item.str_index])}\n"
translation_indices_text += f" }}, // {name}\n\n"
f.write("}; // TranslationIndices\n\n")
f.write("const TranslationIndexTable *const Tr = &TranslationIndices;\n")
f.write("const char *const TranslationStrings = TranslationStringsData;\n\n")
translation_indices_text += "}; // TranslationIndices\n\n"
f.write(
f"const bool HasFahrenheit = {('true' if lang.get('tempUnitFahrenheit', True) else 'false')};\n"
)
return translation_strings_text + translation_indices_text
f.write("\n// Verify SettingsItemIndex values:\n")
def get_translation_sanity_checks_text(defs: dict) -> str:
sanity_checks_text = "\n// Verify SettingsItemIndex values:\n"
for i, mod in enumerate(defs["menuOptions"]):
eid = mod["id"]
f.write(
sanity_checks_text += (
f"static_assert(static_cast<uint8_t>(SettingsItemIndex::{eid}) == {i});\n"
)
f.write(
f"static_assert(static_cast<uint8_t>(SettingsItemIndex::NUM_ITEMS) == {len(defs['menuOptions'])});\n"
)
sanity_checks_text += f"static_assert(static_cast<uint8_t>(SettingsItemIndex::NUM_ITEMS) == {len(defs['menuOptions'])});\n"
return sanity_checks_text
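# Illustrative sketch of the generated checks: with a toy definition such as
#   defs = {"menuOptions": [{"id": "OptA"}, {"id": "OptB"}]}
# (the ids here are placeholders, not real settings), get_translation_sanity_checks_text(defs)
# would return roughly:
#   // Verify SettingsItemIndex values:
#   static_assert(static_cast<uint8_t>(SettingsItemIndex::OptA) == 0);
#   static_assert(static_cast<uint8_t>(SettingsItemIndex::OptB) == 1);
#   static_assert(static_cast<uint8_t>(SettingsItemIndex::NUM_ITEMS) == 2);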
def read_version() -> str:
@@ -683,6 +767,27 @@ def read_version() -> str:
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser()
parser.add_argument(
"--output-pickled",
help="Write pickled language data for later reuse",
type=argparse.FileType("wb"),
required=False,
dest="output_pickled",
)
parser.add_argument(
"--input-pickled",
help="Use previously generated pickled language data",
type=argparse.FileType("rb"),
required=False,
dest="input_pickled",
)
parser.add_argument(
"--lzfx-strings",
help="Use compressed TranslationIndices + TranslationStrings data",
type=argparse.FileType("rb"),
required=False,
dest="lzfx_strings",
)
parser.add_argument(
"--output", "-o", help="Target file", type=argparse.FileType("w"), required=True
)
@@ -690,23 +795,52 @@ def parse_args() -> argparse.Namespace:
return parser.parse_args()
if __name__ == "__main__":
def main() -> None:
json_dir = HERE
args = parse_args()
try:
buildVersion = read_version()
except FileNotFoundError:
logging.error("error: Could not find version info ")
if args.input_pickled and args.output_pickled:
logging.error("error: Both --output-pickled and --input-pickled are specified")
sys.exit(1)
logging.info(f"Build version: {buildVersion}")
logging.info(f"Making {args.languageCode} from {json_dir}")
language_data: LanguageData
if args.input_pickled:
logging.info(f"Reading pickled language data from {args.input_pickled.name}...")
language_data = pickle.load(args.input_pickled)
if language_data.lang["languageCode"] != args.languageCode:
logging.error(
f"error: languageCode {args.languageCode} does not match language data {language_data.lang['languageCode']}"
)
sys.exit(1)
logging.info(f"Read language data for {language_data.lang['languageCode']}")
logging.info(f"Build version: {language_data.build_version}")
else:
try:
build_version = read_version()
except FileNotFoundError:
logging.error("error: Could not find version info ")
sys.exit(1)
logging.info(f"Build version: {build_version}")
logging.info(f"Making {args.languageCode} from {json_dir}")
lang_ = read_translation(json_dir, args.languageCode)
defs_ = load_json(os.path.join(json_dir, "translations_def.js"), True)
language_data = prepare_language(lang_, defs_, build_version)
lang_ = read_translation(json_dir, args.languageCode)
defs_ = load_json(os.path.join(json_dir, "translations_def.js"), True)
out_ = args.output
write_start(out_)
write_language(lang_, defs_, out_)
if args.lzfx_strings:
write_language(language_data, out_, args.lzfx_strings.read())
else:
write_language(language_data, out_)
if args.output_pickled:
logging.info(f"Writing pickled data to {args.output_pickled.name}")
pickle.dump(language_data, args.output_pickled)
logging.info("Done")
if __name__ == "__main__":
main()
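The new flags suggest a two-pass flow: a first run renders the uncompressed tables and pickles the prepared LanguageData, and a second run reloads that pickle and embeds an LZFX-compressed copy of the TranslationIndexTable plus TranslationStrings payload. Below is a rough sketch of that flow, inferred from main() above; the script name, file paths, language code, and the external compression step are placeholders rather than anything this diff specifies.

import subprocess
import sys

SCRIPT = "make_translation.py"  # placeholder name for the script changed in this diff

# Pass 1: render the plain translation block and keep the prepared LanguageData.
subprocess.run(
    [sys.executable, SCRIPT, "EN",  # "EN" stands in for any languageCode
     "--output", "Translation.EN.cpp",
     "--output-pickled", "translation_EN.pickle"],
    check=True,
)

# An external step (not shown in this diff) would LZFX-compress the
# TranslationIndexTable + TranslationStrings payload into translation_EN.lzfx.

# Pass 2: reuse the pickled LanguageData and embed the compressed strings, so that
# prepareTranslations() decompresses them at runtime via lzfx_decompress().
subprocess.run(
    [sys.executable, SCRIPT, "EN",
     "--input-pickled", "translation_EN.pickle",
     "--lzfx-strings", "translation_EN.lzfx",
     "--output", "Translation.EN.cpp"],
    check=True,
)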