mirror of https://github.com/Ralim/IronOS.git

Give all languages a unique ID
Translations/make_translation.py

@@ -3,6 +3,7 @@
 import argparse
 import functools
 import json
+import hashlib
 import logging
 import os
 import pickle
@@ -41,6 +42,16 @@ def load_json(filename: str, skip_first_line: bool) -> dict:
         return json.loads(f.read())


+def get_language_unqiue_id(language_ascii_name: str):
+    """
+    Given a language code, it will return a unique (enough) uint16_t id code
+    When we have a collision here we can tweak this, but language list should be fairly stable from now on
+    """
+    return (
+        int(hashlib.sha1(language_ascii_name.encode("utf-8")).hexdigest(), 16) % 0xFFFF
+    )
+
+
 def read_translation(json_root: Union[str, Path], lang_code: str) -> dict:
     filename = f"translation_{lang_code}.json"

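For context (not part of the diff), below is a minimal standalone sketch of how this unique ID is derived: the SHA-1 digest of the ASCII language code, reduced modulo 0xFFFF so it fits a uint16_t. The sample codes "EN", "DE" and "RU" are illustrative assumptions, not the project's full language list. With N languages spread over 65535 possible values, the expected number of colliding pairs is roughly N(N-1)/(2*65535), which stays small for a few dozen languages; that is why the docstring treats a future tweak as an acceptable fallback.

# Sketch only; the function body mirrors the commit above, the loop and codes are illustrative.
import hashlib


def get_language_unqiue_id(language_ascii_name: str) -> int:
    # SHA-1 of the code, reduced modulo 0xFFFF so the result fits in a uint16_t
    return int(hashlib.sha1(language_ascii_name.encode("utf-8")).hexdigest(), 16) % 0xFFFF


for code in ("EN", "DE", "RU"):  # placeholder codes, not the real language list
    print(code, get_language_unqiue_id(code))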
@@ -868,10 +879,12 @@ def write_languages(
     f.write("const LanguageMeta LanguageMetas[] = {\n")
     for lang in data.langs:
         lang_code = lang["languageCode"]
+        lang_id = get_language_unqiue_id(lang_code)
         f.write(
             "  {\n"
             # NOTE: Cannot specify C99 designator here due to GCC (g++) bug: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=55227
             f'    /* .code = */ "{lang_code}",\n'
+            f"    .uniqueID = {lang_id},\n"
             f"    .translation_data = reinterpret_cast<const uint8_t *>(&translation_{lang_code}),\n"
             f"    .translation_size = sizeof(translation_{lang_code}),\n"
             f"    .translation_is_compressed = false,\n"
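For illustration only, the sketch below shows the shape of one LanguageMetas entry that the updated write_languages would emit. The code "EN" and the id 12345 are made-up placeholders; in the real script the id comes from get_language_unqiue_id(lang_code).

# Illustrative sketch: builds one entry string with the same f-string pattern as above.
lang_code = "EN"  # placeholder language code
lang_id = 12345   # placeholder; the script computes get_language_unqiue_id(lang_code)
entry = (
    "  {\n"
    f'    /* .code = */ "{lang_code}",\n'
    f"    .uniqueID = {lang_id},\n"
    f"    .translation_data = reinterpret_cast<const uint8_t *>(&translation_{lang_code}),\n"
    f"    .translation_size = sizeof(translation_{lang_code}),\n"
    f"    .translation_is_compressed = false,\n"
)
print(entry)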
Translations/make_translation_test.py (19 changed lines; Normal file → Executable file)
@@ -1,4 +1,6 @@
 #!/usr/bin/env python3
+import json
+import os
 import unittest


@@ -30,6 +32,23 @@ class TestMakeTranslation(unittest.TestCase):
         self.assertEqual(bytes_to_c_hex(b"\x00"), "0x00,")
         self.assertEqual(bytes_to_c_hex(b"\xF1\xAB"), "0xF1, 0xAB,")

+    def test_no_language_id_collisions(self):
+        """
+        Asserting that we have no language collisions and that the hash works ok
+        """
+        from make_translation import get_language_unqiue_id
+
+        seen_ids = []
+        for filename in os.listdir("."):
+            if filename.endswith(".json") and filename.startswith("translation_"):
+                with open(filename) as f:
+                    data = json.loads(f.read())
+                lang_code = data.get("languageCode")
+                self.assertNotEqual(lang_code, None)
+                id = get_language_unqiue_id(lang_code)
+                self.assertFalse(id in seen_ids)
+                seen_ids.append(id)
+

 if __name__ == "__main__":
     unittest.main()
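The new test scans the current working directory for translation_*.json files, so it presumably needs to be run from the Translations/ folder; the directory name and invocation below are assumptions based on the file paths in this commit, not instructions from it.

# Assumed invocation:
#   cd Translations && python3 -m unittest make_translation_test
# Or, programmatically:
import unittest

suite = unittest.defaultTestLoader.discover(".", pattern="make_translation_test.py")
unittest.TextTestRunner(verbosity=2).run(suite)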