Example #1: write_languages()
def write_languages(
    data: LanguageData,
    f: TextIO,
    strings_obj_path: Optional[str] = None,
    compress_font: bool = False,
) -> None:
    defs = data.defs
    build_version = data.build_version
    combined_sym_list = data.sym_list
    sym_lists_by_font = data.sym_lists_by_font
    font_map = data.font_map

    symbol_conversion_table = build_symbol_conversion_map(combined_sym_list)

    language_codes: List[str] = [lang["languageCode"] for lang in data.langs]
    logging.info(f"Generating block for {language_codes}")

    lang_names = [
        lang.get("languageLocalName", lang["languageCode"])
        for lang in data.langs
    ]

    f.write('#include "Translation_multi.h"')

    f.write(f"\n// ---- {lang_names} ----\n\n")

    max_decompressed_font_size = 0
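    # Font bitmap tables: emitted either as plain C arrays, or as brieflz-compressed
    # blobs marked data_is_compressed = true in FontSectionDataInfos.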
    if not compress_font:
        font_table_text = ""
        font_section_info_text = (
            "const FontSectionDataInfo FontSectionDataInfos[] = {\n")
        for font, current_sym_list in sym_lists_by_font.items():
            font_table_text += f"const uint8_t font_table_data_{font}[] = {{\n"
            font_table_text += "// 12x16:\n"
            font_table_text += make_font_table_named_cpp(
                None,
                current_sym_list,
                font_map.font12,
                symbol_conversion_table,
            )
            if font != font_tables.NAME_CJK:
                font_table_text += "// 6x8:\n"
                font_table_text += make_font_table_named_cpp(
                    None,
                    current_sym_list,
                    font_map.font06,  # type: ignore[arg-type]
                    symbol_conversion_table,
                )
            font_table_text += f"}}; // font_table_data_{font}\n"
            current_sym_start = combined_sym_list.index(
                current_sym_list[0]) + 2
            font_section_info_text += (
                "  {\n"
                f"    .symbol_start = {current_sym_start},\n"
                f"    .symbol_count = {len(current_sym_list)},\n"
                f"    .data_size = sizeof(font_table_data_{font}),\n"
                "    .data_is_compressed = false,\n"
                f"    .data_ptr = font_table_data_{font},\n"
                "  },\n")

        f.write(font_table_text)
        font_section_info_text += (
            "};\n"
            "const uint8_t FontSectionDataCount = sizeof(FontSectionDataInfos) / sizeof(FontSectionDataInfos[0]);\n\n"
        )
        f.write(font_section_info_text)
        f.write(
            "FontSection DynamicFontSections[4] = {};\n"
            "const FontSection *const FontSections = DynamicFontSections;\n"
            "const uint8_t FontSectionsCount = sizeof(DynamicFontSections) / sizeof(DynamicFontSections[0]);\n"
        )
    else:
        font_section_info_text = (
            "const FontSectionDataInfo FontSectionDataInfos[] = {\n")
        for font, current_sym_list in sym_lists_by_font.items():
            current_sym_start = combined_sym_list.index(
                current_sym_list[0]) + 2
            font_uncompressed = bytearray()
            for sym in current_sym_list:
                font_uncompressed.extend(font_map.font12[sym])
            if font != font_tables.NAME_CJK:
                for sym in current_sym_list:
                    font_uncompressed.extend(
                        font_map.font06[sym])  # type: ignore[arg-type]
            font_compressed = brieflz.compress(bytes(font_uncompressed))
            logging.info(
                f"Font table for {font} compressed from {len(font_uncompressed)} to {len(font_compressed)} bytes (ratio {len(font_compressed) / len(font_uncompressed):.3})"
            )
            max_decompressed_font_size += len(font_uncompressed)
            write_bytes_as_c_array(f, f"font_data_brieflz_{font}",
                                   font_compressed)
            font_section_info_text += (
                "  {\n"
                f"    .symbol_start = {current_sym_start},\n"
                f"    .symbol_count = {len(current_sym_list)},\n"
                f"    .data_size = sizeof(font_data_brieflz_{font}),\n"
                "    .data_is_compressed = true,\n"
                f"    .data_ptr = font_data_brieflz_{font},\n"
                "  },\n")
        font_section_info_text += (
            "};\n"
            "const uint8_t FontSectionDataCount = sizeof(FontSectionDataInfos) / sizeof(FontSectionDataInfos[0]);\n\n"
        )
        f.write(font_section_info_text)
        f.write(
            "FontSection DynamicFontSections[4] = {};\n"
            "const FontSection *const FontSections = DynamicFontSections;\n"
            "const uint8_t FontSectionsCount = sizeof(DynamicFontSections) / sizeof(DynamicFontSections[0]);\n"
        )

    f.write(f"\n// ---- {lang_names} ----\n\n")

    translation_common_text = get_translation_common_text(
        defs, symbol_conversion_table, build_version)
    f.write(translation_common_text)
    f.write(
        f"const bool HasFahrenheit = {('true' if any([lang.get('tempUnitFahrenheit', True) for lang in data.langs]) else 'false')};\n\n"
    )

    max_decompressed_translation_size = 0
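    # Translation strings: either generated directly as per-language C structures, or
    # extracted from a pre-built object file and emitted as brieflz-compressed blobs.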
    if not strings_obj_path:
        for lang in data.langs:
            lang_code = lang["languageCode"]
            translation_strings_and_indices_text = (
                get_translation_strings_and_indices_text(
                    lang,
                    defs,
                    symbol_conversion_table,
                    suffix=f"_{lang_code}"))
            f.write(translation_strings_and_indices_text)
        f.write("const LanguageMeta LanguageMetas[] = {\n")
        for lang in data.langs:
            lang_code = lang["languageCode"]
            lang_id = get_language_unqiue_id(lang_code)
            f.write(
                "  {\n"
                f"    .uniqueID = {lang_id},\n"
                f"    .translation_data = reinterpret_cast<const uint8_t *>(&translation_{lang_code}),\n"
                f"    .translation_size = sizeof(translation_{lang_code}),\n"
                f"    .translation_is_compressed = false,\n"
                "  },\n")
        f.write("};\n")
    else:
        for lang in data.langs:
            lang_code = lang["languageCode"]
            sym_name = objcopy.cpp_var_to_section_name(
                f"translation_{lang_code}")
            strings_bin = objcopy.get_binary_from_obj(strings_obj_path,
                                                      sym_name)
            if len(strings_bin) == 0:
                raise ValueError(f"Output for {sym_name} is empty")
            max_decompressed_translation_size = max(
                max_decompressed_translation_size, len(strings_bin))
            compressed = brieflz.compress(strings_bin)
            logging.info(
                f"Strings for {lang_code} compressed from {len(strings_bin)} to {len(compressed)} bytes (ratio {len(compressed) / len(strings_bin):.3})"
            )
            write_bytes_as_c_array(f, f"translation_data_brieflz_{lang_code}",
                                   compressed)
        f.write("const LanguageMeta LanguageMetas[] = {\n")
        for lang in data.langs:
            lang_code = lang["languageCode"]
            lang_id = get_language_unqiue_id(lang_code)
            f.write(
                "  {\n"
                f"    .uniqueID = {lang_id},\n"
                f"    .translation_data = translation_data_brieflz_{lang_code},\n"
                f"    .translation_size = sizeof(translation_data_brieflz_{lang_code}),\n"
                f"    .translation_is_compressed = true,\n"
                "  },\n")
        f.write("};\n")
    f.write(
        "const uint8_t LanguageCount = sizeof(LanguageMetas) / sizeof(LanguageMetas[0]);\n\n"
        f"alignas(TranslationData) uint8_t translation_data_out_buffer[{max_decompressed_translation_size + max_decompressed_font_size}];\n"
        "const uint16_t translation_data_out_buffer_size = sizeof(translation_data_out_buffer);\n\n"
    )

    sanity_checks_text = get_translation_sanity_checks_text(defs)
    f.write(sanity_checks_text)
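
Example #1 relies on several helpers that are not shown here (build_symbol_conversion_map, make_font_table_named_cpp, write_bytes_as_c_array, and others). As a minimal sketch, assuming write_bytes_as_c_array only has to dump a byte string as a const uint8_t C array, it could look like the following; the formatting details of the real helper may differ.

from typing import TextIO


def write_bytes_as_c_array(
    f: TextIO, name: str, data: bytes, bytes_per_line: int = 16
) -> None:
    # Hypothetical sketch: emit `data` as a const uint8_t array named `name`,
    # 16 bytes per line, matching the usage in write_languages() above.
    f.write(f"const uint8_t {name}[] = {{\n")
    for i in range(0, len(data), bytes_per_line):
        chunk = ", ".join(f"0x{b:02X}" for b in data[i : i + bytes_per_line])
        f.write(f"  {chunk},\n")
    f.write(f"}}; // {name}\n\n")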
Example #2: main()
def main() -> None:
    json_dir = HERE

    args = parse_args()
    if args.input_pickled and args.output_pickled:
        logging.error(
            "error: Both --output-pickled and --input-pickled are specified")
        sys.exit(1)

    language_data: LanguageData
    if args.input_pickled:
        logging.info(
            f"Reading pickled language data from {args.input_pickled.name}...")
        language_data = pickle.load(args.input_pickled)
        language_codes = [lang["languageCode"] for lang in language_data.langs]
        if language_codes != args.languageCodes:
            logging.error(
                f"error: languageCodes {args.languageCodes} does not match language data {language_codes}"
            )
            sys.exit(1)
        logging.info(f"Read language data for {language_codes}")
        logging.info(f"Build version: {language_data.build_version}")
    else:
        try:
            build_version = read_version()
        except FileNotFoundError:
            logging.error("error: Could not find version info")
            sys.exit(1)

        logging.info(f"Build version: {build_version}")
        logging.info(f"Making {args.languageCodes} from {json_dir}")

        defs_ = load_json(os.path.join(json_dir, "translations_def.js"), True)
        if len(args.languageCodes) == 1:
            lang_ = read_translation(json_dir, args.languageCodes[0])
            language_data = prepare_language(lang_, defs_, build_version)
        else:
            langs_ = [
                read_translation(json_dir, lang_code)
                for lang_code in args.languageCodes
            ]
            language_data = prepare_languages(langs_, defs_, build_version)

    out_ = args.output
    write_start(out_)
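    # A single language is written via write_language(); multiple languages share the
    # combined font tables produced by write_languages(). Both paths can optionally
    # pull pre-built string data from a compiled object file (args.strings_obj).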
    if len(language_data.langs) == 1:
        if args.strings_obj:
            sym_name = objcopy.cpp_var_to_section_name("translation")
            strings_bin = objcopy.get_binary_from_obj(args.strings_obj.name,
                                                      sym_name)
            if len(strings_bin) == 0:
                raise ValueError(f"Output for {sym_name} is empty")
            write_language(
                language_data,
                out_,
                strings_bin=strings_bin,
                compress_font=args.compress_font,
            )
        else:
            write_language(language_data,
                           out_,
                           compress_font=args.compress_font)
    else:
        if args.strings_obj:
            write_languages(
                language_data,
                out_,
                strings_obj_path=args.strings_obj.name,
                compress_font=args.compress_font,
            )
        else:
            write_languages(language_data,
                            out_,
                            compress_font=args.compress_font)

    if args.output_pickled:
        logging.info(f"Writing pickled data to {args.output_pickled.name}")
        pickle.dump(language_data, args.output_pickled)

    logging.info("Done")
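
main() assumes a parse_args() that exposes the attributes used above: languageCodes, output, input_pickled, output_pickled, strings_obj, and compress_font. The sketch below is a hypothetical reconstruction of such a parser; apart from --input-pickled and --output-pickled, which the error message in main() names explicitly, the flag spellings, types, and help text are assumptions.

import argparse


def parse_args() -> argparse.Namespace:
    # Hypothetical reconstruction of the parser assumed by main(); flag names other
    # than --input-pickled/--output-pickled are guesses consistent with the attributes.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--output", "-o", type=argparse.FileType("w"), required=True,
        help="Target source file for the generated translation data",
    )
    parser.add_argument(
        "--strings-obj", type=argparse.FileType("rb"),
        help="Compiled object file to extract pre-built string blobs from",
    )
    parser.add_argument(
        "--compress-font", action="store_true",
        help="Emit brieflz-compressed font tables",
    )
    parser.add_argument(
        "--input-pickled", type=argparse.FileType("rb"),
        help="Read previously pickled LanguageData instead of the JSON sources",
    )
    parser.add_argument(
        "--output-pickled", type=argparse.FileType("wb"),
        help="Write the prepared LanguageData as a pickle for later reuse",
    )
    parser.add_argument(
        "languageCodes", nargs="+",
        help="One or more language codes, e.g. EN DE FR",
    )
    return parser.parse_args()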