Example 1
def translate_raws(po_filename: Path, path: Path,
                   encoding: str) -> Iterator[str]:
    with open(po_filename, "r", encoding="utf-8") as pofile:
        dictionary = {(item.text, item.context): item.translation
                      for item in load_po(pofile)}

    for file_path in path.glob("*.txt"):
        if file_path.is_file() and not file_path.name.startswith("language_"):
            with backup(file_path) as bak_name:
                yield from translate_single_raw_file(bak_name, file_path,
                                                     dictionary, encoding)
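A minimal way to drive this generator might look like the following sketch; the .po file name, the raw directory, and the encoding are placeholders, not values taken from the project:

from pathlib import Path

# Apply translations from a .po dictionary to every raw *.txt file in a
# directory and print each status line yielded by the generator.
for message in translate_raws(Path("translation.po"), Path("raw/objects"), "cp1251"):
    print(message)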
Example 2
def test_save_load_po():
    entries: List[TranslationItem] = [
        TranslationItem("asddf", "qwert"),
        TranslationItem("xcvf", "fghrth"),
        TranslationItem("cvbeb", "jtyjkty"),
    ]
    template = (item.text for item in entries)
    file = StringIO()
    save_po(file, template, entries)
    file.seek(0)

    # save_po writes a metadata header entry first, so skip it when comparing
    assert list(load_po(file))[1:] == entries
Example 3
def test_parse_metadata():
    header = strip_margin(
        r"""
        |msgid ""
        |msgstr ""
        |"Content-Type: text/plain; charset=UTF-8\n"
        |"Content-Transfer-Encoding: 8bit\n"
        """
    ).strip()

    metadata = next(load_po(io.StringIO(header)))
    assert parse_metadata(metadata) == {
        "Content-Type": "text/plain; charset=UTF-8",
        "Content-Transfer-Encoding": "8bit",
    }
Example 4
def translate_plain_text(po_filename: Path,
                         path: Path,
                         encoding: str,
                         join_paragraphs=True):
    with open(po_filename, "r", encoding="utf-8") as po_file:
        dictionary = {
            item.text: item.translation
            for item in load_po(po_file) if item.text
        }

    for file_path in Path(path).rglob("*.txt"):
        if file_path.is_file():
            with backup(file_path) as backup_file:
                yield from translate_plain_text_file(backup_file, file_path,
                                                     dictionary, encoding,
                                                     join_paragraphs)
Example 5
def translate_compressed(po_filename: Path, path: Path, encoding: str):
    with open(po_filename, "r", encoding="utf-8") as pofile:
        dictionary = {
            item.text: item.translation
            for item in load_po(pofile) if item.text
        }

    for file in path.rglob("*"):
        if file.is_file() and "." not in file.name and file.name != "index":
            # Skip the index file: it can be decoded and re-encoded,
            # but the game crashes if it is changed
            with backup(file) as backup_file:
                try:
                    yield from translate_compressed_file(
                        backup_file, file, dictionary, encoding)
                except Exception as ex:
                    yield f"Error: {ex}"
Example 6
def test_load_po():
    data = strip_margin(
        """
        |# Some comment
        |#: body_default.txt:7
        |msgctxt "BODY:BASIC_1PARTBODY"
        |msgid "[BP:UB:body:bodies]"
        |msgstr "[BP:UB:тело:тела]"
        """
    )

    expected = TranslationItem(
        context="BODY:BASIC_1PARTBODY",
        text="[BP:UB:body:bodies]",
        translation="[BP:UB:тело:тела]",
        # translator_comment="Some comment\n",
        # source_file="body_default.txt",
        # line_number=7,
    )

    result = next(load_po(StringIO(data)))
    assert result == expected
Example 7
def main(input_file: str, output_file: str, encoding: str = "utf-8"):
    with open(input_file, "r", encoding="utf-8") as pofile:
        dictionary = [(item.text, item.translation) for item in load_po(pofile)
                      if item.text]

    exclusions_leading = {"  Choose Name  ", "  Trade Agreement with "}
    exclusions_trailing = {"  Choose Name  "}

    if encoding == "cp1251":
        exclusions_trailing.add("Histories of ")

    with open(output_file,
              "w",
              newline="",
              encoding=encoding,
              errors="replace") as outfile:
        writer = csv.writer(outfile, dialect="unix")

        for original_string, translation in prepare_dictionary(
                dictionary, exclusions_leading, exclusions_trailing):
            writer.writerow(
                [escape_string(original_string),
                 escape_string(translation)])
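As a rough usage sketch (the file names are placeholders, and how main is wired to a command-line interface is not shown in the example), the function could be called directly to dump the .po dictionary into a two-column CSV:

# Write (original, translation) pairs from a .po file into a CSV file,
# re-encoded to cp1251 with unencodable characters replaced.
main("translation.po", "translation.csv", encoding="cp1251")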