Code example #1
0
File: test_output_doc.py — Project: buret/pylmflib
 def test_doc_write(self):
     """Write a lexical resource to a .docx file, first with the default
     LMF-to-document mapping and then with a custom one.

     Only checks that the output file is created and readable; the
     document content itself is not asserted.
     """
     import os
     import sys
     # Create LMF objects: resource -> lexicon -> entry -> lemma.
     lexical_entry = LexicalEntry()
     lexical_entry.lemma = Lemma()
     lexical_entry.partOfSpeech = "toto"
     lexical_entry.status = "draft"
     lexical_entry.lemma.lexeme = "hello"
     lexicon = Lexicon()
     lexicon.add_lexical_entry(lexical_entry)
     lexical_resource = LexicalResource()
     lexical_resource.add_lexicon(lexicon)
     # Write document file next to the test module and test result.
     utest_path = sys.path[0] + '/'
     doc_filename = utest_path + "output.docx"
     doc_write(lexical_resource, doc_filename)
     # Context manager guarantees the handle is closed even if reading fails.
     # NOTE(review): .docx is zip data; a text-mode read only proves the file
     # exists and is readable — confirm that is all this test intends.
     with open(doc_filename, "r") as doc_file:
         doc_file.readlines()
     # Customize mapping: a stub that ignores its arguments entirely.
     def lmf2doc(lexicon, document, items, sort_order, paradigms, reverse):
         return "test"
     # Write document file again with the custom mapping and re-check.
     doc_write(lexical_resource, doc_filename, None, lmf2doc)
     with open(doc_filename, "r") as doc_file:
         doc_file.readlines()
     del lexical_entry.lemma
     lexical_entry.lemma = None
     del lexical_entry, lexicon
     lexicon = None
     del lexical_resource
     # Remove document file
     os.remove(doc_filename)
Code example #2
0
 def test_xml_lmf_write(self):
     """Serialize a single LexicalEntry to an XML LMF file and verify the
     exact lines written, then clean up the output file."""
     import os
     import sys
     # Create a minimal lexical entry carrying a lemma and two features.
     lexical_entry = LexicalEntry()
     lexical_entry.lemma = Lemma()
     lexical_entry.partOfSpeech = "toto"
     lexical_entry.status = "draft"
     lexical_entry.lemma.lexeme = "hello"
     # Write XML LMF file next to the test module and test result.
     utest_path = sys.path[0] + '/'
     xml_lmf_filename = utest_path + "lmf_output.xml"
     xml_lmf_write(lexical_entry, xml_lmf_filename)
     # Triple-quoted strings avoid escaping the embedded double quotes.
     expected_lines = ["""<?xml version="1.0" encoding="utf-8"?>""" + EOL,
         """<LexicalEntry id="0">""" + EOL,
         """    <feat att="status" val="draft"/>""" + EOL,
         """    <Lemma>""" + EOL,
         """        <feat att="lexeme" val="hello"/>""" + EOL,
         """    </Lemma>""" + EOL,
         """    <feat att="partOfSpeech" val="toto"/>""" + EOL,
         """</LexicalEntry>""" + EOL]
     # Context manager guarantees the file is closed even if the assert fails.
     with open(xml_lmf_filename, "r") as xml_lmf_file:
         self.assertListEqual(expected_lines, xml_lmf_file.readlines())
     del lexical_entry.lemma
     lexical_entry.lemma = None
     del lexical_entry
     # Remove XML LMF file
     os.remove(xml_lmf_filename)
Code example #3
0
 def test_build_sub_elements(self):
     """Check that build_sub_elements() expands a LexicalEntry's attributes
     into the expected XML sub-elements under an empty root element."""
     # Prepare a lexical entry with a lemma and two plain features.
     entry = LexicalEntry()
     entry.lemma = Lemma()
     entry.partOfSpeech = "toto"
     entry.status = "draft"
     entry.lemma.lexeme = "hello"
     root = Element("LexicalEntry")
     # Populate the empty element from the entry, then inspect the result.
     build_sub_elements(entry, root)
     # The lemma's lexeme becomes a nested <Lemma><feat .../></Lemma>.
     lexeme_feat = root.find("Lemma").find("feat")
     self.assertEqual(lexeme_feat.attrib["att"], "lexeme")
     self.assertEqual(lexeme_feat.attrib["val"], "hello")
     # The two remaining features are direct <feat/> children, status first.
     status_feat, pos_feat = root.findall("feat")
     self.assertEqual(status_feat.attrib["att"], "status")
     self.assertEqual(status_feat.attrib["val"], "draft")
     self.assertEqual(pos_feat.attrib["att"], "partOfSpeech")
     self.assertEqual(pos_feat.attrib["val"], "toto")
     del entry.lemma
     entry.lemma = None
     del entry, root
Code example #4
0
 def test_mdf_write(self):
     """Write a one-entry lexicon to MDF format, first with the default
     marker mapping and then with a customized mapping and marker order,
     verifying the output lines each time."""
     import os
     import sys
     # Create a one-entry lexicon.
     lexical_entry = LexicalEntry()
     lexical_entry.lemma = Lemma()
     lexical_entry.partOfSpeech = "toto"
     lexical_entry.status = "draft"
     lexical_entry.lemma.lexeme = "hello"
     lexicon = Lexicon()
     lexicon.add_lexical_entry(lexical_entry)
     # Write MDF file with the default mapping and test result.
     utest_path = sys.path[0] + '/'
     mdf_filename = utest_path + "output.txt"
     mdf_write(lexicon, mdf_filename)
     expected_lines = ["\\lx hello" + EOL, "\\ps toto" + EOL, "\\st draft" + EOL, EOL]
     # Context manager guarantees the file is closed even if the assert fails.
     with open(mdf_filename, "r") as mdf_file:
         self.assertListEqual(expected_lines, mdf_file.readlines())
     # Customize mapping: deliberately swap which LMF attribute feeds each
     # MDF marker, so the second write is distinguishable from the first.
     lmf2mdf = {
         "lx" : lambda lexical_entry: lexical_entry.get_status(),
         "ps" : lambda lexical_entry: lexical_entry.get_partOfSpeech(),
         "st" : lambda lexical_entry: lexical_entry.get_lexeme()
     }
     order = ["st", "lx", "ps"]
     # Write MDF file with the custom mapping/order and test result.
     mdf_write(lexicon, mdf_filename, lmf2mdf, order)
     expected_lines = ["\\st hello" + EOL, "\\lx draft" + EOL, "\\ps toto" + EOL, EOL]
     with open(mdf_filename, "r") as mdf_file:
         self.assertListEqual(expected_lines, mdf_file.readlines())
     del lexical_entry.lemma
     lexical_entry.lemma = None
     del lexical_entry, lexicon
     # Remove MDF file
     os.remove(mdf_filename)
Code example #5
0
    def test_odt_write(self):
        """Write a lexical resource to an .odt file, first with the default
        LMF-to-document mapping and then with a custom one.

        Only checks that the output file is created and readable; the
        document content itself is not asserted.
        """
        import os
        import sys
        # Create LMF objects: resource -> lexicon -> entry -> lemma.
        lexical_entry = LexicalEntry()
        lexical_entry.lemma = Lemma()
        lexical_entry.partOfSpeech = "toto"
        lexical_entry.status = "draft"
        lexical_entry.lemma.lexeme = "hello"
        lexicon = Lexicon()
        lexicon.add_lexical_entry(lexical_entry)
        lexical_resource = LexicalResource()
        lexical_resource.add_lexicon(lexicon)
        # Write document file next to the test module and test result.
        utest_path = sys.path[0] + '/'
        odt_filename = utest_path + "output.odt"
        odt_write(lexical_resource, odt_filename)
        # Context manager guarantees the handle is closed even if reading fails.
        # NOTE(review): .odt is zip data; a text-mode read only proves the file
        # exists and is readable — confirm that is all this test intends.
        with open(odt_filename, "r") as odt_file:
            odt_file.readlines()

        # Customize mapping: a stub that ignores its arguments entirely.
        def lmf2odt(lexicon, document, items, sort_order, paradigms, reverse):
            return "test"

        # Write document file again with the custom mapping and re-check.
        odt_write(lexical_resource, odt_filename, None, lmf2odt)
        with open(odt_filename, "r") as odt_file:
            odt_file.readlines()
        del lexical_entry.lemma
        lexical_entry.lemma = None
        del lexical_entry, lexicon
        lexicon = None
        del lexical_resource
        # Remove document file
        os.remove(odt_filename)
Code example #6
0
    def test_tex_write(self):
        """Write a lexical resource to a LaTeX file, first with the default
        LMF-to-TeX mapping and then with a customized one, verifying the
        generated lines each time."""
        import os
        import sys
        # Create LMF objects: resource -> lexicon -> entry -> lemma.
        lexical_entry = LexicalEntry()
        lexical_entry.lemma = Lemma()
        lexical_entry.partOfSpeech = "toto"
        lexical_entry.status = "draft"
        lexical_entry.lemma.lexeme = "hello"
        lexicon = Lexicon()
        lexicon.add_lexical_entry(lexical_entry)
        lexical_resource = LexicalResource()
        lexical_resource.add_lexicon(lexicon)
        # Write LaTeX file with the default mapping and test result.
        utest_path = sys.path[0] + '/'
        tex_filename = utest_path + "output.tex"
        tex_write(lexical_resource, tex_filename)
        # Preamble/prologue lines emitted before any entry.
        begin_lines = [
            EOL,
            "\\begin{document}" + EOL,
            "\\maketitle" + EOL,
            "\\newpage" + EOL,
            EOL,
            "\\def\\mytextsc{\\bgroup\\obeyspaces\\mytextscaux}" + EOL,
            "\\def\\mytextscaux#1{\\mytextscauxii #1\\relax\\relax\\egroup}" + EOL,
            "\\def\\mytextscauxii#1{%" + EOL,
            "\\ifx\\relax#1\\else \\ifcat#1\\@sptoken{} \\expandafter\\expandafter\\expandafter\\mytextscauxii\\else" + EOL,
            "\\ifnum`#1=\\uccode`#1 {\\normalsize #1}\\else {\\footnotesize \\uppercase{#1}}\\fi \\expandafter\\expandafter\\expandafter\\mytextscauxii\\expandafter\\fi\\fi}" + EOL,
            EOL,
            "\\setlength\\parindent{0cm}" + EOL,
            EOL,
            "\\addmediapath{.}" + EOL,
            "\\addmediapath{./mp3}" + EOL,
            "\\addmediapath{./wav}" + EOL,
            "\\graphicspath{{" + os.path.abspath('.') + "/pylmflib/output/img/}}" + EOL,
            EOL,
            "\\newpage" + EOL,
            "\\begin{multicols}{2}" + EOL,
            EOL
        ]
        # NOTE(review): single backslashes below ("\e", "\i", "\m", "\l") pass
        # through as literal backslash+char in Python; the literals reproduce
        # the writer's output byte-for-byte and must not be "normalized".
        end_lines = ["\end{multicols}" + EOL, "\end{document}" + EOL]
        # Entry section produced by the default mapping.
        expected_lines = [
            "\\newpage" + EOL,
            "\\section*{\\centering- \\textbf{\ipa{H}} \\textbf{\ipa{h}} -}" + EOL,
            #"\\pdfbookmark[1]{\ipa{ H h }}{ H h }" + EOL,
            "\\paragraph{\\hspace{-0.5cm} \\textbf{\ipa{hello}}} \\hypertarget{01}{}" + EOL,
            "\markboth{\\textbf{\\ipa{hello}}}{}" + EOL,
            "\\textit{Status:} draft" + EOL,
            "\lhead{\\firstmark}" + EOL,
            "\\rhead{\\botmark}" + EOL,
            EOL
        ]
        # Context manager guarantees the file is closed even if the assert fails.
        with open(tex_filename, "r") as tex_file:
            self.assertListEqual(begin_lines + expected_lines + end_lines,
                                 tex_file.readlines())
        # Customize mapping: render each attribute as an English sentence.
        my_lmf_tex = {
            "Lemma.lexeme":
            lambda lexical_entry: "is " + lexical_entry.get_lexeme() + "." + EOL,
            "LexicalEntry.id":
            lambda lexical_entry: "The lexical entry " + str(lexical_entry.get_id()) + " ",
            "LexicalEntry.partOfSpeech":
            lambda lexical_entry: "Its grammatical category is " + lexical_entry.get_partOfSpeech() + "." + EOL,
            "LexicalEntry.status":
            lambda lexical_entry: "Warning: " + lexical_entry.get_status() + " version!" + EOL
        }
        my_order = [
            "LexicalEntry.id", "Lemma.lexeme", "LexicalEntry.partOfSpeech",
            "LexicalEntry.status"
        ]

        def lmf2tex(entry, font):
            # Concatenate the rendered attributes in the order given above.
            result = ""
            for attribute in my_order:
                result += my_lmf_tex[attribute](entry)
            return result

        # Write LaTeX file with the custom mapping and test result.
        tex_write(lexical_resource, tex_filename, None, None, lmf2tex, font)
        expected_lines = [
            "\\newpage" + EOL,
            "\\section*{\\centering- \\textbf{\ipa{H}} \\textbf{\ipa{h}} -}" + EOL,
            #"\\pdfbookmark[1]{\ipa{ H h }}{ H h }" + EOL,
            "The lexical entry 01 is hello." + EOL,
            "Its grammatical category is toto." + EOL,
            "Warning: draft version!" + EOL,
            "\lhead{\\firstmark}" + EOL,
            "\\rhead{\\botmark}" + EOL,
            EOL
        ]
        with open(tex_filename, "r") as tex_file:
            self.assertListEqual(begin_lines + expected_lines + end_lines,
                                 tex_file.readlines())
        del lexical_entry.lemma
        lexical_entry.lemma = None
        del lexical_entry, lexicon
        lexicon = None
        del lexical_resource
        # Remove LaTeX file
        os.remove(tex_filename)
Code example #7
0
File: test_output_tex.py — Project: buret/pylmflib
 def test_tex_write(self):
     """Write a lexical resource to a LaTeX file, first with the default
     LMF-to-TeX mapping and then with a customized one, verifying the
     generated lines each time."""
     import os
     import sys
     # Create LMF objects: resource -> lexicon -> entry -> lemma.
     lexical_entry = LexicalEntry()
     lexical_entry.lemma = Lemma()
     lexical_entry.partOfSpeech = "toto"
     lexical_entry.status = "draft"
     lexical_entry.lemma.lexeme = "hello"
     lexicon = Lexicon()
     lexicon.add_lexical_entry(lexical_entry)
     lexical_resource = LexicalResource()
     lexical_resource.add_lexicon(lexicon)
     # Write LaTeX file with the default mapping and test result.
     utest_path = sys.path[0] + '/'
     tex_filename = utest_path + "output.tex"
     tex_write(lexical_resource, tex_filename)
     # Preamble/prologue lines emitted before any entry.
     begin_lines = [EOL,
         "\\begin{document}" + EOL,
         "\\maketitle" + EOL,
         "\\newpage" + EOL,
         EOL,
         "\\def\\mytextsc{\\bgroup\\obeyspaces\\mytextscaux}" + EOL,
         "\\def\\mytextscaux#1{\\mytextscauxii #1\\relax\\relax\\egroup}" + EOL,
         "\\def\\mytextscauxii#1{%" + EOL,
         "\\ifx\\relax#1\\else \\ifcat#1\\@sptoken{} \\expandafter\\expandafter\\expandafter\\mytextscauxii\\else" + EOL,
         "\\ifnum`#1=\\uccode`#1 {\\normalsize #1}\\else {\\footnotesize \\uppercase{#1}}\\fi \\expandafter\\expandafter\\expandafter\\mytextscauxii\\expandafter\\fi\\fi}" + EOL,
         EOL,
         "\\setlength\\parindent{0cm}" + EOL,
         EOL,
         "\\addmediapath{.}" + EOL,
         "\\addmediapath{./mp3}" + EOL,
         "\\addmediapath{./wav}" + EOL,
         "\\graphicspath{{" + os.path.abspath('.') + "/pylmflib/output/img/}}" + EOL,
         EOL,
         "\\newpage" + EOL,
         "\\begin{multicols}{2}" + EOL,
         EOL
     ]
     # NOTE(review): single backslashes below ("\e", "\i", "\m", "\l") pass
     # through as literal backslash+char in Python; the literals reproduce
     # the writer's output byte-for-byte and must not be "normalized".
     end_lines = [
         "\end{multicols}" + EOL,
         "\end{document}" + EOL
     ]
     # Entry section produced by the default mapping.
     expected_lines = [
         "\\newpage" + EOL,
         "\\section*{\\centering- \\textbf{\ipa{H}} \\textbf{\ipa{h}} -}" + EOL,
         #"\\pdfbookmark[1]{\ipa{ H h }}{ H h }" + EOL,
         "\\paragraph{\\hspace{-0.5cm} \\textbf{\ipa{hello}}} \\hypertarget{01}{}" + EOL,
         "\markboth{\\textbf{\\ipa{hello}}}{}" + EOL,
         "\\textit{Status:} draft" + EOL,
         "\lhead{\\firstmark}" + EOL,
         "\\rhead{\\botmark}" + EOL,
         EOL
     ]
     # Context manager guarantees the file is closed even if the assert fails.
     with open(tex_filename, "r") as tex_file:
         self.assertListEqual(begin_lines + expected_lines + end_lines, tex_file.readlines())
     # Customize mapping: render each attribute as an English sentence.
     my_lmf_tex = {
         "Lemma.lexeme" : lambda lexical_entry: "is " + lexical_entry.get_lexeme() + "." + EOL,
         "LexicalEntry.id" : lambda lexical_entry: "The lexical entry " + str(lexical_entry.get_id()) + " ",
         "LexicalEntry.partOfSpeech" : lambda lexical_entry: "Its grammatical category is " + lexical_entry.get_partOfSpeech() + "." + EOL,
         "LexicalEntry.status" : lambda lexical_entry: "Warning: " + lexical_entry.get_status() + " version!" + EOL
     }
     my_order = ["LexicalEntry.id", "Lemma.lexeme", "LexicalEntry.partOfSpeech", "LexicalEntry.status"]
     def lmf2tex(entry, font):
         # Concatenate the rendered attributes in the order given above.
         result = ""
         for attribute in my_order:
             result += my_lmf_tex[attribute](entry)
         return result
     # Write LaTeX file with the custom mapping and test result.
     tex_write(lexical_resource, tex_filename, None, None, lmf2tex, font)
     expected_lines = [
         "\\newpage" + EOL,
         "\\section*{\\centering- \\textbf{\ipa{H}} \\textbf{\ipa{h}} -}" + EOL,
         #"\\pdfbookmark[1]{\ipa{ H h }}{ H h }" + EOL,
         "The lexical entry 01 is hello." + EOL,
         "Its grammatical category is toto." + EOL,
         "Warning: draft version!" + EOL,
         "\lhead{\\firstmark}" + EOL,
         "\\rhead{\\botmark}" + EOL,
         EOL
         ]
     with open(tex_filename, "r") as tex_file:
         self.assertListEqual(begin_lines + expected_lines + end_lines, tex_file.readlines())
     del lexical_entry.lemma
     lexical_entry.lemma = None
     del lexical_entry, lexicon
     lexicon = None
     del lexical_resource
     # Remove LaTeX file
     os.remove(tex_filename)