Example #1
 def test_doc_write(self):
     import sys, os
     # Create LMF objects
     lexical_entry = LexicalEntry()
     lexical_entry.lemma = Lemma()
     lexical_entry.partOfSpeech = "toto"
     lexical_entry.status = "draft"
     lexical_entry.lemma.lexeme = "hello"
     lexicon = Lexicon()
     lexicon.add_lexical_entry(lexical_entry)
     lexical_resource = LexicalResource()
     lexical_resource.add_lexicon(lexicon)
     # Write document file and test result
     utest_path = sys.path[0] + '/'
     doc_filename = utest_path + "output.docx"
     doc_write(lexical_resource, doc_filename)
     doc_file = open(doc_filename, "r")
     doc_file.readlines()
     doc_file.close()
     # Customize mapping
     def lmf2doc(lexicon, document, items, sort_order, paradigms, reverse):
         return "test"
     # Write document file and test result
     doc_write(lexical_resource, doc_filename, None, lmf2doc)
     doc_file = open(doc_filename, "r")
     doc_file.readlines()
     doc_file.close()
     del lexical_entry.lemma
     lexical_entry.lemma = None
     del lexical_entry, lexicon
     lexicon = None
     del lexical_resource
     # Remove document file
     os.remove(doc_filename)
Example #2
 def test_mdf_write(self):
     import sys, os
     # Create LMF objects
     lexical_entry = LexicalEntry()
     lexical_entry.lemma = Lemma()
     lexical_entry.partOfSpeech = "toto"
     lexical_entry.status = "draft"
     lexical_entry.lemma.lexeme = "hello"
     lexicon = Lexicon()
     lexicon.add_lexical_entry(lexical_entry)
     # Write MDF file and test result
     utest_path = sys.path[0] + '/'
     mdf_filename = utest_path + "output.txt"
     mdf_write(lexicon, mdf_filename)
     mdf_file = open(mdf_filename, "r")
     expected_lines = ["\\lx hello" + EOL, "\\ps toto" + EOL, "\\st draft" + EOL, EOL]
     self.assertListEqual(expected_lines, mdf_file.readlines())
     mdf_file.close()
     # Customize mapping
     lmf2mdf = dict({
         "lx" : lambda lexical_entry: lexical_entry.get_status(),
         "ps" : lambda lexical_entry: lexical_entry.get_partOfSpeech(),
         "st" : lambda lexical_entry: lexical_entry.get_lexeme()
     })
     order = ["st", "lx", "ps"]
     # Write MDF file and test result
     mdf_write(lexicon, mdf_filename, lmf2mdf, order)
     mdf_file = open(mdf_filename, "r")
     expected_lines = ["\\st hello" + EOL, "\\lx draft" + EOL, "\\ps toto" + EOL, EOL]
     self.assertListEqual(expected_lines, mdf_file.readlines())
     mdf_file.close()
     del lexical_entry.lemma
     lexical_entry.lemma = None
     del lexical_entry, lexicon
     # Remove MDF file
     os.remove(mdf_filename)
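
A hedged aside on the mapping API this test exercises: judging from the call above, lmf2mdf is a dictionary keyed by MDF marker whose values take a LexicalEntry and return the string to write after that marker, and order lists the markers in output sequence. A minimal standalone sketch under that assumption (the entry values and output path are illustrative):

# Sketch only: assumes LexicalEntry, Lemma, Lexicon and mdf_write are importable
# as in the test above; "output.txt" is an arbitrary path.
entry = LexicalEntry()
entry.lemma = Lemma()
entry.lemma.lexeme = "hello"
entry.partOfSpeech = "toto"
entry.status = "draft"
lexicon = Lexicon().add_lexical_entry(entry)
lmf2mdf = {
    "lx": lambda lexical_entry: lexical_entry.get_lexeme(),
    "ps": lambda lexical_entry: lexical_entry.get_partOfSpeech(),
    "st": lambda lexical_entry: lexical_entry.get_status()
}
order = ["lx", "ps", "st"]
mdf_write(lexicon, "output.txt", lmf2mdf, order)
# Expected file contents (one '\marker value' line per marker, then a blank line):
#   \lx hello
#   \ps toto
#   \st draft
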
Example #3
    def test_odt_write(self):
        import sys, os
        # Create LMF objects
        lexical_entry = LexicalEntry()
        lexical_entry.lemma = Lemma()
        lexical_entry.partOfSpeech = "toto"
        lexical_entry.status = "draft"
        lexical_entry.lemma.lexeme = "hello"
        lexicon = Lexicon()
        lexicon.add_lexical_entry(lexical_entry)
        lexical_resource = LexicalResource()
        lexical_resource.add_lexicon(lexicon)
        # Write document file and test result
        utest_path = sys.path[0] + '/'
        odt_filename = utest_path + "output.odt"
        odt_write(lexical_resource, odt_filename)
        odt_file = open(odt_filename, "r")
        odt_file.readlines()
        odt_file.close()

        # Customize mapping
        def lmf2odt(lexicon, document, items, sort_order, paradigms, reverse):
            return "test"

        # Write document file and test result
        odt_write(lexical_resource, odt_filename, None, lmf2odt)
        odt_file = open(odt_filename, "r")
        odt_file.readlines()
        odt_file.close()
        del lexical_entry.lemma
        lexical_entry.lemma = None
        del lexical_entry, lexicon
        lexicon = None
        del lexical_resource
        # Remove document file
        os.remove(odt_filename)
Example #4
    def test_tex_write(self):
        import sys, os
        # Create LMF objects
        lexical_entry = LexicalEntry()
        lexical_entry.lemma = Lemma()
        lexical_entry.partOfSpeech = "toto"
        lexical_entry.status = "draft"
        lexical_entry.lemma.lexeme = "hello"
        lexicon = Lexicon()
        lexicon.add_lexical_entry(lexical_entry)
        lexical_resource = LexicalResource()
        lexical_resource.add_lexicon(lexicon)
        # Write LaTeX file and test result
        utest_path = sys.path[0] + '/'
        tex_filename = utest_path + "output.tex"
        tex_write(lexical_resource, tex_filename)
        tex_file = open(tex_filename, "r")
        begin_lines = [
            EOL, "\\begin{document}" + EOL, "\\maketitle" + EOL,
            "\\newpage" + EOL, EOL,
            "\\def\\mytextsc{\\bgroup\\obeyspaces\\mytextscaux}" + EOL,
            "\\def\\mytextscaux#1{\\mytextscauxii #1\\relax\\relax\\egroup}" +
            EOL, "\\def\\mytextscauxii#1{%" + EOL,
            "\\ifx\\relax#1\\else \\ifcat#1\\@sptoken{} \\expandafter\\expandafter\\expandafter\\mytextscauxii\\else"
            + EOL,
            "\\ifnum`#1=\\uccode`#1 {\\normalsize #1}\\else {\\footnotesize \\uppercase{#1}}\\fi \\expandafter\\expandafter\\expandafter\\mytextscauxii\\expandafter\\fi\\fi}"
            + EOL, EOL, "\\setlength\\parindent{0cm}" + EOL, EOL,
            "\\addmediapath{.}" + EOL, "\\addmediapath{./mp3}" + EOL,
            "\\addmediapath{./wav}" + EOL, "\\graphicspath{{" +
            os.path.abspath('.') + "/pylmflib/output/img/}}" + EOL, EOL,
            "\\newpage" + EOL, "\\begin{multicols}{2}" + EOL, EOL
        ]
        end_lines = ["\end{multicols}" + EOL, "\end{document}" + EOL]
        expected_lines = [
            "\\newpage" + EOL,
            "\\section*{\\centering- \\textbf{\ipa{H}} \\textbf{\ipa{h}} -}" +
            EOL,
            #"\\pdfbookmark[1]{\ipa{ H h }}{ H h }" + EOL,
            "\\paragraph{\\hspace{-0.5cm} \\textbf{\ipa{hello}}} \\hypertarget{01}{}"
            + EOL,
            "\markboth{\\textbf{\\ipa{hello}}}{}" + EOL,
            "\\textit{Status:} draft" + EOL,
            "\lhead{\\firstmark}" + EOL,
            "\\rhead{\\botmark}" + EOL,
            EOL
        ]
        self.assertListEqual(begin_lines + expected_lines + end_lines,
                             tex_file.readlines())
        tex_file.close()
        # Customize mapping
        my_lmf_tex = dict({
            "Lemma.lexeme":
            lambda lexical_entry: "is " + lexical_entry.get_lexeme(
            ) + "." + EOL,
            "LexicalEntry.id":
            lambda lexical_entry: "The lexical entry " + str(lexical_entry.
                                                             get_id()) + " ",
            "LexicalEntry.partOfSpeech":
            lambda lexical_entry: "Its grammatical category is " +
            lexical_entry.get_partOfSpeech() + "." + EOL,
            "LexicalEntry.status":
            lambda lexical_entry: "Warning: " + lexical_entry.get_status(
            ) + " version!" + EOL
        })
        my_order = [
            "LexicalEntry.id", "Lemma.lexeme", "LexicalEntry.partOfSpeech",
            "LexicalEntry.status"
        ]

        def lmf2tex(entry, font):
            result = ""
            for attribute in my_order:
                result += my_lmf_tex[attribute](entry)
            return result

        # Write LaTeX file and test result
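        # NOTE: 'font' is not defined in this snippet; it presumably comes from the test module's pylmflib imports.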
        tex_write(lexical_resource, tex_filename, None, None, lmf2tex, font)
        tex_file = open(tex_filename, "r")
        expected_lines = [
            "\\newpage" + EOL,
            "\\section*{\\centering- \\textbf{\ipa{H}} \\textbf{\ipa{h}} -}" +
            EOL,
            #"\\pdfbookmark[1]{\ipa{ H h }}{ H h }" + EOL,
            "The lexical entry 01 is hello." + EOL,
            "Its grammatical category is toto." + EOL,
            "Warning: draft version!" + EOL,
            "\lhead{\\firstmark}" + EOL,
            "\\rhead{\\botmark}" + EOL,
            EOL
        ]
        self.assertListEqual(begin_lines + expected_lines + end_lines,
                             tex_file.readlines())
        tex_file.close()
        del lexical_entry.lemma
        lexical_entry.lemma = None
        del lexical_entry, lexicon
        lexicon = None
        del lexical_resource
        # Remove LaTeX file
        os.remove(tex_filename)
Example #5
File: mdf.py Project: yuhsianglin/HimalCo
def mdf_read(filename=None,
             mdf2lmf=mdf_lmf,
             lexicon=None,
             id=None,
             encoding=ENCODING):
    """! @brief Read an MDF file.
    @param filename The name of the MDF file to read with full path, for instance 'user/input.txt'.
    @param mdf2lmf A Python dictionary describing the mapping between MDF markers and LMF representation. Default value is 'mdf_lmf' dictionary defined in 'pylmflib/config/mdf.py'. Please refer to it as an example.
    @param lexicon An existing Lexicon to fill with lexical entries to read.
    @param id A Python string identifying the lexicon to create.
    @param encoding Use 'utf-8' encoding by default. Otherwise, the user has to specify the native encoding of the document.
    @return A Lexicon instance containing all lexical entries.
    """
    import re
    # If not provided, create a Lexicon instance to contain all lexical entries
    if lexicon is None:
        lexicon = Lexicon(id)
    # If no filename is given, fall back to the lexicon's entry source
    if filename is None:
        filename = lexicon.get_entrySource()
    else:
        # Set lexicon attribute
        lexicon.set_entrySource(filename)
    # Read in unicode
    mdf_file = open_read(filename, encoding=encoding)
    # MDF syntax is the following: '\marker value'
    mdf_pattern = """^\\\(\w*) (<(.*)>)? ?(.*)$"""
    # Add each lexical entry to the lexicon
    current_entry = None
    sub_entry = None
    component = None
    main_entry = None
    for line in mdf_file.readlines():
        # Do not parse empty lines
        if line != EOL:
            result = re.match(mdf_pattern, line)
            if result is None:
                # Line does not match the MDF pattern => skip it and parse the next line
                continue
            marker = result.group(1)
            attrs = result.group(3)
            value = result.group(4)
            # Do not consider markers starting with an underscore character (e.g. '_sh' and '_DateStampHasFourDigitYear')
            if marker[0] == '_':
                continue
            # Remove trailing spaces and end-of-line characters
            value = value.rstrip(' \r\n')
            # Do not consider empty fields
            if value == "":
                continue
            # Check if the current entry is a multiword expression
            is_mwe = False
            if marker == "lf":
                lf = value.split(" = ")
                if lf[0].startswith("Component"):
                    component_nb = lf[0].lstrip("Component")
                    value = lf[1]
                    is_mwe = True
            # 'lx' and 'se' markers indicate a new entry
            if marker == "lx" or marker == "se" or is_mwe:
                # Compute a unique identifier
                uid = uni2sampa(value)
                if marker == "se":
                    # Create a subentry
                    sub_entry = LexicalEntry(uid)
                    # An MDF subentry corresponds to an LMF lexical entry
                    mdf2lmf["lx"](value, sub_entry)
                    # Add it to the lexicon
                    lexicon.add_lexical_entry(sub_entry)
                    # Manage main entry
                    if main_entry is None:
                        main_entry = current_entry
                    else:
                        current_entry = main_entry
                    # Set main entry
                    homonym_nb = current_entry.get_homonymNumber()
                    if homonym_nb is None:
                        homonym_nb = ""
                    sub_entry.create_and_add_related_form(
                        current_entry.get_lexeme() + homonym_nb, "main entry")
                elif is_mwe:
                    # Create a subentry
                    component = LexicalEntry(uid)
                    # An MDF subentry corresponds to an LMF lexical entry
                    mdf2lmf["lx"](value, component)
                    # Add it to the lexicon
                    lexicon.add_lexical_entry(component)
                    # Manage current entry
                    if sub_entry is not None:
                        current_entry = sub_entry
                    # Set component
                    homonym_nb = current_entry.get_homonymNumber()
                    if homonym_nb is None:
                        homonym_nb = ""
                    current_entry.create_and_add_component(component_nb, value)
                    component.create_and_add_related_form(
                        current_entry.get_lexeme() + homonym_nb,
                        "complex predicate")
                    component.set_independentWord(False)
                else:
                    # Create a new entry
                    current_entry = LexicalEntry(uid)
                    # Add it to the lexicon
                    lexicon.add_lexical_entry(current_entry)
                    # Reset main entry
                    main_entry = None
            # Map MDF marker and value to LMF representation
            try:
                if attrs is not None:
                    # There are attributes
                    attributes = {}
                    # Remove quotation marks from attributes if any
                    attrs = attrs.replace('"', '')
                    for attr in attrs.split(' '):
                        attributes.update(
                            {attr.split('=')[0]: attr.split('=')[1]})
                    # A customized marker starts with '__' characters
                    mdf2lmf["__" + marker](attributes, value, current_entry)
                else:
                    mdf2lmf[marker](value, current_entry)
                if sub_entry is not None:
                    current_entry = sub_entry
                    sub_entry = None
                if component is not None:
                    sub_entry = current_entry
                    current_entry = component
                    component = None
            except KeyError:
                # When printing, we need to convert 'unicode' into 'str' using 'utf-8' encoding:
                print Warning(
                    "MDF marker '%s' encountered for lexeme '%s' is not defined in configuration"
                    % (marker.encode(ENCODING),
                       current_entry.get_lexeme().encode(ENCODING)))
            except Error as exception:
                exception.handle()
    mdf_file.close()
    return lexicon
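
For orientation, a hedged usage sketch of mdf_read based only on the signature and docstring above; the file path and its contents are illustrative and follow the '\marker value' syntax the parser matches:

# Suppose 'user/input.txt' contains the three markers used in the unit tests:
#   \lx hello
#   \ps toto
#   \st draft
lexicon = mdf_read("user/input.txt", id="my_lexicon")
print lexicon.count_lexical_entries()    # expected: 1
for lexical_entry in lexicon.get_lexical_entries():
    print lexical_entry.get_lexeme()     # expected: hello
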
Example #6
 def test_tex_write(self):
     import sys, os
     # Create LMF objects
     lexical_entry = LexicalEntry()
     lexical_entry.lemma = Lemma()
     lexical_entry.partOfSpeech = "toto"
     lexical_entry.status = "draft"
     lexical_entry.lemma.lexeme = "hello"
     lexicon = Lexicon()
     lexicon.add_lexical_entry(lexical_entry)
     lexical_resource = LexicalResource()
     lexical_resource.add_lexicon(lexicon)
     # Write LaTeX file and test result
     utest_path = sys.path[0] + '/'
     tex_filename = utest_path + "output.tex"
     tex_write(lexical_resource, tex_filename)
     tex_file = open(tex_filename, "r")
     begin_lines = [EOL,
         "\\begin{document}" + EOL,
         "\\maketitle" + EOL,
         "\\newpage" + EOL,
         EOL,
         "\\def\\mytextsc{\\bgroup\\obeyspaces\\mytextscaux}" + EOL,
         "\\def\\mytextscaux#1{\\mytextscauxii #1\\relax\\relax\\egroup}" + EOL,
         "\\def\\mytextscauxii#1{%" + EOL,
         "\\ifx\\relax#1\\else \\ifcat#1\\@sptoken{} \\expandafter\\expandafter\\expandafter\\mytextscauxii\\else" + EOL,
         "\\ifnum`#1=\\uccode`#1 {\\normalsize #1}\\else {\\footnotesize \\uppercase{#1}}\\fi \\expandafter\\expandafter\\expandafter\\mytextscauxii\\expandafter\\fi\\fi}" + EOL,
         EOL,
         "\\setlength\\parindent{0cm}" + EOL,
         EOL,
         "\\addmediapath{.}" + EOL,
         "\\addmediapath{./mp3}" + EOL,
         "\\addmediapath{./wav}" + EOL,
         "\\graphicspath{{" + os.path.abspath('.') + "/pylmflib/output/img/}}" + EOL,
         EOL,
         "\\newpage" + EOL,
         "\\begin{multicols}{2}" + EOL,
         EOL
     ]
     end_lines = [
         "\end{multicols}" + EOL,
         "\end{document}" + EOL
     ]
     expected_lines = [
         "\\newpage" + EOL,
         "\\section*{\\centering- \\textbf{\ipa{H}} \\textbf{\ipa{h}} -}" + EOL,
         #"\\pdfbookmark[1]{\ipa{ H h }}{ H h }" + EOL,
         "\\paragraph{\\hspace{-0.5cm} \\textbf{\ipa{hello}}} \\hypertarget{01}{}" + EOL,
         "\markboth{\\textbf{\\ipa{hello}}}{}" + EOL,
         "\\textit{Status:} draft" + EOL,
         "\lhead{\\firstmark}" + EOL,
         "\\rhead{\\botmark}" + EOL,
         EOL
     ]
     self.assertListEqual(begin_lines + expected_lines + end_lines, tex_file.readlines())
     tex_file.close()
     # Customize mapping
     my_lmf_tex = dict({
         "Lemma.lexeme" : lambda lexical_entry: "is " + lexical_entry.get_lexeme() + "." + EOL,
         "LexicalEntry.id" : lambda lexical_entry: "The lexical entry " + str(lexical_entry.get_id()) + " ",
         "LexicalEntry.partOfSpeech" : lambda lexical_entry: "Its grammatical category is " + lexical_entry.get_partOfSpeech() + "." + EOL,
         "LexicalEntry.status" : lambda lexical_entry: "Warning: " + lexical_entry.get_status() + " version!" + EOL
     })
     my_order = ["LexicalEntry.id", "Lemma.lexeme", "LexicalEntry.partOfSpeech", "LexicalEntry.status"]
     def lmf2tex(entry, font):
         result = ""
         for attribute in my_order:
             result += my_lmf_tex[attribute](entry)
         return result
     # Write LaTeX file and test result
     tex_write(lexical_resource, tex_filename, None, None, lmf2tex, font)
     tex_file = open(tex_filename, "r")
     expected_lines = [
         "\\newpage" + EOL,
         "\\section*{\\centering- \\textbf{\ipa{H}} \\textbf{\ipa{h}} -}" + EOL,
         #"\\pdfbookmark[1]{\ipa{ H h }}{ H h }" + EOL,
         "The lexical entry 01 is hello." + EOL,
         "Its grammatical category is toto." + EOL,
         "Warning: draft version!" + EOL,
         "\lhead{\\firstmark}" + EOL,
         "\\rhead{\\botmark}" + EOL,
         EOL
         ]
     self.assertListEqual(begin_lines + expected_lines + end_lines, tex_file.readlines())
     tex_file.close()
     del lexical_entry.lemma
     lexical_entry.lemma = None
     del lexical_entry, lexicon
     lexicon = None
     del lexical_resource
     # Remove LaTeX file
     os.remove(tex_filename)
Example #7
class TestLexiconFunctions(unittest.TestCase):
    def setUp(self):
        # Instantiate a Lexicon object
        self.lexicon = Lexicon()

    def tearDown(self):
        # Release instantiated objects
        del self.lexicon

    def test_init(self):
        self.assertIsNone(self.lexicon.language)
        self.assertIsNone(self.lexicon.languageScript)
        self.assertIsNone(self.lexicon.id)
        self.assertIsNone(self.lexicon.label)
        self.assertIsNone(self.lexicon.lexiconType)
        self.assertIsNone(self.lexicon.entrySource)
        self.assertIsNone(self.lexicon.vowelHarmony)
        self.assertListEqual(self.lexicon.lexical_entry, [])
        self.assertIsNone(self.lexicon.localPath)

    def test_set_id(self):
        id = "English lexicon"
        self.assertEqual(self.lexicon.set_id(id), self.lexicon)
        self.assertEqual(self.lexicon.id, id)

    def test_get_id(self):
        self.assertIs(self.lexicon.get_id(), self.lexicon.id)

    def test_set_language(self):
        language = "eng"
        self.assertEqual(self.lexicon.set_language(language), self.lexicon)
        self.assertEqual(self.lexicon.language, language)

    def test_get_language(self):
        self.assertIs(self.lexicon.get_language(), self.lexicon.language)

    def test_set_languageScript(self):
        script = "latn"
        self.assertEqual(self.lexicon.set_languageScript(script), self.lexicon)
        self.assertEqual(self.lexicon.languageScript, script)

    def test_get_languageScript(self):
        self.assertIs(self.lexicon.get_languageScript(),
                      self.lexicon.languageScript)

    def test_set_label(self):
        label = "online dictionary"
        self.assertEqual(self.lexicon.set_label(label), self.lexicon)
        self.assertEqual(self.lexicon.label, label)

    def test_get_label(self):
        self.assertIs(self.lexicon.get_label(), self.lexicon.label)

    def test_set_lexiconType(self):
        type = "bilingual dictionary"
        self.assertEqual(self.lexicon.set_lexiconType(type), self.lexicon)
        self.assertEqual(self.lexicon.lexiconType, type)

    def test_get_lexiconType(self):
        self.assertIs(self.lexicon.get_lexiconType(), self.lexicon.lexiconType)

    def test_set_entrySource(self):
        source = "test.txt"
        self.assertEqual(self.lexicon.set_entrySource(source), self.lexicon)
        self.assertEqual(self.lexicon.entrySource, source)

    def test_get_entrySource(self):
        self.assertIs(self.lexicon.get_entrySource(), self.lexicon.entrySource)

    def test_set_vowelHarmony(self):
        test = False
        try:
            self.lexicon.set_vowelHarmony(None)
        except NotImplementedError:
            test = True
        self.assertTrue(test)

    def test_get_vowelHarmony(self):
        test = False
        try:
            self.lexicon.get_vowelHarmony()
        except NotImplementedError:
            test = True
        self.assertTrue(test)

    def test_set_localPath(self):
        path = "/full/local/path/to/audio/files/"
        self.assertEqual(self.lexicon.set_localPath(path), self.lexicon)
        self.assertEqual(self.lexicon.localPath, path)

    def test_get_localPath(self):
        self.assertIs(self.lexicon.get_localPath(), self.lexicon.localPath)

    def test_get_lexical_entries(self):
        # Create lexical entries
        entry1 = LexicalEntry()
        entry2 = LexicalEntry()
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry1, entry2]
        # Test get lexical entries
        self.assertListEqual(self.lexicon.get_lexical_entries(),
                             [entry1, entry2])
        self.lexicon.lexical_entry.append(entry1)
        self.assertListEqual(self.lexicon.get_lexical_entries(),
                             [entry1, entry2, entry1])
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2

    def test_add_lexical_entry(self):
        # Create lexical entries
        entry1 = LexicalEntry()
        entry2 = LexicalEntry()
        # Test add entries to the lexicon
        self.assertEqual(self.lexicon.add_lexical_entry(entry1), self.lexicon)
        self.assertListEqual(self.lexicon.lexical_entry, [entry1])
        self.assertEqual(self.lexicon.add_lexical_entry(entry2), self.lexicon)
        self.assertListEqual(self.lexicon.lexical_entry, [entry1, entry2])
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2

    def test_remove_lexical_entry(self):
        # Create lexical entries
        entry1 = LexicalEntry()
        entry2 = LexicalEntry()
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry1, entry2]
        # Test remove lexical entries
        self.assertEqual(self.lexicon.remove_lexical_entry(entry1),
                         self.lexicon)
        self.assertListEqual(self.lexicon.lexical_entry, [entry2])
        self.assertEqual(self.lexicon.remove_lexical_entry(entry2),
                         self.lexicon)
        self.assertListEqual(self.lexicon.lexical_entry, [])
        # Release LexicalEntry instances
        del entry1, entry2

    def test_count_lexical_entries(self):
        # Create lexical entries
        entry1 = LexicalEntry()
        entry2 = LexicalEntry()
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry1]
        # Test count lexical entries
        self.assertEqual(self.lexicon.count_lexical_entries(), 1)
        self.lexicon.lexical_entry.append(entry2)
        self.assertEqual(self.lexicon.count_lexical_entries(), 2)
        self.lexicon.lexical_entry.append(entry1)
        self.assertEqual(self.lexicon.count_lexical_entries(), 3)
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2

    def test_sort_homonym_numbers(self):
        # Create several lexical entries
        entry1 = LexicalEntry().set_lexeme("aa").set_homonymNumber("2")
        entry2 = LexicalEntry().set_lexeme("aa").set_homonymNumber("1")
        entry3 = LexicalEntry().set_lexeme("ab")
        entry4 = LexicalEntry().set_lexeme("ba")
        entry5 = LexicalEntry().set_lexeme("bb").set_homonymNumber("6")
        entry6 = LexicalEntry().set_lexeme("bb").set_homonymNumber("5")
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [
            entry1, entry2, entry3, entry4, entry5, entry6
        ]
        # Test sort homonym numbers
        self.assertListEqual(self.lexicon.sort_homonym_numbers(),
                             [entry2, entry1, entry3, entry4, entry6, entry5])
        self.assertListEqual(self.lexicon.lexical_entry,
                             [entry2, entry1, entry3, entry4, entry6, entry5])
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2, entry3, entry4, entry5, entry6

    def test_sort_lexical_entries(self):
        # Create several lexical entries with different lexemes
        entry1 = LexicalEntry().set_lexeme("aa")
        entry2 = LexicalEntry().set_lexeme("ab")
        entry3 = LexicalEntry().set_lexeme("ba")
        entry4 = LexicalEntry().set_lexeme("bb")
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry4, entry1, entry2, entry3]
        # Test sort lexical entries
        self.assertListEqual(self.lexicon.sort_lexical_entries(),
                             [entry1, entry2, entry3, entry4])
        self.assertListEqual(self.lexicon.lexical_entry,
                             [entry1, entry2, entry3, entry4])
        # Provide a sort order
        my_order = dict({'A': 1.1, 'a': 1.2, 'B': 2.1, 'b': 2.2})
        my_unicode_order = ({})
        for key in my_order.keys():
            my_unicode_order.update(
                {key.decode(encoding='utf8'): my_order[key]})
        entry5 = LexicalEntry().set_lexeme("Aa")
        entry6 = LexicalEntry().set_lexeme("bB")
        self.lexicon.lexical_entry.append(entry5)
        self.lexicon.lexical_entry.append(entry6)
        self.assertListEqual(
            self.lexicon.sort_lexical_entries(sort_order=my_order),
            [entry5, entry1, entry2, entry3, entry6, entry4])
        self.assertListEqual(self.lexicon.lexical_entry,
                             [entry5, entry1, entry2, entry3, entry6, entry4])
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2, entry3, entry4, entry5, entry6

    def test_find_lexical_entries(self):
        # Create several lexical entries with different lexemes
        entry1 = LexicalEntry().set_lexeme("Hello")
        entry2 = LexicalEntry().set_lexeme("world!")
        entry3 = LexicalEntry().set_lexeme("hello")
        entry4 = LexicalEntry().set_lexeme("world")
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry1, entry2, entry3, entry4]
        # Test find lexical entries
        self.assertListEqual(
            self.lexicon.find_lexical_entries(
                lambda entry: entry.get_lexeme() == "Hello"), [entry1])

        def test_filter(entry):
            return entry.get_lexeme().lower() == "hello"

        # List is randomly ordered => create a set to avoid random results
        self.assertEqual(set(self.lexicon.find_lexical_entries(test_filter)),
                         set([entry1, entry3]))
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2, entry3, entry4

    def test_check_cross_references(self):
        # Create lexical entries with lexemes and related lexemes
        entry1 = LexicalEntry().set_lexeme(
            "Hello").create_and_add_related_form("world!", "main entry")
        entry2 = LexicalEntry().set_lexeme(
            "world!").create_and_add_related_form("Hello", "subentry")
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry1, entry2]
        # Test check cross references
        self.assertIs(self.lexicon.check_cross_references(), self.lexicon)
        self.assertIs(entry1.related_form[0].get_lexical_entry(), entry2)
        self.assertIs(entry2.related_form[0].get_lexical_entry(), entry1)
        # Test warning case: entry not found
        entry3 = LexicalEntry().set_lexeme(
            "hello").create_and_add_related_form("world", "main entry")
        self.lexicon.lexical_entry.append(entry3)
        self.lexicon.reset_check()
        self.lexicon.check_cross_references()
        # Retrieve nominal case
        entry4 = LexicalEntry().set_lexeme("world")
        self.lexicon.lexical_entry.append(entry4)
        self.lexicon.reset_check()
        self.assertIs(self.lexicon.check_cross_references(), self.lexicon)
        self.assertIs(entry3.related_form[0].get_lexical_entry(), entry4)
        # Test warning case: several entries found
        entry5 = LexicalEntry().set_lexeme("world")
        self.lexicon.lexical_entry.append(entry5)
        self.lexicon.reset_check()
        self.lexicon.check_cross_references()
        # Test check cross references with homonym number
        entry3.related_form[0].set_lexical_entry(None)
        entry3.related_form[0].targets = "world2"
        entry4.homonymNumber = "1"
        entry5.homonymNumber = "2"
        self.lexicon.reset_check()
        self.assertIs(self.lexicon.check_cross_references(), self.lexicon)
        self.assertIs(entry3.related_form[0].get_lexical_entry(), entry5)
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2, entry3, entry4, entry5

    def test_convert_to_latex(self):
        pass
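
The setter tests above all assert that each setter returns the Lexicon itself, so calls can be chained; a small hedged sketch of building a lexicon that way (the values are illustrative):

# Sketch only: setters return self, as the assertions above check.
lexicon = Lexicon("English lexicon").set_language("eng").set_languageScript("latn")
lexicon.set_label("online dictionary").add_lexical_entry(LexicalEntry().set_lexeme("hello"))
print lexicon.count_lexical_entries()    # expected: 1
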
Example #8
File: mdf.py Project: buret/pylmflib
def mdf_read(filename=None, mdf2lmf=mdf_lmf, lexicon=None, id=None, encoding=ENCODING):
    """! @brief Read an MDF file.
    @param filename The name of the MDF file to read with full path, for instance 'user/input.txt'.
    @param mdf2lmf A Python dictionary describing the mapping between MDF markers and LMF representation. Default value is 'mdf_lmf' dictionary defined in 'pylmflib/config/mdf.py'. Please refer to it as an example.
    @param lexicon An existing Lexicon to fill with lexical entries to read.
    @param id A Python string identifying the lexicon to create.
    @param encoding Use 'utf-8' encoding by default. Otherwise, the user has to specify the native encoding of the document.
    @return A Lexicon instance containing all lexical entries.
    """
    import re
    # If not provided, create a Lexicon instance to contain all lexical entries
    if lexicon is None:
        lexicon = Lexicon(id)
    # If no filename is given, fall back to the lexicon's entry source
    if filename is None:
        filename = lexicon.get_entrySource()
    else:
        # Set lexicon attribute
        lexicon.set_entrySource(filename)
    # Read in unicode
    mdf_file = open_read(filename, encoding=encoding)
    # MDF syntax is the following: '\marker value'
    mdf_pattern = """^\\\(\w*) (<(.*)>)? ?(.*)$"""
    # Add each lexical entry to the lexicon
    current_entry = None
    sub_entry = None
    component = None
    main_entry = None
    for line in mdf_file.readlines():
        # Do not parse empty lines
        if line != EOL:
            result = re.match(mdf_pattern, line)
            if result is None:
                # Line does not match the MDF pattern => skip it and parse the next line
                continue
            marker = result.group(1)
            attrs = result.group(3)
            value = result.group(4)
            # Do not consider markers starting with an underscore character (e.g. '_sh' and '_DateStampHasFourDigitYear')
            if marker[0] == '_':
                continue
            # Remove trailing spaces and end-of-line characters
            value = value.rstrip(' \r\n')
            # Do not consider empty fields
            if value == "":
                continue
            # Check if the current entry is a multiword expression
            is_mwe = False
            if marker == "lf":
                lf = value.split(" = ")
                if lf[0].startswith("Component"):
                    component_nb = lf[0].lstrip("Component")
                    value = lf[1]
                    is_mwe = True
            # 'lx' and 'se' markers indicate a new entry
            if marker == "lx" or marker == "se" or is_mwe:
                # Compute a unique identifier
                uid = uni2sampa(value)
                if marker == "se":
                    # Create a subentry
                    sub_entry = LexicalEntry(uid)
                    # An MDF subentry corresponds to an LMF lexical entry
                    mdf2lmf["lx"](value, sub_entry)
                    # Add it to the lexicon
                    lexicon.add_lexical_entry(sub_entry)
                    # Manage main entry
                    if main_entry is None:
                        main_entry = current_entry
                    else:
                        current_entry = main_entry
                    # Set main entry
                    homonym_nb = current_entry.get_homonymNumber()
                    if homonym_nb is None:
                        homonym_nb = ""
                    sub_entry.create_and_add_related_form(current_entry.get_lexeme() + homonym_nb, "main entry")
                elif is_mwe:
                    # Create a subentry
                    component = LexicalEntry(uid)
                    # An MDF subentry corresponds to an LMF lexical entry
                    mdf2lmf["lx"](value, component)
                    # Add it to the lexicon
                    lexicon.add_lexical_entry(component)
                    # Manage current entry
                    if sub_entry is not None:
                        current_entry = sub_entry
                    # Set component
                    homonym_nb = current_entry.get_homonymNumber()
                    if homonym_nb is None:
                        homonym_nb = ""
                    current_entry.create_and_add_component(component_nb, value)
                    component.create_and_add_related_form(current_entry.get_lexeme() + homonym_nb, "complex predicate")
                    component.set_independentWord(False)
                else:
                    # Create a new entry
                    current_entry = LexicalEntry(uid)
                    # Add it to the lexicon
                    lexicon.add_lexical_entry(current_entry)
                    # Reset main entry
                    main_entry = None
            # Map MDF marker and value to LMF representation
            try:
                if attrs is not None:
                    # There are attributes
                    attributes = {}
                    # Remove quotation marks from attributes if any
                    attrs = attrs.replace('"', '')
                    for attr in attrs.split(' '):
                        attributes.update({attr.split('=')[0] : attr.split('=')[1]})
                    # A customized marker starts with '__' characters
                    mdf2lmf["__" + marker](attributes, value, current_entry)
                else:
                    mdf2lmf[marker](value, current_entry)
                if sub_entry is not None:
                    current_entry = sub_entry
                    sub_entry = None
                if component is not None:
                    sub_entry = current_entry
                    current_entry = component
                    component = None
            except KeyError:
                # When printing, we need to convert 'unicode' into 'str' using 'utf-8' encoding:
                print Warning("MDF marker '%s' encountered for lexeme '%s' is not defined in configuration" % (marker.encode(ENCODING), current_entry.get_lexeme().encode(ENCODING)))
            except Error as exception:
                exception.handle()
    mdf_file.close()
    return lexicon
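
Since this is the same reader as Example #5, here is a different hedged sketch: overriding one marker handler. From the call sites above, a plain handler is invoked as mdf2lmf[marker](value, current_entry), while a marker carrying <attr="..."> attributes is looked up under a '__'-prefixed key and invoked with (attributes, value, current_entry). Copying and overriding the default mdf_lmf dictionary is an assumption about how it is meant to be customized:

# Sketch only: start from the default mapping and replace the 'ps' handler.
# Direct attribute assignment mirrors how the unit tests set partOfSpeech.
my_mdf2lmf = dict(mdf_lmf)
my_mdf2lmf["ps"] = lambda value, lexical_entry: setattr(lexical_entry, "partOfSpeech", value)
lexicon = mdf_read("user/input.txt", mdf2lmf=my_mdf2lmf)
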
Example #9
class TestLexiconFunctions(unittest.TestCase):

    def setUp(self):
        # Instantiate a Lexicon object
        self.lexicon = Lexicon()

    def tearDown(self):
        # Release instantiated objects
        del self.lexicon

    def test_init(self):
        self.assertIsNone(self.lexicon.language)
        self.assertIsNone(self.lexicon.languageScript)
        self.assertIsNone(self.lexicon.id)
        self.assertIsNone(self.lexicon.label)
        self.assertIsNone(self.lexicon.lexiconType)
        self.assertIsNone(self.lexicon.entrySource)
        self.assertIsNone(self.lexicon.vowelHarmony)
        self.assertListEqual(self.lexicon.lexical_entry, [])
        self.assertIsNone(self.lexicon.localPath)

    def test_set_id(self):
        id = "English lexicon"
        self.assertEqual(self.lexicon.set_id(id), self.lexicon)
        self.assertEqual(self.lexicon.id, id)

    def test_get_id(self):
        self.assertIs(self.lexicon.get_id(), self.lexicon.id)

    def test_set_language(self):
        language = "eng"
        self.assertEqual(self.lexicon.set_language(language), self.lexicon)
        self.assertEqual(self.lexicon.language, language)

    def test_get_language(self):
        self.assertIs(self.lexicon.get_language(), self.lexicon.language)

    def test_set_languageScript(self):
        script = "latn"
        self.assertEqual(self.lexicon.set_languageScript(script), self.lexicon)
        self.assertEqual(self.lexicon.languageScript, script)

    def test_get_languageScript(self):
        self.assertIs(self.lexicon.get_languageScript(), self.lexicon.languageScript)

    def test_set_label(self):
        label = "online dictionary"
        self.assertEqual(self.lexicon.set_label(label), self.lexicon)
        self.assertEqual(self.lexicon.label, label)

    def test_get_label(self):
        self.assertIs(self.lexicon.get_label(), self.lexicon.label)

    def test_set_lexiconType(self):
        type = "bilingual dictionary"
        self.assertEqual(self.lexicon.set_lexiconType(type), self.lexicon)
        self.assertEqual(self.lexicon.lexiconType, type)

    def test_get_lexiconType(self):
        self.assertIs(self.lexicon.get_lexiconType(), self.lexicon.lexiconType)

    def test_set_entrySource(self):
        source = "test.txt"
        self.assertEqual(self.lexicon.set_entrySource(source), self.lexicon)
        self.assertEqual(self.lexicon.entrySource, source)

    def test_get_entrySource(self):
        self.assertIs(self.lexicon.get_entrySource(), self.lexicon.entrySource)

    def test_set_vowelHarmony(self):
        test = False
        try:
            self.lexicon.set_vowelHarmony(None)
        except NotImplementedError:
            test = True
        self.assertTrue(test)

    def test_get_vowelHarmony(self):
        test = False
        try:
            self.lexicon.get_vowelHarmony()
        except NotImplementedError:
            test = True
        self.assertTrue(test)

    def test_set_localPath(self):
        path = "/full/local/path/to/audio/files/"
        self.assertEqual(self.lexicon.set_localPath(path), self.lexicon)
        self.assertEqual(self.lexicon.localPath, path)

    def test_get_localPath(self):
        self.assertIs(self.lexicon.get_localPath(), self.lexicon.localPath)

    def test_get_lexical_entries(self):
        # Create lexical entries
        entry1 = LexicalEntry()
        entry2 = LexicalEntry()
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry1, entry2]
        # Test get lexical entries
        self.assertListEqual(self.lexicon.get_lexical_entries(), [entry1, entry2])
        self.lexicon.lexical_entry.append(entry1)
        self.assertListEqual(self.lexicon.get_lexical_entries(), [entry1, entry2, entry1])
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2

    def test_add_lexical_entry(self):
        # Create lexical entries
        entry1 = LexicalEntry()
        entry2 = LexicalEntry()
        # Test add entries to the lexicon
        self.assertEqual(self.lexicon.add_lexical_entry(entry1), self.lexicon)
        self.assertListEqual(self.lexicon.lexical_entry, [entry1])
        self.assertEqual(self.lexicon.add_lexical_entry(entry2), self.lexicon)
        self.assertListEqual(self.lexicon.lexical_entry, [entry1, entry2])
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2

    def test_remove_lexical_entry(self):
        # Create lexical entries
        entry1 = LexicalEntry()
        entry2 = LexicalEntry()
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry1, entry2]
        # Test remove lexical entries
        self.assertEqual(self.lexicon.remove_lexical_entry(entry1), self.lexicon)
        self.assertListEqual(self.lexicon.lexical_entry, [entry2])
        self.assertEqual(self.lexicon.remove_lexical_entry(entry2), self.lexicon)
        self.assertListEqual(self.lexicon.lexical_entry, [])
        # Release LexicalEntry instances
        del entry1, entry2

    def test_count_lexical_entries(self):
        # Create lexical entries
        entry1 = LexicalEntry()
        entry2 = LexicalEntry()
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry1]
        # Test count lexical entries
        self.assertEqual(self.lexicon.count_lexical_entries(), 1)
        self.lexicon.lexical_entry.append(entry2)
        self.assertEqual(self.lexicon.count_lexical_entries(), 2)
        self.lexicon.lexical_entry.append(entry1)
        self.assertEqual(self.lexicon.count_lexical_entries(), 3)
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2

    def test_sort_homonym_numbers(self):
        # Create several lexical entries
        entry1 = LexicalEntry().set_lexeme("aa").set_homonymNumber("2")
        entry2 = LexicalEntry().set_lexeme("aa").set_homonymNumber("1")
        entry3 = LexicalEntry().set_lexeme("ab")
        entry4 = LexicalEntry().set_lexeme("ba")
        entry5 = LexicalEntry().set_lexeme("bb").set_homonymNumber("6")
        entry6 = LexicalEntry().set_lexeme("bb").set_homonymNumber("5")
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry1, entry2, entry3, entry4, entry5, entry6]
        # Test sort homonym numbers
        self.assertListEqual(self.lexicon.sort_homonym_numbers(), [entry2, entry1, entry3, entry4, entry6, entry5])
        self.assertListEqual(self.lexicon.lexical_entry, [entry2, entry1, entry3, entry4, entry6, entry5])
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2, entry3, entry4, entry5, entry6

    def test_sort_lexical_entries(self):
        # Create several lexical entries with different lexemes
        entry1 = LexicalEntry().set_lexeme("aa")
        entry2 = LexicalEntry().set_lexeme("ab")
        entry3 = LexicalEntry().set_lexeme("ba")
        entry4 = LexicalEntry().set_lexeme("bb")
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry4, entry1, entry2, entry3]
        # Test sort lexical entries
        self.assertListEqual(self.lexicon.sort_lexical_entries(), [entry1, entry2, entry3, entry4])
        self.assertListEqual(self.lexicon.lexical_entry, [entry1, entry2, entry3, entry4])
        # Provide a sort order
        my_order = dict({'A':1.1, 'a':1.2, 'B':2.1, 'b':2.2})
        my_unicode_order = ({})
        for key in my_order.keys():
            my_unicode_order.update({key.decode(encoding='utf8'):my_order[key]})
        entry5 = LexicalEntry().set_lexeme("Aa")
        entry6 = LexicalEntry().set_lexeme("bB")
        self.lexicon.lexical_entry.append(entry5)
        self.lexicon.lexical_entry.append(entry6)
        self.assertListEqual(self.lexicon.sort_lexical_entries(sort_order=my_order), [entry5, entry1, entry2, entry3, entry6, entry4])
        self.assertListEqual(self.lexicon.lexical_entry, [entry5, entry1, entry2, entry3, entry6, entry4])
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2, entry3, entry4, entry5, entry6

    def test_find_lexical_entries(self):
        # Create several lexical entries with different lexemes
        entry1 = LexicalEntry().set_lexeme("Hello")
        entry2 = LexicalEntry().set_lexeme("world!")
        entry3 = LexicalEntry().set_lexeme("hello")
        entry4 = LexicalEntry().set_lexeme("world")
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry1, entry2, entry3, entry4]
        # Test find lexical entries
        self.assertListEqual(self.lexicon.find_lexical_entries(lambda entry: entry.get_lexeme() == "Hello"), [entry1])
        def test_filter(entry):
            return entry.get_lexeme().lower() == "hello"
        # List is randomly ordered => create a set to avoid random results
        self.assertEqual(set(self.lexicon.find_lexical_entries(test_filter)), set([entry1, entry3]))
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2, entry3, entry4

    def test_check_cross_references(self):
        # Create lexical entries with lexemes and related lexemes
        entry1 = LexicalEntry().set_lexeme("Hello").create_and_add_related_form("world!", "main entry")
        entry2 = LexicalEntry().set_lexeme("world!").create_and_add_related_form("Hello", "subentry")
        # Add entries to the lexicon
        self.lexicon.lexical_entry = [entry1, entry2]
        # Test check cross references
        self.assertIs(self.lexicon.check_cross_references(), self.lexicon)
        self.assertIs(entry1.related_form[0].get_lexical_entry(), entry2)
        self.assertIs(entry2.related_form[0].get_lexical_entry(), entry1)
        # Test warning case: entry not found
        entry3 = LexicalEntry().set_lexeme("hello").create_and_add_related_form("world", "main entry")
        self.lexicon.lexical_entry.append(entry3)
        self.lexicon.reset_check()
        self.lexicon.check_cross_references()
        # Retrieve nominal case
        entry4 = LexicalEntry().set_lexeme("world")
        self.lexicon.lexical_entry.append(entry4)
        self.lexicon.reset_check()
        self.assertIs(self.lexicon.check_cross_references(), self.lexicon)
        self.assertIs(entry3.related_form[0].get_lexical_entry(), entry4)
        # Test warning case: several entries found
        entry5 = LexicalEntry().set_lexeme("world")
        self.lexicon.lexical_entry.append(entry5)
        self.lexicon.reset_check()
        self.lexicon.check_cross_references()
        # Test check cross references with homonym number
        entry3.related_form[0].set_lexical_entry(None)
        entry3.related_form[0].targets = "world2"
        entry4.homonymNumber = "1"
        entry5.homonymNumber = "2"
        self.lexicon.reset_check()
        self.assertIs(self.lexicon.check_cross_references(), self.lexicon)
        self.assertIs(entry3.related_form[0].get_lexical_entry(), entry5)
        # Release LexicalEntry instances
        del self.lexicon.lexical_entry[:]
        del entry1, entry2, entry3, entry4, entry5

    def test_convert_to_latex(self):
        pass