    def test_simple_ig_2(self):
        # Write rules

        l_rules = []

        # Initialization rules

        l_rules.append(ProductionRule("S", "Cinit", "end"))
        l_rules.append(ProductionRule("Cinit", "C", "b"))
        l_rules.append(ConsumptionRule("end", "C", "T"))
        l_rules.append(EndRule("T", "epsilon"))

        # C[b sigma] -> C[cm sigma] c b C[sigma]

        l_rules.append(ConsumptionRule("b", "C", "B"))
        l_rules.append(DuplicationRule("B", "A1", "D"))
        l_rules.append(ConsumptionRule("b", "A1", "A1"))
        l_rules.append(ConsumptionRule("bm", "A1", "A1"))
        l_rules.append(ConsumptionRule("c", "A1", "A1"))
        l_rules.append(ConsumptionRule("cm", "A1", "A1"))
        l_rules.append(ConsumptionRule("end", "A1", "Abackm2"))
        l_rules.append(ProductionRule("Abackm2", "Abackm1", "end"))
        l_rules.append(ProductionRule("Abackm1", "C", "cm"))
        l_rules.append(DuplicationRule("D", "E0", "C"))
        l_rules.append(DuplicationRule("E0", "F0", "E1"))
        l_rules.append(DuplicationRule("E1", "F1", "E2"))
        l_rules.append(EndRule("E2", "epsilon"))
        l_rules.append(EndRule("F0", "c"))
        l_rules.append(EndRule("F1", "b"))

        rules = Rules(l_rules)
        i_grammar = IndexedGrammar(rules)
        self.assertTrue(i_grammar.is_empty())
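For orientation, the four rule classes used throughout these tests appear to behave as follows: ProductionRule pushes an index, ConsumptionRule pops one, DuplicationRule copies the index stack to two children, and EndRule rewrites to a terminal. Below is a minimal sketch of the non-empty pattern the initialization rules build on, assuming the classes come from pyformlang's indexed_grammar module (the snippets themselves never show their imports):

from pyformlang.indexed_grammar import (
    Rules, ProductionRule, ConsumptionRule, EndRule, IndexedGrammar)

# Push "end", pop it, terminate: the grammar derives the empty word.
l_rules = [
    ProductionRule("S", "C", "end"),   # S[sigma] -> C[end sigma]
    ConsumptionRule("end", "C", "T"),  # C[end sigma] -> T[sigma]
    EndRule("T", "epsilon"),           # T[sigma] -> epsilon
]
print(IndexedGrammar(Rules(l_rules)).is_empty())  # expected: False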
    def test_intersection_indexed_grammar(self):
        """ Test the intersection with indexed grammar """
        l_rules = []
        rules = Rules(l_rules)
        indexed_grammar = IndexedGrammar(rules)
        fst = FST()
        intersection = fst & indexed_grammar
        self.assertTrue(intersection.is_empty())

        l_rules.append(ProductionRule("S", "D", "f"))
        l_rules.append(DuplicationRule("D", "A", "B"))
        l_rules.append(ConsumptionRule("f", "A", "Afinal"))
        l_rules.append(ConsumptionRule("f", "B", "Bfinal"))
        l_rules.append(EndRule("Afinal", "a"))
        l_rules.append(EndRule("Bfinal", "b"))

        rules = Rules(l_rules)
        indexed_grammar = IndexedGrammar(rules)
        intersection = fst.intersection(indexed_grammar)
        self.assertTrue(intersection.is_empty())

        fst.add_start_state("q0")
        fst.add_final_state("final")
        fst.add_transition("q0", "a", "q1", ["a"])
        fst.add_transition("q1", "b", "final", ["b"])
        intersection = fst.intersection(indexed_grammar)
        self.assertFalse(intersection.is_empty())
 def test_simple_ig7(self):
     """ Test 7 """
     l_rules = []
     l_rules.append(ProductionRule("S", "A", "end"))
     l_rules.append(ConsumptionRule("end", "A", "S"))
     l_rules.append(DuplicationRule("A", "B", "C"))
     l_rules.append(EndRule("B", "b"))
     l_rules.append(EndRule("C", "c"))
     rules = Rules(l_rules)
     i_grammar = IndexedGrammar(rules)
     self.assertFalse(i_grammar.is_empty())
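test_simple_ig7 above is the simplest non-empty case in this group. A short derivation sketch, under the assumption that ProductionRule pushes an index, DuplicationRule copies the index stack to both children, and EndRule rewrites to a terminal:

# S[]    -> A[end]          ProductionRule("S", "A", "end") pushes "end"
# A[end] -> B[end] C[end]   DuplicationRule("A", "B", "C")
# B[end] -> b                EndRule("B", "b")
# C[end] -> c                EndRule("C", "c")
# The grammar therefore derives "bc", so is_empty() is False.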
 def _test_intersection(self):
     """ Tests the intersection of indexed grammar with regex
     Long to run!
     """
     l_rules = []
     l_rules.append(ProductionRule("S", "D", "f"))
     l_rules.append(DuplicationRule("D", "A", "B"))
     l_rules.append(ConsumptionRule("f", "A", "Afinal"))
     l_rules.append(ConsumptionRule("f", "B", "Bfinal"))
     l_rules.append(EndRule("Afinal", "a"))
     l_rules.append(EndRule("Bfinal", "b"))
     rules = Rules(l_rules, 6)
     indexed_grammar = IndexedGrammar(rules)
     i_inter = indexed_grammar.intersection(Regex("(a|b)*"))
     self.assertFalse(i_inter.is_empty())
 def test_intersection(self):
     """ Tests the intersection of indexed grammar with regex
     Long to run!
     """
     l_rules = [
         ProductionRule("S", "D", "f"),
         DuplicationRule("D", "A", "B"),
         ConsumptionRule("f", "A", "Afinal"),
         ConsumptionRule("f", "B", "Bfinal"),
         EndRule("Afinal", "a"),
         EndRule("Bfinal", "b")
     ]
     rules = Rules(l_rules, 6)
     indexed_grammar = IndexedGrammar(rules)
     i_inter = indexed_grammar.intersection(Regex("a.b"))
     self.assertTrue(i_inter)
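The last assertion only checks that the intersection object is truthy, which any object is. Since the grammar generates exactly "ab" and Regex("a.b") denotes that single word, a stricter check in the style of the surrounding tests would be (a sketch, not the original test):

     self.assertFalse(i_inter.is_empty())  # the intersection is {"ab"}, hence non-empty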
    def test_start_symbol(self):
        """ Tests the change of the start symbol """
        l_rules = []
        l_rules.append(EndRule("S", "s"))
        rules = Rules(l_rules)
        i_grammar = IndexedGrammar(rules, "S2")
        self.assertTrue(i_grammar.is_empty())

        i_grammar = IndexedGrammar(rules, "S")
        self.assertFalse(i_grammar.is_empty())

        l_rules = []
        l_rules.append(EndRule("S2", "s"))
        rules = Rules(l_rules)
        i_grammar = IndexedGrammar(rules, start_variable="S2")
        self.assertFalse(i_grammar.is_empty())
 def test_rules(self):
     """ Tests the rules """
     l_rules = []
     l_rules.append(ProductionRule("S", "Cinit", "end"))
     l_rules.append(ProductionRule("Cinit", "C", "b"))
     l_rules.append(ConsumptionRule("end", "C", "T"))
     l_rules.append(EndRule("T", "epsilon"))
     l_rules.append(ConsumptionRule("b", "C", "B0"))
     l_rules.append(DuplicationRule("B0", "A0", "C"))
     l_rules.append(EndRule("A0", "b"))
     rules = Rules(l_rules)
     self.assertEqual(rules.terminals, {"b", "end", "epsilon"})
     self.assertEqual(rules.length, (5, 2))
     rules.remove_production("S", "Cinit", "end")
     self.assertEqual(rules.length, (4, 2))
     rules.add_production("S", "Cinit", "end")
     self.assertEqual(rules.length, (5, 2))
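Judging from these assertions, rules.length appears to report the pair (number of non-consumption rules, number of consumption rules). A quick sanity sketch over the list built in this test (pure Python, nothing beyond isinstance):

     n_consumption = sum(isinstance(rule, ConsumptionRule) for rule in l_rules)
     n_other = len(l_rules) - n_consumption
     print((n_other, n_consumption))  # (5, 2), matching rules.length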
 def _extract_fst_delta_intersection(self, new_rules):
     """ Adds an end rule (state_p, terminal, state_q) -> symbol
     for each FST transition """
     for key in self._delta:
         state_p = key[0]
         terminal = key[1]
         for transition in self._delta[key]:
             state_q = transition[0]
             symbol = transition[1]
             new_rules.append(
                 EndRule(str((state_p, terminal, state_q)), symbol))
    def test_simple_ig_regular_expression(self):
        # Test for regular expression functions

        l_rules = []
        l_rules.append(ProductionRule("S", "Ci", "end"))
        l_rules.append(ProductionRule("Ci", "C", "q"))
        l_rules.append(ConsumptionRule("q", "C", "C0"))
        l_rules.append(ProductionRule("C0", "C0", "a-"))
        l_rules.append(DuplicationRule("C0", "T", "C"))
        l_rules.append(EndRule("T", "epsilon"))
        l_rules.append(ConsumptionRule("end", "C", "Cend"))
        l_rules.append(EndRule("Cend", "epsilon"))
        l_rules.append(ConsumptionRule("a-", "C", "C1"))
        l_rules.append(ConsumptionRule("a-", "C1", "C2"))
        l_rules.append(ConsumptionRule("a-", "C2", "C3"))
        l_rules.append(ConsumptionRule("a-", "C3", "C4"))
        l_rules.append(ConsumptionRule("a-", "C4", "C"))

        rules = Rules(l_rules)
        i_grammar = IndexedGrammar(rules)
        self.assertFalse(i_grammar.is_empty())
    def test_removal_useless(self):
        """ Tests the removal of useless symbols """
        l_rules = []

        l_rules.append(ProductionRule("S", "D", "f"))
        l_rules.append(DuplicationRule("D", "A", "B"))
        l_rules.append(ConsumptionRule("f", "A", "Afinal"))
        l_rules.append(ConsumptionRule("f", "B", "Bfinal"))
        l_rules.append(EndRule("Afinal", "a"))
        l_rules.append(EndRule("Bfinal", "b"))
        l_rules.append(ConsumptionRule("f", "A", "Q"))
        l_rules.append(EndRule("R", "b"))

        rules = Rules(l_rules)
        i_grammar = IndexedGrammar(rules, start_variable="S")
        i_grammar2 = i_grammar.remove_useless_rules()
        self.assertFalse(i_grammar.is_empty())
        self.assertEqual(i_grammar2.non_terminals,
                         i_grammar2.get_generating_non_terminals())
        self.assertEqual(i_grammar2.non_terminals,
                         i_grammar2.get_reachable_non_terminals())
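Given the two equalities just asserted, the symbols that made the last two rules useless should no longer appear after remove_useless_rules: "Q" never generates anything (it has no rules of its own) and "R" is unreachable from the start symbol "S". A small follow-up sketch, assuming non_terminals only reports symbols that still occur in the cleaned-up rules:

        self.assertNotIn("Q", i_grammar2.non_terminals)
        self.assertNotIn("R", i_grammar2.non_terminals)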
    def test_simple_ig_3(self):
        # Write rules

        l_rules = []

        # Initialization rules

        l_rules.append(ProductionRule("S", "Cinit", "end"))
        l_rules.append(ProductionRule("Cinit", "C", "b"))
        l_rules.append(ConsumptionRule("end", "C", "T"))
        l_rules.append(EndRule("T", "epsilon"))

        # C[cm sigma] -> cm C[sigma]

        l_rules.append(ConsumptionRule("cm", "C", "B0"))
        l_rules.append(DuplicationRule("B0", "A0", "C"))
        l_rules.append(EndRule("A0", "cm"))

        rules = Rules(l_rules)
        i_grammar = IndexedGrammar(rules)
        self.assertTrue(i_grammar.is_empty())
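This grammar is empty because the initialization rules only ever push the index "b" onto C's stack, while the only non-"end" consumption rule for C expects "cm". A sketch of the one-line change that makes it non-empty (compare test_simple_ig_0 further down, where the pushed and consumed indices agree):

        # Hypothetical variant: push "cm" instead of "b" so C[cm sigma] can fire.
        l_rules[1] = ProductionRule("Cinit", "C", "cm")
        rules = Rules(l_rules)
        self.assertFalse(IndexedGrammar(rules).is_empty())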
    def test_simple_ig6(self):
        """ Test number 6 """
        l_rules = []
        l_rules.append(DuplicationRule("S", "S", "B"))
        rules = Rules(l_rules)
        i_grammar = IndexedGrammar(rules)
        self.assertTrue(i_grammar.is_empty())

        l_rules = []
        l_rules.append(DuplicationRule("S", "B", "S"))
        rules = Rules(l_rules)
        i_grammar = IndexedGrammar(rules)
        self.assertTrue(i_grammar.is_empty())

        l_rules = []
        l_rules.append(DuplicationRule("S", "A", "B"))
        l_rules.append(EndRule("A", "a"))
        l_rules.append(EndRule("B", "b"))
        rules = Rules(l_rules)
        i_grammar = IndexedGrammar(rules)
        self.assertFalse(i_grammar.is_empty())
 def test_simple_ig9(self):
     """ Tests 9 {a^n b^n c^n}"""
     l_rules = []
     l_rules.append(ProductionRule("S", "T", "g"))
     l_rules.append(ProductionRule("T", "T", "f"))
     l_rules.append(DuplicationRule("T", "AB", "C"))
     l_rules.append(DuplicationRule("AB", "A", "B"))
     l_rules.append(ConsumptionRule("f", "A", "A2"))
     l_rules.append(ConsumptionRule("f", "B", "B2"))
     l_rules.append(ConsumptionRule("f", "C", "C2"))
     l_rules.append(DuplicationRule("A2", "Afinal", "A"))
     l_rules.append(DuplicationRule("B2", "Bfinal", "B"))
     l_rules.append(DuplicationRule("C2", "Cfinal", "C"))
     l_rules.append(EndRule("Afinal", "a"))
     l_rules.append(EndRule("Bfinal", "b"))
     l_rules.append(EndRule("Cfinal", "c"))
     l_rules.append(ConsumptionRule("g", "A", "Afinal"))
     l_rules.append(ConsumptionRule("g", "B", "Bfinal"))
     l_rules.append(ConsumptionRule("g", "C", "Cfinal"))
     rules = Rules(l_rules)
     i_grammar = IndexedGrammar(rules)
     self.assertFalse(i_grammar.is_empty())
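Since this grammar generates {a^n b^n c^n : n >= 1}, a natural extra probe is the regex intersection used in the other tests. A sketch, assuming Regex lives in pyformlang.regular_expression and that "." is concatenation as in test_intersection above (possibly slow to run):

     from pyformlang.regular_expression import Regex
     i_inter = i_grammar.intersection(Regex("a.b.c"))
     self.assertFalse(i_inter.is_empty())  # "abc" is the n = 1 word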
 def test_simple_ig8(self):
     """ Tests 8 """
     l_rules = []
     l_rules.append(ProductionRule("S", "Q", "end"))
     l_rules.append(ProductionRule("Q", "A", "end"))
     l_rules.append(ConsumptionRule("end", "A", "B"))
     l_rules.append(ConsumptionRule("end", "A", "C"))
     l_rules.append(ConsumptionRule("end", "A", "D"))
     l_rules.append(DuplicationRule("C", "G", "E"))
     l_rules.append(DuplicationRule("E", "G", "F"))
     l_rules.append(DuplicationRule("F", "G", "G"))
     l_rules.append(EndRule("G", "G"))
     rules = Rules(l_rules)
     i_grammar = IndexedGrammar(rules)
     self.assertFalse(i_grammar.is_empty())
    def test_simple_ig_0(self):
        """Test"""
        # Write rules

        l_rules = []

        # Initialization rules

        l_rules.append(ProductionRule("S", "Cinit", "end"))
        l_rules.append(ProductionRule("Cinit", "C", "b"))
        l_rules.append(ConsumptionRule("end", "C", "T"))
        l_rules.append(EndRule("T", "epsilon"))

        # C[b sigma] -> b C[sigma]

        l_rules.append(ConsumptionRule("b", "C", "B0"))
        l_rules.append(DuplicationRule("B0", "A0", "C"))
        l_rules.append(EndRule("A0", "b"))

        for i in range(9):
            rules = Rules(l_rules, i)
            i_grammar = IndexedGrammar(rules)
            self.assertFalse(i_grammar.is_empty())
            self.assertEqual(i_grammar.terminals, {"end", "b", "epsilon"})
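The loop above feeds the same rule list to Rules(l_rules, i) for every i from 0 to 8 and expects identical behaviour; read together with rules.optim in the FST intersection code further down, this second argument appears to be an optimization level rather than part of the grammar definition.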
 def test_reachable(self):
     """ Tests the reachable symbols """
     l_rules = []
     l_rules.append(DuplicationRule("S", "A", "B"))
     l_rules.append(ProductionRule("A", "D", "f"))
     l_rules.append(ProductionRule("E", "D", "f"))
     l_rules.append(ProductionRule("E", "K", "f"))
     l_rules.append(EndRule("D", "d"))
     l_rules.append(DuplicationRule("D", "D", "D"))
     l_rules.append(DuplicationRule("D", "D", "A"))
     l_rules.append(ConsumptionRule("f", "B", "G"))
     l_rules.append(ConsumptionRule("f", "B", "A"))
     rules = Rules(l_rules)
     i_grammar = IndexedGrammar(rules, start_variable="S")
     reachable = i_grammar.get_reachable_non_terminals()
     self.assertEqual(reachable, {"S", "A", "B", "D", "G"})
 def test_generating(self):
     """ Tests the generating symbols """
     l_rules = []
     l_rules.append(DuplicationRule("S", "A", "B"))
     l_rules.append(ProductionRule("A", "D", "f"))
     l_rules.append(ProductionRule("E", "D", "f"))
     l_rules.append(EndRule("D", "d"))
     l_rules.append(ConsumptionRule("f", "B", "G"))
     l_rules.append(ConsumptionRule("f", "X", "G"))
     l_rules.append(DuplicationRule("Q", "A", "E"))
     l_rules.append(DuplicationRule("Q", "A", "D"))
     l_rules.append(DuplicationRule("Q", "D", "E"))
     rules = Rules(l_rules)
     i_grammar = IndexedGrammar(rules, start_variable="S")
     generating = i_grammar.get_generating_non_terminals()
     self.assertEqual(generating, {"D", "A", "E", "Q"})
    def intersection(self, indexed_grammar):
        """ Compute the intersection with an other object

        Equivalent to:
          >> fst and indexed_grammar
        """
        rules = indexed_grammar.rules
        new_rules = []
        new_rules.append(EndRule("T", "epsilon"))
        self._extract_consumption_rules_intersection(rules, new_rules)
        self._extract_indexed_grammar_rules_intersection(rules, new_rules)
        self._extract_terminals_intersection(rules, new_rules)
        self._extract_epsilon_transitions_intersection(new_rules)
        self._extract_fst_delta_intersection(new_rules)
        self._extract_fst_epsilon_intersection(new_rules)
        self._extract_fst_duplication_rules_intersection(new_rules)
        rules = Rules(new_rules, rules.optim)
        return IndexedGrammar(rules).remove_useless_rules()
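A minimal usage sketch of this method, rebuilding the {"ab"} grammar and the two-transition FST from test_intersection_indexed_grammar above (import paths assumed to be pyformlang's):

from pyformlang.fst import FST
from pyformlang.indexed_grammar import (
    Rules, ProductionRule, ConsumptionRule, DuplicationRule, EndRule,
    IndexedGrammar)

# Indexed grammar generating exactly the word "ab".
rules = Rules([
    ProductionRule("S", "D", "f"),
    DuplicationRule("D", "A", "B"),
    ConsumptionRule("f", "A", "Afinal"),
    ConsumptionRule("f", "B", "Bfinal"),
    EndRule("Afinal", "a"),
    EndRule("Bfinal", "b"),
])
indexed_grammar = IndexedGrammar(rules)

# FST accepting the input "ab" and echoing it as output.
fst = FST()
fst.add_start_state("q0")
fst.add_final_state("final")
fst.add_transition("q0", "a", "q1", ["a"])
fst.add_transition("q1", "b", "final", ["b"])

intersection = fst.intersection(indexed_grammar)  # same as: fst & indexed_grammar
print(intersection.is_empty())                    # expected: False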
    def test_simple_ig_5(self):
        # Write rules

        l_rules = []

        # Initialization rules

        l_rules.append(ProductionRule("S", "A", "f"))
        l_rules.append(ConsumptionRule("f", "A", "B"))
        l_rules.append(ConsumptionRule("f", "C", "F"))
        l_rules.append(ProductionRule("B", "C", "f"))
        l_rules.append(ProductionRule("D", "E", "f"))
        l_rules.append(EndRule("F", "epsilon"))
        l_rules.append(DuplicationRule("B0", "A0", "C"))

        rules = Rules(l_rules)
        i_grammar = IndexedGrammar(rules)
        self.assertFalse(i_grammar.is_empty())
 def test_end_rule(self):
     """ Tests the end rules """
     end_rule = EndRule("A0", "b")
     self.assertEqual(end_rule.terminals, {"b"})
     self.assertEqual(end_rule.right_term, "b")
     self.assertEqual(str(end_rule), "A0 -> b")
 def _extract_fst_epsilon_intersection(self, new_rules):
     """ Adds an epsilon end rule (state_p, "epsilon", state_p) -> epsilon
     for every state of the FST """
     for state_p in self._states:
         new_rules.append(
             EndRule(str((state_p, "epsilon", state_p)), "epsilon"))