Example #1
 def test_unique_successor_states(self):
     grammar = Grammar('grammars/function.grammar')
     successors = {5: 2, 6: 9, 10: 8, 11: 12, 13: 14, 15: 2, 16: 17, 17: 18}
     for source_id, target_id in successors.items():
         source = State(grammar, 'function', source_id)
         target = State(grammar, 'function', target_id)
         states = source.find_successor_states()
         self.assertEqual(states, {target})
Example #2
 def process_token(self, token):
     """
     Process the token according to the current node.
     :param token: a token object
     :return: None
     """
     node_type = self._state.node.type
     node_value = self._state.node.value
     if node_type is NodeType.EXPRESSION:
         # For an expression node the value names a sub-expression: enter it
         # at its start node and keep the current state as the return point.
         node_id = self._state.grammar.expressions[node_value].get_start_node_id()
         self._state = State(self._state.grammar, node_value, node_id, self._state)
     elif node_type is NodeType.FINISH:
         # A finish node with no return state ends the entry expression:
         # emit the finish token and mark the processor as ready.
         if self._state.return_state is None:
             self._token = self.get_finish_token()
             self._ready = True
     elif node_type is NodeType.INFO:
         self.show_info(node_value, token)
     elif node_type is NodeType.ERROR:
         self.show_error(node_value, token)
     elif node_type is NodeType.TRANSFORMATION:
         self._token = self.transform(node_value, token)
     elif node_type is NodeType.OPERATION:
         self.operate(node_value, token)
     elif node_type is NodeType.STACK:
         self.push_stack(node_value, token)
     elif node_type is NodeType.CLEAN:
         self.clean_stack(node_value, token)
     elif node_type in (NodeType.TOKEN, NodeType.EXCEPT_TOKEN, NodeType.DEFAULT_TOKEN):
         # Token-consuming nodes delegate to the source parser.
         self._source.parse()
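
For reference, a minimal sketch of the NodeType enum that process_token branches on. The member names are read off the branches above; the project's real definition (values, ordering, any extra members) may differ.

 from enum import Enum, auto

 class NodeType(Enum):
     # Member names taken from the branches in process_token; purely illustrative.
     EXPRESSION = auto()
     FINISH = auto()
     INFO = auto()
     ERROR = auto()
     TRANSFORMATION = auto()
     OPERATION = auto()
     STACK = auto()
     CLEAN = auto()
     TOKEN = auto()
     EXCEPT_TOKEN = auto()
     DEFAULT_TOKEN = auto()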
Example #3
 def test_multiple_successor_states(self):
     grammar = Grammar('grammars/function.grammar')
     successors = {
         1: [3, 11],
         3: [4, 13],
         4: [5, 8],
         8: [6, 7],
         9: [5, 10, 15]
     }
     for source_id, target_ids in successors.items():
         source = State(grammar, 'function', source_id)
         targets = {
             State(grammar, 'function', target_id)
             for target_id in target_ids
         }
         states = source.find_successor_states()
         self.assertEqual(states, targets)
Example #4
 def get_initial_state(self):
     """
     Get the initial state of the grammar.
     :return: the State object of the grammar
     """
     expression_name = self.get_entry_expression_name()
     node_id = self.expressions[expression_name].get_start_node_id()
     return State(self, expression_name, node_id, None)
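
A hypothetical usage sketch, assuming the project's Grammar and State classes as exercised by the tests in this file; it only illustrates how get_initial_state and find_successor_states fit together, reusing the grammar file from the tests above.

 grammar = Grammar('grammars/function.grammar')
 initial_state = grammar.get_initial_state()
 # Enumerate the states reachable in one step from the entry state.
 for successor in initial_state.find_successor_states():
     print(successor)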
Example #5
 def test_find_next_state(self):
     grammar = Grammar('grammars/function.grammar', classifier=FunctionClassifier())
     transitions = [
         {
             'source': State(grammar, 'function', 1),
             'token': Token('keyword', ''),
             'target': State(grammar, 'function', 3)
         },
         {
             'source': State(grammar, 'function', 1),
             'token': Token('number', ''),
             'target': State(grammar, 'function', 11)
         },
         {
             'source': State(grammar, 'function', 2),
             'token': Token('keyword', ''),
             'target': None
         },
         {
             'source': State(grammar, 'function', 3),
             'token': Token('(', ''),
             'target': State(grammar, 'function', 4)
         },
         {
             'source': State(grammar, 'function', 3),
             'token': Token('number', ''),
             'target': State(grammar, 'function', 13)
         },
         {
             'source': State(grammar, 'function', 4),
             'token': Token(')', ''),
             'target': State(grammar, 'function', 5)
         },
         {
             'source': State(grammar, 'function', 4),
             'token': Token('number', ''),
             'target': State(grammar, 'function', 8)
         },
         {
             'source': State(grammar, 'function', 4),
             'token': Token('[', ''),
             'target': State(grammar, 'function', 8)
         },
         {
             'source': State(grammar, 'function', 4),
             'token': Token('keyword', ''),
             'target': State(grammar, 'function', 16)
         },
         {
             'source': State(grammar, 'function', 5),
             'token': Token('empty', ''),
             'target': State(grammar, 'function', 2)
         },
         {
             'source': State(grammar, 'function', 6),
             'token': Token(')', ''),
             'target': State(grammar, 'function', 9)
         },
         {
             'source': State(grammar, 'function', 6),
             'token': Token('comma', ''),
             'target': State(grammar, 'function', 9)
         },
         {
             'source': State(grammar, 'function', 6),
             'token': Token('empty', ''),
             'target': State(grammar, 'function', 9)
         },
         {
             'source': State(grammar, 'function', 7),
             'token': Token('[', ''),
             'target': State(grammar, 'list', 5, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'function', 7),
             'token': Token('keyword', ''),
             'target': State(grammar, 'function', 16)
         },
         {
             'source': State(grammar, 'function', 8),
             'token': Token('number', ''),
             'target': State(grammar, 'function', 6)
         },
         {
             'source': State(grammar, 'function', 8),
             'token': Token('[', ''),
             'target': State(grammar, 'function', 7)
         },
         {
             'source': State(grammar, 'function', 8),
             'token': Token('keyword', ''),
             'target': State(grammar, 'function', 16)
         },
         {
             'source': State(grammar, 'function', 9),
             'token': Token(')', ''),
             'target': State(grammar, 'function', 5)
         },
         {
             'source': State(grammar, 'function', 9),
             'token': Token('comma', ''),
             'target': State(grammar, 'function', 10)
         },
         {
             'source': State(grammar, 'function', 9),
             'token': Token('keyword', ''),
             'target': State(grammar, 'function', 15)
         },
         {
             'source': State(grammar, 'function', 10),
             'token': Token('number', ''),
             'target': State(grammar, 'function', 8)
         },
         {
             'source': State(grammar, 'function', 10),
             'token': Token('[', ''),
             'target': State(grammar, 'function', 8)
         },
         {
             'source': State(grammar, 'function', 10),
             'token': Token('keyword', ''),
             'target': State(grammar, 'function', 16)
         },
         {
             'source': State(grammar, 'function', 11),
             'token': Token('number', ''),
             'target': State(grammar, 'function', 12)
         },
         {
             'source': State(grammar, 'function', 12),
             'token': Token('number', ''),
             'target': None
         },
         {
             'source': State(grammar, 'function', 13),
             'token': Token('number', ''),
             'target': State(grammar, 'function', 14)
         },
         {
             'source': State(grammar, 'function', 14),
             'token': Token('number', ''),
             'target': None
         },
         {
             'source': State(grammar, 'function', 15),
             'token': Token('keyword', ''),
             'target': State(grammar, 'function', 2)
         },
         {
             'source': State(grammar, 'function', 16),
             'token': Token('number', ''),
             'target': State(grammar, 'function', 17)
         },
         {
             'source': State(grammar, 'function', 17),
             'token': Token('number', ''),
             'target': State(grammar, 'function', 18)
         },
         {
             'source': State(grammar, 'function', 18),
             'token': Token('number', ''),
             'target': None
         },
         {
             'source': State(grammar, 'list', 1, State(grammar, 'function', 7)),
             'token': Token('[', ''),
             'target': State(grammar, 'list', 5, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 1, State(grammar, 'function', 7)),
             'token': Token('number', ''),
             'target': State(grammar, 'list', 8, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 2, State(grammar, 'function', 7)),
             'token': Token(')', ''),
             'target': State(grammar, 'function', 9)
         },
         {
             'source': State(grammar, 'list', 2, State(grammar, 'function', 7)),
             'token': Token('comma', ''),
             'target': State(grammar, 'function', 9)
         },
         {
             'source': State(grammar, 'list', 2, State(grammar, 'function', 7)),
             'token': Token('keyword', ''),
             'target': State(grammar, 'function', 9)
         },
         {
             'source': State(grammar, 'list', 3, State(grammar, 'function', 7)),
             'token': Token('number', ''),
             'target': State(grammar, 'list', 4, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 3, State(grammar, 'function', 7)),
             'token': Token('keyword', ''),
             'target': State(grammar, 'list', 8, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 4, State(grammar, 'function', 7)),
             'token': Token(']', ''),
             'target': State(grammar, 'list', 6, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 4, State(grammar, 'function', 7)),
             'token': Token('comma', ''),
             'target': State(grammar, 'list', 3, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 4, State(grammar, 'function', 7)),
             'token': Token('keyword', ''),
             'target': State(grammar, 'list', 8, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 5, State(grammar, 'function', 7)),
             'token': Token(']', ''),
             'target': State(grammar, 'list', 7, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 5, State(grammar, 'function', 7)),
             'token': Token('number', ''),
             'target': State(grammar, 'list', 4, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 5, State(grammar, 'function', 7)),
             'token': Token('keyword', ''),
             'target': State(grammar, 'list', 8, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 6, State(grammar, 'function', 7)),
             'token': Token(')', ''),
             'target': State(grammar, 'list', 2, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 6, State(grammar, 'function', 7)),
             'token': Token('comma', ''),
             'target': State(grammar, 'list', 2, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 6, State(grammar, 'function', 7)),
             'token': Token('keyword', ''),
             'target': State(grammar, 'list', 2, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 7, State(grammar, 'function', 7)),
             'token': Token(']', ''),
             'target': State(grammar, 'list', 6, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 7, State(grammar, 'function', 7)),
             'token': Token('comma', ''),
             'target': State(grammar, 'list', 8, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 8, State(grammar, 'function', 7)),
             'token': Token('comma', ''),
             'target': State(grammar, 'list', 9, State(grammar, 'function', 7))
         },
         {
             'source': State(grammar, 'list', 9, State(grammar, 'function', 7)),
             'token': Token('comma', ''),
             'target': State(grammar, 'list', 10, State(grammar, 'function', 7))
         }
     ]
     for transition in transitions:
         target_state = transition['target']
         if target_state is not None:
             self.assertEqual(
                 router.find_next_state(transition['source'], transition['token']),
                 target_state
             )
         else:
             # A missing target means no successor accepts the token, so
             # find_next_state is expected to raise a RuntimeError.
             with self.assertRaises(RuntimeError):
                 router.find_next_state(transition['source'], transition['token'])
Example #6
 def test_without_default(self):
     grammar = Grammar('grammars/route_samples.grammar', classifier=SampleClassifier())
     state = State(grammar, 'sample', 6)
     self.assertFalse(router.has_default_successor(state))
Example #7
 def test_indirect_default(self):
     grammar = Grammar('grammars/route_samples.grammar', classifier=SampleClassifier())
     state = State(grammar, 'sample', 1)
     self.assertTrue(router.has_default_successor(state))
Example #8
 def test_multiple_matching(self):
     grammar = Grammar('grammars/route_samples.grammar', classifier=SampleClassifier())
     state = State(grammar, 'sample', 2)
     token = Token('char', '*')
     with self.assertRaises(RuntimeError):
         _ = router.has_matching_successor(state, token)
Example #9
 def test_without_matching(self):
     grammar = Grammar('grammars/route_samples.grammar', classifier=SampleClassifier())
     state = State(grammar, 'sample', 6)
     token = Token('char', 'c')
     self.assertFalse(router.has_matching_successor(state, token))
Example #10
 def test_indirect_matching(self):
     grammar = Grammar('grammars/route_samples.grammar', classifier=SampleClassifier())
     state = State(grammar, 'sample', 2)
     token = Token('char', 'b')
     self.assertTrue(router.has_matching_successor(state, token))
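
A purely hypothetical driver (not part of the project) showing how the router helpers exercised in Examples #5 to #10 might be combined by a caller. Only the RuntimeError behaviour of find_next_state is taken from the assertions above; the name advance and the overall control flow are assumptions.

 def advance(state, token):
     # 'router' is the module under test in the examples above.
     # Assumption: find_next_state raises RuntimeError when no successor
     # accepts the token, mirroring the assertions in Example #5.
     if router.has_matching_successor(state, token) or router.has_default_successor(state):
         return router.find_next_state(state, token)
     raise RuntimeError('no successor accepts token {!r}'.format(token))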
Example #11
 def test_finish_states(self):
     grammar = Grammar('grammars/function.grammar')
     for node_id in [2, 12, 14, 18]:
         state = State(grammar, 'function', node_id, None)
         with self.assertRaises(RuntimeError):
             _ = state.find_successor_states()
Example #12
 def test_expression_exit(self):
     grammar = Grammar('grammars/function.grammar')
     source = State(grammar, 'list', 2, State(grammar, 'function', 7))
     target = State(grammar, 'function', 9)
     self.assertEqual(source.find_successor_states(), {target})
Example #13
 def test_recurrent_next_state(self):
     grammar = Grammar('grammars/function.grammar')
     token_state = State(grammar, 'skip', 3)
     finish_state = State(grammar, 'skip', 2)
     states = token_state.find_successor_states()
     self.assertEqual(states, {token_state, finish_state})