def parse(self):
    """Emit the next character of the buffered input as a token.

    Advances the internal cursor by one; once the input is exhausted,
    the finish token is produced instead.
    """
    if self._index >= len(self._input):
        # Nothing left to read: hand out the end-of-input marker.
        self._token = Source.get_finish_token()
        return
    self._token = Token('char', self._input[self._index])
    self._index += 1
def operate(self, operation, token):
    """Save the name and text values.

    :param operation: ['save', 'name', 'text', 'close']
    :param token: the considered token
    :raises ValueError: when the operation name is unknown
    """
    if operation == 'save':
        # Flush the collected characters into a single value string.
        self._value = ''.join(self._stacks[''])
        return
    if operation in ('name', 'text'):
        # The token type matches the operation name for these two cases.
        self._token = Token(operation, self._value)
        self._ready = True
        return
    if operation == 'close':
        self._token = Token(';', ';')
        return
    raise ValueError(
        'Undefined operation name! "{}"'.format(operation))
def parse(self):
    """Read one character from the input stream and wrap it as a token.

    When the stream is exhausted, emit the finish token and release the
    stream. Does nothing once the input has already been closed.
    """
    if self._input is None:
        # Stream already consumed and closed on a previous call.
        return
    c = self._input.read(1)
    if c:
        self._token = Token('char', c)
    else:
        # End of stream: emit the finish marker and free the handle.
        self._token = Source.get_finish_token()
        self._input.close()
        self._input = None
    self._ready = True
def operate(self, operation, token):
    """
    Create a token with the given type from the accumulated characters.

    :param operation: ['keyword', 'text', 'number'] — the type of token
        to produce
    :param token: the considered token (not the resulted); unused here
    :return: None
    :raises ValueError: when the operation name is not a known type
    """
    if operation in ('keyword', 'text', 'number'):
        # Join the stacked characters into the token's value.
        value = ''.join(self._stacks[''])
        self._token = Token(operation, value)
        self._ready = True
    else:
        # Fixed wording: the original message ("has not defined!") was
        # ungrammatical and inconsistent with the sibling operate().
        raise ValueError(
            'Undefined operation name! "{}"'.format(operation))
def test_find_next_state(self):
    """Check router.find_next_state() against a table of expected transitions.

    Each table entry maps a (source state, input token) pair to the state
    the router must select. A ``target`` of ``None`` means no transition
    exists and the router is expected to raise ``RuntimeError``.
    """
    grammar = Grammar('grammars/function.grammar', classifier=FunctionClassifier())
    # Transitions within the 'function' node come first; then transitions
    # of the 'list' node entered from State(grammar, 'function', 7).
    transitions = [
        {'source': State(grammar, 'function', 1), 'token': Token('keyword', ''), 'target': State(grammar, 'function', 3)},
        {'source': State(grammar, 'function', 1), 'token': Token('number', ''), 'target': State(grammar, 'function', 11)},
        {'source': State(grammar, 'function', 2), 'token': Token('keyword', ''), 'target': None},
        {'source': State(grammar, 'function', 3), 'token': Token('(', ''), 'target': State(grammar, 'function', 4)},
        {'source': State(grammar, 'function', 3), 'token': Token('number', ''), 'target': State(grammar, 'function', 13)},
        {'source': State(grammar, 'function', 4), 'token': Token(')', ''), 'target': State(grammar, 'function', 5)},
        {'source': State(grammar, 'function', 4), 'token': Token('number', ''), 'target': State(grammar, 'function', 8)},
        {'source': State(grammar, 'function', 4), 'token': Token('[', ''), 'target': State(grammar, 'function', 8)},
        {'source': State(grammar, 'function', 4), 'token': Token('keyword', ''), 'target': State(grammar, 'function', 16)},
        {'source': State(grammar, 'function', 5), 'token': Token('empty', ''), 'target': State(grammar, 'function', 2)},
        {'source': State(grammar, 'function', 6), 'token': Token(')', ''), 'target': State(grammar, 'function', 9)},
        {'source': State(grammar, 'function', 6), 'token': Token('comma', ''), 'target': State(grammar, 'function', 9)},
        {'source': State(grammar, 'function', 6), 'token': Token('empty', ''), 'target': State(grammar, 'function', 9)},
        {'source': State(grammar, 'function', 7), 'token': Token('[', ''), 'target': State(grammar, 'list', 5, State(grammar, 'function', 7))},
        {'source': State(grammar, 'function', 7), 'token': Token('keyword', ''), 'target': State(grammar, 'function', 16)},
        {'source': State(grammar, 'function', 8), 'token': Token('number', ''), 'target': State(grammar, 'function', 6)},
        {'source': State(grammar, 'function', 8), 'token': Token('[', ''), 'target': State(grammar, 'function', 7)},
        {'source': State(grammar, 'function', 8), 'token': Token('keyword', ''), 'target': State(grammar, 'function', 16)},
        {'source': State(grammar, 'function', 9), 'token': Token(')', ''), 'target': State(grammar, 'function', 5)},
        {'source': State(grammar, 'function', 9), 'token': Token('comma', ''), 'target': State(grammar, 'function', 10)},
        {'source': State(grammar, 'function', 9), 'token': Token('keyword', ''), 'target': State(grammar, 'function', 15)},
        {'source': State(grammar, 'function', 10), 'token': Token('number', ''), 'target': State(grammar, 'function', 8)},
        {'source': State(grammar, 'function', 10), 'token': Token('[', ''), 'target': State(grammar, 'function', 8)},
        {'source': State(grammar, 'function', 10), 'token': Token('keyword', ''), 'target': State(grammar, 'function', 16)},
        {'source': State(grammar, 'function', 11), 'token': Token('number', ''), 'target': State(grammar, 'function', 12)},
        {'source': State(grammar, 'function', 12), 'token': Token('number', ''), 'target': None},
        {'source': State(grammar, 'function', 13), 'token': Token('number', ''), 'target': State(grammar, 'function', 14)},
        {'source': State(grammar, 'function', 14), 'token': Token('number', ''), 'target': None},
        {'source': State(grammar, 'function', 15), 'token': Token('keyword', ''), 'target': State(grammar, 'function', 2)},
        {'source': State(grammar, 'function', 16), 'token': Token('number', ''), 'target': State(grammar, 'function', 17)},
        {'source': State(grammar, 'function', 17), 'token': Token('number', ''), 'target': State(grammar, 'function', 18)},
        {'source': State(grammar, 'function', 18), 'token': Token('number', ''), 'target': None},
        # 'list' node transitions; the fourth State argument is the parent
        # state the parser returns to when the embedded node completes.
        {'source': State(grammar, 'list', 1, State(grammar, 'function', 7)), 'token': Token('[', ''), 'target': State(grammar, 'list', 5, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 1, State(grammar, 'function', 7)), 'token': Token('number', ''), 'target': State(grammar, 'list', 8, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 2, State(grammar, 'function', 7)), 'token': Token(')', ''), 'target': State(grammar, 'function', 9)},
        {'source': State(grammar, 'list', 2, State(grammar, 'function', 7)), 'token': Token('comma', ''), 'target': State(grammar, 'function', 9)},
        {'source': State(grammar, 'list', 2, State(grammar, 'function', 7)), 'token': Token('keyword', ''), 'target': State(grammar, 'function', 9)},
        {'source': State(grammar, 'list', 3, State(grammar, 'function', 7)), 'token': Token('number', ''), 'target': State(grammar, 'list', 4, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 3, State(grammar, 'function', 7)), 'token': Token('keyword', ''), 'target': State(grammar, 'list', 8, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 4, State(grammar, 'function', 7)), 'token': Token(']', ''), 'target': State(grammar, 'list', 6, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 4, State(grammar, 'function', 7)), 'token': Token('comma', ''), 'target': State(grammar, 'list', 3, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 4, State(grammar, 'function', 7)), 'token': Token('keyword', ''), 'target': State(grammar, 'list', 8, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 5, State(grammar, 'function', 7)), 'token': Token(']', ''), 'target': State(grammar, 'list', 7, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 5, State(grammar, 'function', 7)), 'token': Token('number', ''), 'target': State(grammar, 'list', 4, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 5, State(grammar, 'function', 7)), 'token': Token('keyword', ''), 'target': State(grammar, 'list', 8, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 6, State(grammar, 'function', 7)), 'token': Token(')', ''), 'target': State(grammar, 'list', 2, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 6, State(grammar, 'function', 7)), 'token': Token('comma', ''), 'target': State(grammar, 'list', 2, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 6, State(grammar, 'function', 7)), 'token': Token('keyword', ''), 'target': State(grammar, 'list', 2, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 7, State(grammar, 'function', 7)), 'token': Token(']', ''), 'target': State(grammar, 'list', 6, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 7, State(grammar, 'function', 7)), 'token': Token('comma', ''), 'target': State(grammar, 'list', 8, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 8, State(grammar, 'function', 7)), 'token': Token('comma', ''), 'target': State(grammar, 'list', 9, State(grammar, 'function', 7))},
        {'source': State(grammar, 'list', 9, State(grammar, 'function', 7)), 'token': Token('comma', ''), 'target': State(grammar, 'list', 10, State(grammar, 'function', 7))}
    ]
    for transition in transitions:
        target_state = transition['target']
        if target_state is not None:
            self.assertEqual(router.find_next_state(transition['source'], transition['token']), target_state)
        else:
            # No valid transition exists: the router must raise, not guess.
            with self.assertRaises(RuntimeError):
                router.find_next_state(transition['source'], transition['token'])
def test_multiple_matching(self):
    """An ambiguous token with more than one matching successor must raise."""
    grammar = Grammar('grammars/route_samples.grammar',
                      classifier=SampleClassifier())
    source_state = State(grammar, 'sample', 2)
    ambiguous_token = Token('char', '*')
    with self.assertRaises(RuntimeError):
        router.has_matching_successor(source_state, ambiguous_token)
def test_without_matching(self):
    """A token with no matching successor yields a False result."""
    grammar = Grammar('grammars/route_samples.grammar',
                      classifier=SampleClassifier())
    result = router.has_matching_successor(State(grammar, 'sample', 6),
                                           Token('char', 'c'))
    self.assertFalse(result)
def test_indirect_matching(self):
    """A successor reachable indirectly still counts as a match."""
    grammar = Grammar('grammars/route_samples.grammar',
                      classifier=SampleClassifier())
    result = router.has_matching_successor(State(grammar, 'sample', 2),
                                           Token('char', 'b'))
    self.assertTrue(result)
def get_finish_token(self):
    """Return the empty token that marks the end of the input."""
    return Token('empty', '')
def get_finish_token():
    """Return the token signalling that the source is exhausted."""
    return Token('empty', '')