def from_string(string: str):
    """Parse a dotted production such as 'S -> a.A' into a ParserItem.

    Spaces are ignored; the single '.' in the right-hand side marks the
    parser position (symbols already visited vs. symbols still ahead).
    """
    parts = string.replace(' ', '').split('->')
    visited, ahead = parts[1].split('.')
    position = ParserPosition([Symbol(ch) for ch in visited],
                              [Symbol(ch) for ch in ahead])
    return ParserItem(NonTerminal(parts[0]), position)
def test_when_given_wrong_expression_expect_analyzer_raises_exception(
        self):
    # given: an analyzer fed an input the grammar cannot derive
    table = ActionTable(StateFiniteAutomaton(self.grammar))
    analyzer = SemanticAnalyzer(table, Symbol.from_string("aabc"))
    # when/then: analysis must surface a conflict
    self.assertRaises(AnalyzerConflict, analyzer.analyze)
def test_when_getting_action_for_next_state_expect_right_action(self):
    # given:
    table = ActionTable(StateFiniteAutomaton(self.grammar))
    analyzer = SemanticAnalyzer(table, Symbol.from_string("abbbc"))
    # when: the analyzer picks the action for its upcoming state
    chosen_action = analyzer.next_action
    # then: that action originates from the very same state
    self.assertEqual(chosen_action.source, analyzer.next_state)
def test_when_given_correct_expression_expect_analyzer_returns_true(self):
    """A derivable input stream makes analyze() return a truthy result."""
    # given:
    action_table = ActionTable(StateFiniteAutomaton(self.grammar))
    analyzer = SemanticAnalyzer(action_table, Symbol.from_string("abbbc"))
    # when:
    actual = analyzer.analyze()
    # then: (removed leftover commented-out debug print)
    self.assertTrue(actual)
def test_when_analyzer_accepts_input_expect_is_accepted_equals_true(self):
    # given:
    table = ActionTable(StateFiniteAutomaton(self.grammar))
    analyzer = SemanticAnalyzer(table, Symbol.from_string("abbbc"))
    # when: shift all five input symbols, then reduce five times
    after_shifts = analyzer.shift().shift().shift().shift().shift()
    after_reduces = after_shifts.reduce().reduce().reduce().reduce().reduce()
    accepted = after_reduces.is_accepted
    # then:
    self.assertTrue(accepted)
def test_when_reducing_analyzer_expect_valid_next_parser_step(self):
    # given:
    table = ActionTable(StateFiniteAutomaton(self.grammar))
    analyzer = SemanticAnalyzer(table, Symbol.from_string("abbbc"))
    # when: five shifts followed by two reductions
    shifted = analyzer.shift().shift().shift().shift().shift()
    step = shifted.reduce().reduce().parser_step
    # then: the current state holds the fully-dotted item
    self.assertEqual(step.current_state.items,
                     [ParserItem.from_string('A -> bA.')])
def test_when_getting_next_state_from_parser_step_expect_next_state(self):
    # given:
    table = ActionTable(StateFiniteAutomaton(self.grammar))
    analyzer = SemanticAnalyzer(table, Symbol.from_string("abbbc"))
    # when:
    upcoming = analyzer.next_state
    # then: it carries the closure after reading 'a'
    expected_items = [
        ParserItem.from_string('S -> a.A'),
        ParserItem.from_string('A -> .bA'),
        ParserItem.from_string('A -> .c'),
    ]
    self.assertEqual(upcoming.items, expected_items)
def test_when_is_going_to_next_state_expect_valid_next_state(self):
    # declarations: closure of the extended grammar's first E-rule item
    extended = self.grammar.extend()
    seed = ParserItem.item_for(extended.rules_of(NonTerminal("E"))[0])
    state = State(Closure(seed, extended).closure(), self.grammar)
    # when
    reached = state.go_to(Symbol("a"))
    # then:
    self.assertEqual(reached.items, [
        ParserItem.from_string('S -> a . A'),
        ParserItem.from_string('A -> . b A'),
        ParserItem.from_string('A -> . c'),
    ])
def test_when_grammar_has_empty_rule_expect_to_accept_input_stream(self):
    # given: a grammar whose start symbol has an epsilon production
    data = {
        'terminals': ['1', '2'],
        'non-terminals': ['S'],
        'rules': ['S -> ', 'S -> 1 S', 'S -> 2 S'],
        'start': 'S'
    }
    grammar = ContextFreeGrammar.from_complex_dictionary(data)
    table = ActionTable(StateFiniteAutomaton(grammar))
    analyzer = SemanticAnalyzer(table,
                                Symbol.from_complex_string("1 1 1 2"))
    # when:
    accepted = analyzer.analyze()
    # then:
    self.assertTrue(accepted)
def current_symbol(self):
    """Head of the remaining input stream, or an epsilon Symbol when empty."""
    if self.__input_stream:
        return self.__input_stream[0]
    return Symbol('ε')
def __build_initial_parser_step(self, input_stream: List[Symbol]):
    """Seed the parse: state 0 under the end marker '$', empty output."""
    start_state = self.__action_table.actions[0].source
    working_stack = [DestinationState(start_state, Symbol("$"))]
    return ParserStep(working_stack, input_stream, [])
import json
import sys

from parsing.domain.context_free_grammar import ContextFreeGrammar
from parsing.domain.non_terminal import NonTerminal
from parsing.domain.rule import Rule
from parsing.domain.symbol import Symbol
from parsing.domain.terminal import Terminal

if __name__ == '__main__':
    # Load the grammar description from the JSON file named on the CLI.
    with open(sys.argv[1], "r") as file:
        given = json.load(file)  # parse straight from the stream

    non_terminals = [NonTerminal(x) for x in given["non_terminals"]]
    alphabet = [Terminal(x) for x in given["alphabet"]]
    # Each rule string is "lhs -> rhs".
    # NOTE(review): the rhs is wrapped as ONE Symbol, not split into
    # individual symbols — confirm multi-symbol right sides are intended
    # to stay a single token here.
    rules = [Rule(NonTerminal(lhs), [Symbol(rhs)])
             for lhs, rhs in (rule.split(" -> ") for rule in given["rules"])]
    start = NonTerminal(given["start"])

    grammar = ContextFreeGrammar(non_terminals, alphabet, rules, start)
    print(grammar.extend())
def from_complex_string(string: str):
    """Build a Rule from a space-separated production like 'A -> x y z'."""
    parts = string.split(' -> ')
    left_side = NonTerminal(parts[0].replace(' ', ''))
    right_side = [Symbol(token)
                  for token in Rule.__get_symbols_from_string(parts)]
    return Rule(left_side, right_side)
def from_string(string: str):
    """Build a Rule from a compact form like 'A->xyz' (one char per symbol)."""
    parts = string.replace(' ', '').split('->')
    right_side = [Symbol(character) for character in parts[1]]
    return Rule(NonTerminal(parts[0]), right_side)
def __convert_internal_form_to_symbols(
        internal_form: InternalForm) -> List[Symbol]:
    """Wrap every atom key of the internal form in a Symbol."""
    return list(map(Symbol, internal_form.atom_keys))