def test_direct_rule_call() -> None:
    """
    Regression test: when a rule directly references another rule, a
    semantic action attached to the referencing rule must not be
    attached to the referenced rule as well.
    """

    def grammar():
        return rule1, rule2

    def rule1():
        return "a"

    def rule2():
        return rule1

    invocations = [0]

    class DummySemAction(SemanticAction):
        def first_pass(self, parser, node, nodes):
            invocations[0] += 1
            return SemanticAction.first_pass(self, parser, node, nodes)

    # The action is attached to rule2 only; the regression under test
    # would attach it to rule1 too, inflating the invocation count.
    rule2.sem = DummySemAction()  # type: ignore

    parser = ParserPython(grammar)
    parser.parse("aa")
    parser.getASG()

    assert invocations[0] == 1, "Semantic action should be called once!"
def test_direct_rule_call():
    """
    Regression test for a bug where, in a direct rule call, a semantic
    action set on the caller rule leaked onto the callee rule as well.
    """

    def grammar():
        return rule1, rule2

    def rule1():
        return "a"

    def rule2():
        return rule1

    counter = [0]

    class DummySemAction(SemanticAction):
        def first_pass(self, parser, node, nodes):
            counter[0] += 1
            return SemanticAction.first_pass(self, parser, node, nodes)

    # Attach the action to rule2 only.  The buggy behaviour would also
    # attach it to rule1, producing a wrong call count.
    rule2.sem = DummySemAction()

    parser = ParserPython(grammar)
    parser.parse("aa")
    parser.getASG()

    assert counter[0] == 1, "Semantic action should be called once!"
def test_default_action_disabled():
    """Default semantic actions must not run when ``defaults=False``."""
    p = ParserPython(grammar)
    p.parse('(-34) strmatch')
    p.getASG(defaults=False)
    # With defaults disabled, none of the default-action side effects
    # should have been recorded by the module-level flags.
    assert not p_removed
    assert not number_str
    assert parse_tree_node
def test_semantic_action_results():
    """
    Exercise semantic analysis and check that ``SemanticActionResults``
    collects child results by rule name.

    Renamed the local ``input`` to ``input_str`` — the original shadowed
    the ``input`` builtin.
    """
    global first_sar, third_sar

    input_str = "4 3 3 3 a 3 3 b"
    parser = ParserPython(grammar, reduce_tree=False)
    result = parser.parse(input_str)
    # Export the parse tree for visual inspection/debugging.
    PTDOTExporter().exportFile(result, 'test_semantic_action_results_pt.dot')
    parser.getASG()

    assert isinstance(first_sar, SemanticActionResults)
    # Three '3' matches are grouped under the 'third' rule name.
    assert len(first_sar.third) == 3
    assert third_sar.third_str[0] == '3'
def main(debug=False):
    """
    Parse the expression stored in ``input.txt`` and run semantic analysis.

    Args:
        debug (bool): Forwarded to ``ParserPython`` for verbose output.
    """
    parser = ParserPython(initial, debug=debug)

    # Use a context manager so the file handle is always closed; the
    # original opened the file and never closed it (resource leak).
    with open("input.txt", 'r') as file_input:
        input_expr = file_input.read()

    parse_tree = parser.parse(input_expr)
    result = parser.getASG()
def main(debug=False):
    """Evaluate a fixed arithmetic expression using the calc grammar."""
    # The parser model is given as Python constructs, hence ParserPython.
    parser = ParserPython(calc, debug=debug)

    # The expression to evaluate.
    input_expr = "-(4-1)*5+(2+4.67)+5.89/(.2+7)"

    # Build the parse tree, then run semantic analysis; getASG evaluates
    # the expression and yields its numeric value.
    parse_tree = parser.parse(input_expr)
    result = parser.getASG()

    if debug:
        print("{} = {}".format(input_expr, result))
def main(debug=False):
    """Parse and evaluate a hard-coded calculator expression."""
    # Grammar is defined with Python constructs, so ParserPython is used
    # to build the parser instance from the calc model.
    parser = ParserPython(calc, debug=debug)

    input_expr = "-(4-1)*5+(2+4.67)+5.89/(.2+7)"

    # Produce a parse tree from the textual expression, then trigger
    # semantic analysis which computes the expression's value.
    parse_tree = parser.parse(input_expr)
    result = parser.getASG()

    if debug:
        print("{} = {}".format(input_expr, result))
def language_from_str(language_def, metamodel):
    """
    Constructs parser and initializes metamodel from language description
    given in textX language.

    Args:
        language_def (str): A language description in textX.
        metamodel (TextXMetaModel): A metamodel to initialize.

    Returns:
        Parser for the new language.
    """
    if metamodel.debug:
        metamodel.dprint("*** PARSING LANGUAGE DEFINITION ***")

    # Reuse a cached textX parser when one was already built for this
    # debug setting; otherwise construct and cache a new one.
    if metamodel.debug in textX_parsers:
        parser = textX_parsers[metamodel.debug]
    else:
        # Parser for textX descriptions, built from the textX grammar
        # specified in this module.
        parser = ParserPython(textx_model, comment_def=comment,
                              ignore_case=False,
                              reduce_tree=False,
                              debug=metamodel.debug)

        # Regex used in keyword-like strmatch detection (see str_match_SA).
        regex_flags = re.IGNORECASE if metamodel.ignore_case else 0
        parser.keyword_regex = re.compile(r'[^\d\W]\w*', regex_flags)

        textX_parsers[metamodel.debug] = parser

    # Used during the parser-construction phase: the metamodel is filled
    # in and classes are created based on grammar rules.
    parser.metamodel = metamodel
    parser.root_rule_name = None

    # Parse the language description with the textX parser.
    try:
        parser.parse(language_def)
    except NoMatch as e:
        line, col = parser.pos_to_linecol(e.position)
        raise TextXSyntaxError(text(e), line, col)

    # Build the new language parser from the parse result.
    lang_parser = parser.getASG()

    # Meta-model is constructed; validate its semantics.
    parser.metamodel.validate()

    # Connect meta-model and language parser for convenience.
    lang_parser.metamodel = parser.metamodel
    metamodel.parser = lang_parser

    if metamodel.debug:
        # Export the parser model for debugging purposes.
        PMDOTExporter().exportFile(
            lang_parser.parser_model,
            "{}_parser_model.dot".format(metamodel.rootcls.__name__))

    return lang_parser
def language_from_str(language_def, metamodel):
    """
    Constructs parser and initializes metamodel from language description
    given in textX language.

    Args:
        language_def (str): A language description in textX.
        metamodel (TextXMetaModel): A metamodel to initialize.

    Returns:
        Parser for the new language.

    Raises:
        TextXSyntaxError: If the language description does not match the
            textX grammar.
    """
    if metamodel.debug:
        metamodel.dprint("*** PARSING LANGUAGE DEFINITION ***")

    # Check the cache for an already constructed textX parser for this
    # debug setting.
    if metamodel.debug in textX_parsers:
        parser = textX_parsers[metamodel.debug]
    else:
        # Create a parser for textX descriptions from the textX grammar
        # specified in this module.
        parser = ParserPython(textx_model, comment_def=comment,
                              ignore_case=False,
                              reduce_tree=False,
                              debug=metamodel.debug)

        # Prepare the regex used in keyword-like strmatch detection.
        # See str_match_SA.
        flags = re.IGNORECASE if metamodel.ignore_case else 0
        parser.keyword_regex = re.compile(r'[^\d\W]\w*', flags)

        # Cache the parser for subsequent calls.
        textX_parsers[metamodel.debug] = parser

    # Used during the parser-construction phase: the metamodel is filled
    # in and classes are created based on grammar rules.
    parser.metamodel = metamodel
    parser.root_rule_name = None

    # Parse the language description with the textX parser.
    try:
        parser.parse(language_def)
    except NoMatch as e:
        line, col = parser.pos_to_linecol(e.position)
        # Chain the original NoMatch explicitly so tracebacks show the
        # underlying parse failure (the original relied on implicit
        # context chaining).
        raise TextXSyntaxError(text(e), line, col) from e

    # Construct the new language parser from the parse result.
    lang_parser = parser.getASG()

    # Meta-model is constructed; validate its semantics.
    parser.metamodel.validate()

    # Connect meta-model and language parser for convenience.
    lang_parser.metamodel = parser.metamodel
    metamodel.parser = lang_parser

    if metamodel.debug:
        # Create a dot file of the parser model for debugging purposes.
        PMDOTExporter().exportFile(
            lang_parser.parser_model,
            "{}_parser_model.dot".format(metamodel.rootcls.__name__))

    return lang_parser