Example #1
def parse_string(self, src, grammar=program, filename=None):
    # Swap in the new source text, remembering the old one so it can be
    # restored in the finally block below.
    oldsrcs = self.input_sources
    self.context.optimization_level = self.optimization_level
    self.input_sources = src
    # Build an Arpeggio parser for the given grammar.
    parser = ParserPython(
        grammar,
        comment_def=comment,
        skipws=True,
        reduce_tree=False,
        memoization=True,
        debug=False,
    )
    self.context.parsers.append(parser)
    self.context.filenames.append(filename)
    try:
        parse_tree = parser.parse(self.input_sources)
        visitor = MuvVisitor(debug=False)
        visitor.muvparser = self
        parse_tree = visit_parse_tree(parse_tree, visitor)
        out = parse_tree.generate_code(self.context)
        if self.error_found:
            return False
        # Only the top-level file gets the generated-by banner.
        if len(self.context.filenames) == 1:
            if self.context.filenames[-1]:
                filetext = " from {0}".format(
                    self.context.filenames[-1]
                )
            else:
                filetext = ''
            self.output = (
                "( Generated{0} by the MUV compiler. )\n"
                "(   https://github.com/revarbat/pymuv )\n"
                "{1}\n"
            ).format(filetext, self.output)
        self.output += out
        if not self.error_found and len(self.context.filenames) == 1:
            if self.wrapper_program:
                # Wrap the output in editor commands so it can be
                # uploaded directly as the wrapper program.
                self.output = (
                    "@program {0}\n"
                    "1 99999 d\n"
                    "1 i\n"
                    "{1}\n"
                    ".\n"
                    "c\n"
                    "q\n"
                ).format(self.wrapper_program, self.output)
        return True
    except MuvError as e:
        # Semantic errors carry a position; report it as file/line/column.
        line, col = parser.pos_to_linecol(e.position)
        self.print_error(filename, line, col, str(e))
        return False
    except NoMatch as e:
        # Syntax errors: report what the parser expected at the failure point.
        line, col = parser.pos_to_linecol(e.position)
        expected = self.simplify_parse_error(e)
        self.print_error(filename, line, col, "Expected %s" % expected)
        return False
    finally:
        # Restore the previous source and pop the context stacks.
        self.input_sources = oldsrcs
        self.context.parsers.pop()
        self.context.filenames.pop()
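The method above wires Arpeggio into a larger compiler, but the underlying pattern is small: build a ParserPython from a grammar callable, call parse(), and on NoMatch translate the failure offset into a line/column pair with pos_to_linecol(). A minimal, self-contained sketch of that pattern (the toy grammar below is hypothetical, not pymuv's real grammar):

from arpeggio import ParserPython, RegExMatch, ZeroOrMore, EOF, NoMatch

# Hypothetical toy grammar: a comma-separated list of integers.
def number():   return RegExMatch(r'\d+')
def numbers():  return number, ZeroOrMore(',', number), EOF

parser = ParserPython(numbers, skipws=True, memoization=True, debug=False)

try:
    tree = parser.parse("1, 2, oops")
except NoMatch as e:
    # e.position is a 0-based offset into the input string;
    # pos_to_linecol() maps it to a 1-based (line, column) pair.
    line, col = parser.pos_to_linecol(e.position)
    print("Syntax error at line {0}, column {1}".format(line, col))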
Example #2
def test_pos_to_linecol():
    def grammar():
        return ("a", "b", "c")

    parser = ParserPython(grammar)

    parse_tree = parser.parse("a\n\n\n b\nc")

    a_pos = parse_tree[0].position
    assert parser.pos_to_linecol(a_pos) == (1, 1)
    b_pos = parse_tree[1].position
    assert parser.pos_to_linecol(b_pos) == (4, 2)
    c_pos = parse_tree[2].position
    assert parser.pos_to_linecol(c_pos) == (5, 1)
Example #3
def test_pos_to_linecol():

    def grammar(): return ("a", "b", "c")

    parser = ParserPython(grammar)

    parse_tree = parser.parse("a\n\n\n b\nc")

    a_pos = parse_tree[0].position
    assert parser.pos_to_linecol(a_pos) == (1, 1)
    b_pos = parse_tree[1].position
    assert parser.pos_to_linecol(b_pos) == (4, 2)
    c_pos = parse_tree[2].position
    assert parser.pos_to_linecol(c_pos) == (5, 1)
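Both tests rely on the same two facts: every terminal node in an Arpeggio parse tree exposes a .position attribute, a 0-based offset into the parsed text, and parser.pos_to_linecol() converts that offset into a 1-based (line, column) pair, which is why the "b" in the input lands on line 4, column 2. A small sketch (assuming only Arpeggio) that walks the terminals and prints their locations:

from arpeggio import ParserPython, Terminal

def grammar():  return ("a", "b", "c")

parser = ParserPython(grammar)
tree = parser.parse("a\n\n\n b\nc")

for node in tree:
    if isinstance(node, Terminal):
        line, col = parser.pos_to_linecol(node.position)
        print(node.value, line, col)   # a 1 1 / b 4 2 / c 5 1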
Example #4
def language_from_str(language_def, metamodel):
    """
    Constructs parser and initializes metamodel from language description
    given in textX language.

    Args:
        language_def (str): A language description in textX.
        metamodel (TextXMetaModel): A metamodel to initialize.

    Returns:
        Parser for the new language.
    """

    if metamodel.debug:
        metamodel.dprint("*** PARSING LANGUAGE DEFINITION ***")

    # Check the cache for an already constructed textX parser
    if metamodel.debug in textX_parsers:
        parser = textX_parsers[metamodel.debug]
    else:
        # Create parser for TextX grammars using
        # the arpeggio grammar specified in this module
        parser = ParserPython(textx_model,
                              comment_def=comment,
                              ignore_case=False,
                              reduce_tree=False,
                              memoization=metamodel.memoization,
                              debug=metamodel.debug,
                              file=metamodel.file)

        # Cache it for subsequent calls
        textX_parsers[metamodel.debug] = parser

    # Parse language description with textX parser
    try:
        parse_tree = parser.parse(language_def)
    except NoMatch as e:
        line, col = parser.pos_to_linecol(e.position)
        raise TextXSyntaxError(text(e), line, col)

    # Construct new parser and meta-model based on the given language
    # description.
    lang_parser = visit_parse_tree(parse_tree, TextXVisitor(parser, metamodel))

    # Meta-model is constructed. Validate its semantics.
    metamodel.validate()

    # Here we connect meta-model and language parser for convenience.
    lang_parser.metamodel = metamodel
    metamodel._parser_blueprint = lang_parser

    if metamodel.debug:
        # Create dot file for debugging purposes
        PMDOTExporter().exportFile(
            lang_parser.parser_model,
            "{}_parser_model.dot".format(metamodel.rootcls.__name__))

    return lang_parser
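language_from_str() is internal to textX; user code normally reaches it through the public meta-model constructors, which pass the grammar text down to it. A hedged usage sketch, assuming the textX package is installed (the exact import path can differ between textX versions) and using the standard hello-world grammar from the textX documentation:

from textx import metamodel_from_str   # ends up calling language_from_str()

grammar = '''
Model: greetings+=Greeting;
Greeting: 'hello' name=ID;
'''

mm = metamodel_from_str(grammar)                       # build meta-model + parser
model = mm.model_from_str('hello world hello textX')   # parse a model
print([g.name for g in model.greetings])               # ['world', 'textX']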
Example #5
def language_from_str(language_def, metamodel):
    """
    Constructs parser and initializes metamodel from language description
    given in textX language.

    Args:
        language_def (str): A language description in textX.
        metamodel (TextXMetaModel): A metamodel to initialize.

    Returns:
        Parser for the new language.
    """

    if metamodel.debug:
        metamodel.dprint("*** PARSING LANGUAGE DEFINITION ***")

    # Check the cache for an already constructed textX parser
    if metamodel.debug in textX_parsers:
        parser = textX_parsers[metamodel.debug]
    else:
        # Create parser for TextX descriptions from
        # the textX grammar specified in this module
        parser = ParserPython(textx_model,
                              comment_def=comment,
                              ignore_case=False,
                              reduce_tree=False,
                              debug=metamodel.debug)

        # Prepare regex used in keyword-like strmatch detection.
        # See str_match_SA
        flags = 0
        if metamodel.ignore_case:
            flags = re.IGNORECASE
        parser.keyword_regex = re.compile(r'[^\d\W]\w*', flags)

        # Cache it for subsequent calls
        textX_parsers[metamodel.debug] = parser

    # This is used during parser construction phase.
    # Metamodel is filled in. Classes are created based on grammar rules.
    parser.metamodel = metamodel

    # Builtin rules representing primitive types
    parser.root_rule_name = None

    # Parse language description with textX parser
    try:
        parser.parse(language_def)
    except NoMatch as e:
        line, col = parser.pos_to_linecol(e.position)
        raise TextXSyntaxError(text(e), line, col)

    # Construct new parser based on the given language description.
    lang_parser = parser.getASG()

    # Meta-model is constructed. Validate semantics of metamodel.
    parser.metamodel.validate()

    # Meta-model is constructed. Here we connect meta-model and language
    # parser for convenience.
    lang_parser.metamodel = parser.metamodel
    metamodel.parser = lang_parser

    if metamodel.debug:
        # Create dot file for debugging purposes
        PMDOTExporter().exportFile(
            lang_parser.parser_model,
            "{}_parser_model.dot".format(metamodel.rootcls.__name__))

    return lang_parser
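This older variant additionally prepares parser.keyword_regex, the pattern r'[^\d\W]\w*' used to decide whether a plain string match in a grammar looks like an identifier-style keyword (see the str_match_SA reference in the comment). A standalone sketch of what that pattern accepts, using hypothetical test strings:

import re

# A word character that is not a digit, followed by any word characters,
# i.e. an identifier-like token.
keyword_regex = re.compile(r'[^\d\W]\w*', re.IGNORECASE)

for s in ('class', 'if2', '_name', '42', '++'):
    m = keyword_regex.match(s)
    print(s, bool(m and m.group() == s))   # True, True, True, False, False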
Example #6
File: textx.py Project: zeph/textX
def language_from_str(language_def, metamodel):
    """
    Constructs parser and initializes metamodel from language description
    given in textX language.

    Args:
        language_def (str): A language description in textX.
        metamodel (TextXMetaModel): A metamodel to initialize.

    Returns:
        Parser for the new language.
    """

    if metamodel.debug:
        metamodel.dprint("*** PARSING LANGUAGE DEFINITION ***")

    # Check the cache for an already constructed textX parser
    if metamodel.debug in textX_parsers:
        parser = textX_parsers[metamodel.debug]
    else:
        # Create parser for TextX descriptions from
        # the textX grammar specified in this module
        parser = ParserPython(textx_model, comment_def=comment,
                              ignore_case=False,
                              reduce_tree=False, debug=metamodel.debug)

        # Prepare regex used in keyword-like strmatch detection.
        # See str_match_SA
        flags = 0
        if metamodel.ignore_case:
            flags = re.IGNORECASE
        parser.keyword_regex = re.compile(r'[^\d\W]\w*', flags)

        # Cache it for subsequent calls
        textX_parsers[metamodel.debug] = parser

    # This is used during parser construction phase.
    # Metamodel is filled in. Classes are created based on grammar rules.
    parser.metamodel = metamodel

    # Builtin rules representing primitive types
    parser.root_rule_name = None

    # Parse language description with textX parser
    try:
        parser.parse(language_def)
    except NoMatch as e:
        line, col = parser.pos_to_linecol(e.position)
        raise TextXSyntaxError(text(e), line, col)

    # Construct new parser based on the given language description.
    lang_parser = parser.getASG()

    # Meta-model is constructed. Validate semantics of metamodel.
    parser.metamodel.validate()

    # Meta-model is constructed. Here we connect meta-model and language
    # parser for convenience.
    lang_parser.metamodel = parser.metamodel
    metamodel.parser = lang_parser

    if metamodel.debug:
        # Create dot file for debugging purposes
        PMDOTExporter().exportFile(
            lang_parser.parser_model,
            "{}_parser_model.dot".format(metamodel.rootcls.__name__))

    return lang_parser
Example #7
def language_from_str(language_def, metamodel):
    """
    Constructs parser and initializes metamodel from language description
    given in textX language.

    Args:
        language_def (str): A language description in textX.
        metamodel (TextXMetaModel): A metamodel to initialize.

    Returns:
        Parser for the new language.
    """

    if type(language_def) is not text:
        raise TextXError("textX accepts only unicode strings.")

    if metamodel.debug:
        metamodel.dprint("*** PARSING LANGUAGE DEFINITION ***")

    # Check the cache for an already constructed textX parser
    if metamodel.debug in textX_parsers:
        parser = textX_parsers[metamodel.debug]
    else:
        # Create parser for TextX grammars using
        # the arpeggio grammar specified in this module
        parser = ParserPython(textx_model, comment_def=comment,
                              ignore_case=False,
                              reduce_tree=False,
                              memoization=metamodel.memoization,
                              debug=metamodel.debug,
                              file=metamodel.file)

        # Cache it for subsequent calls
        textX_parsers[metamodel.debug] = parser

    # Parse language description with textX parser
    try:
        parse_tree = parser.parse(language_def)
    except NoMatch as e:
        line, col = parser.pos_to_linecol(e.position)
        raise TextXSyntaxError(text(e), line, col)

    # Construct new parser and meta-model based on the given language
    # description.
    lang_parser = visit_parse_tree(parse_tree,
                                   TextXVisitor(parser, metamodel))

    # Meta-model is constructed. Validate its semantics.
    metamodel.validate()

    # Here we connect meta-model and language parser for convenience.
    lang_parser.metamodel = metamodel
    metamodel._parser_blueprint = lang_parser

    if metamodel.debug:
        # Create dot file for debugging purposes
        PMDOTExporter().exportFile(
            lang_parser.parser_model,
            "{}_parser_model.dot".format(metamodel.rootcls.__name__))

    return lang_parser
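In all of the variants above, a NoMatch raised by the underlying Arpeggio parser is converted into a TextXSyntaxError carrying the offending line and column. Those attributes are available to calling code on the raised exception; a hedged sketch, assuming textX is installed and using a deliberately invalid grammar:

from textx import metamodel_from_str
from textx.exceptions import TextXSyntaxError

broken_grammar = "Model items+=Item; Item: name=ID;"   # ':' missing after 'Model'

try:
    metamodel_from_str(broken_grammar)
except TextXSyntaxError as e:
    # line/col come from parser.pos_to_linecol(e.position) inside
    # language_from_str(), as shown in the examples above.
    print(e.line, e.col, e.message)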