Example #1
    def parse(self, content: Optional[str] = None) -> Node:
        """ Actually parses the file given in the constructor.

        Parameters:
            content (str, optional): Optional content of the file. If None, then the file is read from disk with open.

        Returns:
            The root node of the parsed file.
        """
        self.parsed = True
        if content is None:
            with open(self.file, encoding=self.encoding) as fd:
                self.source = fd.read()
        else:
            self.source = content
        self._line_lengths = [
            len(line) + 1
            for line
            in self.source.split('\n')
        ]
        input_stream = antlr4.InputStream(self.source)
        lexer = _LatexLexer(input_stream)
        lexer.removeErrorListeners()
        stream = antlr4.CommonTokenStream(lexer)
        parser = _LatexParser(stream)
        parser.removeErrorListeners()
        error_listener = _SyntaxErrorErrorListener(self.file)
        parser.addErrorListener(error_listener)
        listener = Listener(self)
        walker = antlr4.ParseTreeWalker()
        parse_tree = parser.main()
        walker.walk(listener, parse_tree)
        self.syntax_errors.extend(error_listener.syntax_errors)
        self.root = listener.stack[0]
        return self.root
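The parse method above wires an error listener into the parser and later reads its syntax_errors attribute. Below is a minimal sketch of such a collecting error listener, assuming only the standard ANTLR4 Python runtime; the actual _SyntaxErrorErrorListener used in Example #1 may differ.

from antlr4.error.ErrorListener import ErrorListener

class CollectingErrorListener(ErrorListener):
    """Collects syntax errors instead of printing them to stderr."""

    def __init__(self, file_name):
        super().__init__()
        self.file_name = file_name    # source file being parsed
        self.syntax_errors = []       # (file, line, column, message) tuples

    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        # Invoked by the recognizer for every syntax error it reports.
        self.syntax_errors.append((self.file_name, line, column, msg))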
Example #2
    def morph(cls, src, slicing=False):
        #filename = "Test4.sparql"
        #inputstream = antlr4.InputStream.InputStream(src)
        inputstream = InputStream(src)
        lexer = SparqlLexer(inputstream)
        stream = antlr4.CommonTokenStream(lexer)
        parser = SparqlParser(stream)
        tree = parser.query()

        morpher = MorpherContext3(slicing=slicing)
        listener = MySparqlParserListener(morpher)
        walker = antlr4.ParseTreeWalker()
        walker.walk(listener, tree)

        return morpher.get_result()
Example #3
 def parse(self, input, trace=False):
     if self.useDatabase:
         self.db = model.A2LDatabase(self.fnbase, debug=self.debug)
     lexer = self.lexerClass(input)
     lexer.removeErrorListeners()
     lexer.addErrorListener(MyErrorListener())
     tokenStream = antlr4.CommonTokenStream(lexer)
     #        tokenStream = BufferedTokenStream(lexer)
     parser = self.parserClass(tokenStream)
     parser.setTrace(trace)
     parser.removeErrorListeners()
     parser.addErrorListener(MyErrorListener())
     meth = getattr(parser, self.startSymbol)
     self._syntaxErrors = parser._syntaxErrors
     tree = meth()
     listener = None
     if self.listener:
         if self.useDatabase:
             self.listener.db = self.db
         listener = self.listener()
         walker = antlr4.ParseTreeWalker()
         walker.walk(listener, tree)
     if self.useDatabase:
         self.db.session.commit()
         return self.db
     return listener
Example #4
    def run(self, scalingFactor: float, inputFilename: str, prefix: str,
            directory: str):

        if not prefix:
            # Default the prefix to the input file's base name without extension.
            prefix = os.path.basename(os.path.splitext(inputFilename)[0])

        if not os.path.exists(directory):
            os.makedirs(directory)

        for value in getCodeInsideTIKZAfterUnrolling(inputFilename):
            self.printContents(value)
            input_stream = antlr4.InputStream(value)
            lexer = TikzLexer(input_stream)
            stream = antlr4.CommonTokenStream(lexer)
            parser = TikzParser(stream)
            parser.addErrorListener(TikzErrorListener())
            tree = parser.begin()

            # Save as <prefix>_<n>_graph.graphml, using the next free index
            j = 0
            while os.path.exists(
                    os.path.join(directory, prefix + "_" + str(j) + "_graph.graphml")):
                j += 1
            outputFilename = os.path.join(
                directory, prefix + "_" + str(j) + "_graph.graphml")
            htmlChat = CustomTikzListener(inputFilename, outputFilename,
                                          scalingFactor)
            walker = antlr4.ParseTreeWalker()
            walker.walk(htmlChat, tree)
Example #5
def parseAsJson(input_file):
    """Return a jsonized PDDL for a given file"""

    tree = parse(input_file)
    visitor = Pddl2JsonVisitor()
    antlr4.ParseTreeWalker().walk(visitor, tree)
    return visitor.result
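Despite its name, the Pddl2JsonVisitor above is walked with ParseTreeWalker, so it must implement the listener interface (enter/exit and visitTerminal callbacks) and presumably accumulates its output in a result attribute. A minimal sketch of that accumulate-into-result pattern, using only the runtime's grammar-agnostic base class (the real Pddl2JsonVisitor will differ):

import antlr4

class ResultCollectingListener(antlr4.ParseTreeListener):
    """Builds up a result while the walker traverses the parse tree."""

    def __init__(self):
        self.result = []

    def visitTerminal(self, node):
        # Record each token's text; a real listener would usually override
        # the grammar-specific enter*/exit* methods instead.
        self.result.append(node.getText())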
Example #6
    def __call__(self, parser, namespace, values, option_string=None):
        # antlr4 is not available everywhere, restrict the import scope so that commands
        # that do not need it don't fail when it is absent
        import antlr4

        from azext_scheduled_query.grammar.scheduled_query import (
            ScheduleQueryConditionLexer, ScheduleQueryConditionParser,
            ScheduleQueryConditionValidator)

        usage = 'usage error: --condition {avg,min,max,total,count} ["METRIC COLUMN" from]\n' \
                '                         "QUERY_PLACEHOLDER" {=,!=,>,>=,<,<=} THRESHOLD\n' \
                '                         [resource id RESOURCEID]\n' \
                '                         [where DIMENSION {includes,excludes} VALUE [or VALUE ...]\n' \
                '                         [and   DIMENSION {includes,excludes} VALUE [or VALUE ...] ...]]\n' \
                '                         [at least MinTimeToFail violations out of EvaluationPeriod aggregated points]'
        string_val = ' '.join(values)

        lexer = ScheduleQueryConditionLexer(antlr4.InputStream(string_val))
        stream = antlr4.CommonTokenStream(lexer)
        parser = ScheduleQueryConditionParser(stream)
        tree = parser.expression()

        try:
            validator = ScheduleQueryConditionValidator()
            walker = antlr4.ParseTreeWalker()
            walker.walk(validator, tree)
            scheduled_query_condition = validator.result()
            for item in ['time_aggregation', 'threshold', 'operator']:
                if not getattr(scheduled_query_condition, item, None):
                    raise InvalidArgumentValueError(usage)
        except (AttributeError, TypeError, KeyError):
            raise InvalidArgumentValueError(usage)
        super(ScheduleQueryConditionAction,
              self).__call__(parser, namespace, scheduled_query_condition,
                             option_string)
Example #7
    def parse(self, input, trace=False):
        lexer = self.lexerClass(input)
        tokenStream = antlr4.CommonTokenStream(lexer)
        parser = self.parserClass(tokenStream)
        parser.setTrace(bool(trace))
        meth = getattr(parser, self.startSymbol)
        self._syntaxErrors = parser._syntaxErrors
        tree = meth()

        listener = osekoilListener()
        walker = antlr4.ParseTreeWalker()
        walker.walk(listener, tree)

        #        pprint(tree.toStringTree())
        """
        JavaLexer lexer = new JavaLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        JavaParser parser = new JavaParser(tokens);

        ParseTree tree = parser.compilationUnit(); // parse
        ParseTreeWalker walker = new ParseTreeWalker(); // create standard walker
        ExtractInterfaceListener extractor = new ExtractInterfaceListener(parser);
        walker.walk(extractor, tree); // initiate walk of tree with listener
        """

        return tree
Example #8
def _compile_file(file_to_compile, src_dir):
    """
    Compiles the given file, which should actually be a path to a file.
    """
    output_file_name = os.path.splitext(file_to_compile)[0]
    # Now we have output_file_name = "path/to/file" without the .ndl
    # but we want "file" not "path/to/file", so:
    output_file_name = output_file_name.split(os.path.sep)[-1]

    input = antlr4.FileStream(file_to_compile)
    lexer = NNDLLexer.NNDLLexer(input)
    stream = antlr4.CommonTokenStream(lexer)
    parser = NNDLParser.NNDLParser(stream)
    tree = parser.prog()

    # Walk the tree and generate the dot file
    print("Walking tree...")
    dg = dotgen.DotGenerator(file_to_compile, output_file_name + ".dot")
    walker = antlr4.ParseTreeWalker()
    walker.walk(dg, tree)

    # Use the dotgenerator's network that it figured out from the nndl file
    # to generate the cpp file
    print("Generating cpp files...")
    nw = dg._network
    cppwriter.write_file(nw, output_file_name, src_dir)
Example #9
    def __call__(self, parser, namespace, values, option_string=None):
        from azure.cli.command_modules.monitor.grammar.autoscale import (
            AutoscaleConditionLexer, AutoscaleConditionParser,
            AutoscaleConditionValidator)

        # pylint: disable=line-too-long
        usage = 'usage error: --condition ["NAMESPACE"] METRIC {==,!=,>,>=,<,<=} THRESHOLD {avg,min,max,total,count} PERIOD\n' \
                '                         [where DIMENSION {==,!=} VALUE [or VALUE ...]\n' \
                '                         [and   DIMENSION {==,!=} VALUE [or VALUE ...] ...]]'

        string_val = ' '.join(values)

        lexer = AutoscaleConditionLexer(antlr4.InputStream(string_val))
        stream = antlr4.CommonTokenStream(lexer)
        parser = AutoscaleConditionParser(stream)
        tree = parser.expression()

        try:
            validator = AutoscaleConditionValidator()
            walker = antlr4.ParseTreeWalker()
            walker.walk(validator, tree)
            autoscale_condition = validator.result()
            for item in [
                    'time_aggregation', 'metric_name', 'threshold', 'operator',
                    'time_window'
            ]:
                if not getattr(autoscale_condition, item, None):
                    raise CLIError(usage)
        except (AttributeError, TypeError, KeyError):
            raise CLIError(usage)

        namespace.condition = autoscale_condition
Example #10
def main(argv):
    parser = argparse.ArgumentParser(
        description="Clang diagnostics group parser")
    common.add_common_parser_options(parser)
    parser.add_argument("groups_file",
                        metavar="groups-file",
                        help="""\
The path of clang diagnostic groups file.
""")
    args = parser.parse_args(argv[1:])

    string_input = antlr4.FileStream(args.groups_file)
    lexer = TableGenLexer.TableGenLexer(string_input)
    stream = antlr4.CommonTokenStream(lexer)
    parser = TableGenParser.TableGenParser(stream)
    tree = parser.expression()

    diagnostics = ClangDiagnosticGroupsListener()
    walker = antlr4.ParseTreeWalker()
    walker.walk(diagnostics, tree)

    for name in sorted(diagnostics.switchNames.keys(),
                       key=lambda x: x.lower()):
        if args.top_level and not is_root_class(diagnostics, name):
            continue
        dummy_string = create_dummy_text(diagnostics, name)
        print("-W%s%s" % (name, dummy_string))
        if args.unique:
            continue
        print_references(diagnostics, name, 1)
Example #11
def parse(data, listener=BlackbirdListener, cwd=None):
    """Parse a blackbird data stream.

    Args:
        data (antlr4.InputStream): ANTLR4 data stream of the Blackbird script
        listener (BlackbirdListener): a Blackbird listener class used to walk the AST.
            By default, the basic :class:`~.BlackbirdListener` defined above
            is used.

    Returns:
        BlackbirdProgram: returns an instance of the :class:`BlackbirdProgram` class after
        parsing the abstract syntax tree
    """
    lexer = blackbirdLexer(data)
    stream = antlr4.CommonTokenStream(lexer)

    parser = blackbirdParser(stream)
    parser.removeErrorListeners()
    parser.addErrorListener(BlackbirdErrorListener())
    tree = parser.start()

    blackbird = listener(cwd=cwd)
    walker = antlr4.ParseTreeWalker()
    walker.walk(blackbird, tree)

    return blackbird.program
Example #12
def main() -> None:
    file_name = "/home/raptor/projects/thrifty/thriftpy/echo.thrift"
    with open(file_name, 'r', encoding='utf-8') as f:
        lexer = ThriftLexer(antlr4.InputStream(f.read()))

    token_stream = antlr4.CommonTokenStream(lexer)

    parser = ThriftParser(token_stream)

    tree_walker = antlr4.ParseTreeWalker()

    file_loader = FileLoader(name=file_name)
    tree_walker.walk(file_loader, parser.document())

    model = file_loader.thrifty_file

    # ====================================================
    # generate the files
    # ====================================================
    template_name = "/home/raptor/projects/thrifty/thriftgen/thriftgen/templates/py3/service.pyi.hbs"
    with open(template_name, 'r', encoding='utf-8') as template_file:
        template = template_file.read()

    hbs = pybars.Compiler().compile(source=template)
    print(hbs(model, helpers=helpers))
Example #13
def main():

    parser = argparse.ArgumentParser(
        description='Generate performance model part a')
    parser.add_argument('-kernelfile',
                        type=str,
                        nargs=1,
                        help='Input file (OpenCL)',
                        required=True)
    parser.add_argument('-v',
                        action='store_true',
                        help='Verbose mode to print more',
                        required=False)

    args = parser.parse_args()

    verbosity_needed = False
    if args.v:
        verbosity_needed = True
        print('Verbose mode')

    filename = args.kernelfile[0]
    print(filename)
    ipt = antlr4.FileStream(args.kernelfile[0])

    lexer = CLexer(ipt)
    stream = antlr4.CommonTokenStream(lexer)
    parser = CParser(stream)

    tree = parser.compilationUnit()

    printer = CPrintListener(filename, verbosity_needed)

    walker = antlr4.ParseTreeWalker()
    walker.walk(printer, tree)
Example #14
    def __call__(self, parser, namespace, values, option_string=None):
        from azure.cli.command_modules.monitor.grammar import (
            MetricAlertConditionLexer, MetricAlertConditionParser,
            MetricAlertConditionValidator)

        usage = 'usage error: --condition {avg,min,max,total,count} [NAMESPACE.]METRIC {=,!=,>,>=,<,<=} THRESHOLD\n' \
                '                         [where DIMENSION {includes,excludes} VALUE [or VALUE ...]\n' \
                '                         [and   DIMENSION {includes,excludes} VALUE [or VALUE ...] ...]]'

        string_val = ' '.join(values)

        lexer = MetricAlertConditionLexer(antlr4.InputStream(string_val))
        stream = antlr4.CommonTokenStream(lexer)
        parser = MetricAlertConditionParser(stream)
        tree = parser.expression()

        try:
            validator = MetricAlertConditionValidator()
            walker = antlr4.ParseTreeWalker()
            walker.walk(validator, tree)
            metric_condition = validator.result()
            for item in [
                    'time_aggregation', 'metric_name', 'threshold', 'operator'
            ]:
                if not getattr(metric_condition, item, None):
                    raise CLIError(usage)
        except (AttributeError, TypeError, KeyError):
            raise CLIError(usage)
        super(MetricAlertConditionAction,
              self).__call__(parser, namespace, metric_condition,
                             option_string)
Example #15
def partOne(inp):
    parseTree = getTree(inp)
    walker = antlr4.ParseTreeWalker()
    counter = NumCounter()
    walker.walk(counter, parseTree)

    return counter.getCount()
Example #16
def main():
    text = ''.join(line[6:73] for line in sys.stdin)
    lexer = copybook_lexer(antlr4.InputStream(text))
    stream = antlr4.CommonTokenStream(lexer)
    parser = copybook_parser(stream)
    tree = parser.main()
    walker = antlr4.ParseTreeWalker()
    walker.walk(Listener(), tree)
Example #17
def apply_listener(string_value, listener):
    string_input = antlr4.InputStream(string_value)
    lexer = GccOptionsLexer.GccOptionsLexer(string_input)
    stream = antlr4.CommonTokenStream(lexer)
    parser = GccOptionsParser.GccOptionsParser(stream)
    tree = parser.optionAttributes()
    walker = antlr4.ParseTreeWalker()
    walker.walk(listener, tree)
Example #18
def parser(rdl_string):
    inputstream = antlr4.InputStream(rdl_string)
    lexer = SystemRDLLexer(inputstream)
    stream = antlr4.CommonTokenStream(lexer)
    parser = SystemRDLParser(stream)
    tree = parser.root()
    listener = Listener(parser)
    walker = antlr4.ParseTreeWalker()
    return walker, listener, tree
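The helper in Example #18 returns the walker, listener, and tree without walking, so the caller presumably finishes the traversal itself. A sketch of that usage, assuming the names above:

walker, listener, tree = parser(rdl_string)   # rdl_string holds the SystemRDL source
walker.walk(listener, tree)                   # populate the listener by traversing the tree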
Example #19
def parse(s):
    lexer = Lexer(antlr4.InputStream('\n'.join(s)))
    parser = Parser(antlr4.CommonTokenStream(lexer))
    # parser._errHandler = antlr4.error.ErrorStrategy.BailErrorStrategy()

    symbols = Collector()
    antlr4.ParseTreeWalker().walk(symbols, parser.spec())

    return symbols
Example #20
def partTwo(inp):
    parseTree = getTree(inp)
    walker = antlr4.ParseTreeWalker()

    painter = RedPainter()
    walker.walk(painter, parseTree)

    counter = NoRedCounter()
    walker.walk(counter, parseTree)

    return counter.getCount()
Example #21
 def parse(self, input):
     lexer = self.lexerClass(input)
     tokenStream = antlr4.CommonTokenStream(lexer)
     parser = self.parserClass(tokenStream)
     meth = getattr(parser, self.startSymbol)
     self._syntaxErrors = parser._syntaxErrors
     tree = meth()
     listener = amllib.Listener()
     walker = antlr4.ParseTreeWalker()
     walker.walk(listener, tree)
     return tree
Example #22
def _parse_string(s):
    lexer = minemeld.ft.condition.BoolExprLexer(antlr4.InputStream(s))
    stream = antlr4.CommonTokenStream(lexer)
    parser = minemeld.ft.condition.BoolExprParser(stream)
    tree = parser.booleanExpression()

    eb = ExprBuilder()
    walker = antlr4.ParseTreeWalker()
    walker.walk(eb, tree)

    return eb
Example #23
def parse(input_stream):
    lexer = TagmaLexer(input_stream)
    stream = antlr4.CommonTokenStream(lexer)
    parser = TagmaParser(stream)
    tree = parser.prog()
    loader = TagmaLoader()
    walker = antlr4.ParseTreeWalker()
    walker.walk(loader, tree)
    # printRule(loader.rule_list)
    # print_tree(tree, parser.ruleNames)
    return loader.rule_list
Example #24
    def __init__(self):
        self.tables = []
        self.columns = []
        self.column_aliases = []
        self.keywords = []
        self.functions = []
        self.column_name_listener = ColumnNameListener()
        self.table_name_listener = TableNameListener()
        self.walker = antlr4.ParseTreeWalker()

        self.data = []
Example #25
    def _parse_boolexpr(self, s):
        lexer = BoolExprLexer(antlr4.InputStream(s))
        stream = antlr4.CommonTokenStream(lexer)
        parser = BoolExprParser(stream)
        tree = parser.booleanExpression()

        eb = _BECompiler()
        walker = antlr4.ParseTreeWalker()
        walker.walk(eb, tree)

        return eb.expression, eb.comparator, eb.value
Example #26
def get_grammar_dependencies(grammar: str) -> DependencyListener:
    """Parses JSGF grammar and creates a dependency graph."""
    # Parse
    parser = get_parser(grammar)
    context = parser.r()
    walker = antlr4.ParseTreeWalker()

    listener = DependencyListener()
    walker.walk(listener, context)

    return listener
Example #27
    def __init__(self, query=None):
        self.walker = antlr4.ParseTreeWalker()

        self.columns = set()
        self.keywords = set()
        self.functions = set()
        self.display_columns = []
        self.syntax_error_listener = SyntaxErrorListener()
        if query is not None:
            self._query = self._strip_query(query)
            self.process_query()
Example #28
    def _parse_input(text, cwd=None):
        """The parser fixture accepts a blackbird string to parse"""
        lexer = blackbirdLexer(antlr4.InputStream(text))
        stream = antlr4.CommonTokenStream(lexer)
        parser = blackbirdParser(stream)

        tree = parser.start()

        bb = BlackbirdListener(cwd=cwd)
        walker = antlr4.ParseTreeWalker()
        walker.walk(bb, tree)
        return bb.program
Example #29
def parse(text):
    input_stream = antlr4.InputStream(text)
    lexer = ModelicaLexer(input_stream)
    stream = antlr4.CommonTokenStream(lexer)
    parser = ModelicaParser(stream)
    # parser.buildParseTrees = False
    parse_tree = parser.stored_definition()
    ast_listener = ASTListener()
    parse_walker = antlr4.ParseTreeWalker()
    parse_walker.walk(ast_listener, parse_tree)
    modelica_file = ast_listener.ast_result
    return file_to_tree(modelica_file)
Example #30
def parse_modified_tokens(code):
    code_stream = antlr4.InputStream(code)
    lexer = JavaLexer(code_stream)
    token_stream = antlr4.CommonTokenStream(lexer)
    parser = JavaParser(token_stream)
    tree = parser.compilationUnit()

    printer = KeyPrinter()
    walker = antlr4.ParseTreeWalker()
    walker.walk(printer, tree)

    return printer.get_result()
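Every example above follows the same pipeline: an InputStream or FileStream feeds a generated lexer, a CommonTokenStream feeds the generated parser, the chosen start rule produces a parse tree, and ParseTreeWalker.walk drives a listener over it. The sketch below shows that pattern end to end with hypothetical generated classes MyGrammarLexer and MyGrammarParser and a hypothetical start rule; only the antlr4 runtime calls are real.

import antlr4

# MyGrammarLexer / MyGrammarParser stand in for the classes ANTLR generates
# from a grammar (e.g. with -Dlanguage=Python3); they do not ship with the runtime.
from MyGrammarLexer import MyGrammarLexer
from MyGrammarParser import MyGrammarParser


class RuleCounter(antlr4.ParseTreeListener):
    """Counts how many rule contexts the walker enters."""

    def __init__(self):
        self.count = 0

    def enterEveryRule(self, ctx):
        self.count += 1


def count_rules(text):
    input_stream = antlr4.InputStream(text)
    lexer = MyGrammarLexer(input_stream)
    stream = antlr4.CommonTokenStream(lexer)
    parser = MyGrammarParser(stream)
    tree = parser.start()          # `start` is assumed to be the grammar's entry rule
    counter = RuleCounter()
    antlr4.ParseTreeWalker().walk(counter, tree)
    return counter.count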