Example #1
    def process(self, infile, outfile):
        """
        Processes an SDoc2 document.

        :param str infile: The input filename with the SDoc2 document.
        :param str outfile: The output filename with the target document.
        """
        in_stream = antlr4.FileStream(infile, 'utf-8')

        lexer = sdoc2Lexer(in_stream)
        tokens = antlr4.CommonTokenStream(lexer)
        parser = sdoc2Parser(tokens)
        tree = parser.sdoc()
        visitor = SDoc2Visitor()

        visitor.visit(tree)

        sdoc.sdoc2.node_store.prepare_content_tree()
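
Every example in this listing follows the same ANTLR4 runtime pipeline: a FileStream (or InputStream) feeds a generated lexer, a CommonTokenStream buffers tokens for the generated parser, and a visitor or listener consumes the resulting parse tree. The sketch below shows that pipeline in isolation; it assumes a hypothetical grammar compiled with `antlr4 -Dlanguage=Python3 -visitor MyGrammar.g4`, so the MyGrammar* names and the `start` rule are placeholders, not code from any example here.

import antlr4

# Hypothetical classes generated by: antlr4 -Dlanguage=Python3 -visitor MyGrammar.g4
from MyGrammarLexer import MyGrammarLexer
from MyGrammarParser import MyGrammarParser
from MyGrammarVisitor import MyGrammarVisitor


def parse_file(path):
    in_stream = antlr4.FileStream(path, encoding='utf-8')  # read and decode the source file
    lexer = MyGrammarLexer(in_stream)                       # characters -> tokens
    tokens = antlr4.CommonTokenStream(lexer)                # token buffer for the parser
    parser = MyGrammarParser(tokens)
    tree = parser.start()                                   # invoke the grammar's start rule
    return MyGrammarVisitor().visit(tree)                   # evaluate the parse tree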
Example #2
def compile(filename):

    # prepare file to be parsed
    inputfile = a4.FileStream(filename)
    lexer = ClassLayoutLexer(inputfile)
    stream = a4.CommonTokenStream(lexer)
    # parse to a tree
    parser = ClassLayoutParser(stream)
    tree = parser.u2cFile()

    # open the listener
    fl = FileListener()

    # walk the tree
    walker = a4.ParseTreeWalker()
    walker.walk(fl, tree)

    print(fl.c.write('java'))
Example #3
    def process(self, infile, outfile):
        """
        Processes an SDoc1 document.

        :param str infile: The input filename with the SDoc1 document.
        :param str outfile: The output filename with the SDoc2 document.
        """
        in_stream = antlr4.FileStream(infile)
        out_stream = open(outfile, 'wt')

        lexer = sdoc1Lexer(in_stream)
        tokens = antlr4.CommonTokenStream(lexer)
        parser = sdoc1Parser(tokens)
        tree = parser.sdoc()
        visitor = SDoc1Visitor(
            root_dir=os.path.dirname(os.path.realpath(infile)))

        visitor.set_output(out_stream)
        visitor.visit(tree)
Example #4
    def create_program(self, templatefilepath, directory, filename):
        tmpfilepath = '/tmp/' + templatefilepath.split('/')[-1]
        with open(tmpfilepath, 'w') as tmpfile:
            tmpfile.write(ReOrder(templatefilepath).reordered_code)

        template = antlr4.FileStream(tmpfilepath)
        lexer = Template2Lexer(template)
        stream = antlr4.CommonTokenStream(lexer)
        parser = Template2Parser(stream)
        template = parser.template()

        walker = ParseTreeWalker()
        walker.walk(self, template)

        with open(
                directory + '/' +
                filename.split('/')[-1].replace('.template', '.py'),
                'w') as modelfile:
            modelfile.write(self.output_program)
Example #5
    def process(self, infile: str) -> int:
        """
        Processes an SDoc2 document and returns the error count.

        :param str infile: The input filename with the SDoc2 document.
        """
        in_stream = antlr4.FileStream(infile, 'utf-8')

        lexer = sdoc2Lexer(in_stream)
        tokens = antlr4.CommonTokenStream(lexer)
        parser = sdoc2Parser(tokens)
        tree = parser.sdoc()
        visitor = SDoc2Visitor(infile, self._io)

        visitor.visit(tree)

        sdoc2.node_store.prepare_content_tree()

        return visitor.errors
Example #6
def main(argv):
    if len(argv) != 3:
        raise AttributeError('invalid number of arguments to compiler')
    input_file, project_dir = argv[1:]
    if not input_file.endswith('.lat'):
        raise AttributeError('input_file must have `.lat` extension')

    out_path = os.path.dirname(input_file)
    base_name = os.path.split(input_file)[1][:-4]
    out_base_name = os.path.join(out_path, base_name)

    input_file_stream = antlr4.FileStream(input_file)
    syntax_error_listener = LatteParserErrorListener()

    lexer = LatteLexer(input_file_stream)
    lexer.removeErrorListeners()
    lexer.addErrorListener(syntax_error_listener)
    token_stream = antlr4.CommonTokenStream(lexer)

    parser = LatteParser(token_stream)
    parser.removeErrorListeners()
    parser.addErrorListener(syntax_error_listener)
    prog_tree = parser.program()

    compiler = LLVMCompiler()
    code = compiler.visit_prog(prog_tree)
    print('OK', file=sys.stderr)

    ll_file_path = out_base_name + '.ll'
    runtime_path = os.path.join(project_dir, 'lib', 'runtime.bc')
    bc_no_runtime_path = out_base_name + '_no_runtime.bc'
    bc_final_path = out_base_name + '.bc'
    with open(ll_file_path, 'w') as f:
        f.write(code)
        print(f'Saved {ll_file_path}')
    if os.system(f'llvm-as -o {bc_no_runtime_path} {ll_file_path}') != 0:
        sys.exit(3)
    print(f'Compiled to {bc_no_runtime_path}')
    if os.system(f'llvm-link -o {bc_final_path} '
                 f'{bc_no_runtime_path} {runtime_path}') != 0:
        sys.exit(4)
    os.remove(bc_no_runtime_path)
    print(f'Linked to runtime: {bc_final_path}')
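
Example #6 replaces ANTLR's default console error listeners with a custom LatteParserErrorListener, whose definition is not shown. Such listeners are typically subclasses of the runtime's ErrorListener; the following is only a rough sketch of that shape (the class name and the decision to raise are illustrative assumptions, not the project's actual code).

from antlr4.error.ErrorListener import ErrorListener


class RaisingErrorListener(ErrorListener):
    """Turns the first syntax error reported by the lexer or parser into an exception."""

    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        # The runtime calls this for every syntax error it encounters.
        raise SyntaxError('line {}:{} {}'.format(line, column, msg))

# Attached the same way as in the example above:
#   lexer.removeErrorListeners(); lexer.addErrorListener(RaisingErrorListener())
#   parser.removeErrorListeners(); parser.addErrorListener(RaisingErrorListener())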
Example #7
    def get_pyro_features(self, pyrofile_path):
        import antlr4
        from antlr4 import ParseTreeWalker
        from parser.Python3Parser import Python3Parser
        from parser.Python3Lexer import Python3Lexer

        pyrofile = antlr4.FileStream(pyrofile_path)
        lexer = Python3Lexer(pyrofile)
        stream = antlr4.CommonTokenStream(lexer)
        parser = Python3Parser(stream)

        code = parser.file_input()
        walker = ParseTreeWalker()
        walker.walk(self, code)
        feature_vector = {}
        for k in self.post_map:
            if 'post' in self.post_map[k]:
                feature_vector['vi_' + self.post_map[k]['prior'] + '_' +
                               self.post_map[k]['post']] = 1
        return feature_vector
Example #8
    def test_parse_file(self, tmpdir):
        """Test that device name is extracted"""
        filename = tmpdir.join("test.xbb")

        with open(filename, "w") as f:
            f.write(test_file)

        bb = parse(antlr4.FileStream(filename))

        assert bb._var == {"alpha": 0.3423}

        expected = {"name": "fock", "options": {"num_subsystems": 1, "cutoff_dim": 7, "shots": 10}}
        assert bb.target == expected

        expected = [
            {"op": "Coherent", "args": [0.3423, np.sqrt(np.pi)], "kwargs": {}, "modes": [0]},
            {"op": "MeasureFock", "args": [], "kwargs": {}, "modes": [0]},
        ]

        assert bb.operations == expected
Example #9
def main():
    # command line
    parser = argparse.ArgumentParser(description='Exec/Type mu files.')
    parser.add_argument('path', type=str, help='file to exec and type')
    args = parser.parse_args()

    # lex and parse
    input_s = antlr4.FileStream(args.path, encoding='utf8')
    lexer = MuLexer(input_s)
    stream = antlr4.CommonTokenStream(lexer)
    parser = MuParser(stream)
    tree = parser.prog()

    # eval Visitor - You have some TODOS in this file!
    visitor2 = MuEvalVisitor()
    try:
        visitor2.visit(tree)
    except (MuRuntimeError, MuSyntaxError) as e:
        print(e.args[0])
        exit(1)
Example #10
def initialize_transform_rules():

    # custom transform rules defined in a file
    rule_file = './rule.txt'

    input_stream = antlr4.FileStream(rule_file)
    lexer = rulesLexer(input_stream)
    stream = antlr4.CommonTokenStream(lexer)
    parser = rulesParser(stream)
    tree = parser.rules()

    rules = set()
    for single_rule in tree.getChildren():  # child indices here depend on the rules defined in the .g4 file
        rule_name = single_rule.getChild(0).getText()  # the leading part is the rule name
        origin_parse_tree = expr.ParseTree(
            single_rule.getChild(2))  # input pattern
        transformed_parse_tree = expr.ParseTree(
            single_rule.getChild(4))  # output pattern
        if single_rule.getChildCount() == 10:
            cp_parse_tree = None
            pp_parse_tree = None
            weight = int(single_rule.getChild(8).getText()[1:])

        elif single_rule.getChildCount() == 11:
            if single_rule.getChild(6).getText() != '@':
                cp_parse_tree = expr.ParseTree(single_rule.getChild(6))
                pp_parse_tree = None
            else:
                cp_parse_tree = None
                pp_parse_tree = expr.ParseTree(single_rule.getChild(7))
            weight = int(single_rule.getChild(9).getText()[1:])

        else:
            cp_parse_tree = expr.ParseTree(single_rule.getChild(6))
            pp_parse_tree = expr.ParseTree(single_rule.getChild(8))
            weight = int(single_rule.getChild(10).getText()[1:])

        rules.add(
            TransformRule(rule_name, origin_parse_tree, transformed_parse_tree,
                          cp_parse_tree, pp_parse_tree, weight))
    return rules
Example #11
File: main.py Project: lraszkiewicz/mrjp
def main(argv):
    if len(argv) != 4:
        raise AttributeError('invalid number of arguments to compiler')
    input_file, target_vm, project_dir = argv[1:]
    if not input_file.endswith('.ins'):
        raise AttributeError('input_file must have `.ins` extension')

    out_path = os.path.dirname(input_file)
    base_name = os.path.split(input_file)[1][:-4]
    out_base_name = os.path.join(out_path, base_name)

    input_file_stream = antlr4.FileStream(input_file)
    lexer = InstantLexer(input_file_stream)
    token_stream = antlr4.CommonTokenStream(lexer)
    parser = InstantParser(token_stream)
    prog_tree = parser.prog()
    if target_vm == 'jvm':
        compiler = JVMCompiler(base_name)
    elif target_vm == 'llvm':
        compiler = LLVMCompiler()
    else:
        raise AttributeError(f'unknown target VM: `{target_vm}`')

    code = compiler.visit_prog(prog_tree)

    if target_vm == 'llvm':
        ll_file_path = out_base_name + '.ll'
        bc_file_path = out_base_name + '.bc'
        with open(ll_file_path, 'w') as f:
            f.write(code)
            print(f'Saved {ll_file_path}')
        os.system(f'llvm-as {ll_file_path} -o {bc_file_path}')
        print(f'Compiled to {bc_file_path}')
    elif target_vm == 'jvm':
        j_file_path = out_base_name + '.j'
        with open(j_file_path, 'w') as f:
            f.write(code)
            print(f'Saved {j_file_path}')
        jasmin_path = os.path.join(project_dir, 'lib', 'jasmin.jar')
        os.system(f'java -jar {jasmin_path} -d {out_path} {j_file_path}')
Example #12
    def visitCmd_include(self, ctx):
        """
        Includes another SDoc into this SDoc.

        :param sdoc1Parser.Cmd_includeContext ctx: The parse tree.
        """
        # Test the maximum include level.
        if self._include_level >= self._options['max_include_level']:
            raise RuntimeError(
                "Maximum include level exceeded."
            )  # @todo More verbose logging, own exception class.

        # Open a stream for the sub-document.
        file_name = sdoc.unescape(ctx.SIMPLE_ARG().getText())
        if not os.path.isabs(file_name):
            file_name = os.path.join(self._root_dir, file_name + '.sdoc')
        print("Including %s" % os.path.relpath(file_name))
        stream = antlr4.FileStream(file_name, 'utf-8')

        # root_dir

        # Create a new lexer and parser for the sub-document.
        lexer = sdoc1Lexer(stream)
        tokens = antlr4.CommonTokenStream(lexer)
        parser = sdoc1Parser(tokens)
        tree = parser.sdoc()

        # Create a visitor.
        visitor = SDoc1Visitor(
            root_dir=os.path.dirname(os.path.realpath(file_name)))

        # Set or inherit properties from the parser of the parent document.
        visitor._include_level = self._include_level + 1
        visitor.set_output(self._output)
        visitor._set_global_scope(self._global_scope)

        # Run the visitor on the parse tree.
        visitor.visit(tree)

        self.put_position(ctx, 'stop')
Example #13
    def create_program(self):
        template = antlr4.FileStream(self.templatefile)
        lexer = TemplateLexer(template)
        stream = antlr4.CommonTokenStream(lexer)
        parser = TemplateParser(stream)
        self.visit(parser.template())

        # write data
        with open(self._directory + '/data.json', 'w') as datafile:
            for k in six.iterkeys(self.data_json):
                if isinstance(self.data_json[k], np.ndarray):
                    self.data_json[k] = self.data_json[k].tolist()
            json.dump(self.data_json, datafile)

        with open(self._directory + '/model.stan', 'w') as modelfile:
            modelfile.write(self.output_program)
            modelfile.write(self.model_string)
            try:
                if self.config["stan"]["quants"] is True:
                    modelfile.write(self.add_quants())
            except:
                pass
Example #14
def main():
    # command line
    parser = argparse.ArgumentParser(description='Exec/Type mu files.')
    parser.add_argument('path', type=str,
                        help='file to exec and type')
    args = parser.parse_args()

    # lex and parse
    input_s = antlr4.FileStream(args.path, encoding='utf8')
    lexer = MuLexer(input_s)
    counter = CountErrorListener()
    lexer._listeners.append(counter)
    stream = antlr4.CommonTokenStream(lexer)
    parser = MuParser(stream)
    parser._listeners.append(counter)
    tree = parser.prog()
    if counter.count > 0:
        exit(1)  # Syntax or lexical errors occurred; don't try to go further.

    # typing Visitor - This is given to you
    if enable_typing:
        visitor1 = MuTypingVisitor()
        try:
            visitor1.visit(tree)
        except MuTypeError as e:
            print(e.args[0])
            exit(1)

    # eval Visitor - You have some TODOS in this file!
    visitor2 = MuEvalVisitor()
    try:
        visitor2.visit(tree)
    except MuRuntimeError as e:
        print(e.args[0])
        exit(1)
    except MuInternalError as e:
        print(e.args[0])
        exit(2)
Example #15
File: tools.py Project: jwoehr/openqasm
def pretty_tree(*, program: str = None, file: str = None) -> str:
    """Get a pretty-printed string of the parsed AST of the QASM input.

    The input will be taken either verbatim from the string ``program``, or read
    from the file with name ``file``.  Use exactly one of the possible input
    arguments, passed by keyword.

    Args:
        program: a string containing the QASM to be parsed.
        file: a string of the filename containing the QASM to be parsed.

    Returns:
        a pretty-printed version of the parsed AST of the given program.

    Raises:
        ValueError: no input is given, or too many inputs are given.
        Qasm3ParserError: the input was not parseable as valid QASM 3.
    """
    if program is not None and file is not None:
        raise ValueError("Must supply only one of 'program' and 'file'.")
    if program is not None:
        input_stream = antlr4.InputStream(program)
    elif file is not None:
        input_stream = antlr4.FileStream(file, encoding="utf-8")
    else:
        raise ValueError("One of 'program' and 'file' must be supplied.")

    # ANTLR errors (lexing and parsing) are sent to stderr, which we redirect
    # to the variable `err`.
    with io.StringIO() as err, contextlib.redirect_stderr(err):
        lexer = qasm3Lexer(input_stream)
        token_stream = antlr4.CommonTokenStream(lexer)
        parser = qasm3Parser(token_stream)
        tree = _pretty_tree_inner(parser.program(), parser.ruleNames, 0)
        error = err.getvalue()
    if error:
        raise Qasm3ParserError(f"Parse tree build failed. Error:\n{error}")
    return tree
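
Example #15 is the only snippet here that accepts either an in-memory string or a file path; the two cases differ only in how the character stream is built, since InputStream and FileStream both produce the stream type a generated lexer expects. A brief sketch of that choice:

import antlr4

def make_char_stream(program=None, file=None):
    # In-memory source: wrap the string directly.
    if program is not None:
        return antlr4.InputStream(program)
    # On-disk source: let the runtime read and decode the file.
    return antlr4.FileStream(file, encoding='utf-8')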
Example #16
def main():
    # command line
    parser = argparse.ArgumentParser(description='Exec/Type mu files.')
    parser.add_argument('path', type=str,
                        help='file to exec and type')
    args = parser.parse_args()

    # lex and parse
    input_s = antlr4.FileStream(args.path, encoding='utf8')
    lexer = MiniCLexer(input_s)
    counter = CountErrorListener()
    lexer._listeners.append(counter)
    stream = antlr4.CommonTokenStream(lexer)
    parser = MiniCParser(stream)
    parser._listeners.append(counter)
    tree = parser.prog()
    if counter.count > 0:
        exit(3)  # Syntax or lexical errors occurred

    # typing Visitor
    if enable_typing:
        typing_visitor = MiniCTypingVisitor()
        try:
            typing_visitor.visit(tree)
        except MiniCTypeError as e:
            print(e.args[0])
            exit(2)

    # interpret Visitor
    interpreter_visitor = MiniCInterpretVisitor()
    try:
        interpreter_visitor.visit(tree)
    except MiniCRuntimeError as e:
        print(e.args[0])
        exit(1)
    except MiniCInternalError as e:
        print(e.args[0])
        exit(4)
Example #17
    def exitInclude(self, ctx: blackbirdParser.IncludeContext):
        """Run after exiting include statement.

        Args:
            ctx: IncludeContext
        """
        filename = os.path.join(self._cwd, ctx.STR().getText()[1:-1])

        # check if filename has already been included
        for _, f in self._includes.items():
            if f[0] == filename:
                return

        cwd = os.path.dirname(filename)
        data = antlr4.FileStream(filename)

        # parse the included file
        lexer = blackbirdLexer(data)
        stream = antlr4.CommonTokenStream(lexer)

        parser = blackbirdParser(stream)
        parser.removeErrorListeners()
        parser.addErrorListener(BlackbirdErrorListener())
        tree = parser.start()

        listener = BlackbirdListener(cwd=cwd)
        walker = antlr4.ParseTreeWalker()
        walker.walk(listener, tree)

        # add parsed blackbird program to the include
        # dictionary
        bb = listener.program
        self._includes[bb.name] = [filename, bb]

        # make sure to also add all nested includes
        # to the top level include dictionary
        self._includes.update(listener._includes)
Example #18
    def process(self, infile: str, outfile: str) -> int:
        """
        Processes an SDoc1 document.

        :param str infile: The input filename with the SDoc1 document.
        :param str outfile: The output filename with the SDoc2 document.
        """
        in_stream = antlr4.FileStream(infile)

        self._io.writeln('Writing <fso>{0!s}</fso>'.format(outfile))
        with open(outfile, 'wt') as out_stream:
            lexer = sdoc1Lexer(in_stream)
            tokens = antlr4.CommonTokenStream(lexer)
            parser = sdoc1Parser(tokens)
            tree = parser.sdoc()

            visitor = SDoc1Visitor(self._io,
                                   root_dir=os.path.dirname(
                                       os.path.realpath(infile)))

            visitor.output = out_stream
            visitor.visit(tree)

            return visitor.errors
Example #19
    def run(self):
        """
        Runs the compiler
        """
        logger = logging.getLogger("compiler")
        if not self.quiet:
            print("SimplePOS Compiler")
            print("Compiling", self.inputFile, "to", self.outputFile)

        inputFile = antlr4.FileStream(self.inputFile, encoding="utf-8")
        lexer = SimplePOSLexer(inputFile)
        stream = antlr4.CommonTokenStream(lexer)

        parser = SimplePOSParser(stream)
        counter = ErrorCounter(self.inputFile)
        parser.removeErrorListeners()
        parser.addErrorListener(counter)
        logger.debug("Starting ANTLR4 parser")
        tree = parser.sourcefile()
        if counter.errorCount > 0:
            sys.exit(-1)
        logger.debug("Parsing complete without errors")

        listener = CompilerListener(self.inputFile, self.outputFile)
        walker = antlr4.ParseTreeWalker()
        logger.debug("Walking syntax tree to generate "
                     "intermediate representation")
        walker.walk(listener, tree)

        if not self.quiet:
            listener.module.printStats()
        if self.logLevel == logging.DEBUG:
            print(listener.module)
        logger.debug("Writing output file")
        with open(self.outputFile, "wb") as outFile:
            pickle.dump(listener.module, outFile)
Example #20
def main(argv):
    parser = argparse.ArgumentParser(description='Crust compiler')
    parser.add_argument('--file')
    parser.add_argument('--lexer', action='store_true')
    parser.add_argument('--ast', action='store_true')
    parser.add_argument('--symbol', action='store_true')

    args = parser.parse_args()

    input_str = antlr4.FileStream(args.file)
    lexer = crustLexer(input_str)
    stream = antlr4.CommonTokenStream(lexer)
    parser = crustParser(stream)
    tree = parser.program()

    if args.lexer:
        for token in stream.tokens:
            print(token.line, ":", token.text)

    ast = AstBuilderVisitor().visit(tree)

    if args.ast:
        json_tree = AstPrinter().visit(ast)
        json_tree = json.dumps(json_tree, indent=2)
        print(json_tree)

    semantic = SymbolTableGenerator(ast)

    if args.symbol:
        print(semantic)

    if semantic.errors:
        print('Errors')
        for error in semantic.errors:
            print(error)
        return
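
Example #20 can print stream.tokens only after parser.program() has run, because CommonTokenStream pulls tokens from the lexer lazily as the parser consumes them. To inspect the tokens without parsing at all, the stream can be filled explicitly, as in this sketch (MyGrammarLexer is again a placeholder for a generated lexer):

import antlr4
from MyGrammarLexer import MyGrammarLexer  # hypothetical generated lexer


def dump_tokens(path):
    lexer = MyGrammarLexer(antlr4.FileStream(path))
    stream = antlr4.CommonTokenStream(lexer)
    stream.fill()  # force the whole input through the lexer
    for token in stream.tokens:
        if token.type != antlr4.Token.EOF:
            print(token.line, ":", token.text)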
Example #21
            print('Function %r' % name)
            # self.that.globals[name.lower()] = ctx

    def enterBlockStmt(self, ctx):
        print('enterBlockStmt:')
        print(ctx.getText())

    def enterLiteral(self, ctx):
        print('enterLiteral:')
        print(ctx.getText())


try:
    filename = sys.argv[1]
except:
    sys.exit('Usage: %s <VBA text file>' % sys.argv[0])

print('Parsing %s' % filename)
print('Lexer')
lexer = vbaLexer(antlr4.FileStream(sys.argv[1]))
print('Stream')
stream = antlr4.CommonTokenStream(lexer)
print('vbaParser')
parser = vbaParser(stream)
print('Parsing from startRule')
tree = parser.startRule()
print('Walking the parse tree')
listener = MyListener()
walker = antlr4.ParseTreeWalker()
walker.walk(listener, tree)
Example #22
import sys
import gen.aslLexer
import gen.aslParser
import antlr4
from antlr4.tree.Trees import Trees

if __name__ == "__main__":
    filename = r"C:\Users\terzi\OneDrive\Documents\asli\examples\constant_declarations"

    #filename = r"C:\Users\terzi\OneDrive\Documents\asli\examples\operations"
    #filename = r"C:\Users\terzi\OneDrive\Documents\asli\examples\enumerations.asl"

    file_stream = antlr4.FileStream(filename)
    lexer = gen.aslLexer.aslLexer(file_stream)
    stream = antlr4.CommonTokenStream(lexer)

    parser = gen.aslParser.aslParser(stream)

    tree = parser.rulelist()
    print(Trees.toStringTree(tree, None, parser))

    #htmlChat = HtmlChatListener(output)
    #walker = ParseTreeWalker()
    #walker.walk(htmlChat, tree)

    # import your parser & lexer here

    # setup your lexer, stream, parser and tree like normal
Example #23
def parse_file(path):
    input_stream = antlr4.FileStream(path)
    return parse_js_stream(input_stream)
Example #24
import sys
import antlr4
from VPLLexer import VPLLexer
from VPLParser import VPLParser
from translateVisitor import TranslateVisitor
from semanticVisitor import semanticVisitor
from varCheckVisitor import varCheckVisitor
from VPLVisitor import VPLVisitor

char_stream = antlr4.FileStream(sys.argv[1])
lexer = VPLLexer(char_stream)
tokens = antlr4.CommonTokenStream(lexer)
parser = VPLParser(tokens)
tree = parser.program()
# semantic analysis
semanticChecker = semanticVisitor()
semanticChecker.visit(tree)
varChecker = varCheckVisitor()
varChecker.visit(tree)

translator = TranslateVisitor(varChecker.usedVars)
translator.visit(tree)

print(translator)
Example #25
def Populate(source_name_content_generators,
             parse_flags):  # { "name" : def Func() -> content, }
    include_filenames = []

    root = Item(
        declaration_type=Item.DeclarationType.Object,
        item_type=Item.ItemType.Standard,
        parent=None,
        source="<root>",
        line=-1,
        column=-1,
        is_external=False,
    )

    root.name = "<root>"
    root.declaration_type = None
    root.metadata = ResolvedMetadata({}, [], [])

    root.config = OrderedDict()

    # ----------------------------------------------------------------------
    class Visitor(SimpleSchemaVisitor):
        # <PascalCase naming style> pylint: disable = C0103

        # ----------------------------------------------------------------------
        def __init__(self, source_name, is_external):
            self._source_name = source_name
            self._is_external = is_external

            self._stack = [root]

        # ----------------------------------------------------------------------
        def visitIdRule(self, ctx):
            assert len(ctx.children) == 1, ctx.children
            self._stack.append(ctx.children[0].symbol.text)

        # ----------------------------------------------------------------------
        def visitIntRule(self, ctx):
            assert len(ctx.children) == 1, ctx.children
            self._stack.append(int(ctx.children[0].symbol.text))

        # ----------------------------------------------------------------------
        def visitNumber(self, ctx):
            assert len(ctx.children) == 1, ctx.children
            self._stack.append(float(ctx.children[0].symbol.text))

        # ----------------------------------------------------------------------
        def visitString(self, ctx):
            return self.visitEnhancedString(ctx)

        # ----------------------------------------------------------------------
        def visitEnhancedString(self, ctx):
            while not isinstance(ctx, antlr4.tree.Tree.TerminalNode):
                assert len(ctx.children) == 1
                ctx = ctx.children[0]

            token = ctx.symbol
            value = token.text

            # At the very least, we should have a beginning and ending quote
            assert len(value) >= 2, value

            if (value.startswith('"""')
                    and value.endswith('"""')) or (value.startswith("'''")
                                                   and value.endswith("'''")):
                initial_whitespace = token.column

                # ----------------------------------------------------------------------
                def TrimPrefix(line, line_offset):
                    index = 0
                    whitespace = 0

                    while index < len(
                            line) and whitespace < initial_whitespace:
                        if line[index] == " ":
                            whitespace += 1
                        elif line[index] == "\t":
                            whitespace += 4
                        elif line[index] == "\r":
                            break
                        else:
                            raise Exceptions.PopulateInvalidTripleStringPrefixException(
                                self._source_name, token.line + line_offset,
                                token.column + 1 + whitespace)
                        index += 1

                    return line[index:]

                # ----------------------------------------------------------------------

                lines = value.split("\n")

                initial_line = lines[0].rstrip()
                if len(initial_line) != 3:
                    raise Exceptions.PopulateInvalidTripleStringHeaderException(
                        self._source_name, token.line, token.column + 1)

                final_line = lines[-1]
                if len(TrimPrefix(final_line, len(lines))) != 3:
                    raise Exceptions.PopulateInvalidTripleStringFooterException(
                        self._source_name, token.line, token.column + 1)

                lines = [
                    TrimPrefix(line, index + 1)
                    for index, line in enumerate(lines[1:-1])
                ]

                value = "\n".join(lines)

            elif value[0] == '"' and value[-1] == '"':
                value = value[1:-1].replace('\\"', '"')

            elif value[0] == "'" and value[-1] == "'":
                value = value[1:-1].replace("\\'", "'")

            else:
                assert False, value

            self._stack.append(value)

        # ----------------------------------------------------------------------
        def visitArgList(self, ctx):
            values = self._GetChildValues(ctx)
            self._stack.append(values)

        # ----------------------------------------------------------------------
        def visitMetadata(self, ctx):
            values = self._GetChildValues(ctx)
            assert len(values) == 2, values

            name, value = values

            self._stack.append(
                (name,
                 MetadataValue(value, MetadataSource.Explicit,
                               self._source_name, ctx.start.line,
                               ctx.start.column + 1)))

        # ----------------------------------------------------------------------
        def visitMetadataList(self, ctx):
            values = self._GetChildValues(ctx)

            metadata = OrderedDict()

            for name, value in values:
                if name in metadata:
                    raise Exceptions.PopulateDuplicateMetadataException(
                        value.Source,
                        value.Line,
                        value.Column,
                        name=name,
                        original_source=metadata[name].Source,
                        original_line=metadata[name].Line,
                        original_column=metadata[name].Column,
                    )

                metadata[name] = value

            self._stack.append(
                Metadata(metadata, self._source_name, ctx.start.line,
                         ctx.start.column + 1))

        # ----------------------------------------------------------------------
        def visitArityOptional(self, ctx):
            self._stack.append(Arity.FromString("?"))

        # ----------------------------------------------------------------------
        def visitArityZeroOrMore(self, ctx):
            self._stack.append(Arity.FromString("*"))

        # ----------------------------------------------------------------------
        def visitArityOneOrMore(self, ctx):
            self._stack.append(Arity.FromString("+"))

        # ----------------------------------------------------------------------
        def visitArityFixed(self, ctx):
            values = self._GetChildValues(ctx)
            assert len(values) == 1, values

            value = values[0]

            if value <= 0:
                raise Exceptions.PopulateInvalidArityException(
                    self._source_name,
                    ctx.start.line,
                    ctx.start.column + 1,
                    value=value,
                )

            self._stack.append(Arity(value, value))

        # ----------------------------------------------------------------------
        def visitArityVariable(self, ctx):
            values = self._GetChildValues(ctx)
            assert len(values) == 2, values

            min_value, max_value = values

            if min_value <= 0:
                raise Exceptions.PopulateInvalidArityException(
                    self._source_name,
                    ctx.start.line,
                    ctx.start.column + 1,
                    value=min_value,
                )

            if max_value <= 0:
                raise Exceptions.PopulateInvalidArityException(
                    self._source_name,
                    ctx.start.line,
                    ctx.start.column + 1,
                    value=max_value,
                )

            if max_value < min_value:
                raise Exceptions.PopulateInvalidMaxArityException(
                    self._source_name,
                    ctx.start.line,
                    ctx.start.column + 1,
                    min=min_value,
                    max=max_value,
                )

            self._stack.append(Arity(min_value, max_value))

        # ----------------------------------------------------------------------
        def visitIncludeStatement(self, ctx):
            if not parse_flags & ParseFlag.SupportIncludeStatements:
                raise Exceptions.PopulateUnsupportedIncludeStatementsException(
                    self._source_name, ctx.start.line, ctx.start.column + 1)

            values = self._GetChildValues(ctx)
            assert len(values) == 1, values
            filename = values[0]

            filename = os.path.normpath(
                os.path.join(os.path.dirname(self._source_name), filename))
            if not os.path.isfile(filename):
                raise Exceptions.PopulateInvalidIncludeFilenameException(
                    self._source_name,
                    ctx.start.line,
                    ctx.start.column + 1,
                    name=filename,
                )

            if filename not in source_name_content_generators and filename not in include_filenames:
                include_filenames.append(filename)

        # ----------------------------------------------------------------------
        def visitConfigStatement(self, ctx):
            if not parse_flags & ParseFlag.SupportConfigStatements:
                raise Exceptions.PopulateUnsupportedConfigStatementsException(
                    self._source_name, ctx.start.line, ctx.start.column + 1)
            values = self._GetChildValues(ctx)

            # There should be at least the name and 1 metadata item
            assert len(values) >= 2, len(values)

            name = values.pop(0)

            root.config.setdefault(name, []).append(
                Metadata(
                    OrderedDict([(
                        k,
                        v._replace(Source=MetadataSource.Config, ),
                    ) for k, v in values], ),
                    self._source_name,
                    ctx.start.line,
                    ctx.start.column + 1,
                ), )

        # ----------------------------------------------------------------------
        def visitUnnamedObj(self, ctx):
            if not parse_flags & ParseFlag.SupportUnnamedObjects:
                raise Exceptions.PopulateUnsupportedUnnamedObjectsException(
                    self._source_name, ctx.start.line, ctx.start.column + 1)

            if len(self._stack) == 1:
                if not parse_flags & ParseFlag.SupportRootObjects:
                    raise Exceptions.PopulateUnsupportedRootObjectsException(
                        self._source_name, ctx.start.line,
                        ctx.start.column + 1)
            else:
                if not parse_flags & ParseFlag.SupportChildObjects:
                    raise Exceptions.PopulateUnsupportedChildObjectsException(
                        self._source_name, ctx.start.line,
                        ctx.start.column + 1)

            with self._PushNewStackItem(ctx, Item.DeclarationType.Object):
                values = self._GetChildValues(ctx)
                assert not values, values

        # ----------------------------------------------------------------------
        def visitObj(self, ctx):
            if not parse_flags & ParseFlag.SupportNamedObjects:
                raise Exceptions.PopulateUnsupportedNamedObjectsException(
                    self._source_name, ctx.start.line, ctx.start.column + 1)

            if len(self._stack) == 1:
                if not parse_flags & ParseFlag.SupportRootObjects:
                    raise Exceptions.PopulateUnsupportedRootObjectsException(
                        self._source_name, ctx.start.line,
                        ctx.start.column + 1)
            else:
                if not parse_flags & ParseFlag.SupportChildObjects:
                    raise Exceptions.PopulateUnsupportedChildObjectsException(
                        self._source_name, ctx.start.line,
                        ctx.start.column + 1)

            with self._PushNewStackItem(ctx,
                                        Item.DeclarationType.Object) as item:
                values = self._GetChildValues(ctx)

                # ( ID ID? ... )
                # < ID ID? ... >
                # [ ID ID? ... ]
                if len(values) == 2:
                    name, reference = values
                elif len(values) == 1:
                    name = values[0]
                    reference = None
                else:
                    assert False, values

                self._ValidateName(item, name)

                item.name = name

                if reference is not None:
                    # TODO: This will be updated soon
                    item.references.append(reference)

        # ----------------------------------------------------------------------
        def visitObjAttributes(self, ctx):
            values = self._GetChildValues(ctx)
            assert len(values) <= 3, values

            item = self._GetStackParent()

            references = None
            metadata = None
            arity = None

            for value in values:
                if self._IsMetadata(value):
                    assert metadata is None, (metadata, value)
                    metadata = value

                elif self._IsArity(value):
                    assert arity is None, (arity, value)
                    arity = value

                else:
                    if isinstance(value, list):
                        item.multi_reference_type = Item.MultiReferenceType.Compound
                    else:
                        value = [value]

                    assert references is None, (references, value)
                    references = value

            assert not item.references, item.references
            item.references = references or []
            item.metadata = metadata
            item.arity = arity

        # ----------------------------------------------------------------------
        def visitObjAttributesItems(self, ctx):
            values = self._GetChildValues(ctx)
            assert values

            self._stack.append(values)

        # ----------------------------------------------------------------------
        def visitUnnamedDeclaration(self, ctx):
            if not parse_flags & ParseFlag.SupportUnnamedDeclarations:
                raise Exceptions.PopulateUnsupportedUnnamedDeclarationsException(
                    self._source_name, ctx.start.line, ctx.start.column + 1)

            if len(self._stack) == 1:
                if not parse_flags & ParseFlag.SupportRootDeclarations:
                    raise Exceptions.PopulateUnsupportedRootDeclarationsException(
                        self._source_name, ctx.start.line,
                        ctx.start.column + 1)
            else:
                if not parse_flags & ParseFlag.SupportChildDeclarations:
                    raise Exceptions.PopulateUnsupportedChildDeclarationsException(
                        self._source_name, ctx.start.line,
                        ctx.start.column + 1)

            with self._PushNewStackItem(ctx, Item.DeclarationType.Declaration):
                values = self._GetChildValues(ctx)
                assert not values, values

        # ----------------------------------------------------------------------
        def visitDeclaration(self, ctx):
            if not parse_flags & ParseFlag.SupportNamedDeclarations:
                raise Exceptions.PopulateUnsupportedNamedDeclarationsException(
                    self._source_name, ctx.start.line, ctx.start.column + 1)

            if len(self._stack) == 1:
                if not parse_flags & ParseFlag.SupportRootDeclarations:
                    raise Exceptions.PopulateUnsupportedRootDeclarationsException(
                        self._source_name, ctx.start.line,
                        ctx.start.column + 1)
            else:
                if not parse_flags & ParseFlag.SupportChildDeclarations:
                    raise Exceptions.PopulateUnsupportedChildDeclarationsException(
                        self._source_name, ctx.start.line,
                        ctx.start.column + 1)

            with self._PushNewStackItem(
                    ctx, Item.DeclarationType.Declaration) as item:
                values = self._GetChildValues(ctx)

                assert len(values) == 1, values
                name = values[0]

                self._ValidateName(item, name)

                item.name = name

        # ----------------------------------------------------------------------
        def visitDeclarationAttributes(self, ctx):
            values = self._GetChildValues(ctx)
            assert values

            item = self._GetStackParent()

            # First item will always be the id or attributes list
            value = values.pop(0)
            assert value
            assert not item.references, item.references

            if isinstance(value, list):
                item.references = value
                item.multi_reference_type = Item.MultiReferenceType.Variant
            else:
                item.references.append(value)

            assert values, "Metadata is always present"

            if len(values) == 1:
                metadata = values[0]
                assert self._IsMetadata(metadata), metadata

                arity = None

            elif len(values) == 2:
                metadata, arity = values

                assert self._IsMetadata(metadata), metadata
                assert self._IsArity(arity), arity

            else:
                assert False

            item.metadata = metadata
            item.arity = arity

        # ----------------------------------------------------------------------
        def visitDeclarationAttributesItems(self, ctx):
            values = self._GetChildValues(ctx)

            # Values should be alternating id, metadata list (an even number of items)
            assert not len(values) & 1, values

            result = []

            index = 0
            while index < len(values):
                result.append((values[index], values[index + 1]))
                index += 2

            self._stack.append(result)

        # ----------------------------------------------------------------------
        def visitExtension(self, ctx):
            if not parse_flags & ParseFlag.SupportExtensionsStatements:
                raise Exceptions.PopulateUnsupportedExtensionStatementException(
                    self._source_name, ctx.start.line, ctx.start.column + 1)

            with self._PushNewStackItem(
                    ctx, Item.DeclarationType.Extension) as item:
                values = self._GetChildValues(ctx)
                assert len(values) in [1, 2], values

                name = values[0]
                self._ValidateName(item, name)

                item.name = name
                item.metadata = ResolvedMetadata({}, [], [])

                if len(values) > 1:
                    item.arity = values[1]

        # ----------------------------------------------------------------------
        def visitExtensionContentPositional(self, ctx):
            values = self._GetChildValues(ctx)
            assert len(values) == 1, values

            item = self._GetStackParent()

            item.positional_arguments.append(values[0])

        # ----------------------------------------------------------------------
        def visitExtensionContentKeyword(self, ctx):
            values = self._GetChildValues(ctx)

            assert len(values) == 2, values
            key, value = values

            item = self._GetStackParent()

            if key in item.keyword_arguments:
                raise Exceptions.PopulateDuplicateKeywordArgumentException(
                    self._source_name,
                    ctx.start.line,
                    ctx.start.column + 1,
                    name=key,
                    value=value,
                    original_value=item.keyword_arguments[key],
                )

            item.keyword_arguments[key] = value

        # ----------------------------------------------------------------------
        # ----------------------------------------------------------------------
        # ----------------------------------------------------------------------
        def _GetChildValues(self, ctx):
            num_elements = len(self._stack)

            self.visitChildren(ctx)

            result = self._stack[num_elements:]
            self._stack = self._stack[:num_elements]

            return result

        # ----------------------------------------------------------------------
        @contextmanager
        def _PushNewStackItem(self, ctx, declaration_type):
            if ctx.start.type == ctx.parser.LBRACK:
                item_type = Item.ItemType.Attribute
            elif ctx.start.type == ctx.parser.LPAREN:
                item_type = Item.ItemType.Definition
            else:
                item_type = Item.ItemType.Standard

            parent = self._GetStackParent()

            item = Item(
                declaration_type,
                item_type,
                parent,
                self._source_name,
                ctx.start.line,
                ctx.start.column + 1,
                is_external=self._is_external,
            )

            parent.items.append(item)

            self._stack.append(item)

            # Note that the lambda seems to be necessary;
            #
            #   with CallOnExit(self._stack.pop):
            #       ...
            #
            # didn't modify the _stack. Strange.
            with CallOnExit(lambda: self._stack.pop()):
                yield item

        # ----------------------------------------------------------------------
        def _GetStackParent(self):
            """\
            Return the parent item. The parent item won't always be the last item on the stack,
            as we may have pushed ids that have yet to be consumed.
            """

            index = -1

            while True:
                assert -index <= len(self._stack), (-index, len(self._stack))

                item = self._stack[index]

                if isinstance(item, Item):
                    return item

                index -= 1

        # ----------------------------------------------------------------------
        @staticmethod
        def _IsMetadata(value):
            return isinstance(value, Metadata)

        # ----------------------------------------------------------------------
        @staticmethod
        def _IsArity(value):
            return isinstance(value, Arity)

        # ----------------------------------------------------------------------
        @staticmethod
        def _ValidateName(item, name):
            # Validating name here rather than in Validate.py as the name is used
            # during Resolution (in Resolve.py), which happens before Validate is
            # called.
            if name in FUNDAMENTAL_ATTRIBUTE_INFO_MAP or name in [
                    "any", "custom"
            ]:
                raise Exceptions.PopulateReservedNameException(
                    item.Source,
                    item.Line,
                    item.Column,
                    name=name,
                )

    # ----------------------------------------------------------------------
    def Impl(name, antlr_stream, is_external):
        lexer = SimpleSchemaLexer(antlr_stream)
        tokens = antlr4.CommonTokenStream(lexer)

        tokens.fill()

        parser = SimpleSchemaParser(tokens)
        parser.addErrorListener(ErrorListener(name))

        ast = parser.statements()
        assert ast

        ast.accept(Visitor(name, is_external))

    # ----------------------------------------------------------------------

    for source_name, content_generator in six.iteritems(
            source_name_content_generators):
        content = content_generator()
        content += "\n"

        antlr_stream = antlr4.InputStream(content)
        antlr_stream.filename = source_name

        Impl(
            source_name,
            antlr_stream,
            is_external=False,
        )

    # Iterating via index rather than by element as processing the content may update
    # the list.
    index = 0

    while index < len(include_filenames):
        Impl(
            include_filenames[index],
            antlr4.FileStream(include_filenames[index]),
            is_external=True,
        )

        index += 1

    return root
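
The Populate visitor above passes child results upward through an explicit stack: every visit* method pushes the value it produced, and _GetChildValues visits the children and then slices whatever they pushed back off the stack. Reduced to a toy sketch with generic names (not the SimpleSchema classes), the pattern looks like this:

class StackVisitor:
    """Toy illustration of the push/slice pattern used by _GetChildValues above."""

    def __init__(self):
        self._stack = []

    def _get_child_values(self, visit_children):
        marker = len(self._stack)        # depth before the children run
        visit_children()                 # each child pushes its own value
        values = self._stack[marker:]    # everything above the marker came from the children
        self._stack = self._stack[:marker]
        return values


# Usage sketch: children push, the parent collects their values in order.
visitor = StackVisitor()
values = visitor._get_child_values(lambda: visitor._stack.extend([1, 'two']))
assert values == [1, 'two'] and visitor._stack == []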
Example #26
def main(argv):
    """
    Run the assembler with the given arguments, and return the directives
    found in the program along with the test output.
    """

    infile, options_args, options_noargs = _assembly_utils.parse_input(argv)

    ##############################################################
    # PARSING
    ##############################################################

    try:
        file_stream = antlr4.FileStream(infile)
    except FileNotFoundError:
        print(f'Error: cannot find file "{infile}"\n')
        sys.exit(1)
    lexer = CorLexer(file_stream)
    lexer.removeErrorListeners()

    error_listener = _vus_listener.ImportErrorListener(infile)
    lexer.addErrorListener(error_listener)
    stream = antlr4.CommonTokenStream(lexer)
    parser = CorParser(stream)

    import_listener = _vus_listener.ImportListener(infile, stream)

    parser.removeErrorListeners()
    parser.addErrorListener(error_listener)
    tree = parser.initial()
    walker = antlr4.ParseTreeWalker()

    # recursively searches through files until all are added to the imports list
    walker.walk(import_listener, tree)

    # proper parsing
    listener = _vus_listener.VusListener(
        import_listener.getImports()[-1]["name"],
        _assembly_utils.Globals.RAM_ADDRESS_BEGIN,
        _assembly_utils.Globals.PGM_ADDRESS_BEGIN,
        import_listener.imports[-1]["path"],
        _assembly_utils.Globals.SYSVARS,
    )

    # NOTE -- main file is the last addition to imports list
    labels = _vus_listener.Labels()
    instructions = _vus_listener.Instructions()

    import_shuffle = import_listener.getImports()
    if len(import_shuffle) > 2:
        import_shuffle = import_shuffle[len(import_shuffle) -
                                        2::-1] + import_shuffle[-1:]
    for file in import_shuffle:
        # print(file)
        error_listener = _vus_listener.VusErrorListener(filepath=file["path"])
        file_stream = antlr4.FileStream(file["path"])
        lexer = CorLexer(file_stream)
        lexer.removeErrorListeners()
        lexer.addErrorListener(error_listener)
        stream = antlr4.CommonTokenStream(lexer)

        parser = CorParser(stream)
        parser.removeErrorListeners()
        parser.addErrorListener(error_listener)
        tree = parser.parse()

        num_instructions = listener.getNumInstructions()
        labels.insert(listener.getLabels().getLabels(), num_instructions)
        instructions.insert(listener.getInstructions().getInstructions(),
                            num_instructions)

        listener.reset(
            file["name"],
            file["path"],
            stream,
            start_addr=_assembly_utils.Globals.PGM_ADDRESS_BEGIN,
        )

        walker = antlr4.ParseTreeWalker()
        walker.walk(listener, tree)

    num_instructions = listener.getNumInstructions()
    labels.insert(listener.getLabels().getLabels(), num_instructions)
    instructions.insert(listener.getInstructions().getInstructions(),
                        num_instructions)

    labels.setInit(
        program_start_addr=_assembly_utils.Globals.PGM_ADDRESS_BEGIN)

    # unnecessary with loading from flash
    # _assembly_utils.init_ram(instructions, listener.getVariables(), labels)

    for i in range(len(_assembly_utils.Globals.INIT_INSTRUCTIONS) - 1, -1, -1):
        instructions.getInstructions().insert(
            0, _assembly_utils.Globals.INIT_INSTRUCTIONS[i])
    _assembly_utils.set_interrupts(labels.getLabels(),
                                   instructions.getInstructions())

    # Final stage
    # _assembly_utils.generate_output(
    #     options_args, options_noargs, listener, labels, instructions
    # )
    _assembly_utils.generate_binary(options_args, options_noargs, listener,
                                    labels, instructions)

    # this is a bit of a hack, but I don't feel like fixing it right now
    testing = _assembly_utils.test_output(instructions,
                                          listener.getVariables(), labels)

    return listener.directives, testing
Example #27

__version__ = '0.4.0'
__author__ = 'Morteza'

import antlr4
import java8speedy
from java8speedy.parser import sa_javalabeled
from java8speedy.benchmark import benchmark



if __name__ == '__main__':
    if sa_javalabeled.USE_CPP_IMPLEMENTATION:
        print("JAVA 8 Using C++ implementation of parser")
    else:
        print("Using Python implementation of parser")

    file_stream = antlr4.FileStream('../tests/utils_test2.java')
    file_stream = antlr4.FileStream('Test_WekaPackageManager.java')
    sa_javalabeled._cpp_parse(file_stream, 'compilationUnit')

    # java9speedy.print_tree('../refactorings/input.java')
    # java9speedy.benchmark('../refactorings/input.java', 1000)
    # java9speedy.print_tree('Test_WekaPackageManager.java')

    benchmark('Test_WekaPackageManager.java', 5)
    # benchmark('../tests/utils_test2.java')
Example #28
def parse_file(*args, **kwargs) -> RootNode:
    """Parse a json path from a file."""
    file_stream = antlr4.FileStream(*args, **kwargs)
    return _parse_input_stream(file_stream)
Example #29
    def process(self, filename: str):
        lexer = self.lexer_factory(antlr4.FileStream(filename))
        tokens = antlr4.CommonTokenStream(lexer)
        parser = self.parser_factory(tokens)
        walker = antlr4.ParseTreeWalker()
        walker.walk(self.listener_factory(), parser.entry_point())
Example #30
File: silicon.py Project: theQRL/silicon
import sys
import antlr4

from gen.siliconLexer import siliconLexer
from gen.siliconParser import siliconParser

from parser.Scope import Scope
from parser.ast.Visitor import Visitor
from parser.SymbolVisitor import SymbolVisitor

if len(sys.argv) < 2:
    print("Missing Filename")
    sys.exit(0)

filename = sys.argv[1]
code = antlr4.FileStream(filename)
lexer = siliconLexer(code)
stream = antlr4.CommonTokenStream(lexer)
parser = siliconParser(stream)

tree = parser.sourceUnit()
scope = Scope()
functions = dict()
symbolVisitor = SymbolVisitor(functions)
symbolVisitor.visit(tree)
visitor = Visitor(scope, functions)
visitor.visit(tree)