def createInputStream(filename='', fromstr='', srcName=''):
    if filename:
        istr = FileStream(filename)
        if not srcName:
            import os
            srcName = "<file: %s>" % os.path.basename(filename)
    elif fromstr:
        istr = InputStream(fromstr)
        if not srcName:
            srcName = "<input_str>"
    else:
        raise Exception("Either filename or fromstr must be specified")
    istr.name = srcName
    return istr

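A minimal usage sketch for createInputStream, assuming FileStream and InputStream come from the antlr4 runtime; the file name and inline text below are hypothetical inputs, not from the original source:

# From a file on disk; srcName defaults to "<file: example.pml>".
stream_from_file = createInputStream(filename="example.pml")

# From an in-memory string; srcName defaults to "<input_str>".
stream_from_text = createInputStream(fromstr="some input text")
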
def main(argv):
    # don't know if this works for all OS
    input_stream = FileStream(argv[1])
    lexer = pmlLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = pmlParser(stream)
    parser.removeErrorListeners()
    exceptionListener = ParserExceptionListener()
    parser.addErrorListener(exceptionListener)

    # error management
    hadSyntaxErrors = False
    try:
        tree = parser.styles()
    except Exception as e:
        errorText = str(e)
        hadSyntaxErrors = True

    if not hadSyntaxErrors:
        translator = PythonListener()
        walker = ParseTreeWalker()
        walker.walk(translator, tree)
        sys.stdout.write(translator.getCode())
    else:
        sys.stdout.write(errorText)

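ParserExceptionListener is referenced above but not defined; a minimal sketch, assuming it only converts the first reported syntax error into an exception so the caller's try/except can catch it (the real class may do more):

from antlr4.error.ErrorListener import ErrorListener

class ParserExceptionListener(ErrorListener):
    # Raise on the first syntax error instead of printing to stderr.
    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        raise Exception("line %d:%d %s" % (line, column, msg))
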
def main(argv):
    logfmt = ('[%(levelname)s]\t%(name)s:%(threadName)-10s'
              + '(%(asctime)s.%(msecs)d) '
              + '%(filename)s:%(lineno)d:%(message)s')
    datefmt = '%H:%M:%S'
    logging.basicConfig(level=logging.INFO, format=logfmt, datefmt=datefmt)

    #input = FileStream(argv[1])
    gname = argv[1]
    inputstream = FileStream(gname, encoding='utf-8')
    lexer = SparqlLexer(inputstream)
    stream = CommonTokenStream(lexer)
    parser = SparqlParser(stream)
    #tree = parser.StartRule()
    tree = parser.query()

    fmind = Fmind(gname)
    fnode = fmind.make_right(u"root")
    toFmind(fnode, tree)
    fmind.unfold_all()
    fmind.dump_to_file("l2.mm")

    #tree = parser.prologue()
    morpher = MorpherContext2()
    listener = MySparqlParserListener(morpher)
    walker = ParseTreeWalker()
    walker.walk(listener, tree)
    logging.info("Output:%s", sys.argv[1])
    print("# ", sys.argv[1])
    print(morpher.get_result())

def main(argv): """ CLI arg is the path to CSPICE.java. """ input_stream = FileStream(argv[1]) lexer = DeclarationsLexer(input_stream) stream = CommonTokenStream(lexer) parser = DeclarationsParser(stream) functions = parser.cspice().result template_dir = os.path.join('src', 'build', 'templates') out = os.path.join('build', 'generated', 'sources', 'automation', 'main') java_out = os.path.join(out, 'java') proto_out = os.path.join(out, 'proto') for o in [out, java_out, proto_out]: if not os.path.exists(o): os.makedirs(o) functions = [func for func in functions if valid_function(func)] generate_proto(functions, proto_out, template_dir) generate_endpoints(functions, ['ParSPICE.java', 'SpiceService.java'], java_out, template_dir) for func in functions: if func.classification == parse_tree.Classification.NORMAL: try: generate_java(func, ['Call.java', 'Batch.java'], java_out, template_dir) except ValueError: print('not yet working: %s' % func.name)
def compile_script_file(file_name):
    try:
        input = FileStream(file_name, encoding='utf8')
    except Exception as e:
        err = e
        return None, err
    return compile_script(input)

def generate_stateless_alu(self, alu_name, potential_operands):
    # Parse the stateless ALU file (self.stateless_alu_filename_) into a parse tree.
    input_stream = FileStream(self.stateless_alu_filename_)
    lexer = aluLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = aluParser(stream)
    tree = parser.alu()

    sketch_stateless_alu_visitor = SketchStatelessAluVisitor(
        self.stateless_alu_filename_,
        self.sketch_name_ + '_' + alu_name,
        potential_operands,
        self.generate_mux,
        self.constant_arr_size_)
    sketch_stateless_alu_visitor.visit(tree)
    self.add_holes(sketch_stateless_alu_visitor.global_holes)

    self.stateless_alu_hole_arguments_ = [
        x for x in sorted(sketch_stateless_alu_visitor.stateless_alu_args)]
    self.num_stateless_muxes_ = len(sketch_stateless_alu_visitor.packet_fields)

    return (sketch_stateless_alu_visitor.helper_function_strings +
            sketch_stateless_alu_visitor.main_function)

def get_default_netlist(self, cn_id: str, name_gen: NameGenerator) -> MINTDevice:
    if self.type is not PrimitiveType.NETLIST:
        raise Exception("Cannot execute this method for this kind of a primitive")

    default_mint_file = parameters.LIB_DIR.joinpath(self._default_netlist).resolve()
    if not path.exists(default_mint_file):
        raise Exception("Default netlist file does not exist")

    finput = FileStream(default_mint_file)
    lexer = mintLexer(finput)
    stream = CommonTokenStream(lexer)
    parser = mintParser(stream)
    tree = parser.netlist()

    walker = ParseTreeWalker()
    listener = MINTCompiler()
    walker.walk(listener, tree)
    device = listener.current_device

    name_gen.rename_netlist(cn_id, device)

    # Return the default netlist
    return device

def main():
    # Argument parser
    parser_args = argparse.ArgumentParser(prog='expresso',
                                          description='C-- interpreter')
    parser_args.add_argument('input', type=str, help='source code')
    parser_args.add_argument('-o', metavar='', type=str, default=None,
                             help='python output')
    args = parser_args.parse_args()

    #################################

    input_stream = FileStream(args.input)
    lexer = expressoLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = expressoParser(stream)
    tree = parser.start()  # Get AST

    # Traverse AST to generate python ast -> NOT YET IMPLEMENTED
    visitor = astVisitor(args.input)
    ast = visitor.visitStart(tree)

    code = compile(source=ast, filename=args.input, mode='exec')
    exec(code, globals())

    if args.o:
        with open(args.o, 'w') as file:
            file.write(decompile(ast))

def execute(self, input_source):
    parser = JavaParser(CommonTokenStream(
        JavaLexer(FileStream(input_source, encoding="utf-8"))))
    walker = ParseTreeWalker()
    walker.walk(self.listener, parser.compilationUnit())

    # print(self.listener.called_methods)
    # print(self.listener.methods)
    # print(self.listener.calsses)
    print(self.listener.calledMethodToMethod)
    for key in self.listener.calledMethodToMethod:
        print(key)
    for value in self.listener.calledMethodToMethod.values():
        print(value)

    save_row = {}
    with open("a.csv", 'w') as f:
        fieldnames = ['called method', 'method']
        writer = csv.DictWriter(f, fieldnames=fieldnames, delimiter=",", quotechar='"')
        writer.writeheader()
        for calledMethod in self.listener.calledMethodToMethod.keys():
            writer.writerow({'called method': calledMethod,
                             'method': self.listener.calledMethodToMethod[calledMethod]})
            print(calledMethod)
            print(self.listener.calledMethodToMethod[calledMethod])

def comp(source, file=False):
    """Parse the Switch source code and walk it, then return the python code"""
    output = bytearray("", "utf-8")
    namespace = {
        "->": "print_no_nl",
        ":": "SwitchMap",
        "...": "SwitchList",
    }

    if file:
        lexer = switchLexer(FileStream(source))
    else:
        lexer = switchLexer(InputStream(source))
    stream = CommonTokenStream(lexer)
    parser = switchParser(stream)

    lexer.removeErrorListeners()
    lexer.addErrorListener(ExceptionListener())
    parser.removeErrorListeners()
    parser.addErrorListener(ExceptionListener())

    parse_tree = parser.switch_file()
    printer = SwitchPrintListener(output, namespace)
    walker = MyWalker()
    walker.walk(printer, parse_tree)
    return output

def _parse(self, grammar, encoding, lib_dir):
    work_list = {grammar}
    root = None

    while work_list:
        grammar = work_list.pop()

        antlr_parser = self.antlr_parser_cls(
            CommonTokenStream(
                self.antlr_lexer_cls(FileStream(grammar, encoding=encoding))))
        current_root = antlr_parser.grammarSpec()
        # assert antlr_parser._syntaxErrors > 0, 'Parse error in ANTLR grammar.'

        # Save the 'outermost' grammar.
        if not root:
            root = current_root
        else:
            # Unite the rules of the imported grammar with the host grammar's rules.
            for rule in current_root.rules().ruleSpec():
                root.rules().addChild(rule)

        work_list |= self._collect_imports(current_root, dirname(grammar), lib_dir)

    return root

def main(): """Entry point for the CLI.""" # Tokenize the input: file_stream = FileStream(sys.argv[1]) # FIXME: use argparse/click. lexer = XLexer(file_stream) token_stream = CommonTokenStream(lexer) # Parse the program: parser = XParser(token_stream) tree = parser.program() # Compile the code: code_generator = CodeGenerator() code_generator.visit(tree) llvm.initialize() llvm.initialize_native_target() llvm.initialize_native_asmprinter() target = llvm.Target.from_default_triple() target_machine = target.create_target_machine() backing_module = llvm.parse_assembly('') engine = llvm.create_mcjit_compiler(backing_module, target_machine) module = llvm.parse_assembly(code_generator.code) module.verify() engine.add_module(module) engine.finalize_object() func_ptr = engine.get_function_address('main') c_func = CFUNCTYPE(c_int)(func_ptr) print(c_func())
def main(inputname, verbose):
    if inputname:
        lexer = SujetALexer(FileStream(inputname))
    else:
        lexer = SujetALexer(StdinStream())
    if not verbose:
        lexer.removeErrorListener(ConsoleErrorListener.INSTANCE)
    counter = CountErrorListener()
    lexer._listeners.append(counter)

    stream = CommonTokenStream(lexer)
    stream.fill()
    if counter.count > 0:  # wrong token !
        print("lexical error")
        exit(1)

    parser = SujetAParser(stream)
    if not verbose:
        parser.removeErrorListener(ConsoleErrorListener.INSTANCE)
    parser._listeners.append(counter)
    parser.start()
    if counter.count > 0:
        print("syntax error")
        exit(2)
    else:
        print("ok")
        exit(0)

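CountErrorListener is referenced above but not defined; a minimal sketch, assuming it only counts the errors reported through the standard antlr4 ErrorListener hook (which is all the count > 0 checks above rely on):

from antlr4.error.ErrorListener import ErrorListener

class CountErrorListener(ErrorListener):
    def __init__(self):
        super().__init__()
        self.count = 0

    def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
        self.count += 1
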
def print_tokens(filename, use_cpp):
    fs = FileStream(filename)
    sa_modelica.USE_CPP_IMPLEMENTATION = use_cpp
    tree = sa_modelica.parse(fs, 'stored_definition')
    printer = HelloPrintListener()
    walker = ParseTreeWalker()
    walker.walk(printer, tree)

def main():
    in_stream = FileStream(
        os.path.join(os.path.dirname(__file__), "assets", "example.json"),
        encoding="utf8"
    )
    trans = TranspilerFactory.new_transpiler("json", "md")
    res = trans.convert(in_stream)
    print(res)

async def getTokenizedDocument(self, path: Union[str, PurePath]):
    class MyLexerErrorListener(ErrorListener):
        def syntaxError(self, recognizer, offendingSymbol, line, column, msg, e):
            _logger.warning("%s:%d,%d: %s", path, line, column, msg)

    if isinstance(path, str):
        path = Path(path).resolve()
    doc = self._tokenizedDocCache.get(path)
    if doc:
        return doc

    textDoc = TextDocument.loadFile(path, self.inferLanguageId(path))
    input = FileStream(path, encoding="utf-8", errors="replace")
    lexer = self.createLexer(input)
    assert isinstance(lexer, Lexer)
    lexer.removeErrorListeners()
    lexer.addErrorListener(MyLexerErrorListener())

    documentSymbols = []
    if await self.openDocument(textDoc):
        try:
            documentSymbols = await self._lspClient.server.textDocumentGetSymbols(textDoc.uri)
        finally:
            # _logger.info("Close doc")
            self._lspClient.server.textDocumentDidClose(textDoc.uri)

    def tokenGenerator():
        while True:
            tk = lexer.nextToken()
            if tk.type == Token.EOF:
                return
            if self.filterToken(tk):
                yield tk

    doc = TokenizedDocument(tokenGenerator(), documentSymbols, path,
                            documentSymbolFilter=lambda s: self.filterSymbol(s))
    self._tokenizedDocCache[path] = doc
    return doc

def profiling():
    file_name = 'code/cpp/crazy.cpp'
    ABCVisitor = CPPABCVisitor
    time_limit = 0.5  # seconds

    print(f'Profiling for {file_name}')
    print(f'Time limit is {time_limit}s')

    times = list()
    for i in range(10):
        input = FileStream(file_name, encoding='utf8').strdata
        start_time = datetime.now()
        visitor = ABCVisitor.from_code(input, time_limit=time_limit)
        delta_time = datetime.now() - start_time
        times.append(delta_time.total_seconds() * 1000)
        print('.' if visitor.success else 'F', end='', flush=True)
    print()

    print(f'a: {visitor.a}')
    print(f'b: {visitor.b}')
    print(f'c: {visitor.c}')
    print(f'ABC score: {visitor.abc_score}\n')

    mid = sorted(times[1:])[(len(times) - 1) // 2]
    caching_time = times[0] - mid
    print(f'caching time: {format(caching_time, ".5f")}ms')
    print(f'min: {format(min(times[1:]), ".5f")}ms')
    print(f'mid: {format(mid, ".5f")}ms')
    print(f'max: {format(max(times[1:]), ".5f")}ms')

def getTree(file):
    input = FileStream(file)
    lexer = NetAddDslLexer(input)
    stream = CommonTokenStream(lexer)
    parser = NetAddDslParser(stream)
    tree = parser.root()
    return tree

def get_tree(file):
    inp = FileStream(file)
    lexer = MineScriptLexer(inp)
    stream = CommonTokenStream(lexer)
    parser = MineScriptParser(stream)
    tree = parser.prog()
    return tree

def execute(self, input_source):
    parser = Java8Parser(
        CommonTokenStream(
            Java8Lexer(FileStream(input_source, encoding="utf-8"))))
    walker = ParseTreeWalker()
    walker.walk(self.listener, parser.compilationUnit())
    return self.listener.ast_info

def test_parse(self):
    with open(self.root_dir + "/1.jpg", 'rb') as f:
        data = b"".join(f.readlines())
    engine = Dynabuffers.parse(FileStream(self.root_dir + "/schema07.dbs"))
    map = engine.deserialize(engine.serialize({"image": data}))
    self.assertEqual(map, {"image": data})

def analyseTestFile(self):
    try:
        # measure process time in seconds
        t0 = time.process_time()
        fileStream = FileStream(fileName=self.filePath, encoding='utf8')
        lexer = CalculatorLexer(fileStream)
        commonTokenStream = CommonTokenStream(lexer)
        parser = CalculatorParser(commonTokenStream)

        ''' Define new cache '''
        #predictionContextCache = PredictionContextCache();
        ''' Define new/clean DFA array '''
        #decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(parser.atn.decisionToState) ]
        #parser._interp = ParserATNSimulator(parser, parser.atn, decisionsToDFA, predictionContextCache)

        #===================================================================
        parser.removeErrorListeners()
        throwingErrorListener = ThrowingErrorListener()
        parser.addErrorListener(throwingErrorListener)
        #===================================================================

        #if (self.trace):
        #    parser.setTrace(True)

        ''' try PredictionMode.LL first '''
        #parser._interp.predictionMode = PredictionMode.LL
        #parser.errHandler = BailErrorStrategy()
        ''' try PredictionMode.SLL '''
        #parser._interp.predictionMode = PredictionMode.SLL;

        ''' parse using entry rule sql_script '''
        tree = parser.start()

        extendedVisitor = ExtendedVisitor()
        extendedVisitor.visit(tree)
        variable = extendedVisitor.getFirstVariable()
        print("First Variable = {variable} --- Result: {result}".format(
            variable=variable, result=extendedVisitor.getValue(variable)))

        #===================================================================
        # listener = RuleParserListener()
        # walker = ParseTreeWalker()
        # walker.walk(listener, tree)
        #===================================================================

        t1 = time.process_time()
        self.analysisDurationSeconds = t1 - t0

        self.errorsList = throwingErrorListener.getErrorsList()
        if len(self.errorsList) > 0:
            print('Test File - error List = {0}'.format(self.errorsList))
        # print(' identifiers set = {0}'.format(self.identifierSet))

        self.analysisResult = "Success"
        ''' clear cache '''
    except Exception as ex:
        print('analyseSqlTestFile - exception during parse {filePath} - exception = {ex}'.format(
            filePath=self.filePath, ex=ex))
        self.identifierSet = set()
        self.analysisResult = "Failed"

def execute(self, input_source):
    parser = JavaParser(CommonTokenStream(
        JavaLexer(FileStream(input_source, encoding="utf-8"))))
    walker = ParseTreeWalker()
    walker.walk(self.listener, parser.compilationUnit())
    # self.logger.debug('Display all data extracted by AST. \n' + pformat(self.listener.ast_info, width=160))
    # print(self.listener.call_methods)
    # print(self.listener.ast_info['methods'])
    return self.listener.ast_info

def test_create_tree(self):
    input_stream = FileStream(self._template_path)
    lexer = PrismTemplateLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = PrismTemplateParser(stream)
    tree = parser.program()
    self.assertIsNotNone(tree)

def launch():
    i_stream = FileStream('in.txt', encoding='utf-8')
    t_stream = lex(i_stream)
    built_tree, error_listener = parse(t_stream)
    result = tree_to_json(built_tree, error_listener)
    with open('out.txt', 'w') as f_out:
        f_out.write(result)

def test_parse(self):
    engine = Dynabuffers.parse(FileStream(f"{self.root_dir}/schema03.dbs"))
    map = engine.deserialize(
        engine.serialize({"results": [{"text": "hello world"}]}))
    self.assertEqual(map, {"results": [{"text": "hello world"}]})

def compile(self) -> Program:
    input_stream = FileStream(self._template_path)
    lexer = PrismTemplateLexer(input_stream)
    stream = CommonTokenStream(lexer)
    parser = PrismTemplateParser(stream)
    return self.visit(parser.program())

def tree_from_file(self, fn, rule, out, encoding):
    logger.info('Process file %s.', fn)
    try:
        tree = self.create_tree(FileStream(fn, encoding=encoding), rule, fn)
        if tree is not None:
            tree.save(join(out, basename(fn) + Tree.extension),
                      max_depth=self.max_depth)
    except Exception as e:
        logger.warning('Exception while processing %s: %s', fn, str(e))

def get_tree(file):
    check_EOF(file)
    input = FileStream(file)
    lexer = MineScriptLexer(input)
    stream = CommonTokenStream(lexer)
    parser = MineScriptParser(stream)
    tree = parser.prog()
    return tree

def compile_module_file(file_name):
    try:
        input = FileStream(file_name, encoding='utf8')
        m, err = compile_module(input)
        return m, err
    except Exception as e:
        err = e
        return None, err

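A minimal calling sketch for compile_module_file (and the analogous compile_script_file earlier); "module.src" is a hypothetical path, and it assumes err is None when compilation succeeds:

module, err = compile_module_file("module.src")
if err is not None:
    print("compilation failed:", err)
else:
    print("compiled:", module)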