def test_generated_parser(self):
    """The 'test' here is the very fact that we can successfully import
    the generated antlr code. However there's no harm in also
    exercising it.

    This code is modified from the canonical example at
    http://www.antlr.org/wiki/display/ANTLR3/Example.
    """
    char_stream = antlr3.ANTLRStringStream('4 + 5\n')
    lexer = ExprLexer(char_stream)
    tokens = antlr3.CommonTokenStream(lexer)
    parser = ExprParser(tokens)
    r = parser.prog()

    # this is the root of the AST
    root = r.tree

    nodes = antlr3.tree.CommonTreeNodeStream(root)
    nodes.setTokenStream(tokens)
    # renamed from 'eval' so the builtin eval() is not shadowed
    evaluator = Eval(nodes)
    evaluator.prog()
def serve(self, script, forest):
    """Parse *script* with the generated batch-script grammar and
    interpret it.

    ``forest`` is the initial variable forest (a falsy value means
    "start empty").  Returns the ``out`` mapping populated by the
    program's interpretation.
    """
    stream = antlr3.ANTLRStringStream(script)
    lexer = BatchScriptLexer(stream)
    token_stream = antlr3.CommonTokenStream(lexer)
    parser = BatchScriptParser(token_stream)
    program = parser.main()

    if not forest:
        forest = {}

    out = {}
    # Interpretation starts from a single ROOT environment anchored at
    # self.root (no parent).
    env = Env('ROOT', self.root, None)
    program.interp(forest, out, env)
    return out
def main(expr):
    """Code that emits the value of a simple arithmetic expression.
    Exercises interaction with ANTLR3-generated Python code.

    This code is modified from the canonical example at
    http://www.antlr.org/wiki/display/ANTLR3/Example.
    """
    char_stream = antlr3.ANTLRStringStream('{}\n'.format(expr))
    lexer = ExprLexer(char_stream)
    tokens = antlr3.CommonTokenStream(lexer)
    parser = ExprParser(tokens)
    r = parser.prog()

    # this is the root of the AST
    root = r.tree

    nodes = antlr3.tree.CommonTreeNodeStream(root)
    nodes.setTokenStream(tokens)
    # renamed from 'eval' so the builtin eval() is not shadowed
    evaluator = Eval(nodes)
    evaluator.prog()
def treadfile(inputFileName, srcpath, outputpath): print inputFileName (path, filename) = os.path.split(inputFileName) path = path[len(srcpath):] (leftname, ext) = os.path.splitext(filename) s = codecs.open(inputFileName, "r", "utf-8").read() if ord(s[0]) > 256: s = s[1:] cStream = antlr3.ANTLRStringStream(s) lexer = AS3_exLexer(cStream) tStream = antlr3.CommonTokenStream(lexer) ts = tStream.getTokens() if path != "": path = outputpath + "/" + path else: path = outputpath print path + "/" + filename f = codecs.open(path + "/" + filename, "w", "utf-8") for t in ts: f.write(convert(t.text)) f.close()
def parseQuery(self, query):
    """Parse a DAS query string with the generated SQL grammar.

    Returns a dict with the FIND/WHERE/ORDER BY pieces collected by the
    parser actions.  Raises ValueError with a position-annotated message
    when the lexer/parser hits an invalid token.
    """
    try:
        l = SqlDASLexer(antlr3.ANTLRStringStream(query))
        tokens = antlr3.CommonTokenStream(l)
        p = SqlDASParser(tokens)
        p.stmt()
        # kws/constraints/okws/orderingkw are presumably module-level
        # collections filled in by the parser actions — verify against
        # the generated parser.
        toReturn = {}
        toReturn['FIND_KEYWORDS'] = kws
        toReturn['WHERE_CONSTRAINTS'] = constraints
        toReturn['ORDER_BY_KEYWORDS'] = okws
        toReturn['ORDERING'] = orderingkw
        return toReturn
    except antlr3.exceptions.NoViableAltException as expObj:
        t = expObj.token
        msg = "Invalid Token " + str(t.getText()) + " on line " + str(t.getLine()) + " at column " + str(t.getCharPositionInLine()) + "\n"
        msg += "QUERY " + query + "\nPOSITION "
        # Pad with spaces so the caret lines up under the bad token.
        pos = int(t.getCharPositionInLine())
        if pos > 0:
            msg += " " * pos
        msg += "^\n"
        # BUG FIX: the original did 'raise msg', which raises a plain
        # string — a TypeError on Python >= 2.6.  Wrap the message in a
        # real exception instead.
        raise ValueError(msg)
def treadfile(inputFileName,srcpath,outputpath): print inputFileName (path,filename)=os.path.split(inputFileName) path=path[len(srcpath):] (leftname,ext)=os.path.splitext(filename) s=codecs.open(inputFileName,"r","utf-8").read() if ord(s[0])>256: s=s[1:] s=removeMutilComment(s) cStream = antlr3.ANTLRStringStream(s) lexer = AS3_exLexer(cStream) tStream = antlr3.CommonTokenStream(lexer) if path!="": path=outputpath+"/"+path else: path=outputpath print path+"/"+filename parser = AS3_exParser(tStream) fc=parser.fileContents(); f=codecs.open(path+"/"+filename,"w","utf-8") f.write(fc.value) f.close()
def parse_string(self, data):
    """Parse *data* (a string) by wrapping it in an ANTLR character
    stream and delegating to the internal parser."""
    stream = antlr3.ANTLRStringStream(data)
    return self._parse(stream)
def convert_string(string, verbose=0):
    """Convert *string*: wrap it in an ANTLR character stream and hand
    it to convert_charstream() with the same verbosity."""
    stream = antlr3.ANTLRStringStream(string)
    return convert_charstream(stream, verbose)
def execute(self, argv):
    """Drive one parser run from command-line arguments.

    Parses *argv* into options, then either enters an interactive
    read-parse loop over stdin, or parses a single input stream chosen
    from --input text, a filename argument, or stdin — optionally under
    the cProfile or hotshot profilers (Python 2 script: raw_input,
    hotshot).
    """
    options, args = self.parseOptions(argv)
    self.setUp(options)

    if options.interactive:
        # Interactive loop: one parse per entered line; EOF or Ctrl-C
        # ends the session.  ('input' shadows the builtin — kept as-is.)
        while True:
            try:
                input = raw_input(">>> ")
            except (EOFError, KeyboardInterrupt):
                self.stdout.write("\nBye.\n")
                break

            inStream = antlr3.ANTLRStringStream(input)
            self.parseStream(options, inStream)
    else:
        # Pick the input source: inline --input text, a real filename
        # argument (a lone '-' means stdin), or stdin as the fallback.
        if options.input is not None:
            inStream = antlr3.ANTLRStringStream(options.input)
        elif len(args) == 1 and args[0] != '-':
            inStream = antlr3.ANTLRFileStream(
                args[0], encoding=options.encoding
            )
        else:
            inStream = antlr3.ANTLRInputStream(
                self.stdin, encoding=options.encoding
            )

        if options.profile:
            # Prefer the C-accelerated profiler when available.
            try:
                import cProfile as profile
            except ImportError:
                import profile

            # runctx needs the statement as a string evaluated with the
            # current globals/locals; results go to profile.dat.
            profile.runctx(
                'self.parseStream(options, inStream)',
                globals(),
                locals(),
                'profile.dat'
            )

            import pstats
            stats = pstats.Stats('profile.dat')
            stats.strip_dirs()
            stats.sort_stats('time')
            stats.print_stats(100)
        elif options.hotshot:
            # hotshot profiler (Python 2 only); writes hotshot.dat.
            import hotshot
            profiler = hotshot.Profile('hotshot.dat')
            profiler.runctx(
                'self.parseStream(options, inStream)',
                globals(),
                locals()
            )
        else:
            self.parseStream(options, inStream)
# NOTE(review): this chunk starts mid-definition — the __init__ below has no
# visible 'class' header in this view.  Since ParseError(tokens, e) is raised
# further down with the same (tokens, e) shape, this is presumably the tail
# of a 'class ParseError(Exception):' definition — confirm against the full
# file before relying on this.
    def __init__(self, tokens, e):
        self.tokens = tokens
        self.e = e


class LexerError(Exception):
    # Wraps a lexer recognition failure together with its token stream
    # and the underlying ANTLR exception.
    def __init__(self, tokens, e):
        self.tokens = tokens
        self.e = e


class ExceptionalJSParser(JavaScriptParser):
    # Convert ANTLR's default error *reporting* into a raised ParseError
    # so callers can catch parse failures.
    def displayRecognitionError(self, tokens, e):
        raise ParseError(tokens, e)


class ExceptionalJSLexer(JavaScriptLexer):
    # Same conversion for lexer errors: report -> raise LexerError.
    def displayRecognitionError(self, tokens, e):
        raise LexerError(tokens, e)


# Demo driver: parse a small JavaScript snippet and print the AST node
# types.  ('input' shadows the builtin — kept as-is.)
input = 'var foo = function() { var foo = document.cookie; return foo+2; };;;\n;'

char_stream = antlr3.ANTLRStringStream(input)
# or to parse a file:
#   char_stream = antlr3.ANTLRFileStream(path_to_input)
#
# or to parse an opened file or any other file-like object:
#   char_stream = antlr3.ANTLRInputStream(file)

lexer = ExceptionalJSLexer(char_stream)
tokens = antlr3.CommonTokenStream(lexer)
parser = ExceptionalJSParser(tokens)

try:
    program = parser.program()
    # Print the root node and each top-level child with its token type.
    print str(program.tree)+" -> "+str(program.tree.getType())
    for l in program.tree.getChildren():
        print str(l)+" -> "+str(l.getType())
except ParseError, e:
    # e.e is the original ANTLR exception; report the offending token type.
    print "P|"+str((e.e.token.type))+"|"
def load_from_string(string):
    """
    Parse a FCL string to a fuzzy.systems (Mamdani, Sugeno or Tsukamoto) instance
    """
    fcl_stream = antlr3.ANTLRStringStream(string)
    return __load(fcl_stream)
import antlr3 from PyFuncLexer import PyFuncLexer from PyFuncParser import PyFuncParser pfun = 'a.b.c.MyFun (123, dyz, "abc") AS MyFunx ' char_stream = antlr3.ANTLRStringStream(pfun) # or to parse a file: # char_stream = antlr3.ANTLRFileStream(path_to_input) # or to parse an opened file or any other file-like object: # char_stream = antlr3.ANTLRInputStream(file) lexer = PyFuncLexer(char_stream) tokens = antlr3.CommonTokenStream(lexer) tokens.fillBuffer() parser = PyFuncParser(tokens) func = parser.pyfunc() print "------- %s --------" % func.name print func.packages print func.name print func.alias print func.arguments ''' print parser.toStrings(tokens.tokens) for token in tokens.tokens: print "text=%s, type=%s\n" % (token.text, token.type) '''
def make_tree(self, expression):
    """Lex and parse *expression*, then run the tree grammar over the
    resulting AST and return the tree parser's exp() result."""
    char_stream = antlr3.ANTLRStringStream(expression)
    token_stream = antlr3.CommonTokenStream(ExpLexer(char_stream))
    ast_root = ExpParser(token_stream).low_precedence_exp().tree
    node_stream = antlr3.tree.CommonTreeNodeStream(ast_root)
    node_stream.setTokenStream(token_stream)
    return ExpTreeParser(node_stream).exp()
def load_from_string(self, str):
    """Load a fuzzy system from FCL string."""
    # Parameter name 'str' shadows the builtin but is kept so existing
    # keyword-argument callers keep working.
    fcl_stream = antlr3.ANTLRStringStream(str)
    return self.__load(fcl_stream)
def load_from_string(self, str):
    """Load a fuzzy system from an FCL source string."""
    # Parameter name 'str' shadows the builtin but is kept so existing
    # keyword-argument callers keep working.
    source_stream = antlr3.ANTLRStringStream(str)
    return self.__load(source_stream)