Example No. 1
    def execParser(self,
                   grammar,
                   grammarEntry,
                   input,
                   listener,
                   parser_args=None):
        # Avoid a mutable default argument; fall back to an empty dict.
        parser_args = parser_args or {}
        if listener is None:
            port = 49100
            debugger = Debugger(port)
            debugger.start()
            # TODO(pink): install alarm, so it doesn't hang forever in case of a bug

        else:
            port = None

        try:
            lexerCls, parserCls = self.compileInlineGrammar(grammar,
                                                            options='-debug')

            cStream = antlr3.StringStream(input)
            lexer = lexerCls(cStream)
            tStream = antlr3.CommonTokenStream(lexer)
            parser = parserCls(tStream, dbg=listener, port=port, **parser_args)
            getattr(parser, grammarEntry)()

        finally:
            if listener is None:
                debugger.join()
                # Note: returning from a finally block swallows any exception
                # raised in the try block; here the caller still gets the
                # debugger back even if parsing failed.
                return debugger
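A minimal call sketch for the harness above; the grammar text, rule name, and input are invented for illustration, and compileInlineGrammar is assumed to come from the surrounding test base class:

        # Hypothetical invocation from inside the same test case.
        grammar = (
            "grammar T;\n"
            "options { language = Python; }\n"
            "r : 'a' 'b' EOF ;\n")
        self.execParser(grammar, 'r', 'ab', listener=None)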
Example No. 2
    def testLBBeyondBegin(self):
        """CommonTokenStream.LB(-1): beyond begin"""

        self.source.tokens.append(
            antlr3.CommonToken(type=12)
            )
        
        self.source.tokens.append(
            antlr3.CommonToken(type=12, channel=antlr3.HIDDEN_CHANNEL)
            )
        
        self.source.tokens.append(
            antlr3.CommonToken(type=12, channel=antlr3.HIDDEN_CHANNEL)
            )
        
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
            )
        
        stream = antlr3.CommonTokenStream(self.source)
        self.failUnless(stream.LB(1) is None)

        stream.consume()
        stream.consume()
        self.failUnless(stream.LB(3) is None)
Example No. 3
def convert_charstream(char, verbose=0):
    lexer = JavaScriptLexer(char)
    tokens = antlr3.CommonTokenStream(lexer)
    pp = JavaScriptParser(tokens)
    prgm = pp.program()
    tree = prgm.tree
    return convert_tree(tree, verbose)
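A hedged usage sketch; it assumes the generated JavaScriptLexer/JavaScriptParser and convert_tree are importable from this project:

import antlr3

# The function expects a character stream, not a raw string.
stream = antlr3.ANTLRStringStream("var x = 1;")
result = convert_charstream(stream, verbose=1)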
Example No. 4
def parse(string, noprint=False):
  cStream = antlr3.StringStream(string)
  lexer   = foo_langLexer(cStream)
  tStream = antlr3.CommonTokenStream(lexer)
  parser  = foo_langParser(tStream)

  try:
    return parser.start()
  except RecognitionException as e:
    if not noprint:
      print "Exception:", e, ":"
      print "  index  :", e.index
      print "  token  :", e.token
      print "  c      :", e.c
      print "  line   :", e.line
      lines = string.split("\n")
      if e.line >= 2:   # guard: there is no line above the first one
        print "          ", lines[e.line-2]
      print "       -->", lines[e.line-1]
      if e.line < len(lines):
        print "          ", lines[e.line]
      print "  pos    :", e.charPositionInLine
      print "  info   :", e.approximateLineInfo
      raise RuntimeError("Failed to parse")
    else:
      raise
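Usage sketch (hypothetical foo_lang source; assumes the generated foo_langLexer/foo_langParser and RecognitionException are importable as in the snippet):

try:
    ast = parse("some foo_lang source", noprint=True)
except RecognitionException:
    ast = None   # parse failed quietly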
Example No. 5
    def sql2sparkScaffold(self, relative_path):
        '''
        Parses the given SQL file and compares the parse tree with the expected parse tree
        '''
        errorMsg = """Expected output and produced output do not match for %s:
                         Expected output: %s
                         ---------------------------------------------------------------
                         Produced output: %s
                         ---------------------------------------------------------------
                         Diff: %s
                         ==============================================================="""
        path = os.path.join(base_tests_path, relative_path)
        with open(path) as sqlFile:
            # TODO Meisam: Make the grammar case insensitive?
            query = sqlFile.read().upper()
            stringStream = antlr3.StringStream(query)
            lexer = YSmartLexer(stringStream)

            tokenStream = antlr3.CommonTokenStream(lexer)

            parser = YSmartParser(tokenStream)

            parse_tree = parser.start_rule()

            graphviz = visit_tree(parse_tree.tree)
            # The with-statement flushes and closes the file on exit.
            with open(path + ".dot", 'w') as dot_file:
                dot_file.write(graphviz)
Example No. 6
    def testValid1(self):
        cStream = antlr3.StringStream('int a;')

        lexer = self.getLexer(cStream)
        tStream = antlr3.CommonTokenStream(lexer)
        parser = self.getParser(tStream)
        events = parser.translation_unit()
Example No. 7
    def testValid(self):
        cStream = antlr3.StringStream("int foo;")
        lexer = self.getLexer(cStream)
        tStream = antlr3.CommonTokenStream(lexer)
        parser = self.getParser(tStream)
        name = parser.declaration()
        assert name == 'foo', name
Example No. 8
    def testMarkRewind(self):
        """CommonTokenStream.mark()/rewind()"""

        self.source.tokens.append(
            antlr3.CommonToken(type=12)
            )
        
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
            )
        
        self.source.tokens.append(
            antlr3.CommonToken(type=antlr3.EOF)
            )
        
        stream = antlr3.CommonTokenStream(self.source)
        stream.fillBuffer()
        
        stream.consume()
        marker = stream.mark()
        
        stream.consume()
        stream.rewind(marker)
        
        self.failUnlessEqual(stream.LA(1), 13)
Example No. 9
def parse_func(pfunc):
    char_stream = antlr3.ANTLRStringStream(pfunc)
    lexer = PyFuncLexer(char_stream)
    tokens = antlr3.CommonTokenStream(lexer)
    tokens.fillBuffer()
    parser = PyFuncParser(tokens)
    return parser.pyfunc()
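Usage sketch, assuming PyFuncLexer/PyFuncParser were generated from a hypothetical PyFunc grammar whose entry rule is pyfunc:

result = parse_func("def add(a, b): return a + b")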
Example No. 10
    def do_test(self, dgdl):
        """Test the given DGDL specification.

        :param dgdl: the DGDL specification
        :type dgdl: str
        """
        # To catch parse errors, redirect sys.stderr to a StringIO buffer.
        old_stderr = sys.stderr
        sys.stderr = captured_stderr = StringIO()
        try:
            # Attempt to parse the DGDL spec with antlr.
            char_stream = antlr3.ANTLRStringStream(dgdl)
            lexer = dgdlLexer(char_stream)
            tokens = antlr3.CommonTokenStream(lexer)
            parser = dgdlParser(tokens)
            tree = parser.system().tree
            # Return any parse errors, or an empty string if there were none.
            return captured_stderr.getvalue()
        except Exception:
            # An error with even trying to parse, not a parse error.
            return None
        finally:
            # Always restore sys.stderr, even if parsing blew up.
            sys.stderr = old_stderr
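A sketch of how a concrete test might call do_test; the DGDL text and the assertion are illustrative, not from the original suite:

    def test_minimal_system(self):
        dgdl = "system minimal { ... }"   # placeholder DGDL text
        errors = self.do_test(dgdl)
        # An empty string means the spec parsed without errors.
        self.assertEqual(errors, "")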
Example No. 11
def main(argv=sys.argv):
    inpath, outpath = argv[1:]
    with open(inpath, 'rb') as inf:
        char_stream = antlr3.ANTLRInputStream(inf)
    lexer = ZasLexer(char_stream)
    tokens = antlr3.CommonTokenStream(lexer)
    parser = ZasParser(tokens)
    r = parser.program()
    t = r.tree
    #print t.toStringTree()
    nodes = antlr3.tree.CommonTreeNodeStream(t)
    nodes.setTokenStream(tokens)
    assembler = ZasAssembler()
    walker = ZasWalker(nodes, assembler)
    walker.program()
    assembler.finalize()
    zcode = []
    for secname in ('data', 'rodata', 'text'):
        zcode.append(assembler.sections[secname].getvalue())
    zcode = ''.join(zcode)[0x40:]
    header = ZHeader()
    header.version = 5
    header.initpc = assembler.start
    header.globals = assembler.globals
    header.statmem = assembler.sections['rodata'].base
    header.himem = assembler.sections['text'].base
    header.filesz = len(zcode) + 0x40
    with open(outpath, 'wb') as outf:
        outf.write(str(header))
        outf.write(zcode)
    return 0
Example No. 12
def CreateParser(query):
    """Creates a Query Parser."""
    input_string = antlr3.ANTLRStringStream(query)
    lexer = QueryLexerWithErrors(input_string)
    tokens = antlr3.CommonTokenStream(lexer)
    parser = QueryParserWithErrors(tokens)
    return parser
Example No. 13
def CreateParser(expression):
    """Creates an Expression Parser."""
    input_string = antlr3.ANTLRStringStream(expression)
    lexer = ExpressionLexerWithErrors(input_string)
    tokens = antlr3.CommonTokenStream(lexer)
    parser = ExpressionParserWithErrors(tokens)
    return parser
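Usage sketch for the two factories above, which come from different modules despite the shared name; the entry-rule names are assumptions about the generated parsers:

parser = CreateParser('salary > 1000')   # the expression variant
result = parser.expression()             # assumed entry-rule name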
Example No. 14
def parseFSM(path):
    # ANTLRInputStream consumes the whole file up front, so the handle
    # can be closed immediately afterwards.
    with open(path) as fsm_file:
        char_stream = antlr3.ANTLRInputStream(fsm_file)
    lexer = FsmlLexer(char_stream)
    tokens = antlr3.CommonTokenStream(lexer)
    parser = FsmlParser(tokens)
    parser.fsm()
    return parser.fsmObject
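Usage sketch with a hypothetical input file:

fsm = parseFSM('traffic_light.fsml')   # hypothetical .fsml path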
Example No. 15
	def parse(self, text):
		if not isinstance(text, unicode):
			logger.info("converting input string to utf-8")
			text = text.decode('UTF-8')
		logger.debug(text)
		char_stream = antlr3.ANTLRStringStream(text)
		try:
			lexer = cp_lexer(char_stream)
			tokenstream = antlr3.CommonTokenStream(lexer)
			parser = cp_parser(tokenstream)
			try:
				r = parser.doc()
				root = r.tree
				logger.debug(root.toStringTree())
				try:
					nodes = antlr3.tree.CommonTreeNodeStream(root)
					nodes.setTokenStream(tokenstream)
					tp = cp_treeparser(nodes)
					tp.doc()
					logger.debug(tp.refs)
					json_string = json.dumps(tp.refs)
					return json.loads(json_string)
				except Exception as e:
					logger.error("%s"%e)
					logger.error("there was a problem w/ the TreeParser: exiting")
			except Exception as e:
				logger.error("%s"%e)
				logger.error("there was a problem with the parser: exiting")
		except Exception as e:
			logger.error("%s"%e)
			logger.error("there was a problem with the lexer: exiting")
Example No. 16
    def testConsume(self):
        """CommonTokenStream.consume()"""

        self.source.tokens.append(
            antlr3.CommonToken(type=12)
            )
        
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
            )
        
        self.source.tokens.append(
            antlr3.CommonToken(type=antlr3.EOF)
            )
        
        stream = antlr3.CommonTokenStream(self.source)
        self.failUnlessEqual(stream.LA(1), 12)

        stream.consume()
        self.failUnlessEqual(stream.LA(1), 13)

        stream.consume()
        self.failUnlessEqual(stream.LA(1), antlr3.EOF)

        stream.consume()
        self.failUnlessEqual(stream.LA(1), antlr3.EOF)
Example No. 17
    def testValid4(self):
        cStream = antlr3.StringStream('bar name1 name2')
        lexer = self.getLexer(cStream)
        tStream = antlr3.CommonTokenStream(lexer)
        parser = self.getParser(tStream)
        parser.cond = False
        events = parser.r()
Example No. 18
    def __init__(self, file="", filename="", graphexport=False, dbg_enabled: bool = False):
        Debug.__init__(self, dbg_enabled)

        # PROTO DATA OBJECTS ####
        self.constNode: Dict[str, CommonTree] = {}
        self.networkNode: List[CommonTree] = []

        # Architecture nodes
        self.cache_node: PCCObject = None
        self.dir_node: PCCObject = None
        self.mem_node: PCCObject = None

        self.msgNode: List[PCCObject] = []
        self.msgTypes: List[str] = []
        self.dataMsgTypes: List[str] = []  # Data msg type names, should be included in the message

        self.archNode: Dict[str, List[Transaction]] = {}
        self.stableStates: Dict[str, List[str]] = {}        # {arch_name: List[stable_state_names]}
        self.initStateNodes: Dict[str, str] = {}                # This is missing

        if file and filename:
            self.filename = filename
            lexer = ProtoCCLexer(antlr3.StringStream(file))
            parser = ProtoCCParser(antlr3.CommonTokenStream(lexer))
            tree = parser.document().getTree()
            new_tree_base = copy_tree(tree)
            self.pdebug(new_tree_base.toStringTree())
            self._ParseNodes(new_tree_base)

            self.perror("Accesses for SSP not defined", self.checkAccessBehaviourDefined())
            self.perror("Terminal states detected in SSP", self.checkAllStatesReachable())

            if graphexport:
                self._dArch()
Example No. 19
    def testFillBuffer(self):
        """CommonTokenStream.fillBuffer()"""

        self.source.tokens.append(
            antlr3.CommonToken(type=12)
            )
        
        self.source.tokens.append(
            antlr3.CommonToken(type=13)
            )
        
        self.source.tokens.append(
            antlr3.CommonToken(type=14)
            )
        
        self.source.tokens.append(
            antlr3.CommonToken(type=antlr3.EOF)
            )
        
        stream = antlr3.CommonTokenStream(self.source)
        stream.fillBuffer()

        self.failUnlessEqual(len(stream.tokens), 3)
        self.failUnlessEqual(stream.tokens[0].type, 12)
        self.failUnlessEqual(stream.tokens[1].type, 13)
        self.failUnlessEqual(stream.tokens[2].type, 14)
Example No. 20
	def compile(self, srcfile, base_dir, output_dir):
		#fp = codecs.open(sys.argv[1], 'r', 'utf-8')
		# ANTLRInputStream reads the whole file, so close the handle after.
		with open(srcfile, 'r') as fp:
			char_stream = antlr3.ANTLRInputStream(fp)
		lexer = ExprLexer(char_stream)
		tokens = antlr3.CommonTokenStream(lexer)

		parser = ExprParser(tokens)
		r = parser.prog()

		# this is the root of the AST
		root = r.tree
		#print (root.toStringTree())
		#print '-------'

		nodes = antlr3.tree.CommonTreeNodeStream(root)
		nodes.setTokenStream(tokens)
		from Eval import Eval
		evaluator = Eval(nodes)  # renamed: don't shadow the builtin eval

		#######################################
		head, tail = os.path.split(srcfile)

		if not os.path.exists(output_dir):
			os.mkdir(output_dir)
		if not os.path.exists(output_dir + '/__init__.py'):
			# Create an empty __init__.py so the output dir is a package.
			open(output_dir + '/__init__.py', 'w').close()

		dstfile = os.path.normpath(output_dir + '/' + tail.split('.')[0] + '.py')
		#print 'compile: %-30s=> %s' % (srcfile, dstfile)

		cpy = CpyBuilder(dstfile, base_dir, output_dir)
		evaluator.prog(cpy)
		return dstfile
Example No. 21
    def parse_str(self, content):
        stream = antlr3.ANTLRInputStream(StringIO.StringIO(content))
        lexer = CoreLexer(stream)
        tokens = antlr3.CommonTokenStream(lexer)
        parser = CoreParser(tokens)
        ast = parser.program()
        return ast.tree
Example No. 22
def CreateParser(parse_string):
    """Creates a Groc Parser."""
    input_string = antlr3.ANTLRStringStream(parse_string)
    lexer = GrocLexerWithErrors(input_string)
    tokens = antlr3.CommonTokenStream(lexer)
    parser = GrocParserWithErrors(tokens)
    return parser
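Usage sketch; treating timespec as the Groc entry rule is an assumption about the generated parser:

parser = CreateParser('every monday 09:00')
result = parser.timespec()   # assumed entry rule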
Example No. 23
    def execParser(self, grammar, grammarEntry, input, expectErrors=False):
        lexerCls, parserCls = self.compileInlineGrammar(grammar)

        cStream = antlr3.StringStream(input)
        lexer = lexerCls(cStream)
        tStream = antlr3.CommonTokenStream(lexer)
        parser = parserCls(tStream)
        r = getattr(parser, grammarEntry)()

        if not expectErrors:
            self.assertEquals(len(parser._errors), 0, parser._errors)

        result = ""

        if r is not None:
            if hasattr(r, 'result'):
                result += r.result

            if r.tree is not None:
                result += r.tree.toStringTree()

        if not expectErrors:
            return result

        else:
            return result, parser._errors
Example No. 24
def _token_stream(name, text):
    """ Helper method for generating a token stream from text. """
    char_stream = antlr3.ANTLRStringStream(text)
    lexer = DemystifyLexer.DemystifyLexer(char_stream)
    lexer.card = name
    # tokenizes completely and logs on errors
    return antlr3.CommonTokenStream(lexer)
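Usage sketch; the card name and rules text are illustrative:

tokens = _token_stream('Llanowar Elves', 'Add {G}.')
tokens.fillBuffer()   # force complete tokenization, logging any errors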
Example No. 25
    def testLTEmptySource(self):
        """CommonTokenStream.LT(): EOF (empty source)"""

        stream = antlr3.CommonTokenStream(self.source)

        lt1 = stream.LT(1)
        self.failUnlessEqual(lt1.type, antlr3.EOF)
Example No. 26
    def testSetTokenSource(self):
        """CommonTokenStream.setTokenSource()"""

        stream = antlr3.CommonTokenStream(None)
        stream.setTokenSource(self.source)
        self.failUnlessEqual(stream.index(), -1)
        self.failUnlessEqual(stream.channel, antlr3.DEFAULT_CHANNEL)
Example No. 27
    def testValid1(self):
        cStream = antlr3.StringStream('   a')

        lexer = self.getLexer(cStream)
        tStream = antlr3.CommonTokenStream(lexer)
        parser = self.getParser(tStream)
        events = parser.a()
Example No. 28
    def testValid(self):
        cStream = antlr3.StringStream('foobar')
        lexer = self.getLexer(cStream)
        tStream = antlr3.CommonTokenStream(lexer)
        parser = self.getParser(tStream)
        parser.document()

        assert parser.events == ['before', 'after']
Example No. 29
    def testValid(self):
        cStream = antlr3.StringStream("int foo;")
        lexer = self.getLexer(cStream)
        tStream = antlr3.CommonTokenStream(lexer)
        parser = self.getParser(tStream)
        parser.program()

        assert len(parser.reportedErrors) == 0, parser.reportedErrors
Example No. 30
 def __init__(self, file=""):
     if file:
         self.file = file
         lexer = ProtoCCLexer(antlr3.StringStream(open(file).read()))
         parser = ProtoCCParser(antlr3.CommonTokenStream(lexer))
         tree = parser.document().getTree()
         pdebug(tree.toStringTree())
         self._ParseNodes(tree)