Example #1
    def parse(self, filename):
        # Build the character stream, the generated lexer/parser pair and a
        # tree adaptor, then run the top-level 'description' rule; the
        # returned object carries the resulting AST in its .tree attribute.
        char_stream = antlr3.FileStream(filename)
        lexer = MonitorLexer(char_stream)
        tokens = antlr3.CommonTokenStream(lexer)
        parser = MonitorParser(tokens)
        adaptor = CommonTreeAdaptor()
        parser.setTreeAdaptor(adaptor)
        res = parser.description()
        return res
Example #2
def toAST(files):
    # Root tree that collects one child AST per input file.
    ast = antlr3.tree.CommonTree(None)
    for fname in files:
        logging.debug("reading: " + os.path.normpath(fname))
        # Lex and parse each file with the generated Lexer/Parser.
        f_stream = antlr3.FileStream(fname)
        lexer = Lexer(f_stream)
        t_stream = antlr3.CommonTokenStream(lexer)
        parser = Parser(t_stream)
        try:
            _ast = parser.compilationUnit()
        except antlr3.RecognitionException:
            traceback.print_stack()
            sys.exit(1)
        ast.addChild(_ast.tree)
    return ast
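
A minimal usage sketch for toAST, assuming the generated Lexer/Parser modules are importable; the glob pattern is hypothetical. In the ANTLR 3 Python runtime the combined tree can be walked via getChildren()/toStringTree():

import glob

# hypothetical input paths
combined = toAST(glob.glob("inputs/*.src"))
for child in combined.getChildren():
    print(child.toStringTree())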
Example #3
    def testEncoded(self):
        path = os.path.join(os.path.dirname(__file__), 'teststreams.input2')

        stream = antlr3.FileStream(path, 'utf-8')

        stream.seek(4)
        marker1 = stream.mark()

        stream.consume()
        marker2 = stream.mark()

        stream.consume()
        marker3 = stream.mark()

        stream.rewind(marker2)
        self.assertEqual(stream.markDepth, 1)
        self.assertEqual(stream.index(), 5)
        self.assertEqual(stream.line, 2)
        self.assertEqual(stream.charPositionInLine, 1)
        self.assertEqual(stream.LA(1), u'ä')
Example #4
    def testNoEncoding(self):
        path = os.path.join(os.path.dirname(__file__), 'teststreams.input1')

        stream = antlr3.FileStream(path)

        stream.seek(4)
        marker1 = stream.mark()

        stream.consume()
        marker2 = stream.mark()

        stream.consume()
        marker3 = stream.mark()

        stream.rewind(marker2)
        self.assertEqual(stream.markDepth, 1)
        self.assertEqual(stream.index(), 5)
        self.assertEqual(stream.line, 2)
        self.assertEqual(stream.charPositionInLine, 1)
        self.assertEqual(stream.LT(1), 'a')
        self.assertEqual(stream.LA(1), ord('a'))
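
The mark/rewind protocol exercised by these tests also works on an in-memory stream; a minimal sketch using antlr3.ANTLRStringStream, with input text chosen here purely for illustration:

import antlr3

stream = antlr3.ANTLRStringStream(u"foo\nbar")
stream.seek(4)            # position at the 'b' on line 2
marker = stream.mark()    # remember this spot
stream.consume()          # advance one character
stream.rewind(marker)     # return to the marked position
assert stream.index() == 4
assert stream.line == 2
assert stream.charPositionInLine == 0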
Example #5
import sys
import antlr3
from PythonLexer import PythonLexer
from PythonParser import PythonParser
from PythonTokenSource import PythonTokenSource


# override nextToken to set startPos (this seems too hard)
class MyLexer(PythonLexer):
    def nextToken(self):
        self.startPos = self.getCharPositionInLine()
        return PythonLexer.nextToken(self)


# Lex the file named on the command line.
cStream = antlr3.FileStream(sys.argv[1])
lexer = MyLexer(cStream)

# First pass: buffer the raw lexer tokens, dropping off-channel ones.
tokens = antlr3.CommonTokenStream(lexer)
tokens.discardOffChannelTokens = True

# Second pass: PythonTokenSource rewrites the raw stream to handle Python's
# significant indentation and feeds a fresh token stream to the parser.
indentedSource = PythonTokenSource(tokens)
tokens = antlr3.CommonTokenStream(indentedSource)
parser = PythonParser(tokens)
parser.file_input()
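
The same two-stage pipeline can be fed from an in-memory string rather than a file; a sketch assuming the same generated modules as above, with a hypothetical source snippet:

source = u"print 'hello'\n"                 # hypothetical input program
cStream = antlr3.ANTLRStringStream(source)
lexer = MyLexer(cStream)
tokens = antlr3.CommonTokenStream(lexer)
tokens.discardOffChannelTokens = True
tokens = antlr3.CommonTokenStream(PythonTokenSource(tokens))
parser = PythonParser(tokens)
parser.file_input()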