Example #1
 def consume(self, input:InputStream):
     curChar = input.LA(1)
     if curChar==ord('\n'):
         self.line += 1
         self.column = 0
     else:
         self.column += 1
     input.consume()
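The consume() above is how the ANTLR lexer simulator keeps line and column counters in sync while advancing the character stream. A minimal standalone sketch of the same bookkeeping, assuming only the antlr4 runtime (the PositionTracker class is hypothetical, not part of ANTLR):

from antlr4 import InputStream, Token

class PositionTracker:
    # Hypothetical helper mirroring the line/column bookkeeping above.
    def __init__(self):
        self.line = 1
        self.column = 0

    def consume(self, input: InputStream):
        if input.LA(1) == ord('\n'):
            self.line += 1
            self.column = 0
        else:
            self.column += 1
        input.consume()

stream = InputStream("ab\ncd")
tracker = PositionTracker()
while stream.LA(1) != Token.EOF:
    tracker.consume(stream)
print(tracker.line, tracker.column)  # prints: 2 2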
Example #2
    def accept(self, input:InputStream, lexerActionExecutor:LexerActionExecutor, startIndex:int, index:int, line:int, charPos:int):
        if LexerATNSimulator.debug:
            print("ACTION", lexerActionExecutor)

        # seek to after last char in token
        input.seek(index)
        self.line = line
        self.column = charPos

        if lexerActionExecutor is not None and self.recog is not None:
            lexerActionExecutor.execute(self.recog, input, startIndex)
Example #3
 def match(self, input:InputStream , mode:int):
     self.match_calls += 1
     self.mode = mode
     mark = input.mark()
     try:
         self.startIndex = input.index
         self.prevAccept.reset()
         dfa = self.decisionToDFA[mode]
         if dfa.s0 is None:
             return self.matchATN(input)
         else:
             return self.execATN(input, dfa.s0)
     finally:
         input.release(mark)
Example #4
    def evaluatePredicate(self, input:InputStream, ruleIndex:int, predIndex:int, speculative:bool):
        # assume true if no recognizer was provided
        if self.recog is None:
            return True

        if not speculative:
            return self.recog.sempred(None, ruleIndex, predIndex)

        savedcolumn = self.column
        savedLine = self.line
        index = input.index
        marker = input.mark()
        try:
            self.consume(input)
            return self.recog.sempred(None, ruleIndex, predIndex)
        finally:
            self.column = savedcolumn
            self.line = savedLine
            input.seek(index)
            input.release(marker)
Example #5
    def accept(
        self,
        input: InputStream,
        lexerActionExecutor: LexerActionExecutor,
        startIndex: int,
        index: int,
        line: int,
        charPos: int,
    ):
        if self.debug:
            print("ACTION %s\n", lexerActionExecutor)

        # seek to after last char in token
        input.seek(index)
        self.line = line
        self.column = charPos
        if input.LA(1) != Token.EOF:
            self.consume(input)

        if lexerActionExecutor is not None and self.recog is not None:
            lexerActionExecutor.execute(self.recog, input, startIndex)
Example #6
    def run_ldpath_program(self, uri, ldprogram_data):
        #See https://github.com/antlr/antlr4/blob/master/runtime/Python3/src/antlr4/FileStream.py#L27
        ldprogram_raw = codecs.decode(ldprogram_data, 'ascii', 'strict')

        self.logger.debug("Running ldpath program on uri: {}".format(uri))
        self.logger.debug("Running ldpath program: {}".format(ldprogram_data))

        ldprogram = LDPathProgram(self._db, self.config["cache"]["timeout"])
        # This instantiates the InputStream as needed by antlr
        self.logger.debug("Creating InputStream")
        ldprogram.raw = InputStream(ldprogram_raw)
        self.logger.debug("Parsing")
        ldprogram.parse()
        self.logger.debug("Running")
        ldprogram.run(uri)

        return ldprogram.response
Example #7
    def testOverlappingReplace2(self):
        input = InputStream('abc')
        lexer = TestLexer(input)
        stream = CommonTokenStream(lexer=lexer)
        stream.fill()
        rewriter = TokenStreamRewriter(tokens=stream)

        rewriter.replaceRange(0, 3, 'bar')
        rewriter.replaceRange(1, 2, 'foo')

        with self.assertRaises(ValueError) as ctx:
            rewriter.getDefaultText()

        self.assertEqual(
            """replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@2,2:2='c',<3>,1:2]:"foo"> overlap with previous <ReplaceOp@[@0,0:0='a',<1>,1:0]..[@3,3:2='<EOF>',<-1>,1:3]:"bar">""",
            str(ctx.exception)
        )
Example #8
    def testReplaceThenReplaceLowerIndexedSuperset(self):
        input = InputStream('abcccba')
        lexer = TestLexer(input)
        stream = CommonTokenStream(lexer=lexer)
        stream.fill()
        rewriter = TokenStreamRewriter(tokens=stream)

        rewriter.replaceRange(2, 4, 'xyz')
        rewriter.replaceRange(1, 3, 'foo')

        with self.assertRaises(ValueError) as ctx:
            rewriter.getDefaultText()
        msg = str(ctx.exception)
        self.assertEqual(
            """replace op boundaries of <ReplaceOp@[@1,1:1='b',<2>,1:1]..[@3,3:3='c',<3>,1:3]:"foo"> overlap with previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:"xyz">""",
            msg
        )
Example #9
    def parse(self):
        if self.spec is None:
            raise STLParseException('STL specification is empty')

        # Parse the STL spec - ANTLR4 magic

        entire_spec = self.modular_spec + self.spec
        input_stream = InputStream(entire_spec)
        lexer = LtlLexer(input_stream)
        stream = CommonTokenStream(lexer)
        parser = LtlParser(stream)
        parser._listeners = [LTLParserErrorListener()]
        ctx = parser.specification_file()

        # Create the visitor for the actual spec nodes
        visitor = LTLSpecificationParser(self)
        self.top = visitor.visitSpecification_file(ctx)
Example #10
    def testReplaceRangeThenInsertAtRightEdge(self):
        input = InputStream('abcccba')
        lexer = TestLexer(input)
        stream = CommonTokenStream(lexer=lexer)
        stream.fill()
        rewriter = TokenStreamRewriter(tokens=stream)

        rewriter.replaceRange(2, 4, 'x')
        rewriter.insertBeforeIndex(4, 'y')

        with self.assertRaises(ValueError) as ctx:
            rewriter.getDefaultText()
        msg = str(ctx.exception)
        self.assertEqual(
            "insert op <InsertBeforeOp@[@4,4:4='c',<3>,1:4]:\"y\"> within boundaries of previous <ReplaceOp@[@2,2:2='c',<3>,1:2]..[@4,4:4='c',<3>,1:4]:\"x\">",
            msg
        )
Example #11
    def _runTest(self, text, name):
        input_stream = InputStream(text)
        parser = CUParser(input_stream, name)
        cu = parser.parse()

        if len(cu.markers) > 0:
            print("Test Failed:")
            in_reader = StringIO(text)
            i = 1
            while True:
                line = in_reader.readline()
                if line == "":
                    break
                line = line[:-1]
                print("%3d: %s" % (i, line))
                i += 1

        self.assertEqual(len(cu.markers), 0, "Errors")
Example #12
def main(argv):
    usingFile = False
    # if argv is not None:
    #     inputStream = FileStream(sys.argv[1])
    #     usingFile = True
    while True:
        stdout.write("mathjunkie>> ")
        stdout.flush()  # this flushes the output out immediately.
        if not usingFile:
            inputStream = InputStream(stdin.readline())
        lexer = MathCrazeLexer(inputStream)
        tokenStream = CommonTokenStream(lexer)
        parser = MathCrazeParser(tokenStream)
        tree = parser.equal()
        # lispTree = tree.toStringTree(recog=parser)
        # print(lispTree)
        visitor = Visitor()
        print(visitor.visit(tree))
Example #13
def build_objects(cdl_filename=None):
    '''
    Build a VesselSeason instance from the CDL file
    '''

    if cdl_filename is None:
        input_stream = InputStream(sys.stdin.readline())
    else:
        input_stream = FileStream(cdl_filename)
    lexer = CdlLexer(input_stream)
    token_stream = CommonTokenStream(lexer)
    parser = CdlParser(token_stream)
    tree = parser.cdl_file()
    
    visitor = CdlFileVisitor()
    cdl_file = visitor.visit(tree)
    locator.save()
    return cdl_file
Example #14
def parse_plaintext(text, LexerClass, ParserClass, rule_name):
    '''
    There are half a dozen lines of boilerplate code needed to initialize a
    lexer and parser and to parse the plaintext into a tree.  This method
    centralizes that logic in one place.

    Returns: the parsed tree, accessible through the rule with `rule_name`
    from the parser's grammar.
    '''
    input_ = InputStream(text)
    lexer = LexerClass(input_)
    token_stream = CommonTokenStream(lexer)
    parser = ParserClass(token_stream)
    if hasattr(parser, rule_name):
        return getattr(parser, rule_name)()
    else:
        raise KeyError("Main rule %s doesn't exist in your parser's grammar",
                       rule_name)
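A hedged usage sketch for parse_plaintext above; MyGrammarLexer, MyGrammarParser, and the start rule name 'prog' are hypothetical stand-ins for classes generated by antlr4 -Dlanguage=Python3:

from MyGrammarLexer import MyGrammarLexer    # hypothetical generated lexer
from MyGrammarParser import MyGrammarParser  # hypothetical generated parser

# Parse starting at the (hypothetical) 'prog' rule and print the tree.
tree = parse_plaintext("1 + 2", MyGrammarLexer, MyGrammarParser, "prog")
print(tree.toStringTree())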
Example #15
    def context(self,
                text,
                symbol,
                trace=False,
                diag=False,
                build_parse_trees=True,
                as_string=False,
                fail_on_error=False,
                antlr_hook=None):
        """Returns an object subclass of a ``antlr4.ParserRuleContext`` corresponding to the specified symbol (possibly as a string).

        Args:
            text (str): the text to be parsed.
            symbol (str): the symbol (rule name) the parse should start at.
            trace (bool): if ``True`` the method ``antlr4.Parser.setTrace`` will be used to activate *tracing*.
            diag (bool): if ``True`` the parsing will be run with a ``antlr4.error.DiagnosticErrorListener`` setting the prediction mode to ``antlr4.atn.PredictionMode.LL_EXACT_AMBIG_DETECTION``.
            build_parse_trees (bool): if ``False`` the field ``antlr4.Parser.buildParseTrees`` will be set to ``False`` so that no trees will be generated.
            as_string (bool): if ``True`` the method will return the string representation of the context obtained using its ``toStringTree`` method.
            fail_on_error (bool): if ``True`` the method will return ``None`` in case of paring errors.
            antlr_hook (function): if not ``None`` will be called with the lexer and parser as arguments to further configure them before use

        Returns:
            A parser context, in case of parsing errors the it can be used to investigate the errors (unless ``fail_on_error`` is ``True`` in what case this method returns ``None``).
        """
        lexer = self.Lexer(InputStream(text))
        stream = CommonTokenStream(lexer)
        parser = self.Parser(stream)
        parser.setTrace(trace)
        if diag:
            parser.addErrorListener(DiagnosticErrorListener())
            parser._interp.predictionMode = PredictionMode.LL_EXACT_AMBIG_DETECTION
        parser.buildParseTrees = build_parse_trees
        if antlr_hook is not None: antlr_hook(lexer, parser)
        buf = StringIO()
        with redirect_stderr(buf):
            ctx = getattr(parser, symbol)()
        errs = buf.getvalue().strip()
        if errs:
            warn(errs)
            if fail_on_error: return None
        if as_string:
            return ctx.toStringTree(recog=self.Parser)
        else:
            return ctx
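A short usage sketch for context() above, assuming the surrounding class was constructed with generated Lexer/Parser classes and that the grammar defines an expr rule (both are assumptions; helper is a hypothetical instance):

ctx = helper.context("1 + 2", "expr")                        # parse, keep the context
tree_str = helper.context("1 + 2", "expr", as_string=True)   # Lisp-style tree string
maybe = helper.context("1 + +", "expr", fail_on_error=True)  # None if errors were reported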
Example #16
def deductionParser(expresion):
    input_stream = InputStream(expresion)
    # Setup Lexer
    print(f"Data:\n{input_stream}")
    lexer = WangLexer(input_stream)
    token_stream = CommonTokenStream(lexer)
    #Setup Parser (and own ErrorListener)
    parser = WangParser(token_stream)
    parser.removeErrorListeners()
    parser.addErrorListener(MyErrorListener())
    try:
        tree = parser.assertion()
    except SyntaxError as e:
        print(e.msg)
        sys.exit(-1)
    #Setup the Visitor and visit Parse tree
    visitor = WangPrintVisitor()
    print("*** Starts visit of data ***")
    return list(visitor.visit(tree))
Example #17
def compile(in_file, out_file):
    global options
    input = InputStream(in_file.read())
    lexer = MalicLexer(input)
    tokens = CommonTokenStream(lexer)
    parser = MalicParser(tokens)
    tree = parser.compilationUnit()
    walker = ParseTreeWalker()
    listener = ASTBuilder()
    walker.walk(listener, tree)
    ast = listener.ast
    #    initialize_builtin_type()
    ast.resolve_symbol()
    ast.check_type()
    if options.enable_output_irrelevant_elimination:
        ast.eliminate_output_irrelevant_node()

    ir_builder = IRBuilder(ast)
    ir_builder.generate_ir()

    emmiter = InstructionEmmiter(ir_builder)
    emmiter.emit()

    cfg_builder = ControlFlowAnalyzer(emmiter)
    cfg_builder.build_controlflow()

    #    print_instructions(emmiter.function_entities)
    dataflow_analyzer = DataFlowAnalyzer(emmiter)
    dataflow_analyzer.optimize()

    if options.print_instruction:
        print_instructions(emmiter.function_entities)
    register_config = RegisterConfig()

    allocator = Allocator(emmiter, register_config)
    allocator.allocate()

    translator = Translator(emmiter, register_config)
    asm = translator.translate()

    for s in asm:
        out_file.write(s + '\n')
Example #18
    def compile(self, script, raise_exceptions=False):

        input_stream = InputStream(script)

        lexer = EveScriptLexer(input_stream)

        if raise_exceptions:
            lexer.removeErrorListeners()
            lexer.addErrorListener(ThrowingErrorListener())

        token_stream = CommonTokenStream(lexer)

        parser = EveScriptParser(token_stream)

        if raise_exceptions:
            parser.removeErrorListeners()
            parser.addErrorListener(ThrowingErrorListener())

        tree = parser.script()

        return self.visitScript(tree)
Example #19
def main():
    """Funcio principal, inicialitza l'entrada, guarda en un pickle i genera una gràfica."""

    outputfile = "networkx_graph"

    if len(sys.argv) > 1:
        input_stream = FileStream(sys.argv[1], encoding="utf-8")
        if len(sys.argv) > 2:
            outputfile = sys.argv[2]
    else:
        input_stream = InputStream(input("? "))

    graph = parse_to_network(input_stream)

    graph_file = outputfile + ".pckl"
    plot_file = outputfile + ".png"

    save_pickle(graph, graph_file)
    graph_restored = read_pickle(graph_file)

    plot_network(graph_restored, plot_file)
Example #20
    def testToStringStartStop2(self):
        input = InputStream('x = 3 * 0 + 2 * 0;')
        lexer = TestLexer2(input)
        stream = CommonTokenStream(lexer=lexer)
        stream.fill()
        rewriter = TokenStreamRewriter(tokens=stream)

        self.assertEqual('x = 3 * 0 + 2 * 0;', rewriter.getDefaultText())

        # replace 3 * 0 with 0
        rewriter.replaceRange(4, 8, '0')
        self.assertEqual('x = 0 + 2 * 0;', rewriter.getDefaultText())
        self.assertEqual('x = 0 + 2 * 0;', rewriter.getText('default', Interval(0, 17)))
        self.assertEqual('0', rewriter.getText('default', Interval(4, 8)))
        self.assertEqual('x = 0', rewriter.getText('default', Interval(0, 8)))
        self.assertEqual('2 * 0', rewriter.getText('default', Interval(12, 16)))

        rewriter.insertAfter(17, "// comment")
        self.assertEqual('2 * 0;// comment', rewriter.getText('default', Interval(12, 18)))

        self.assertEqual('x = 0', rewriter.getText('default', Interval(0, 8)))
Example #21
    def test_basic_operation(self):
        not_a_target = """# Not a target:
.l.r:
#  Builtin rule
#  Implicit rule search has not been done.
#  Modification time never checked.
#  File has not been updated.
#  recipe to execute (built-in):
	$(LEX.l) $< > $@ 
	 mv -f lex.yy.r $@"""
        phony = """.PHONY: all
#  Implicit rule search has not been done.
#  Modification time never checked.
#  File has not been updated."""
        target1 = """objdir/bar.o: bar.c | objdir
#  Implicit rule search has been done.
#  Implicit/static pattern stem: 'bar'
#  Last modified 2017-02-20 14:12:50.825407391
#  File has been updated.
#  Successfully updated.
#  recipe to execute (from 'Makefile', line 5):
	touch $@"""
        target2 = """all: objdir/foo.o objdir/bar.o objdir/baz.o
#  Phony target (prerequisite of .PHONY).
#  Implicit rule search has not been done.
#  File does not exist.
#  File has been updated.
#  Successfully updated."""
        string = '\n\n'.join([not_a_target, phony, target1, not_a_target, target2])
        input_ = InputStream(string)
        lexer = TargetParagraphLexer(input_)
        token = lexer.nextToken()
        self.assertEqual(token.text, ''.join([target1, '\n']))
        self.assertEqual(token.type, TargetParagraphLexer.TARGET_PARAGRAPH)
        token = lexer.nextToken()
        self.assertEqual(token.text, target2)
        self.assertEqual(token.type, TargetParagraphLexer.TARGET_PARAGRAPH)
        token = lexer.nextToken()
        self.assertEqual(token.text, '<EOF>')
        self.assertEqual(token.type, Token.EOF)
Example #22
def main():
    global _verbosity
    compile_units = []
    parser = get_parser()

    argv = sys.argv[1:]

    args = parser.parse_args(argv)

    if args.v:
        _verbosity = 1

    for f in args.files:
        if not os.path.isfile(f):
            print("Error: file \"" + f + "\" does not exist")
            sys.exit(1)

        with open(f, "r") as fp:
            input_stream = InputStream(fp.read())

        verbose("Parsing file %s", f)
        parser = CUParser(input_stream, f)
        cu = parser.parse()

        if len(cu.markers) > 0:
            for m in cu.markers:
                print("Error: " + m.msg)
            sys.exit(1)

        compile_units.append(cu)

    if args.link:
        print("Error: link not currently supported")
        sys.exit(1)
Example #23
 def execute(self, lexer:Lexer, input:InputStream, startIndex:int):
     requiresSeek = False
     stopIndex = input.index
     try:
         for lexerAction in self.lexerActions:
             if isinstance(lexerAction, LexerIndexedCustomAction):
                 offset = lexerAction.offset
                 input.seek(startIndex + offset)
                 lexerAction = lexerAction.action
                 requiresSeek = (startIndex + offset) != stopIndex
             elif lexerAction.isPositionDependent:
                 input.seek(stopIndex)
                 requiresSeek = False
             lexerAction.execute(lexer)
     finally:
         if requiresSeek:
             input.seek(stopIndex)
Example #24
 def execute(self, lexer: Lexer, input: InputStream, startIndex: int):
     requiresSeek = False
     stopIndex = input.index
     try:
         for lexerAction in self.lexerActions:
             if isinstance(lexerAction, LexerIndexedCustomAction):
                 offset = lexerAction.offset
                 input.seek(startIndex + offset)
                 lexerAction = lexerAction.action
                 requiresSeek = (startIndex + offset) != stopIndex
             elif lexerAction.isPositionDependent:
                 input.seek(stopIndex)
                 requiresSeek = False
             lexerAction.execute(lexer)
     finally:
         if requiresSeek:
             input.seek(stopIndex)
Example #25
def evaluacion(to_parse_line):

    input_stream = InputStream(to_parse_line)

    # Setup Lexer
    print(f"Data:\n{input_stream}")
    lexer = WangLexer(input_stream)
    token_stream = CommonTokenStream(lexer)
    #Setup Parser (and own ErrorListener)
    parser = WangParser(token_stream)
    parser.removeErrorListeners()
    parser.addErrorListener(MyErrorListener())
    try:
        tree = parser.assertion()
    except SyntaxError as e:
        print(e.msg)
        sys.exit(-1)
    #Setup the Visitor and visit Parse tree
    visitor = WangPrintVisitor()
    print("*** Starts visit of data ***")
    res = visitor.visit(tree)
    print(f"*** La evaluacion de su propuesta es igual a {res} ***")
    return f"*** La evaluacion de su propuesta es igual a {res} ***"
Example #26
    def parse(answerSets, clingoOutput, two_stageParsing):
        tokens = CommonTokenStream(ClingoLexer(InputStream(clingoOutput)))
        parser = ClingoParser(tokens)
        visitor = ClingoParserVisitorImplementation(answerSets)

        if not two_stageParsing:
            visitor.visit(parser.output())

            return

        parser._interp.predictionMode = PredictionMode.SLL
        parser.removeErrorListeners()
        parser._errHandler = BailErrorStrategy()

        try:
            visitor.visit(parser.output())
        except RuntimeError as exception:
            if isinstance(exception, RecognitionException):
                tokens.seek(0)
                parser.addErrorListener(ConsoleErrorListener.INSTANCE)
                parser._errHandler = DefaultErrorStrategy()
                parser._interp.predictionMode = PredictionMode.LL
                visitor.visit(parser.output())
Example #27
    def parse(atom):
        tokens = CommonTokenStream(ASPGrammarLexer(InputStream(atom)))
        parser = ASPGrammarParser(tokens)
        visitor = ASPParser()
        parser._interp.predictionMode = PredictionMode.SLL

        parser.removeErrorListeners()

        parser._errHandler = BailErrorStrategy()

        try:
            visitor.visit(parser.output())
        except RuntimeError as exception:
            if isinstance(exception, RecognitionException):
                tokens.seek(0)
                parser.addErrorListener(ConsoleErrorListener.INSTANCE)

                parser._errHandler = DefaultErrorStrategy()
                parser._interp.predictionMode = PredictionMode.LL

                visitor.visit(parser.output())

        return visitor
Example #28
def interactive():
    Plotter.interactive(True)
    
    parser = GraphParser(None)
    visitor = ExecVisitor()

    lineno = 1

    while True:
        line = input(f"Graph [{lineno}]> ")

        istream = InputStream(line)
        lexer = GraphLexer(istream)
        lexer.line = lineno
        lexer.column = 0
        token_stream = CommonTokenStream(lexer)
        parser.setInputStream(token_stream)
        tree = parser.statement()

        result = visitor.visit(tree)

        print(f"Out   [{lineno}]>", result, '\n')
        lineno += 1
Example #29
def readAndPrint(str_stream):

    input_stream = InputStream(str_stream)

    lexer = reactLexer(input_stream)
    token_stream = CommonTokenStream(lexer)
    parser = reactParser(token_stream)
    #tree = parser.reaction()
    tree = parser.entries()

    #print(tree.toStringTree(recog=parser))
    visitor = ReactVisitor()
    visitor.visit(tree)

    print()
    visitor.genElements()

    print()
    visitor.printSODE()

    print()
    visitor.printJacobian()

    print()
Example #30
import sys

from antlr4 import CommonTokenStream, InputStream, ParseTreeWalker

from stlgrammarLexer import stlgrammarLexer
from stlgrammarParser import stlgrammarParser
from stlgrammarInterpreter import stlgrammarInterpreter
from stlgrammarSimplifier import stlgrammarSimplifier

if __name__ == '__main__':
    if len(sys.argv) > 1:
        stlFile = open(sys.argv[1], 'r')
    else:
        # input_stream = InputStream(sys.stdin.readline())
        stlFile = open('stl.expr', 'r')

    for line in stlFile.readlines():
        stlRule = line.strip()

        lexer = stlgrammarLexer(InputStream(stlRule))
        token_stream = CommonTokenStream(lexer)
        parser = stlgrammarParser(token_stream)
        tree = parser.prog()  # parse; start at prog

        # print tree as text
        lisp_tree_str = tree.toStringTree(recog=parser)
        print(lisp_tree_str)

        # stlgrammarSimplifier

        simplifier = stlgrammarSimplifier()
        walker = ParseTreeWalker()
        walker.walk(simplifier, tree)

        # stlgrammarInterpreter
Example #31
    def enterComparatorExpression(
            self, ctx: SimpleBooleanParser.ComparatorExpressionContext):
        print('comparation expr')

    def enterIdentifierExpression(
            self, ctx: SimpleBooleanParser.IdentifierExpressionContext):
        print('identifier')


if __name__ == '__main__':
    variables = {
        'a': True,
        'A': True,
    }
    exprs = ['A > 2', '1 >= 1.0']

    for expr in exprs:
        expr_stream = InputStream(expr)
        lexer = SimpleBooleanLexer(expr_stream)
        stream = CommonTokenStream(lexer)
        parser = SimpleBooleanParser(stream)

        tree = parser.parse()
        #printer = CustomVisitor()
        #printer.visit(tree)
        walker = ParseTreeWalker()
        walker.walk(CustomListener(), tree)
Example #32
 def getText(self, input: InputStream):
     # index is first lookahead char, don't include.
     return input.getText(self.startIndex, input.index - 1)
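Note that both indexes passed to InputStream.getText are inclusive, which is why the snippet subtracts one from input.index (the first lookahead character). A quick check against the runtime's InputStream:

from antlr4 import InputStream

s = InputStream("hello")
print(s.getText(0, 2))  # prints: hel (the stop index is inclusive)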
Example #33
 def __init__(self):
     InputStream.__init__(self, sys.stdin.read())
Example #34
 def newTokenStreamFromString(self, input_):
     stream = InputStream(input_)
     return EIndentingLexer(stream)
Example #35
if __name__ == '__main__':
    print("*** Testing Wang Parser (EIF400 II-2018) ***")
    if len(sys.argv) > 1:
        file = sys.argv[1]
        print(f'*** Processing from file "{file}" ***')
        input_stream = FileStream(file)
    else:
        print(f'*** Processing from console ***\n>', end='')
        to_parse_line = sys.stdin.readline()
        # Use demo line if none was typed
        if len(to_parse_line) <= 1:  # Just enter was hit
            to_parse_line = "q1 | q2 & q3 => q1, q2"
            print(f"Empty line. Testing with demo: {to_parse_line}")
        #
        input_stream = InputStream(to_parse_line)

    # Setup Lexer
    print(f"Data:\n{input_stream}")
    lexer = WangLexer(input_stream)
    token_stream = CommonTokenStream(lexer)
    #Setup Parser (and own ErrorListener)
    parser = WangParser(token_stream)
    parser.removeErrorListeners()
    parser.addErrorListener(MyErrorListener())
    try:
        tree = parser.assertion()
    except SyntaxError as e:
        print(e.msg)
        sys.exit(-1)
    #Setup the Visitor and visit Parse tree
Example #36
 def getText(self, input:InputStream):
     # index is first lookahead char, don't include.
     return input.getText(self.startIndex, input.index-1)
Example #37
        for function_name, message_text in zip(self.function_list, self.message_list):
            test_function_name = function_name.replace("(", "(self, ")
            unit_test_string += "def test_" + test_function_name + ":\n\t\tself.assertFalse("
            unit_test_string += "" + function_name + ", '" + message_text + "')\n\n\t"
        unit_test_string += "\n\nif __name__ == \"__main__\":\n\tif len(sys.argv) > 1:\n\t\t"
        unit_test_string += "potential_file = sys.argv.pop()\n\t\tif os.path.isfile(potential_file):\n\t\t\t"
        unit_test_string += "with open(potential_file, 'r') as read_file:\n\t\t\t\tMistakeTest.SUBMISSION_STRING = "
        unit_test_string += "read_file.read()\n\tunittest.main()"
        return unit_test_string


if __name__ == '__main__':
    if len(sys.argv) > 1:  # len(sys.argv) > 2 means output_file then input_file
        input_stream = FileStream(sys.argv[1])
    else:
        input_stream = InputStream(sys.stdin.read())

    lexer = specLang_draftP4Lexer(input_stream)
    token_stream = CommonTokenStream(lexer)
    parser = specLang_draftP4Parser(token_stream)
    tree = parser.document()

    #lisp_tree_str = tree.toStringTree(recog=parser)
    #print(lisp_tree_str)

    # listener
    print("Start Walking...")
    listener = SpecLangToPedalCodeTranslator()
    walker = ParseTreeWalker()
    walker.walk(listener, tree)
    final_string = listener.getTranslationString()
Example #38
 def tokenize(self, word: str) -> Tuple[Token, ...]:
     return self.get_all_tokens(self.lexer_instance(InputStream(word)))
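A standalone sketch of the same one-liner, assuming a hypothetical generated lexer MyGrammarLexer; Lexer.getAllTokens() in the antlr4 runtime drains the stream and returns the token list:

from typing import Tuple

from antlr4 import InputStream, Token
from MyGrammarLexer import MyGrammarLexer  # hypothetical generated lexer

def tokenize(word: str) -> Tuple[Token, ...]:
    # getAllTokens() reads tokens up to EOF and returns them as a list
    return tuple(MyGrammarLexer(InputStream(word)).getAllTokens())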