Example 1
                cmdStart = False
            # Sanity check to make sure we haven't gone too far.
            tok_count += 1
            if tok_count > tok_lim and tok['start_column'] < init_indentation:
                break
        # end while
        curr_node.set_line_end_num(self.tokenizer.curr_line_no())
        # True if parsing stopped before end-of-file, i.e. there may be more input to parse.
        return tok['style'] != shared_lexer.EOF_STYLE
    # end parse_aux()

# end class Parser

if __name__ == "__main__":
    # Requires module-level imports of sys, tcl_lexer, and shared_lexer
    # (not shown in this excerpt).
    if len(sys.argv) == 1:
        # No argument: fall back to the lexer's built-in sample code.
        sample_code = tcl_lexer.provide_sample_code()
        fs = None
    elif sys.argv[1] == "-":
        # "-" means read the Tcl source from stdin.
        fs = sys.stdin
        closefs = False
    else:
        # Otherwise treat the argument as a path to a Tcl source file.
        fs = open(sys.argv[1], "r")
        closefs = True
    if fs is not None:
        sample_code = shared_lexer.read_and_detab(fs, closefs)
        # fs comes back closed
    tokenizer = tcl_lexer.TclLexer(sample_code)
    parser = Parser(tokenizer, "Tcl")
    tree = parser.parse()
    print("Analyze the parse tree")
    tree.dump()
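
For quick experimentation, the same lexer/parser pipeline can also be driven on an inline Tcl string instead of a file or stdin. A minimal sketch, assuming the excerpt above is saved as tcl_parser.py and that tcl_lexer is importable (both module names are assumptions; the TclLexer, Parser, parse, and dump calls mirror the __main__ block above):

# Minimal sketch: run the lexer/parser pipeline on an inline Tcl snippet.
# "tcl_parser" is an assumed module name for the excerpt above.
import tcl_lexer
from tcl_parser import Parser

sample_code = 'proc greet {name} {\n    puts "hello $name"\n}\n'

tokenizer = tcl_lexer.TclLexer(sample_code)  # same constructor used in __main__
parser = Parser(tokenizer, "Tcl")
tree = parser.parse()
tree.dump()  # print the resulting parse tree, as in the __main__ block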