def test_lexer():
    """Has a typo in the generator stream on purpose.

    >>> generator = test_lexer()
    >>> generator.next()
    toe.symbol.intern("invalid")
    """
    # NOTE(review): this file defines test_lexer several times; only the last
    # definition survives import. Grammar: the 'NAMESPACE' rule demands at
    # least one trailing whitespace character (the deliberate typo), which the
    # test input never supplies, so lexing yields "invalid".
    # Line breaks inside the grammar string were reconstructed from a
    # whitespace-mangled source -- TODO confirm against the original file.
    generator_stream = cStringIO.StringIO()
    generator_stream.write("""
'NAMESPACE'[[:whitespace:]][[:whitespace:]]* NAMESPACE
' ' IGNORE
""")
    generator_stream.seek(0)

    # Build the lexer state table from the grammar above.
    table_1 = toe.symbol.table()
    generator = TLexerGenerator(table_1)
    lexer = TLexer()
    lexer.states = generator.load(generator_stream, False)
    assert(len(lexer.states) > 2)  # initial, invalid

    # Input lacks the whitespace the (typo'd) rule requires.
    test_stream = cStringIO.StringIO()
    test_stream.write("namespacenamespace")
    test_stream.seek(0)
    lexer.source_stream = test_stream

    # Yield one token symbol per lexeme until end of input.
    while not lexer.eof_p:
        yield lexer.token
        lexer.consume()
def test_lexer():
    """
    >>> generator = test_lexer()
    >>> generator.next()
    toe.symbol.intern("invalid")
    >>> generator.next()
    Traceback (most recent call last):
    ELexerEofError: Unexpected end of file
    """
    # NOTE(review): this file defines test_lexer several times; only the last
    # definition survives import. Single-rule grammar with no IGNORE rule for
    # whitespace, so a whitespace-only input lexes as "invalid" and then hits
    # an unexpected end of file.
    generator_stream = cStringIO.StringIO()
    generator_stream.write("""
'NAMESPACE' NAMESPACE
""")
    generator_stream.seek(0)

    table_1 = toe.symbol.table()
    generator = TLexerGenerator(table_1)
    lexer = TLexer()
    lexer.states = generator.load(generator_stream, False)
    assert(len(lexer.states) > 2)  # initial, invalid

    # Input is only whitespace (a single newline) -- line breaks inside the
    # literal were reconstructed from a mangled source; TODO confirm.
    test_stream = cStringIO.StringIO()
    test_stream.write("""
""")
    test_stream.seek(0)
    lexer.source_stream = test_stream

    # Yield one token symbol per lexeme until end of input.
    while not lexer.eof_p:
        yield lexer.token
        lexer.consume()
def test_lexer():
    """
    >>> generator = test_lexer()
    >>> generator.next()
    toe.symbol.intern("namespace")
    >>> generator.next()
    toe.symbol.intern("namespace")
    >>> generator.next()
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration
    """
    # NOTE(review): this file defines test_lexer several times; only the last
    # definition survives import. Grammar line breaks were reconstructed from
    # a whitespace-mangled source -- presumably the 'NAMESPACE' literal is a
    # rule of its own (the doctest shows it produces "namespace" tokens);
    # TODO confirm the exact rule layout against the original file.
    generator_stream = cStringIO.StringIO()
    generator_stream.write("""
'NAMESPACE'
' ' IGNORE
""")
    generator_stream.seek(0)

    table_1 = toe.symbol.table()
    generator = TLexerGenerator(table_1)
    lexer = TLexer()
    lexer.states = generator.load(generator_stream, False)
    assert(len(lexer.states) > 2)  # initial, invalid

    # Two back-to-back keywords, no separator: expects two "namespace"
    # tokens, then the generator is exhausted (StopIteration).
    test_stream = cStringIO.StringIO()
    test_stream.write("namespacenamespace")
    test_stream.seek(0)
    lexer.source_stream = test_stream

    # Yield one token symbol per lexeme until end of input.
    while not lexer.eof_p:
        yield lexer.token
        lexer.consume()
def test_lexer():
    """Has a typo in the generator stream on purpose.

    >>> generator = test_lexer()
    >>> generator.next()
    toe.symbol.intern("invalid")
    """
    # NOTE(review): near-duplicate of an earlier test_lexer in this file
    # (differs only in assert spacing); only the last definition survives
    # import. The 'NAMESPACE' rule demands trailing whitespace (the
    # deliberate typo), which the input never supplies, so lexing yields
    # "invalid". Grammar line breaks reconstructed -- TODO confirm.
    generator_stream = cStringIO.StringIO()
    generator_stream.write("""
'NAMESPACE'[[:whitespace:]][[:whitespace:]]* NAMESPACE
' ' IGNORE
""")
    generator_stream.seek(0)

    table_1 = toe.symbol.table()
    generator = TLexerGenerator(table_1)
    lexer = TLexer()
    lexer.states = generator.load(generator_stream, False)
    assert (len(lexer.states) > 2)  # initial, invalid

    # Input lacks the whitespace the (typo'd) rule requires.
    test_stream = cStringIO.StringIO()
    test_stream.write("namespacenamespace")
    test_stream.seek(0)
    lexer.source_stream = test_stream

    # Yield one token symbol per lexeme until end of input.
    while not lexer.eof_p:
        yield lexer.token
        lexer.consume()
def test_lexer():
    """
    >>> generator = test_lexer()
    >>> generator.next()
    toe.symbol.intern("namespace")
    >>> generator.next()
    toe.symbol.intern("namespace")
    >>> generator.next()
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration
    """
    # NOTE(review): near-duplicate of an earlier test_lexer in this file;
    # only the last definition survives import. Grammar line breaks were
    # reconstructed from a whitespace-mangled source -- presumably the
    # 'NAMESPACE' literal is a rule of its own (doctest shows it produces
    # "namespace" tokens); TODO confirm the exact rule layout.
    generator_stream = cStringIO.StringIO()
    generator_stream.write("""
'NAMESPACE'
' ' IGNORE
""")
    generator_stream.seek(0)

    table_1 = toe.symbol.table()
    generator = TLexerGenerator(table_1)
    lexer = TLexer()
    lexer.states = generator.load(generator_stream, False)
    assert (len(lexer.states) > 2)  # initial, invalid

    # Two back-to-back keywords: expects two "namespace" tokens, then the
    # generator is exhausted (StopIteration).
    test_stream = cStringIO.StringIO()
    test_stream.write("namespacenamespace")
    test_stream.seek(0)
    lexer.source_stream = test_stream

    # Yield one token symbol per lexeme until end of input.
    while not lexer.eof_p:
        yield lexer.token
        lexer.consume()
def test_lexer():
    """
    >>> generator = test_lexer()
    >>> generator.next()
    toe.symbol.intern("invalid")
    >>> generator.next()
    Traceback (most recent call last):
    ELexerEofError: Unexpected end of file
    """
    # NOTE(review): near-duplicate of an earlier test_lexer in this file;
    # only the last definition survives import. Single-rule grammar with no
    # whitespace IGNORE rule, so a whitespace-only input lexes as "invalid"
    # and then hits an unexpected end of file.
    generator_stream = cStringIO.StringIO()
    generator_stream.write("""
'NAMESPACE' NAMESPACE
""")
    generator_stream.seek(0)

    table_1 = toe.symbol.table()
    generator = TLexerGenerator(table_1)
    lexer = TLexer()
    lexer.states = generator.load(generator_stream, False)
    assert (len(lexer.states) > 2)  # initial, invalid

    # Input is only whitespace (a single newline) -- line breaks inside the
    # literal were reconstructed from a mangled source; TODO confirm.
    test_stream = cStringIO.StringIO()
    test_stream.write("""
""")
    test_stream.seek(0)
    lexer.source_stream = test_stream

    # Yield one token symbol per lexeme until end of input.
    while not lexer.eof_p:
        yield lexer.token
        lexer.consume()
def test_lexer():
    """
    >>> generator = test_lexer()
    >>> generator.next()
    ('len(lexer.states)', 1371)
    >>> generator.next()
    toe.symbol.intern("namespace")
    >>> generator.next()
    toe.symbol.intern("id")
    >>> generator.next()
    toe.symbol.intern("dot")
    >>> generator.next()
    toe.symbol.intern("id")
    >>> generator.next()
    toe.symbol.intern("dot")
    >>> generator.next()
    toe.symbol.intern("id")
    >>> generator.next()
    toe.symbol.intern("newline")
    >>> generator.next()
    toe.symbol.intern("newline")
    >>> generator.next()
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration
    """
    # NOTE(review): this file defines test_lexer several times; only the last
    # definition survives import. Fuller grammar: newlines are tokens,
    # other whitespace is ignored, plus keyword, identifier, assignment,
    # plus and dot rules. Line breaks inside the grammar string were
    # reconstructed (one rule per line) -- TODO confirm.
    generator_stream = cStringIO.StringIO()
    generator_stream.write("""
[[:newline:]] NEWLINE
[[:whitespace:]] IGNORE
'namespace'[[:whitespace:]]* NAMESPACE
[a-z][a-z0-9_?!]* ID
':='[[:whitespace:]]* ASSIGNMENT
'+'[[:whitespace:]]* PLUS
'.' DOT
""")
    generator_stream.seek(0)

    table_1 = toe.symbol.table()
    generator = TLexerGenerator(table_1)
    lexer = TLexer()
    lexer.states = generator.load(generator_stream, False)
    #for i in range(len(lexer.states)):
    #    print generator.string_transitions(i)
    assert(len(lexer.states) > 2)  # initial, invalid

    # First yield reports the state-table size so the doctest pins it (1371).
    yield ("len(lexer.states)", len(lexer.states))

    # The doctest expects two trailing NEWLINE tokens, so the input
    # presumably ends in two newlines (reconstructed -- TODO confirm).
    test_stream = cStringIO.StringIO()
    test_stream.write("""namespace aaa.aaa.aaa

""")
    test_stream.seek(0)
    lexer.source_stream = test_stream

    # Yield one token symbol per lexeme until end of input.
    while not lexer.eof_p:
        yield lexer.token
        lexer.consume()
def test_lexer():
    """
    >>> generator = test_lexer()
    >>> generator.next()
    toe.symbol.intern("namespace")
    >>> generator.next()
    toe.symbol.intern("id")
    >>> generator.next()
    toe.symbol.intern("dot")
    >>> generator.next()
    toe.symbol.intern("id")
    >>> generator.next()
    toe.symbol.intern("dot")
    >>> generator.next()
    toe.symbol.intern("id")
    >>> generator.next()
    Traceback (most recent call last):
      File "<stdin>", line 1, in ?
    StopIteration
    """
    # NOTE(review): this file defines test_lexer several times; only the last
    # definition survives import. Grammar: ignore whitespace, keyword with
    # mandatory trailing whitespace, identifiers (optionally ?/!-suffixed,
    # absorbing trailing whitespace), and dot. Line breaks inside the
    # grammar string were reconstructed (one rule per line) -- TODO confirm.
    generator_stream = cStringIO.StringIO()
    generator_stream.write("""
[[:whitespace:]] IGNORE
'NAMESPACE'[[:whitespace:]][[:whitespace:]]* NAMESPACE
[a-zA-Z_][a-zA-Z0-9_]*[?!]*[[:whitespace:]]* ID
'.' DOT
""")
    generator_stream.seek(0)

    table_1 = toe.symbol.table()
    generator = TLexerGenerator(table_1)
    lexer = TLexer()
    lexer.states = generator.load(generator_stream, False)
    assert(len(lexer.states) > 2)  # initial, invalid
    #print "len(lexer.states)", len(lexer.states)

    # Keyword, then a dotted identifier chain: namespace id dot id dot id.
    test_stream = cStringIO.StringIO()
    test_stream.write("NAMESPACE f.b.b ")
    test_stream.seek(0)
    lexer.source_stream = test_stream

    # Yield one token symbol per lexeme until end of input.
    while not lexer.eof_p:
        yield lexer.token
        lexer.consume()