Ejemplo n.º 1
0
def test_lexer():
  """
>>> generator = test_lexer()
>>> generator.next()
toe.symbol.intern("invalid")
>>> generator.next()
Traceback (most recent call last):
ELexerEofError: Unexpected end of file
  """

  # Rule table with a single keyword rule; built as an in-memory stream,
  # already positioned at the start for reading.
  rule_stream = cStringIO.StringIO("""
'NAMESPACE'   NAMESPACE

""")

  symtab = toe.symbol.table()
  builder = TLexerGenerator(symtab)

  lexer = TLexer()
  lexer.states = builder.load(rule_stream, False)
  # More states than just the initial and invalid ones must exist.
  assert len(lexer.states) > 2

  # The input is a lone newline, which no rule matches.
  lexer.source_stream = cStringIO.StringIO("""
""")

  # Hand each token to the caller until the input is exhausted.
  while not lexer.eof_p:
    yield lexer.token
    lexer.consume()
Ejemplo n.º 2
0
def test_lexer():
    """
Has a typo in the generator stream on purpose.

>>> generator = test_lexer()
>>> generator.next()
toe.symbol.intern("invalid")
  """

    # Deliberately malformed rule table, served from an in-memory
    # stream positioned at offset 0.
    spec = cStringIO.StringIO("""
'NAMESPACE'[[:whitespace:]][[:whitespace:]]*      NAMESPACE
' '              IGNORE
""")

    symbols = toe.symbol.table()
    lexgen = TLexerGenerator(symbols)

    lexer = TLexer()
    lexer.states = lexgen.load(spec, False)
    # Sanity check: generation produced more than initial + invalid.
    assert len(lexer.states) > 2

    # Feed input that the broken rules cannot tokenize correctly.
    lexer.source_stream = cStringIO.StringIO("namespacenamespace")

    # Yield every token until end of file.
    while not lexer.eof_p:
        yield lexer.token
        lexer.consume()
Ejemplo n.º 3
0
def test_lexer():
  """
Has a typo in the generator stream on purpose.

>>> generator = test_lexer()
>>> generator.next()
toe.symbol.intern("invalid")
  """

  # Intentionally broken rule specification (see docstring), supplied
  # as a readable in-memory stream starting at position 0.
  rules = cStringIO.StringIO("""
'NAMESPACE'[[:whitespace:]][[:whitespace:]]*      NAMESPACE
' '              IGNORE
""")

  table = toe.symbol.table()
  gen = TLexerGenerator(table)

  lexer = TLexer()
  lexer.states = gen.load(rules, False)
  # Expect states beyond the built-in initial and invalid ones.
  assert len(lexer.states) > 2

  # Source text the faulty rules fail to match as a keyword.
  lexer.source_stream = cStringIO.StringIO("namespacenamespace")

  # Stream tokens back to the caller one at a time.
  while not lexer.eof_p:
    yield lexer.token
    lexer.consume()
Ejemplo n.º 4
0
def test_lexer():
    """
>>> generator = test_lexer()
>>> generator.next()
toe.symbol.intern("namespace")
>>> generator.next()
toe.symbol.intern("namespace")
>>> generator.next()
Traceback (most recent call last):
  File "<stdin>", line 1, in ?
StopIteration
  """

    # Two-rule table: the NAMESPACE keyword and an ignored space,
    # provided as a read-ready in-memory stream.
    rule_stream = cStringIO.StringIO("""
'NAMESPACE'
' '           IGNORE
""")

    symtab = toe.symbol.table()
    builder = TLexerGenerator(symtab)

    lexer = TLexer()
    lexer.states = builder.load(rule_stream, False)
    # Generation must yield more than the initial and invalid states.
    assert len(lexer.states) > 2

    # Two back-to-back keyword occurrences with no separator.
    lexer.source_stream = cStringIO.StringIO("namespacenamespace")

    # Emit one token per iteration until the source is consumed.
    while not lexer.eof_p:
        yield lexer.token
        lexer.consume()
Ejemplo n.º 5
0
def test_lexer():
  """
>>> generator = test_lexer()
>>> generator.next()
toe.symbol.intern("namespace")
>>> generator.next()
toe.symbol.intern("namespace")
>>> generator.next()
Traceback (most recent call last):
  File "<stdin>", line 1, in ?
StopIteration
  """

  # Rule table (keyword + ignored blank) as an in-memory stream that
  # starts out positioned at the beginning.
  spec = cStringIO.StringIO("""
'NAMESPACE'
' '           IGNORE
""")

  symbols = toe.symbol.table()
  lexgen = TLexerGenerator(symbols)

  lexer = TLexer()
  lexer.states = lexgen.load(spec, False)
  # There must be states besides initial and invalid.
  assert len(lexer.states) > 2

  # Input: the keyword written twice with nothing in between.
  lexer.source_stream = cStringIO.StringIO("namespacenamespace")

  # Produce tokens until the lexer signals end of file.
  while not lexer.eof_p:
    yield lexer.token
    lexer.consume()
Ejemplo n.º 6
0
def test_lexer():
    """
>>> generator = test_lexer()
>>> generator.next()
toe.symbol.intern("invalid")
>>> generator.next()
Traceback (most recent call last):
ELexerEofError: Unexpected end of file
  """

    # One keyword rule, loaded from an in-memory stream that is
    # already rewound for reading.
    rules = cStringIO.StringIO("""
'NAMESPACE'   NAMESPACE

""")

    table = toe.symbol.table()
    gen = TLexerGenerator(table)

    lexer = TLexer()
    lexer.states = gen.load(rules, False)
    # Expect more than just the initial and invalid states.
    assert len(lexer.states) > 2

    # A bare newline as input — nothing here matches a rule.
    lexer.source_stream = cStringIO.StringIO("""
""")

    # Walk the input, yielding each token in turn.
    while not lexer.eof_p:
        yield lexer.token
        lexer.consume()
Ejemplo n.º 7
0
def test_lexer():
  """
>>> generator = test_lexer()
>>> generator.next()
('len(lexer.states)', 1371)
>>> generator.next()
toe.symbol.intern("namespace")
>>> generator.next()
toe.symbol.intern("id")
>>> generator.next()
toe.symbol.intern("dot")
>>> generator.next()
toe.symbol.intern("id")
>>> generator.next()
toe.symbol.intern("dot")
>>> generator.next()
toe.symbol.intern("id")
>>> generator.next()
toe.symbol.intern("newline")
>>> generator.next()
toe.symbol.intern("newline")
>>> generator.next()
Traceback (most recent call last):
  File "<stdin>", line 1, in ?
StopIteration
  """
  # Full rule set covering newlines, whitespace, the namespace
  # keyword, identifiers, and punctuation, as an in-memory stream.
  rule_stream = cStringIO.StringIO("""
[[:newline:]]                 NEWLINE
[[:whitespace:]]              IGNORE
'namespace'[[:whitespace:]]*  NAMESPACE
[a-z][a-z0-9_?!]*             ID
':='[[:whitespace:]]*         ASSIGNMENT
'+'[[:whitespace:]]*          PLUS
'.'                           DOT
""")

  symtab = toe.symbol.table()
  builder = TLexerGenerator(symtab)

  lexer = TLexer()
  lexer.states = builder.load(rule_stream, False)

  # More states than the built-in initial and invalid ones.
  assert len(lexer.states) > 2
  # First yield reports the generated state count (checked by doctest).
  yield ("len(lexer.states)", len(lexer.states))

  # A namespace declaration followed by two blank lines.
  lexer.source_stream = cStringIO.StringIO("""namespace aaa.aaa.aaa

""")

  # Then yield each token until end of input.
  while not lexer.eof_p:
    yield lexer.token
    lexer.consume()
Ejemplo n.º 8
0
def test_lexer():
  """

>>> generator = test_lexer()
>>> generator.next()
toe.symbol.intern("namespace")
>>> generator.next()
toe.symbol.intern("id")
>>> generator.next()
toe.symbol.intern("dot")
>>> generator.next()
toe.symbol.intern("id")
>>> generator.next()
toe.symbol.intern("dot")
>>> generator.next()
toe.symbol.intern("id")
>>> generator.next()
Traceback (most recent call last):
  File "<stdin>", line 1, in ?
StopIteration
  """

  # Rules for whitespace, the keyword, identifiers and the dot,
  # supplied as a readable in-memory stream (note: '.' rule is
  # tab-separated from its token name).
  spec = cStringIO.StringIO("""
[[:whitespace:]]                                IGNORE
'NAMESPACE'[[:whitespace:]][[:whitespace:]]*    NAMESPACE
[a-zA-Z_][a-zA-Z0-9_]*[?!]*[[:whitespace:]]*    ID
'.'	DOT
""")

  symbols = toe.symbol.table()
  lexgen = TLexerGenerator(symbols)

  lexer = TLexer()
  lexer.states = lexgen.load(spec, False)
  # Generation should produce states beyond initial and invalid.
  assert len(lexer.states) > 2

  # A keyword followed by a dotted identifier path.
  lexer.source_stream = cStringIO.StringIO("NAMESPACE f.b.b ")

  # Yield tokens one by one until the source runs out.
  while not lexer.eof_p:
    yield lexer.token
    lexer.consume()
Ejemplo n.º 9
0
def _test_nosy_lexer():
    """
  >>> generator = _test_nosy_lexer()
  >>> generator.next()
  (toe.symbol.intern("string_literal"), 'toe.symbol.intern("string_literal")', '"he said "hello""')
  >>> generator.next()
  Traceback (most recent call last):
    File "<stdin>", line 1, in ?
  StopIteration
  """

    import cStringIO
    from toe.lexer.compiler import TLexerGenerator

    # Single string-literal rule (tab-separated), read from an
    # in-memory stream positioned at the start.
    spec = cStringIO.StringIO("""
'"'[^"]*'"'	STRING_LITERAL

""")

    symbols = toe.symbol.table()
    lexgen = TLexerGenerator(symbols)

    # The "nosy" lexer variant also exposes the matched text.
    lexer = TNosyLexer()
    lexer.states = lexgen.load(spec, False)
    # Expect states beyond the initial and invalid ones.
    assert len(lexer.states) > 2

    # A quoted string containing escaped inner quotes.
    lexer.source_stream = cStringIO.StringIO("\"he said \\\"hello\\\"\"")

    # Yield (token, repr(token), matched text) triples until EOF.
    while not lexer.eof_p:
        yield (lexer.token, repr(lexer.token), lexer.matched_text)
        lexer.consume()
Ejemplo n.º 10
0
def _test_nosy_lexer():
  """
  >>> generator = _test_nosy_lexer()
  >>> generator.next()
  (toe.symbol.intern("string_literal"), 'toe.symbol.intern("string_literal")', '"he said "hello""')
  >>> generator.next()
  Traceback (most recent call last):
    File "<stdin>", line 1, in ?
  StopIteration
  """

  import cStringIO
  from toe.lexer.compiler import TLexerGenerator

  # One rule recognizing double-quoted string literals, delivered as
  # an in-memory stream already rewound for reading.
  rule_stream = cStringIO.StringIO("""
'"'[^"]*'"'	STRING_LITERAL

""")

  symtab = toe.symbol.table()
  builder = TLexerGenerator(symtab)

  # TNosyLexer additionally records the matched source text.
  lexer = TNosyLexer()
  lexer.states = builder.load(rule_stream, False)
  # There must be more states than just initial and invalid.
  assert len(lexer.states) > 2

  # Input: a string literal with escaped quotes inside it.
  lexer.source_stream = cStringIO.StringIO("\"he said \\\"hello\\\"\"")

  # Yield (token, repr(token), matched text) until end of input.
  while not lexer.eof_p:
    yield (lexer.token, repr(lexer.token), lexer.matched_text)
    lexer.consume()