def testProgressiveLossyReads(self):
    buff = CharBuffer('CharBufferTest.py')
    str1 = ''
    line1 = 1
    ch = buff.getChar()
    while not buff.eof():
        str1 += ch
        if ch == '\n':
            self.assertEqual(line1, buff.line())
            line1 += 1
        ch = buff.getChar()
    buff.close()
    # Write everything that was read to a scratch file, re-read it through
    # a second CharBuffer, and check that nothing was lost or reordered.
    testfile1 = open("testfile1.py", 'w')
    testfile1.write(str1)
    testfile1.close()
    refeed_buff = CharBuffer('testfile1.py')
    str2 = ''
    line2 = 1
    ch = refeed_buff.getChar()
    while not refeed_buff.eof():
        str2 += ch
        if ch == '\n':
            self.assertEqual(line2, refeed_buff.line())
            line2 += 1
        ch = refeed_buff.getChar()
    refeed_buff.close()
    self.assertEqual(line1, line2)
    self.assertEqual(str1, str2)
def testPushback(self):
    TESTSIZE = 128
    str1, str2 = '', ''
    buff = CharBuffer('CharBufferTest.py')
    for i in range(TESTSIZE):
        str1 += buff.getChar()
    line1 = buff.line()
    # Keep only the characters read from the current line, push them
    # back, and check that re-reading yields the same text and line.
    pos = buff.charInLine()
    str1 = str1[-pos:]
    buff.pushback(len(str1))
    for i in range(len(str1)):
        str2 += buff.getChar()
    self.assertEqual(str1, str2)
    self.assertEqual(line1, buff.line())
    buff.close()
def testBasicOpen(self):
    buff = CharBuffer('CharBufferTest.py')
    self.assertFalse(buff.closed())
    buff.close()
    self.assertTrue(buff.closed())
def testBasicRead(self):
    buff = CharBuffer('CharBufferTest.py')
    # This file begins with a shebang line, a blank line, and an import,
    # so reading character by character must yield exactly these characters.
    for expected in '#!/usr/bin/python\n\nim':
        self.assertEqual(expected, buff.getChar())
    buff.close()
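# The tests above assume roughly the following CharBuffer interface. This
# class is only a hypothetical sketch of the assumed semantics -- the real
# implementation lives in CharBuffer.py and is not shown here. Only the
# method names and their observable behavior are taken from the tests.
class CharBufferSketch:
    def __init__(self, path):
        self.__text = open(path).read()
        self.__pos = 0          # index of the next character to return
        self.__closed = False

    def getChar(self):
        """Return the next character, or '' once the text is exhausted."""
        ch = self.__text[self.__pos] if self.__pos < len(self.__text) else ''
        self.__pos += 1
        return ch

    def pushback(self, n):
        """Back up n characters; getChar() will return them again."""
        self.__pos -= n

    def line(self):
        """1-based line number of the most recently returned character."""
        return self.__text.count('\n', 0, max(self.__pos - 1, 0)) + 1

    def charInLine(self):
        """1-based position of the most recent character within its line."""
        last = max(self.__pos - 1, 0)
        return last - self.__text.rfind('\n', 0, last)

    def eof(self):
        """True once getChar() has been called past the last character."""
        return self.__pos > len(self.__text)

    def closed(self):
        return self.__closed

    def close(self):
        self.__closed = True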
try:
    listing = open(name + '.lis', 'w')
except OSError:
    print("Could not create listing file")
    exit(-1)
try:
    dest = open(name + '.s', 'w')
except OSError:
    print("Could not create output file")
    exit(-1)

symtab = MasterSymbolTable()

# What follows is a series of closure factories used to group functions
# that share some private state. This is done to avoid sharing globals
# between modules, at least as much as possible.
error, warning = error_handlers(symtab, listing, dest)
print_listing = lister(listing)  # get the listing function
emit, emit_epilog, load, store = codegen(symtab, dest)

get = Token.Tokenizer(CharBuffer(src), warning)
parse(get, dest, print_listing, symtab, warning, error, emit, load, store)

listing.write('\n\n' + str(symtab))
emit_epilog()
listing.close()
dest.close()
print()
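# A minimal sketch of the closure-factory pattern described above, with
# hypothetical bodies: the real error_handlers() is defined elsewhere and
# is not shown here. The factory captures its arguments plus a private
# error count, and returns functions that share that state instead of
# relying on module-level globals.
def error_handlers_sketch(symtab, listing, dest):
    errors = [0]  # mutable cell shared by the closures below

    def error(line, msg):
        errors[0] += 1
        listing.write("error on line %d: %s\n" % (line, msg))

    def warning(line, msg):
        listing.write("warning on line %d: %s\n" % (line, msg))

    return error, warning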
                ')')
        else:
            return self.token_desc[self.__toktype]

    def line(self):
        """Return the line on which the token was found."""
        return self.__line

    def type(self):
        """Return a string with the name of the token type."""
        return self.__toktype

    def value(self):
        """Return the token's lexeme."""
        return self.__lexeme


######################################
## test code

if __name__ == "__main__":
    def wr(tok, msg):
        print("bad " + str(tok) + ' ' + msg)

    get = Token.Tokenizer(CharBuffer(open("assign-test.al")), wr)
    tok = next(get)
    while tok.type() != Token.EOF:
        print(tok, ':', repr(tok))
        tok = next(get)