Example #1
    @classmethod
    def new(cls, io):
        IO_SIZE = 1000
        BUFFER_SIZE = 10000
        INDEX_COUNT = 50
        OUTPOINTS_SIZE = 1000

        # write the file header
        fileheader = FileHeader()
        io.write(FileHeaderSerializer.serialize(fileheader))
        # reserve the chunks
        chunkfile = ChunkFile(io)
        chunkfile.append_chunk(
            LOG_INDEX_NAME,
            INDEX_COUNT * LogIndexEntrySerializer.SERIALIZED_LENGTH)
        chunkfile.append_chunk(LOG_BUFFER_NAME, BUFFER_SIZE)
        chunkfile.append_chunk(OUTPOINTS_NAME, OUTPOINTS_SIZE)
        # load the log index and log buffer
        _, _logindexheader, logindexio = chunkfile.open_chunk(LOG_INDEX_NAME)
        logindex_reader = LogIndexReader(logindexio, INDEX_COUNT)
        logindex = SerializedLogIndex.new(logindex_reader)
        _, logbufferheader, logbufferio = chunkfile.open_chunk(LOG_BUFFER_NAME)
        buffer_reader = LogBufferReader(logbufferio, logbufferheader.length)
        logbuffer = LogBuffer(buffer_reader)
        # format the other chunks (not transactionally)
        _, outpointsheader, outpointsio = chunkfile.open_chunk(OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.new(outpointsio,
                                           outpointsheader.length,
                                           serializer=OutpointIndexSerializer)
        SerializedItemSet.load(outpoint_dict)
        # reopen the outpoints chunk through the transaction log
        txlog = TransactionLog(chunkfile, logindex, logbuffer)
        outpointchunk, outpointchunkheader = chunkfile.get_chunk(OUTPOINTS_NAME)
        outpoint_io = TransactionalIO(txlog, outpointchunk)
        outpoint_reader = SerializedDict.load(outpoint_io, OUTPOINTS_SIZE,
                                              OutpointIndexSerializer)
        outpoints = SerializedItemSet.load(outpoint_reader)
        return cls(txlog, outpoints)
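For orientation: the layout new() builds is a file header followed by named, fixed-size chunks, with the write-ahead log (index plus buffer) living in chunks of its own. Below is a minimal, self-contained model of that idea; ToyChunkFile and its on-disk format are hypothetical illustrations, not the real ChunkFile API, and the 13-byte entry size is an arbitrary stand-in for LogIndexEntrySerializer.SERIALIZED_LENGTH.

    # Minimal model of the header-plus-named-chunks layout (hypothetical
    # format, not the real ChunkFile): each chunk is declared by a small
    # header and its data region is reserved up front, zero-filled.
    import io
    import struct

    class ToyChunkFile(object):
        HEADER = struct.Struct("<16sI")  # chunk name (16 bytes) + data length

        def __init__(self, handle):
            self.handle = handle
            self.chunks = {}  # name -> (data offset, length)

        def append_chunk(self, name, length):
            self.handle.seek(0, io.SEEK_END)
            self.handle.write(self.HEADER.pack(name.encode(), length))
            self.chunks[name] = (self.handle.tell(), length)
            self.handle.write(b"\0" * length)  # reserve zero-filled space

        def open_chunk(self, name):
            offset, length = self.chunks[name]
            self.handle.seek(offset)
            return offset, length, self.handle

    f = ToyChunkFile(io.BytesIO())
    f.append_chunk("logindex", 50 * 13)  # INDEX_COUNT * entry size
    f.append_chunk("logbuffer", 10000)
    _, length, handle = f.open_chunk("logbuffer")
    assert length == 10000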
Example #2
    def test_SerializedLogIndex_recover_ErasesLastUnfinishedTransaction(self):
        COUNT = 10
        io = IoHandle.using_stringio(COUNT)
        reader = LogIndexReader(io, COUNT)
        logindex = SerializedLogIndex.new(reader)
        # one complete transaction, then one that never reaches END_TX
        logindex.log(LogIndexEntry(LogIndexEntry.BEGIN_TX))
        logindex.log(LogIndexEntry(LogIndexEntry.WRITE, 1))
        logindex.log(LogIndexEntry(LogIndexEntry.WRITE, 2))
        logindex.log(LogIndexEntry(LogIndexEntry.END_TX))
        logindex.log(LogIndexEntry(LogIndexEntry.BEGIN_TX))
        logindex.log(LogIndexEntry(LogIndexEntry.WRITE, 3))
        logindex.log(LogIndexEntry(LogIndexEntry.WRITE, 4))
        logindex2 = SerializedLogIndex.load(reader)

        logindex2.recover()

        # only the complete transaction is left queued for commit
        self.assertEqual(logindex2.tocommit,
                         deque([LogIndexEntry(LogIndexEntry.BEGIN_TX),
                                LogIndexEntry(LogIndexEntry.WRITE, 1),
                                LogIndexEntry(LogIndexEntry.WRITE, 2),
                                LogIndexEntry(LogIndexEntry.END_TX)]))
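The invariant this test checks is easy to state: after recover(), entries from any transaction that never reached END_TX are discarded, while complete transactions remain queued for commit. A conceptual sketch of that rule (not the real SerializedLogIndex.recover()):

    # Conceptual model of the recovery rule: keep everything up to the last
    # END_TX, drop the unfinished tail. Plain tuples stand in for
    # LogIndexEntry here.
    from collections import deque

    def recover_entries(entries):
        last_end = -1
        for i, (op, _) in enumerate(entries):
            if op == "END_TX":
                last_end = i
        return deque(entries[:last_end + 1])

    log = [("BEGIN_TX", None), ("WRITE", 1), ("WRITE", 2), ("END_TX", None),
           ("BEGIN_TX", None), ("WRITE", 3), ("WRITE", 4)]
    assert recover_entries(log) == deque(log[:4])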
Example #3
    def test_1(self):
        IO_SIZE = 1000
        BUFFER_SIZE = 1000
        INDEX_COUNT = 20
        io = IoHandle.using_stringio(BUFFER_SIZE)
        buffer_reader = LogBufferReader(io, BUFFER_SIZE)
        logbuffer = LogBuffer(buffer_reader)

        io = IoHandle.using_stringio(
            LogIndexEntrySerializer.SERIALIZED_LENGTH * INDEX_COUNT)
        logindex_reader = LogIndexReader(io, INDEX_COUNT)
        logindex = SerializedLogIndex.new(logindex_reader)

        io = MultiChunkIO.using_stringios({0: IO_SIZE})
        log = TransactionalChunkFile(io, logindex, logbuffer, 1, 2)
        log.start_transaction()
        log.write(0, 3, "hello test")
        log.write(0, 12, "hello blog")
Example #4
    @classmethod
    def load(cls, io):
        chunkfile = ChunkFile.open(io)
        _, logindexheader, logindexio = ChunkIO.from_name(
            chunkfile, LOG_INDEX_NAME)
        _, logbufferheader, logbufferio = ChunkIO.from_name(
            chunkfile, LOG_BUFFER_NAME)

        # entry count = chunk length divided by the serialized entry size
        logindex_reader = LogIndexReader(
            logindexio,
            logindexheader.length // LogIndexEntrySerializer.SERIALIZED_LENGTH)
        logindex = SerializedLogIndex.load(logindex_reader)

        logbuffer_reader = LogBufferReader(logbufferio, logbufferheader.length)
        logbuffer = LogBuffer.load(logindex, logbuffer_reader)

        txlog = TransactionLog(chunkfile, logindex, logbuffer)

        outpointchunk, outpointchunkheader = chunkfile.get_chunk(OUTPOINTS_NAME)
        outpoint_io = TransactionalIO(txlog, outpointchunk)
        outpoint_reader = OutpointIndexReader(outpoint_io,
                                              outpointchunkheader.length)
        outpoints = SerializedItemSet.load(outpoint_reader)
        return cls(txlog, outpoints)
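In load(), reads and writes on the outpoints chunk go through TransactionalIO, so data staged in the log can shadow the chunk until commit. The sketch below models that read-through behavior; ToyTransactionalIO and its overlay scheme are a guess at the concept, not the actual implementation.

    # Model of read-through-the-log behavior (hypothetical, not the real
    # TransactionalIO): pending writes shadow the underlying chunk until
    # they are committed.
    class ToyTransactionalIO(object):
        def __init__(self, chunk):
            self.chunk = chunk    # bytearray backing the chunk
            self.overlay = {}     # offset -> pending byte value

        def write(self, offset, data):
            for i, b in enumerate(data):
                self.overlay[offset + i] = b  # staged in the overlay

        def read(self, offset, length):
            return bytes(self.overlay.get(offset + i, self.chunk[offset + i])
                         for i in range(length))

    chunk = bytearray(b"0" * 16)
    tio = ToyTransactionalIO(chunk)
    tio.write(4, b"abcd")
    assert tio.read(4, 4) == b"abcd"      # reader sees the pending write
    assert bytes(chunk[4:8]) == b"0000"   # the chunk itself is untouched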