Example #1
0
    def test_ChunkFile_AppendAChunk_IOSeeksAndIOWriteAreCalledInSequence(self):
        """append_chunk must seek the io to EOF and then write the serialized chunk."""
        call_recorder = mock.Mock()
        fake_io = mock.Mock()
        # Funnel seek/write through one recorder so their relative order is visible.
        fake_io.seek = lambda *args: call_recorder("seek", *args)
        fake_io.write = lambda *args, **kwargs: call_recorder("write", *args, **kwargs)
        serializer_stub = mock.Mock(serialize=mock.Mock(return_value=""))
        chunk_file = ChunkFile(fake_io, serializer=serializer_stub)

        chunk_file.append_chunk(name="", length=0, version=0)

        expected = [mock.call("seek", 0, SEEK_END), mock.call("write", data="")]
        call_recorder.assert_has_calls(expected)
Example #2
0
    def test_ChunkFile_AppendAChunk_IOSeeksAndIOWriteAreCalledInSequence(self):
        # Record every io.seek / io.write into a single mock so the call
        # ordering can be asserted with assert_has_calls.
        recorded = mock.Mock()
        io_stub = mock.Mock()
        io_stub.seek = lambda *args: recorded("seek", *args)
        io_stub.write = lambda *args, **kwargs: recorded("write", *args, **kwargs)
        serializer = mock.Mock(serialize=mock.Mock(return_value=""))
        under_test = ChunkFile(io_stub, serializer=serializer)

        under_test.append_chunk(name="", length=0, version=0)

        recorded.assert_has_calls([
            mock.call("seek", 0, SEEK_END),
            mock.call("write", data=""),
        ])
Example #3
0
 def new(cls, io, INDEX_COUNT=50, BUFFER_SIZE=10000):
     """Create a new file on *io* with a log-index chunk and a log-buffer chunk."""
     chunkfile = ChunkFile(io)
     index_length = INDEX_COUNT * LogIndexEntrySerializer.SERIALIZED_LENGTH
     chunkfile.append_chunk(LOG_INDEX_NAME, index_length)
     chunkfile.append_chunk(LOG_BUFFER_NAME, BUFFER_SIZE)

     # Build the log index on top of its chunk.
     index_io = ChunkIO.from_name(chunkfile, LOG_INDEX_NAME)
     logindex = SerializedLogIndex.new(LogIndexReader(index_io, INDEX_COUNT))

     # Build the log buffer on top of its chunk.
     buffer_io = ChunkIO.from_name(chunkfile, LOG_BUFFER_NAME)
     logbuffer = LogBuffer(LogBufferReader(buffer_io, buffer_io.size))

     return cls(chunkfile, logindex, logbuffer, index_io.chunkid,
                buffer_io.chunkid)
Example #4
0
    def load(cls, io, iosize):
        """Open an existing chunk file and rebuild the log index and log buffer."""
        chunkfile = ChunkFile.open(io, iosize)
        index_io = ChunkIO.from_name(chunkfile, LOG_INDEX_NAME)
        buffer_io = ChunkIO.from_name(chunkfile, LOG_BUFFER_NAME)

        # Entry count is derived from the chunk size (integer division).
        entry_count = index_io.size / LogIndexEntrySerializer.SERIALIZED_LENGTH
        logindex = SerializedLogIndex.load(LogIndexReader(index_io, entry_count))

        logbuffer = LogBuffer.load(
            logindex, LogBufferReader(buffer_io, buffer_io.size))

        return cls(chunkfile, logindex, logbuffer, index_io.chunkid,
                   buffer_io.chunkid)
Example #5
0
    def load(cls, io, iosize):
        """Reconstruct the store from an already-formatted *io* of *iosize* bytes."""
        chunkfile = ChunkFile.open(io, iosize)
        logindexio = ChunkIO.from_name(chunkfile, LOG_INDEX_NAME)
        logbufferio = ChunkIO.from_name(chunkfile, LOG_BUFFER_NAME)

        # How many serialized entries fit in the index chunk.
        count = logindexio.size / LogIndexEntrySerializer.SERIALIZED_LENGTH
        reader = LogIndexReader(logindexio, count)
        logindex = SerializedLogIndex.load(reader)

        logbuffer = LogBuffer.load(
            logindex,
            LogBufferReader(logbufferio, logbufferio.size))

        return cls(
            chunkfile, logindex, logbuffer,
            logindexio.chunkid, logbufferio.chunkid)
Example #6
0
    def load(cls, io):
        """Open *io*, rebuild the transaction log, then load the outpoint set."""
        chunkfile = ChunkFile.open(io)
        _, index_header, index_io = ChunkIO.from_name(chunkfile, LOG_INDEX_NAME)
        _, buffer_header, buffer_io = ChunkIO.from_name(chunkfile, LOG_BUFFER_NAME)

        # Entry count derived from the stored chunk length (integer division).
        count = index_header.length / LogIndexEntrySerializer.SERIALIZED_LENGTH
        logindex = SerializedLogIndex.load(LogIndexReader(index_io, count))
        logbuffer = LogBuffer.load(
            logindex, LogBufferReader(buffer_io, buffer_header.length))

        txlog = TransactionLog(chunkfile, logindex, logbuffer)

        # Outpoint access goes through the transaction log.
        chunk, chunk_header = chunkfile.get_chunk(OUTPOINTS_NAME)
        reader = OutpointIndexReader(
            TransactionalIO(txlog, chunk), chunk_header.length)
        outpoints = SerializedItemSet.load(reader)
        return cls(txlog, outpoints)
Example #7
0
    def new(cls, io, INDEX_COUNT=50, BUFFER_SIZE=10000):
        """Format *io* with log-index and log-buffer chunks and wire them up."""
        chunkfile = ChunkFile(io)
        index_bytes = INDEX_COUNT * LogIndexEntrySerializer.SERIALIZED_LENGTH
        chunkfile.append_chunk(LOG_INDEX_NAME, index_bytes)
        chunkfile.append_chunk(LOG_BUFFER_NAME, BUFFER_SIZE)

        # Set up the log index over its chunk.
        logindexio = ChunkIO.from_name(chunkfile, LOG_INDEX_NAME)
        logindex = SerializedLogIndex.new(
            LogIndexReader(logindexio, INDEX_COUNT))

        # Set up the log buffer over its chunk.
        logbufferio = ChunkIO.from_name(chunkfile, LOG_BUFFER_NAME)
        logbuffer = LogBuffer(LogBufferReader(logbufferio, logbufferio.size))

        return cls(
            chunkfile, logindex, logbuffer,
            logindexio.chunkid, logbufferio.chunkid)
Example #8
0
    def load(cls, io):
        """Open an existing file: rebuild log index, log buffer, transaction
        log and the outpoint item set, then return the assembled instance."""
        chunkfile = ChunkFile.open(io)
        _, indexheader, indexio = ChunkIO.from_name(chunkfile, LOG_INDEX_NAME)
        _, bufferheader, bufferio = ChunkIO.from_name(chunkfile, LOG_BUFFER_NAME)

        # Number of entries implied by the index chunk's stored length.
        entries = indexheader.length / LogIndexEntrySerializer.SERIALIZED_LENGTH
        logindex = SerializedLogIndex.load(LogIndexReader(indexio, entries))

        logbuffer = LogBuffer.load(
            logindex, LogBufferReader(bufferio, bufferheader.length))

        txlog = TransactionLog(chunkfile, logindex, logbuffer)

        # Route outpoint reads/writes through the transaction log.
        opchunk, opheader = chunkfile.get_chunk(OUTPOINTS_NAME)
        op_io = TransactionalIO(txlog, opchunk)
        outpoints = SerializedItemSet.load(
            OutpointIndexReader(op_io, opheader.length))
        return cls(txlog, outpoints)
Example #9
0
    def new(cls, io):
        """Format *io* as a new store.

        Writes the file header, creates the log-index / log-buffer /
        outpoints chunks, formats the outpoints chunk, and returns an
        instance whose outpoint set is accessed through a transaction log.
        """
        BUFFER_SIZE = 10000
        INDEX_COUNT = 50
        OUTPOINTS_SIZE = 1000

        # File header is written first, before any chunk data.
        fileheader = FileHeader()
        io.write(FileHeaderSerializer.serialize(fileheader))

        # Write the chunk layout.
        chunkfile = ChunkFile(io)
        chunkfile.append_chunk(
            LOG_INDEX_NAME,
            INDEX_COUNT * LogIndexEntrySerializer.SERIALIZED_LENGTH)
        chunkfile.append_chunk(LOG_BUFFER_NAME, BUFFER_SIZE)
        chunkfile.append_chunk(OUTPOINTS_NAME, OUTPOINTS_SIZE)

        # Load log index and log buffer.
        _, _logindexheader, logindexio = chunkfile.open_chunk(LOG_INDEX_NAME)
        logindex_reader = LogIndexReader(logindexio, INDEX_COUNT)
        logindex = SerializedLogIndex.new(logindex_reader)

        _, logbufferheader, logbufferio = chunkfile.open_chunk(LOG_BUFFER_NAME)
        buffer_reader = LogBufferReader(logbufferio, logbufferheader.length)
        logbuffer = LogBuffer(buffer_reader)

        # Format the outpoints chunk (not transactionally).
        _, outpointsheader, outpointsio = chunkfile.open_chunk(OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.new(outpointsio, outpointsheader.length,
                                           serializer=OutpointIndexSerializer)
        SerializedItemSet.load(outpoint_dict)

        txlog = TransactionLog(chunkfile, logindex, logbuffer)

        # Re-open the outpoints chunk through the transaction log so later
        # accesses go through it.
        outpointchunk, outpointchunkheader = chunkfile.get_chunk(OUTPOINTS_NAME)
        outpoint_io = TransactionalIO(txlog, outpointchunk)
        outpoint_reader = SerializedDict.load(outpoint_io, OUTPOINTS_SIZE,
                                              OutpointIndexSerializer)
        outpoints = SerializedItemSet.load(outpoint_reader)
        return cls(txlog, outpoints)
Example #10
0
 def test_ChunkFile_NewChunkFile_ContainsNoChunk(self):
     # NOTE(review): this test constructs a ChunkFile but asserts nothing;
     # the "contains no chunk" check implied by the test name is missing —
     # confirm whether an assertion was lost.
     io = StringIO.StringIO()
     chunk_file = ChunkFile(io)
Example #11
0
    def new(cls, io):
        """Format *io* as a new store.

        Writes the file header, lays out the log-index / log-buffer /
        outpoints chunks, formats the outpoints chunk, and returns an
        instance whose outpoint set is accessed via a transaction log.
        """
        BUFFER_SIZE = 10000
        INDEX_COUNT = 50
        OUTPOINTS_SIZE = 1000

        # The file header precedes all chunk data.
        fileheader = FileHeader()
        io.write(FileHeaderSerializer.serialize(fileheader))

        # Write chunks.
        chunkfile = ChunkFile(io)
        chunkfile.append_chunk(
            LOG_INDEX_NAME,
            INDEX_COUNT * LogIndexEntrySerializer.SERIALIZED_LENGTH)
        chunkfile.append_chunk(LOG_BUFFER_NAME, BUFFER_SIZE)
        chunkfile.append_chunk(OUTPOINTS_NAME, OUTPOINTS_SIZE)

        # Load log index and log buffer.
        _, _logindexheader, logindexio = chunkfile.open_chunk(LOG_INDEX_NAME)
        logindex_reader = LogIndexReader(logindexio, INDEX_COUNT)
        logindex = SerializedLogIndex.new(logindex_reader)

        _, logbufferheader, logbufferio = chunkfile.open_chunk(LOG_BUFFER_NAME)
        buffer_reader = LogBufferReader(logbufferio, logbufferheader.length)
        logbuffer = LogBuffer(buffer_reader)

        # Format other chunks (not transactionally).
        _, outpointsheader, outpointsio = chunkfile.open_chunk(OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.new(outpointsio,
                                           outpointsheader.length,
                                           serializer=OutpointIndexSerializer)
        SerializedItemSet.load(outpoint_dict)

        txlog = TransactionLog(chunkfile, logindex, logbuffer)

        # Re-open the outpoints chunk so subsequent access goes through the
        # transaction log.
        outpointchunk, outpointchunkheader = chunkfile.get_chunk(
            OUTPOINTS_NAME)
        outpoint_io = TransactionalIO(txlog, outpointchunk)
        outpoint_reader = SerializedDict.load(outpoint_io, OUTPOINTS_SIZE,
                                              OutpointIndexSerializer)
        outpoints = SerializedItemSet.load(outpoint_reader)
        return cls(txlog, outpoints)