@classmethod
def load(cls, io, iosize):
    chunkfile = ChunkFile.open(io, iosize)
    logindexio = ChunkIO.from_name(chunkfile, LOG_INDEX_NAME)
    logbufferio = ChunkIO.from_name(chunkfile, LOG_BUFFER_NAME)
    # The entry count is re-derived from the index chunk's size, so it
    # does not need to be stored separately.
    logindex_reader = LogIndexReader(
        logindexio,
        logindexio.size // LogIndexEntrySerializer.SERIALIZED_LENGTH)
    logindex = SerializedLogIndex.load(logindex_reader)
    logbuffer_reader = LogBufferReader(logbufferio, logbufferio.size)
    logbuffer = LogBuffer.load(logindex, logbuffer_reader)
    return cls(chunkfile, logindex, logbuffer,
               logindexio.chunkid, logbufferio.chunkid)
@classmethod
def new(cls, io, INDEX_COUNT=50, BUFFER_SIZE=10000):
    chunkfile = ChunkFile(io)
    chunkfile.append_chunk(
        LOG_INDEX_NAME,
        INDEX_COUNT * LogIndexEntrySerializer.SERIALIZED_LENGTH)
    chunkfile.append_chunk(LOG_BUFFER_NAME, BUFFER_SIZE)
    # Set up the log index and log buffer
    logindexio = ChunkIO.from_name(chunkfile, LOG_INDEX_NAME)
    logindex_reader = LogIndexReader(logindexio, INDEX_COUNT)
    logindex = SerializedLogIndex.new(logindex_reader)
    logbufferio = ChunkIO.from_name(chunkfile, LOG_BUFFER_NAME)
    buffer_reader = LogBufferReader(logbufferio, logbufferio.size)
    logbuffer = LogBuffer(buffer_reader)
    return cls(chunkfile, logindex, logbuffer,
               logindexio.chunkid, logbufferio.chunkid)
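The two classmethods are deliberately symmetric: new sizes the index chunk as INDEX_COUNT * SERIALIZED_LENGTH, and load divides the chunk size back out to recover the entry count, so no count field has to live in a header. A minimal usage sketch, assuming these are classmethods of TransactionalChunkFile (as the datastore code below suggests) and that FileIO is a hypothetical seekable wrapper around a binary file:

import os

# A sketch only: FileIO is a hypothetical wrapper, and TransactionalChunkFile
# is assumed (from the datastore code below) to be the class hosting the
# new()/load() classmethods above.
with open("store.dat", "w+b") as f:
    txfile = TransactionalChunkFile.new(FileIO(f), INDEX_COUNT=50,
                                        BUFFER_SIZE=10000)

with open("store.dat", "r+b") as f:
    iosize = os.path.getsize("store.dat")
    # load() needs only the file and its total size; the index entry
    # count is re-derived from the index chunk's size.
    txfile = TransactionalChunkFile.load(FileIO(f), iosize)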
@classmethod
def load(cls, io):
    chunkfile = ChunkFile.open(io)
    _, logindexheader, logindexio = ChunkIO.from_name(
        chunkfile, LOG_INDEX_NAME)
    _, logbufferheader, logbufferio = ChunkIO.from_name(
        chunkfile, LOG_BUFFER_NAME)
    # Rebuild the transaction log from the raw index and buffer chunks.
    logindex_reader = LogIndexReader(
        logindexio,
        logindexheader.length // LogIndexEntrySerializer.SERIALIZED_LENGTH)
    logindex = SerializedLogIndex.load(logindex_reader)
    logbuffer_reader = LogBufferReader(logbufferio, logbufferheader.length)
    logbuffer = LogBuffer.load(logindex, logbuffer_reader)
    txlog = TransactionLog(chunkfile, logindex, logbuffer)
    # Open the outpoint set through TransactionalIO so its writes are logged.
    outpointchunk, outpointchunkheader = chunkfile.get_chunk(OUTPOINTS_NAME)
    outpoint_io = TransactionalIO(txlog, outpointchunk)
    outpoint_reader = OutpointIndexReader(outpoint_io,
                                          outpointchunkheader.length)
    outpoints = SerializedItemSet.load(outpoint_reader)
    return cls(txlog, outpoints)
@classmethod
def new(cls, io, fileheader=FileHeader(), INDEX_COUNT=50,
        BUFFER_SIZE=10000, OUTPOINTS_SIZE=1000):
    fileheader = io.write(data=FileHeaderSerializer.serialize(fileheader))
    txchunk_file = TransactionalChunkFile.new(
        io, INDEX_COUNT=INDEX_COUNT, BUFFER_SIZE=BUFFER_SIZE)
    chunkfile = txchunk_file.chunkfile
    # Append and format the remaining chunks (not done transactionally)
    chunkfile.append_chunk(OUTPOINTS_NAME, OUTPOINTS_SIZE)
    outpointsio = ChunkIO.from_name(chunkfile, OUTPOINTS_NAME)
    outpoint_dict = SerializedDict.new(outpointsio, outpointsio.size,
                                       serializer=OutpointIndexSerializer)
    # Re-open the outpoint index transactionally
    outpoint_io = TransactionalIO.from_chunkname(txchunk_file, OUTPOINTS_NAME)
    outpoint_dict = SerializedDict.load(outpoint_io, OUTPOINTS_SIZE,
                                        OutpointIndexSerializer)
    return cls(fileheader, txchunk_file, outpoint_dict)
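The datastore-level pair follows the same shape one level up. Note that this iteration's load rebuilds the TransactionLog by hand from the raw chunks rather than going through TransactionalChunkFile.load, and that new formats the outpoint chunk non-transactionally before re-opening it through TransactionalIO, so only subsequent writes to it are logged. A rough usage sketch, with Datastore as a placeholder name for the class that owns these methods:

# A rough sketch; Datastore is a placeholder for the class owning the
# new()/load() classmethods above, and FileIO is the same hypothetical
# binary-file wrapper as before.
with open("utxos.dat", "w+b") as f:
    store = Datastore.new(FileIO(f), OUTPOINTS_SIZE=1000)

with open("utxos.dat", "r+b") as f:
    store = Datastore.load(FileIO(f))
    # Reads and writes against the outpoint set now pass through
    # TransactionalIO, so a crash mid-update can be rolled back from
    # the log the next time the file is loaded.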