Example #1
0
    def new(cls, io):
        """Create and format a new transactional store on *io*.

        Writes a fresh file header, appends the log-index, log-buffer and
        outpoints chunks, formats the outpoints chunk, then re-opens it
        through the transaction log so subsequent access is journaled.
        Returns a new instance wrapping the transaction log and the
        outpoints item set.
        """
        # (the original also defined IO_SIZE = 1000, which was never used)
        BUFFER_SIZE = 10000      # bytes reserved for the log buffer chunk
        INDEX_COUNT = 50         # capacity of the log index, in entries
        OUTPOINTS_SIZE = 1000    # bytes reserved for the outpoints chunk

        fileheader = FileHeader()
        io.write(FileHeaderSerializer.serialize(fileheader))
        # write chunks
        chunkfile = ChunkFile(io)
        chunkfile.append_chunk(
            LOG_INDEX_NAME,
            INDEX_COUNT * LogIndexEntrySerializer.SERIALIZED_LENGTH)
        chunkfile.append_chunk(LOG_BUFFER_NAME, BUFFER_SIZE)
        chunkfile.append_chunk(OUTPOINTS_NAME, OUTPOINTS_SIZE)
        # Load log index and log buffer
        _, _logindexheader, logindexio = chunkfile.open_chunk(LOG_INDEX_NAME)
        logindex_reader = LogIndexReader(logindexio, INDEX_COUNT)
        logindex = SerializedLogIndex.new(logindex_reader)
        _, logbufferheader, logbufferio = chunkfile.open_chunk(LOG_BUFFER_NAME)
        buffer_reader = LogBufferReader(logbufferio, logbufferheader.length)
        logbuffer = LogBuffer(buffer_reader)
        # Format the outpoints chunk (not transactionally).  The results are
        # discarded: SerializedDict.new / SerializedItemSet.load are invoked
        # here for their on-disk formatting side effects only.
        _, outpointsheader, outpointsio = chunkfile.open_chunk(OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.new(outpointsio,
                                           outpointsheader.length,
                                           serializer=OutpointIndexSerializer)
        SerializedItemSet.load(outpoint_dict)
        # Re-open the outpoints chunk through the transaction log.
        txlog = TransactionLog(chunkfile, logindex, logbuffer)
        outpointchunk, _outpointchunkheader = chunkfile.get_chunk(
            OUTPOINTS_NAME)
        outpoint_io = TransactionalIO(txlog, outpointchunk)
        outpoint_reader = SerializedDict.load(outpoint_io, OUTPOINTS_SIZE,
                                              OutpointIndexSerializer)
        outpoints = SerializedItemSet.load(outpoint_reader)
        return cls(txlog, outpoints)
Example #2
0
 def load(cls, io, iosize):
     """Re-open an existing store backed by *io* of *iosize* bytes."""
     # The serialized file header sits at the very start of the file.
     raw_header = io.read(length=FileHeaderSerializer.SERIALIZED_LENGTH)
     fileheader = FileHeaderSerializer.deserialize(raw_header)
     # Recover the chunk file with its transaction log, then open the
     # outpoints chunk through it so all access is journaled.
     txchunk_file = TransactionalChunkFile.load(io, iosize)
     outpoint_io = TransactionalIO.from_chunkname(txchunk_file, OUTPOINTS_NAME)
     outpoint_dict = SerializedDict.load(
         outpoint_io, outpoint_io.size, serializer=OutpointIndexSerializer)
     return cls(fileheader, txchunk_file, outpoint_dict)
Example #3
0
    def load(cls, io, iosize):
        """Open an already-initialized store from *io* (*iosize* bytes)."""
        # Deserialize the file header from the front of the file.
        header_bytes = io.read(length=FileHeaderSerializer.SERIALIZED_LENGTH)
        fileheader = FileHeaderSerializer.deserialize(header_bytes)

        # Recover the transactional chunk file, then open the outpoints
        # chunk through it so reads and writes go via the transaction log.
        txchunk_file = TransactionalChunkFile.load(io, iosize)
        outpoint_io = TransactionalIO.from_chunkname(
            txchunk_file, OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.load(
            outpoint_io, outpoint_io.size,
            serializer=OutpointIndexSerializer)
        return cls(fileheader, txchunk_file, outpoint_dict)
Example #4
0
    def new(cls, io, fileheader=None, INDEX_COUNT=50, BUFFER_SIZE=10000,
            OUTPOINTS_SIZE=1000):
        """Create and format a new store on *io*.

        The on-disk layout holds INDEX_COUNT log-index entries, a
        BUFFER_SIZE-byte log buffer and an OUTPOINTS_SIZE-byte outpoints
        chunk.  *fileheader* defaults to a fresh ``FileHeader()``.
        """
        # Sentinel instead of `fileheader=FileHeader()`: a default argument
        # is evaluated once at def time and shared across all calls.
        if fileheader is None:
            fileheader = FileHeader()
        # NOTE(review): `fileheader` is rebound to io.write()'s return value
        # and that result is passed to cls(...) below — preserved as-is;
        # confirm io.write returns the header (not e.g. a byte count).
        fileheader = io.write(data=FileHeaderSerializer.serialize(fileheader))

        txchunk_file = TransactionalChunkFile.new(io, INDEX_COUNT=INDEX_COUNT,
                                                  BUFFER_SIZE=BUFFER_SIZE)
        chunkfile = txchunk_file.chunkfile
        # Appending/format other chunks (not done transactionally)
        chunkfile.append_chunk(OUTPOINTS_NAME, OUTPOINTS_SIZE)
        outpointsio = ChunkIO.from_name(chunkfile, OUTPOINTS_NAME)
        # Formats the chunk on disk; the dict itself is replaced just below.
        outpoint_dict = SerializedDict.new(outpointsio, outpointsio.size,
                                           serializer=OutpointIndexSerializer)
        # re-open them transactionally
        outpoint_io = TransactionalIO.from_chunkname(txchunk_file,
                                                     OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.load(outpoint_io, OUTPOINTS_SIZE,
                                            OutpointIndexSerializer)
        return cls(fileheader, txchunk_file, outpoint_dict)
Example #5
0
    def new(cls, io):
        """Create a new store on *io*: write the header, lay out the log
        and outpoints chunks, then re-open outpoints transactionally.

        Returns a new instance wrapping the transaction log and the
        outpoints item set.
        """
        # Removed the unused IO_SIZE = 1000 local from the original.
        BUFFER_SIZE = 10000      # log buffer chunk size, bytes
        INDEX_COUNT = 50         # log index capacity, entries
        OUTPOINTS_SIZE = 1000    # outpoints chunk size, bytes

        fileheader = FileHeader()
        io.write(FileHeaderSerializer.serialize(fileheader))
        # write chunks
        chunkfile = ChunkFile(io)
        chunkfile.append_chunk(
            LOG_INDEX_NAME,
            INDEX_COUNT * LogIndexEntrySerializer.SERIALIZED_LENGTH)
        chunkfile.append_chunk(LOG_BUFFER_NAME, BUFFER_SIZE)
        chunkfile.append_chunk(OUTPOINTS_NAME, OUTPOINTS_SIZE)
        # Load log index and log buffer
        _, _logindexheader, logindexio = chunkfile.open_chunk(LOG_INDEX_NAME)
        logindex_reader = LogIndexReader(logindexio, INDEX_COUNT)

        logindex = SerializedLogIndex.new(logindex_reader)
        _, logbufferheader, logbufferio = chunkfile.open_chunk(LOG_BUFFER_NAME)
        buffer_reader = LogBufferReader(logbufferio, logbufferheader.length)
        logbuffer = LogBuffer(buffer_reader)
        # Format the outpoints chunk (not transactionally); the return
        # values are discarded — only the on-disk formatting matters here.
        _, outpointsheader, outpointsio = chunkfile.open_chunk(OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.new(outpointsio,
                                           outpointsheader.length,
                                           serializer=OutpointIndexSerializer)
        SerializedItemSet.load(outpoint_dict)
        # Re-open the outpoints chunk through the transaction log.
        txlog = TransactionLog(chunkfile, logindex, logbuffer)

        outpointchunk, _outpointchunkheader = chunkfile.get_chunk(
            OUTPOINTS_NAME)
        outpoint_io = TransactionalIO(txlog, outpointchunk)
        outpoint_reader = SerializedDict.load(outpoint_io, OUTPOINTS_SIZE,
                                              OutpointIndexSerializer)
        outpoints = SerializedItemSet.load(outpoint_reader)
        return cls(txlog, outpoints)
Example #6
0
    def new(cls,
            io,
            fileheader=None,
            INDEX_COUNT=50,
            BUFFER_SIZE=10000,
            OUTPOINTS_SIZE=1000):
        """Create a new store on *io* with the given chunk sizes.

        *fileheader* defaults to a fresh ``FileHeader()`` per call.
        """
        # Build the default per call rather than sharing one instance
        # evaluated at def time (mutable-default-argument pitfall).
        if fileheader is None:
            fileheader = FileHeader()
        # NOTE(review): `fileheader` is rebound to io.write()'s return and
        # that value is forwarded to cls(...) — kept as in the original;
        # verify io.write returns what callers expect here.
        fileheader = io.write(data=FileHeaderSerializer.serialize(fileheader))

        txchunk_file = TransactionalChunkFile.new(io,
                                                  INDEX_COUNT=INDEX_COUNT,
                                                  BUFFER_SIZE=BUFFER_SIZE)
        chunkfile = txchunk_file.chunkfile
        # Appending/format other chunks (Not done transactionally)
        chunkfile.append_chunk(OUTPOINTS_NAME, OUTPOINTS_SIZE)
        outpointsio = ChunkIO.from_name(chunkfile, OUTPOINTS_NAME)
        # Formats the chunk on disk; the dict itself is replaced just below.
        outpoint_dict = SerializedDict.new(outpointsio,
                                           outpointsio.size,
                                           serializer=OutpointIndexSerializer)
        # re-open them transactionally
        outpoint_io = TransactionalIO.from_chunkname(txchunk_file,
                                                     OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.load(outpoint_io, OUTPOINTS_SIZE,
                                            OutpointIndexSerializer)
        return cls(fileheader, txchunk_file, outpoint_dict)