Example #1
0
 def load(cls, io, iosize):
     # read fileheader
     fileheader = FileHeaderSerializer.deserialize(io.read(length=FileHeaderSerializer.SERIALIZED_LENGTH))
                                                   
     txchunk_file = TransactionalChunkFile.load(io, iosize)
     outpoint_io = TransactionalIO.from_chunkname(txchunk_file, OUTPOINTS_NAME)
     outpoint_dict = SerializedDict.load(outpoint_io, outpoint_io.size, serializer=OutpointIndexSerializer)
     return cls(fileheader, txchunk_file, outpoint_dict)
Example #2
0
    def load(cls, io, iosize):
        # read fileheader
        fileheader = FileHeaderSerializer.deserialize(
            io.read(length=FileHeaderSerializer.SERIALIZED_LENGTH))

        txchunk_file = TransactionalChunkFile.load(io, iosize)
        outpoint_io = TransactionalIO.from_chunkname(txchunk_file,
                                                     OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.load(outpoint_io,
                                            outpoint_io.size,
                                            serializer=OutpointIndexSerializer)
        return cls(fileheader, txchunk_file, outpoint_dict)
Example #3
0
    def new(cls, io, fileheader=FileHeader(), INDEX_COUNT=50, BUFFER_SIZE=10000, OUTPOINTS_SIZE=1000):
        fileheader = io.write(data=FileHeaderSerializer.serialize(fileheader))

        txchunk_file = TransactionalChunkFile.new(io, INDEX_COUNT=INDEX_COUNT, BUFFER_SIZE=BUFFER_SIZE)
        chunkfile = txchunk_file.chunkfile
        # Appending/format other chunks (Not done transactionally)
        chunkfile.append_chunk( OUTPOINTS_NAME, OUTPOINTS_SIZE )
        outpointsio = ChunkIO.from_name(chunkfile, OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.new(outpointsio, outpointsio.size, serializer=OutpointIndexSerializer)
        # re-open them transactionally
        outpoint_io = TransactionalIO.from_chunkname(txchunk_file, OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.load(outpoint_io, OUTPOINTS_SIZE, OutpointIndexSerializer)
        return cls(fileheader, txchunk_file, outpoint_dict)
Example #4
0
    def test_1(self):
        IO_SIZE = 1000
        BUFFER_SIZE = 1000
        INDEX_COUNT = 20
        io = IoHandle.using_stringio(BUFFER_SIZE)
        buffer_reader = LogBufferReader(io, BUFFER_SIZE)
        logbuffer = LogBuffer(buffer_reader)

        io = IoHandle.using_stringio(
            LogIndexEntrySerializer.SERIALIZED_LENGTH * INDEX_COUNT)
        logindex_reader = LogIndexReader(io, INDEX_COUNT)
        logindex = SerializedLogIndex.new(logindex_reader)

        io = MultiChunkIO.using_stringios({0: IO_SIZE})
        log = TransactionalChunkFile(io, logindex, logbuffer, 1, 2)
        log.start_transaction()
        log.write(0, 3, "hello test")
        log.write(0, 12, "hello blog")
Example #5
0
 def test_1(self):
     IO_SIZE = 1000
     BUFFER_SIZE = 1000
     INDEX_COUNT = 20
     io = IoHandle.using_stringio(BUFFER_SIZE)
     buffer_reader = LogBufferReader(io, BUFFER_SIZE)
     logbuffer = LogBuffer(buffer_reader)
     
     io = IoHandle.using_stringio(LogIndexEntrySerializer.SERIALIZED_LENGTH * INDEX_COUNT)
     logindex_reader = LogIndexReader(io, INDEX_COUNT)
     logindex = SerializedLogIndex.new(logindex_reader)
     
     io = MultiChunkIO.using_stringios({0:IO_SIZE})
     log = TransactionalChunkFile(io, logindex, logbuffer, 1, 2)
     log.start_transaction()
     log.write(0, 3, "hello test")
     log.write(0, 12, "hello blog")
Example #6
0
    def new(cls,
            io,
            fileheader=FileHeader(),
            INDEX_COUNT=50,
            BUFFER_SIZE=10000,
            OUTPOINTS_SIZE=1000):
        fileheader = io.write(data=FileHeaderSerializer.serialize(fileheader))

        txchunk_file = TransactionalChunkFile.new(io,
                                                  INDEX_COUNT=INDEX_COUNT,
                                                  BUFFER_SIZE=BUFFER_SIZE)
        chunkfile = txchunk_file.chunkfile
        # Appending/format other chunks (Not done transactionally)
        chunkfile.append_chunk(OUTPOINTS_NAME, OUTPOINTS_SIZE)
        outpointsio = ChunkIO.from_name(chunkfile, OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.new(outpointsio,
                                           outpointsio.size,
                                           serializer=OutpointIndexSerializer)
        # re-open them transactionally
        outpoint_io = TransactionalIO.from_chunkname(txchunk_file,
                                                     OUTPOINTS_NAME)
        outpoint_dict = SerializedDict.load(outpoint_io, OUTPOINTS_SIZE,
                                            OutpointIndexSerializer)
        return cls(fileheader, txchunk_file, outpoint_dict)