def unserialize_blocks(file):
    """Read a serialized block table from a file-like object.

    The table is a flat sequence of (digest, size, code) records:
    a raw digest of Digest.dataDigestSize() bytes followed by two
    Format-encoded integers.  Reads until end of stream.

    NOTE: the parameter shadows the `file` builtin; the name is kept
    for backward compatibility with keyword callers.

    Returns a list of (digest, size, code) tuples.
    """
    blocks = []
    while True:
        digest = file.read(Digest.dataDigestSize())
        # Stop on EOF.  Checking the length (rather than == "") also
        # stops cleanly on a truncated table, instead of treating a
        # partial digest as valid and then misparsing the integers.
        if len(digest) != Digest.dataDigestSize():
            break
        size = Format.read_int(file)
        code = Format.read_int(file)
        blocks.append((digest, size, code))
    return blocks
def start_dump(self, sequence_id, index):
    """Put the container into DUMP mode and prepare it to accept blocks.

    Opens fresh header and body files for (sequence_id, index) via the
    storage backend, creates the body/header DataDumpers, enables ARC4
    encryption on both when the storage has a nonempty encryption key,
    and starts BZ2 compression of the body.

    May only be called when the container has no active mode.
    """
    # Guard against re-entry: a container can be dumped or loaded, not both.
    assert self.mode is None
    self.mode = "DUMP"
    self.sequence_id = sequence_id
    self.index = index
    # Both files must be brand new; tell()==0 asserts nothing was
    # written to them before this dump started.
    self.header_file = self.storage.open_header_file(
        self.sequence_id, self.index)
    assert self.header_file.tell() == 0
    self.body_file = self.storage.open_body_file(
        self.sequence_id, self.index)
    assert self.body_file.tell() == 0
    # Piggyback-header bookkeeping: how many previous containers' headers
    # this container may carry along (budget depends on our index).
    self.piggyback_headers_num = 0
    self.piggyback_headers_size = 0
    self.max_num_piggyback_headers = compute_num_piggyback_headers(self.index)
    logging.debug("Container %d can add %d piggyback headers" %
        (self.index, self.max_num_piggyback_headers))
    # Body blocks are dumped straight to the body file; header blocks are
    # accumulated in an in-memory buffer and written out at finalization.
    self.body_dumper = DataDumper(self.body_file)
    self.header_dump_os = StringIO.StringIO()
    self.header_dumper = DataDumper(self.header_dump_os)
    if self.storage.get_encryption_key() != "":
        # Encrypt both streams with ARC4, each seeded with its own
        # fresh random value of digest size.
        self.encryption_active = True
        self.body_dumper.start_encryption(
            CODE_ENCRYPTION_ARC4,
            os.urandom(Digest.dataDigestSize()),
            self.storage.get_encryption_key())
        self.header_dumper.start_encryption(
            CODE_ENCRYPTION_ARC4,
            os.urandom(Digest.dataDigestSize()),
            self.storage.get_encryption_key())
    else:
        self.encryption_active = False
    # The body is always compressed; compressed_data tracks how many
    # bytes went into the current compression session.
    self.body_dumper.start_compression(CODE_COMPRESSION_BZ2)
    self.compression_active = True
    self.compressed_data = 0
def _load_header(self, header_file):
    """Parse a container header file and return the body block table.

    The header layout read sequentially is: MAGIC, version int,
    container index int, header-table size int, header-table digest,
    header-table bytes, header-dump length int, header-dump bytes.
    The header table is itself a serialized block table describing the
    header dump, which (possibly encrypted) contains a CODE_BLOCK_TABLE
    block holding the serialized table of the container body's blocks.

    Returns the list of (digest, size, code) tuples for the body.
    Raises Exception on bad magic, unsupported version, index mismatch,
    or a header-table digest mismatch (corruption).
    """
    logging.debug("****************************** loading header")
    magic = header_file.read(len(MAGIC))
    if MAGIC != magic:
        raise Exception("Manent: magic number not found")
    version = Format.read_int(header_file)
    if version != VERSION:
        raise Exception("Container %d has unsupported version" % self.index)
    index = Format.read_int(header_file)
    # The index stored in the file must match the index this container
    # object was created for; otherwise the storage gave us a wrong file.
    if index != self.index:
        raise Exception(
            "Manent: wrong container file index. Expected %s, found %s" %
            (str(self.index), str(index)))
    header_table_size = Format.read_int(header_file)
    header_table_digest = header_file.read(Digest.dataDigestSize())
    header_table_str = header_file.read(header_table_size)
    # Integrity check: the header table must hash to the stored digest.
    if Digest.dataDigest(header_table_str) != header_table_digest:
        raise Exception("Manent: header of container file corrupted")
    header_dump_len = Format.read_int(header_file)
    header_dump_str = header_file.read(header_dump_len)
    header_table_io = StringIO.StringIO(header_table_str)
    header_blocks = unserialize_blocks(header_table_io)

    # Listener that captures only the CODE_BLOCK_TABLE block out of the
    # header dump; all other blocks are skipped by DataDumpLoader.
    class BlockTableListener:
        def __init__(self):
            self.body_table_str = None
        def is_requested(self, digest, code):
            return code == CODE_BLOCK_TABLE
        def loaded(self, digest, code, data):
            assert code == CODE_BLOCK_TABLE
            self.body_table_str = data

    listener = BlockTableListener()
    header_dump_str_io = StringIO.StringIO(header_dump_str)
    # The header dump may be encrypted; the storage's key decrypts it.
    header_dump_loader = DataDumpLoader(header_dump_str_io, header_blocks,
        password=self.storage.get_encryption_key())
    header_dump_loader.load_blocks(listener)
    body_table_io = StringIO.StringIO(listener.body_table_str)
    blocks = unserialize_blocks(body_table_io)
    return blocks
def is_filled_by(self, num_blocks, size_blocks):
    """Test if the given number of blocks with given size will fill the
    container.  Assume that the blocks are not compressible."""
    # Each block record costs its payload plus a fixed per-block
    # overhead: the digest bytes and 8 bytes of metadata.
    per_block_overhead = 8 + Digest.dataDigestSize()
    total_bytes = num_blocks * per_block_overhead + size_blocks
    return not self._can_add_bytes(total_bytes)
def encode_piggyback_container_index(index):
    """Encode a container index as a digest-sized string.

    The decimal representation of the index is left-justified and
    space-padded to exactly Digest.dataDigestSize() characters, so it
    can stand in the digest slot of a piggyback header record.
    """
    # ljust pads with spaces and never truncates, matching the manual
    # "append (size - len) spaces" computation for all index lengths.
    return str(index).ljust(Digest.dataDigestSize())
def test_data_dumper_stress(self):
    # Test with really lots of randomly generated data.
    # Randomized round-trip: feed ~10000 random actions (add block /
    # start encryption / start compression) into a DataDumper, then
    # reload everything with DataDumpLoader and verify the handler saw
    # every expected block.
    handler = MockHandler()
    outfile = StringIO.StringIO()
    dumper = Container.DataDumper(outfile)
    # Countdown counters: number of remaining iterations before the
    # corresponding session is stopped; None means "not active".
    encryption_active = None
    compression_active = None
    known_blocks = {}
    for i in range(10000):
        action = random.randint(0,2)
        # Tick the compression countdown; stop the session on expiry.
        if compression_active is not None:
            compression_active -= 1
            if compression_active == 0:
                dumper.stop_compression()
                compression_active = None
            else:
                #print " Compression has %d rounds to go"%compression_active
                pass
        # Tick the encryption countdown; compression (if still nested
        # inside) must be closed before encryption can be closed.
        if encryption_active is not None:
            encryption_active -= 1
            if encryption_active == 0:
                if compression_active is not None:
                    # If we need to stop encryption, compression must be
                    # stopped first
                    dumper.stop_compression()
                #print " Stopping encryption"
                dumper.stop_encryption()
                encryption_active = None
            else:
                #print " Encryption has %d rounds to go"%encryption_active
                pass
        if action==0:
            # Generate new data item.  CODE_DATA is weighted heavier
            # than the other codes in the choice list.
            data_size = random.randint(0,1000)
            data = os.urandom(data_size)
            code = random.choice([
                Container.CODE_DATA,
                Container.CODE_DIR,
                Container.CODE_DATA,
                Container.CODE_DATA_PACKER,
                Container.CODE_DATA,
                Container.CODE_DIR_PACKER,
                Container.CODE_DATA,
                Container.CODE_INCREMENT_DESCRIPTOR])
            digest = Digest.dataDigest(data)
            if code == Container.CODE_DATA and known_blocks.has_key(digest):
                # We can't expect the same data block to be added twice
                # to a container
                continue
            known_blocks[digest] = 1
            dumper.add_block(digest, code, data)
            # test not requesting to reload every CODE_DATA item
            if code != Container.CODE_DATA or random.randint(0, 100) > 90:
                handler.add_expected(digest, code, data)
        elif action==1:
            #continue
            # Try to start encryption
            # We can start encryption only if it is not active already
            # (and never while compression is nested inside it).
            if encryption_active != None:
                continue
            if compression_active != None:
                continue
            encryption_active = random.randint(1,100)
            #print " Starting encryption for %d rounds"
            seed = os.urandom(Digest.dataDigestSize())
            dumper.start_encryption(Container.CODE_ENCRYPTION_ARC4,
                seed, "kakamaika")
        elif action==2:
            # Try to start compression
            if compression_active != None:
                continue
            compression_active = random.randint(1,100)
            if encryption_active != None:
                # Compression nested in encryption must not outlive the
                # encryption session.
                compression_active = min(compression_active,
                    encryption_active)
            algorithm = random.choice([Container.CODE_COMPRESSION_BZ2])
            dumper.start_compression(algorithm)
    # Close any sessions still open when the loop ends (compression
    # first, since it may be nested inside encryption).
    if compression_active is not None:
        dumper.stop_compression()
    if encryption_active is not None:
        dumper.stop_encryption()
    infile = StringIO.StringIO(outfile.getvalue())
    blocks = dumper.get_blocks()
    #print "blocks:"
    #for digest,size,code in blocks:
    #print base64.b64encode(digest), size, code
    # NOTE(review): the loader password literal differs from the
    # "kakamaika" key used for encryption above — confirm this is the
    # intended password (it may have been redacted in this copy).
    undumper = Container.DataDumpLoader(infile, blocks,
        password="******")
    undumper.load_blocks(handler)
    self.failUnless(handler.check())