def test_write_transfer(self):
    """Stream a metachunk to all replicas and verify each copy.

    Feeds ~4KiB of data through ReplicatedChunkWriteHandler.stream()
    against mocked chunk services (all answering 201), then checks that
    the reported byte count and checksum match the source data and that
    every replica received an identical, complete body.
    """
    # Local import so this fix stands alone even if the module still
    # imports StringIO at the top of the file.
    from io import BytesIO

    checksum = self.checksum()
    # Bytes payload (was a text string): hashing and chunked-body
    # decoding operate on bytes under Python 3. The size is deliberately
    # not a multiple of 4 to exercise a partial trailing piece.
    test_data = (b'1234' * 1024)[:-10]
    size = len(test_data)
    meta_chunk = self.meta_chunk()
    nb = len(meta_chunk)
    resps = [201] * nb
    source = BytesIO(test_data)
    put_reqs = defaultdict(lambda: {'parts': []})

    def cb_body(conn_id, part):
        # Record every body part each mocked connection receives.
        put_reqs[conn_id]['parts'].append(part)

    with set_http_connect(*resps, cb_body=cb_body):
        handler = ReplicatedChunkWriteHandler(
            self.sysmeta, meta_chunk, checksum, self.storage_method)
        bytes_transferred, checksum, chunks = handler.stream(source, size)

    final_checksum = self.checksum(test_data).hexdigest()
    self.assertEqual(len(test_data), bytes_transferred)
    self.assertEqual(final_checksum, checksum)

    bodies = []
    for conn_id, info in put_reqs.items():
        # b''.join: the captured parts are bytes, so the chunked body
        # must be reassembled as bytes too.
        body, trailers = decode_chunked_body(b''.join(info['parts']))
        # TODO check trailers?
        bodies.append(body)

    self.assertEqual(len(bodies), nb)
    for body in bodies:
        self.assertEqual(len(test_data), len(body))
        self.assertEqual(self.checksum(body).hexdigest(), final_checksum)
def test_write_transfer(self):
    """Verify a replicated write sends identical data to every copy.

    All mocked chunk services answer 201; the test checks the reported
    transfer size/checksum and that each replica got the full payload.
    """
    running_checksum = self.checksum()
    payload = (b'1234' * 1024)[:-10]
    payload_len = len(payload)
    meta_chunk = self.meta_chunk()
    replica_count = len(meta_chunk)
    captured = defaultdict(lambda: {'parts': []})

    def record_part(conn_id, part):
        # Keep every body part received by each mocked connection.
        captured[conn_id]['parts'].append(part)

    with set_http_connect(*([201] * replica_count), cb_body=record_part):
        writer = ReplicatedMetachunkWriter(
            self.sysmeta, meta_chunk, running_checksum, self.storage_method)
        bytes_transferred, checksum, chunks = writer.stream(
            BytesIO(payload), payload_len)

    expected_hash = self.checksum(payload).hexdigest()
    self.assertEqual(payload_len, bytes_transferred)
    self.assertEqual(expected_hash, checksum)

    decoded = [decode_chunked_body(b''.join(info['parts']))
               for info in captured.values()]
    # TODO check trailers?
    self.assertEqual(len(decoded), replica_count)
    for body, _trailers in decoded:
        self.assertEqual(payload_len, len(body))
        self.assertEqual(self.checksum(body).hexdigest(), expected_hash)
def test_write_transfer(self):
    """Verify an EC write: trailers are correct and data is decodable.

    Streams one metachunk through EcMetachunkWriter against mocked
    chunk services (all answering 201), checks the metachunk trailers
    on every fragment archive, then regroups the fragments segment by
    segment and decodes them back into the original payload.
    """
    running_checksum = self.checksum()
    segment_size = self.storage_method.ec_segment_size
    payload = (b'1234' * segment_size)[:-10]
    payload_len = len(payload)
    payload_hash = self.checksum(payload).hexdigest()
    frag_count = (self.storage_method.ec_nb_data +
                  self.storage_method.ec_nb_parity)
    captured = defaultdict(lambda: {'parts': []})

    def record_part(conn_id, part):
        captured[conn_id]['parts'].append(part)

    # TODO test headers
    with set_http_connect(*([201] * frag_count), cb_body=record_part):
        writer = EcMetachunkWriter(self.sysmeta, self.meta_chunk(),
                                   running_checksum, self.storage_method)
        bytes_transferred, checksum, chunks = writer.stream(
            BytesIO(payload), payload_len)

    self.assertEqual(payload_len, bytes_transferred)
    self.assertEqual(checksum, self.checksum(payload).hexdigest())

    fragment_archives = []
    for info in captured.values():
        body, trailers = decode_chunked_body(b''.join(info['parts']))
        fragment_archives.append(body)
        self.assertEqual(
            int(trailers[CHUNK_HEADERS['metachunk_size']]), payload_len)
        self.assertEqual(
            trailers[CHUNK_HEADERS['metachunk_hash']], payload_hash)

    self.assertEqual(len(fragment_archives), frag_count)

    fragment_size = self.storage_method.ec_fragment_size
    # Slice each archive into its per-segment fragments, then regroup
    # the fragments segment by segment and decode each group.
    sliced = [[archive[off:off + fragment_size]
               for off in range(0, len(archive), fragment_size)]
              for archive in fragment_archives]
    rebuilt = b''
    for segment_frags in zip(*sliced):
        self.assertEqual(len(segment_frags), frag_count)
        rebuilt += self.storage_method.driver.decode(list(segment_frags))

    self.assertEqual(payload_len, len(rebuilt))
    self.assertEqual(payload_hash, self.checksum(rebuilt).hexdigest())
def test_write_transfer(self):
    """Stream a metachunk through EC encoding and verify the fragments.

    Writes one metachunk through ECChunkWriteHandler.stream() against
    mocked chunk services (all answering 201), checks the metachunk
    trailers on every fragment archive, then regroups the captured
    fragments and decodes them back into the original data.
    """
    # Local import so this fix stands alone even if the module still
    # imports StringIO at the top of the file.
    from io import BytesIO

    checksum = self.checksum()
    segment_size = self.storage_method.ec_segment_size
    # Bytes payload (was a text string): hashing and chunked-body
    # decoding work on bytes under Python 3. The odd length exercises
    # a final partial segment.
    test_data = (b'1234' * segment_size)[:-10]
    size = len(test_data)
    test_data_checksum = self.checksum(test_data).hexdigest()
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    resps = [201] * nb
    source = BytesIO(test_data)
    put_reqs = defaultdict(lambda: {'parts': []})

    def cb_body(conn_id, part):
        # Record every body part each mocked connection receives.
        put_reqs[conn_id]['parts'].append(part)

    # TODO test headers
    with set_http_connect(*resps, cb_body=cb_body):
        handler = ECChunkWriteHandler(self.sysmeta, self.meta_chunk(),
                                      checksum, self.storage_method)
        bytes_transferred, checksum, chunks = handler.stream(source, size)

    self.assertEqual(len(test_data), bytes_transferred)
    self.assertEqual(checksum, self.checksum(test_data).hexdigest())

    fragments = []
    for conn_id, info in put_reqs.items():
        # b''.join: the captured parts arrive as bytes.
        body, trailers = decode_chunked_body(b''.join(info['parts']))
        fragments.append(body)
        metachunk_size = int(trailers[chunk_headers['metachunk_size']])
        metachunk_hash = trailers[chunk_headers['metachunk_hash']]
        self.assertEqual(metachunk_size, size)
        self.assertEqual(metachunk_hash, test_data_checksum)

    self.assertEqual(len(fragments), nb)
    fragment_size = self.storage_method.ec_fragment_size

    # retrieve segments
    frags = []
    for frag in fragments:
        data = [frag[x:x + fragment_size]
                for x in range(0, len(frag), fragment_size)]
        frags.append(data)

    fragments = zip(*frags)
    # Accumulate decoded segments as bytes (was '' in the text version).
    final_data = b''
    for frag in fragments:
        self.assertEqual(len(frag), nb)
        frag = list(frag)
        final_data += self.storage_method.driver.decode(frag)

    self.assertEqual(len(test_data), len(final_data))
    self.assertEqual(test_data_checksum,
                     self.checksum(final_data).hexdigest())