def write_ec_meta_chunk(self, source, size, storage_method, sysmeta,
                        meta_chunk):
    meta_checksum = md5()
    handler = EcMetachunkWriter(sysmeta, meta_chunk, meta_checksum,
                                storage_method)
    bytes_transferred, checksum, chunks = handler.stream(source, size)
    return Response("OK")

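# Minimal sketch of the simplest helpers the tests below assume; the real
# definitions (along with CHUNK_SIZE, CHUNK_HEADERS and the set_http_connect
# fixture) live elsewhere in the test module and may differ.
from hashlib import md5
from io import BytesIO

EMPTY_MD5 = md5(b"").hexdigest()


def empty_stream():
    """Return a file-like source that yields no data."""
    return BytesIO(b"")
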
def test_write_connect_errors(self):
    test_cases = [
        {
            'error': green.ConnectionTimeout(1.0),
            'msg': 'connect: Connection timeout 1.0 second',
        },
        {
            'error': Exception('failure'),
            'msg': 'connect: failure',
        },
    ]
    for test in test_cases:
        checksum = self.checksum()
        source = empty_stream()
        size = CHUNK_SIZE * self.storage_method.ec_nb_data
        nb = self.storage_method.ec_nb_data + \
            self.storage_method.ec_nb_parity
        resps = [201] * (nb - 1)
        # Put the error in the middle to mess with chunk indices
        err_pos = random.randint(0, nb)
        resps.insert(err_pos, test['error'])
        with set_http_connect(*resps):
            handler = EcMetachunkWriter(self.sysmeta,
                                        self.meta_chunk_copy(),
                                        checksum, self.storage_method)
            bytes_transferred, checksum, chunks = handler.stream(
                source, size)

        self.assertEqual(len(chunks), nb)
        for i in range(nb - 1):
            self.assertEqual(chunks[i].get('error'), None)
        self.assertEqual(chunks[nb - 1].get('error'), test['msg'])

        self.assertEqual(bytes_transferred, 0)
        self.assertEqual(checksum, EMPTY_MD5)

def test_write_response_error(self):
    test_cases = [
        {
            'error': green.ChunkWriteTimeout(1.0),
            'msg': 'resp: Chunk write timeout 1.0 second',
        },
        {
            'error': Exception('failure'),
            'msg': 'resp: failure',
        },
    ]
    for test in test_cases:
        checksum = self.checksum()
        source = empty_stream()
        size = CHUNK_SIZE * self.storage_method.ec_nb_data
        nb = self.storage_method.ec_nb_data + \
            self.storage_method.ec_nb_parity
        resps = [201] * (nb - 1)
        resps.append((100, test['error']))
        with set_http_connect(*resps):
            handler = EcMetachunkWriter(self.sysmeta,
                                        self.meta_chunk_copy(),
                                        checksum, self.storage_method)
            bytes_transferred, checksum, chunks = handler.stream(
                source, size)

        self.assertEqual(len(chunks), nb)
        for i in range(nb - 1):
            self.assertEqual(chunks[i].get('error'), None)
        self.assertEqual(chunks[nb - 1].get('error'), test['msg'])

        self.assertEqual(bytes_transferred, 0)
        self.assertEqual(checksum, EMPTY_MD5)

def test_write_transfer(self):
    checksum = self.checksum()
    segment_size = self.storage_method.ec_segment_size
    test_data = (b'1234' * segment_size)[:-10]
    size = len(test_data)
    test_data_checksum = self.checksum(test_data).hexdigest()
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    resps = [201] * nb
    source = BytesIO(test_data)

    put_reqs = defaultdict(lambda: {'parts': []})

    def cb_body(conn_id, part):
        put_reqs[conn_id]['parts'].append(part)

    # TODO test headers

    with set_http_connect(*resps, cb_body=cb_body):
        handler = EcMetachunkWriter(self.sysmeta, self.meta_chunk(),
                                    checksum, self.storage_method)
        bytes_transferred, checksum, chunks = handler.stream(source, size)

    self.assertEqual(len(test_data), bytes_transferred)
    self.assertEqual(checksum, self.checksum(test_data).hexdigest())

    fragments = []
    for conn_id, info in put_reqs.items():
        body, trailers = decode_chunked_body(b''.join(info['parts']))
        fragments.append(body)
        metachunk_size = int(trailers[CHUNK_HEADERS['metachunk_size']])
        metachunk_hash = trailers[CHUNK_HEADERS['metachunk_hash']]
        self.assertEqual(metachunk_size, size)
        self.assertEqual(metachunk_hash, test_data_checksum)

    self.assertEqual(len(fragments), nb)
    fragment_size = self.storage_method.ec_fragment_size

    # retrieve segments
    frags = []
    for frag in fragments:
        data = [frag[x:x + fragment_size]
                for x in range(0, len(frag), fragment_size)]
        frags.append(data)

    fragments = zip(*frags)

    final_data = b''
    for frag in fragments:
        self.assertEqual(len(frag), nb)
        frag = list(frag)
        final_data += self.storage_method.driver.decode(frag)

    self.assertEqual(len(test_data), len(final_data))
    self.assertEqual(test_data_checksum,
                     self.checksum(final_data).hexdigest())

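# decode_chunked_body() comes from the test utilities. The sketch below only
# illustrates what it is expected to do, assuming the writer sends each
# fragment as an HTTP/1.1 chunked body followed by trailer headers (the
# metachunk size and hash checked above); lowercase trailer keys are an
# assumption, and this is not the real helper.
def _decode_chunked_body_sketch(raw):
    """Split a chunked-encoded request body into (payload, trailers)."""
    body = b''
    pos = 0
    while True:
        eol = raw.index(b'\r\n', pos)
        chunk_size = int(raw[pos:eol].split(b';')[0], 16)
        pos = eol + 2
        if chunk_size == 0:
            break
        body += raw[pos:pos + chunk_size]
        pos += chunk_size + 2  # skip the CRLF terminating the chunk data
    trailers = {}
    for line in raw[pos:].split(b'\r\n'):
        if not line:
            break
        name, _, value = line.partition(b':')
        trailers[name.strip().decode().lower()] = value.strip().decode()
    return body, trailers
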
def test_write_simple(self):
    checksum = self.checksum()
    source = empty_stream()
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    resps = [201] * nb
    with set_http_connect(*resps):
        handler = EcMetachunkWriter(self.sysmeta, self.meta_chunk(),
                                    checksum, self.storage_method)
        bytes_transferred, checksum, chunks = handler.stream(source, size)

    self.assertEqual(len(chunks), nb)
    self.assertEqual(bytes_transferred, 0)
    self.assertEqual(checksum, EMPTY_MD5)

def _test_write_checksum_algo(self, expected_checksum, **kwargs):
    global_checksum = self.checksum()
    source = empty_stream()
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    resps = [201] * nb
    with set_http_connect(*resps):
        handler = EcMetachunkWriter(self.sysmeta, self.meta_chunk(),
                                    global_checksum, self.storage_method,
                                    **kwargs)
        bytes_transferred, checksum, chunks = handler.stream(source, size)

    self.assertEqual(nb, len(chunks))
    self.assertEqual(0, bytes_transferred)
    self.assertEqual(expected_checksum, checksum)

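# A hypothetical caller of the helper above: with no extra kwargs the writer
# falls back to its default checksum algorithm, assumed here to be md5, so the
# expected digest is the empty-payload EMPTY_MD5. Illustration only; the
# concrete checksum-algorithm tests live elsewhere in the suite.
def test_write_default_checksum_algo(self):
    self._test_write_checksum_algo(EMPTY_MD5)
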
def test_write_error(self):
    checksum = self.checksum()
    source = empty_stream()
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    resps = [500] * nb
    with set_http_connect(*resps):
        handler = EcMetachunkWriter(self.sysmeta, self.meta_chunk(),
                                    checksum, self.storage_method)
        # Every chunk write fails with HTTP 500: the generic OioException
        # is expected (test_write_exception below checks the more specific
        # ServiceBusy).
        self.assertRaises(exc.OioException, handler.stream, source, size)

def test_write_quorum_success(self):
    checksum = self.checksum()
    source = empty_stream()
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    quorum_size = self.storage_method.quorum
    resps = [201] * quorum_size
    resps += [500] * (nb - quorum_size)
    with set_http_connect(*resps):
        handler = EcMetachunkWriter(self.sysmeta, self.meta_chunk(),
                                    checksum, self.storage_method)
        bytes_transferred, checksum, chunks = handler.stream(source, size)

    self.assertEqual(len(chunks), nb)
    for i in range(quorum_size):
        self.assertEqual(chunks[i].get('error'), None)
    for i in range(quorum_size, nb):
        self.assertEqual(chunks[i].get('error'), 'resp: HTTP 500')

    self.assertEqual(bytes_transferred, 0)
    self.assertEqual(checksum, EMPTY_MD5)

def test_write_quorum_error(self):
    checksum = self.checksum()
    source = empty_stream()
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    quorum_size = self.storage_method.quorum
    resps = [500] * quorum_size
    resps += [201] * (nb - quorum_size)
    with set_http_connect(*resps):
        handler = EcMetachunkWriter(self.sysmeta, self.meta_chunk(),
                                    checksum, self.storage_method)
        self.assertRaises(exc.ServiceBusy, handler.stream, source, size)

def test_write_exception(self):
    checksum = self.checksum()
    source = empty_stream()
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    resps = [500] * nb
    with set_http_connect(*resps):
        handler = EcMetachunkWriter(self.sysmeta, self.meta_chunk(),
                                    checksum, self.storage_method)
        # From now on, exceptions happening during chunk upload are
        # considered retryable, and thus will tell the caller the
        # service was just too busy.
        self.assertRaises(exc.ServiceBusy, handler.stream, source, size)

def test_write_timeout_source(self):
    class TestReader(object):
        def read(self, size):
            raise Timeout(1.0)

    checksum = self.checksum()
    source = TestReader()
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    resps = [201] * nb
    with set_http_connect(*resps):
        handler = EcMetachunkWriter(self.sysmeta, self.meta_chunk(),
                                    checksum, self.storage_method)
        self.assertRaises(exc.OioTimeout, handler.stream, source, size)

def test_write_exception_source(self):
    class TestReader(object):
        def read(self, size):
            raise Exception('failure')

    checksum = self.checksum()
    source = TestReader()
    size = CHUNK_SIZE * self.storage_method.ec_nb_data
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    resps = [201] * nb
    with set_http_connect(*resps):
        handler = EcMetachunkWriter(self.sysmeta, self.meta_chunk(),
                                    checksum, self.storage_method)
        # TODO specialize exception
        self.assertRaises(Exception, handler.stream, source, size)