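# The tests below rely on two small test doubles that are not shown in this
# excerpt: FakeResponse (a canned HTTP response) and set_http_requests (a
# context manager that answers every outgoing chunk request with the response
# chosen by a callback, and records the requests it served). What follows is
# only a minimal sketch inferred from how the tests use them; the read() and
# getheader() methods and the patching details are assumptions, and the real
# helpers in the test suite may differ.
import time
from contextlib import contextmanager


class FakeResponse(object):
    """Canned response: a status code (or an exception to raise), a body,
    optional headers, and an optional delay simulating a slow backend."""

    def __init__(self, status, body=b'', headers=None, slow=0.0):
        self.status = status
        self.body = body
        self.headers = headers or {}
        self.slow = slow

    def getheader(self, name, default=None):
        return self.headers.get(name, default)

    def read(self, size=-1):
        if self.slow:
            time.sleep(self.slow)
        data, self.body = self.body, b''
        return data


@contextmanager
def set_http_requests(get_response):
    """Yield a list that records served requests. The real helper also
    patches the HTTP connection layer so that each outgoing request is
    appended to this list and answered by get_response(request); that
    patching is deliberately omitted from this sketch."""
    conn_record = []
    yield conn_record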
def test_rebuild_with_wrong_chunk_size(self):
    """Rebuild a chunk when some sources serve a truncated body and
    advertise the truncated size."""
    test_data = (b'1234' * self.storage_method.ec_segment_size)[:-777]
    ec_chunks = self._make_ec_chunks(test_data)
    missing_chunk_body = ec_chunks.pop(1)
    meta_chunk = self.meta_chunk()
    missing_chunk = meta_chunk.pop(1)
    responses = list()
    for i, ec_chunk in enumerate(ec_chunks):
        chunk_size = len(ec_chunk)
        if i < self.storage_method.ec_nb_parity - 1:
            # Change the chunk size for the first chunks
            chunk_size = random.randrange(chunk_size)
        headers = {'x-oio-chunk-meta-chunk-size': chunk_size}
        responses.append(FakeResponse(200, ec_chunk[:chunk_size], headers))

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    missing = missing_chunk['num']
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    with set_http_requests(get_response) as conn_record:
        handler = ECRebuildHandler(meta_chunk, missing, self.storage_method)
        expected_chunk_size, stream = handler.rebuild()
        if expected_chunk_size is not None:
            self.assertEqual(expected_chunk_size, len(missing_chunk_body))
        result = b''.join(stream)
        self.assertEqual(len(result), len(missing_chunk_body))
        self.assertEqual(
            self.checksum(result).hexdigest(),
            self.checksum(missing_chunk_body).hexdigest())
        self.assertEqual(len(conn_record), nb - 1)
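# Several tests here call self._make_ec_chunks(data), a fixture helper that
# is not part of this excerpt. The sketch below shows one plausible way such
# a helper could be written on top of PyECLib's ECDriver: split the data into
# segments, encode each segment, and concatenate the fragments per position.
# The k, m and ec_type values are illustrative placeholders and may not match
# the fixture's storage_method.
from pyeclib.ec_iface import ECDriver


def make_ec_chunks(data, segment_size, k=6, m=2,
                   ec_type='liberasurecode_rs_vand'):
    """Return one chunk body per fragment position for the given data."""
    driver = ECDriver(k=k, m=m, ec_type=ec_type)
    chunks = [b''] * (k + m)
    for offset in range(0, len(data), segment_size):
        fragments = driver.encode(data[offset:offset + segment_size])
        for i, fragment in enumerate(fragments):
            chunks[i] += fragment
    return chunks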
def test_read_timeout_resume(self):
    """A read timeout on the first copy must resume on another copy."""
    test_data = (b'1234' * 1024 * 1024)[:-10]
    data_checksum = self.checksum(test_data).hexdigest()
    meta_chunk = self.meta_chunk()
    headers = {}
    responses = [
        FakeResponse(200, test_data, headers, slow=0.05),
        FakeResponse(200, test_data, headers),
        FakeResponse(200, test_data, headers),
    ]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    headers = {}
    data = b''
    parts = []
    with set_http_requests(get_response) as conn_record:
        reader = io.ChunkReader(iter(meta_chunk), None, headers,
                                read_timeout=0.01)
        it = reader.get_iter()
        for part in it:
            parts.append(part)
            for d in part['iter']:
                data += d
        self.assertEqual(len(parts), 1)
        self.assertEqual(data_checksum, self.checksum(data).hexdigest())
        self.assertEqual(len(conn_record), 2)
def test_read_range_unsatisfiable(self):
    # unsatisfiable range responses, one per chunk
    responses = [FakeResponse(416) for _ in range(8)]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    headers = {}
    meta_start = None
    meta_end = 10000000000
    meta_chunk = self.meta_chunk()
    meta_chunk[0]['size'] = 1024
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    with set_http_requests(get_response) as conn_record:
        handler = ECChunkDownloadHandler(self.storage_method, meta_chunk,
                                         meta_start, meta_end, headers)
        # TODO specialize Exception here (UnsatisfiableRange)
        self.assertRaises(exc.OioException, handler.get_stream)
        self.assertEqual(len(conn_record), nb)
def test_read_zero_byte(self):
    test_data = b''
    data_checksum = self.checksum(test_data).hexdigest()
    meta_chunk = self.meta_chunk()
    responses = [
        FakeResponse(200, test_data),
        FakeResponse(200, test_data),
        FakeResponse(200, test_data),
    ]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    headers = {}
    data = b''
    parts = []
    with set_http_requests(get_response) as conn_record:
        reader = io.ChunkReader(iter(meta_chunk), None, headers)
        it = reader.get_iter()
        for part in it:
            parts.append(part)
            for d in part['iter']:
                data += d
        self.assertEqual(len(parts), 1)
        self.assertEqual(len(test_data), len(data))
        self.assertEqual(data_checksum, self.checksum(data).hexdigest())
        self.assertEqual(len(conn_record), 1)
def test_rebuild_failure(self):
    """Rebuild must fail when all the remaining chunks are in error."""
    meta_chunk = self.meta_chunk()
    missing_chunk = meta_chunk.pop(1)
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    # add errors on the other chunks
    errors = [Timeout(), 404, Exception('failure')]
    responses = [
        FakeResponse(random.choice(errors), b'', {}) for i in range(nb - 1)
    ]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    missing = missing_chunk['num']
    with set_http_requests(get_response) as conn_record:
        handler = ECRebuildHandler(meta_chunk, missing, self.storage_method)
        # TODO use specialized exception
        self.assertRaises(exc.OioException, handler.rebuild)
        self.assertEqual(len(conn_record), nb - 1)
def test_read_advanced(self):
    segment_size = self.storage_method.ec_segment_size
    test_data = (b'1234' * segment_size)[:-657]
    ec_chunks = self._make_ec_chunks(test_data)

    chunks = [
        {'path': '/0'},
        {'path': '/1'},
        {'path': '/2'},
        {'path': '/3'},
        {'path': '/4'},
        {'path': '/5'},
        {'path': '/6'},
        {'path': '/7'},
    ]
    responses = {
        n['path']: FakeResponse(200, ec_chunks[i])
        for i, n in enumerate(chunks)
    }

    def get_response(req):
        return responses.pop(req['path'])

    headers = {}
    meta_start = None
    meta_end = None
    meta_chunk = self.meta_chunk()
    meta_chunk[0]['size'] = len(test_data)
    with set_http_requests(get_response) as conn_record:
        handler = ECChunkDownloadHandler(self.storage_method, meta_chunk,
                                         meta_start, meta_end, headers)
        stream = handler.get_stream()
        for part in stream:
            for x in part['iter']:
                pass
        # nb_data requests
        self.assertEqual(len(conn_record), self.storage_method.ec_nb_data)
        # nb_parity remaining
        self.assertEqual(len(responses), self.storage_method.ec_nb_parity)
def test_read_range(self):
    fragment_size = self.storage_method.ec_fragment_size
    test_data, ec_chunks = self._make_ec_meta_resp()
    part_size = len(ec_chunks[0])

    # TODO test random ranges
    headers = {
        'Content-Length': fragment_size,
        'Content-Type': 'text/plain',
        'Content-Range': 'bytes 0-%s/%s' % (fragment_size - 1, part_size),
    }
    responses = [
        FakeResponse(206, ec_chunks[0][:fragment_size], headers),
        FakeResponse(206, ec_chunks[1][:fragment_size], headers),
        FakeResponse(206, ec_chunks[2][:fragment_size], headers),
        FakeResponse(206, ec_chunks[3][:fragment_size], headers),
        FakeResponse(206, ec_chunks[4][:fragment_size], headers),
        FakeResponse(206, ec_chunks[5][:fragment_size], headers),
        FakeResponse(206, ec_chunks[6][:fragment_size], headers),
        FakeResponse(206, ec_chunks[7][:fragment_size], headers),
    ]

    # TODO test ranges overlapping multiple fragments
    range_header = 'bytes=0-%s' % (fragment_size - 1)

    def get_response(req):
        self.assertEqual(req['headers'].get('Range'), range_header)
        return responses.pop(0) if responses else FakeResponse(404)

    headers = dict()
    meta_start = 1
    meta_end = 4
    meta_chunk = self.meta_chunk()
    meta_chunk[0]['size'] = len(test_data)
    data = b''
    parts = []
    with set_http_requests(get_response) as conn_record:
        handler = ECChunkDownloadHandler(self.storage_method, meta_chunk,
                                         meta_start, meta_end, headers)
        stream = handler.get_stream()
        for part in stream:
            parts.append(part)
            for x in part['iter']:
                data += x
        self.assertEqual(len(parts), 1)
        self.assertEqual(parts[0]['start'], 1)
        self.assertEqual(parts[0]['end'], 4)
        self.assertEqual(data, test_data[meta_start:meta_end + 1])
        self.assertEqual(len(conn_record), self.storage_method.ec_nb_data)
def test_read_range_unsatisfiable(self):
    responses = [
        FakeResponse(416),
        FakeResponse(416),
        FakeResponse(416),
    ]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    meta_end = 1000000000
    meta_chunk = self.meta_chunk()
    headers = {'Range': 'bytes=-%s' % meta_end}
    with set_http_requests(get_response) as conn_record:
        reader = io.ChunkReader(iter(meta_chunk), None, headers)
        self.assertRaises(exc.ClientException, reader.get_iter)
        self.assertEqual(len(conn_record), self.storage_method.nb_copy)
def test_rebuild_parity_errors(self):
    """Rebuild a parity chunk while one other chunk is also in error."""
    test_data = (b'1234' * self.storage_method.ec_segment_size)[:-777]
    ec_chunks = self._make_ec_chunks(test_data)
    # break one parity chunk
    missing_chunk_body = ec_chunks.pop(-1)
    meta_chunk = self.meta_chunk()
    missing_chunk = meta_chunk.pop(-1)
    # also add an error on another chunk
    for error in (Timeout(), 404, Exception('failure')):
        headers = {}
        base_responses = list()
        for ec_chunk in ec_chunks:
            base_responses.append(FakeResponse(200, ec_chunk, headers))
        responses = base_responses
        error_idx = random.randint(0, len(responses) - 1)
        responses[error_idx] = FakeResponse(error, b'', {})

        def get_response(req):
            return responses.pop(0) if responses else FakeResponse(404)

        missing = missing_chunk['num']
        nb = (self.storage_method.ec_nb_data +
              self.storage_method.ec_nb_parity)
        with set_http_requests(get_response) as conn_record:
            handler = ECRebuildHandler(meta_chunk, missing,
                                       self.storage_method)
            expected_chunk_size, stream = handler.rebuild()
            if expected_chunk_size is not None:
                self.assertEqual(expected_chunk_size,
                                 len(missing_chunk_body))
            result = b''.join(stream)
            self.assertEqual(len(result), len(missing_chunk_body))
            self.assertEqual(
                self.checksum(result).hexdigest(),
                self.checksum(missing_chunk_body).hexdigest())
            self.assertEqual(len(conn_record), nb - 1)
def test_read_timeout(self):
    """When every chunk read times out, the downloaded body must not
    match the original data."""
    segment_size = self.storage_method.ec_segment_size
    test_data = (b'1234' * segment_size)[:-333]
    ec_chunks = self._make_ec_chunks(test_data)

    headers = {}
    responses = [
        FakeResponse(200, ec_chunks[0], headers, slow=0.1),
        FakeResponse(200, ec_chunks[1], headers, slow=0.1),
        FakeResponse(200, ec_chunks[2], headers, slow=0.1),
        FakeResponse(200, ec_chunks[3], headers, slow=0.1),
        FakeResponse(200, ec_chunks[4], headers, slow=0.1),
        FakeResponse(200, ec_chunks[5], headers, slow=0.1),
        FakeResponse(200, ec_chunks[6], headers, slow=0.1),
        FakeResponse(200, ec_chunks[7], headers, slow=0.1),
    ]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    meta_chunk = self.meta_chunk()
    meta_chunk[0]['size'] = len(test_data)
    meta_start = None
    meta_end = None
    with set_http_requests(get_response) as conn_record:
        handler = ECChunkDownloadHandler(self.storage_method, meta_chunk,
                                         meta_start, meta_end, headers,
                                         read_timeout=0.05)
        stream = handler.get_stream()
        body = b''
        for part in stream:
            for body_chunk in part['iter']:
                body += body_chunk
        self.assertNotEqual(
            self.checksum(test_data).hexdigest(),
            self.checksum(body).hexdigest())
        self.assertEqual(len(conn_record), nb)
def test_read_range(self):
    test_data = (b'1024' * 1024)[:-10]
    meta_chunk = self.meta_chunk()
    meta_start = 1
    meta_end = 4
    part_data = test_data[meta_start:meta_end + 1]
    headers = {
        'Content-Length': str(len(part_data)),
        'Content-Type': 'text/plain',
        'Content-Range': 'bytes %s-%s/%s' % (
            meta_start, meta_end, len(test_data)),
    }
    responses = [
        FakeResponse(206, part_data, headers),
        FakeResponse(206, part_data, headers),
        FakeResponse(206, part_data, headers),
    ]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    headers = {'Range': 'bytes=%s-%s' % (meta_start, meta_end)}
    data = b''
    parts = []
    with set_http_requests(get_response) as conn_record:
        reader = io.ChunkReader(iter(meta_chunk), None, headers)
        it = reader.get_iter()
        for part in it:
            parts.append(part)
            for d in part['iter']:
                data += d
        self.assertEqual(len(parts), 1)
        self.assertEqual(parts[0]['start'], 1)
        self.assertEqual(parts[0]['end'], 4)
        self.assertEqual(len(part_data), len(data))
        self.assertEqual(len(conn_record), 1)
def test_read_zero_byte(self):
    empty = b''
    headers = {
        'Content-Length': 0,
    }
    # one empty response per chunk
    responses = [FakeResponse(200, b'', headers) for _ in range(8)]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    headers = {}
    meta_start = 1
    meta_end = 4
    meta_chunk = self.meta_chunk()
    meta_chunk[0]['size'] = len(empty)
    data = b''
    parts = []
    with set_http_requests(get_response) as conn_record:
        handler = ECChunkDownloadHandler(self.storage_method, meta_chunk,
                                         meta_start, meta_end, headers)
        stream = handler.get_stream()
        for part in stream:
            parts.append(part)
            for x in part['iter']:
                data += x
        self.assertEqual(len(parts), 0)
        self.assertEqual(data, empty)
        self.assertEqual(len(conn_record), self.storage_method.ec_nb_data)
def test_rebuild(self):
    test_data = (b'1234' * self.storage_method.ec_segment_size)[:-777]
    ec_chunks = self._make_ec_chunks(test_data)
    missing_chunk_body = ec_chunks.pop(1)
    meta_chunk = self.meta_chunk()
    missing_chunk = meta_chunk.pop(1)
    headers = {}
    responses = [
        FakeResponse(200, ec_chunks[0], headers),
        FakeResponse(200, ec_chunks[1], headers),
        FakeResponse(200, ec_chunks[2], headers),
        FakeResponse(200, ec_chunks[3], headers),
        FakeResponse(200, ec_chunks[4], headers),
        FakeResponse(200, ec_chunks[5], headers),
        FakeResponse(200, ec_chunks[6], headers),
    ]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    missing = missing_chunk['num']
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    with set_http_requests(get_response) as conn_record:
        handler = ECRebuildHandler(meta_chunk, missing, self.storage_method)
        expected_chunk_size, stream = handler.rebuild()
        if expected_chunk_size is not None:
            self.assertEqual(expected_chunk_size, len(missing_chunk_body))
        result = b''.join(stream)
        self.assertEqual(len(result), len(missing_chunk_body))
        self.assertEqual(
            self.checksum(result).hexdigest(),
            self.checksum(missing_chunk_body).hexdigest())
        self.assertEqual(len(conn_record), nb - 1)
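# For reference, the fragment-level reconstruction that a rebuild handler has
# to perform is available directly from PyECLib. The sketch below only
# illustrates that underlying primitive; it is not the ECRebuildHandler
# implementation, and the k/m/ec_type values are placeholders.
from pyeclib.ec_iface import ECDriver


def reconstruct_fragment(fragments_by_index, missing_index, k=6, m=2,
                         ec_type='liberasurecode_rs_vand'):
    """Rebuild one missing fragment from the surviving ones.

    fragments_by_index maps fragment index -> fragment payload (bytes) for
    the fragments that could be downloaded.
    """
    driver = ECDriver(k=k, m=m, ec_type=ec_type)
    available = list(fragments_by_index.values())
    rebuilt = driver.reconstruct(available, [missing_index])
    return rebuilt[0]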