def _fetch_stream_ec(self, meta, chunks, ranges, storage_method, headers):
    # Download each requested range, one EC handler per meta chunk position,
    # and yield the decoded data as it is reassembled.
    ranges = ranges or [(None, None)]
    meta_ranges = get_meta_ranges(ranges, chunks)
    for pos, meta_range in meta_ranges.iteritems():
        meta_start, meta_end = meta_range
        handler = ECChunkDownloadHandler(storage_method, chunks[pos],
                                         meta_start, meta_end, headers)
        stream = handler.get_stream()
        for part_info in stream:
            for d in part_info['iter']:
                yield d
        stream.close()
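# A minimal usage sketch, not part of the original code: how a caller might
# drive _fetch_stream_ec() to read a byte range. The 'meta', 'chunks_by_pos'
# and 'storage_method' names below are placeholders for values normally built
# by the object client; the ranges format follows the (start, end) tuples
# expected by get_meta_ranges().
#
#     stream = self._fetch_stream_ec(meta, chunks_by_pos,
#                                    ranges=[(0, 1023)],
#                                    storage_method=storage_method,
#                                    headers={})
#     data = ''.join(stream)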
def test_read_range(self):
    fragment_size = self.storage_method.ec_fragment_size
    test_data, ec_chunks = self._make_ec_meta_resp()
    part_size = len(ec_chunks[0])
    headers = {
        'Content-Length': fragment_size,
        'Content-Type': 'text/plain',
        'Content-Range': 'bytes 0-%s/%s' % (fragment_size - 1, part_size)}
    # each chunk only serves its first fragment, which is enough to
    # rebuild the requested 1-4 byte range
    responses = [
        FakeResponse(206, ec_chunks[0][:fragment_size], headers),
        FakeResponse(206, ec_chunks[1][:fragment_size], headers),
        FakeResponse(206, ec_chunks[2][:fragment_size], headers),
        FakeResponse(206, ec_chunks[3][:fragment_size], headers),
        FakeResponse(206, ec_chunks[4][:fragment_size], headers),
        FakeResponse(206, ec_chunks[5][:fragment_size], headers),
        FakeResponse(206, ec_chunks[6][:fragment_size], headers),
        FakeResponse(206, ec_chunks[7][:fragment_size], headers),
    ]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    headers = {}
    meta_start = 1
    meta_end = 4
    meta_chunk = self.meta_chunk()
    meta_chunk[0]['size'] = len(test_data)
    data = ''
    parts = []
    with set_http_requests(get_response) as conn_record:
        handler = ECChunkDownloadHandler(
            self.storage_method, meta_chunk, meta_start, meta_end, headers)
        stream = handler.get_stream()
        for part in stream:
            parts.append(part)
            for x in part['iter']:
                data += x

    self.assertEqual(len(parts), 1)
    self.assertEqual(parts[0]['start'], 1)
    self.assertEqual(parts[0]['end'], 4)
    self.assertEqual(data, '2341')
    # only nb_data chunks are needed to rebuild the range
    self.assertEqual(len(conn_record), self.storage_method.ec_nb_data)
def test_read(self):
    segment_size = self.storage_method.ec_segment_size
    data = ('1234' * segment_size)[:-10]
    d = [data[x:x + segment_size]
         for x in range(0, len(data), segment_size)]

    fragmented_data = []
    for c in d:
        fragments = self.storage_method.driver.encode(c)
        if not fragments:
            break
        fragmented_data.append(fragments)

    # sanity check: decoding the encoded segments gives back the input
    result = ''
    for fragment_data in fragmented_data:
        result += self.storage_method.driver.decode(fragment_data)
    self.assertEqual(len(data), len(result))
    self.assertEqual(data, result)

    # chunk i is the concatenation of fragment i of every segment
    chunk_fragments = list(zip(*fragmented_data))
    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    self.assertEqual(len(chunk_fragments), nb)
    chunks_resps = [(200, ''.join(chunk_fragments[i]))
                    for i in range(self.storage_method.ec_nb_data)]
    resps, body_iter = zip(*chunks_resps)

    meta_start = None
    meta_end = None
    headers = {}
    meta_chunk = self.meta_chunk()
    meta_chunk[0]['size'] = len(data)
    with set_http_connect(*resps, body_iter=body_iter):
        handler = ECChunkDownloadHandler(self.storage_method, meta_chunk,
                                         meta_start, meta_end, headers)
        stream = handler.get_stream()
        body = ''
        for part in stream:
            for body_chunk in part['iter']:
                body += body_chunk

    self.assertEqual(len(data), len(body))
    self.assertEqual(data, body)
def test_read_advanced(self):
    segment_size = self.storage_method.ec_segment_size
    test_data = ('1234' * segment_size)[:-657]
    ec_chunks = self._make_ec_chunks(test_data)

    chunks = [
        {'path': '/0'},
        {'path': '/1'},
        {'path': '/2'},
        {'path': '/3'},
        {'path': '/4'},
        {'path': '/5'},
        {'path': '/6'},
        {'path': '/7'},
    ]
    responses = {
        n['path']: FakeResponse(200, ec_chunks[i])
        for i, n in enumerate(chunks)
    }

    def get_response(req):
        return responses.pop(req['path'])

    headers = {}
    meta_start = None
    meta_end = None
    meta_chunk = self.meta_chunk()
    meta_chunk[0]['size'] = len(test_data)
    with set_http_requests(get_response) as conn_record:
        handler = ECChunkDownloadHandler(self.storage_method, meta_chunk,
                                         meta_start, meta_end, headers)
        stream = handler.get_stream()
        for part in stream:
            for x in part['iter']:
                pass

    # nb_data requests
    self.assertEqual(len(conn_record), self.storage_method.ec_nb_data)
    # nb_parity remaining
    self.assertEqual(len(responses), self.storage_method.ec_nb_parity)
def test_read_zero_byte(self):
    empty = ''
    headers = {
        'Content-Length': 0,
    }
    responses = [
        FakeResponse(200, '', headers),
        FakeResponse(200, '', headers),
        FakeResponse(200, '', headers),
        FakeResponse(200, '', headers),
        FakeResponse(200, '', headers),
        FakeResponse(200, '', headers),
        FakeResponse(200, '', headers),
        FakeResponse(200, '', headers),
    ]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    headers = {}
    meta_start = 1
    meta_end = 4
    meta_chunk = self.meta_chunk()
    meta_chunk[0]['size'] = len(empty)
    data = ''
    parts = []
    with set_http_requests(get_response) as conn_record:
        handler = ECChunkDownloadHandler(
            self.storage_method, meta_chunk, meta_start, meta_end, headers)
        stream = handler.get_stream()
        for part in stream:
            parts.append(part)
            for x in part['iter']:
                data += x

    self.assertEqual(len(parts), 0)
    self.assertEqual(data, empty)
    self.assertEqual(len(conn_record), self.storage_method.ec_nb_data)
def test_read_timeout(self):
    segment_size = self.storage_method.ec_segment_size
    test_data = ('1234' * segment_size)[:-333]
    ec_chunks = self._make_ec_chunks(test_data)
    headers = {}
    # every response is slower than the handler's read timeout
    responses = [
        FakeResponse(200, ec_chunks[0], headers, slow=0.1),
        FakeResponse(200, ec_chunks[1], headers, slow=0.1),
        FakeResponse(200, ec_chunks[2], headers, slow=0.1),
        FakeResponse(200, ec_chunks[3], headers, slow=0.1),
        FakeResponse(200, ec_chunks[4], headers, slow=0.1),
        FakeResponse(200, ec_chunks[5], headers, slow=0.1),
        FakeResponse(200, ec_chunks[6], headers, slow=0.1),
        FakeResponse(200, ec_chunks[7], headers, slow=0.1),
    ]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    nb = self.storage_method.ec_nb_data + self.storage_method.ec_nb_parity
    meta_chunk = self.meta_chunk()
    meta_chunk[0]['size'] = len(test_data)
    meta_start = None
    meta_end = None
    with set_http_requests(get_response) as conn_record:
        handler = ECChunkDownloadHandler(
            self.storage_method, meta_chunk, meta_start, meta_end, headers,
            read_timeout=0.05)
        stream = handler.get_stream()
        body = ''
        for part in stream:
            for body_chunk in part['iter']:
                body += body_chunk

    # all sources timed out, so the downloaded body cannot match the data
    self.assertNotEqual(self.checksum(test_data).hexdigest(),
                        self.checksum(body).hexdigest())
    # the handler had to try every chunk (data + parity)
    self.assertEqual(len(conn_record), nb)
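# Hedged sketch, not part of the original tests: the cases above rely on a
# _make_ec_chunks() helper defined elsewhere in this class. A minimal
# implementation consistent with the manual encoding done in test_read()
# could look like the following (it assumes the same storage_method driver
# API used above).
#
#     def _make_ec_chunks(self, data):
#         segment_size = self.storage_method.ec_segment_size
#         # split the data into EC segments
#         segments = [data[x:x + segment_size]
#                     for x in range(0, len(data), segment_size)]
#         # encode each segment into nb_data + nb_parity fragments
#         fragmented = [self.storage_method.driver.encode(segment)
#                       for segment in segments]
#         # chunk i is the concatenation of fragment i of every segment
#         return [''.join(fragments) for fragments in zip(*fragmented)]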