def test_read_timeout_resume(self):
    """The first source is too slow and must hit the read timeout;
    the reader then resumes on a replica and delivers complete,
    checksum-valid data.
    """
    test_data = (b'1234' * 1024 * 1024)[:-10]
    data_checksum = self.checksum(test_data).hexdigest()
    meta_chunk = self.meta_chunk()
    # Headers attached to the fake backend responses (kept separate
    # from the request headers handed to the reader below).
    resp_headers = {}
    responses = [
        FakeResponse(200, test_data, resp_headers, slow=0.05),
        FakeResponse(200, test_data, resp_headers),
        FakeResponse(200, test_data, resp_headers),
    ]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    headers = {}
    data = b''
    parts = []
    with set_http_requests(get_response) as conn_record:
        reader = io.ChunkReader(iter(meta_chunk), None, headers,
                                read_timeout=0.01)
        for part in reader.get_iter():
            parts.append(part)
            for d in part['iter']:
                data += d
    self.assertEqual(len(parts), 1)
    self.assertEqual(data_checksum, self.checksum(data).hexdigest())
    # one connection timed out, a second one completed the download
    self.assertEqual(len(conn_record), 2)
def test_read_zero_byte(self):
    """Reading an empty content yields a single part holding zero
    bytes, using exactly one backend connection.
    """
    test_data = b''
    data_checksum = self.checksum(test_data).hexdigest()
    meta_chunk = self.meta_chunk()
    responses = [FakeResponse(200, test_data) for _ in range(3)]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    headers = {}
    data = b''
    parts = []
    with set_http_requests(get_response) as conn_record:
        reader = io.ChunkReader(iter(meta_chunk), None, headers)
        for part in reader.get_iter():
            parts.append(part)
            for d in part['iter']:
                data += d
    self.assertEqual(len(parts), 1)
    self.assertEqual(len(test_data), len(data))
    self.assertEqual(data_checksum, self.checksum(data).hexdigest())
    self.assertEqual(len(conn_record), 1)
def _fetch_stream(self, meta, chunks, ranges, storage_method, headers):
    """Yield the raw data of a replicated content, restricted to the
    requested byte ranges (or the whole content when none are given).

    NOTE(review): iteration order over each range dict follows
    ``iteritems()`` (Python 2), so position order within a dict is
    whatever the dict yields — confirm callers do not rely on it.
    """
    total_bytes = 0
    headers = headers or {}
    ranges = ranges or [(None, None)]
    meta_range_list = get_meta_ranges(ranges, chunks)
    for range_dict in meta_range_list:
        for pos, (meta_start, meta_end) in range_dict.iteritems():
            if meta_start is not None and meta_end is not None:
                # mutate the shared request headers for this position
                headers['Range'] = http_header_from_ranges(
                    [(meta_start, meta_end)])
            reader = io.ChunkReader(
                iter(chunks[pos]), io.READ_CHUNK_SIZE, headers,
                connection_timeout=self.connection_timeout,
                response_timeout=self.read_timeout,
                read_timeout=self.read_timeout)
            try:
                part_iter = reader.get_iter()
            except Exception as err:
                raise exc.OioException(
                    "Error while downloading position %d: %s" % (pos, err))
            for part in part_iter:
                for d in part['iter']:
                    total_bytes += len(d)
                    yield d
def _get_fragment(self, chunk_iter, storage_method):
    """Build a fragment reader over *chunk_iter* and return it along
    with its part iterator.
    """
    # TODO generate proper headers
    frag_headers = {}
    frag_reader = io.ChunkReader(chunk_iter,
                                 storage_method.ec_fragment_size,
                                 frag_headers,
                                 self.connection_timeout,
                                 self.response_timeout,
                                 self.read_timeout)
    return frag_reader, frag_reader.get_iter()
def fetch_stream(chunks, ranges, storage_method, headers=None, **kwargs):
    """Yield the raw data of a replicated content, optionally
    restricted to byte ranges.

    :param chunks: mapping of metachunk position to list of chunks
    :param ranges: list of (offset, size) tuples, or None/empty for
        the whole content
    :param storage_method: storage method of the content
    :param headers: optional dict of HTTP headers sent to the chunk
        services; a 'Range' header is added per position when needed
    :param kwargs: extra keyword arguments forwarded to ChunkReader
    :raises exc.UnrecoverableContent: if a position cannot be found
    :raises exc.OioException: on any other download error
    """
    ranges = ranges or [(None, None)]
    # Bug fix: 'headers' defaults to None, and assigning
    # headers['Range'] below raised TypeError whenever an actual
    # range was requested without explicit headers. Fall back to a
    # fresh dict, mirroring the sibling _fetch_stream implementation.
    headers = headers or {}
    meta_range_list = get_meta_ranges(ranges, chunks)
    for meta_range_dict in meta_range_list:
        # sorted() keeps positions in deterministic ascending order
        for pos in sorted(meta_range_dict.keys()):
            meta_start, meta_end = meta_range_dict[pos]
            if meta_start is not None and meta_end is not None:
                headers['Range'] = http_header_from_ranges(
                    (meta_range_dict[pos], ))
            reader = io.ChunkReader(iter(chunks[pos]),
                                    io.READ_CHUNK_SIZE,
                                    headers=headers, **kwargs)
            try:
                it = reader.get_iter()
            except exc.NotFound as err:
                raise exc.UnrecoverableContent(
                    "Cannot download position %d: %s" % (pos, err))
            except Exception as err:
                raise exc.OioException(
                    "Error while downloading position %d: %s" % (pos, err))
            for part in it:
                for dat in part['iter']:
                    yield dat
def _fetch_stream(self, chunks, storage_method, headers):
    """Yield the raw data of a whole replicated content.

    :raises UnrecoverableContent: when no part iterator could be
        obtained for a position
    """
    meta_ranges = get_meta_ranges([(None, None)], chunks)
    for pos, (meta_start, meta_end) in meta_ranges.iteritems():
        reader = io.ChunkReader(iter(chunks[pos]),
                                io.READ_CHUNK_SIZE, headers)
        part_iter = reader.get_iter()
        if not part_iter:
            raise UnrecoverableContent("Error while downloading")
        for part in part_iter:
            for piece in part['iter']:
                yield piece
def _get_fragment(self, chunk_iter, range_infos, storage_method):
    """Build an aligned fragment reader over *chunk_iter*, applying at
    most one fragment range, and return (reader, part iterator).
    """
    frag_headers = dict(self.headers)
    if range_infos:
        # only handle one range
        first_range = range_infos[0]
        frag_headers['Range'] = 'bytes=%s-%s' % (
            first_range['req_fragment_start'],
            first_range['req_fragment_end'])
    frag_reader = io.ChunkReader(chunk_iter,
                                 storage_method.ec_fragment_size,
                                 frag_headers,
                                 self.connection_timeout,
                                 self.read_timeout,
                                 align=True)
    return frag_reader, frag_reader.get_iter()
def _fetch_stream(self, chunks, storage_method, headers):
    """Yield the raw data of a whole replicated content.

    :raises UnrecoverableContent: if any position fails to download
    """
    for range_dict in get_meta_ranges([(None, None)], chunks):
        for pos, (meta_start, meta_end) in range_dict.iteritems():
            reader = io.ChunkReader(iter(chunks[pos]),
                                    io.READ_CHUNK_SIZE, headers)
            try:
                part_iter = reader.get_iter()
            except Exception as err:
                raise UnrecoverableContent(
                    "Error while downloading: %s" % err)
            for part in part_iter:
                for piece in part['iter']:
                    yield piece
def test_read_range_unsatisfiable(self):
    """A 416 from every replica must surface as a ClientException
    after one attempt per copy.
    """
    responses = [FakeResponse(416) for _ in range(3)]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    meta_end = 1000000000
    meta_chunk = self.meta_chunk()
    # suffix range far beyond the content size
    headers = {'Range': 'bytes=-%s' % (meta_end)}
    with set_http_requests(get_response) as conn_record:
        reader = io.ChunkReader(iter(meta_chunk), None, headers)
        self.assertRaises(exc.ClientException, reader.get_iter)
        self.assertEqual(len(conn_record), self.storage_method.nb_copy)
def _fetch_stream(self, meta, chunks, ranges, storage_method, headers):
    """Yield the raw data of a replicated content for the requested
    ranges (whole content when none are given).

    :raises exc.OioException: when no part iterator could be obtained
        for a position
    """
    total_bytes = 0
    headers = headers or {}
    ranges = ranges or [(None, None)]
    meta_ranges = get_meta_ranges(ranges, chunks)
    for pos, (meta_start, meta_end) in meta_ranges.iteritems():
        reader = io.ChunkReader(iter(chunks[pos]),
                                io.READ_CHUNK_SIZE, headers)
        part_iter = reader.get_iter()
        if not part_iter:
            raise exc.OioException("Error while downloading")
        for part in part_iter:
            for piece in part['iter']:
                total_bytes += len(piece)
                yield piece
def test_read_range(self):
    """A satisfiable byte range returns one part with the expected
    start/end offsets and length, using a single connection.
    """
    test_data = (b'1024' * 1024)[:-10]
    meta_chunk = self.meta_chunk()
    meta_start = 1
    meta_end = 4
    part_data = test_data[meta_start:meta_end + 1]
    # headers carried by the fake 206 responses
    resp_headers = {
        'Content-Length': str(len(part_data)),
        'Content-Type': 'text/plain',
        'Content-Range': 'bytes %s-%s/%s' % (
            meta_start, meta_end, len(test_data))
    }
    responses = [FakeResponse(206, part_data, resp_headers)
                 for _ in range(3)]

    def get_response(req):
        return responses.pop(0) if responses else FakeResponse(404)

    # request headers handed to the reader
    headers = {'Range': 'bytes=%s-%s' % (meta_start, meta_end)}
    data = b''
    parts = []
    with set_http_requests(get_response) as conn_record:
        reader = io.ChunkReader(iter(meta_chunk), None, headers)
        for part in reader.get_iter():
            parts.append(part)
            for d in part['iter']:
                data += d
    self.assertEqual(len(parts), 1)
    self.assertEqual(parts[0]['start'], 1)
    self.assertEqual(parts[0]['end'], 4)
    self.assertEqual(len(part_data), len(data))
    self.assertEqual(len(conn_record), 1)