def fetch_stream(chunks, ranges, storage_method, headers=None, **kwargs):
    """
    Yield data bytes for the requested ranges of a content,
    reading each metachunk position in order.

    :param chunks: mapping of metachunk position to the list of chunks
        holding that position's data.
    :param ranges: list of (start, end) byte ranges, or a falsy value to
        read the whole content.
    :param storage_method: kept for interface compatibility
        (not used by this implementation).
    :param headers: optional dict of extra request headers; a 'Range'
        header is set per position when a sub-range must be read.
    :raises exc.UnrecoverableContent: when a position is not found.
    :raises exc.ServiceUnavailable: on any other download error.
    """
    ranges = ranges or [(None, None)]
    # Bugfix: the original assigned into `headers` even when the caller
    # left it as None, raising TypeError as soon as a range was needed.
    headers = headers or {}
    meta_range_list = get_meta_ranges(ranges, chunks)
    for meta_range_dict in meta_range_list:
        for pos in sorted(meta_range_dict.keys()):
            meta_start, meta_end = meta_range_dict[pos]
            if meta_start is not None and meta_end is not None:
                headers['Range'] = http_header_from_ranges(
                    (meta_range_dict[pos], ))
            reader = ChunkReader(iter(chunks[pos]), READ_CHUNK_SIZE,
                                 headers=headers, **kwargs)
            try:
                it = reader.get_iter()
            except exc.NotFound as err:
                raise exc.UnrecoverableContent(
                    "Cannot download position %d: %s" % (pos, err))
            except Exception as err:
                raise exc.ServiceUnavailable(
                    "Error while downloading position %d: %s" % (pos, err))
            for part in it:
                for dat in part['iter']:
                    yield dat
def chunk_get(self, url, **kwargs):
    """
    Fetch a chunk, returning its metadata headers and a data stream.

    :returns: a tuple of (metadata dict, data stream).
    """
    # Reuse the caller's request id when provided, otherwise make one.
    req_id = kwargs.get('req_id') or utils.request_id()
    reader = ChunkReader([{'url': url}], READ_BUFFER_SIZE,
                         {'X-oio-req-id': req_id})
    # The request must be issued now so that the response headers
    # are available below.
    stream = reader.stream()
    meta = extract_headers_meta(reader.headers)
    return meta, stream
def test_reader_buf_size(self):
    """Check that the reader re-buffers its input into 8-byte chunks."""
    source = FakeSource(
        ["1234", "abcd", "123", "4a", "bcd1234abcd1234a", "b"])
    reader = ChunkReader(None, 8, {})
    chunk = {}
    data = list(reader._create_iter(chunk, source))
    # 22 input bytes -> three full 8-byte buffers plus a 6-byte tail
    self.assertEqual(data, ["1234abcd", "1234abcd", "1234abcd", "1234ab"])
def test_reader_buf_size(self):
    """Verify input fragments are regrouped into buffers of 8 bytes."""
    reader = ChunkReader(None, 8, {})
    pieces = ['1234', 'abcd', '123', '4a', 'bcd1234abcd1234a', 'b']
    output = list(reader._create_iter({}, FakeSource(pieces)))
    # Every emitted buffer is full except the final remainder.
    self.assertEqual(output, ['1234abcd', '1234abcd', '1234abcd', '1234ab'])
def chunk_get(self, url, **kwargs):
    """
    Download a chunk and expose its metadata.

    :returns: a tuple of (metadata dict, data stream).
    """
    req_id = kwargs.get('req_id')
    if not req_id:
        req_id = utils.request_id()
    request_headers = {'X-oio-req-id': req_id}
    reader = ChunkReader([{'url': url}], READ_BUFFER_SIZE, request_headers)
    # Trigger the request immediately: the response headers are needed
    # to build the metadata dictionary.
    stream = reader.stream()
    return extract_headers_meta(reader.headers), stream
def chunk_get(self, url, check_headers=True, **kwargs):
    """
    :keyword check_headers: when True (the default), raise FaultyChunk
        if a mandatory response header is missing.
    :returns: a tuple with a dictionary of chunk metadata and a stream
        to the chunk's data.
    """
    resolved = self.resolve_url(url)
    reader = ChunkReader([{'url': resolved}], READ_BUFFER_SIZE, **kwargs)
    # Start the download now, so the response headers can be read below.
    stream = reader.stream()
    meta = extract_headers_meta(reader.headers, check=check_headers)
    return meta, stream
def test_reader_buf_resume(self):
    """The reader must resume from a fallback source after a failure."""
    chunk = {}
    reader = ChunkReader(None, 8, {})
    # First source dies mid-stream (None marks the failure point).
    failing = FakeSource(['1234', 'abcd', '123', None])
    it = reader._create_iter(chunk, failing)
    # Second source supplies the missing bytes on recovery.
    recovery = FakeSource(['5678efgh'])
    with patch.object(reader, '_get_source', lambda: (recovery, chunk)):
        data = list(it)
    self.assertEqual(data, ['1234abcd', '5678efgh'])
def test_reader_buf_resume(self):
    """Reading continues on a recovery source after the first one fails."""
    chunk = {}
    reader = ChunkReader(None, 8, {})
    # None in the fragment list makes this source fail mid-read.
    broken_source = FakeSource(["1234", "abcd", "123", None])
    it = reader._create_iter(chunk, broken_source)
    backup_source = FakeSource(["5678efgh"])
    with patch.object(reader, "_get_source",
                      lambda: (backup_source, chunk)):
        collected = list(it)
    self.assertEqual(collected, ["1234abcd", "5678efgh"])
def read_meta_chunk(self, storage_method, meta_chunk, headers=None):
    """
    Read a metachunk and wrap its data in an HTTP 200 Response.

    :param storage_method: kept for interface compatibility
        (not used by this implementation).
    :param meta_chunk: the list of chunks holding this metachunk's data.
    :param headers: optional dict of request headers to pass to the reader.
    :returns: a Response streaming the metachunk's bytes with status 200.
    """
    # Bugfix: the default used to be a shared mutable dict ({}); the
    # reader may write entries into it (e.g. a 'Range' header), which
    # would then leak into every subsequent call.
    if headers is None:
        headers = {}
    handler = ChunkReader(meta_chunk, None, headers)
    stream = handler.get_iter()
    return Response(part_iter_to_bytes_iter(stream), 200)
def test_recover(self):
    """Check Range header rewriting after a partial read of N bytes."""
    # no initial range: recovery produces an open-ended range
    rdr = ChunkReader(None, None, {})
    rdr.recover(10)
    self.assertEqual(rdr.request_headers['Range'], 'bytes=10-')
    # bounded range: start is shifted, end is kept
    rdr = ChunkReader(None, None, {'Range': 'bytes=21-40'})
    rdr.recover(10)
    self.assertEqual(rdr.request_headers['Range'], 'bytes=31-40')
    # skipping past the end of the range is unsatisfiable
    self.assertRaises(exc.UnsatisfiableRange, rdr.recover, 100)
    # skipping exactly to the end leaves an empty range
    self.assertRaises(exc.EmptyByteRange, rdr.recover, 10)
    # open-ended (prefix) range: only the start moves
    rdr = ChunkReader(None, None, {'Range': 'bytes=11-'})
    rdr.recover(10)
    self.assertEqual(rdr.request_headers['Range'], 'bytes=21-')
    # suffix range: the suffix length shrinks
    rdr = ChunkReader(None, None, {'Range': 'bytes=-50'})
    rdr.recover(10)
    self.assertEqual(rdr.request_headers['Range'], 'bytes=-40')
    # one-byte range: any skip empties it
    rdr = ChunkReader(None, None, {'Range': 'bytes=0-0'})
    self.assertRaises(exc.EmptyByteRange, rdr.recover, 1)
def test_recover(self):
    """recover(n) must rewrite the Range header to skip n read bytes."""
    # without any range, recovery installs an open-ended one
    chunk_reader = ChunkReader(None, None, {})
    chunk_reader.recover(10)
    self.assertEqual(chunk_reader.request_headers["Range"], "bytes=10-")
    # bounded range: only the lower bound advances
    chunk_reader = ChunkReader(None, None, {"Range": "bytes=21-40"})
    chunk_reader.recover(10)
    self.assertEqual(chunk_reader.request_headers["Range"], "bytes=31-40")
    # asking past the upper bound cannot be satisfied
    self.assertRaises(exc.UnsatisfiableRange, chunk_reader.recover, 100)
    # asking for the remaining 0 bytes yields an empty range
    self.assertRaises(exc.EmptyByteRange, chunk_reader.recover, 10)
    # prefix (open-ended) range
    chunk_reader = ChunkReader(None, None, {"Range": "bytes=11-"})
    chunk_reader.recover(10)
    self.assertEqual(chunk_reader.request_headers["Range"], "bytes=21-")
    # suffix range: its length is reduced by the bytes already read
    chunk_reader = ChunkReader(None, None, {"Range": "bytes=-50"})
    chunk_reader.recover(10)
    self.assertEqual(chunk_reader.request_headers["Range"], "bytes=-40")
    # a single-byte range becomes empty after skipping one byte
    chunk_reader = ChunkReader(None, None, {"Range": "bytes=0-0"})
    self.assertRaises(exc.EmptyByteRange, chunk_reader.recover, 1)
def read_meta_chunk(self, storage_method, meta_chunk):
    """
    Read a metachunk and return its bytes wrapped in an HTTP 200 Response.
    """
    # No extra request headers are needed for a plain read.
    reader = ChunkReader(meta_chunk, {})
    body = part_iter_to_bytes_iter(reader.get_iter())
    return Response(body, 200)