Example #1
0
    def get_stream(self):
        """
        Spawn concurrent fragment readers and assemble them into an EC
        decoding stream.

        :returns: a started `ECStream` ready to be iterated.
        :raises exceptions.ServiceUnavailable: when fewer than
            `ec_nb_data` readers answered with a success status.
        """
        range_infos = self._get_range_infos()
        chunk_source = iter(self.chunks)

        candidates = []
        # Readers are green threads: a GreenPool bounds concurrency to the
        # number of data fragments we need, and a GreenPile collects results.
        with green.ContextPool(self.storage_method.ec_nb_data) as pool:
            pile = GreenPile(pool)
            for _ in range(self.storage_method.ec_nb_data):
                pile.spawn(self._get_fragment, chunk_source, range_infos,
                           self.storage_method)
            for reader, parts_iter in pile:
                # TODO log failures?
                if reader.status in (200, 206):
                    candidates.append((reader, parts_iter))

        # With EC we need at least ec_nb_data valid readers to decode.
        if len(candidates) < self.storage_method.ec_nb_data:
            raise exceptions.ServiceUnavailable(
                'Not enough valid sources to read (%d/%d)' % (
                    len(candidates), self.storage_method.ec_nb_data))

        # All readers should return the same Content-Length,
        # so just take the headers from one of them.
        resp_headers = HeadersDict(candidates[0][0].headers)
        fragment_length = int(resp_headers.get('Content-Length'))
        stream = ECStream(self.storage_method,
                          [parts for _, parts in candidates],
                          range_infos, self.meta_length, fragment_length)
        # Start the stream before handing it to the caller.
        stream.start()
        return stream
Example #2
0
def fetch_stream(chunks, ranges, storage_method, headers=None, **kwargs):
    """
    Yield object data read from replicated chunks.

    :param chunks: mapping of metachunk position to candidate chunk lists
    :param ranges: list of (start, end) byte ranges, or None/empty to read
        the whole object
    :param storage_method: kept for signature parity with fetch_stream_ec
        (not used for replicated content)
    :param headers: optional base headers sent with every chunk request;
        the caller's dict is never modified
    :raises exc.UnrecoverableContent: when a position cannot be found
    :raises exc.ServiceUnavailable: on any other download error
    """
    ranges = ranges or [(None, None)]
    meta_range_list = get_meta_ranges(ranges, chunks)

    for meta_range_dict in meta_range_list:
        for pos in sorted(meta_range_dict.keys()):
            meta_start, meta_end = meta_range_dict[pos]
            # Build a fresh copy per position: this works when headers is
            # None (the original crashed on headers['Range']), never mutates
            # the caller's dict, and prevents a 'Range' set for one position
            # from leaking into the next one.
            req_headers = dict(headers) if headers else {}
            if meta_start is not None and meta_end is not None:
                req_headers['Range'] = http_header_from_ranges(
                    (meta_range_dict[pos], ))
            reader = ChunkReader(iter(chunks[pos]),
                                 READ_CHUNK_SIZE,
                                 headers=req_headers,
                                 **kwargs)
            try:
                it = reader.get_iter()
            except exc.NotFound as err:
                raise exc.UnrecoverableContent(
                    "Cannot download position %d: %s" % (pos, err))
            except Exception as err:
                raise exc.ServiceUnavailable(
                    "Error while downloading position %d: %s" % (pos, err))
            for part in it:
                for dat in part['iter']:
                    yield dat
Example #3
0
 def get_iter(self):
     """
     Return an iterator over the chunk's data.

     When no source could be obtained, raise an exception summarizing the
     per-chunk errors collected so far.
     """
     source, chunk = self._get_source()
     if source:
         return self._get_iter(chunk, source)
     # No usable source: look at the errors gathered from each chunk.
     errors = group_chunk_errors(self._resp_by_chunk.items())
     if len(errors) != 1:
         # Mixed error types: fall back to a generic 503.
         raise exc.ServiceUnavailable("unavailable chunks: %s" %
                                      self._resp_by_chunk)
     # All errors share the same status: raise the matching exception type.
     status, chunks = errors.popitem()
     raise exc.from_status(status[0], "%s %s" % (status[1], chunks))
Example #4
0
 def test_GET_service_unavailable(self):
     """
     Check that a stream failing mid-read surfaces as an HTTP 503 even
     though the response headers were already sent with a 200.
     """
     req = Request.blank('/v1/a/c/o')
     ret_value = ({
         'hash': 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
         'mtime': 0,
         'length': 10,
         'deleted': False,
         'version': 42,
     }, fake_stream(10, exc.ServiceUnavailable('missing chunks')))
     self.storage.object_fetch = Mock(return_value=ret_value)
     resp = req.get_response(self.app)
     # Everything seems ok,
     self.assertEqual(resp.status_int, 200)
     # but an exception is raised when trying to read response data.
     try:
         for _ in resp.app_iter:
             pass
     except swob.HTTPException as err:
         self.assertEqual(503, err.status_int)
     else:
         # Original test passed silently when nothing was raised,
         # which defeats its purpose: fail explicitly in that case.
         self.fail("HTTPException was not raised while reading body")
Example #5
0
def fetch_stream_ec(chunks, ranges, storage_method, **kwargs):
    """
    Yield object data rebuilt from erasure-coded chunks.

    :param chunks: mapping of metachunk position to candidate chunk lists
    :param ranges: list of (start, end) byte ranges, or None/empty to read
        the whole object
    :param storage_method: the EC storage method of the content
    :raises exc.UnrecoverableContent: when a position cannot be found
    :raises exc.ServiceUnavailable: on any other download error
    """
    if not ranges:
        ranges = [(None, None)]
    for meta_range_dict in get_meta_ranges(ranges, chunks):
        for pos in sorted(meta_range_dict.keys()):
            meta_start, meta_end = meta_range_dict[pos]
            handler = ECChunkDownloadHandler(
                storage_method, chunks[pos], meta_start, meta_end, **kwargs)
            try:
                ec_stream = handler.get_stream()
            except exc.NotFound as err:
                raise exc.UnrecoverableContent(
                    "Cannot download position %d: %s" % (pos, err))
            except Exception as err:
                raise exc.ServiceUnavailable(
                    "Error while downloading position %d: %s" % (pos, err))
            try:
                for part in ec_stream:
                    for dat in part['iter']:
                        yield dat
            finally:
                # This must be done in a finally block to handle the case
                # when the reader does not read until the end of the stream.
                ec_stream.close()