Example #1
0
def fetch_stream(chunks, ranges, storage_method, headers=None, **kwargs):
    """
    Stream the data of a (replicated) object, position by position.

    :param chunks: mapping of metachunk position to the list of chunks
        holding that position's data
    :param ranges: list of (start, end) byte ranges to fetch, or a falsy
        value to fetch the whole object
    :param storage_method: storage method of the object (unused here, kept
        for interface symmetry with fetch_stream_ec)
    :param headers: optional base HTTP headers forwarded to each request
    :returns: a generator yielding the object's data chunks
    :raises exc.UnrecoverableContent: if a position cannot be found
    :raises exc.ServiceUnavailable: on any other download error
    """
    ranges = ranges or [(None, None)]
    meta_range_list = get_meta_ranges(ranges, chunks)

    for meta_range_dict in meta_range_list:
        for pos in sorted(meta_range_dict.keys()):
            meta_start, meta_end = meta_range_dict[pos]
            # Build a fresh copy for each position: the original code
            # mutated the caller's dict (leaking a stale 'Range' header
            # into subsequent positions) and crashed with a TypeError
            # when headers was None and a range was requested.
            req_headers = dict(headers) if headers else {}
            if meta_start is not None and meta_end is not None:
                req_headers['Range'] = http_header_from_ranges(
                    (meta_range_dict[pos], ))
            reader = ChunkReader(iter(chunks[pos]),
                                 READ_CHUNK_SIZE,
                                 headers=req_headers,
                                 **kwargs)
            try:
                it = reader.get_iter()
            except exc.NotFound as err:
                raise exc.UnrecoverableContent(
                    "Cannot download position %d: %s" % (pos, err))
            except Exception as err:
                raise exc.ServiceUnavailable(
                    "Error while downloading position %d: %s" % (pos, err))
            for part in it:
                for dat in part['iter']:
                    yield dat
Example #2
0
File: ec.py  Project: lhllacp/oio-sds
    def rebuild(self):
        """
        Rebuild a missing chunk from the other chunks of the metachunk.

        Fetches all available chunks in parallel, groups them by their
        advertised chunk size, assumes the majority size is the correct
        one, and feeds at least ``ec_nb_data`` same-size sources to the
        EC decoder.

        :returns: a tuple (assumed_chunk_size, rebuild_iter)
        :raises exceptions.UnrecoverableContent: if fewer than
            ``ec_nb_data`` usable sources are available
        """
        pile = GreenPile(len(self.meta_chunk))

        nb_data = self.storage_method.ec_nb_data

        headers = {}
        for chunk in self.meta_chunk:
            pile.spawn(self._get_response, chunk, headers)

        # Sort all responses according to the chunk size
        total_resps = 0
        resps_by_size = dict()
        resps_without_chunk_size = list()
        for resp in pile:
            if not resp:
                continue
            chunk_size = int_value(
                resp.getheader(CHUNK_HEADERS['chunk_size'], None), None)
            if chunk_size is None:
                self.logger.warning('Missing chunk size')
                resps_without_chunk_size.append(resp)
                continue
            total_resps += 1
            resps_by_size.setdefault(chunk_size, list()).append(resp)
        # Select the chunk with the majority chunk size.
        # NOTE: the loop variable is named size_resps (not resps) to avoid
        # shadowing the accumulator selected below.
        resps = None
        max_resps = 0
        assumed_chunk_size = None
        for chunk_size, size_resps in resps_by_size.items():
            nb_resp = len(size_resps)
            if nb_resp > max_resps:
                max_resps = nb_resp
                assumed_chunk_size = chunk_size
        if assumed_chunk_size is None:
            self.logger.warning(
                'No chunk available with chunk size information')
            resps = list()
        else:
            resps = resps_by_size[assumed_chunk_size]
            if max_resps != total_resps:
                self.logger.warning(
                    '%d/%d chunks are not the same size as others (%d), '
                    'they should be removed',
                    total_resps - max_resps, total_resps, assumed_chunk_size)
        # Check the number of chunks available
        if max_resps < nb_data:
            # Add the chunks without size information
            # assuming they are the correct size
            resps = resps + resps_without_chunk_size
            if len(resps) < nb_data:
                self.logger.error(
                    'Unable to read enough valid sources to rebuild')
                raise exceptions.UnrecoverableContent(
                    'Not enough valid sources to rebuild')
            self.logger.warning(
                'Use chunk(s) without size information to rebuild a chunk')

        rebuild_iter = self._make_rebuild_iter(resps[:nb_data])
        return assumed_chunk_size, rebuild_iter
Example #3
0
File: ec.py  Project: lanweichang/oio-sds
    def rebuild(self):
        """
        Rebuild a missing chunk from the other chunks of the metachunk.

        Fetches chunk responses in parallel and stops as soon as
        ``ec_nb_data`` valid sources have been collected.

        :returns: an iterator over the rebuilt chunk's data
        :raises exc.UnrecoverableContent: if fewer than ``ec_nb_data``
            valid sources are available
        """
        pile = GreenPile(len(self.meta_chunk))

        nb_data = self.storage_method.ec_nb_data

        headers = {}
        for chunk in self.meta_chunk:
            pile.spawn(self._get_response, chunk, headers)

        # Collect valid responses, stopping early once we have enough.
        resps = []
        for resp in pile:
            if resp:
                resps.append(resp)
            if len(resps) >= nb_data:
                break
        # The loop breaks exactly when nb_data sources were gathered,
        # so a shorter list means the pile was exhausted without enough.
        if len(resps) < nb_data:
            logger.error('Unable to read enough valid sources to rebuild')
            raise exc.UnrecoverableContent('Unable to rebuild chunk')

        return self._make_rebuild_iter(resps[:nb_data])
Example #4
0
def fetch_stream_ec(chunks, ranges, storage_method, **kwargs):
    """
    Stream the data of an erasure-coded object, position by position.

    :param chunks: mapping of metachunk position to the list of chunks
        holding that position's fragments
    :param ranges: list of (start, end) byte ranges to fetch, or a falsy
        value to fetch the whole object
    :param storage_method: EC storage method of the object
    :returns: a generator yielding the object's data
    :raises exc.UnrecoverableContent: if a position cannot be found
    :raises exc.ServiceUnavailable: on any other download error
    """
    for meta_range_dict in get_meta_ranges(ranges or [(None, None)], chunks):
        for pos in sorted(meta_range_dict):
            meta_start, meta_end = meta_range_dict[pos]
            handler = ECChunkDownloadHandler(
                storage_method, chunks[pos], meta_start, meta_end, **kwargs)
            try:
                stream = handler.get_stream()
            except exc.NotFound as err:
                raise exc.UnrecoverableContent(
                    "Cannot download position %d: %s" % (pos, err))
            except Exception as err:
                raise exc.ServiceUnavailable(
                    "Error while downloading position %d: %s" % (pos, err))
            try:
                for part_info in stream:
                    for dat in part_info['iter']:
                        yield dat
            finally:
                # Close even if the consumer abandons the generator early,
                # so the underlying connections are released.
                stream.close()