示例#1
0
        def app_iterator(response):
            segmenter = ZfecSegmenter(_min_segments, _max_segments)
            sent = 0
            try:
                for segments in chain([first_segments], retrieved):
                    segment_numbers = segments.keys()
                    encoded_segments = list()
                    zfec_padding_size = None

                    for segment_number in segment_numbers:
                        encoded_segment, zfec_padding_size = \
                                segments[segment_number]
                        encoded_segments.append(encoded_segment)

                    data_list = segmenter.decode(encoded_segments,
                                                 segment_numbers,
                                                 zfec_padding_size)

                    for data in data_list:
                        yield data
                        sent += len(data)

            except RetrieveFailedError, instance:
                self._log.error('retrieve failed: {0} {1}'.format(
                    description, instance))
                self._stats["retrieves"] -= 1
                response.status_int = 503
                return
示例#2
0
        def app_iterator(response):
            segmenter = ZfecSegmenter( _min_segments, _max_segments)
            sent = 0
            try:
                for segments in chain([first_segments], retrieved):
                    segment_numbers = segments.keys()
                    encoded_segments = list()
                    zfec_padding_size = None

                    for segment_number in segment_numbers:
                        encoded_segment, zfec_padding_size = \
                                segments[segment_number]
                        encoded_segments.append(encoded_segment)

                    data_list = segmenter.decode(
                        encoded_segments,
                        segment_numbers,
                        zfec_padding_size
                    )

                    for data in data_list:
                        yield data
                        sent += len(data)

            except RetrieveFailedError, instance:
                self._log.error('retrieve failed: {0} {1}'.format(
                    description, instance
                ))
                self._stats["retrieves"] -= 1
                response.status_int = 503
                return
示例#3
0
    def test_padded_segment(self):
        """test a segment that needs padding

        Encode data one byte short of a full slice, decode from a random
        minimal subset of the segments, and verify the round trip.
        """
        segment_size = incoming_slice_size - 1
        test_data = os.urandom(segment_size)
        segmenter = ZfecSegmenter(_min_segments, _num_segments)

        padding_size = segmenter.padding_size(test_data)
        encoded_segments = segmenter.encode(block_generator(test_data))

        segment_numbers = range(1, _num_segments + 1)

        # any _min_segments of the encoded segments must be enough to decode
        test_segment_numbers = random.sample(segment_numbers, _min_segments)
        test_segments = [encoded_segments[n - 1] for n in test_segment_numbers]

        decoded_segments = segmenter.decode(test_segments,
                                            test_segment_numbers, padding_size)

        # assertEqual reports both values on failure, unlike
        # assertTrue(a == b, msg) which only showed the length
        self.assertEqual("".join(decoded_segments), test_data)
    def test_padded_segment(self):
        """test a segment that needs padding

        Round-trips data sized one byte under a full slice through
        encode/decode using a random minimal segment subset.
        """
        segment_size = incoming_slice_size - 1
        test_data = os.urandom(segment_size)
        segmenter = ZfecSegmenter(_min_segments, _num_segments)

        padding_size = segmenter.padding_size(test_data)
        encoded_segments = segmenter.encode(block_generator(test_data))

        segment_numbers = range(1, _num_segments + 1)

        # decoding must work from any _min_segments-sized subset
        test_segment_numbers = random.sample(segment_numbers, _min_segments)
        test_segments = [encoded_segments[n - 1] for n in test_segment_numbers]

        decoded_segments = segmenter.decode(
            test_segments, test_segment_numbers, padding_size
        )

        # assertEqual over assertTrue(a == b): shows a diff on failure
        self.assertEqual("".join(decoded_segments), test_data)
示例#5
0
            conjoined_part,
            user_request_id,
        )

        # NOTE(review): for a non-conjoined archive, record the request on
        # the redis queue -- presumably consumed elsewhere for accounting;
        # confirm against the queue reader.
        if not conjoined_archive:
            queue_entry = \
                redis_queue_entry_tuple(timestamp=timestamp,
                                        collection_id=collection_row["id"],
                                        value=1)
            self._redis_queue.put(("archive_request", queue_entry, ))

        # a background greenlet pumps the request body into data_queue so
        # the loop below can consume it slice by slice
        data_queue = gevent.queue.Queue()
        reader = ReaderGreenlet(req.body_file, data_queue)
        reader.start()

        # one encoded share per data writer; _min_segments needed to decode
        segmenter = ZfecSegmenter(_min_segments, len(data_writers))
        actual_content_length = 0
        file_adler32 = zlib.adler32('')  # running adler32 over the body
        file_md5 = hashlib.md5()  # running md5 over the body
        file_size = 0
        segments = None
        zfec_padding_size = None
        try:
            # pull slices until the reader posts None (end-of-body sentinel)
            while True:
                slice_item = \
                    data_queue.get(block=True, 
                                   timeout=_max_sequence_upload_interval)
                if slice_item is None:
                    break
                actual_content_length += len(slice_item)
                file_adler32 = zlib.adler32(slice_item, file_adler32)
示例#6
0
        # NOTE(review): non-conjoined archives get an "archive_request"
        # entry on the redis queue -- presumably for request accounting;
        # verify against the queue consumer.
        if not conjoined_archive:
            queue_entry = \
                redis_queue_entry_tuple(timestamp=timestamp,
                                        collection_id=collection_row["id"],
                                        value=1)
            self._redis_queue.put((
                "archive_request",
                queue_entry,
            ))

        # background greenlet feeds the request body into data_queue for
        # incremental consumption by the loop below
        data_queue = gevent.queue.Queue()
        reader = ReaderGreenlet(req.body_file, data_queue)
        reader.start()

        # one encoded share per data writer; _min_segments suffice to decode
        segmenter = ZfecSegmenter(_min_segments, len(data_writers))
        actual_content_length = 0
        file_adler32 = zlib.adler32('')  # running adler32 checksum of the body
        file_md5 = hashlib.md5()  # running md5 digest of the body
        file_size = 0
        segments = None
        zfec_padding_size = None
        try:
            # consume body slices until the reader posts None (EOF sentinel)
            while True:
                slice_item = \
                    data_queue.get(block=True,
                                   timeout=_max_sequence_upload_interval)
                if slice_item is None:
                    break
                actual_content_length += len(slice_item)
                file_adler32 = zlib.adler32(slice_item, file_adler32)