Code example #1
0
    def test_padded_segment(self):
        """Verify zfec round-trip for data that requires padding.

        Encodes data one byte short of a full slice, decodes from a
        random minimal subset of the encoded segments, and checks the
        original bytes are recovered.
        """
        # One byte short of a full slice forces non-zero padding.
        test_data = os.urandom(incoming_slice_size - 1)
        segmenter = ZfecSegmenter(_min_segments, _num_segments)

        padding_size = segmenter.padding_size(test_data)
        encoded_segments = segmenter.encode(block_generator(test_data))

        # Segment numbers are 1-based; any _min_segments of them
        # must be enough to reconstruct the data.
        chosen_numbers = random.sample(
            range(1, _num_segments + 1), _min_segments
        )
        chosen_segments = [
            encoded_segments[number - 1] for number in chosen_numbers
        ]

        decoded_segments = segmenter.decode(
            chosen_segments, chosen_numbers, padding_size
        )

        restored_data = "".join(decoded_segments)
        self.assertTrue(restored_data == test_data, len(restored_data))
Code example #2
0
    def test_padded_segment(self):
        """test a segment that needs padding

        Encodes data one byte short of a full slice (so zfec must pad),
        decodes from a random minimal subset of the encoded segments,
        and checks that the original data is recovered exactly.
        """
        # One byte short of a full slice forces non-zero padding.
        segment_size = incoming_slice_size - 1
        test_data = os.urandom(segment_size)
        segmenter = ZfecSegmenter(_min_segments, _num_segments)

        padding_size = segmenter.padding_size(test_data)
        encoded_segments = segmenter.encode(block_generator(test_data))

        # Segment numbers are 1-based.
        segment_numbers = range(1, _num_segments + 1)

        # Any _min_segments of the _num_segments segments must suffice.
        test_segment_numbers = random.sample(segment_numbers, _min_segments)
        test_segments = [encoded_segments[n - 1] for n in test_segment_numbers]

        decoded_segments = segmenter.decode(
            test_segments, test_segment_numbers, padding_size
        )

        decoded_data = "".join(decoded_segments)
        # assertEqual reports the actual mismatch on failure;
        # assertTrue(a == b, len(a)) only showed a length, hiding
        # what actually differed.
        self.assertEqual(decoded_data, test_data)
Code example #3
0
File: application.py  Project: HackLinux/nimbus.io
 # Accumulators for the upload currently being archived.
 file_size = 0
 segments = None
 zfec_padding_size = None
 try:
     while True:
         # Block for the next uploaded slice; the timeout guards
         # against stalled uploads (see Ticket #69 below).
         slice_item = \
             data_queue.get(block=True, 
                            timeout=_max_sequence_upload_interval)
         # None is the sentinel marking the end of the upload stream.
         if slice_item is None:
             break
         # Keep running length, adler32 and md5 in lockstep with the
         # bytes consumed so the final values cover the whole upload.
         actual_content_length += len(slice_item)
         file_adler32 = zlib.adler32(slice_item, file_adler32)
         file_md5.update(slice_item)
         file_size += len(slice_item)
         # zfec-encode this slice and record how much padding it needed.
         segments = segmenter.encode(block_generator(slice_item))
         zfec_padding_size = segmenter.padding_size(slice_item)
         if actual_content_length == expected_content_length:
             # Last slice: archive it together with the file totals.
             archiver.archive_final(
                 file_size,
                 file_adler32,
                 file_md5.digest(),
                 segments,
                 zfec_padding_size,
                 _reply_timeout
             )
         else:
             # Intermediate slice: archive the segments only.
             archiver.archive_slice(
                 segments, zfec_padding_size, _reply_timeout
             )
 # Python 2 except syntax; raised when the queue get() times out.
 except gevent.queue.Empty, instance:
     # Ticket #69 Protection in Web Writer from Slow Uploads
Code example #4
0
 # Accumulators for the upload currently being archived.
 file_size = 0
 segments = None
 zfec_padding_size = None
 try:
     while True:
         # Block for the next uploaded slice; the timeout guards
         # against stalled uploads (see Ticket #69 below).
         slice_item = \
             data_queue.get(block=True,
                            timeout=_max_sequence_upload_interval)
         # None is the sentinel marking the end of the upload stream.
         if slice_item is None:
             break
         # Keep running length, adler32 and md5 in lockstep with the
         # bytes consumed so the final values cover the whole upload.
         actual_content_length += len(slice_item)
         file_adler32 = zlib.adler32(slice_item, file_adler32)
         file_md5.update(slice_item)
         file_size += len(slice_item)
         # zfec-encode this slice and record how much padding it needed.
         segments = segmenter.encode(block_generator(slice_item))
         zfec_padding_size = segmenter.padding_size(slice_item)
         if actual_content_length == expected_content_length:
             # Last slice: archive it together with the file totals.
             archiver.archive_final(file_size, file_adler32,
                                    file_md5.digest(), segments,
                                    zfec_padding_size, _reply_timeout)
         else:
             # Intermediate slice: archive the segments only.
             archiver.archive_slice(segments, zfec_padding_size,
                                    _reply_timeout)
 # Python 2 except syntax; raised when the queue get() times out.
 except gevent.queue.Empty, instance:
     # Ticket #69 Protection in Web Writer from Slow Uploads
     self._log.error("archive failed: {0} timeout {1}".format(
         description, instance))
     # Tell the data writers to abandon this archive.
     _send_archive_cancel(user_request_id, unified_id, conjoined_part,
                          self._data_writer_clients)
     # NOTE(review): statement continues beyond this excerpt.
     queue_entry = \
         redis_queue_entry_tuple(timestamp=timestamp,