Exemplo n.º 1
0
    def test_padded_segment(self):
        """Round-trip a segment whose size forces zfec padding.

        Uses a payload one byte short of ``incoming_slice_size`` so the
        segmenter must pad, then verifies that decoding from a random
        sample of ``_min_segments`` encoded segments reproduces the
        original data exactly.
        """
        segment_size = incoming_slice_size - 1
        test_data = os.urandom(segment_size)
        segmenter = ZfecSegmenter(_min_segments, _num_segments)

        padding_size = segmenter.padding_size(test_data)
        encoded_segments = segmenter.encode(block_generator(test_data))

        # segment numbers are 1-based on the wire
        segment_numbers = range(1, _num_segments + 1)

        # any _min_segments of the _num_segments shares must be enough to decode
        test_segment_numbers = random.sample(segment_numbers, _min_segments)
        test_segments = [encoded_segments[n - 1] for n in test_segment_numbers]

        decoded_segments = segmenter.decode(test_segments,
                                            test_segment_numbers, padding_size)

        # os.urandom returns bytes, so the segments must be joined with a
        # bytes separator; "".join would raise TypeError on Python 3
        decoded_data = b"".join(decoded_segments)
        self.assertEqual(decoded_data, test_data, len(decoded_data))
Exemplo n.º 2
0
    def test_padded_segment(self):
        """Verify encode/decode of a segment that requires padding.

        The payload is deliberately one byte shorter than
        ``incoming_slice_size`` so that ``ZfecSegmenter`` has to pad.
        Decoding a random choice of ``_min_segments`` shares must
        reconstruct the original bytes.
        """
        segment_size = incoming_slice_size - 1
        test_data = os.urandom(segment_size)
        segmenter = ZfecSegmenter(_min_segments, _num_segments)

        padding_size = segmenter.padding_size(test_data)
        encoded_segments = segmenter.encode(block_generator(test_data))

        # 1-based segment numbering, matching the decode() protocol
        segment_numbers = range(1, _num_segments + 1)

        # pick the minimum number of shares needed for reconstruction
        test_segment_numbers = random.sample(segment_numbers, _min_segments)
        test_segments = [encoded_segments[n - 1] for n in test_segment_numbers]

        decoded_segments = segmenter.decode(
            test_segments, test_segment_numbers, padding_size
        )

        # decoded segments are bytes (test_data came from os.urandom), so
        # join with b""; "".join would fail under Python 3
        decoded_data = b"".join(decoded_segments)
        self.assertEqual(decoded_data, test_data, len(decoded_data))
Exemplo n.º 3
0
def _run_test(req_socket, segment_size):
    """Exercise the zfec server round-trip for one segment size.

    Encodes ``segment_size`` random bytes block-by-block via the server's
    ``zfec-encode`` handler, decodes each block back from a random sample
    of ``_min_segments`` shares via ``zfec-decode``, and finally asks
    ``zfec-rebuild-encoded-shares`` to regenerate the shares that were not
    sampled, checking the rebuilt shares against the originals.

    Returns True on success; logs an error and returns False on the first
    server failure or data mismatch.
    """
    log = logging.getLogger("_run_test_{0}".format(segment_size))
    test_data = os.urandom(segment_size)

    # encode: one zfec-encode request per raw block; remember the
    # padding size the server reports alongside the encoded shares
    encoded_blocks = list()
    for raw_block in block_generator(test_data):
        request = {
            "message-type": "zfec-encode",
        }
        reply, reply_data = _contact_server(req_socket, request, [
            raw_block,
        ])
        if reply["result"] != "success":
            log.error("{0} failed {1}".format(request, reply["error-message"]))
            return False
        encoded_blocks.append((
            reply["padding-size"],
            reply_data,
        ))

    # segment numbers are 1-based on the wire
    segment_numbers = range(1, _num_segments + 1)

    # decode must succeed from any _min_segments of the shares
    test_segment_numbers = random.sample(segment_numbers, _min_segments)

    decoded_segments = list()
    for padding_size, encoded_block in encoded_blocks:
        request = {
            "message-type": "zfec-decode",
            "segment-numbers": test_segment_numbers,
            "padding-size": padding_size
        }
        # shares are stored 0-based; segment numbers are 1-based
        test_segments = [encoded_block[n - 1] for n in test_segment_numbers]
        reply, reply_data = _contact_server(req_socket, request, test_segments)
        if reply["result"] != "success":
            log.error("{0} failed {1}".format(request, reply["error-message"]))
            return False
        decoded_segments.append(reply_data[0])

    decoded_data = b"".join(decoded_segments)
    if decoded_data != test_data:
        log.error("decoded data does not match test data")
        return False

    # rebuild: declare a fresh random _min_segments sample "good" and ask
    # the server to regenerate the remaining ("bad") shares from them
    for padding_size, encoded_block in encoded_blocks:
        good_segment_numbers = random.sample(segment_numbers, _min_segments)
        bad_segment_numbers = \
                list(set(segment_numbers) - set(good_segment_numbers))
        request = {
            "message-type": "zfec-rebuild-encoded-shares",
            "segment-numbers": good_segment_numbers,
            "needed-segment-numbers": bad_segment_numbers,
            "padding-size": padding_size
        }
        good_segments = [encoded_block[n - 1] for n in good_segment_numbers]
        reply, reply_data = _contact_server(req_socket, request, good_segments)
        if reply["result"] != "success":
            log.error("{0} failed {1}".format(request, reply))
            return False

        # each rebuilt share must be byte-identical to the original share
        # with the same (1-based) segment number
        zip_object = zip(reply["rebuilt-segment-numbers"], reply_data)
        for segment_num, rebuilt_segment in zip_object:
            if rebuilt_segment != encoded_block[segment_num - 1]:
                log.error("{0} failed rebuilt block mismatch".format(request))
                return False

    return True
Exemplo n.º 4
0
 file_md5 = hashlib.md5()
 file_size = 0
 segments = None
 zfec_padding_size = None
 try:
     while True:
         slice_item = \
             data_queue.get(block=True, 
                            timeout=_max_sequence_upload_interval)
         if slice_item is None:
             break
         actual_content_length += len(slice_item)
         file_adler32 = zlib.adler32(slice_item, file_adler32)
         file_md5.update(slice_item)
         file_size += len(slice_item)
         segments = segmenter.encode(block_generator(slice_item))
         zfec_padding_size = segmenter.padding_size(slice_item)
         if actual_content_length == expected_content_length:
             archiver.archive_final(
                 file_size,
                 file_adler32,
                 file_md5.digest(),
                 segments,
                 zfec_padding_size,
                 _reply_timeout
             )
         else:
             archiver.archive_slice(
                 segments, zfec_padding_size, _reply_timeout
             )
 except gevent.queue.Empty, instance:
Exemplo n.º 5
0
 file_md5 = hashlib.md5()
 file_size = 0
 segments = None
 zfec_padding_size = None
 try:
     while True:
         slice_item = \
             data_queue.get(block=True,
                            timeout=_max_sequence_upload_interval)
         if slice_item is None:
             break
         actual_content_length += len(slice_item)
         file_adler32 = zlib.adler32(slice_item, file_adler32)
         file_md5.update(slice_item)
         file_size += len(slice_item)
         segments = segmenter.encode(block_generator(slice_item))
         zfec_padding_size = segmenter.padding_size(slice_item)
         if actual_content_length == expected_content_length:
             archiver.archive_final(file_size, file_adler32,
                                    file_md5.digest(), segments,
                                    zfec_padding_size, _reply_timeout)
         else:
             archiver.archive_slice(segments, zfec_padding_size,
                                    _reply_timeout)
 except gevent.queue.Empty, instance:
     # Ticket #69 Protection in Web Writer from Slow Uploads
     self._log.error("archive failed: {0} timeout {1}".format(
         description, instance))
     _send_archive_cancel(user_request_id, unified_id, conjoined_part,
                          self._data_writer_clients)
     queue_entry = \