Example 1
    def _copy_part(
        self,
        large_file_id,
        part_copy_source,
        part_number,
        large_file_upload_state,
        finished_parts,
        destination_encryption: Optional[EncryptionSetting],
        source_encryption: Optional[EncryptionSetting],
    ):
        """
        Copy a file part to a started large file.

        :param str large_file_id: a large file ID
        :param b2sdk.v2.CopySource part_copy_source: copy source that represents a range (not necessarily a whole file)
        :param int part_number: a part number
        :param b2sdk.v2.LargeFileUploadState large_file_upload_state: state object for progress reporting
                                                                      on large file upload
        :param dict,None finished_parts: dictionary of known finished parts, keys are part numbers,
                                         values are instances of :class:`~b2sdk.v2.Part`
        :param b2sdk.v2.EncryptionSetting destination_encryption: encryption settings for the destination
                        (``None`` if unknown)
        :param b2sdk.v2.EncryptionSetting source_encryption: encryption settings for the source
                        (``None`` if unknown)
        """
        # b2_copy_part doesn't need SSE-B2; large-file encryption is decided when b2_start_large_file is called.
        if destination_encryption is not None and destination_encryption.mode == EncryptionMode.SSE_B2:
            destination_encryption = None

        # Check if this part was uploaded before
        if finished_parts is not None and part_number in finished_parts:
            # Report this part finished
            part = finished_parts[part_number]
            large_file_upload_state.update_part_bytes(part.content_length)

            # Return SHA1 hash
            return {'contentSha1': part.content_sha1}

        # If another part has already had an error, there's no point in
        # copying this part.
        if large_file_upload_state.has_error():
            raise AlreadyFailed(large_file_upload_state.get_error_message())

        response = self.services.session.copy_part(
            part_copy_source.file_id,
            large_file_id,
            part_number,
            bytes_range=part_copy_source.get_bytes_range(),
            destination_server_side_encryption=destination_encryption,
            source_server_side_encryption=source_encryption,
        )
        large_file_upload_state.update_part_bytes(response['contentLength'])
        return response
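
A note on the resume logic shared by both the copy and upload paths: a part that already appears in finished_parts is never transferred again; only its byte count is reported to the progress state and its SHA1 is returned. A minimal, runnable sketch of that pattern, using hypothetical stand-ins (Part, UploadState) rather than the real b2sdk classes:

from collections import namedtuple

Part = namedtuple('Part', ['part_number', 'content_sha1', 'content_length'])

class UploadState:
    """Hypothetical stand-in for LargeFileUploadState's byte accounting."""

    def __init__(self):
        self.bytes_completed = 0

    def update_part_bytes(self, byte_count):
        self.bytes_completed += byte_count

def copy_part_sketch(part_number, finished_parts, state):
    # Skip parts that a previous run already finished; report their size only.
    if finished_parts is not None and part_number in finished_parts:
        part = finished_parts[part_number]
        state.update_part_bytes(part.content_length)
        return {'contentSha1': part.content_sha1}
    raise NotImplementedError('a real implementation would call the B2 API here')

state = UploadState()
finished = {1: Part(1, 'da39a3ee5e6b4b0d3255bfef95601890afd80709', 1024)}
assert copy_part_sketch(1, finished, state)['contentSha1'] == 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
assert state.bytes_completed == 1024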
Example 2
    def _copy_part(
        self,
        large_file_id,
        part_copy_source,
        part_number,
        large_file_upload_state,
        finished_parts=None
    ):
        """
        Copy a file part to a started large file.

        :param str large_file_id: a large file ID
        :param b2sdk.v1.CopySourcePart part_copy_source: wrapper for a copy source that represents a part range
        :param int part_number: a part number
        :param b2sdk.v1.LargeFileUploadState large_file_upload_state: state object for progress reporting
                                                                      on large file upload
        :param dict,None finished_parts: dictionary of known finished parts, keys are part numbers,
                                         values are instances of :class:`~b2sdk.v1.Part`
        """

        # Check if this part was uploaded before
        if finished_parts is not None and part_number in finished_parts:
            # Report this part finished
            part = finished_parts[part_number]
            large_file_upload_state.update_part_bytes(part.content_length)

            # Return SHA1 hash
            return {'contentSha1': part.content_sha1}

        # If another part has already had an error, there's no point in
        # copying this part.
        if large_file_upload_state.has_error():
            raise AlreadyFailed(large_file_upload_state.get_error_message())

        response = self.services.session.copy_part(
            part_copy_source.file_id,
            large_file_id,
            part_number,
            bytes_range=part_copy_source.get_bytes_range(),
        )
        large_file_upload_state.update_part_bytes(response['contentLength'])
        return response
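
Example 2 predates server-side encryption support, but the copy is still restricted to a sub-range of the source via part_copy_source.get_bytes_range(). As an illustration only, a hypothetical wrapper with that method might look like the sketch below; the (inclusive start, inclusive end) tuple convention is an assumption, not taken from b2sdk:

class CopySourceSketch:
    """Hypothetical copy-source wrapper exposing a byte range."""

    def __init__(self, file_id, offset, length):
        self.file_id = file_id
        self.offset = offset
        self.length = length

    def get_bytes_range(self):
        # (first byte, last byte), both inclusive, HTTP Range style -- an assumption.
        return (self.offset, self.offset + self.length - 1)

part = CopySourceSketch('source-file-id', offset=0, length=100 * 1024 * 1024)
assert part.get_bytes_range() == (0, 100 * 1024 * 1024 - 1)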
Example 3
    def test_already_failed_exception(self):
        try:
            raise AlreadyFailed('foo')
        except AlreadyFailed as e:
            assert str(e) == 'Already failed: foo', str(e)
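
The test pins down the message format of AlreadyFailed. A minimal stand-in with the same observable behavior (the real class comes from b2sdk's exception hierarchy) could be:

class AlreadyFailed(Exception):
    """Hypothetical stand-in: raised when a sibling part has already failed."""

    def __str__(self):
        return 'Already failed: %s' % (self.args[0],)

try:
    raise AlreadyFailed('foo')
except AlreadyFailed as e:
    assert str(e) == 'Already failed: foo'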
Example 4

    def _upload_part(
        self,
        bucket_id,
        file_id,
        part_upload_source,
        part_number,
        large_file_upload_state,
        finished_parts,
        encryption: EncryptionSetting,
    ):
        """
        Upload a file part to a started large file.

        :param str bucket_id: a bucket ID
        :param str file_id: a large file ID
        :param b2sdk.v1.UploadSourcePart part_upload_source: wrapper for an upload source that reads only the required range
        :param int part_number: a part number
        :param b2sdk.v1.LargeFileUploadState large_file_upload_state: state object for progress reporting
                                                                      on large file upload
        :param dict,None finished_parts: dictionary of known finished parts, keys are part numbers,
                                         values are instances of :class:`~b2sdk.v1.Part`
        :param b2sdk.v1.EncryptionSetting encryption: encryption setting (``None`` if unknown)
        """
        assert encryption is None or encryption.mode in (EncryptionMode.SSE_B2,)

        # b2_upload_part doesn't need SSE-B2; large-file encryption is decided when b2_start_large_file is called.
        if encryption is not None and encryption.mode == EncryptionMode.SSE_B2:
            encryption = None

        # Check if this part was uploaded before
        if finished_parts is not None and part_number in finished_parts:
            # Report this part finished
            part = finished_parts[part_number]
            large_file_upload_state.update_part_bytes(
                part_upload_source.get_content_length())

            # Return SHA1 hash
            return {'contentSha1': part.content_sha1}

        # Set up a progress listener
        part_progress_listener = PartProgressReporter(large_file_upload_state)

        # Retry the upload as needed
        exception_list = []
        for _ in range(self.MAX_UPLOAD_ATTEMPTS):
            # If another part has already had an error, there's no point in
            # uploading this part.
            if large_file_upload_state.has_error():
                raise AlreadyFailed(
                    large_file_upload_state.get_error_message())

            try:
                with part_upload_source.open() as part_stream:
                    content_length = part_upload_source.get_content_length()
                    input_stream = ReadingStreamWithProgress(
                        part_stream,
                        part_progress_listener,
                        length=content_length)
                    if part_upload_source.is_sha1_known():
                        content_sha1 = part_upload_source.get_content_sha1()
                    else:
                        input_stream = StreamWithHash(
                            input_stream, stream_length=content_length)
                        content_sha1 = HEX_DIGITS_AT_END
                    # it is important that `len()` works on `input_stream`
                    response = self.services.session.upload_part(
                        file_id,
                        part_number,
                        len(input_stream),
                        content_sha1,
                        input_stream,
                        server_side_encryption=encryption,  # TODO: client-side encryption
                    )
                    if content_sha1 == HEX_DIGITS_AT_END:
                        content_sha1 = input_stream.hash
                    assert content_sha1 == response['contentSha1']
                    return response

            except B2Error as e:
                if not e.should_retry_upload():
                    raise
                exception_list.append(e)
                self.account_info.clear_bucket_upload_data(bucket_id)

        large_file_upload_state.set_error(str(exception_list[-1]))
        raise MaxRetriesExceeded(self.MAX_UPLOAD_ATTEMPTS, exception_list)
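
The retry loop above bounds attempts at MAX_UPLOAD_ATTEMPTS, retries only errors that report should_retry_upload(), aborts early once any sibling part has failed, and surfaces the accumulated errors through MaxRetriesExceeded. A self-contained sketch of that policy, with hypothetical stand-ins for the b2sdk error types:

class RetryableError(Exception):
    """Hypothetical stand-in for a B2Error that allows upload retries."""

    def should_retry_upload(self):
        return True

class MaxRetriesExceeded(Exception):
    """Hypothetical stand-in carrying the per-attempt exceptions."""

    def __init__(self, limit, exception_list):
        super().__init__('%d retries exceeded' % limit)
        self.exception_list = exception_list

MAX_UPLOAD_ATTEMPTS = 5

def run_with_retries(attempt):
    exception_list = []
    for _ in range(MAX_UPLOAD_ATTEMPTS):
        try:
            return attempt()
        except RetryableError as e:
            if not e.should_retry_upload():
                raise
            exception_list.append(e)
    raise MaxRetriesExceeded(MAX_UPLOAD_ATTEMPTS, exception_list)

def always_fails():
    raise RetryableError('transient')

try:
    run_with_retries(always_fails)
except MaxRetriesExceeded as e:
    assert len(e.exception_list) == MAX_UPLOAD_ATTEMPTS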
Example 5
    def _upload_part(self,
                     bucket_id,
                     file_id,
                     part_upload_source,
                     part_number,
                     large_file_upload_state,
                     finished_parts=None):
        """
        Upload a file part to a started large file.

        :param str bucket_id: a bucket ID
        :param str file_id: a large file ID
        :param b2sdk.v1.UploadSourcePart part_upload_source: wrapper for an upload source that reads only the required range
        :param int part_number: a part number
        :param b2sdk.v1.LargeFileUploadState large_file_upload_state: state object for progress reporting
                                                                      on large file upload
        :param dict,None finished_parts: dictionary of known finished parts, keys are part numbers,
                                         values are instances of :class:`~b2sdk.v1.Part`
        """
        # Check if this part was uploaded before
        if finished_parts is not None and part_number in finished_parts:
            # Report this part finished
            part = finished_parts[part_number]
            large_file_upload_state.update_part_bytes(
                part_upload_source.get_content_length())

            # Return SHA1 hash
            return {'contentSha1': part.content_sha1}

        # Set up a progress listener
        part_progress_listener = PartProgressReporter(large_file_upload_state)

        # Retry the upload as needed
        exception_list = []
        for _ in six.moves.xrange(self.MAX_UPLOAD_ATTEMPTS):
            # If another part has already had an error, there's no point in
            # uploading this part.
            if large_file_upload_state.has_error():
                raise AlreadyFailed(
                    large_file_upload_state.get_error_message())

            try:
                with part_upload_source.open() as part_stream:
                    input_stream = ReadingStreamWithProgress(
                        part_stream, part_progress_listener)
                    hashing_stream = StreamWithHash(
                        input_stream,
                        stream_length=part_upload_source.get_content_length())
                    # it is important that `len()` works on `hashing_stream`
                    response = self.services.session.upload_part(
                        file_id, part_number, hashing_stream.length,
                        HEX_DIGITS_AT_END, hashing_stream)
                    assert hashing_stream.hash == response['contentSha1']
                    return response

            except B2Error as e:
                if not e.should_retry_upload():
                    raise
                exception_list.append(e)
                self.account_info.clear_bucket_upload_data(bucket_id)

        large_file_upload_state.set_error(str(exception_list[-1]))
        raise MaxRetriesExceeded(self.MAX_UPLOAD_ATTEMPTS, exception_list)
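
Unlike Example 4, this older variant never knows the SHA1 up front: StreamWithHash computes the digest while the part streams out and appends the 40 hex digits after the payload, which is why hashing_stream.length must cover payload plus digest and why HEX_DIGITS_AT_END is sent in place of a checksum. An illustrative stand-in for that trick (not the b2sdk implementation) is sketched below:

import hashlib
import io

class HashingStreamSketch:
    """Hypothetical stand-in: stream the payload, then its SHA1 hex digest."""

    def __init__(self, stream, stream_length):
        self._stream = stream
        self._digest = hashlib.sha1()
        self._tail = None  # becomes a BytesIO over the hex digest at EOF
        self.length = stream_length + 40  # payload plus 40 hex SHA1 characters

    def read(self, size=-1):
        data = self._stream.read(size)
        if data:
            self._digest.update(data)
            return data
        if self._tail is None:
            self._tail = io.BytesIO(self._digest.hexdigest().encode())
        return self._tail.read(size)

stream = HashingStreamSketch(io.BytesIO(b'hello'), stream_length=5)
body = stream.read(5) + stream.read(40)
assert body == b'hello' + hashlib.sha1(b'hello').hexdigest().encode()
assert stream.length == len(body)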