Example no. 1
    def _upload_small_file(self, bucket_id, upload_source, file_name,
                           content_type, file_info, progress_listener):
        content_length = upload_source.get_content_length()
        exception_info_list = []
        progress_listener.set_total_bytes(content_length)
        with progress_listener:
            for _ in range(self.MAX_UPLOAD_ATTEMPTS):
                try:
                    with upload_source.open() as file:
                        input_stream = ReadingStreamWithProgress(
                            file, progress_listener, length=content_length)
                        if upload_source.is_sha1_known():
                            content_sha1 = upload_source.get_content_sha1()
                        else:
                            input_stream = StreamWithHash(
                                input_stream, stream_length=content_length)
                            content_sha1 = HEX_DIGITS_AT_END
                        # it is important that `len()` works on `input_stream`
                        response = self.services.session.upload_file(
                            bucket_id, file_name, len(input_stream),
                            content_type, content_sha1, file_info,
                            input_stream)
                        if content_sha1 == HEX_DIGITS_AT_END:
                            content_sha1 = input_stream.hash
                        assert content_sha1 == response['contentSha1']
                        return FileVersionInfoFactory.from_api_response(
                            response)

                except B2Error as e:
                    if not e.should_retry_upload():
                        raise
                    exception_info_list.append(e)
                    self.account_info.clear_bucket_upload_data(bucket_id)

        raise MaxRetriesExceeded(self.MAX_UPLOAD_ATTEMPTS, exception_info_list)
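The control flow above boils down to a bounded retry loop that collects the reason for each failed attempt before giving up. A minimal standalone sketch of the same pattern, with hypothetical try_upload_once and RetryableError stand-ins for the SDK's upload call and the B2Error.should_retry_upload() check:

MAX_UPLOAD_ATTEMPTS = 5

class RetryableError(Exception):
    pass

class MaxRetriesExceeded(Exception):
    def __init__(self, attempts, causes):
        super().__init__("gave up after %d attempts" % attempts)
        self.causes = causes

def upload_with_retries(try_upload_once):
    causes = []
    for _ in range(MAX_UPLOAD_ATTEMPTS):
        try:
            return try_upload_once()   # success ends the loop immediately
        except RetryableError as e:
            causes.append(e)           # remember why this attempt failed
    raise MaxRetriesExceeded(MAX_UPLOAD_ATTEMPTS, causes)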
Example no. 2
    def file_info(self, filename):
        response = self.bucket.api.session.list_file_names(self.bucket.id_, filename, 1, None)
        for entry in response[u'files']:
            file_version_info = FileVersionInfoFactory.from_api_response(entry)
            if file_version_info.file_name == filename:
                return file_version_info
        raise BackendException(u'File not found')
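The same exact-match lookup can be sketched without the factory. Here session is assumed to expose a list_file_names-style call returning {'files': [{'fileName': ...}, ...]}, mirroring the B2 API response shape used above:

def find_exact(session, bucket_id, filename):
    # ask for at most one name, starting at `filename`
    response = session.list_file_names(bucket_id, filename, 1, None)
    for entry in response['files']:
        if entry['fileName'] == filename:
            return entry
    raise FileNotFoundError(filename)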
Example no. 3
def _mockFileExists(tempFile: File, b2FileId: str = "someId") -> None:
    bucket.get_file_info_by_name.side_effect = None
    bucket.get_file_info_by_name.return_value = FileVersionInfoFactory.from_response_headers(
        {
            "x-bz-file-id": b2FileId,
            "content-length": tempFile.size
        })
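A test using the helper above might look like this (a sketch: bucket and tempFile come from the surrounding test suite, and it assumes from_response_headers maps x-bz-file-id to id_ and content-length to size, as b2sdk does):

_mockFileExists(tempFile, b2FileId="someId")
info = bucket.get_file_info_by_name("some_name.txt")
assert info.id_ == "someId"
assert info.size == tempFile.size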
Example no. 4
    def cancel_large_file(self, file_id):
        """
        Cancel a large file upload.

        :param str file_id: a file ID
        :return: a FileVersionInfo describing the canceled upload
        """
        response = self.services.session.cancel_large_file(file_id)
        return FileVersionInfoFactory.from_cancel_large_file_response(response)
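From application code, canceling an unfinished large file typically goes through the public B2Api surface rather than this internal method. A hedged sketch follows; treat the import path and method placement as assumptions, since they moved between b2sdk versions:

from b2sdk.v1 import InMemoryAccountInfo, B2Api

api = B2Api(InMemoryAccountInfo())
api.authorize_account("production", "YOUR_KEY_ID", "YOUR_APP_KEY")
# file_id is the ID previously returned by start_large_file
canceled = api.cancel_large_file("4_z... (unfinished file ID)")
print(canceled)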
Example no. 5
def existOrThrow():
    if doesFileExist():
        return FileVersionInfoFactory.from_response_headers({
            "id": id,
            "content-length": size,
        })
    raise FileNotPresent()
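The snippet leans on doesFileExist, id, and size from its enclosing scope. A self-contained variant with those dependencies made explicit as parameters (hypothetical stand-ins; FileVersionInfoFactory is imported as elsewhere in these examples):

class FileNotPresent(Exception):
    pass

def exist_or_throw(does_file_exist, file_id, size):
    if does_file_exist():
        return FileVersionInfoFactory.from_response_headers({
            "id": file_id,
            "content-length": size,
        })
    raise FileNotPresent()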
Example no. 6
def test_get_available_nameWithOverwrites(settings):
    mockedBucket = mock.Mock(spec=Bucket)
    mockedBucket.get_file_info_by_name.return_value = FileVersionInfoFactory.from_response_headers(
        {
            "id_": 1,
            "file_name": "some_name.txt"
        })

    with mock.patch.object(settings, "BACKBLAZE_CONFIG", _settingsDict({})), \
            mock.patch.object(B2Api, "authorize_account"), \
            mock.patch.object(B2Api, "get_bucket_by_name") as api:
        api.return_value = mockedBucket
        storage = BackblazeB2Storage(opts={"allowFileOverwrites": True})

        availableName = storage.get_available_name("some_name.txt",
                                                   max_length=None)

        assert availableName == "some_name.txt"
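A hedged companion to the test above, covering the overwrite-disallowed path. It would live inside the same patched context, and the expectation (a name other than the taken one) is an assumption about BackblazeB2Storage's behavior, not something the test above establishes:

storage = BackblazeB2Storage(opts={"allowFileOverwrites": False})
availableName = storage.get_available_name("some_name.txt", max_length=None)
# with overwrites disallowed, the storage should not hand back the taken name
assert availableName != "some_name.txt"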
Example no. 7
    def _copy_small_file(
        self,
        copy_source,
        file_name,
        content_type,
        file_info,
        destination_bucket_id,
        progress_listener,
    ):
        with progress_listener:
            progress_listener.set_total_bytes(copy_source.get_content_length()
                                              or 0)

            bytes_range = copy_source.get_bytes_range()

            if content_type is None:
                if file_info is not None:
                    raise ValueError(
                        'File info can be set only when content type is set')
                metadata_directive = MetadataDirectiveMode.COPY
            else:
                if file_info is None:
                    raise ValueError(
                        'File info can be not set only when content type is not set'
                    )
                metadata_directive = MetadataDirectiveMode.REPLACE

            response = self.services.session.copy_file(
                copy_source.file_id,
                file_name,
                bytes_range=bytes_range,
                metadata_directive=metadata_directive,
                content_type=content_type,
                file_info=file_info,
                destination_bucket_id=destination_bucket_id)
            file_info = FileVersionInfoFactory.from_api_response(response)
            if progress_listener is not None:
                progress_listener.bytes_completed(file_info.size)

        return file_info
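The content_type/file_info validation above reduces to a small decision function: either both are unset (COPY, the server reuses the source metadata) or both are set (REPLACE, the caller supplies new metadata). A sketch, assuming MetadataDirectiveMode is importable from b2sdk.v1 as b2sdk's enum for b2_copy_file's metadataDirective field:

from b2sdk.v1 import MetadataDirectiveMode

def choose_metadata_directive(content_type, file_info):
    if content_type is None:
        if file_info is not None:
            raise ValueError('File info can be set only when content type is set')
        return MetadataDirectiveMode.COPY      # server reuses source metadata
    if file_info is None:
        raise ValueError('File info can be not set only when content type is not set')
    return MetadataDirectiveMode.REPLACE       # caller supplies new metadata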
Example no. 8
    def execute_plan(self, emerge_plan):
        total_length = emerge_plan.get_total_length()

        if total_length is not None and total_length > self.MAX_LARGE_FILE_SIZE:
            raise MaxFileSizeExceeded(total_length, self.MAX_LARGE_FILE_SIZE)

        plan_id = emerge_plan.get_plan_id()

        file_info = dict(self.file_info) if self.file_info is not None else {}
        if plan_id is not None:
            file_info['plan_id'] = plan_id

        self.progress_listener.set_total_bytes(total_length or 0)

        emerge_parts_dict = None
        if total_length is not None:
            emerge_parts_dict = dict(emerge_plan.enumerate_emerge_parts())

        unfinished_file, finished_parts = self._get_unfinished_file_and_parts(
            self.bucket_id,
            self.file_name,
            file_info,
            self.continue_large_file_id,
            emerge_parts_dict=emerge_parts_dict
        )

        if unfinished_file is None:
            if self.content_type is None:
                content_type = self.DEFAULT_CONTENT_TYPE
            else:
                content_type = self.content_type
            unfinished_file = self.services.large_file.start_large_file(
                self.bucket_id, self.file_name, content_type, file_info
            )
        file_id = unfinished_file.file_id

        with self.progress_listener:
            large_file_upload_state = LargeFileUploadState(self.progress_listener)

            part_futures = []
            for part_number, emerge_part in emerge_plan.enumerate_emerge_parts():
                execution_step_factory = LargeFileEmergeExecutionStepFactory(
                    self,
                    emerge_part,
                    part_number,
                    file_id,
                    large_file_upload_state,
                    finished_parts=finished_parts,
                )
                execution_step = execution_step_factory.get_execution_step()
                future = self._execute_step(execution_step)
                part_futures.append(future)

            # Collect the sha1 checksums of the parts as the uploads finish.
            # If any of them raised an exception, that same exception will
            # be raised here by result()
            part_sha1_array = [interruptible_get_result(f)['contentSha1'] for f in part_futures]

        # Finish the large file
        response = self.services.session.finish_large_file(file_id, part_sha1_array)
        return FileVersionInfoFactory.from_api_response(response)
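The fan-out/fan-in around part uploads can be sketched with the standard library alone. Here upload_part is a hypothetical callable returning a dict with a 'contentSha1' key, standing in for the SDK's execution steps:

from concurrent.futures import ThreadPoolExecutor

def upload_all_parts(upload_part, parts):
    with ThreadPoolExecutor(max_workers=4) as pool:
        futures = [
            pool.submit(upload_part, part_number, part)
            for part_number, part in enumerate(parts, start=1)
        ]
        # result() re-raises any worker exception, mirroring
        # interruptible_get_result() in the code above
        return [f.result()['contentSha1'] for f in futures]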