def _MakeTask(self, source, dest):
  """Build the copy Task appropriate for one source/destination pair.

  Args:
    source: paths.Path, The source file to copy.
    dest: path.Path, The destination to copy the file to.

  Raises:
    InvalidDestinationError: If this would end up copying to a path that has
      '.' or '..' as a segment.
    LocationMismatchError: If trying to copy a local file to a local file.

  Returns:
    storage_parallel.Task, The copy task to execute.
  """
  if not dest.IsPathSafe():
    raise InvalidDestinationError(source, dest)

  # Pick the task type from the remote/local combination of the endpoints.
  if source.is_remote and dest.is_remote:
    return storage_parallel.FileRemoteCopyTask(
        storage_util.ObjectReference.FromUrl(source.path),
        storage_util.ObjectReference.FromUrl(dest.path))
  if source.is_remote:
    return storage_parallel.FileDownloadTask(
        storage_util.ObjectReference.FromUrl(source.path), dest.path)
  if dest.is_remote:
    return storage_parallel.FileUploadTask(
        source.path, storage_util.ObjectReference.FromUrl(dest.path))

  # Both endpoints are local; there is no task for that.
  raise LocationMismatchError(
      'Cannot copy local file [{}] to local file [{}]'.format(
          source.path, dest.path))
def testUpload(self):
  """A single FileUploadTask results in exactly one CopyFileToGCS call."""
  self.upload_mock = self.storage_client_mock.CopyFileToGCS
  local_path = '/some/file'
  remote_ref = storage_util.ObjectReference(self._TEST_BUCKET, 'remote/obj')
  storage_parallel.ExecuteTasks(
      [storage_parallel.FileUploadTask(local_path, remote_ref)])
  self.upload_mock.assert_called_once_with(local_path, remote_ref)
def _MakeTestTasks(self, count):
  """Return `count` FileUploadTasks 'local<n>' -> 'remote<n>' in the test bucket."""
  return [
      storage_parallel.FileUploadTask(
          'local{0}'.format(n),
          storage_util.ObjectReference(
              self._TEST_BUCKET, 'remote{0}'.format(n)))
      for n in range(count)
  ]
def _MakeFileUploadTasks(self, bucket_ref):
  """Build NUM_FILES upload tasks for self.file_path under generated names.

  Args:
    bucket_ref: the bucket reference the generated objects belong to.

  Returns:
    list of storage_parallel.FileUploadTask.
  """
  name_generator = e2e_utils.GetResourceNameGenerator(prefix='storage-file')
  return [
      storage_parallel.FileUploadTask(
          self.file_path,
          storage_util.ObjectReference.FromBucketRef(bucket_ref, remote_file))
      for remote_file in itertools.islice(name_generator, self.NUM_FILES)
  ]
def _UploadFilesThreads(files_to_upload, bucket_ref):
  """Uploads files to App Engine Cloud Storage bucket using threads.

  Args:
    files_to_upload: dict {str: str}, map of checksum to local path
    bucket_ref: storage_api.BucketReference, the reference to the bucket
      files will be placed in.

  Raises:
    MultiError: if one or more errors occurred during file upload.
  """
  threads_per_proc = (properties.VALUES.app.num_file_upload_threads.GetInt()
                      or storage_parallel.DEFAULT_NUM_THREADS)
  tasks = []
  # Have to sort files because the test framework requires a known order for
  # mocked API calls.
  # Fix: dict.iteritems() exists only on Python 2; items() is correct on both
  # Python 2 and 3 (the extra intermediate list on Py2 is irrelevant here).
  for sha1_hash, path in sorted(files_to_upload.items()):
    tasks.append(
        storage_parallel.FileUploadTask(path, bucket_ref.ToBucketUrl(),
                                        sha1_hash))
  storage_parallel.UploadFiles(tasks, threads_per_process=threads_per_proc)