def testCopy(self):
    """A FileRemoteCopyTask routes through the client's Copy method."""
    self.copy_mock = self.storage_client_mock.Copy
    source = storage_util.ObjectReference(self._TEST_BUCKET, 'remote/obj1')
    destination = storage_util.ObjectReference(self._TEST_BUCKET, 'remote/obj2')
    copy_task = storage_parallel.FileRemoteCopyTask(source, destination)
    storage_parallel.ExecuteTasks([copy_task])
    self.copy_mock.assert_called_once_with(source, destination)
def testDownload(self):
    """A FileDownloadTask routes through the client's CopyFileFromGCS."""
    self.download_mock = self.storage_client_mock.CopyFileFromGCS
    local_path = '/some/file'
    source_obj = storage_util.ObjectReference(self._TEST_BUCKET, 'remote/obj')
    download_task = storage_parallel.FileDownloadTask(source_obj, local_path)
    storage_parallel.ExecuteTasks([download_task])
    self.download_mock.assert_called_once_with(source_obj, local_path)
def testUpload(self):
    """A FileUploadTask routes through the client's CopyFileToGCS."""
    self.upload_mock = self.storage_client_mock.CopyFileToGCS
    local_path = '/some/file'
    dest_obj = storage_util.ObjectReference(self._TEST_BUCKET, 'remote/obj')
    upload_task = storage_parallel.FileUploadTask(local_path, dest_obj)
    storage_parallel.ExecuteTasks([upload_task])
    self.upload_mock.assert_called_once_with(local_path, dest_obj)
def Run(self, args):
    """Deletes the GCS objects (and, recursively, buckets) matching args.path.

    Expands the given paths, confirms the deletion with the user, deletes the
    matching objects in parallel, and finally deletes any buckets whose entire
    contents were selected.

    Args:
      args: argparse namespace; reads `path` (list of GCS URLs, may be empty),
        `recursive` (bool), and `num_threads` (int).

    Raises:
      exceptions.RequiredArgumentException: if a path matches a directory but
        --recursive was not given.
    """
    paths = args.path or ['gs://']
    expander = expansion.GCSPathExpander()
    objects, dirs = expander.ExpandPaths(paths)
    if dirs and not args.recursive:
        raise exceptions.RequiredArgumentException(
            '--recursive',
            'Source path matches directories but --recursive was not specified.')

    buckets = []
    dir_paths = []
    for d in dirs:
        obj_ref = storage_util.ObjectReference.FromUrl(
            d, allow_empty_object=True)
        # A directory URL with an empty object name is a bucket root; deleting
        # all of its contents means the bucket itself should go too.
        if not obj_ref.name:
            buckets.append(obj_ref.bucket_ref)
        dir_paths.append(d + '**')
    sub_objects, _ = expander.ExpandPaths(dir_paths)
    objects.update(sub_objects)

    tasks = []
    for o in sorted(objects):
        tasks.append(
            storage_parallel.ObjectDeleteTask(
                storage_util.ObjectReference.FromUrl(o)))

    if buckets:
        # Extra warnings and confirmation if any buckets will be deleted.
        log.warning(
            'Deleting a bucket is irreversible and makes that bucket '
            'name available for others to claim.')
        message = 'This command will delete the following buckets:\n '
        message += '\n '.join([b.bucket for b in buckets])
        console_io.PromptContinue(
            message=message, throw_if_unattended=True, cancel_on_no=True)

    # TODO(b/120033753): Handle long lists of items.
    message = 'You are about to delete the following:'
    message += ''.join(['\n ' + b.ToUrl() for b in buckets])
    message += ''.join(['\n ' + t.obj_ref.ToUrl() for t in tasks])
    console_io.PromptContinue(
        message=message, throw_if_unattended=True, cancel_on_no=True)

    storage_parallel.ExecuteTasks(
        tasks, num_threads=args.num_threads,
        progress_bar_label='Deleting Files')
    # Pluralize for every count except exactly 1 (fixes 'Deleted [0] file.').
    log.status.write('Deleted [{}] file{}.\n'.format(
        len(tasks), '' if len(tasks) == 1 else 's'))

    storage_client = storage_api.StorageClient()
    for b in buckets:
        storage_client.DeleteBucket(b)
        log.DeletedResource(b.ToUrl(), kind='bucket')
def Run(self, args):
    """Copies the source files/objects to the destination in parallel.

    Args:
      args: argparse namespace; reads `source` (list of paths), `destination`
        (path), `recursive` (bool), and `num_threads` (int).
    """
    sources = [paths.Path(p) for p in args.source]
    dest = paths.Path(args.destination)
    copier = copying.CopyTaskGenerator()
    tasks = copier.GetCopyTasks(sources, dest, recursive=args.recursive)
    storage_parallel.ExecuteTasks(
        tasks, num_threads=args.num_threads,
        progress_bar_label='Copying Files')
    # Pluralize for every count except exactly 1 (fixes 'Copied [0] file.').
    log.status.write('Copied [{}] file{}.\n'.format(
        len(tasks), '' if len(tasks) == 1 else 's'))
def testDelete(self):
    """An ObjectDeleteTask routes through the client's DeleteObject."""
    self.delete_mock = self.storage_client_mock.DeleteObject
    target = storage_util.ObjectReference(self._TEST_BUCKET, 'remote/obj')
    delete_task = storage_parallel.ObjectDeleteTask(target)
    storage_parallel.ExecuteTasks([delete_task])
    self.delete_mock.assert_called_once_with(target)