# Method snippets extracted from larger gcloud SDK modules. They assume
# module-level imports roughly like the following (best-effort guesses, not
# copied from the original files):
#   import itertools
#   import queue
#   from apitools.base.py.testing import mock
#   from googlecloudsdk.api_lib.storage import (expansion, storage_api,
#                                               storage_parallel, storage_util)
#   from googlecloudsdk.api_lib.util import apis
#   from googlecloudsdk.calliope import exceptions
#   from googlecloudsdk.core import log, properties
#   from googlecloudsdk.core.console import console_io
    def SetUp(self):
        self.project = 'fake-project'
        properties.VALUES.core.project.Set(self.project)
        # Mock the storage v1 API client so no real requests are made.
        self.client = mock.Client(
            client_class=apis.GetClientClass('storage', 'v1'))
        self.client.Mock()
        self.addCleanup(self.client.Unmock)

        self.messages = apis.GetMessagesModule('storage', 'v1')
        self.expander = expansion.GCSPathExpander()

        # Canned API responses for the mocked client to return.
        self.buckets_response = self.messages.Buckets(items=[
            self.messages.Bucket(name='bucket1'),
            self.messages.Bucket(name='bucket2'),
        ])
        self.bucket1_resp = self.messages.Objects(items=[
            self.messages.Object(name='dir1/sub1/a.txt'),
            self.messages.Object(name='dir1/sub1/aab.txt'),
            self.messages.Object(name='dir1/sub2/aaaa.txt'),
            self.messages.Object(name='dir1/sub2/c.txt'),
            self.messages.Object(name='dir2/sub1/aaaaaa.txt'),
            self.messages.Object(name='dir2/sub1/d.txt'),
            self.messages.Object(name='dir2/sub2/aaaaaaaa.txt'),
            self.messages.Object(name='dir2/sub2/e.txt'),
            self.messages.Object(name='dir3/deeper/sub1/a.txt'),
            self.messages.Object(name='dir3/deeper/sub2/b.txt'),
        ])
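
For context, a test built on this fixture would typically queue the canned
responses on the mocked client before exercising the expander. A minimal
sketch, with the caveat that the exact request fields the expander sends are
an assumption here (mock.Client raises if an expectation does not match):

    def testExpandPaths(self):
        # Queue the canned bucket listing; the request shape is assumed.
        self.client.buckets.List.Expect(
            self.messages.StorageBucketsListRequest(project=self.project),
            self.buckets_response)
        objects, dirs = self.expander.ExpandPaths(['gs://*'])
        # Bucket matches are expected to come back as directory-style paths.
        self.assertEqual(dirs, {'gs://bucket1/', 'gs://bucket2/'})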
    def Run(self, args):
        paths = args.path or ['gs://']
        expander = expansion.GCSPathExpander()
        objects, dirs = expander.ExpandPaths(paths)
        if dirs and not args.recursive:
            raise exceptions.RequiredArgumentException(
                '--recursive',
                'Source path matches directories but --recursive was not specified.'
            )

        buckets = []
        dir_paths = []
        for d in dirs:
            obj_ref = storage_util.ObjectReference.FromUrl(
                d, allow_empty_object=True)
            if not obj_ref.name:
                # A directory with no object name is a bucket root; the bucket
                # itself gets deleted after its contents.
                buckets.append(obj_ref.bucket_ref)
            # The '**' suffix makes the expander match everything under the
            # directory recursively.
            dir_paths.append(d + '**')
        sub_objects, _ = expander.ExpandPaths(dir_paths)
        objects.update(sub_objects)

        # Build one delete task per object; the tasks run in parallel below.
        tasks = []
        for o in sorted(objects):
            tasks.append(
                storage_parallel.ObjectDeleteTask(
                    storage_util.ObjectReference.FromUrl(o)))

        if buckets:
            # Extra warnings and confirmation if any buckets will be deleted.
            log.warning(
                'Deleting a bucket is irreversible and makes that bucket '
                'name available for others to claim.')
            message = 'This command will delete the following buckets:\n  '
            message += '\n  '.join([b.bucket for b in buckets])
            console_io.PromptContinue(message=message,
                                      throw_if_unattended=True,
                                      cancel_on_no=True)

        # TODO(b/120033753): Handle long lists of items.
        message = 'You are about to delete the following:'
        message += ''.join(['\n  ' + b.ToUrl() for b in buckets])
        message += ''.join(['\n  ' + t.obj_ref.ToUrl() for t in tasks])
        console_io.PromptContinue(message=message,
                                  throw_if_unattended=True,
                                  cancel_on_no=True)

        storage_parallel.ExecuteTasks(tasks,
                                      num_threads=args.num_threads,
                                      progress_bar_label='Deleting Files')
        log.status.write('Deleted [{}] file{}.\n'.format(
            len(tasks), 's' if len(tasks) != 1 else ''))

        storage_client = storage_api.StorageClient()
        for b in buckets:
            storage_client.DeleteBucket(b)
            log.DeletedResource(b.ToUrl(), kind='bucket')
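
The parallel delete fan-out above can also be driven directly. A minimal
sketch using only the calls already present in this snippet; the bucket and
object names are made up, and num_threads=16 is just an illustrative value:

refs = [
    storage_util.ObjectReference.FromUrl('gs://my-bucket/a.txt'),
    storage_util.ObjectReference.FromUrl('gs://my-bucket/b.txt'),
]
tasks = [storage_parallel.ObjectDeleteTask(r) for r in refs]
storage_parallel.ExecuteTasks(tasks, num_threads=16,
                              progress_bar_label='Deleting Files')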
Example #3
  def Run(self, args):
    paths = args.path or ['gs://']
    expander = expansion.GCSPathExpander()
    objects, dirs = expander.ExpandPaths(paths)

    if args.IsSpecified('flatten_results'):
      # Respect the user's choice if given explicitly.
      flatten = args.flatten_results
    else:
      # Get a default for this mode if not specifically provided.
      # Simplest case where we are listing only files or a single directory,
      # don't nest output in tables by directory.
      flatten = bool(not args.recursive and
                     not (objects and dirs) and
                     len(dirs) < 2)

    # First collect all the directly matching objects.
    results = []
    if objects:
      results.append(
          {'dir': '',
           'objects': expander.GetSortedObjectDetails(objects)})

    # For each matching directory, get the objects directly under it.
    dirs_to_process = queue.Queue()
    for d in sorted(dirs):
      dirs_to_process.put(d)
    while not dirs_to_process.empty():
      d = dirs_to_process.get()
      children = [d + o for o in sorted(expander.ListDir(d))]
      details = expander.GetSortedObjectDetails(children)
      results.append({'dir': d, 'objects': details})

      if args.recursive:
        # Recurse on any directories that are found under the current parent.
        for c in children:
          if expander.IsDir(c):
            dirs_to_process.put(c + '/')

    if not flatten:
      return results
    # Flatten results.
    args.GetDisplayInfo().AddFormat(List.OBJECT_FORMAT_STRING)
    return itertools.chain.from_iterable([x['objects'] for x in results])
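
The listing loop above is a breadth-first walk driven by a FIFO queue. A
self-contained sketch of the same traversal over an in-memory tree (all names
hypothetical), runnable on its own:

import queue

tree = {
    'gs://b/': ['a.txt', 'dir1/'],
    'gs://b/dir1/': ['b.txt', 'sub/'],
    'gs://b/dir1/sub/': ['c.txt'],
}
dirs_to_process = queue.Queue()
dirs_to_process.put('gs://b/')
while not dirs_to_process.empty():
    d = dirs_to_process.get()
    children = [d + name for name in sorted(tree.get(d, []))]
    print(d, '->', children)
    for c in children:
        if c.endswith('/'):  # directory marker, keep walking
            dirs_to_process.put(c)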
Example #4
  def __init__(self):
    # Create a single instance of each expander so that all expansion uses the
    # same cached data.
    self._local_expander = expansion.LocalPathExpander()
    self._gcs_expander = expansion.GCSPathExpander()
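
A caller holding both expanders would typically route each path by scheme so
repeated expansions reuse the cached listings. A hypothetical helper, not
part of the original class:

  def _GetExpander(self, path):
    # Hypothetical: gs:// URLs go to the GCS expander, everything else to the
    # local filesystem expander; both keep their caches across calls.
    if path.startswith('gs://'):
      return self._gcs_expander
    return self._local_expander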