Example #1
    def testDownloadsSingleHuntFileIfAuthorizationIsPresent(self):
        hunt_urn = self._CreateHuntWithDownloadedFile()
        hunt_id = hunt_urn.Basename()
        results = self.GetHuntResults(hunt_urn)

        self.RequestAndGrantHuntApproval(hunt_id)

        # Navigate to the hunt's Results tab in the UI.
        self.Open("/")
        self.Click("css=a[grrtarget=hunts]")
        self.Click("css=td:contains('%s')" % hunt_id)
        self.Click("css=li[heading=Results]")

        # Open the file referenced by the first hunt result, using whichever
        # data store backend is enabled (relational file store or legacy AFF4).
        if data_store.RelationalDBEnabled():
            fd = file_store.OpenFile(
                flow_export.CollectionItemToClientPath(results[0]))
        else:
            fd = aff4.FACTORY.Open(
                flow_export.CollectionItemToAff4Path(results[0]),
                token=self.token)

        # Patch Read on the file's class so the read performed when the
        # download button is clicked is intercepted and can be asserted on.
        with mock.patch.object(fd.__class__, "Read") as mock_obj:
            self.Click(
                "css=grr-results-collection button:has(span.glyphicon-download)"
            )
            self.WaitUntil(lambda: mock_obj.called)
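The essential trick in this test is patching Read on the file object's class rather than on the single instance, so the read performed elsewhere (by the download handler) is also intercepted. A minimal, self-contained sketch of that pattern; the FakeFile class and test names below are hypothetical, not part of GRR:

import unittest
from unittest import mock


class FakeFile(object):
  """Hypothetical stand-in for the file object opened by a download handler."""

  def Read(self, length):
    return b"data"


class ClassLevelPatchTest(unittest.TestCase):

  def testReadIsObservedOnAnyInstance(self):
    # Patching on the class means every instance, including ones created
    # elsewhere, ends up calling the mock.
    with mock.patch.object(FakeFile, "Read") as mock_read:
      FakeFile().Read(100)
      self.assertTrue(mock_read.called)


if __name__ == "__main__":
  unittest.main()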
Example #2
  def Handle(self, args, context=None):
    if not args.hunt_id:
      raise ValueError("ApiGetHuntFileArgs.hunt_id can't be unset")

    if not args.client_id:
      raise ValueError("ApiGetHuntFileArgs.client_id can't be unset")

    if not args.vfs_path:
      raise ValueError("ApiGetHuntFileArgs.vfs_path can't be unset")

    if not args.timestamp:
      raise ValueError("ApiGetHuntFileArgs.timestamp can't be unset")

    api_vfs.ValidateVfsPath(args.vfs_path)

    path_type, components = rdf_objects.ParseCategorizedPath(args.vfs_path)
    expected_client_path = db.ClientPath(
        str(args.client_id), path_type, components)

    results = data_store.REL_DB.ReadHuntResults(
        str(args.hunt_id),
        offset=0,
        count=self.MAX_RECORDS_TO_CHECK,
        with_timestamp=args.timestamp)
    for item in results:
      try:
        # Do not pass the client id we got from the caller. It will get
        # filled in automatically from the hunt results, and we check
        # later that the client path we get is the same as the one that
        # was requested.
        client_path = export.CollectionItemToClientPath(item, client_id=None)
      except export.ItemNotExportableError:
        continue

      if client_path != expected_client_path:
        continue

      try:
        # TODO(user): this effectively downloads the latest version of
        # the file and always disregards the timestamp. Reconsider this logic
        # after AFF4 implementation is gone. We also most likely don't need
        # the MAX_RECORDS_TO_CHECK logic in the new implementation.
        file_obj = file_store.OpenFile(client_path)
        return api_call_handler_base.ApiBinaryStream(
            "%s_%s" % (args.client_id, os.path.basename(file_obj.Path())),
            content_generator=self._GenerateFile(file_obj),
            content_length=file_obj.size)
      except (file_store.FileHasNoContentError, file_store.FileNotFoundError):
        break

    raise HuntFileNotFoundError(
        "File %s with timestamp %s and client %s "
        "wasn't found among the results of hunt %s" %
        (args.vfs_path, args.timestamp, args.client_id, args.hunt_id))
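The handler's core is a scan over hunt results: each result is converted to a client path, compared against the path reconstructed from the request, and the first match is streamed back. A minimal sketch of that matching loop with plain-Python stand-ins; ClientPath, the results list, and find_matching_result here are illustrative, not the GRR API:

from dataclasses import dataclass
from typing import Iterable, Optional, Tuple


@dataclass(frozen=True)
class ClientPath(object):
  """Illustrative stand-in for db.ClientPath."""
  client_id: str
  path_type: str
  components: Tuple[str, ...]


def find_matching_result(results: Iterable[ClientPath],
                         expected: ClientPath) -> Optional[ClientPath]:
  """Returns the first result whose client path equals the requested one."""
  for client_path in results:
    if client_path != expected:
      continue
    # In the real handler this is where the file is opened and streamed.
    return client_path
  return None


results = [
    ClientPath("C.1000000000000000", "OS", ("tmp", "foo")),
    ClientPath("C.1000000000000000", "OS", ("tmp", "bar")),
]
expected = ClientPath("C.1000000000000000", "OS", ("tmp", "bar"))
assert find_matching_result(results, expected) == expected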
Example #3
    def Generate(self, items, token=None):
        """Generates archive from a given collection.

        Iterates the collection and generates an archive by yielding contents
        of every referenced AFF4Stream.

        Args:
          items: Iterable of rdf_client_fs.StatEntry objects
          token: User's ACLToken.

        Yields:
          Binary chunks comprising the generated archive.
        """

        del token  # unused, to be removed with AFF4 code

        client_ids = set()
        for item_batch in collection.Batch(items, self.BATCH_SIZE):

            client_paths = set()
            for item in item_batch:
                try:
                    client_path = flow_export.CollectionItemToClientPath(
                        item, self.client_id)
                except flow_export.ItemNotExportableError:
                    continue

                if not self.predicate(client_path):
                    self.ignored_files.add(client_path)
                    self.processed_files.add(client_path)
                    continue

                client_ids.add(client_path.client_id)
                client_paths.add(client_path)

            # Stream the selected files in chunks and write each chunk into
            # the archive as it arrives.
            for chunk in file_store.StreamFilesChunks(client_paths):
                self.processed_files.add(chunk.client_path)
                for output in self._WriteFileChunk(chunk=chunk):
                    yield output

            # Files that produced no chunks still count as processed.
            self.processed_files |= client_paths - (self.ignored_files
                                                    | self.archived_files)

        if client_ids:
            for client_id, client_info in iteritems(
                    data_store.REL_DB.MultiReadClientFullInfo(client_ids)):
                client = api_client.ApiClient().InitFromClientInfo(client_info)
                for chunk in self._GenerateClientInfo(client_id, client):
                    yield chunk

        for chunk in self._GenerateDescription():
            yield chunk

        yield self.archive_generator.Close()
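collection.Batch drives the outer loop: items are consumed in fixed-size batches so that the files of each batch can be streamed together. A minimal sketch of that batching behaviour using itertools; the Batch function below is an illustrative approximation, not the GRR implementation:

import itertools
from typing import Iterable, Iterator, List, TypeVar

T = TypeVar("T")


def Batch(items: Iterable[T], size: int) -> Iterator[List[T]]:
  """Yields successive lists of at most `size` items from `items`."""
  iterator = iter(items)
  while True:
    chunk = list(itertools.islice(iterator, size))
    if not chunk:
      return
    yield chunk


# Five items in batches of two: [[0, 1], [2, 3], [4]].
assert list(Batch(range(5), 2)) == [[0, 1], [2, 3], [4]]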
Example #4
    def Generate(self, items, token=None):
        """Generates archive from a given collection.

        Iterates the collection and generates an archive by yielding contents
        of every referenced AFF4Stream.

        Args:
          items: Iterable of rdf_client_fs.StatEntry objects
          token: User's ACLToken.

        Yields:
          Binary chunks comprising the generated archive.
        """

        del token  # unused, to be removed with AFF4 code

        client_ids = set()
        for item_batch in collection.Batch(items, self.BATCH_SIZE):

            fds_to_write = {}
            for item in item_batch:
                try:
                    urn = flow_export.CollectionItemToAff4Path(
                        item, self.client_id)
                    client_path = flow_export.CollectionItemToClientPath(
                        item, self.client_id)
                except flow_export.ItemNotExportableError:
                    continue

                fd = file_store.OpenFile(client_path)
                self.total_files += 1

                if not self.predicate(client_path):
                    self.ignored_files.append(utils.SmartUnicode(urn))
                    continue

                client_ids.add(client_path.client_id)

                # content_path = os.path.join(self.prefix, *urn_components)
                self.archived_files += 1

                # Make sure size of the original file is passed. It's required
                # when output_writer is StreamingTarWriter.
                st = os.stat_result((0o644, 0, 0, 0, 0, 0, fd.size, 0, 0, 0))
                fds_to_write[fd] = (client_path, urn, st)

            if fds_to_write:
                for fd, (client_path, urn, st) in iteritems(fds_to_write):
                    try:
                        for i, chunk in enumerate(
                                file_store.StreamFilesChunks([client_path])):
                            if i == 0:
                                target_path = os.path.join(
                                    self.prefix,
                                    urn.Path()[1:])
                                yield self.archive_generator.WriteFileHeader(
                                    target_path, st=st)

                            yield self.archive_generator.WriteFileChunk(
                                chunk.data)

                        yield self.archive_generator.WriteFileFooter()
                    except Exception as exception:  # pylint: disable=broad-except
                        logging.exception(exception)

                        self.archived_files -= 1
                        self.failed_files.append(unicode(urn))

                # If a write failed mid-file, close the open archive entry so
                # the archive stays well-formed.
                if self.archive_generator.is_file_write_in_progress:
                    yield self.archive_generator.WriteFileFooter()

        if client_ids:
            for client_id, client_info in iteritems(
                    data_store.REL_DB.MultiReadClientFullInfo(client_ids)):
                client = api_client.ApiClient().InitFromClientInfo(client_info)
                for chunk in self._GenerateClientInfo(client_id, client):
                    yield chunk

        for chunk in self._GenerateDescription():
            yield chunk

        yield self.archive_generator.Close()
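The synthetic os.stat_result above exists to hand the archive writer the file's size before its chunks are streamed, since the tar format stores the size in the entry header ahead of the data. A minimal sketch of the same idea with the standard tarfile module; the archive path and contents are illustrative:

import io
import tarfile

data = b"file contents streamed from the file store"

buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tar:
  info = tarfile.TarInfo(name="hunt_prefix/C.1000000000000000/fs/os/tmp/foo")
  info.mode = 0o644
  info.size = len(data)  # The size must be known before the data is written.
  tar.addfile(info, io.BytesIO(data))

archive_bytes = buf.getvalue()  # The complete tar archive as bytes.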