Example #1
    def Handle(self, args, token=None):
        client_id = str(args.client_id)
        path = args.file_path
        if not path:
            start_paths = ["fs/os", "fs/tsk", "registry", "temp"]
            prefix = "vfs_" + re.sub("[^0-9a-zA-Z]", "_", client_id)
        else:
            ValidateVfsPath(path)
            if path.rstrip("/") == "fs":
                start_paths = ["fs/os", "fs/tsk"]
            else:
                start_paths = [path]
            prefix = "vfs_" + re.sub("[^0-9a-zA-Z]", "_",
                                     client_id + "_" + path).strip("_")

        content_generator = self._WrapContentGenerator(
            self._GenerateContent(client_id, start_paths, args.timestamp,
                                  prefix), args, token.username)
        return api_call_handler_base.ApiBinaryStream(
            prefix + ".zip", content_generator=content_generator)
Example #2
    def Handle(
        self,
        args: ApiGetCollectedHuntTimelinesArgs,
        token: Optional[access_control.ACLToken] = None,
    ) -> api_call_handler_base.ApiBinaryStream:
        """Handles requests for the hunt timelines export API call."""
        hunt_id = str(args.hunt_id)

        hunt_obj = data_store.REL_DB.ReadHuntObject(hunt_id)
        if hunt_obj.args.standard.flow_name != timeline.TimelineFlow.__name__:
            message = f"Hunt '{hunt_id}' is not a timeline hunt"
            raise ValueError(message)

        if (args.format != ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED
                and args.format != ApiGetCollectedTimelineArgs.Format.BODY):
            message = f"Incorrect timeline export format {args.format}"
            raise ValueError(message)

        filename = f"timelines_{hunt_id}.zip"
        content = self._Generate(hunt_id, args.format)
        return api_call_handler_base.ApiBinaryStream(filename, content)
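The _Generate helper referenced above is not part of this snippet. As a rough sketch only (the helper names, chunk size, and use of the standard-library io and zipfile modules are assumptions, not the original GRR code), a content generator handed to ApiBinaryStream just has to yield byte chunks, for example a ZIP archive built from per-client timelines:

    def _Generate(self, hunt_id, fmt):
        # Sketch only: buffers the whole archive in memory; a production
        # handler would stream it incrementally.
        buffer = io.BytesIO()
        with zipfile.ZipFile(buffer, "w", zipfile.ZIP_DEFLATED) as archive:
            # Assumed helper yielding (client_id, serialized timeline) pairs.
            for client_id, blob in self._FetchTimelines(hunt_id, fmt):
                archive.writestr("{}.gzchunked".format(client_id), blob)

        buffer.seek(0)
        while True:
            chunk = buffer.read(1024 * 1024)
            if not chunk:
                break
            yield chunk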
Example #3
  def Handle(self, args, context=None):
    collection, description = self._LoadData(args, context=context)
    target_file_prefix = "hunt_" + str(args.hunt_id).replace(":", "_")

    if args.archive_format == args.ArchiveFormat.ZIP:
      archive_format = archive_generator.CollectionArchiveGenerator.ZIP
      file_extension = ".zip"
    elif args.archive_format == args.ArchiveFormat.TAR_GZ:
      archive_format = archive_generator.CollectionArchiveGenerator.TAR_GZ
      file_extension = ".tar.gz"
    else:
      raise ValueError("Unknown archive format: %s" % args.archive_format)

    generator = archive_generator.CollectionArchiveGenerator(
        prefix=target_file_prefix,
        description=description,
        archive_format=archive_format)
    content_generator = self._WrapContentGenerator(
        generator, collection, args, context=context)
    return api_call_handler_base.ApiBinaryStream(
        target_file_prefix + file_extension,
        content_generator=content_generator)
Example #4
    def Handle(self, args, token=None):
        iop_cls = instant_output_plugin.InstantOutputPlugin
        plugin_cls = iop_cls.GetPluginClassByPluginName(args.plugin_name)

        # TODO(user): Instant output plugins shouldn't depend on tokens
        # and URNs.
        flow_urn = rdfvalue.RDFURN("{}/flows/{}".format(
            args.client_id, args.flow_id))
        plugin = plugin_cls(source_urn=flow_urn, token=token)

        client_id = str(args.client_id)
        flow_id = str(args.flow_id)
        types = data_store.REL_DB.CountFlowResultsByType(client_id, flow_id)

        def FetchFn(type_name):
            """Fetches all flow results of a given type."""
            offset = 0
            while True:
                results = data_store.REL_DB.ReadFlowResults(
                    client_id,
                    flow_id,
                    offset=offset,
                    count=self._RESULTS_PAGE_SIZE,
                    with_type=type_name)
                if not results:
                    break

                for r in results:
                    msg = r.AsLegacyGrrMessage()
                    msg.source = client_id
                    yield msg

                offset += self._RESULTS_PAGE_SIZE

        content_generator = instant_output_plugin.ApplyPluginToTypedCollection(
            plugin, types, FetchFn)

        return api_call_handler_base.ApiBinaryStream(
            plugin.output_file_name, content_generator=content_generator)
Example #5
File: hunt.py Project: hfakar/grr
    def Handle(self, args, token=None):
        iop_cls = instant_output_plugin.InstantOutputPlugin
        plugin_cls = iop_cls.GetPluginClassByPluginName(args.plugin_name)

        hunt_urn = args.hunt_id.ToURN()
        try:
            aff4.FACTORY.Open(hunt_urn,
                              aff4_type=implementation.GRRHunt,
                              mode="rw",
                              token=token)
        except aff4.InstantiationError:
            raise HuntNotFoundError("Hunt with id %s could not be found" %
                                    args.hunt_id)

        output_collection = implementation.GRRHunt.TypedResultCollectionForHID(
            hunt_urn)

        plugin = plugin_cls(source_urn=hunt_urn, token=token)
        return api_call_handler_base.ApiBinaryStream(
            plugin.output_file_name,
            content_generator=instant_output_plugin.
            ApplyPluginToMultiTypeCollection(plugin, output_collection))
Example #6
File: vfs.py Project: hfakar/grr
  def Handle(self, args, token=None):
    ValidateVfsPath(args.file_path)

    if args.timestamp:
      age = args.timestamp
    else:
      age = aff4.NEWEST_TIME

    try:
      file_obj = aff4.FACTORY.Open(
          args.client_id.ToClientURN().Add(args.file_path),
          aff4_type=aff4.AFF4Stream,
          mode="r",
          age=age,
          token=token)

      file_content_missing = not file_obj.GetContentAge()
    except aff4.InstantiationError:
      file_content_missing = True

    if file_content_missing:
      raise FileContentNotFoundError(
          "File %s with timestamp %s wasn't found on client %s" %
          (utils.SmartStr(args.file_path), utils.SmartStr(args.timestamp),
           utils.SmartStr(args.client_id)))

    total_size = _Aff4Size(file_obj)
    if not args.length:
      args.length = total_size - args.offset
    else:
      # Make sure args.length is in the allowed range.
      args.length = min(abs(args.length), total_size - args.offset)

    generator = self._GenerateFile(file_obj, args.offset, args.length)

    return api_call_handler_base.ApiBinaryStream(
        filename=file_obj.urn.Basename(),
        content_generator=generator,
        content_length=args.length)
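The _GenerateFile helper used above is not shown. A minimal sketch, assuming the AFF4 stream exposes Seek/Read methods and that the handler defines a CHUNK_SIZE constant (both assumptions, not taken from this snippet):

  def _GenerateFile(self, file_obj, offset, length):
    # Seek to the requested offset and serve exactly `length` bytes in
    # fixed-size chunks.
    file_obj.Seek(offset)
    remaining = length
    while remaining > 0:
      chunk = file_obj.Read(min(self.CHUNK_SIZE, remaining))
      if not chunk:
        break
      remaining -= len(chunk)
      yield chunk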
Example #7
File: vfs.py Project: hfakar/grr
  def Handle(self, args, token=None):
    client_urn = args.client_id.ToClientURN()
    path = args.file_path
    if not path:
      start_urns = [client_urn.Add(p) for p in ROOT_FILES_WHITELIST]
      prefix = "vfs_" + re.sub("[^0-9a-zA-Z]", "_",
                               utils.SmartStr(args.client_id))
    else:
      ValidateVfsPath(args.file_path)
      start_urns = [client_urn.Add(args.file_path)]
      prefix = "vfs_" + re.sub("[^0-9a-zA-Z]", "_",
                               start_urns[0].Path()).strip("_")

    if args.timestamp:
      age = args.timestamp
    else:
      age = aff4.NEWEST_TIME

    content_generator = self._GenerateContent(
        start_urns, prefix, age=age, token=token)
    return api_call_handler_base.ApiBinaryStream(
        prefix + ".zip", content_generator=content_generator)
Example #8
    def Handle(
        self,
        args: ApiGetCollectedHuntTimelinesArgs,
        context: Optional[api_call_context.ApiCallContext] = None,
    ) -> api_call_handler_base.ApiBinaryStream:
        """Handles requests for the hunt timelines export API call."""
        hunt_id = str(args.hunt_id)

        hunt_obj = data_store.REL_DB.ReadHuntObject(hunt_id)
        if hunt_obj.args.standard.flow_name != timeline.TimelineFlow.__name__:
            message = f"Hunt '{hunt_id}' is not a timeline hunt"
            raise ValueError(message)

        fmt = args.format
        if (fmt != timeline_pb2.ApiGetCollectedTimelineArgs.RAW_GZCHUNKED
                and fmt != timeline_pb2.ApiGetCollectedTimelineArgs.BODY):
            message = f"Incorrect timeline export format: {fmt}"
            raise ValueError(message)

        filename = f"timelines_{hunt_id}.zip"
        content = self._GenerateArchive(args)
        return api_call_handler_base.ApiBinaryStream(filename, content)
Example #9
  def Handle(self, args, context=None):
    hunt_id = str(args.hunt_id)
    source_urn = rdfvalue.RDFURN("hunts").Add(hunt_id)

    iop_cls = instant_output_plugin.InstantOutputPlugin
    plugin_cls = iop_cls.GetPluginClassByPluginName(args.plugin_name)
    # TODO(user): Instant output plugins shouldn't depend on contexts
    # and URNs.
    plugin = plugin_cls(
        source_urn=source_urn,
        token=access_control.ACLToken(username=context.username))

    types = data_store.REL_DB.CountHuntResultsByType(hunt_id)

    def FetchFn(type_name):
      """Fetches all hunt results of a given type."""
      offset = 0
      while True:
        results = data_store.REL_DB.ReadHuntResults(
            hunt_id,
            offset=offset,
            count=self._RESULTS_PAGE_SIZE,
            with_type=type_name)

        if not results:
          break

        for r in results:
          msg = r.AsLegacyGrrMessage()
          msg.source_urn = source_urn
          yield msg

        offset += self._RESULTS_PAGE_SIZE

    content_generator = instant_output_plugin.ApplyPluginToTypedCollection(
        plugin, types, FetchFn)

    return api_call_handler_base.ApiBinaryStream(
        plugin.output_file_name, content_generator=content_generator)
Example #10
File: flow.py Project: rainser/grr
  def Handle(self, args, token=None):
    flow_urn = args.flow_id.ResolveClientFlowURN(args.client_id, token=token)
    flow_obj = aff4.FACTORY.Open(
        flow_urn, aff4_type=flow.GRRFlow, mode="r", token=token)

    flow_api_object = ApiFlow().InitFromAff4Object(
        flow_obj, flow_id=args.flow_id)
    description = (
        "Files downloaded by flow %s (%s) that ran on client %s by "
        "user %s on %s" % (flow_api_object.name, args.flow_id, args.client_id,
                           flow_api_object.creator, flow_api_object.started_at))

    target_file_prefix = "%s_flow_%s_%s" % (
        args.client_id, flow_obj.runner_args.flow_name,
        flow_urn.Basename().replace(":", "_"))

    collection = flow.GRRFlow.ResultCollectionForFID(flow_urn)

    if args.archive_format == args.ArchiveFormat.ZIP:
      archive_format = api_call_handler_utils.CollectionArchiveGenerator.ZIP
      file_extension = ".zip"
    elif args.archive_format == args.ArchiveFormat.TAR_GZ:
      archive_format = api_call_handler_utils.CollectionArchiveGenerator.TAR_GZ
      file_extension = ".tar.gz"
    else:
      raise ValueError("Unknown archive format: %s" % args.archive_format)

    generator = api_call_handler_utils.CollectionArchiveGenerator(
        prefix=target_file_prefix,
        description=description,
        archive_format=archive_format,
        predicate=self._BuildPredicate(args.client_id, token=token),
        client_id=args.client_id.ToClientURN())
    content_generator = self._WrapContentGenerator(
        generator, collection, args, token=token)
    return api_call_handler_base.ApiBinaryStream(
        target_file_prefix + file_extension,
        content_generator=content_generator)
Example #11
File: hunt.py Project: hfakar/grr
    def Handle(self, args, token=None):
        hunt_urn = args.hunt_id.ToURN()
        hunt = aff4.FACTORY.Open(hunt_urn,
                                 aff4_type=implementation.GRRHunt,
                                 token=token)

        hunt_api_object = ApiHunt().InitFromAff4Object(hunt)
        description = (
            "Files downloaded by hunt %s (%s, '%s') created by user %s "
            "on %s" % (hunt_api_object.name, hunt_api_object.urn.Basename(),
                       hunt_api_object.description, hunt_api_object.creator,
                       hunt_api_object.created))

        collection = implementation.GRRHunt.ResultCollectionForHID(hunt_urn)

        target_file_prefix = "hunt_" + hunt.urn.Basename().replace(":", "_")

        if args.archive_format == args.ArchiveFormat.ZIP:
            archive_format = api_call_handler_utils.CollectionArchiveGenerator.ZIP
            file_extension = ".zip"
        elif args.archive_format == args.ArchiveFormat.TAR_GZ:
            archive_format = api_call_handler_utils.CollectionArchiveGenerator.TAR_GZ
            file_extension = ".tar.gz"
        else:
            raise ValueError("Unknown archive format: %s" %
                             args.archive_format)

        generator = api_call_handler_utils.CollectionArchiveGenerator(
            prefix=target_file_prefix,
            description=description,
            archive_format=archive_format)
        content_generator = self._WrapContentGenerator(generator,
                                                       collection,
                                                       args,
                                                       token=token)
        return api_call_handler_base.ApiBinaryStream(
            target_file_prefix + file_extension,
            content_generator=content_generator)
Example #12
    def Handle(self, args, token=None):
        flow_api_object, flow_results = self._GetFlow(args, token)

        description = (
            "Files downloaded by flow %s (%s) that ran on client %s by "
            "user %s on %s" %
            (flow_api_object.name, args.flow_id, args.client_id,
             flow_api_object.creator, flow_api_object.started_at))

        target_file_prefix = "%s_flow_%s_%s" % (
            args.client_id, flow_api_object.name, str(
                flow_api_object.flow_id).replace(":", "_"))

        if args.archive_format == args.ArchiveFormat.ZIP:
            archive_format = archive_generator.CollectionArchiveGenerator.ZIP
            file_extension = ".zip"
        elif args.archive_format == args.ArchiveFormat.TAR_GZ:
            archive_format = archive_generator.CollectionArchiveGenerator.TAR_GZ
            file_extension = ".tar.gz"
        else:
            raise ValueError("Unknown archive format: %s" %
                             args.archive_format)

        generator = archive_generator.CollectionArchiveGenerator(
            prefix=target_file_prefix,
            description=description,
            archive_format=archive_format,
            predicate=self._BuildPredicate(unicode(args.client_id),
                                           token=token),
            client_id=args.client_id.ToString())
        content_generator = self._WrapContentGenerator(generator,
                                                       flow_results,
                                                       args,
                                                       token=token)
        return api_call_handler_base.ApiBinaryStream(
            target_file_prefix + file_extension,
            content_generator=content_generator)
Example #13
  def Handle(self, unused_args, context=None):
    return api_call_handler_base.ApiBinaryStream(
        "test.ext",
        content_generator=self._Generate(),
        content_length=1337)
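The _Generate helper is omitted here; since the handler declares content_length=1337, a matching sketch (assumed, not from the original test) only needs to yield that many bytes:

  def _Generate(self):
    yield b"x" * 1337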
Example #14
File: hunt.py Project: hfakar/grr
    def Handle(self, args, token=None):
        if not args.hunt_id:
            raise ValueError("hunt_id can't be None")

        if not args.client_id:
            raise ValueError("client_id can't be None")

        if not args.vfs_path:
            raise ValueError("vfs_path can't be None")

        if not args.timestamp:
            raise ValueError("timestamp can't be None")

        api_vfs.ValidateVfsPath(args.vfs_path)

        results = implementation.GRRHunt.ResultCollectionForHID(
            args.hunt_id.ToURN())

        expected_aff4_path = args.client_id.ToClientURN().Add(args.vfs_path)
        # TODO(user): should after_timestamp be strictly less than the desired
        # timestamp.
        timestamp = rdfvalue.RDFDatetime(int(args.timestamp) - 1)

        # If the entry corresponding to a given path is not found within
        # MAX_RECORDS_TO_CHECK from a given timestamp, we report a 404.
        for _, item in results.Scan(
                after_timestamp=timestamp.AsMicrosecondsSinceEpoch(),
                max_records=self.MAX_RECORDS_TO_CHECK):
            try:
                # Do not pass the client id we got from the caller. This will
                # get filled automatically from the hunt results and we check
                # later that the aff4_path we get is the same as the one that
                # was requested.
                aff4_path = export.CollectionItemToAff4Path(item,
                                                            client_id=None)
            except export.ItemNotExportableError:
                continue

            if aff4_path != expected_aff4_path:
                continue

            try:
                aff4_stream = aff4.FACTORY.Open(aff4_path,
                                                aff4_type=aff4.AFF4Stream,
                                                token=token)
                if not aff4_stream.GetContentAge():
                    break

                return api_call_handler_base.ApiBinaryStream(
                    "%s_%s" %
                    (args.client_id, utils.SmartStr(aff4_path.Basename())),
                    content_generator=self._GenerateFile(aff4_stream),
                    content_length=len(aff4_stream))
            except aff4.InstantiationError:
                break

        raise HuntFileNotFoundError(
            "File %s with timestamp %s and client %s "
            "wasn't found among the results of hunt %s" %
            (utils.SmartStr(args.vfs_path), utils.SmartStr(args.timestamp),
             utils.SmartStr(args.client_id), utils.SmartStr(args.hunt_id)))
Example #15
  def _HandleDefaultFormat(self, args):
    items = _GetTimelineItems(args.client_id, args.file_path)
    return api_call_handler_base.ApiBinaryStream(
        "%s_%s_timeline" %
        (args.client_id, os.path.basename(args.file_path)),
        content_generator=self._GenerateDefaultExport(items))
Example #16
File: vfs.py Project: hfakar/grr
  def _HandleBodyFormat(self, args):
    file_infos = _GetTimelineStatEntries(
        args.client_id, args.file_path, with_history=False)
    return api_call_handler_base.ApiBinaryStream(
        "%s_%s_timeline" % (args.client_id, os.path.basename(args.file_path)),
        content_generator=self._GenerateBodyExport(file_infos))
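The _GenerateBodyExport helper is not included above. A rough sketch of one, assuming file_infos yields (path, stat entry) pairs with POSIX-style attribute names, rendering each entry as a Sleuthkit body-format line (MD5|name|inode|mode|UID|GID|size|atime|mtime|ctime|crtime):

  def _GenerateBodyExport(self, file_infos):
    # Assumed shape of file_infos; numeric mode is used for simplicity.
    for path, st in file_infos:
      line = "0|%s|%d|%d|%d|%d|%d|%d|%d|%d|0\n" % (
          path, st.st_ino, st.st_mode, st.st_uid, st.st_gid, st.st_size,
          st.st_atime, st.st_mtime, st.st_ctime)
      yield line.encode("utf-8")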