Example 1
  def RekallAction(self, rekall_request):
    # Fail the request unless memory is read through the /proc/kcore device.
    if rekall_request.device.path != "/proc/kcore":
      return [rdf_flows.GrrStatus(
          status=rdf_flows.GrrStatus.ReturnedStatus.GENERIC_ERROR,
          error_message="Should use kcore device when present.")]

    # Otherwise return an empty Rekall result and mark the iterator finished.
    response = rdf_rekall_types.RekallResponse(json_messages="{}")
    return [response, rdf_client.Iterator(state="FINISHED")]
Example 2
    def LoadProfile(self, name, **kw):
        """Wraps the Rekall profile's LoadProfile to fetch profiles from GRR."""
        profile = None

        # If the user specified a special profile path, we use their choice.
        try:
            profile = super(GrrRekallSession, self).LoadProfile(name, **kw)
        except io_manager.IOManagerError as e:
            # Currently, Rekall will raise when the repository directory is not
            # created. This is fine, because we'll create the directory after
            # WriteRekallProfile runs a few lines later.
            self.logging.warning(e)

        if profile:
            return profile

        # Can't load the profile; we need to ask the server for it.
        self.logging.info("Asking server for profile %s", name)
        self.action.SendReply(
            rekall_types.RekallResponse(
                missing_profile=name,
                repository_version=constants.PROFILE_REPOSITORY_VERSION,
            ))

        # Wait for the server to wake us up. When we wake up, the server should
        # have sent the profile over by calling WriteRekallProfile.
        self.action.Suspend()

        # Now the server should have sent the data already. We try to load the
        # profile one more time.
        return super(GrrRekallSession, self).LoadProfile(name, use_cache=False)
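
Examples 2 and 8 implement the same ask/suspend/retry pattern: when the local repository cannot supply the requested profile, the client action replies with a missing_profile marker, suspends itself until the server has delivered the profile (via WriteRekallProfile), and then retries the load with the cache disabled. Below is a minimal, self-contained sketch of that control flow; FakeProfileStore, FakeAction and fetch_or_request are hypothetical stand-ins for illustration, not GRR or Rekall APIs.

# Minimal sketch of the ask/suspend/retry pattern from Examples 2 and 8.
# FakeProfileStore and FakeAction are hypothetical stand-ins, not GRR classes.


class FakeProfileStore(object):
    """A local profile cache that may be missing entries."""

    def __init__(self):
        self._profiles = {}

    def load_profile(self, name):
        return self._profiles.get(name)

    def write_profile(self, name, data):
        self._profiles[name] = data


class FakeAction(object):
    """Simulates the client action's link back to the server."""

    def __init__(self, store, server_profiles):
        self._store = store
        self._server_profiles = server_profiles
        self._requested = []

    def send_reply(self, missing_profile):
        # Mirrors SendReply(RekallResponse(missing_profile=...)) above.
        self._requested.append(missing_profile)

    def suspend(self):
        # Mirrors Suspend(): while the action is paused, the server writes the
        # requested profile into the local store (WriteRekallProfile above).
        for name in self._requested:
            if name in self._server_profiles:
                self._store.write_profile(name, self._server_profiles[name])


def fetch_or_request(store, action, name):
    """Loads a profile, asking the server for it if it is missing locally."""
    profile = store.load_profile(name)
    if profile:
        return profile
    action.send_reply(name)           # Tell the server which profile is missing.
    action.suspend()                  # Block until the server has delivered it.
    return store.load_profile(name)   # Retry; the profile should now be cached.


if __name__ == "__main__":
    store = FakeProfileStore()
    action = FakeAction(store, server_profiles={"nt/GUID/ABC123": "profile data"})
    print(fetch_or_request(store, action, "nt/GUID/ABC123"))  # -> profile data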
Example 3
    def RekallAction(self, request):
        self.rekall_request = request

        # Pretend Rekall returned the memory file.
        json_messages = """
        [["file",{"path": "%s", "pathtype": "TMPFILE"}]]
        """ % self.memory_file
        return [
            rdf_rekall_types.RekallResponse(
                json_messages=json_messages, plugin="aff4acquire"),
            rdf_client.Iterator(state="FINISHED")
        ]
Example 4
    def RekallAction(self, _):
        # Generate this file with:
        # rekall -r data -f win7_trial_64bit.raw pslist > rekall_pslist_result.dat
        ps_list_file = os.path.join(config_lib.CONFIG["Test.data_dir"],
                                    self.result_filename)
        with open(ps_list_file) as fd:
            json_messages = fd.read(10000000)
        result = rdf_rekall_types.RekallResponse(
            json_messages=json_messages,
            plugin="pslist",
            client_urn=self.client_id)

        return [result, rdf_client.Iterator(state="FINISHED")]
Example 5
    def DeleteFiles(self, responses):
        # Check that the MultiGetFile flow worked.
        if not responses.success:
            raise flow.FlowError("Could not get files: %s" % responses.status)

        for output_file in self.state.output_files:
            self.CallClient(server_stubs.DeleteGRRTempFiles,
                            output_file,
                            next_state="LogDeleteFiles")

        # Let calling flows know where files ended up in AFF4 space.
        self.SendReply(
            rekall_types.RekallResponse(downloaded_files=[
                x.AFF4Path(self.client_id) for x in responses
            ]))
Example 6
    def write_data_stream(self):
        """Prepares a RekallResponse and send to the server."""
        if self.data:

            response_msg = rekall_types.RekallResponse(
                json_messages=self.robust_encoder.encode(self.data),
                json_context_messages=self.robust_encoder.encode(
                    self.context_messages.items()),
                plugin=self.plugin)

            self.context_messages = self.new_context_messages
            self.new_context_messages = {}

            # Queue the response to the server.
            self.action.SendReply(response_msg)
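
Example 6 double-buffers context messages: a flushed batch is sent together with the context that was current when the batch was collected (context_messages), while updates gathered in the meantime (new_context_messages) only become current for the next batch. Below is a minimal sketch of that buffering, assuming a hypothetical flush callback in place of self.action.SendReply and json.dumps in place of the robust encoder.

# Minimal sketch of the context-message double-buffering in Example 6.
# DataStreamBuffer and the flush callback are hypothetical stand-ins.
import json


class DataStreamBuffer(object):

    def __init__(self, flush):
        self.data = []
        self.context_messages = {}
        self.new_context_messages = {}
        self._flush = flush

    def add_message(self, message):
        self.data.append(message)

    def update_context(self, key, value):
        # Context updates collected now only take effect for the next batch.
        self.new_context_messages[key] = value

    def write_data_stream(self):
        if self.data:
            self._flush({
                "json_messages": json.dumps(self.data),
                "json_context_messages": json.dumps(
                    list(self.context_messages.items())),
            })
            # Promote the pending context and start a fresh pending buffer.
            self.context_messages = self.new_context_messages
            self.new_context_messages = {}
            self.data = []


def show(payload):
    # Stand-in for self.action.SendReply(response_msg).
    print(payload)


if __name__ == "__main__":
    buf = DataStreamBuffer(flush=show)
    buf.update_context("section", "pslist")
    buf.add_message(["r", {"pid": 4}])
    buf.write_data_stream()   # First batch goes out with an empty context.
    buf.add_message(["r", {"pid": 8}])
    buf.write_data_stream()   # Second batch carries {"section": "pslist"}.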
Example 7
    def testBasicParsing(self):
        ps_list_file = os.path.join(config.CONFIG["Test.data_dir"],
                                    "rekall_vad_result.dat.gz")

        result = rdf_rekall_types.RekallResponse(
            json_messages=gzip.open(ps_list_file, "rb").read(),
            plugin="pslist")

        knowledge_base = rdf_client.KnowledgeBase()
        knowledge_base.environ_systemdrive = "C:"

        parser = rekall_artifact_parser.RekallVADParser()
        parsed_pathspecs = list(parser.Parse(result, knowledge_base))

        paths = [p.path for p in parsed_pathspecs]
        self.assertIn(u"C:\\Windows\\System32\\spoolsv.exe", paths)
Example 8
    def LoadProfile(self, name):
        """Wraps the Rekall profile's LoadProfile to fetch profiles from GRR."""
        # If the user specified a special profile path, we use their choice.
        profile = super(GrrRekallSession, self).LoadProfile(name)
        if profile:
            return profile

        # Can't load the profile; we need to ask the server for it.
        logging.debug("Asking server for profile %s", name)
        self.action.SendReply(
            rekall_types.RekallResponse(
                missing_profile=name,
                repository_version=constants.PROFILE_REPOSITORY_VERSION,
            ))

        # Wait for the server to wake us up. When we wake up, the server should
        # have sent the profile over by calling WriteRekallProfile.
        self.action.Suspend()

        # Now the server should have sent the data already. We try to load the
        # profile one more time.
        return super(GrrRekallSession, self).LoadProfile(name, use_cache=False)
Example 9
  def RekallAction(self, _):
    ps_list_file = os.path.join(config.CONFIG["Test.data_dir"],
                                "rekall_vad_result.dat.gz")
    response = rdf_rekall_types.RekallResponse(
        json_messages=gzip.open(ps_list_file, "rb").read(), plugin="pslist")

    # If we are given process names here, we need to craft a Rekall result
    # containing them, so that they point to valid files in the fixture.
    if self.process_list:
      json_data = json.loads(response.json_messages)
      template = json_data[7]
      if template[1]["filename"] != ur"\Windows\System32\ntdll.dll":
        raise RuntimeError("Test data invalid.")

      json_data = []
      for process in self.process_list:
        new_entry = copy.deepcopy(template)
        new_entry[1]["filename"] = process
        json_data.append(new_entry)
      response.json_messages = json.dumps(json_data)

    return [response, rdf_client.Iterator(state="FINISHED")]
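
Example 9 clones one known-good entry from the canned Rekall output and rewrites its filename for each process under test, so the crafted result still refers to files that exist in the fixture. The same templating step in isolation, on made-up toy messages (only the [tag, values] message shape is borrowed from the snippet above):

import copy
import json

# Toy Rekall-style output: a list of [tag, values] messages. The shape is
# borrowed from Example 9; the concrete values here are made up.
json_messages = json.dumps([
    ["r", {"filename": "\\Windows\\System32\\ntdll.dll", "pid": 4}],
])

process_list = ["\\Windows\\System32\\svchost.exe",
                "\\Windows\\System32\\lsass.exe"]

json_data = json.loads(json_messages)
template = json_data[0]          # A known-good entry to clone.

rewritten = []
for filename in process_list:
    new_entry = copy.deepcopy(template)
    new_entry[1]["filename"] = filename
    rewritten.append(new_entry)

print(json.dumps(rewritten, indent=2))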