Example 1
    def RekallAction(self, request):
        self.rekall_request = request

        # Pretend Rekall returned the memory file.
        return [
            rdf_rekall_types.RekallResponse(json_messages="""
        [["file",{"path": "%s", "pathtype": "TMPFILE"}]]
        """ % self.memory_file,
                                            plugin="aff4acquire"),
            rdf_client.Iterator(state="FINISHED")
        ]
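The json_messages payload mocked above is just a JSON list of [message-type, payload] pairs. As a rough illustration (not GRR code; the literal path below stands in for self.memory_file), it can be decoded with the standard json module:

import json

messages = json.loads(
    '[["file", {"path": "/tmp/memory_dump", "pathtype": "TMPFILE"}]]')
for message_type, payload in messages:
    if message_type == "file":
        # Prints: /tmp/memory_dump TMPFILE
        print(payload["path"], payload["pathtype"])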
Example 2
  def RekallAction(self, _):
    # Generate this file with:
    # rekal --output data -f win7_trial_64bit.raw \
    # pslist | gzip - > rekall_pslist_result.dat.gz
    ps_list_file = os.path.join(config.CONFIG["Test.data_dir"],
                                self.result_filename)
    result = rdf_rekall_types.RekallResponse(
        json_messages=gzip.open(ps_list_file).read(),
        plugin="pslist",
        client_urn=self.client_id)

    return [result, rdf_client.Iterator(state="FINISHED")]
Example 3
File: memory.py Project: qsdj/grr
    def DeleteFiles(self, responses):
        # Check that the MultiGetFile flow worked.
        if not responses.success:
            raise flow.FlowError("Could not get files: %s" % responses.status)

        for output_file in self.state.output_files:
            self.CallClient(server_stubs.DeleteGRRTempFiles,
                            output_file,
                            next_state="LogDeleteFiles")

        # Let calling flows know where files ended up in AFF4 space.
        self.SendReply(
            rdf_rekall_types.RekallResponse(downloaded_files=[
                x.AFF4Path(self.client_id) for x in responses
            ]))
Example 4
  def testBasicParsing(self):
    ps_list_file = os.path.join(config.CONFIG["Test.data_dir"],
                                "rekall_vad_result.dat.gz")

    result = rdf_rekall_types.RekallResponse(
        json_messages=gzip.open(ps_list_file, "rb").read(), plugin="pslist")

    knowledge_base = rdf_client.KnowledgeBase()
    knowledge_base.environ_systemdrive = "C:"

    parser = rekall_artifact_parser.RekallVADParser()
    parsed_pathspecs = list(parser.Parse(result, knowledge_base))

    paths = [p.path for p in parsed_pathspecs]
    self.assertIn(u"C:\\Windows\\System32\\spoolsv.exe", paths)
Example 5
    def write_data_stream(self):
        """Prepares a RekallResponse and send to the server."""
        if self.data:
            response_msg = rdf_rekall_types.RekallResponse(
                json_messages=self.robust_encoder.encode(self.data),
                json_context_messages=self.robust_encoder.encode(
                    self.context_messages.items()),
                plugin=self.plugin)

            self.context_messages = self.new_context_messages
            self.new_context_messages = {}

            # Queue the response to the server.
            self.action.SendReply(response_msg)
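The flush pattern above (encode the buffered rows plus their context, send them, then roll the freshly collected context over to the next batch) can be illustrated with a standalone sketch. This is an assumption-laden analogue that uses plain json.dumps in place of the robust_encoder and a callable in place of self.action.SendReply; it is not GRR's implementation:

import json

class ResponseBuffer(object):

    def __init__(self, plugin, send):
        self.plugin = plugin
        self.send = send                 # callable standing in for self.action.SendReply
        self.data = []
        self.context_messages = {}
        self.new_context_messages = {}

    def flush(self):
        if self.data:
            self.send({
                "plugin": self.plugin,
                "json_messages": json.dumps(self.data),
                "json_context_messages": json.dumps(
                    list(self.context_messages.items())),
            })
            # Context collected while filling this batch applies to the next one.
            self.context_messages = self.new_context_messages
            self.new_context_messages = {}
            self.data = []

For example, appending rows to buffer.data and then calling buffer.flush() emits one RekallResponse-shaped dict and resets the batch.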
Example 6
  def RekallAction(self, _):
    ps_list_file = os.path.join(config.CONFIG["Test.data_dir"],
                                "rekall_vad_result.dat.gz")
    response = rdf_rekall_types.RekallResponse(
        json_messages=gzip.open(ps_list_file, "rb").read(), plugin="pslist")

    # If we are given process names here we need to craft a Rekall result
    # containing them. This is so they point to valid files in the fixture.
    if self.process_list:
      json_data = json.loads(response.json_messages)
      template = json_data[7]
      if template[1]["filename"] != ur"\Windows\System32\ntdll.dll":
        raise RuntimeError("Test data invalid.")

      json_data = []
      for process in self.process_list:
        new_entry = copy.deepcopy(template)
        new_entry[1]["filename"] = process
        json_data.append(new_entry)
      response.json_messages = json.dumps(json_data)

    return [response, rdf_client.Iterator(state="FINISHED")]