Example #1
  def _SampleTwoTypeHuntResults(self,
                                client_id=None,
                                flow_id=None,
                                hunt_id=None,
                                count_per_type=5,
                                timestamp_start=10):
    self.assertIsNotNone(client_id)
    self.assertIsNotNone(flow_id)
    self.assertIsNotNone(hunt_id)

    return [
        rdf_flow_objects.FlowResult(
            client_id=client_id,
            flow_id=flow_id,
            hunt_id=hunt_id,
            tag="tag_%d" % i,
            payload=rdf_client.ClientSummary(
                client_id=client_id,
                system_manufacturer="manufacturer_%d" % i,
                install_date=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(
                    timestamp_start + i))) for i in range(count_per_type)
    ] + [
        rdf_flow_objects.FlowResult(
            client_id=client_id,
            flow_id=flow_id,
            hunt_id=hunt_id,
            tag="tag_%d" % i,
            payload=rdf_client.ClientCrash(
                client_id=client_id,
                timestamp=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(
                    timestamp_start + i))) for i in range(count_per_type)
    ]
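A minimal usage sketch for the sampler above (assuming the surrounding test fixture already provides client_id, flow_id and hunt_id, as the assertions require): the generated results can be written in one batch.

    # Hypothetical test body; the three IDs come from the fixture.
    results = self._SampleTwoTypeHuntResults(
        client_id=client_id, flow_id=flow_id, hunt_id=hunt_id)
    data_store.REL_DB.WriteFlowResults(results)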
Example #2
    def testBodyMultipleResults(self):
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = "ABCDEF42"

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = timeline.TimelineFlow.__name__
        flow_obj.create_time = rdfvalue.RDFDatetime.Now()
        data_store.REL_DB.WriteFlowObject(flow_obj)

        entry_1 = rdf_timeline.TimelineEntry()
        entry_1.path = "/foo".encode("utf-8")

        blobs_1 = list(
            rdf_timeline.TimelineEntry.SerializeStream(iter([entry_1])))
        (blob_id_1, ) = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs_1)

        result_1 = rdf_timeline.TimelineResult()
        result_1.entry_batch_blob_ids = [blob_id_1.AsBytes()]

        entry_2 = rdf_timeline.TimelineEntry()
        entry_2.path = "/bar".encode("utf-8")

        blobs_2 = list(
            rdf_timeline.TimelineEntry.SerializeStream(iter([entry_2])))
        (blob_id_2, ) = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs_2)

        result_2 = rdf_timeline.TimelineResult()
        result_2.entry_batch_blob_ids = [blob_id_2.AsBytes()]

        flow_result_1 = rdf_flow_objects.FlowResult()
        flow_result_1.client_id = client_id
        flow_result_1.flow_id = flow_id
        flow_result_1.payload = result_1

        flow_result_2 = rdf_flow_objects.FlowResult()
        flow_result_2.client_id = client_id
        flow_result_2.flow_id = flow_id
        flow_result_2.payload = result_2

        data_store.REL_DB.WriteFlowResults([flow_result_1, flow_result_2])

        args = api_timeline.ApiGetCollectedTimelineArgs()
        args.client_id = client_id
        args.flow_id = flow_id
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

        result = self.handler.Handle(args)
        content = b"".join(result.GenerateContent()).decode("utf-8")

        self.assertIn("|/foo|", content)
        self.assertIn("|/bar|", content)
Example #3
  def AddResultsToHunt(self, hunt_id, client_id, values):
    if isinstance(client_id, rdfvalue.RDFURN):
      client_id = client_id.Basename()

    if isinstance(hunt_id, rdfvalue.RDFURN):
      hunt_id = hunt_id.Basename()

    if data_store.RelationalDBEnabled():
      flow_id = self._EnsureClientHasHunt(client_id, hunt_id)

      for value in values:
        data_store.REL_DB.WriteFlowResults([
            rdf_flow_objects.FlowResult(
                client_id=client_id,
                flow_id=flow_id,
                hunt_id=hunt_id,
                payload=value)
        ])
    else:
      collection = aff4.FACTORY.Open(
          rdfvalue.RDFURN("hunts").Add(hunt_id),
          token=self.token).ResultCollection()
      with data_store.DB.GetMutationPool() as pool:
        for value in values:
          collection.Add(
              rdf_flows.GrrMessage(payload=value, source=client_id),
              mutation_pool=pool)
Example #4
  def SendReply(self, response, tag=None):
    """Allows this flow to send a message to its parent flow.

    If this flow does not have a parent, the message is ignored.

    Args:
      response: An RDFValue() instance to be sent to the parent.
      tag: If specified, tag the result with this tag.

    Raises:
      ValueError: If response is not of the correct type.
    """
    if not isinstance(response, rdfvalue.RDFValue):
      raise ValueError("SendReply can only send RDFValues")

    if self.rdf_flow.parent_flow_id:
      response = rdf_flow_objects.FlowResponse(
          client_id=self.rdf_flow.client_id,
          request_id=self.rdf_flow.parent_request_id,
          response_id=self.GetNextResponseId(),
          payload=response,
          flow_id=self.rdf_flow.parent_flow_id,
          tag=tag)

      self.flow_responses.append(response)
    else:
      reply = rdf_flow_objects.FlowResult(payload=response, tag=tag)
      self.replies_to_write.append(reply)
      self.replies_to_process.append(reply)

    self.rdf_flow.num_replies_sent += 1
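As a rough illustration (not taken from GRR itself), a flow's state method might forward client responses through SendReply; the method name and tag below are placeholders:

  def ProcessListing(self, responses):
    # Hypothetical state method: each response either becomes a
    # FlowResponse for the parent flow or a FlowResult otherwise.
    for response in responses:
      self.SendReply(response, tag="listing")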
Example #5
    def testLaunchBinaryFlowResultsHaveReadableStdOutAndStdErr(self):
        flow_id = flow_test_lib.StartFlow(gui_test_lib.RecursiveTestFlow,
                                          client_id=self.client_id)

        stderr = "Oh, ok, this is just a string 昨"
        stdout = "\00\00\00\00"
        response = rdf_client_action.ExecuteResponse(
            stderr=stderr.encode("utf-8"), stdout=stdout.encode("utf-8"))

        if data_store.RelationalDBEnabled():
            data_store.REL_DB.WriteFlowResults([
                rdf_flow_objects.FlowResult(client_id=self.client_id,
                                            flow_id=flow_id,
                                            payload=response)
            ])
        else:
            with data_store.DB.GetMutationPool() as pool:
                flow.GRRFlow.ResultCollectionForFID(
                    rdfvalue.RDFURN(
                        self.client_id).Add("flows").Add(flow_id)).Add(
                            response, mutation_pool=pool)

        self.Open("/#/clients/%s/flows/%s/results" % (self.client_id, flow_id))
        # jQuery treats the backslash ('\') character as a special one, hence
        # we have to escape it twice: once for JavaScript itself and a second
        # time for jQuery.
        self.WaitUntil(
            self.IsElementPresent, r"css=grr-flow-inspector:contains('Oh, ok, "
            r"this is just a string \\\\xe6\\\\x98\\\\xa8')")
        self.WaitUntil(
            self.IsElementPresent,
            r"css=grr-flow-inspector:contains('\\\\x00\\\\x00\\\\x00\\\\x00')")
Example #6
  def ReadHuntResults(self,
                      hunt_id,
                      offset,
                      count,
                      with_tag=None,
                      with_type=None,
                      with_substring=None):
    """Reads hunt results of a given hunt using given query options."""
    all_results = []
    for flow_obj in self._GetHuntFlows(hunt_id):
      for entry in self.ReadFlowResults(
          flow_obj.client_id,
          flow_obj.flow_id,
          0,
          sys.maxsize,
          with_tag=with_tag,
          with_type=with_type,
          with_substring=with_substring):
        all_results.append(
            rdf_flow_objects.FlowResult(
                hunt_id=hunt_id,
                client_id=flow_obj.client_id,
                flow_id=flow_obj.flow_id,
                timestamp=entry.timestamp,
                tag=entry.tag,
                payload=entry.payload))

    return sorted(all_results, key=lambda x: x.timestamp)[offset:offset + count]
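Since this in-memory implementation sorts all matching results by timestamp and then slices, offset and count behave like classic pagination. A sketch (db standing in for an instance of this database class; the with_type value is an assumption):

  # Hypothetical paging loop over hunt results, 100 per page.
  offset = 0
  while True:
    page = db.ReadHuntResults(hunt_id, offset, 100, with_type="ClientSummary")
    if not page:
      break
    offset += len(page)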
Example #7
    def SendReply(self,
                  response: rdfvalue.RDFValue,
                  tag: Optional[str] = None) -> None:
        """Allows this flow to send a message to its parent flow.

    If this flow does not have a parent, the message is saved to the database
    as a flow result.

    Args:
      response: An RDFValue() instance to be sent to the parent.
      tag: If specified, tag the result with this tag.

    Raises:
      ValueError: If response is not of the correct type.
    """
        if not isinstance(response, rdfvalue.RDFValue):
            raise ValueError("SendReply can only send RDFValues")

        if not any(isinstance(response, t) for t in self.result_types):
            logging.warning("Flow %s sends response of unexpected type %s.",
                            type(self).__name__,
                            type(response).__name__)

        reply = rdf_flow_objects.FlowResult(
            client_id=self.rdf_flow.client_id,
            flow_id=self.rdf_flow.flow_id,
            hunt_id=self.rdf_flow.parent_hunt_id,
            payload=response,
            tag=tag)
        if self.rdf_flow.parent_flow_id:
            response = rdf_flow_objects.FlowResponse(
                client_id=self.rdf_flow.client_id,
                request_id=self.rdf_flow.parent_request_id,
                response_id=self.GetNextResponseId(),
                payload=response,
                flow_id=self.rdf_flow.parent_flow_id,
                tag=tag)

            self.flow_responses.append(response)
            # For nested flows we want the replies to be written,
            # but not to be processed by output plugins.
            self.replies_to_write.append(reply)
        else:
            self.replies_to_write.append(reply)
            self.replies_to_process.append(reply)

        self.rdf_flow.num_replies_sent += 1

        # Keeping track of result types/tags in a plain Python
        # _num_replies_per_type_tag dict. In RDFValues/proto2 we have to represent
        # dictionaries as lists of key-value pairs (i.e. there's no library
        # support for dicts as data structures). Hence, updating a key would require
        # iterating over the pairs - which might get expensive for hundreds of
        # thousands of results. To avoid the issue we keep a non-serialized Python
        # dict to be later accumulated into a serializable FlowResultCount
        # in PersistState().
        key = (type(response).__name__, tag or "")
        self._num_replies_per_type_tag[key] += 1
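The comment above boils down to keeping a plain Counter keyed by (type name, tag) and converting it to a proto-friendly list of key/value pairs only once, at persist time. A standalone sketch of that idea (names are illustrative, not GRR's):

from collections import Counter

num_replies_per_type_tag = Counter()
num_replies_per_type_tag[("ClientSummary", "")] += 1
num_replies_per_type_tag[("ClientSummary", "tag_1")] += 1

# Converted once, e.g. in PersistState(), into serializable pairs.
result_counts = [
    dict(type=t, tag=tag, count=n)
    for (t, tag), n in num_replies_per_type_tag.items()
]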
Example #8
  def Run(self):
    client_id = self.SetupClient(0)

    hunt_id = self.CreateHunt(creator=self.token.username)
    flow_id = flow_test_lib.StartFlow(
        flows_processes.ListProcesses,
        client_id=client_id,
        parent_hunt_id=hunt_id)

    with test_lib.FakeTime(rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2)):
      data_store.REL_DB.WriteFlowResults([
          rdf_flow_objects.FlowResult(
              client_id=client_id,
              flow_id=flow_id,
              hunt_id=hunt_id,
              payload=rdfvalue.RDFString("blah1"))
      ])

    with test_lib.FakeTime(rdfvalue.RDFDatetime.FromSecondsSinceEpoch(43)):
      data_store.REL_DB.WriteFlowResults([
          rdf_flow_objects.FlowResult(
              client_id=client_id,
              flow_id=flow_id,
              hunt_id=hunt_id,
              payload=rdfvalue.RDFString("blah2-foo"))
      ])

    replace = {hunt_id: "H:123456"}
    self.Check(
        "ListHuntResults",
        args=hunt_plugin.ApiListHuntResultsArgs(hunt_id=hunt_id),
        replace=replace)
    self.Check(
        "ListHuntResults",
        args=hunt_plugin.ApiListHuntResultsArgs(hunt_id=hunt_id, count=1),
        replace=replace)
    self.Check(
        "ListHuntResults",
        args=hunt_plugin.ApiListHuntResultsArgs(
            hunt_id=hunt_id, offset=1, count=1),
        replace=replace)
    self.Check(
        "ListHuntResults",
        args=hunt_plugin.ApiListHuntResultsArgs(hunt_id=hunt_id, filter="foo"),
        replace=replace)
Example #9
    def AddResultsToHunt(self, hunt_id, client_id, values):
        flow_id = self._EnsureClientHasHunt(client_id, hunt_id)

        for value in values:
            data_store.REL_DB.WriteFlowResults([
                rdf_flow_objects.FlowResult(client_id=client_id,
                                            flow_id=flow_id,
                                            hunt_id=hunt_id,
                                            payload=value)
            ])
Example #10
  def _AddResultToFlow(self, flow_id, result):
    if data_store.RelationalDBFlowsEnabled():
      flow_result = rdf_flow_objects.FlowResult(
          client_id=self.client_id, flow_id=flow_id, payload=result)
      data_store.REL_DB.WriteFlowResults([flow_result])
    else:
      flow_urn = rdfvalue.RDFURN(self.client_id).Add("flows").Add(flow_id)
      with data_store.DB.GetMutationPool() as pool:
        flow.GRRFlow.ResultCollectionForFID(flow_urn).Add(
            result, mutation_pool=pool)
Example #11
def AddResultsToFlow(client_id: str,
                     flow_id: str,
                     payloads: Iterable[rdf_structs.RDFProtoStruct],
                     tag: Optional[str] = None) -> None:
  """Adds results with given payloads to a given flow."""
  data_store.REL_DB.WriteFlowResults([
      rdf_flow_objects.FlowResult(
          client_id=client_id, flow_id=flow_id, tag=tag, payload=payload)
      for payload in payloads
  ])
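A possible call site for this helper (the IDs and payload type are assumptions; any RDF proto struct would do as a payload):

AddResultsToFlow(
    client_id="C.1234567890123456",
    flow_id="ABCDEF42",
    payloads=[rdf_client_action.ExecuteResponse(stdout=b"ok")],
    tag="sample")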
Example #12
  def _SampleSingleTypeHuntResults(self, client_id=None, count=10):
    client_id = client_id or "C.1234567890123456"
    return [
        rdf_flow_objects.FlowResult(
            tag="tag_%d" % i,
            payload=rdf_client.ClientSummary(
                client_id=client_id,
                system_manufacturer="manufacturer_%d" % i,
                install_date=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(
                    10 + i))) for i in range(count)
    ]
Example #13
    def _SetupFlowWithStatEntryResults(self):
        client_id = self.SetupClient(0)
        # Start a flow. The exact type of the flow doesn't matter:
        # we'll add results manually.
        flow_id = flow_test_lib.StartFlow(processes.ListProcesses,
                                          client_id=client_id)

        data_store.REL_DB.WriteFlowResults([
            rdf_flow_objects.FlowResult(
                client_id=client_id,
                flow_id=flow_id,
                payload=rdf_client_fs.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="/foo/bar1",
                    pathtype=rdf_paths.PathSpec.PathType.OS))),
            rdf_flow_objects.FlowResult(
                client_id=client_id,
                flow_id=flow_id,
                payload=rdf_client_fs.StatEntry(pathspec=rdf_paths.PathSpec(
                    path="/foo/bar2",
                    pathtype=rdf_paths.PathSpec.PathType.OS))),
        ])

        return client_id, flow_id
Example #14
    def SendReply(self,
                  response: rdfvalue.RDFValue,
                  tag: Optional[str] = None) -> None:
        """Allows this flow to send a message to its parent flow.

    If this flow does not have a parent, the message is saved to the database
    as a flow result.

    Args:
      response: An RDFValue() instance to be sent to the parent.
      tag: If specified, tag the result with this tag.

    Raises:
      ValueError: If response is not of the correct type.
    """
        if not isinstance(response, rdfvalue.RDFValue):
            raise ValueError("SendReply can only send RDFValues")

        if not any(isinstance(response, t) for t in self.result_types):
            logging.warning("Flow %s sends response of unexpected type %s.",
                            type(self).__name__,
                            type(response).__name__)

        reply = rdf_flow_objects.FlowResult(
            client_id=self.rdf_flow.client_id,
            flow_id=self.rdf_flow.flow_id,
            hunt_id=self.rdf_flow.parent_hunt_id,
            payload=response,
            tag=tag)
        if self.rdf_flow.parent_flow_id:
            response = rdf_flow_objects.FlowResponse(
                client_id=self.rdf_flow.client_id,
                request_id=self.rdf_flow.parent_request_id,
                response_id=self.GetNextResponseId(),
                payload=response,
                flow_id=self.rdf_flow.parent_flow_id,
                tag=tag)

            self.flow_responses.append(response)
            # For nested flows we want the replies to be written,
            # but not to be processed by output plugins.
            self.replies_to_write.append(reply)
        else:
            self.replies_to_write.append(reply)
            self.replies_to_process.append(reply)

        self.rdf_flow.num_replies_sent += 1
Example #15
    def testNotAppliedParsers(self, db: abstract_db.Database) -> None:
        client_id = db_test_utils.InitializeClient(db)
        flow_id = "4815162342ABCDEF"

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = collectors.ArtifactCollectorFlow.__name__
        flow_obj.args = rdf_artifacts.ArtifactCollectorFlowArgs(
            apply_parsers=False)
        db.WriteFlowObject(flow_obj)

        flow_result = rdf_flow_objects.FlowResult()
        flow_result.client_id = client_id
        flow_result.flow_id = flow_id
        flow_result.tag = "artifact:Fake"
        flow_result.payload = rdfvalue.RDFString("foobar")
        db.WriteFlowResults([flow_result])

        args = flow_plugin.ApiListFlowApplicableParsersArgs()
        args.client_id = client_id
        args.flow_id = flow_id

        result = self.handler.Handle(args)
        self.assertCountEqual(result.parsers, [
            flow_plugin.ApiParserDescriptor(
                type=flow_plugin.ApiParserDescriptor.Type.SINGLE_RESPONSE,
                name="FakeSingleResponse",
            ),
            flow_plugin.ApiParserDescriptor(
                type=flow_plugin.ApiParserDescriptor.Type.MULTI_RESPONSE,
                name="FakeMultiResponse",
            ),
            flow_plugin.ApiParserDescriptor(
                type=flow_plugin.ApiParserDescriptor.Type.SINGLE_FILE,
                name="FakeSingleFile",
            ),
            flow_plugin.ApiParserDescriptor(
                type=flow_plugin.ApiParserDescriptor.Type.MULTI_FILE,
                name="FakeMultiFile",
            ),
        ])
Example #16
    def testNtfsFileReferenceFormatInference(self):
        entry = rdf_timeline.TimelineEntry()
        entry.path = "/foo/bar/baz".encode("utf-8")
        entry.ino = 1688849860339456

        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = "F00BA542"

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = timeline.TimelineFlow.__name__
        flow_obj.create_time = rdfvalue.RDFDatetime.Now()
        data_store.REL_DB.WriteFlowObject(flow_obj)

        blobs = list(rdf_timeline.TimelineEntry.SerializeStream(iter([entry])))
        blob_ids = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)

        result = rdf_timeline.TimelineResult()
        result.entry_batch_blob_ids = [
            blob_id.AsBytes() for blob_id in blob_ids
        ]
        result.filesystem_type = "NTFS"

        flow_result = rdf_flow_objects.FlowResult()
        flow_result.client_id = client_id
        flow_result.flow_id = flow_id
        flow_result.payload = result
        data_store.REL_DB.WriteFlowResults([flow_result])

        args = api_timeline.ApiGetCollectedTimelineArgs()
        args.client_id = client_id
        args.flow_id = flow_id
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

        result = self.handler.Handle(args)
        content = b"".join(result.GenerateContent()).decode("utf-8")

        rows = list(csv.reader(io.StringIO(content), delimiter="|"))
        self.assertLen(rows, 1)
        self.assertEqual(rows[0][1], "/foo/bar/baz")
        self.assertEqual(rows[0][2], "75520-6")
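        # 1688849860339456 == (6 << 48) | 75520, i.e. MFT entry 75520 with
        # sequence number 6, which the body format renders as "75520-6".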
Example #17
    def testListFlowApplicableParsers(self):
        client_id = self.SetupClient(0)
        flow_id = "4815162342ABCDEF"

        flow = rdf_flow_objects.Flow()
        flow.client_id = client_id
        flow.flow_id = flow_id
        flow.flow_class_name = collectors.ArtifactCollectorFlow.__name__
        flow.args = rdf_artifacts.ArtifactCollectorFlowArgs(
            apply_parsers=False)
        data_store.REL_DB.WriteFlowObject(flow)

        result = rdf_flow_objects.FlowResult()
        result.client_id = client_id
        result.flow_id = flow_id
        result.tag = "artifact:Fake"
        result.payload = rdf_client_action.ExecuteResponse(stderr=b"foobar")
        data_store.REL_DB.WriteFlowResults([result])

        class FakeParser(parser.SingleResponseParser[None]):

            supported_artifacts = ["Fake"]

            def ParseResponse(
                self,
                knowledge_base: rdf_client.KnowledgeBase,
                response: rdfvalue.RDFValue,
            ) -> Iterable[None]:
                raise NotImplementedError()

        with parser_test_lib._ParserContext("Fake", FakeParser):
            results = self.api.Client(client_id).Flow(
                flow_id).ListApplicableParsers()

        self.assertLen(results.parsers, 1)

        result = results.parsers[0]
        self.assertEqual(result.name, "Fake")
        self.assertEqual(result.type,
                         flow_pb2.ApiParserDescriptor.SINGLE_RESPONSE)
Example #18
    def testFlowWithResult(self, db: abstract_db.Database) -> None:
        client_id = "C.1234567890123456"
        flow_id = "ABCDEF92"

        db.WriteClientMetadata(client_id, last_ping=rdfvalue.RDFDatetime.Now())

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = timeline_flow.TimelineFlow.__name__
        flow_obj.create_time = rdfvalue.RDFDatetime.Now()
        db.WriteFlowObject(flow_obj)

        flow_result = rdf_flow_objects.FlowResult()
        flow_result.client_id = client_id
        flow_result.flow_id = flow_id
        flow_result.payload = rdf_timeline.TimelineResult(
            filesystem_type="ntfs")
        db.WriteFlowResults([flow_result])

        self.assertEqual(timeline_flow.FilesystemType(client_id, flow_id),
                         "ntfs")
Example #19
def _WriteTimeline(
    client_id: Text,
    entries: Sequence[rdf_timeline.TimelineEntry],
    hunt_id: Optional[Text] = None,
) -> Text:
    """Writes a timeline to the database (as fake flow result).

  Args:
    client_id: An identifier of the client for which the flow ran.
    entries: A sequence of timeline entries produced by the flow run.
    hunt_id: An (optional) identifier of a hunt the flows belong to.

  Returns:
    An identifier of the flow.
  """
    flow_id = "".join(random.choice("ABCDEF") for _ in range(8))

    flow_obj = rdf_flow_objects.Flow()
    flow_obj.flow_id = flow_id
    flow_obj.client_id = client_id
    flow_obj.flow_class_name = timeline.TimelineFlow.__name__
    flow_obj.create_time = rdfvalue.RDFDatetime.Now()
    flow_obj.parent_hunt_id = hunt_id
    data_store.REL_DB.WriteFlowObject(flow_obj)

    blobs = list(rdf_timeline.TimelineEntry.SerializeStream(iter(entries)))
    blob_ids = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)

    result = rdf_timeline.TimelineResult()
    result.entry_batch_blob_ids = [blob_id.AsBytes() for blob_id in blob_ids]

    flow_result = rdf_flow_objects.FlowResult()
    flow_result.client_id = client_id
    flow_result.flow_id = flow_id
    flow_result.payload = result

    data_store.REL_DB.WriteFlowResults([flow_result])

    return flow_id
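A minimal usage sketch (the client id and path are made up):

entry = rdf_timeline.TimelineEntry()
entry.path = "/tmp/quux".encode("utf-8")

flow_id = _WriteTimeline("C.1234567890123456", [entry])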
Example #20
  def testLaunchBinaryFlowResultsHaveReadableStdOutAndStdErr(self):
    flow_id = flow.StartFlow(
        client_id=self.client_id, flow_cls=gui_test_lib.RecursiveTestFlow)

    stderr = "Oh, ok, this is just a string 昨"
    stdout = "\00\00\00\00"
    response = rdf_client_action.ExecuteResponse(
        stderr=stderr.encode("utf-8"), stdout=stdout.encode("utf-8"))
    flow_result = rdf_flow_objects.FlowResult(tag="tag", payload=response)

    data_store.REL_DB.WriteFlowResults(self.client_id, flow_id, [flow_result])

    self.Open("/#/clients/%s/flows/%s/results" % (self.client_id, flow_id))
    # jQuery treats the backslash ('\') character as a special one, hence we
    # have to escape it twice: once for JavaScript itself and a second time
    # for jQuery.
    self.WaitUntil(
        self.IsElementPresent, r"css=grr-flow-inspector:contains('Oh, ok, "
        r"this is just a string \\\\xe6\\\\x98\\\\xa8')")
    self.WaitUntil(
        self.IsElementPresent,
        r"css=grr-flow-inspector:contains('\\\\x00\\\\x00\\\\x00\\\\x00')")
Example #21
    def testAlreadyAppliedParsers(self, db: abstract_db.Database) -> None:
        client_id = db_test_utils.InitializeClient(db)
        flow_id = "4815162342ABCDEF"

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = collectors.ArtifactCollectorFlow.__name__
        flow_obj.args = rdf_artifacts.ArtifactCollectorFlowArgs(
            apply_parsers=True)
        db.WriteFlowObject(flow_obj)

        flow_result = rdf_flow_objects.FlowResult()
        flow_result.client_id = client_id
        flow_result.flow_id = flow_id
        flow_result.tag = "artifact:Fake"
        db.WriteFlowResults([flow_result])

        args = flow_plugin.ApiListFlowApplicableParsersArgs()
        args.client_id = client_id
        args.flow_id = flow_id

        result = self.handler.Handle(args)
        self.assertEmpty(result.parsers)
Example #22
  def _AddResultToFlow(self, flow_id, result):
    flow_result = rdf_flow_objects.FlowResult(
        client_id=self.client_id, flow_id=flow_id, payload=result)
    data_store.REL_DB.WriteFlowResults([flow_result])
Example #23
    def Run(self):
        client_id = self.SetupClient(0).Basename()

        if data_store.RelationalDBEnabled():
            hunt_id = self.CreateHunt()
            flow_id = flow_test_lib.StartFlow(flows_processes.ListProcesses,
                                              client_id=client_id,
                                              parent_hunt_id=hunt_id)

            with test_lib.FakeTime(
                    rdfvalue.RDFDatetime.FromSecondsSinceEpoch(2)):
                data_store.REL_DB.WriteFlowResults([
                    rdf_flow_objects.FlowResult(
                        client_id=client_id,
                        flow_id=flow_id,
                        hunt_id=hunt_id,
                        payload=rdfvalue.RDFString("blah1"))
                ])

            with test_lib.FakeTime(
                    rdfvalue.RDFDatetime.FromSecondsSinceEpoch(43)):
                data_store.REL_DB.WriteFlowResults([
                    rdf_flow_objects.FlowResult(
                        client_id=client_id,
                        flow_id=flow_id,
                        hunt_id=hunt_id,
                        payload=rdfvalue.RDFString("blah2-foo"))
                ])
        else:
            hunt_urn = rdfvalue.RDFURN("aff4:/hunts/H:123456")
            hunt_id = hunt_urn.Basename()

            results = implementation.GRRHunt.ResultCollectionForHID(hunt_urn)
            with data_store.DB.GetMutationPool() as pool:
                result = rdf_flows.GrrMessage(
                    source=client_id,
                    payload=rdfvalue.RDFString("blah1"),
                    age=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(1))
                results.Add(result,
                            timestamp=result.age + rdfvalue.Duration("1s"),
                            mutation_pool=pool)

                result = rdf_flows.GrrMessage(
                    source=client_id,
                    payload=rdfvalue.RDFString("blah2-foo"),
                    age=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42))
                results.Add(result,
                            timestamp=result.age + rdfvalue.Duration("1s"),
                            mutation_pool=pool)

        replace = {hunt_id: "H:123456"}
        self.Check("ListHuntResults",
                   args=hunt_plugin.ApiListHuntResultsArgs(hunt_id=hunt_id),
                   replace=replace)
        self.Check("ListHuntResults",
                   args=hunt_plugin.ApiListHuntResultsArgs(hunt_id=hunt_id,
                                                           count=1),
                   replace=replace)
        self.Check("ListHuntResults",
                   args=hunt_plugin.ApiListHuntResultsArgs(hunt_id=hunt_id,
                                                           offset=1,
                                                           count=1),
                   replace=replace)
        self.Check("ListHuntResults",
                   args=hunt_plugin.ApiListHuntResultsArgs(hunt_id=hunt_id,
                                                           filter="foo"),
                   replace=replace)
Example #24
    def testGetCollectedTimelinesBody(self):
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        fqdn = "foo.bar.quux"

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.fqdn = fqdn
        data_store.REL_DB.WriteClientSnapshot(snapshot)

        hunt_id = "B1C2E3D4"
        flow_id = "1B2C3E4D"

        hunt_obj = rdf_hunt_objects.Hunt()
        hunt_obj.hunt_id = hunt_id
        hunt_obj.args.standard.client_ids = [client_id]
        hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
        hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
        data_store.REL_DB.WriteHuntObject(hunt_obj)

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = timeline.TimelineFlow.__name__
        flow_obj.create_time = rdfvalue.RDFDatetime.Now()
        flow_obj.parent_hunt_id = hunt_id
        data_store.REL_DB.WriteFlowObject(flow_obj)

        entry_1 = rdf_timeline.TimelineEntry()
        entry_1.path = "/bar/baz/quux".encode("utf-8")
        entry_1.ino = 5926273453
        entry_1.size = 13373
        entry_1.atime_ns = 111 * 10**9
        entry_1.mtime_ns = 222 * 10**9
        entry_1.ctime_ns = 333 * 10**9
        entry_1.mode = 0o664

        entry_2 = rdf_timeline.TimelineEntry()
        entry_2.path = "/bar/baz/quuz".encode("utf-8")
        entry_2.ino = 6037384564
        entry_2.size = 13374
        entry_2.atime_ns = 777 * 10**9
        entry_2.mtime_ns = 888 * 10**9
        entry_2.ctime_ns = 999 * 10**9
        entry_2.mode = 0o777

        entries = [entry_1, entry_2]
        blobs = list(rdf_timeline.TimelineEntry.SerializeStream(iter(entries)))
        blob_ids = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)

        result = rdf_timeline.TimelineResult()
        result.entry_batch_blob_ids = [
            blob_id.AsBytes() for blob_id in blob_ids
        ]

        flow_result = rdf_flow_objects.FlowResult()
        flow_result.client_id = client_id
        flow_result.flow_id = flow_id
        flow_result.payload = result

        data_store.REL_DB.WriteFlowResults([flow_result])

        buffer = io.BytesIO()
        self.api.Hunt(hunt_id).GetCollectedTimelines(
            timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
        ).WriteToStream(buffer)

        with zipfile.ZipFile(buffer, mode="r") as archive:
            with archive.open(f"{client_id}_{fqdn}.body", mode="r") as file:
                content_file = file.read().decode("utf-8")

                rows = list(
                    csv.reader(io.StringIO(content_file), delimiter="|"))
                self.assertLen(rows, 2)
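                # Sleuthkit body-format columns:
                # MD5|name|inode|mode|UID|GID|size|atime|mtime|ctime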

                self.assertEqual(rows[0][1], "/bar/baz/quux")
                self.assertEqual(rows[0][2], "5926273453")
                self.assertEqual(rows[0][3], stat.filemode(0o664))
                self.assertEqual(rows[0][6], "13373")
                self.assertEqual(rows[0][7], "111")
                self.assertEqual(rows[0][8], "222")
                self.assertEqual(rows[0][9], "333")

                self.assertEqual(rows[1][1], "/bar/baz/quuz")
                self.assertEqual(rows[1][2], "6037384564")
                self.assertEqual(rows[1][3], stat.filemode(0o777))
                self.assertEqual(rows[1][6], "13374")
                self.assertEqual(rows[1][7], "777")
                self.assertEqual(rows[1][8], "888")
                self.assertEqual(rows[1][9], "999")
Example #25
    def ReadHuntResults(self,
                        hunt_id,
                        offset,
                        count,
                        with_tag=None,
                        with_type=None,
                        with_substring=None,
                        with_timestamp=None,
                        cursor=None):
        """Reads hunt results of a given hunt using given query options."""
        hunt_id_int = db_utils.HuntIDToInt(hunt_id)

        query = ("SELECT client_id, flow_id, hunt_id, payload, type, "
                 "UNIX_TIMESTAMP(timestamp), tag "
                 "FROM flow_results "
                 "FORCE INDEX(flow_results_hunt_id_flow_id_timestamp) "
                 "WHERE hunt_id = %s ")

        args = [hunt_id_int]

        if with_tag:
            query += "AND tag = %s "
            args.append(with_tag)

        if with_type:
            query += "AND type = %s "
            args.append(with_type)

        if with_substring:
            query += "AND payload LIKE %s "
            args.append("%" + db_utils.EscapeWildcards(with_substring) + "%")

        if with_timestamp:
            query += "AND timestamp = FROM_UNIXTIME(%s) "
            args.append(mysql_utils.RDFDatetimeToTimestamp(with_timestamp))

        query += "ORDER BY timestamp ASC LIMIT %s OFFSET %s"
        args.append(count)
        args.append(offset)

        cursor.execute(query, args)

        ret = []
        for (
                client_id_int,
                flow_id_int,
                hunt_id_int,
                serialized_payload,
                payload_type,
                timestamp,
                tag,
        ) in cursor.fetchall():
            if payload_type in rdfvalue.RDFValue.classes:
                payload = rdfvalue.RDFValue.classes[
                    payload_type].FromSerializedBytes(serialized_payload)
            else:
                payload = rdf_objects.SerializedValueOfUnrecognizedType(
                    type_name=payload_type, value=serialized_payload)

            result = rdf_flow_objects.FlowResult(
                client_id=db_utils.IntToClientID(client_id_int),
                flow_id=db_utils.IntToFlowID(flow_id_int),
                hunt_id=hunt_id,
                payload=payload,
                timestamp=mysql_utils.TimestampToRDFDatetime(timestamp))
            if tag is not None:
                result.tag = tag

            ret.append(result)

        return ret
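A hedged call sketch (this database class normally supplies the cursor through a transaction decorator, so callers omit it; db and the argument values are assumptions):

# Hypothetical: second page of tagged ClientSummary results for a hunt.
results = db.ReadHuntResults(
    "ABCDEF01", offset=10, count=10,
    with_tag="tag_1", with_type="ClientSummary")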
Example #26
    def testGetCollectedTimelinesGzchunked(self):
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        fqdn = "foo.bar.baz"

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.fqdn = fqdn
        data_store.REL_DB.WriteClientSnapshot(snapshot)

        hunt_id = "A0B1D2C3"
        flow_id = "0A1B2D3C"

        hunt_obj = rdf_hunt_objects.Hunt()
        hunt_obj.hunt_id = hunt_id
        hunt_obj.args.standard.client_ids = [client_id]
        hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
        hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
        data_store.REL_DB.WriteHuntObject(hunt_obj)

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = timeline.TimelineFlow.__name__
        flow_obj.create_time = rdfvalue.RDFDatetime.Now()
        flow_obj.parent_hunt_id = hunt_id
        data_store.REL_DB.WriteFlowObject(flow_obj)

        entry_1 = rdf_timeline.TimelineEntry()
        entry_1.path = "/foo/bar".encode("utf-8")
        entry_1.ino = 7890178901
        entry_1.size = 4815162342
        entry_1.atime_ns = 123 * 10**9
        entry_1.mtime_ns = 234 * 10**9
        entry_1.ctime_ns = 567 * 10**9
        entry_1.mode = 0o654

        entry_2 = rdf_timeline.TimelineEntry()
        entry_2.path = "/foo/baz".encode("utf-8")
        entry_2.ino = 8765487654
        entry_2.size = 1337
        entry_2.atime_ns = 987 * 10**9
        entry_2.mtime_ns = 876 * 10**9
        entry_2.ctime_ns = 765 * 10**9
        entry_2.mode = 0o757

        entries = [entry_1, entry_2]
        blobs = list(rdf_timeline.TimelineEntry.SerializeStream(iter(entries)))
        blob_ids = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)

        result = rdf_timeline.TimelineResult()
        result.entry_batch_blob_ids = [
            blob_id.AsBytes() for blob_id in blob_ids
        ]

        flow_result = rdf_flow_objects.FlowResult()
        flow_result.client_id = client_id
        flow_result.flow_id = flow_id
        flow_result.payload = result

        data_store.REL_DB.WriteFlowResults([flow_result])

        buffer = io.BytesIO()

        fmt = timeline_pb2.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED
        self.api.Hunt(hunt_id).GetCollectedTimelines(fmt).WriteToStream(buffer)

        with zipfile.ZipFile(buffer, mode="r") as archive:
            with archive.open(f"{client_id}_{fqdn}.gzchunked",
                              mode="r") as file:
                chunks = chunked.ReadAll(file)
                entries = list(
                    rdf_timeline.TimelineEntry.DeserializeStream(chunks))
                self.assertEqual(entries, [entry_1, entry_2])
Example #27
    def testListParsedFlowResults(self):
        client_id = self.SetupClient(0)
        flow_id = "4815162342ABCDEF"

        flow = rdf_flow_objects.Flow()
        flow.client_id = client_id
        flow.flow_id = flow_id
        flow.flow_class_name = collectors.ArtifactCollectorFlow.__name__
        flow.args = rdf_artifacts.ArtifactCollectorFlowArgs(
            apply_parsers=False)
        flow.persistent_data = {"knowledge_base": rdf_client.KnowledgeBase()}
        data_store.REL_DB.WriteFlowObject(flow)

        result = rdf_flow_objects.FlowResult()
        result.client_id = client_id
        result.flow_id = flow_id
        result.tag = "artifact:Echo"

        response = rdf_client_action.ExecuteResponse()
        response.stderr = "Lorem ipsum.".encode("utf-8")

        result.payload = response
        data_store.REL_DB.WriteFlowResults([result])

        response = rdf_client_action.ExecuteResponse()
        response.stderr = "Dolor sit amet.".encode("utf-8")

        result.payload = response
        data_store.REL_DB.WriteFlowResults([result])

        class StderrToStdoutParser(
                parser.SingleResponseParser[rdf_client_action.ExecuteResponse]
        ):

            supported_artifacts = ["Echo"]

            def ParseResponse(
                self,
                knowledge_base: rdf_client.KnowledgeBase,
                response: rdf_client_action.ExecuteResponse,
            ) -> Iterable[rdf_client_action.ExecuteResponse]:
                del knowledge_base  # Unused.

                if not isinstance(response, rdf_client_action.ExecuteResponse):
                    raise TypeError(
                        f"Unexpected response type: {type(response)}")

                parsed_response = rdf_client_action.ExecuteResponse()
                parsed_response.stdout = response.stderr

                return [parsed_response]

        with parser_test_lib._ParserContext("StderrToStdout",
                                            StderrToStdoutParser):
            results = self.api.Client(client_id).Flow(
                flow_id).ListParsedResults()

        stdouts = [result.payload.stdout.decode("utf-8") for result in results]
        self.assertLen(stdouts, 2)
        self.assertEqual(stdouts[0], "Lorem ipsum.")
        self.assertEqual(stdouts[1], "Dolor sit amet.")