Example #1
    def testStartVariableHuntRaisesIfMoreThanOneFlowPerClient(self):
        client_id = self.SetupClients(1)[0]

        hunt_obj = rdf_hunt_objects.Hunt(client_rate=0)
        hunt_obj.args.hunt_type = hunt_obj.args.HuntType.VARIABLE
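        # Two flow groups that both target the same client. Variable hunts
        # allow at most one flow per client, so starting this hunt must fail.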
        for index in range(2):
            hunt_obj.args.variable.flow_groups.append(
                rdf_hunt_objects.VariableHuntFlowGroup(
                    client_ids=[client_id.Basename()],
                    flow_name=compatibility.GetName(transfer.GetFile),
                    flow_args=transfer.GetFileArgs(pathspec=rdf_paths.PathSpec(
                        path="/tmp/evil_%d.txt" % index,
                        pathtype=rdf_paths.PathSpec.PathType.OS,
                    ))))

        data_store.REL_DB.WriteHuntObject(hunt_obj)

        with self.assertRaises(hunt.CanStartAtMostOneFlowPerClientError):
            hunt.StartHunt(hunt_obj.hunt_id)

        # Check that no flows were scheduled on the client.
        flows = data_store.REL_DB.ReadAllFlowObjects(client_id.Basename())
        self.assertEmpty(flows)
Example #2
File: hunt.py Project: cclauss/grr
def CreateAndStartHunt(flow_name, flow_args, creator, **kwargs):
  """Creates and starts a new hunt."""

  # This interface takes an rdfvalue.Duration. However, the legacy hunt
  # starting interface took an rdfvalue.DurationSeconds object which was then
  # added to the current time to get the expiry. This check exists to make
  # sure we don't confuse the two.
  if "duration" in kwargs:
    precondition.AssertType(kwargs["duration"], rdfvalue.Duration)

  hunt_args = rdf_hunt_objects.HuntArguments.Standard(
      flow_name=flow_name, flow_args=rdf_structs.AnyValue.Pack(flow_args))

  hunt_obj = rdf_hunt_objects.Hunt(
      creator=creator,
      args=hunt_args,
      create_time=rdfvalue.RDFDatetime.Now(),
      **kwargs)

  CreateHunt(hunt_obj)
  StartHunt(hunt_obj.hunt_id)

  return hunt_obj.hunt_id
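
A minimal calling sketch for the function above. The pathspec, creator, and
duration values are illustrative assumptions based on the surrounding
examples, not a verified invocation against a particular GRR revision.

  # Hypothetical usage: start a standard GetFile hunt capped at one week.
  hunt_id = CreateAndStartHunt(
      flow_name=transfer.GetFile.__name__,
      flow_args=transfer.GetFileArgs(
          pathspec=rdf_paths.PathSpec(
              path="/tmp/evil.txt",
              pathtype=rdf_paths.PathSpec.PathType.OS)),
      creator="test-user",
      duration=rdfvalue.Duration.From(1, rdfvalue.WEEKS))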
Example #3
  def testReadHuntResultsCorrectlyAppliesWithSubstringFilter(self):
    hunt_obj = rdf_hunt_objects.Hunt(description="foo")
    self.db.WriteHuntObject(hunt_obj)

    client_id, flow_id = self._SetupHuntClientAndFlow(hunt_id=hunt_obj.hunt_id)
    sample_results = self._SampleSingleTypeHuntResults(
        client_id=client_id, flow_id=flow_id, hunt_id=hunt_obj.hunt_id)
    self._WriteHuntResults(sample_results)

    results = self.db.ReadHuntResults(
        hunt_obj.hunt_id, 0, 100, with_substring="blah")
    self.assertFalse(results)

    results = self.db.ReadHuntResults(
        hunt_obj.hunt_id, 0, 100, with_substring="manufacturer")
    self.assertEqual(
        [i.payload for i in results],
        [i.payload for i in sample_results],
    )

    results = self.db.ReadHuntResults(
        hunt_obj.hunt_id, 0, 100, with_substring="manufacturer_1")
    self.assertEqual([i.payload for i in results], [sample_results[1].payload])
Example #4
    def CreateHunt(self,
                   flow_runner_args=None,
                   flow_args=None,
                   client_rule_set=None,
                   original_object=None,
                   client_rate=0,
                   duration=None,
                   creator=None,
                   **kwargs):
        # Only initialize default flow_args value if default flow_runner_args value
        # is to be used.
        if not flow_runner_args:
            flow_args = (flow_args
                         or transfer.GetFileArgs(pathspec=rdf_paths.PathSpec(
                             path="/tmp/evil.txt",
                             pathtype=rdf_paths.PathSpec.PathType.OS)))

        flow_runner_args = (flow_runner_args or rdf_flow_runner.FlowRunnerArgs(
            flow_name=transfer.GetFile.__name__))

        client_rule_set = (client_rule_set
                           or self._CreateForemanClientRuleSet())

        hunt_args = rdf_hunt_objects.HuntArguments.Standard(
            flow_name=flow_runner_args.flow_name,
            flow_args=rdf_structs.AnyValue.Pack(flow_args))

        hunt_obj = rdf_hunt_objects.Hunt(creator=creator,
                                         client_rule_set=client_rule_set,
                                         original_object=original_object,
                                         client_rate=client_rate,
                                         duration=duration,
                                         args=hunt_args,
                                         **kwargs)
        hunt.CreateHunt(hunt_obj)

        return hunt_obj.hunt_id
Example #5
    def testVariableHuntSchedulesAllFlowsOnStart(self):
        client_ids = self.SetupClients(10)

        hunt_obj = rdf_hunt_objects.Hunt(client_rate=0)
        hunt_obj.args.hunt_type = hunt_obj.args.HuntType.VARIABLE

        for index, pair in enumerate(collection.Batch(client_ids, 2)):
            hunt_obj.args.variable.flow_groups.append(
                rdf_hunt_objects.VariableHuntFlowGroup(
                    client_ids=[c.Basename() for c in pair],
                    flow_name=compatibility.GetName(transfer.GetFile),
                    flow_args=transfer.GetFileArgs(pathspec=rdf_paths.PathSpec(
                        path="/tmp/evil_%d.txt" % index,
                        pathtype=rdf_paths.PathSpec.PathType.OS,
                    ))))

        data_store.REL_DB.WriteHuntObject(hunt_obj)
        hunt.StartHunt(hunt_obj.hunt_id)

        hunt_counters = data_store.REL_DB.ReadHuntCounters(hunt_obj.hunt_id)
        self.assertEqual(hunt_counters.num_clients, 10)

        all_flows = data_store.REL_DB.ReadHuntFlows(hunt_obj.hunt_id, 0,
                                                    sys.maxsize)
        self.assertItemsEqual([c.Basename() for c in client_ids],
                              [f.client_id for f in all_flows])

        for index, pair in enumerate(collection.Batch(client_ids, 2)):
            for client_id in pair:
                all_flows = data_store.REL_DB.ReadAllFlowObjects(
                    client_id.Basename())
                self.assertLen(all_flows, 1)

                self.assertEqual(all_flows[0].flow_class_name,
                                 compatibility.GetName(transfer.GetFile))
                self.assertEqual(all_flows[0].args.pathspec.path,
                                 "/tmp/evil_%d.txt" % index)
Example #6
  def _WriteHuntOutputPluginLogEntries(self):
    hunt_obj = rdf_hunt_objects.Hunt(description="foo")
    self.db.WriteHuntObject(hunt_obj)

    output_plugin_id = "1"
    for i in range(10):
      client_id, flow_id = self._SetupHuntClientAndFlow(
          client_id="C.12345678901234a%d" % i, hunt_id=hunt_obj.hunt_id)
      enum = rdf_flow_objects.FlowOutputPluginLogEntry.LogEntryType
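      # Every third entry is written as an ERROR; the rest are regular LOGs.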
      if i % 3 == 0:
        log_entry_type = enum.ERROR
      else:
        log_entry_type = enum.LOG
      self.db.WriteFlowOutputPluginLogEntries([
          rdf_flow_objects.FlowOutputPluginLogEntry(
              client_id=client_id,
              flow_id=flow_id,
              hunt_id=hunt_obj.hunt_id,
              output_plugin_id=output_plugin_id,
              log_entry_type=log_entry_type,
              message="blah%d" % i)
      ])

    return hunt_obj
Example #7
    def testReadHuntFlowsCorrectlyAppliesOffsetAndCountFilters(self):
        hunt_obj = rdf_hunt_objects.Hunt(description="foo")
        self.db.WriteHuntObject(hunt_obj)

        expectations = self._BuildFilterConditionExpectations(hunt_obj)
        for filter_condition, _ in expectations.items():
            full_results = self.db.ReadHuntFlows(
                hunt_obj.hunt_id, 0, 1024, filter_condition=filter_condition)
            full_results_ids = [r.flow_id for r in full_results]
            for index in range(0, 2):
                for count in range(1, 3):
                    results = self.db.ReadHuntFlows(
                        hunt_obj.hunt_id,
                        index,
                        count,
                        filter_condition=filter_condition)
                    results_ids = [r.flow_id for r in results]
                    expected_ids = full_results_ids[index:index + count]
                    self.assertCountEqual(
                        results_ids, expected_ids,
                        "Result items do not match for "
                        "(filter_condition=%d, index=%d, count=%d): %s vs %s" %
                        (filter_condition, index, count, expected_ids,
                         results_ids))
Example #8
def CreateAndStartHunt(flow_name, flow_args, creator, **kwargs):
    """Creates and starts a new hunt."""

    # This interface takes a time when the hunt expires. However, the legacy hunt
    # starting interface took an rdfvalue.Duration object which was then added to
    # the current time to get the expiry. This check exists to make sure we don't
    # confuse the two.
    if "expiry_time" in kwargs:
        precondition.AssertType(kwargs["expiry_time"], rdfvalue.RDFDatetime)

    hunt_args = rdf_hunt_objects.HuntArguments(
        hunt_type=rdf_hunt_objects.HuntArguments.HuntType.STANDARD,
        standard=rdf_hunt_objects.HuntArgumentsStandard(flow_name=flow_name,
                                                        flow_args=flow_args))

    hunt_obj = rdf_hunt_objects.Hunt(creator=creator,
                                     args=hunt_args,
                                     create_time=rdfvalue.RDFDatetime.Now(),
                                     **kwargs)

    CreateHunt(hunt_obj)
    StartHunt(hunt_obj.hunt_id)

    return hunt_obj.hunt_id
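
A comparable sketch for this older variant, which checks an absolute expiry
time rather than a duration; the argument values and the Duration("1w")
constructor are assumptions, not verified API for this revision.

    # Hypothetical usage: the hunt expires one week from now.
    hunt_id = CreateAndStartHunt(
        flow_name=transfer.GetFile.__name__,
        flow_args=transfer.GetFileArgs(),
        creator="test-user",
        expiry_time=rdfvalue.RDFDatetime.Now() + rdfvalue.Duration("1w"))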
Example #9
  def testReadHuntResultsCorrectlyAppliesWithTypeFilter(self):
    hunt_obj = rdf_hunt_objects.Hunt(description="foo")
    self.db.WriteHuntObject(hunt_obj)

    sample_results = []
    for i in range(10):
      client_id, flow_id = self._SetupHuntClientAndFlow(
          hunt_id=hunt_obj.hunt_id)
      results = self._SampleTwoTypeHuntResults(
          client_id=client_id,
          flow_id=flow_id,
          hunt_id=hunt_obj.hunt_id,
          count_per_type=1)
      sample_results.extend(results)
      self._WriteHuntResults(results)

    results = self.db.ReadHuntResults(
        hunt_obj.hunt_id,
        0,
        100,
        with_type=compatibility.GetName(rdf_client.ClientInformation))
    self.assertFalse(results)

    results = self.db.ReadHuntResults(
        hunt_obj.hunt_id,
        0,
        100,
        with_type=compatibility.GetName(rdf_client.ClientSummary))
    self.assertCountEqual(
        [i.payload for i in results],
        [
            i.payload
            for i in sample_results
            if isinstance(i.payload, rdf_client.ClientSummary)
        ],
    )
Example #10
  def testReadHuntResultsCorrectlyAppliedOffsetAndCountFilters(self):
    hunt_obj = rdf_hunt_objects.Hunt(
        hunt_id=hunt.RandomHuntId(), description="foo")
    self.db.WriteHuntObject(hunt_obj)

    sample_results = []
    for i in range(10):
      client_id, flow_id = self._SetupHuntClientAndFlow(
          hunt_id=hunt_obj.hunt_id)
      results = self._SampleSingleTypeHuntResults(client_id=client_id, count=1)
      sample_results.extend(results)
      self._WriteHuntResults({(client_id, flow_id): results})

    for l in range(1, 11):
      for i in range(10):
        results = self.db.ReadHuntResults(hunt_obj.hunt_id, i, l)
        expected = sample_results[i:i + l]

        result_payloads = [x.payload for x in results]
        expected_payloads = [x.payload for x in expected]
        self.assertEqual(
            result_payloads, expected_payloads,
            "Results differ from expected (from %d, size %d): %s vs %s" %
            (i, l, result_payloads, expected_payloads))
Example #11
    def testReadHuntResultsReturnsPayloadWithMissingTypeAsSpecialValue(self):
        hunt_obj = rdf_hunt_objects.Hunt(hunt_id=hunt.RandomHuntId(),
                                         description="foo")
        self.db.WriteHuntObject(hunt_obj)

        client_id, flow_id = self._SetupHuntClientAndFlow(
            hunt_id=hunt_obj.hunt_id)
        sample_results = self._SampleSingleTypeHuntResults()
        self._WriteHuntResults({(client_id, flow_id): sample_results})

        type_name = compatibility.GetName(rdf_client.ClientSummary)
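        # Temporarily unregister the payload type so that reading the results
        # exercises the unknown-type fallback; the finally block restores it.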
        try:
            cls = rdfvalue.RDFValue.classes.pop(type_name)

            results = self.db.ReadHuntResults(hunt_obj.hunt_id, 0, 100)
        finally:
            rdfvalue.RDFValue.classes[type_name] = cls

        self.assertEqual(len(sample_results), len(results))
        for r in results:
            self.assertTrue(
                isinstance(r.payload,
                           rdf_objects.SerializedValueOfUnrecognizedType))
            self.assertEqual(r.payload.type_name, type_name)
Example #12
    def testForemanRulesAreCorrectlyPropagatedWhenHuntStarts(self):
        client_rule_set = foreman_rules.ForemanClientRuleSet(rules=[
            foreman_rules.ForemanClientRule(
                rule_type=foreman_rules.ForemanClientRule.Type.REGEX,
                regex=foreman_rules.ForemanRegexClientRule(
                    field="CLIENT_NAME", attribute_regex="HUNT")),
            foreman_rules.ForemanClientRule(
                rule_type=foreman_rules.ForemanClientRule.Type.INTEGER,
                integer=foreman_rules.ForemanIntegerClientRule(
                    field="CLIENT_CLOCK",
                    operator=foreman_rules.ForemanIntegerClientRule.Operator.
                    GREATER_THAN,
                    value=1336650631137737))
        ])

        self.assertEmpty(data_store.REL_DB.ReadAllForemanRules())

        hunt_obj = rdf_hunt_objects.Hunt(client_rule_set=client_rule_set)
        hunt_obj.args.hunt_type = hunt_obj.args.HuntType.STANDARD
        data_store.REL_DB.WriteHuntObject(hunt_obj)

        hunt_obj = hunt.StartHunt(hunt_obj.hunt_id)

        rules = data_store.REL_DB.ReadAllForemanRules()
        self.assertLen(rules, 1)
        rule = rules[0]
        self.assertEqual(rule.client_rule_set, client_rule_set)
        self.assertEqual(rule.hunt_id, hunt_obj.hunt_id)
        self.assertEqual(rule.expiration_time,
                         hunt_obj.init_start_time + hunt_obj.duration)

        # Running a second time should not change the rules any more.
        with self.assertRaises(hunt.OnlyPausedHuntCanBeStartedError):
            hunt.StartHunt(hunt_obj.hunt_id)
        rules = data_store.REL_DB.ReadAllForemanRules()
        self.assertLen(rules, 1)
Example #13
    def testGetCollectedTimelinesGzchunked(self):
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        fqdn = "foo.bar.baz"

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.fqdn = fqdn
        data_store.REL_DB.WriteClientSnapshot(snapshot)

        hunt_id = "A0B1D2C3"
        flow_id = "0A1B2D3C"

        hunt_obj = rdf_hunt_objects.Hunt()
        hunt_obj.hunt_id = hunt_id
        hunt_obj.args.standard.client_ids = [client_id]
        hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
        hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
        data_store.REL_DB.WriteHuntObject(hunt_obj)

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = timeline.TimelineFlow.__name__
        flow_obj.create_time = rdfvalue.RDFDatetime.Now()
        flow_obj.parent_hunt_id = hunt_id
        data_store.REL_DB.WriteFlowObject(flow_obj)

        entry_1 = rdf_timeline.TimelineEntry()
        entry_1.path = "/foo/bar".encode("utf-8")
        entry_1.ino = 7890178901
        entry_1.size = 4815162342
        entry_1.atime_ns = 123 * 10**9
        entry_1.mtime_ns = 234 * 10**9
        entry_1.ctime_ns = 567 * 10**9
        entry_1.mode = 0o654

        entry_2 = rdf_timeline.TimelineEntry()
        entry_2.path = "/foo/baz".encode("utf-8")
        entry_2.ino = 8765487654
        entry_2.size = 1337
        entry_2.atime_ns = 987 * 10**9
        entry_2.mtime_ns = 876 * 10**9
        entry_2.ctime_ns = 765 * 10**9
        entry_2.mode = 0o757

        entries = [entry_1, entry_2]
        blobs = list(rdf_timeline.TimelineEntry.SerializeStream(iter(entries)))
        blob_ids = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)

        result = rdf_timeline.TimelineResult()
        result.entry_batch_blob_ids = [
            blob_id.AsBytes() for blob_id in blob_ids
        ]

        flow_result = rdf_flow_objects.FlowResult()
        flow_result.client_id = client_id
        flow_result.flow_id = flow_id
        flow_result.payload = result

        data_store.REL_DB.WriteFlowResults([flow_result])

        buffer = io.BytesIO()

        fmt = timeline_pb2.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED
        self.api.Hunt(hunt_id).GetCollectedTimelines(fmt).WriteToStream(buffer)

        with zipfile.ZipFile(buffer, mode="r") as archive:
            with archive.open(f"{client_id}_{fqdn}.gzchunked",
                              mode="r") as file:
                chunks = chunked.ReadAll(file)
                entries = list(
                    rdf_timeline.TimelineEntry.DeserializeStream(chunks))
                self.assertEqual(entries, [entry_1, entry_2])
Example #14
    def testGetCollectedTimelinesBody(self):
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        fqdn = "foo.bar.quux"

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.fqdn = fqdn
        data_store.REL_DB.WriteClientSnapshot(snapshot)

        hunt_id = "B1C2E3D4"
        flow_id = "1B2C3E4D"

        hunt_obj = rdf_hunt_objects.Hunt()
        hunt_obj.hunt_id = hunt_id
        hunt_obj.args.standard.client_ids = [client_id]
        hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
        hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
        data_store.REL_DB.WriteHuntObject(hunt_obj)

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = timeline.TimelineFlow.__name__
        flow_obj.create_time = rdfvalue.RDFDatetime.Now()
        flow_obj.parent_hunt_id = hunt_id
        data_store.REL_DB.WriteFlowObject(flow_obj)

        entry_1 = rdf_timeline.TimelineEntry()
        entry_1.path = "/bar/baz/quux".encode("utf-8")
        entry_1.ino = 5926273453
        entry_1.size = 13373
        entry_1.atime_ns = 111 * 10**9
        entry_1.mtime_ns = 222 * 10**9
        entry_1.ctime_ns = 333 * 10**9
        entry_1.mode = 0o664

        entry_2 = rdf_timeline.TimelineEntry()
        entry_2.path = "/bar/baz/quuz".encode("utf-8")
        entry_2.ino = 6037384564
        entry_2.size = 13374
        entry_2.atime_ns = 777 * 10**9
        entry_2.mtime_ns = 888 * 10**9
        entry_2.ctime_ns = 999 * 10**9
        entry_2.mode = 0o777

        entries = [entry_1, entry_2]
        blobs = list(rdf_timeline.TimelineEntry.SerializeStream(iter(entries)))
        blob_ids = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)

        result = rdf_timeline.TimelineResult()
        result.entry_batch_blob_ids = [
            blob_id.AsBytes() for blob_id in blob_ids
        ]

        flow_result = rdf_flow_objects.FlowResult()
        flow_result.client_id = client_id
        flow_result.flow_id = flow_id
        flow_result.payload = result

        data_store.REL_DB.WriteFlowResults([flow_result])

        buffer = io.BytesIO()
        self.api.Hunt(hunt_id).GetCollectedTimelines(
            timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
        ).WriteToStream(buffer)

        with zipfile.ZipFile(buffer, mode="r") as archive:
            with archive.open(f"{client_id}_{fqdn}.body", mode="r") as file:
                content_file = file.read().decode("utf-8")

                rows = list(
                    csv.reader(io.StringIO(content_file), delimiter="|"))
                self.assertLen(rows, 2)

                self.assertEqual(rows[0][1], "/bar/baz/quux")
                self.assertEqual(rows[0][2], "5926273453")
                self.assertEqual(rows[0][3], stat.filemode(0o664))
                self.assertEqual(rows[0][6], "13373")
                self.assertEqual(rows[0][7], "111")
                self.assertEqual(rows[0][8], "222")
                self.assertEqual(rows[0][9], "333")

                self.assertEqual(rows[1][1], "/bar/baz/quuz")
                self.assertEqual(rows[1][2], "6037384564")
                self.assertEqual(rows[1][3], stat.filemode(0o777))
                self.assertEqual(rows[1][6], "13374")
                self.assertEqual(rows[1][7], "777")
                self.assertEqual(rows[1][8], "888")
                self.assertEqual(rows[1][9], "999")
Example #15
  def testReadHuntClientResourcesStatsCorrectlyAggregatesData(self):
    hunt_obj = rdf_hunt_objects.Hunt(description="foo")
    self.db.WriteHuntObject(hunt_obj)

    flow_data = []
    expected_user_cpu_histogram = rdf_stats.StatsHistogram.FromBins(
        rdf_stats.ClientResourcesStats.CPU_STATS_BINS)
    expected_system_cpu_histogram = rdf_stats.StatsHistogram.FromBins(
        rdf_stats.ClientResourcesStats.CPU_STATS_BINS)
    expected_network_histogram = rdf_stats.StatsHistogram.FromBins(
        rdf_stats.ClientResourcesStats.NETWORK_STATS_BINS)
    for i in range(10):
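      # Over i in [0, 10) these series average 9.0 (user CPU), 19.0 (system
      # CPU) and 55.5 (network bytes), matching the assertions below.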
      user_cpu_time = 4.5 + i
      system_cpu_time = 10 + i * 2
      network_bytes_sent = 42 + i * 3

      client_id, flow_id = self._SetupHuntClientAndFlow(
          flow_state=rdf_flow_objects.Flow.FlowState.FINISHED,
          cpu_time_used=rdf_client_stats.CpuSeconds(
              user_cpu_time=user_cpu_time, system_cpu_time=system_cpu_time),
          network_bytes_sent=network_bytes_sent,
          hunt_id=hunt_obj.hunt_id)

      expected_user_cpu_histogram.RegisterValue(user_cpu_time)
      expected_system_cpu_histogram.RegisterValue(system_cpu_time)
      expected_network_histogram.RegisterValue(network_bytes_sent)

      flow_data.append((client_id, flow_id, (user_cpu_time, system_cpu_time,
                                             network_bytes_sent)))

    usage_stats = self.db.ReadHuntClientResourcesStats(hunt_obj.hunt_id)

    self.assertEqual(usage_stats.user_cpu_stats.num, 10)
    self.assertAlmostEqual(usage_stats.user_cpu_stats.mean, 9)
    self.assertAlmostEqual(usage_stats.user_cpu_stats.std, 2.8722813232690143)
    self.assertLen(usage_stats.user_cpu_stats.histogram.bins,
                   len(expected_user_cpu_histogram.bins))
    for b, model_b in zip(usage_stats.user_cpu_stats.histogram.bins,
                          expected_user_cpu_histogram.bins):
      self.assertAlmostEqual(b.range_max_value, model_b.range_max_value)
      self.assertEqual(b.num, model_b.num)

    self.assertEqual(usage_stats.system_cpu_stats.num, 10)
    self.assertAlmostEqual(usage_stats.system_cpu_stats.mean, 19)
    self.assertAlmostEqual(usage_stats.system_cpu_stats.std, 5.744562646538029)
    self.assertLen(usage_stats.system_cpu_stats.histogram.bins,
                   len(expected_system_cpu_histogram.bins))
    for b, model_b in zip(usage_stats.system_cpu_stats.histogram.bins,
                          expected_system_cpu_histogram.bins):
      self.assertAlmostEqual(b.range_max_value, model_b.range_max_value)
      self.assertEqual(b.num, model_b.num)

    self.assertEqual(usage_stats.network_bytes_sent_stats.num, 10)
    self.assertAlmostEqual(usage_stats.network_bytes_sent_stats.mean, 55.5)
    self.assertAlmostEqual(usage_stats.network_bytes_sent_stats.std,
                           8.616843969807043)
    self.assertLen(usage_stats.network_bytes_sent_stats.histogram.bins,
                   len(expected_network_histogram.bins))
    for b, model_b in zip(usage_stats.network_bytes_sent_stats.histogram.bins,
                          expected_network_histogram.bins):
      self.assertAlmostEqual(b.range_max_value, model_b.range_max_value)
      self.assertEqual(b.num, model_b.num)

    self.assertLen(usage_stats.worst_performers, 10)
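    # Resource usage grows with i, so the worst performers are expected in
    # reverse creation order.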
    for worst_performer, flow_d in zip(usage_stats.worst_performers,
                                       reversed(flow_data)):
      client_id, flow_id, (user_cpu_time, system_cpu_time,
                           network_bytes_sent) = flow_d
      self.assertEqual(worst_performer.client_id.Basename(), client_id)
      self.assertAlmostEqual(worst_performer.cpu_usage.user_cpu_time,
                             user_cpu_time)
      self.assertAlmostEqual(worst_performer.cpu_usage.system_cpu_time,
                             system_cpu_time)
      self.assertEqual(worst_performer.network_bytes_sent, network_bytes_sent)
      self.assertEqual(worst_performer.session_id.Path(),
                       "/%s/%s" % (client_id, flow_id))
Example #16
    def testCountHuntFlowsReturnsEmptyListWhenNoFlows(self):
        hunt_obj = rdf_hunt_objects.Hunt(hunt_id=hunt.RandomHuntId(),
                                         description="foo")
        self.db.WriteHuntObject(hunt_obj)

        self.assertEqual(self.db.CountHuntFlows(hunt_obj.hunt_id), 0)
Example #17
    def testReadHuntFlowsReturnsEmptyListWhenNoFlows(self):
        hunt_obj = rdf_hunt_objects.Hunt(hunt_id=hunt.RandomHuntId(),
                                         description="foo")
        self.db.WriteHuntObject(hunt_obj)

        self.assertEmpty(self.db.ReadHuntFlows(hunt_obj.hunt_id, 0, 10))
Example #18
    def _CreateHunt(self, **kwargs):
        hunt_obj = rdf_hunt_objects.Hunt(creator=self.test_username, **kwargs)
        hunt.CreateHunt(hunt_obj)
        hunt_obj = hunt.StartHunt(hunt_obj.hunt_id)

        return hunt_obj.hunt_id
Example #19
    def testRawGzchunkedMultipleClients(self):
        client_id_1 = db_test_utils.InitializeClient(data_store.REL_DB)
        client_id_2 = db_test_utils.InitializeClient(data_store.REL_DB)

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id_1
        snapshot.knowledge_base.fqdn = "foo.quux.com"
        data_store.REL_DB.WriteClientSnapshot(snapshot)

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id_2
        snapshot.knowledge_base.fqdn = "foo.norf.com"
        data_store.REL_DB.WriteClientSnapshot(snapshot)

        hunt_id = "A0B1D2C3E4"

        hunt_obj = rdf_hunt_objects.Hunt()
        hunt_obj.hunt_id = hunt_id
        hunt_obj.args.standard.client_ids = [client_id_1, client_id_2]
        hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
        hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED

        data_store.REL_DB.WriteHuntObject(hunt_obj)

        entry_1 = rdf_timeline.TimelineEntry()
        entry_1.path = "foo_1".encode("utf-8")
        entry_1.ino = 5432154321
        entry_1.size = 13371
        entry_1.atime_ns = 122 * 10**9
        entry_1.mtime_ns = 233 * 10**9
        entry_1.ctime_ns = 344 * 10**9
        entry_1.mode = 0o663

        entry_2 = rdf_timeline.TimelineEntry()
        entry_2.path = "foo_2".encode("utf-8")
        entry_2.ino = 7654376543
        entry_2.size = 13372
        entry_2.atime_ns = 788 * 10**9
        entry_2.mtime_ns = 899 * 10**9
        entry_2.ctime_ns = 900 * 10**9
        entry_2.mode = 0o763

        _WriteTimeline(client_id_1, [entry_1], hunt_id=hunt_id)
        _WriteTimeline(client_id_2, [entry_2], hunt_id=hunt_id)

        args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
        args.hunt_id = hunt_id
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED

        content = b"".join(self.handler.Handle(args).GenerateContent())
        buffer = io.BytesIO(content)

        with zipfile.ZipFile(buffer, mode="r") as archive:
            client_filename_1 = f"{client_id_1}_foo.quux.com.gzchunked"
            with archive.open(client_filename_1, mode="r") as file:
                chunks = chunked.ReadAll(file)
                entries = list(
                    rdf_timeline.TimelineEntry.DeserializeStream(chunks))
                self.assertEqual(entries, [entry_1])

            client_filename_2 = f"{client_id_2}_foo.norf.com.gzchunked"
            with archive.open(client_filename_2, mode="r") as file:
                chunks = chunked.ReadAll(file)
                entries = list(
                    rdf_timeline.TimelineEntry.DeserializeStream(chunks))
                self.assertEqual(entries, [entry_2])
Example #20
    def testBodyMultipleClients(self):
        client_id_1 = db_test_utils.InitializeClient(data_store.REL_DB)
        client_id_2 = db_test_utils.InitializeClient(data_store.REL_DB)

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id_1
        snapshot.knowledge_base.fqdn = "bar.quux.com"
        data_store.REL_DB.WriteClientSnapshot(snapshot)

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id_2
        snapshot.knowledge_base.fqdn = "bar.quuz.com"
        data_store.REL_DB.WriteClientSnapshot(snapshot)

        hunt_id = "B1C2E3D4F5"

        hunt_obj = rdf_hunt_objects.Hunt()
        hunt_obj.hunt_id = hunt_id
        hunt_obj.args.standard.client_ids = [client_id_1, client_id_2]
        hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
        hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED

        data_store.REL_DB.WriteHuntObject(hunt_obj)

        entry_1 = rdf_timeline.TimelineEntry()
        entry_1.path = "/bar/baz/quux".encode("utf-8")
        entry_1.ino = 5926273453
        entry_1.size = 13373
        entry_1.atime_ns = 111 * 10**9
        entry_1.mtime_ns = 222 * 10**9
        entry_1.ctime_ns = 333 * 10**9
        entry_1.mode = 0o664

        entry_2 = rdf_timeline.TimelineEntry()
        entry_2.path = "/bar/baz/quuz".encode("utf-8")
        entry_2.ino = 6037384564
        entry_2.size = 13374
        entry_2.atime_ns = 777 * 10**9
        entry_2.mtime_ns = 888 * 10**9
        entry_2.ctime_ns = 999 * 10**9
        entry_2.mode = 0o777

        _WriteTimeline(client_id_1, [entry_1], hunt_id=hunt_id)
        _WriteTimeline(client_id_2, [entry_2], hunt_id=hunt_id)

        args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
        args.hunt_id = hunt_id
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

        content = b"".join(self.handler.Handle(args).GenerateContent())
        buffer = io.BytesIO(content)

        with zipfile.ZipFile(buffer, mode="r") as archive:
            client_filename_1 = f"{client_id_1}_bar.quux.com.body"
            with archive.open(client_filename_1, mode="r") as file:
                content_file = file.read().decode("utf-8")

                rows = list(
                    csv.reader(io.StringIO(content_file), delimiter="|"))
                self.assertLen(rows, 1)
                self.assertEqual(rows[0][1], "/bar/baz/quux")
                self.assertEqual(rows[0][2], "5926273453")
                self.assertEqual(rows[0][3], stat.filemode(0o664))
                self.assertEqual(rows[0][6], "13373")
                self.assertEqual(rows[0][7], "111")
                self.assertEqual(rows[0][8], "222")
                self.assertEqual(rows[0][9], "333")

            client_filename_2 = f"{client_id_2}_bar.quuz.com.body"
            with archive.open(client_filename_2, mode="r") as file:
                content_file = file.read().decode("utf-8")

                rows = list(
                    csv.reader(io.StringIO(content_file), delimiter="|"))
                self.assertLen(rows, 1)
                self.assertEqual(rows[0][1], "/bar/baz/quuz")
                self.assertEqual(rows[0][2], "6037384564")
                self.assertEqual(rows[0][3], stat.filemode(0o777))
                self.assertEqual(rows[0][6], "13374")
                self.assertEqual(rows[0][7], "777")
                self.assertEqual(rows[0][8], "888")
                self.assertEqual(rows[0][9], "999")
Example #21
    def _CreateHunt(self, **kwargs):
        hunt_obj = rdf_hunt_objects.Hunt(creator=self.token.username, **kwargs)
        data_store.REL_DB.WriteHuntObject(hunt_obj)
        hunt_obj = hunt.StartHunt(hunt_obj.hunt_id)

        return hunt_obj.hunt_id