Code example #1
Score: 0
    def testRawGzchunkedMultipleClients(self):
        """Checks the hunt archive contains one gzchunked file per client."""
        client_id_1 = db_test_utils.InitializeClient(data_store.REL_DB)
        client_id_2 = db_test_utils.InitializeClient(data_store.REL_DB)

        # Give each client a distinct FQDN so the archive filenames differ.
        for client_id, fqdn in [(client_id_1, "foo.quux.com"),
                                (client_id_2, "foo.norf.com")]:
            snapshot = rdf_objects.ClientSnapshot()
            snapshot.client_id = client_id
            snapshot.knowledge_base.fqdn = fqdn
            data_store.REL_DB.WriteClientSnapshot(snapshot)

        hunt_id = "A0B1D2C3E4"

        hunt_obj = rdf_hunt_objects.Hunt()
        hunt_obj.hunt_id = hunt_id
        hunt_obj.args.standard.client_ids = [client_id_1, client_id_2]
        hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
        hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
        data_store.REL_DB.WriteHuntObject(hunt_obj)

        entry_1 = rdf_timeline.TimelineEntry()
        entry_1.path = b"foo_1"
        entry_1.size = 13371
        _WriteTimeline(client_id_1, [entry_1], hunt_id=hunt_id)

        entry_2 = rdf_timeline.TimelineEntry()
        entry_2.path = b"foo_2"
        entry_2.size = 13372
        _WriteTimeline(client_id_2, [entry_2], hunt_id=hunt_id)

        args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
        args.hunt_id = hunt_id

        content = b"".join(self.handler.Handle(args).GenerateContent())

        with zipfile.ZipFile(io.BytesIO(content), mode="r") as archive:
            # Each client's timeline should land in its own `<id>_<fqdn>` entry.
            expectations = [
                (f"{client_id_1}_foo.quux.com.gzchunked", entry_1),
                (f"{client_id_2}_foo.norf.com.gzchunked", entry_2),
            ]
            for archive_name, expected_entry in expectations:
                with archive.open(archive_name, mode="r") as archive_file:
                    chunks = chunked.ReadAll(archive_file)
                    entries = list(
                        rdf_timeline.TimelineEntry.DeserializeStream(chunks))
                    self.assertEqual(entries, [expected_entry])
Code example #2
Score: 0
File: timeline_test.py — Project: syth3/grr
  def testRaisesOnIncorrectFlowType(self):
    """Verifies that a hunt over a non-timeline flow is rejected."""
    client_id = db_test_utils.InitializeClient(data_store.REL_DB)
    hunt_id = "A0B1D2C3E4"

    hunt_obj = rdf_hunt_objects.Hunt()
    hunt_obj.hunt_id = hunt_id
    hunt_obj.args.standard.client_ids = [client_id]
    # Deliberately not `TimelineFlow`, so the handler should refuse it.
    hunt_obj.args.standard.flow_name = "NotTimelineFlow"
    hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
    data_store.REL_DB.WriteHuntObject(hunt_obj)

    args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
    args.hunt_id = hunt_id

    with self.assertRaises(ValueError):
      self.handler.Handle(args)
Code example #3
Score: 0
    def testRaisesOnIncorrectFormat(self):
        """Verifies that requesting the UNSPECIFIED export format is rejected."""
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        hunt_id = "B1C2E3D4F5"

        hunt_obj = rdf_hunt_objects.Hunt()
        hunt_obj.hunt_id = hunt_id
        hunt_obj.args.standard.client_ids = [client_id]
        hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
        hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
        data_store.REL_DB.WriteHuntObject(hunt_obj)

        args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
        args.hunt_id = hunt_id
        # An unspecified format cannot be exported; the handler must raise.
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.UNSPECIFIED

        with self.assertRaises(ValueError):
            self.handler.Handle(args)
Code example #4
Score: 0
File: timeline_test.py — Project: khanhgithead/grr
    def testBodySubsecondPrecision(self):
        """Checks body-format export preserves fractional-second timestamps."""
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        hunt_id = "ABCDEABCDE"

        snapshot = rdf_objects.ClientSnapshot()
        snapshot.client_id = client_id
        snapshot.knowledge_base.fqdn = "foo.bar.baz"
        data_store.REL_DB.WriteClientSnapshot(snapshot)

        hunt_obj = rdf_hunt_objects.Hunt()
        hunt_obj.hunt_id = hunt_id
        hunt_obj.args.standard.client_ids = [client_id]
        hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
        hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
        data_store.REL_DB.WriteHuntObject(hunt_obj)

        entry = rdf_timeline.TimelineEntry()
        entry.path = b"/foo/bar/baz"
        # 1337.42 seconds of birth time, expressed in nanoseconds.
        entry.btime_ns = int(1337.42 * 10**9)
        timeline_test_lib.WriteTimeline(client_id, [entry], hunt_id=hunt_id)

        args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
        args.hunt_id = hunt_id
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
        args.body_opts.timestamp_subsecond_precision = True

        content = b"".join(self.handler.Handle(args).GenerateContent())

        with zipfile.ZipFile(io.BytesIO(content), mode="r") as archive:
            archive_name = f"{client_id}_foo.bar.baz.body"
            with archive.open(archive_name, mode="r") as body_file:
                body_text = body_file.read().decode("utf-8")
                rows = list(csv.reader(io.StringIO(body_text), delimiter="|"))

        self.assertLen(rows, 1)
        self.assertEqual(rows[0][1], "/foo/bar/baz")
        # The btime column must keep its sub-second part.
        self.assertEqual(rows[0][10], "1337.42")
Code example #5
Score: 0
    def testBodyMultipleClients(self):
        """Checks body-format export emits one body file per hunt client."""
        client_id_1 = db_test_utils.InitializeClient(data_store.REL_DB)
        client_id_2 = db_test_utils.InitializeClient(data_store.REL_DB)

        # Give each client a distinct FQDN so the archive filenames differ.
        for client_id, fqdn in [(client_id_1, "bar.quux.com"),
                                (client_id_2, "bar.quuz.com")]:
            snapshot = rdf_objects.ClientSnapshot()
            snapshot.client_id = client_id
            snapshot.knowledge_base.fqdn = fqdn
            data_store.REL_DB.WriteClientSnapshot(snapshot)

        hunt_id = "B1C2E3D4F5"

        hunt_obj = rdf_hunt_objects.Hunt()
        hunt_obj.hunt_id = hunt_id
        hunt_obj.args.standard.client_ids = [client_id_1, client_id_2]
        hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
        hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
        data_store.REL_DB.WriteHuntObject(hunt_obj)

        entry_1 = rdf_timeline.TimelineEntry()
        entry_1.path = b"/bar/baz/quux"
        entry_1.ino = 5926273453
        entry_1.size = 13373
        entry_1.atime_ns = 111 * 10**9
        entry_1.mtime_ns = 222 * 10**9
        entry_1.ctime_ns = 333 * 10**9
        entry_1.mode = 0o664
        _WriteTimeline(client_id_1, [entry_1], hunt_id=hunt_id)

        entry_2 = rdf_timeline.TimelineEntry()
        entry_2.path = b"/bar/baz/quuz"
        entry_2.ino = 6037384564
        entry_2.size = 13374
        entry_2.atime_ns = 777 * 10**9
        entry_2.mtime_ns = 888 * 10**9
        entry_2.ctime_ns = 999 * 10**9
        entry_2.mode = 0o777
        _WriteTimeline(client_id_2, [entry_2], hunt_id=hunt_id)

        args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
        args.hunt_id = hunt_id
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

        content = b"".join(self.handler.Handle(args).GenerateContent())

        with zipfile.ZipFile(io.BytesIO(content), mode="r") as archive:
            # Expected body columns: path, inode, mode string, size, and the
            # a/m/c timestamps (in whole seconds), per client archive entry.
            expectations = [
                (f"{client_id_1}_bar.quux.com.body",
                 ("/bar/baz/quux", "5926273453", stat.filemode(0o664),
                  "13373", "111", "222", "333")),
                (f"{client_id_2}_bar.quuz.com.body",
                 ("/bar/baz/quuz", "6037384564", stat.filemode(0o777),
                  "13374", "777", "888", "999")),
            ]
            for archive_name, expected in expectations:
                path, ino, mode, size, atime, mtime, ctime = expected
                with archive.open(archive_name, mode="r") as body_file:
                    body_text = body_file.read().decode("utf-8")
                    rows = list(
                        csv.reader(io.StringIO(body_text), delimiter="|"))
                    self.assertLen(rows, 1)
                    self.assertEqual(rows[0][1], path)
                    self.assertEqual(rows[0][2], ino)
                    self.assertEqual(rows[0][3], mode)
                    self.assertEqual(rows[0][6], size)
                    self.assertEqual(rows[0][7], atime)
                    self.assertEqual(rows[0][8], mtime)
                    self.assertEqual(rows[0][9], ctime)