Example #1
0
    def testRawGzchunkedMulipleEntries(self):
        """The RAW_GZCHUNKED export round-trips many timeline entries."""

        def MakeEntry(index):
            # Each entry gets a unique path and a random (but bounded) size.
            entry = rdf_timeline.TimelineEntry()
            entry.path = "/quux/thud/bar/baz/foo{}".format(index).encode("utf-8")
            entry.size = random.randint(0, 1024)
            return entry

        entries = [MakeEntry(index) for index in range(1024)]

        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = _WriteTimeline(client_id, entries)

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED

        content = b"".join(self.handler.Handle(args).GenerateContent())

        chunks = chunked.ReadAll(io.BytesIO(content))
        deserialized = list(rdf_timeline.TimelineEntry.DeserializeStream(chunks))

        self.assertEqual(entries, deserialized)
Example #2
0
    def testBodyMultipleEntries(self):
        """The BODY export has one row per entry with path and size columns."""

        def MakeEntry(index):
            entry = rdf_timeline.TimelineEntry()
            entry.path = "/foo/bar/baz/quux/norf/thud{}".format(index).encode(
                "utf-8")
            entry.size = random.randint(0, 1024)
            return entry

        entries = list(map(MakeEntry, range(1024)))

        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = _WriteTimeline(client_id, entries)

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

        content = b"".join(
            self.handler.Handle(args).GenerateContent()).decode("utf-8")

        rows = list(csv.reader(io.StringIO(content), delimiter="|"))
        self.assertLen(rows, len(entries))

        for entry, row in zip(entries, rows):
            # Column 1 holds the path, column 6 the file size.
            self.assertEqual(row[1].encode("utf-8"), entry.path)
            self.assertEqual(int(row[6]), entry.size)
Example #3
0
    def testBodySingleEntry(self):
        """A single entry's fields map onto the expected body-format columns."""
        entry = rdf_timeline.TimelineEntry()
        entry.path = b"/foo/bar/baz"
        entry.ino = 4815162342
        entry.size = 42
        entry.atime_ns = 123 * 10**9
        entry.mtime_ns = 456 * 10**9
        entry.ctime_ns = 789 * 10**9

        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = _WriteTimeline(client_id, [entry])

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

        content = b"".join(
            self.handler.Handle(args).GenerateContent()).decode("utf-8")

        rows = list(csv.reader(io.StringIO(content), delimiter="|"))
        self.assertLen(rows, 1)

        # Column layout: 1 path, 2 inode, 6 size, 7-9 atime/mtime/ctime
        # (nanosecond fields rendered as whole seconds).
        expected_columns = {
            1: "/foo/bar/baz",
            2: "4815162342",
            6: "42",
            7: "123",
            8: "456",
            9: "789",
        }
        for column, value in expected_columns.items():
            self.assertEqual(rows[0][column], value)
Example #4
0
  def testGetCollectedTimelineChecksClientAccessIfNotPartOfHunt(self):
    """A timeline flow started outside a hunt requires client access."""
    client = self.SetupClient(0)
    flow = flow_test_lib.StartFlow(timeline.TimelineFlow, client_id=client)

    args = api_timeline.ApiGetCollectedTimelineArgs(
        client_id=client, flow_id=flow)
    self.CheckMethodIsAccessChecked(
        self.router.GetCollectedTimeline, "CheckClientAccess", args=args)
    def testGetCollectedTimelineRefusesAccessIfWrongFlow(self):
        """Requesting a timeline for a non-timeline flow raises ValueError."""
        client = self.SetupClient(0)
        # A DummyFlow is not a TimelineFlow, so the router must refuse it.
        flow = flow_test_lib.StartFlow(
            flow_test_lib.DummyFlow, client_id=client)

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client, flow_id=flow)
        with self.assertRaises(ValueError):
            self.router.GetCollectedTimeline(args=args, context=self.context)
  def testGetCollectedTimelineGrantsAccessIfPartOfHunt(self):
    """Timeline flows that belong to a hunt skip the client access check."""
    client = self.SetupClient(0)
    hunt = self.CreateHunt()
    flow = flow_test_lib.StartFlow(
        timeline.TimelineFlow, client_id=client, parent_hunt_id=hunt)

    args = api_timeline.ApiGetCollectedTimelineArgs(
        client_id=client, flow_id=flow)
    self.CheckMethodIsNotAccessChecked(
        self.router.GetCollectedTimeline, args=args)
  def testGetCollectedTimelineRefusesAccessIfPartOfHuntButWrongFlow(self):
    """Even inside a hunt, a non-timeline flow is rejected with ValueError."""
    client = self.SetupClient(0)
    hunt = self.CreateHunt()
    flow = flow_test_lib.StartFlow(
        flow_test_lib.DummyFlow, client_id=client, parent_hunt_id=hunt)

    args = api_timeline.ApiGetCollectedTimelineArgs(
        client_id=client, flow_id=flow)
    with self.assertRaises(ValueError):
      # NOTE(review): other tests in this file pass `context=self.context`
      # instead of `token=` — confirm which keyword this router version takes.
      self.router.GetCollectedTimeline(args=args, token=self.token)
Example #8
0
    def testRaisesOnIncorrectFormat(self):
        """Handling a request with an UNSPECIFIED format raises ValueError."""
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = _WriteTimeline(client_id, [])

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.UNSPECIFIED

        with self.assertRaises(ValueError):
            self.handler.Handle(args)
Example #9
0
    def testBodyMultipleResults(self):
        """Entries spread across several flow results all appear in the body."""
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = "ABCDEF42"

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = timeline.TimelineFlow.__name__
        flow_obj.create_time = rdfvalue.RDFDatetime.Now()
        data_store.REL_DB.WriteFlowObject(flow_obj)

        def MakeFlowResult(path):
            # Serializes a single-entry batch to the blob store and wraps the
            # resulting blob id in a timeline flow result.
            entry = rdf_timeline.TimelineEntry()
            entry.path = path

            blobs = list(
                rdf_timeline.TimelineEntry.SerializeStream(iter([entry])))
            (blob_id,) = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)

            result = rdf_timeline.TimelineResult()
            result.entry_batch_blob_ids = [blob_id.AsBytes()]

            flow_result = rdf_flow_objects.FlowResult()
            flow_result.client_id = client_id
            flow_result.flow_id = flow_id
            flow_result.payload = result
            return flow_result

        data_store.REL_DB.WriteFlowResults(
            [MakeFlowResult(b"/foo"), MakeFlowResult(b"/bar")])

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

        content = b"".join(
            self.handler.Handle(args).GenerateContent()).decode("utf-8")

        self.assertIn("|/foo|", content)
        self.assertIn("|/bar|", content)
Example #10
0
    def testRawGzchunkedEmpty(self):
        """An empty timeline produces a gzchunked stream with no chunks."""
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = _WriteTimeline(client_id, [])

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED

        chunks = self.handler.Handle(args).GenerateContent()
        buf = io.BytesIO(b"".join(chunks))

        # Reading from an empty stream yields no chunk at all.
        self.assertIsNone(chunked.Read(buf))
Example #11
0
    def testRawGzchunkedEmpty(self):
        """Deserializing an empty timeline export yields no entries."""
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = _WriteTimeline(client_id, [])

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED

        stream = self.handler.Handle(args).GenerateContent()
        entries = list(rdf_timeline.TimelineEntry.DeserializeStream(stream))

        self.assertEmpty(entries)
Example #12
0
    def testBodyNoEntries(self):
        """An empty timeline produces a body export with zero rows."""
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = _WriteTimeline(client_id, [])

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

        content = b"".join(
            self.handler.Handle(args).GenerateContent()).decode("utf-8")

        rows = list(csv.reader(io.StringIO(content), delimiter="|"))
        self.assertLen(rows, 0)
Example #13
0
    def testNonPrintableEscape(self):
        """Non-printable path bytes are hex-escaped when the option is set."""
        entry = rdf_timeline.TimelineEntry()
        entry.path = b"/f\x00b\x0ar\x1baz"

        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
        args.body_opts.non_printable_escape = True

        body = b"".join(
            self.handler.Handle(args).GenerateContent()).decode("utf-8")

        # NUL, LF and ESC must appear as literal \xNN escape sequences.
        self.assertIn(r"|/f\x00b\x0ar\x1baz|", body)
Example #14
0
    def testCarriageReturnEscape(self):
        """Carriage returns in paths are escaped when the option is set."""
        entry = rdf_timeline.TimelineEntry()
        entry.path = b"/foo/bar\r\rbaz/quux\rnorf"

        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
        args.body_opts.carriage_return_escape = True

        body = b"".join(
            self.handler.Handle(args).GenerateContent()).decode("utf-8")

        # Every CR byte must come out as a literal backslash-r sequence.
        self.assertIn("|/foo/bar\\r\\rbaz/quux\\rnorf|", body)
Example #15
0
    def testBackslashEscape(self):
        """Backslashes in paths are doubled when the escape option is set."""
        entry = rdf_timeline.TimelineEntry()
        entry.path = b"C:\\Windows\\system32\\notepad.exe"

        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
        args.body_opts.backslash_escape = True

        body = b"".join(
            self.handler.Handle(args).GenerateContent()).decode("utf-8")

        self.assertIn("|C:\\\\Windows\\\\system32\\\\notepad.exe|", body)
Example #16
0
    def testRaisesOnIncorrectFlowType(self):
        """Handling a flow that is not a timeline flow raises ValueError."""
        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = "A1B3C5D7E"

        # Register a flow whose class name the handler will not accept.
        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = "NotTimelineFlow"
        flow_obj.create_time = rdfvalue.RDFDatetime.Now()
        data_store.REL_DB.WriteFlowObject(flow_obj)

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

        with self.assertRaises(ValueError):
            self.handler.Handle(args)
Example #17
0
File: timeline_test.py  Project: avmi/grr
    def testNtfsFileReferenceFormatInference(self):
        """NTFS inode rendering is inferred from the result's filesystem type."""
        entry = rdf_timeline.TimelineEntry()
        entry.path = b"/foo/bar/baz"
        entry.ino = 1688849860339456

        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = "F00BA542"

        flow_obj = rdf_flow_objects.Flow()
        flow_obj.client_id = client_id
        flow_obj.flow_id = flow_id
        flow_obj.flow_class_name = timeline.TimelineFlow.__name__
        flow_obj.create_time = rdfvalue.RDFDatetime.Now()
        data_store.REL_DB.WriteFlowObject(flow_obj)

        blobs = list(rdf_timeline.TimelineEntry.SerializeStream(iter([entry])))
        blob_ids = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)

        timeline_result = rdf_timeline.TimelineResult()
        timeline_result.entry_batch_blob_ids = [
            blob_id.AsBytes() for blob_id in blob_ids
        ]
        # No body option requests the NTFS format explicitly — it should be
        # picked automatically because the filesystem type is "NTFS".
        timeline_result.filesystem_type = "NTFS"

        flow_result = rdf_flow_objects.FlowResult()
        flow_result.client_id = client_id
        flow_result.flow_id = flow_id
        flow_result.payload = timeline_result
        data_store.REL_DB.WriteFlowResults([flow_result])

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

        content = b"".join(
            self.handler.Handle(args).GenerateContent()).decode("utf-8")

        rows = list(csv.reader(io.StringIO(content), delimiter="|"))
        self.assertLen(rows, 1)
        self.assertEqual(rows[0][1], "/foo/bar/baz")
        # 1688849860339456 == (6 << 48) | 75520: record 75520, sequence 6.
        self.assertEqual(rows[0][2], "75520-6")
Example #18
0
    def testBodySubsecondPrecision(self):
        """Timestamps keep fractional seconds when the option is set."""
        entry = rdf_timeline.TimelineEntry()
        entry.path = b"/foo/bar/baz"
        entry.atime_ns = int(3.14 * 10**9)

        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
        args.body_opts.timestamp_subsecond_precision = True

        content = b"".join(
            self.handler.Handle(args).GenerateContent()).decode("utf-8")

        rows = list(csv.reader(io.StringIO(content), delimiter="|"))
        self.assertLen(rows, 1)
        self.assertEqual(rows[0][1], "/foo/bar/baz")
        # Column 7 is atime; 3.14e9 ns renders as "3.14" seconds.
        self.assertEqual(rows[0][7], "3.14")
Example #19
0
    def testNtfsFileReferenceFormat(self):
        """Inodes render as NTFS file references when the option is set."""
        entry = rdf_timeline.TimelineEntry()
        entry.path = b"/foo/bar/baz"
        entry.ino = 1688849860339456

        client_id = db_test_utils.InitializeClient(data_store.REL_DB)
        flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

        args = api_timeline.ApiGetCollectedTimelineArgs(
            client_id=client_id, flow_id=flow_id)
        args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
        args.body_opts.inode_ntfs_file_reference_format = True

        content = b"".join(
            self.handler.Handle(args).GenerateContent()).decode("utf-8")

        rows = list(csv.reader(io.StringIO(content), delimiter="|"))
        self.assertLen(rows, 1)
        self.assertEqual(rows[0][1], "/foo/bar/baz")
        # 1688849860339456 == (6 << 48) | 75520: record 75520, sequence 6.
        self.assertEqual(rows[0][2], "75520-6")
 def testGetCollectedTimelineRaisesIfFlowIsNotFound(self):
     """Requesting a timeline for an unknown flow id raises not-found."""
     args = api_timeline.ApiGetCollectedTimelineArgs()
     args.client_id = self.client_id
     args.flow_id = "12345678"
     with self.assertRaises(api_call_handler_base.ResourceNotFoundError):
         self.router.GetCollectedTimeline(args, context=self.context)