def testRawGzchunkedMultipleEntries(self):
  entries = []

  for idx in range(1024):
    entry = rdf_timeline.TimelineEntry()
    entry.path = "/quux/thud/bar/baz/foo{}".format(idx).encode("utf-8")
    entry.size = random.randint(0, 1024)
    entries.append(entry)

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, entries)

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED

  content = b"".join(self.handler.Handle(args).GenerateContent())

  buf = io.BytesIO(content)
  chunks = chunked.ReadAll(buf)
  deserialized = list(rdf_timeline.TimelineEntry.DeserializeStream(chunks))

  self.assertEqual(entries, deserialized)
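# Note on the RAW_GZCHUNKED round-trip above: the handler streams the
# collected entries as a sequence of size-prefixed chunks (each chunk a
# serialized, gzipped batch of TimelineEntry protos), which is why
# `chunked.ReadAll` suffices to recover the chunk payloads before
# `DeserializeStream` parses them. A hedged sketch of the producer side
# using the same helpers (assuming `chunked.Write` mirrors `chunked.Read`
# and `TimelineEntry.SerializeStream` is the counterpart of
# `DeserializeStream`; the exact framing is an implementation detail of
# the `chunked` module):
#
#   buf = io.BytesIO()
#   for blob in rdf_timeline.TimelineEntry.SerializeStream(iter(entries)):
#     chunked.Write(buf, blob)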
def testBodyMultipleEntries(self):
  entries = []

  for idx in range(1024):
    entry = rdf_timeline.TimelineEntry()
    entry.path = "/foo/bar/baz/quux/norf/thud{}".format(idx).encode("utf-8")
    entry.size = random.randint(0, 1024)
    entries.append(entry)

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, entries)

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

  result = self.handler.Handle(args)
  content = b"".join(result.GenerateContent()).decode("utf-8")

  rows = list(csv.reader(io.StringIO(content), delimiter="|"))
  self.assertLen(rows, len(entries))

  for idx, row in enumerate(rows):
    self.assertEqual(row[1].encode("utf-8"), entries[idx].path)
    self.assertEqual(int(row[6]), entries[idx].size)
def testBodySingleEntry(self):
  entry = rdf_timeline.TimelineEntry()
  entry.path = "/foo/bar/baz".encode("utf-8")
  entry.ino = 4815162342
  entry.size = 42
  entry.atime_ns = 123 * 10**9
  entry.mtime_ns = 456 * 10**9
  entry.ctime_ns = 789 * 10**9

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

  result = self.handler.Handle(args)
  content = b"".join(result.GenerateContent()).decode("utf-8")

  rows = list(csv.reader(io.StringIO(content), delimiter="|"))
  self.assertLen(rows, 1)
  self.assertEqual(rows[0][1], "/foo/bar/baz")
  self.assertEqual(rows[0][2], "4815162342")
  self.assertEqual(rows[0][6], "42")
  self.assertEqual(rows[0][7], "123")
  self.assertEqual(rows[0][8], "456")
  self.assertEqual(rows[0][9], "789")
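# The column indices asserted above follow the Sleuth Kit 3.x body format,
# where every row is a pipe-separated record:
#
#   MD5|name|inode|mode_as_string|UID|GID|size|atime|mtime|ctime|crtime
#    0    1    2         3         4   5    6    7     8     9    10
#
# so index 1 is the path, 2 the inode, 6 the size, and 7-9 the access,
# modification, and change times in seconds (column 10, the birth time,
# shows up in the hunt-timeline subsecond-precision test further below).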
def testRaisesOnIncorrectFormat(self):
  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.UNSPECIFIED

  with self.assertRaises(ValueError):
    self.handler.Handle(args)
def testRawGzchunkedEmpty(self):
  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED

  content = b"".join(self.handler.Handle(args).GenerateContent())

  buf = io.BytesIO(content)
  self.assertIsNone(chunked.Read(buf))
def testBodyNoEntries(self):
  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

  result = self.handler.Handle(args)
  content = b"".join(result.GenerateContent()).decode("utf-8")

  rows = list(csv.reader(io.StringIO(content), delimiter="|"))
  self.assertLen(rows, 0)
def testGetCollectedTimelineBody(self):
  entry = rdf_timeline.TimelineEntry()
  entry.path = "/foo/bar/baz".encode("utf-8")

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

  data = io.BytesIO()

  flow = self.api.Client(client_id).Flow(flow_id)
  flow.GetCollectedTimelineBody().WriteToStream(data)  # pytype: disable=wrong-arg-types

  content = data.getvalue().decode("utf-8")
  self.assertIn("|/foo/bar/baz|", content)
def testGetCollectedTimelineBodyBackslashEscape(self):
  entry = rdf_timeline.TimelineEntry()
  entry.path = "C:\\Windows\\system32\\notepad.exe".encode("utf-8")

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

  data = io.BytesIO()

  flow = self.api.Client(client_id).Flow(flow_id)
  flow.GetCollectedTimelineBody(backslash_escape=True).WriteToStream(data)

  content = data.getvalue().decode("utf-8")
  self.assertIn("|C:\\\\Windows\\\\system32\\\\notepad.exe|", content)
def testGetCollectedTimelineBodyNonPrintableEscape(self):
  entry = rdf_timeline.TimelineEntry()
  entry.path = b"/f\x00b\x0ar\x1baz"

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

  flow = self.api.Client(client_id).Flow(flow_id)
  chunks = flow.GetCollectedTimelineBody(non_printable_escape=True)

  data = io.BytesIO()
  chunks.WriteToStream(data)

  content = data.getvalue().decode("utf-8")
  self.assertIn(r"|/f\x00b\x0ar\x1baz|", content)
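# A worked example of the escaping checked above: the raw path contains the
# bytes NUL (0x00), LF (0x0a) and ESC (0x1b), and with `non_printable_escape`
# each of those is rendered as a literal four-character `\xNN` sequence:
#
#   b"/f\x00b\x0ar\x1baz"  ->  r"/f\x00b\x0ar\x1baz"
#
# i.e. the escaped column is plain printable ASCII, while the printable
# bytes ("/", "f", "b", "r", "a", "z") pass through unchanged.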
def testGetCollectedTimelineBodyCarriageReturnEscape(self):
  entry = rdf_timeline.TimelineEntry()
  entry.path = "/foo\rbar/baz\r\r\rquux".encode("utf-8")

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

  flow = self.api.Client(client_id).Flow(flow_id)
  chunks = flow.GetCollectedTimelineBody(carriage_return_escape=True)

  data = io.BytesIO()
  chunks.WriteToStream(data)

  content = data.getvalue().decode("utf-8")
  self.assertIn("|/foo\\rbar/baz\\r\\r\\rquux|", content)
def testNonPrintableEscape(self):
  entry = rdf_timeline.TimelineEntry()
  entry.path = b"/f\x00b\x0ar\x1baz"

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
  args.body_opts.non_printable_escape = True

  result = self.handler.Handle(args)
  content = b"".join(result.GenerateContent()).decode("utf-8")

  self.assertIn(r"|/f\x00b\x0ar\x1baz|", content)
def testBackslashEscape(self):
  entry = rdf_timeline.TimelineEntry()
  entry.path = "C:\\Windows\\system32\\notepad.exe".encode("utf-8")

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
  args.body_opts.backslash_escape = True

  result = self.handler.Handle(args)
  content = b"".join(result.GenerateContent()).decode("utf-8")

  self.assertIn("|C:\\\\Windows\\\\system32\\\\notepad.exe|", content)
def testCarriageReturnEscape(self):
  entry = rdf_timeline.TimelineEntry()
  entry.path = "/foo/bar\r\rbaz/quux\rnorf".encode("utf-8")

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
  args.body_opts.carriage_return_escape = True

  result = self.handler.Handle(args)
  content = b"".join(result.GenerateContent()).decode("utf-8")

  self.assertIn("|/foo/bar\\r\\rbaz/quux\\rnorf|", content)
def testBodySubsecondPrecision(self):
  entry = rdf_timeline.TimelineEntry()
  entry.path = "/foo/bar/baz".encode("utf-8")
  entry.atime_ns = int(3.14 * 10**9)

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
  args.body_opts.timestamp_subsecond_precision = True

  result = self.handler.Handle(args)
  content = b"".join(result.GenerateContent()).decode("utf-8")

  rows = list(csv.reader(io.StringIO(content), delimiter="|"))
  self.assertLen(rows, 1)
  self.assertEqual(rows[0][1], "/foo/bar/baz")
  self.assertEqual(rows[0][7], "3.14")
def testNtfsFileReferenceFormat(self):
  entry = rdf_timeline.TimelineEntry()
  entry.path = "/foo/bar/baz".encode("utf-8")
  entry.ino = 1688849860339456

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = timeline_test_lib.WriteTimeline(client_id, [entry])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
  args.body_opts.inode_ntfs_file_reference_format = True

  result = self.handler.Handle(args)
  content = b"".join(result.GenerateContent()).decode("utf-8")

  rows = list(csv.reader(io.StringIO(content), delimiter="|"))
  self.assertLen(rows, 1)
  self.assertEqual(rows[0][1], "/foo/bar/baz")
  self.assertEqual(rows[0][2], "75520-6")
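# Why 1688849860339456 becomes "75520-6": an NTFS file reference is a 64-bit
# value packing a 48-bit MFT entry number in the low bits and a 16-bit
# sequence number in the high bits. Decoding the test's inode (a sketch of
# the arithmetic, not necessarily the handler's actual implementation):
#
#   ino = 1688849860339456
#   entry_number = ino & ((1 << 48) - 1)  # 75520
#   sequence = ino >> 48                  # 6
#   assert "{}-{}".format(entry_number, sequence) == "75520-6"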
def testBodySubsecondPrecision(self):
  client_id = db_test_utils.InitializeClient(data_store.REL_DB)

  hunt_id = "ABCDEABCDE"

  snapshot = rdf_objects.ClientSnapshot()
  snapshot.client_id = client_id
  snapshot.knowledge_base.fqdn = "foo.bar.baz"
  data_store.REL_DB.WriteClientSnapshot(snapshot)

  hunt_obj = rdf_hunt_objects.Hunt()
  hunt_obj.hunt_id = hunt_id
  hunt_obj.args.standard.client_ids = [client_id]
  hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
  hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
  data_store.REL_DB.WriteHuntObject(hunt_obj)

  entry = rdf_timeline.TimelineEntry()
  entry.path = "/foo/bar/baz".encode("utf-8")
  entry.btime_ns = int(1337.42 * 10**9)

  timeline_test_lib.WriteTimeline(client_id, [entry], hunt_id=hunt_id)

  args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
  args.hunt_id = hunt_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY
  args.body_opts.timestamp_subsecond_precision = True

  content = b"".join(self.handler.Handle(args).GenerateContent())

  buffer = io.BytesIO(content)
  with zipfile.ZipFile(buffer, mode="r") as archive:
    with archive.open(f"{client_id}_foo.bar.baz.body", mode="r") as file:
      content_file = file.read().decode("utf-8")

      rows = list(csv.reader(io.StringIO(content_file), delimiter="|"))
      self.assertLen(rows, 1)
      self.assertEqual(rows[0][1], "/foo/bar/baz")
      self.assertEqual(rows[0][10], "1337.42")
def testRawGzchunkedMultipleClients(self):
  client_id_1 = db_test_utils.InitializeClient(data_store.REL_DB)
  client_id_2 = db_test_utils.InitializeClient(data_store.REL_DB)

  snapshot = rdf_objects.ClientSnapshot()
  snapshot.client_id = client_id_1
  snapshot.knowledge_base.fqdn = "foo.quux.com"
  data_store.REL_DB.WriteClientSnapshot(snapshot)

  snapshot = rdf_objects.ClientSnapshot()
  snapshot.client_id = client_id_2
  snapshot.knowledge_base.fqdn = "foo.norf.com"
  data_store.REL_DB.WriteClientSnapshot(snapshot)

  hunt_id = "A0B1D2C3E4"

  hunt_obj = rdf_hunt_objects.Hunt()
  hunt_obj.hunt_id = hunt_id
  hunt_obj.args.standard.client_ids = [client_id_1, client_id_2]
  hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
  hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
  data_store.REL_DB.WriteHuntObject(hunt_obj)

  entry_1 = rdf_timeline.TimelineEntry()
  entry_1.path = "foo_1".encode("utf-8")
  entry_1.ino = 5432154321
  entry_1.size = 13371
  entry_1.atime_ns = 122 * 10**9
  entry_1.mtime_ns = 233 * 10**9
  entry_1.ctime_ns = 344 * 10**9
  entry_1.mode = 0o663

  entry_2 = rdf_timeline.TimelineEntry()
  entry_2.path = "foo_2".encode("utf-8")
  entry_2.ino = 7654376543
  entry_2.size = 13372
  entry_2.atime_ns = 788 * 10**9
  entry_2.mtime_ns = 899 * 10**9
  entry_2.ctime_ns = 900 * 10**9
  entry_2.mode = 0o763

  timeline_test_lib.WriteTimeline(client_id_1, [entry_1], hunt_id=hunt_id)
  timeline_test_lib.WriteTimeline(client_id_2, [entry_2], hunt_id=hunt_id)

  args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
  args.hunt_id = hunt_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED

  content = b"".join(self.handler.Handle(args).GenerateContent())

  buffer = io.BytesIO(content)
  with zipfile.ZipFile(buffer, mode="r") as archive:
    client_filename_1 = f"{client_id_1}_foo.quux.com.gzchunked"
    with archive.open(client_filename_1, mode="r") as file:
      chunks = chunked.ReadAll(file)
      entries = list(rdf_timeline.TimelineEntry.DeserializeStream(chunks))
      self.assertEqual(entries, [entry_1])

    client_filename_2 = f"{client_id_2}_foo.norf.com.gzchunked"
    with archive.open(client_filename_2, mode="r") as file:
      chunks = chunked.ReadAll(file)
      entries = list(rdf_timeline.TimelineEntry.DeserializeStream(chunks))
      self.assertEqual(entries, [entry_2])
def testBodyMultipleClients(self):
  client_id_1 = db_test_utils.InitializeClient(data_store.REL_DB)
  client_id_2 = db_test_utils.InitializeClient(data_store.REL_DB)

  snapshot = rdf_objects.ClientSnapshot()
  snapshot.client_id = client_id_1
  snapshot.knowledge_base.fqdn = "bar.quux.com"
  data_store.REL_DB.WriteClientSnapshot(snapshot)

  snapshot = rdf_objects.ClientSnapshot()
  snapshot.client_id = client_id_2
  snapshot.knowledge_base.fqdn = "bar.quuz.com"
  data_store.REL_DB.WriteClientSnapshot(snapshot)

  hunt_id = "B1C2E3D4F5"

  hunt_obj = rdf_hunt_objects.Hunt()
  hunt_obj.hunt_id = hunt_id
  hunt_obj.args.standard.client_ids = [client_id_1, client_id_2]
  hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
  hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
  data_store.REL_DB.WriteHuntObject(hunt_obj)

  entry_1 = rdf_timeline.TimelineEntry()
  entry_1.path = "/bar/baz/quux".encode("utf-8")
  entry_1.ino = 5926273453
  entry_1.size = 13373
  entry_1.atime_ns = 111 * 10**9
  entry_1.mtime_ns = 222 * 10**9
  entry_1.ctime_ns = 333 * 10**9
  entry_1.mode = 0o664

  entry_2 = rdf_timeline.TimelineEntry()
  entry_2.path = "/bar/baz/quuz".encode("utf-8")
  entry_2.ino = 6037384564
  entry_2.size = 13374
  entry_2.atime_ns = 777 * 10**9
  entry_2.mtime_ns = 888 * 10**9
  entry_2.ctime_ns = 999 * 10**9
  entry_2.mode = 0o777

  timeline_test_lib.WriteTimeline(client_id_1, [entry_1], hunt_id=hunt_id)
  timeline_test_lib.WriteTimeline(client_id_2, [entry_2], hunt_id=hunt_id)

  args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
  args.hunt_id = hunt_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

  content = b"".join(self.handler.Handle(args).GenerateContent())

  buffer = io.BytesIO(content)
  with zipfile.ZipFile(buffer, mode="r") as archive:
    client_filename_1 = f"{client_id_1}_bar.quux.com.body"
    with archive.open(client_filename_1, mode="r") as file:
      content_file = file.read().decode("utf-8")

      rows = list(csv.reader(io.StringIO(content_file), delimiter="|"))
      self.assertLen(rows, 1)
      self.assertEqual(rows[0][1], "/bar/baz/quux")
      self.assertEqual(rows[0][2], "5926273453")
      self.assertEqual(rows[0][3], stat.filemode(0o664))
      self.assertEqual(rows[0][6], "13373")
      self.assertEqual(rows[0][7], "111")
      self.assertEqual(rows[0][8], "222")
      self.assertEqual(rows[0][9], "333")

    client_filename_2 = f"{client_id_2}_bar.quuz.com.body"
    with archive.open(client_filename_2, mode="r") as file:
      content_file = file.read().decode("utf-8")

      rows = list(csv.reader(io.StringIO(content_file), delimiter="|"))
      self.assertLen(rows, 1)
      self.assertEqual(rows[0][1], "/bar/baz/quuz")
      self.assertEqual(rows[0][2], "6037384564")
      self.assertEqual(rows[0][3], stat.filemode(0o777))
      self.assertEqual(rows[0][6], "13374")
      self.assertEqual(rows[0][7], "777")
      self.assertEqual(rows[0][8], "888")
      self.assertEqual(rows[0][9], "999")