def testProgress(self):
  """Tests that flow progress reflects the number of collected entries."""
  client_id = self.client_id

  with temp.AutoTempDirPath(remove_non_empty=True) as tempdir:
    # Three files plus the root directory itself yield four timeline entries.
    for filename in ("foo", "bar", "baz"):
      filesystem_test_lib.CreateFile(os.path.join(tempdir, filename))

    args = rdf_timeline.TimelineArgs()
    args.root = tempdir.encode("utf-8")

    flow_id = flow_test_lib.StartFlow(
        timeline_flow.TimelineFlow, client_id=client_id, flow_args=args)

    # Before the flow has been run, no entries have been counted yet.
    progress = flow_test_lib.GetFlowProgress(
        client_id=client_id, flow_id=flow_id)
    self.assertEqual(progress.total_entry_count, 0)

    flow_test_lib.RunFlow(
        client_id=client_id,
        flow_id=flow_id,
        client_mock=action_mocks.ActionMock(timeline_action.Timeline))

    progress = flow_test_lib.GetFlowProgress(
        client_id=client_id, flow_id=flow_id)
    self.assertEqual(progress.total_entry_count, 4)
def testNoLogsIfBtimeSupported(self, db: abstract_db.Database):
  """Tests that no warning is logged when the client supports birth time."""
  client_id = self.client_id
  db.WriteClientMetadata(client_id, fleetspeak_enabled=True)

  # Register a Linux client that advertises btime support in its startup
  # information.
  snapshot = rdf_objects.ClientSnapshot()
  snapshot.client_id = client_id
  snapshot.knowledge_base.os = "Linux"
  snapshot.startup_info.client_info.timeline_btime_support = True
  db.WriteClientSnapshot(snapshot)

  with temp.AutoTempDirPath() as tempdir:
    args = rdf_timeline.TimelineArgs(root=tempdir.encode("utf-8"))

    flow_id = flow_test_lib.TestFlowHelper(
        timeline_flow.TimelineFlow.__name__,
        action_mocks.ActionMock(timeline_action.Timeline),
        client_id=client_id,
        creator=self.test_username,
        args=args)
    flow_test_lib.FinishAllFlowsOnClient(client_id)

    # A btime-capable client should not trigger any warning log entries.
    log_entries = db.ReadFlowLogEntries(client_id, flow_id, offset=0, count=1)
    self.assertEmpty(log_entries)
def testLogsWarningIfBtimeNotSupported(self, db: abstract_db.Database):
  """Tests that a warning is logged when the client lacks btime support.

  Args:
    db: A database instance supplied by the test fixture.
  """
  client_id = self.client_id
  db.WriteClientMetadata(client_id, fleetspeak_enabled=True)

  # Register a Linux client that explicitly reports no btime support.
  snapshot = rdf_objects.ClientSnapshot()
  snapshot.client_id = client_id
  snapshot.knowledge_base.os = "Linux"
  snapshot.startup_info.client_info.timeline_btime_support = False
  db.WriteClientSnapshot(snapshot)

  with temp.AutoTempDirPath() as tempdir:
    args = rdf_timeline.TimelineArgs(root=tempdir.encode("utf-8"))

    # Use `creator=self.test_username` for consistency with the sibling
    # test `testNoLogsIfBtimeSupported`; the previous `token=self.token`
    # form is the legacy spelling of the same thing.
    flow_id = flow_test_lib.TestFlowHelper(
        timeline_flow.TimelineFlow.__name__,
        action_mocks.ActionMock(timeline_action.Timeline),
        client_id=client_id,
        creator=self.test_username,
        args=args)
    flow_test_lib.FinishAllFlowsOnClient(client_id)

    log_entries = db.ReadFlowLogEntries(client_id, flow_id, offset=0, count=1)
    self.assertLen(log_entries, 1)
    self.assertRegex(log_entries[0].message, "birth time is not supported")
def _Collect(self, root):
  """Runs the timeline flow over `root` and yields the collected entries."""
  flow_id = flow_test_lib.TestFlowHelper(
      timeline_flow.TimelineFlow.__name__,
      action_mocks.ActionMock(timeline_action.Timeline),
      client_id=self.client_id,
      token=self.token,
      args=rdf_timeline.TimelineArgs(root=root))

  flow_test_lib.FinishAllFlowsOnClient(self.client_id)

  return timeline_flow.Entries(client_id=self.client_id, flow_id=flow_id)
def _Collect(self, root: bytes) -> Iterator[timeline_pb2.TimelineEntry]:
  """Runs the timeline flow over `root` and yields the proto entries."""
  flow_id = flow_test_lib.TestFlowHelper(
      timeline_flow.TimelineFlow.__name__,
      action_mocks.ActionMock(timeline_action.Timeline),
      client_id=self.client_id,
      creator=self.test_username,
      args=rdf_timeline.TimelineArgs(root=root))

  flow_test_lib.FinishAllFlowsOnClient(self.client_id)

  return timeline_flow.ProtoEntries(
      client_id=self.client_id, flow_id=flow_id)
def testRun(self):
  """Tests that the action's results account for every blob and entry."""
  file_count = 64

  with temp.AutoTempDirPath(remove_non_empty=True) as temp_dirpath:
    for idx in range(file_count):
      temp_filepath = os.path.join(temp_dirpath, "foo{}".format(idx))
      _Touch(temp_filepath, content=os.urandom(random.randint(0, 1024)))

    args = rdf_timeline.TimelineArgs()
    args.root = temp_dirpath.encode("utf-8")

    responses = self.RunAction(timeline.Timeline, args)

    results: List[rdf_timeline.TimelineResult] = []
    blobs: List[rdf_protodict.DataBlob] = []

    # The test action runner is not able to distinguish between flow replies
    # and responses sent to well-known flow handlers, so we have to do the
    # filtering ourselves.
    for response in responses:
      if isinstance(response, rdf_timeline.TimelineResult):
        results.append(response)
      elif isinstance(response, rdf_protodict.DataBlob):
        blobs.append(response)
      else:
        # Fixed: the original message contained a stray `f` inside the
        # f-string ("f{response}"), garbling the failure output.
        raise AssertionError(f"Unexpected response: {response}")

    self.assertNotEmpty(results)
    self.assertNotEmpty(blobs)

    # Every uploaded blob must be referenced by some result.
    blob_ids = []
    for result in results:
      blob_ids.extend(result.entry_batch_blob_ids)

    for blob in blobs:
      self.assertIn(hashlib.sha256(blob.data).digest(), blob_ids)

    # Total number of entries should be one more than the file count because
    # of the entry for the root folder.
    total_entry_count = sum(result.entry_count for result in results)
    self.assertEqual(total_entry_count, file_count + 1)

    # The filesystem type should be the same for every result.
    for result in results:
      self.assertEqual(result.filesystem_type, results[0].filesystem_type)
def testRun(self):
  """Tests that every blob emitted by the action is referenced by it."""
  with temp.AutoTempDirPath(remove_non_empty=True) as temp_dirpath:
    for idx in range(64):
      filepath = os.path.join(temp_dirpath, "foo{}".format(idx))
      _Touch(filepath, content=os.urandom(random.randint(0, 1024)))

    args = rdf_timeline.TimelineArgs()
    args.root = temp_dirpath.encode("utf-8")

    responses = self.RunAction(timeline.Timeline, args)
    self.assertNotEmpty(responses)

    # The last response is the timeline result; everything preceding it is a
    # data blob produced by the action.
    *blobs, result = responses
    blob_ids = result.entry_batch_blob_ids
    self.assertNotEmpty(blob_ids)

    for blob in blobs:
      self.assertIn(hashlib.sha256(blob.data).digest(), blob_ids)