def testListScheduledFlows(self, db: abstract_db.Database):
  """Only flows scheduled for the requested client and creator are listed."""
  token = _CreateToken(db)
  client_id1 = db_test_utils.InitializeClient(db)
  client_id2 = db_test_utils.InitializeClient(db)

  schedule_handler = flow_plugin.ApiScheduleFlowHandler()

  def ScheduleCollectSingleFile(client_id):
    # Schedules an identical CollectSingleFile flow for the given client.
    return schedule_handler.Handle(
        flow_plugin.ApiCreateFlowArgs(
            client_id=client_id,
            flow=flow_plugin.ApiFlow(
                name=file.CollectSingleFile.__name__,
                args=rdf_file_finder.CollectSingleFileArgs(path="/foo"),
                runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60))),
        token=token)

  sf1 = ScheduleCollectSingleFile(client_id1)
  sf2 = ScheduleCollectSingleFile(client_id1)
  # A flow scheduled on a different client must not appear in the results.
  ScheduleCollectSingleFile(client_id2)

  list_handler = flow_plugin.ApiListScheduledFlowsHandler()
  list_args = flow_plugin.ApiListScheduledFlowsArgs(
      client_id=client_id1, creator=token.username)
  results = list_handler.Handle(list_args, token=token)
  self.assertEqual(results.scheduled_flows, [sf1, sf2])
def testRawGzchunkedMultipleClients(self):
  """A hunt timeline archive contains one gzchunked member per client."""
  client_id_1 = db_test_utils.InitializeClient(data_store.REL_DB)
  client_id_2 = db_test_utils.InitializeClient(data_store.REL_DB)

  snapshot = rdf_objects.ClientSnapshot()
  snapshot.client_id = client_id_1
  snapshot.knowledge_base.fqdn = "foo.quux.com"
  data_store.REL_DB.WriteClientSnapshot(snapshot)

  snapshot = rdf_objects.ClientSnapshot()
  snapshot.client_id = client_id_2
  snapshot.knowledge_base.fqdn = "foo.norf.com"
  data_store.REL_DB.WriteClientSnapshot(snapshot)

  hunt_id = "A0B1D2C3E4"

  hunt_obj = rdf_hunt_objects.Hunt()
  hunt_obj.hunt_id = hunt_id
  hunt_obj.args.standard.client_ids = [client_id_1, client_id_2]
  hunt_obj.args.standard.flow_name = timeline.TimelineFlow.__name__
  hunt_obj.hunt_state = rdf_hunt_objects.Hunt.HuntState.PAUSED
  data_store.REL_DB.WriteHuntObject(hunt_obj)

  entry_1 = rdf_timeline.TimelineEntry()
  entry_1.path = "foo_1".encode("utf-8")
  entry_1.size = 13371

  entry_2 = rdf_timeline.TimelineEntry()
  entry_2.path = "foo_2".encode("utf-8")
  entry_2.size = 13372

  _WriteTimeline(client_id_1, [entry_1], hunt_id=hunt_id)
  _WriteTimeline(client_id_2, [entry_2], hunt_id=hunt_id)

  args = api_timeline.ApiGetCollectedHuntTimelinesArgs()
  args.hunt_id = hunt_id

  content = b"".join(self.handler.Handle(args).GenerateContent())
  buffer = io.BytesIO(content)

  with zipfile.ZipFile(buffer, mode="r") as archive:
    # Archive member names are derived from the client id and its FQDN.
    client_filename_1 = f"{client_id_1}_foo.quux.com.gzchunked"
    with archive.open(client_filename_1, mode="r") as filedesc:
      chunks = chunked.ReadAll(filedesc)
      entries = list(rdf_timeline.TimelineEntry.DeserializeStream(chunks))
      self.assertEqual(entries, [entry_1])

    client_filename_2 = f"{client_id_2}_foo.norf.com.gzchunked"
    with archive.open(client_filename_2, mode="r") as filedesc:
      chunks = chunked.ReadAll(filedesc)
      entries = list(rdf_timeline.TimelineEntry.DeserializeStream(chunks))
      self.assertEqual(entries, [entry_2])
def testUnscheduleFlowRemovesScheduledFlow(self, db: abstract_db.Database):
  """Unscheduling one flow leaves the other scheduled flow intact."""
  context = _CreateContext(db)
  client_id = db_test_utils.InitializeClient(db)

  schedule_handler = flow_plugin.ApiScheduleFlowHandler()

  def ScheduleCollectSingleFile():
    # Schedules a CollectSingleFile flow for the test client.
    return schedule_handler.Handle(
        flow_plugin.ApiCreateFlowArgs(
            client_id=client_id,
            flow=flow_plugin.ApiFlow(
                name=file.CollectSingleFile.__name__,
                args=rdf_file_finder.CollectSingleFileArgs(path="/foo"),
                runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60))),
        context=context)

  sf1 = ScheduleCollectSingleFile()
  sf2 = ScheduleCollectSingleFile()

  unschedule_handler = flow_plugin.ApiUnscheduleFlowHandler()
  unschedule_args = flow_plugin.ApiUnscheduleFlowArgs(
      client_id=client_id, scheduled_flow_id=sf1.scheduled_flow_id)
  unschedule_handler.Handle(unschedule_args, context=context)

  list_handler = flow_plugin.ApiListScheduledFlowsHandler()
  list_args = flow_plugin.ApiListScheduledFlowsArgs(
      client_id=client_id, creator=context.username)
  results = list_handler.Handle(list_args, context=context)
  self.assertEqual(results.scheduled_flows, [sf2])
def testValidatesParsersWereNotApplied(self, db: abstract_db.Database):
  """The handler rejects flows whose results were already parsed."""
  token = _CreateToken(db)
  client_id = db_test_utils.InitializeClient(db)

  with mock.patch.object(artifact_registry, "REGISTRY",
                         artifact_registry.ArtifactRegistry()) as registry:
    registry.RegisterArtifact(self.ECHO1337_ARTIFACT)

    flow_args = rdf_artifacts.ArtifactCollectorFlowArgs()
    flow_args.artifact_list = [self.ECHO1337_ARTIFACT.name]
    # Parsers are applied during collection, so a later on-demand parse is
    # expected to be refused.
    flow_args.apply_parsers = True

    flow_id = flow_test_lib.TestFlowHelper(
        collectors.ArtifactCollectorFlow.__name__,
        self.FakeExecuteCommand(),
        client_id=client_id,
        args=flow_args,
        token=token)

    flow_test_lib.FinishAllFlowsOnClient(client_id)

    args = flow_plugin.ApiListParsedFlowResultsArgs()
    args.client_id = client_id
    args.flow_id = flow_id

    with self.assertRaisesRegex(ValueError, "already parsed"):
      self.handler.Handle(args, token=token)
def testResultMetadataHasGroupedNumberOfReplies(self,
                                                db: abstract_db.Database):
  """Result metadata groups reply counts by (type, tag)."""
  client_id = db_test_utils.InitializeClient(db)

  flow = rdf_flow_objects.Flow()
  flow.client_id = client_id
  flow.flow_id = self._FLOW_ID
  db.WriteFlowObject(flow)

  flow_obj = FlowBaseTest.Flow(flow)
  flow_obj.SendReply(rdf_client.ClientInformation())
  flow_obj.SendReply(rdf_client.StartupInfo())
  flow_obj.SendReply(rdf_client.StartupInfo())
  flow_obj.SendReply(rdf_client.StartupInfo(), tag="foo")
  flow_obj.PersistState()
  db.WriteFlowObject(flow_obj.rdf_flow)

  # Re-read the flow to make sure the metadata survived the round trip.
  reread_flow = db.ReadFlowObject(client_id, self._FLOW_ID)
  result_metadata = FlowBaseTest.Flow(reread_flow).GetResultMetadata()

  # Expected groups: (ClientInformation, ""), (StartupInfo, ""),
  # (StartupInfo, "foo").
  self.assertLen(result_metadata.num_results_per_type_tag, 3)

  sorted_counts = sorted(
      result_metadata.num_results_per_type_tag, key=lambda v: (v.type, v.tag))

  self.assertEqual(sorted_counts[0].type, "ClientInformation")
  self.assertEqual(sorted_counts[0].tag, "")
  self.assertEqual(sorted_counts[0].count, 1)
  self.assertEqual(sorted_counts[1].type, "StartupInfo")
  self.assertEqual(sorted_counts[1].tag, "")
  self.assertEqual(sorted_counts[1].count, 2)
  self.assertEqual(sorted_counts[2].type, "StartupInfo")
  self.assertEqual(sorted_counts[2].tag, "foo")
  self.assertEqual(sorted_counts[2].count, 1)
def testResultMetadataAreCorrectlyUpdatedAfterMultiplePersistStateCalls(
    self, db: abstract_db.Database):
  """Calling PersistState twice must not duplicate result metadata."""
  client_id = db_test_utils.InitializeClient(db)

  flow = rdf_flow_objects.Flow()
  flow.client_id = client_id
  flow.flow_id = self._FLOW_ID
  db.WriteFlowObject(flow)

  flow_obj = FlowBaseTest.Flow(flow)
  flow_obj.SendReply(rdf_client.ClientInformation())
  # PersistState is deliberately invoked twice; the counts must stay correct.
  flow_obj.PersistState()
  flow_obj.PersistState()
  db.WriteFlowObject(flow_obj.rdf_flow)

  reread_flow = db.ReadFlowObject(client_id, self._FLOW_ID)
  result_metadata = FlowBaseTest.Flow(reread_flow).GetResultMetadata()

  self.assertLen(result_metadata.num_results_per_type_tag, 1)
  self.assertTrue(result_metadata.is_metadata_set)
  self.assertEqual(result_metadata.num_results_per_type_tag[0].type,
                   "ClientInformation")
  self.assertEqual(result_metadata.num_results_per_type_tag[0].tag, "")
  self.assertEqual(result_metadata.num_results_per_type_tag[0].count, 1)
def testRawGzchunkedMulipleEntries(self):
  """A RAW_GZCHUNKED export round-trips many timeline entries."""
  # NOTE(review): "Muliple" in the method name is a typo for "Multiple";
  # kept as-is to avoid changing the public test identifier.
  entries = []
  for i in range(1024):
    entry = rdf_timeline.TimelineEntry()
    entry.path = f"/quux/thud/bar/baz/foo{i}".encode("utf-8")
    entry.size = random.randint(0, 1024)
    entries.append(entry)

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = _WriteTimeline(client_id, entries)

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED

  content = b"".join(self.handler.Handle(args).GenerateContent())

  buf = io.BytesIO(content)
  chunks = chunked.ReadAll(buf)
  deserialized = list(rdf_timeline.TimelineEntry.DeserializeStream(chunks))
  self.assertEqual(entries, deserialized)
def testExecutePythonHackWithResult(self):
  """An ExecutePythonHack flow returns the hack's magic return string."""
  client_id = db_test_utils.InitializeClient(data_store.REL_DB)

  # NOTE(review): the hack source below was reconstructed with newlines (the
  # flattened source had lost them); the snippet has to be valid top-level
  # Python since the client exec()s it.
  code = """
magic_return_str = str(py_args["foobar"])
"""
  maintenance_utils.UploadSignedConfigBlob(
      content=code.encode("utf-8"),
      aff4_path="aff4:/config/python_hacks/quux")

  flow_id = flow_test_lib.TestFlowHelper(
      administrative.ExecutePythonHack.__name__,
      client_mock=action_mocks.ActionMock(standard.ExecutePython),
      client_id=client_id,
      hack_name="quux",
      py_args={"foobar": 42},
      token=self.token)

  flow_test_lib.FinishAllFlowsOnClient(client_id=client_id)

  results = flow_test_lib.GetFlowResults(client_id=client_id, flow_id=flow_id)
  self.assertLen(results, 1)
  self.assertIsInstance(results[0], administrative.ExecutePythonHackResult)
  self.assertEqual(results[0].result_string, "42")
def testEmptyResults(self, db: abstract_db.Database):
  """An artifact with no sources yields neither items nor errors."""
  token = _CreateToken(db)
  client_id = db_test_utils.InitializeClient(db)

  fake_artifact = rdf_artifacts.Artifact(
      name="FakeArtifact", doc="Lorem ipsum.", sources=[])

  with mock.patch.object(artifact_registry, "REGISTRY",
                         artifact_registry.ArtifactRegistry()) as registry:
    registry.RegisterArtifact(fake_artifact)

    flow_args = rdf_artifacts.ArtifactCollectorFlowArgs()
    flow_args.artifact_list = [fake_artifact.name]
    flow_args.apply_parsers = False

    flow_id = flow_test_lib.TestFlowHelper(
        collectors.ArtifactCollectorFlow.__name__,
        self.FakeExecuteCommand(),
        client_id=client_id,
        args=flow_args,
        token=token)

    flow_test_lib.FinishAllFlowsOnClient(client_id)

    args = flow_plugin.ApiListParsedFlowResultsArgs(
        client_id=client_id, flow_id=flow_id, offset=0, count=1024)
    result = self.handler.Handle(args, token=token)

    self.assertEmpty(result.errors)
    self.assertEmpty(result.items)
def testWriteApprovalRequestSubject(self):
  """Approval requests round-trip for every supported approval type."""
  self.db.WriteGRRUser("requestor")

  day = rdfvalue.Duration.From(1, rdfvalue.DAYS)
  tomorrow = rdfvalue.RDFDatetime.Now() + day

  client_id = db_test_utils.InitializeClient(self.db)
  hunt_id = db_test_utils.InitializeHunt(self.db)
  cron_job_id = db_test_utils.InitializeCronJob(self.db)

  subject_ids = {
      ApprovalRequest.APPROVAL_TYPE_CLIENT: client_id,
      ApprovalRequest.APPROVAL_TYPE_HUNT: hunt_id,
      ApprovalRequest.APPROVAL_TYPE_CRON_JOB: cron_job_id,
  }

  # We iterate over all possible approval types. This will make the test fail
  # if a new approval type is added in the future but no subject is specified
  # for it.
  approval_types = set(_.number for _ in ApprovalType.DESCRIPTOR.values)
  approval_types.remove(ApprovalType.APPROVAL_TYPE_NONE)

  for approval_type in approval_types:
    subject_id = subject_ids[approval_type]

    request = rdf_objects.ApprovalRequest()
    # BUG FIX: the requestor username must match the GRR user written above
    # and the one passed to ReadApprovalRequest below (the source contained
    # a redacted "******" placeholder).
    request.requestor_username = "requestor"
    request.approval_type = approval_type
    request.subject_id = subject_id
    request.expiration_time = tomorrow
    request_id = self.db.WriteApprovalRequest(request)

    with self.subTest(case="Read single", type=approval_type):
      request = self.db.ReadApprovalRequest("requestor", request_id)
def testReadWriteApprovalRequestWithEmptyNotifiedUsersEmailsAndGrants(self):
  """A written approval request reads back equal to the written model."""
  d = self.db
  d.WriteGRRUser("requestor")
  client_id = db_test_utils.InitializeClient(self.db)

  approval_request = rdf_objects.ApprovalRequest(
      approval_type=rdf_objects.ApprovalRequest.ApprovalType
      .APPROVAL_TYPE_CLIENT,
      subject_id=client_id,
      # BUG FIX: must match the GRR user written above and the username used
      # in ReadApprovalRequest below (source held a redacted "******").
      requestor_username="requestor",
      reason="some test reason",
      expiration_time=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42))

  approval_id = d.WriteApprovalRequest(approval_request)
  self.assertTrue(approval_id)

  read_request = d.ReadApprovalRequest("requestor", approval_id)

  # RDF values are terrible and differentiate between empty repeated fields
  # and non-set repeated fields.
  self.assertFalse(read_request.grants)
  read_request.grants = []
  approval_request.grants = []

  # Approval id and timestamp are generated in WriteApprovalRequest so we're
  # filling them into our model object to make sure that equality check works.
  approval_request.approval_id = read_request.approval_id
  approval_request.timestamp = read_request.timestamp

  self.assertEqual(approval_request, read_request)
def testDeleteUserDeletesApprovalGrantsForGrantor(self):
  """Deleting a grantor removes only that grantor's approval grants."""
  d = self.db
  d.WriteGRRUser("requestor")
  d.WriteGRRUser("grantor")
  d.WriteGRRUser("grantor2")
  d.WriteGRRUser("user1")
  d.WriteGRRUser("user2")
  d.WriteGRRUser("user3")
  client_id = db_test_utils.InitializeClient(self.db)

  approval_request = rdf_objects.ApprovalRequest(
      approval_type=rdf_objects.ApprovalRequest.ApprovalType
      .APPROVAL_TYPE_CLIENT,
      subject_id=client_id,
      # BUG FIX: restored from a redacted "******" placeholder; must match
      # the "requestor" user written above and read below.
      requestor_username="requestor",
      reason="some test reason",
      expiration_time=rdfvalue.RDFDatetime.FromSecondsSinceEpoch(42),
      notified_users=["user1", "user2", "user3"],
      # NOTE(review): the CC addresses were redacted in the source; any
      # syntactically valid, distinct addresses serve the test equally.
      email_cc_addresses=["cc1@example.com", "cc2@example.com"])
  approval_id = d.WriteApprovalRequest(approval_request)

  # BUG FIX: grantor usernames were redacted. One grant per grantor is
  # required so that deleting "grantor" leaves exactly the grant issued by
  # "grantor2" (see the assertions below).
  self.db.GrantApproval(
      requestor_username="requestor",
      approval_id=approval_id,
      grantor_username="grantor")
  self.db.GrantApproval(
      requestor_username="requestor",
      approval_id=approval_id,
      grantor_username="grantor2")

  d.DeleteGRRUser("grantor")

  result = d.ReadApprovalRequest("requestor", approval_id)
  self.assertLen(result.grants, 1)
  self.assertEqual(result.grants[0].grantor_username, "grantor2")
def testValidatesFlowName(self, db: abstract_db.Database):
  """The handler rejects flows that are not artifact-collector flows."""
  context = _CreateContext(db)

  class FakeFlow(flow_base.FlowBase):
    # Minimal two-state flow; its only purpose is to not be an
    # artifact-collector flow.

    def Start(self):
      self.CallState("End")

    def End(self, responses: flow_responses.Responses) -> None:
      del responses  # Unused.

  client_id = db_test_utils.InitializeClient(db)
  flow_id = flow_test_lib.TestFlowHelper(
      FakeFlow.__name__,
      client_id=client_id,
      token=access_control.ACLToken(username=context.username))
  flow_test_lib.FinishAllFlowsOnClient(client_id)

  args = flow_plugin.ApiListParsedFlowResultsArgs()
  args.client_id = client_id
  args.flow_id = flow_id

  with self.assertRaisesRegex(ValueError, "artifact-collector"):
    self.handler.Handle(args, context=context)
def testWriteClientSnapshot_duplicateKeyIsRetryable(self):
  """Writing the same snapshot twice at a frozen time raises RetryableError."""
  # Freeze time so both writes land on the identical timestamp key.
  with test_lib.FakeTime(1):
    client_id = db_test_utils.InitializeClient(self.db)
    snapshot = rdf_objects.ClientSnapshot(client_id=client_id)

    self.db.WriteClientSnapshot(snapshot)
    with self.assertRaises(mysql_utils.RetryableError):
      self.db.WriteClientSnapshot(snapshot)
def testReadApprovalRequestsForSubjectReturnsManyNonExpiredApproval(self):
  """All non-expired approvals for a subject are returned."""
  client_id = db_test_utils.InitializeClient(self.db)

  d = self.db
  d.WriteGRRUser("requestor")

  expiration_time = rdfvalue.RDFDatetime.Now() + rdfvalue.Duration.From(
      1, rdfvalue.DAYS)

  approval_ids = set()
  for _ in range(10):
    approval_request = rdf_objects.ApprovalRequest(
        approval_type=rdf_objects.ApprovalRequest.ApprovalType
        .APPROVAL_TYPE_CLIENT,
        subject_id=client_id,
        # BUG FIX: restored from a redacted "******" placeholder; must match
        # the "requestor" user written above and queried below.
        requestor_username="requestor",
        reason="some test reason",
        expiration_time=expiration_time)
    approval_ids.add(d.WriteApprovalRequest(approval_request))

  approvals = list(
      d.ReadApprovalRequests(
          "requestor",
          rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
          subject_id=client_id))

  self.assertLen(approvals, 10)
  self.assertEqual(set(a.approval_id for a in approvals), approval_ids)
def testBodySingleEntry(self):
  """A single timeline entry is exported to exactly one body-format row."""
  entry = rdf_timeline.TimelineEntry()
  entry.path = "/foo/bar/baz".encode("utf-8")
  entry.ino = 4815162342
  entry.size = 42
  entry.atime_ns = 123 * 10**9
  entry.mtime_ns = 456 * 10**9
  entry.ctime_ns = 789 * 10**9

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = _WriteTimeline(client_id, [entry])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

  result = self.handler.Handle(args)
  content = b"".join(result.GenerateContent()).decode("utf-8")

  rows = list(csv.reader(io.StringIO(content), delimiter="|"))
  self.assertLen(rows, 1)
  # Columns checked: 1=path, 2=inode, 6=size, 7/8/9=atime/mtime/ctime in
  # whole seconds (the *_ns fields above are nanoseconds).
  self.assertEqual(rows[0][1], "/foo/bar/baz")
  self.assertEqual(rows[0][2], "4815162342")
  self.assertEqual(rows[0][6], "42")
  self.assertEqual(rows[0][7], "123")
  self.assertEqual(rows[0][8], "456")
  self.assertEqual(rows[0][9], "789")
def testReadApprovalRequestsForSubjectKeepsExpiredApprovalsWhenAsked(self):
  """include_expired=True returns expired as well as valid approvals."""
  client_id = db_test_utils.InitializeClient(self.db)

  d = self.db
  d.WriteGRRUser("requestor")

  time_future = rdfvalue.RDFDatetime.Now() + rdfvalue.Duration.From(
      1, rdfvalue.DAYS)
  time_past = rdfvalue.RDFDatetime.Now() - rdfvalue.Duration.From(
      1, rdfvalue.DAYS)

  approval_ids = set()
  for i in range(10):
    approval_request = rdf_objects.ApprovalRequest(
        approval_type=rdf_objects.ApprovalRequest.ApprovalType
        .APPROVAL_TYPE_CLIENT,
        subject_id=client_id,
        # BUG FIX: restored from a redacted "******" placeholder; must match
        # the "requestor" user written above and queried below.
        requestor_username="requestor",
        reason="some test reason",
        # Alternate between non-expired and already-expired approvals.
        expiration_time=(time_future if i % 2 == 0 else time_past))
    approval_ids.add(d.WriteApprovalRequest(approval_request))

  approvals = list(
      d.ReadApprovalRequests(
          "requestor",
          rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
          subject_id=client_id,
          include_expired=True))

  self.assertLen(approvals, 10)
  self.assertEqual(set(a.approval_id for a in approvals), approval_ids)
def testBodyMultipleEntries(self):
  """Many timeline entries export as one body row each, in input order."""
  entries = []
  for i in range(1024):
    entry = rdf_timeline.TimelineEntry()
    entry.path = f"/foo/bar/baz/quux/norf/thud{i}".encode("utf-8")
    entry.size = random.randint(0, 1024)
    entries.append(entry)

  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = _WriteTimeline(client_id, entries)

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

  result = self.handler.Handle(args)
  content = b"".join(result.GenerateContent()).decode("utf-8")

  rows = list(csv.reader(io.StringIO(content), delimiter="|"))
  self.assertLen(rows, len(entries))

  # Column 1 carries the path, column 6 the size (per the body format rows
  # asserted elsewhere in this suite).
  for i, row in enumerate(rows):
    self.assertEqual(row[1].encode("utf-8"), entries[i].path)
    self.assertEqual(int(row[6]), entries[i].size)
def setUp(self):
  """Creates the handler under test, an API context, and a test client."""
  super().setUp()
  self.handler = hunt_plugin.ApiCreatePerClientFileCollectionHuntHandler()
  # NOTE(review): the username below looks like a redacted placeholder from
  # the original source — confirm against upstream before relying on it.
  self.context = api_call_context.ApiCallContext(username="******")
  self.client_id = db_test_utils.InitializeClient(data_store.REL_DB)
def testRandom(self):
  """InitializeFlow without an explicit id yields a readable flow."""
  db = mem_db.InMemoryDB()
  client_id = db_test_utils.InitializeClient(db)

  flow_id = db_test_utils.InitializeFlow(db, client_id)

  flow_obj = db.ReadFlowObject(client_id=client_id, flow_id=flow_id)
  self.assertIsNotNone(flow_obj)
def testEdrAgentCollection(self):
  """Interrogate stores EDR agent info parsed from a command artifact."""
  client_id = db_test_utils.InitializeClient(data_store.REL_DB)

  artifact_source = rdf_artifacts.ArtifactSource()
  artifact_source.type = rdf_artifacts.ArtifactSource.SourceType.COMMAND
  artifact_source.attributes = {"cmd": "/bin/echo", "args": ["1337"]}

  artifact = rdf_artifacts.Artifact()
  artifact.name = "Foo"
  artifact.doc = "Lorem ipsum."
  artifact.sources = [artifact_source]

  class FooParser(parsers.SingleResponseParser):
    # Turns the echoed stdout into an EdrAgent record.

    supported_artifacts = ["Foo"]

    def ParseResponse(
        self,
        knowledge_base: rdf_client.KnowledgeBase,
        response: rdf_client_action.ExecuteResponse,
    ) -> Iterator[rdf_client.EdrAgent]:
      edr_agent = rdf_client.EdrAgent()
      edr_agent.name = "echo"
      edr_agent.agent_id = response.stdout.decode("utf-8")
      yield edr_agent

  class EchoActionMock(action_mocks.InterrogatedClient):
    # Fakes command execution by echoing the arguments back as stdout.

    def ExecuteCommand(
        self,
        args: rdf_client_action.ExecuteRequest,
    ) -> Iterable[rdf_client_action.ExecuteResponse]:
      response = rdf_client_action.ExecuteResponse()
      response.stdout = " ".join(args.args).encode("utf-8")
      response.exit_status = 0
      return [response]

  with mock.patch.object(artifact_registry, "REGISTRY",
                         artifact_registry.ArtifactRegistry()) as registry:
    registry.RegisterArtifact(artifact)

    with test_lib.ConfigOverrider({"Artifacts.edr_agents": ["Foo"]}):
      with parser_test_lib._ParserContext("Foo", FooParser):
        flow_test_lib.TestFlowHelper(
            discovery.Interrogate.__name__,
            client_mock=EchoActionMock(),
            client_id=client_id,
            creator=self.test_username)
        flow_test_lib.FinishAllFlowsOnClient(client_id)

  snapshot = data_store.REL_DB.ReadClientSnapshot(client_id)
  self.assertLen(snapshot.edr_agents, 1)
  self.assertEqual(snapshot.edr_agents[0].name, "echo")
  self.assertEqual(snapshot.edr_agents[0].agent_id, "1337")
def testKwargs(self):
  """Extra keyword arguments are forwarded to the created flow object."""
  db = mem_db.InMemoryDB()
  username = db_test_utils.InitializeUser(db)
  client_id = db_test_utils.InitializeClient(db)

  flow_id = db_test_utils.InitializeFlow(db, client_id, creator=username)

  flow_obj = db.ReadFlowObject(client_id=client_id, flow_id=flow_id)
  self.assertEqual(flow_obj.creator, username)
def testClientInfoDefault(self, db: abstract_db.Database):
  """client_info defaults to an empty ClientInformation value."""
  client_id = db_test_utils.InitializeClient(db)

  rdf_flow = rdf_flow_objects.Flow()
  rdf_flow.client_id = client_id
  rdf_flow.flow_id = self._FLOW_ID

  flow_obj = FlowBaseTest.Flow(rdf_flow)
  self.assertIsInstance(flow_obj.client_info, rdf_client.ClientInformation)
  self.assertEmpty(flow_obj.client_info.client_name)
def testParsesArtifactCollectionResults(self, db: abstract_db.Database):
  """Results collected without parsing are parsed on demand by the handler."""
  context = _CreateContext(db)

  with mock.patch.object(artifact_registry, "REGISTRY",
                         artifact_registry.ArtifactRegistry()) as registry:
    registry.RegisterArtifact(self.ECHO1337_ARTIFACT)

    flow_args = rdf_artifacts.ArtifactCollectorFlowArgs()
    flow_args.artifact_list = [self.ECHO1337_ARTIFACT.name]
    # Collect raw results only; parsing happens later in the handler.
    flow_args.apply_parsers = False

    client_id = db_test_utils.InitializeClient(db)
    flow_id = flow_test_lib.TestFlowHelper(
        collectors.ArtifactCollectorFlow.__name__,
        self.FakeExecuteCommand(),
        client_id=client_id,
        args=flow_args,
        creator=context.username)

    flow_test_lib.FinishAllFlowsOnClient(client_id)

  class FakeParser(
      abstract_parser.SingleResponseParser[rdf_client_action.ExecuteResponse],
  ):
    # Copies stdout through and stamps a recognizable stderr marker.

    supported_artifacts = [self.ECHO1337_ARTIFACT.name]

    def ParseResponse(
        self,
        knowledge_base: rdf_client.KnowledgeBase,
        response: rdf_client_action.ExecuteResponse,
    ) -> Iterable[rdf_client_action.ExecuteResponse]:
      precondition.AssertType(response, rdf_client_action.ExecuteResponse)

      parsed_response = rdf_client_action.ExecuteResponse()
      parsed_response.stdout = response.stdout
      parsed_response.stderr = b"4815162342"
      return [parsed_response]

  with parser_test_lib._ParserContext("Fake", FakeParser):
    args = flow_plugin.ApiListParsedFlowResultsArgs(
        client_id=client_id, flow_id=flow_id, offset=0, count=1024)
    result = self.handler.Handle(args, context=context)

  self.assertEmpty(result.errors)
  self.assertLen(result.items, 1)

  response = result.items[0].payload
  self.assertIsInstance(response, rdf_client_action.ExecuteResponse)
  self.assertEqual(response.stdout, b"1337")
  self.assertEqual(response.stderr, b"4815162342")
def testSupplied(self):
  """An explicitly supplied flow id is used verbatim."""
  db = mem_db.InMemoryDB()
  client_id = db_test_utils.InitializeClient(db)

  flow_id = db_test_utils.InitializeFlow(db, client_id, flow_id="ABCDEF42")
  self.assertEqual(flow_id, "ABCDEF42")

  flow_obj = db.ReadFlowObject(client_id=client_id, flow_id=flow_id)
  self.assertIsNotNone(flow_obj)
def testRaisesOnIncorrectFormat(self):
  """An UNSPECIFIED export format is rejected with a ValueError."""
  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = _WriteTimeline(client_id, [])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.UNSPECIFIED

  with self.assertRaises(ValueError):
    self.handler.Handle(args)
def testReadApprovalRequestsForSubjectReturnsNothingWhenNoApprovals(self):
  """Querying a subject with no approvals yields an empty result."""
  client_id = db_test_utils.InitializeClient(self.db)

  d = self.db
  d.WriteGRRUser("requestor")

  approvals = list(
      d.ReadApprovalRequests(
          "requestor",
          rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
          subject_id=client_id))
  self.assertFalse(approvals)
def testReturnsDefaultFlowProgressForEmptyFlow(self,
                                               db: abstract_db.Database):
  """A flow without custom progress reports DefaultFlowProgress."""
  client_id = db_test_utils.InitializeClient(db)

  flow = rdf_flow_objects.Flow()
  flow.client_id = client_id
  flow.flow_id = self._FLOW_ID
  db.WriteFlowObject(flow)

  flow_obj = FlowBaseTest.Flow(flow)
  progress = flow_obj.GetProgress()
  self.assertIsInstance(progress, rdf_flow_objects.DefaultFlowProgress)
def testBodyMultipleResults(self):
  """Entries from multiple flow results all appear in one body export."""
  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = "ABCDEF42"

  flow_obj = rdf_flow_objects.Flow()
  flow_obj.client_id = client_id
  flow_obj.flow_id = flow_id
  flow_obj.flow_class_name = timeline.TimelineFlow.__name__
  flow_obj.create_time = rdfvalue.RDFDatetime.Now()
  data_store.REL_DB.WriteFlowObject(flow_obj)

  def MakeFlowResult(path: bytes) -> rdf_flow_objects.FlowResult:
    # Serializes a single-entry timeline batch into the blob store and wraps
    # the resulting blob id in a flow result for the test flow.
    entry = rdf_timeline.TimelineEntry()
    entry.path = path

    blobs = list(rdf_timeline.TimelineEntry.SerializeStream(iter([entry])))
    (blob_id,) = data_store.BLOBS.WriteBlobsWithUnknownHashes(blobs)

    timeline_result = rdf_timeline.TimelineResult()
    timeline_result.entry_batch_blob_ids = [blob_id.AsBytes()]

    flow_result = rdf_flow_objects.FlowResult()
    flow_result.client_id = client_id
    flow_result.flow_id = flow_id
    flow_result.payload = timeline_result
    return flow_result

  data_store.REL_DB.WriteFlowResults([
      MakeFlowResult("/foo".encode("utf-8")),
      MakeFlowResult("/bar".encode("utf-8")),
  ])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.BODY

  result = self.handler.Handle(args)
  content = b"".join(result.GenerateContent()).decode("utf-8")

  self.assertIn("|/foo|", content)
  self.assertIn("|/bar|", content)
def testRawGzchunkedEmpty(self):
  """Exporting an empty timeline yields no chunks at all."""
  client_id = db_test_utils.InitializeClient(data_store.REL_DB)
  flow_id = _WriteTimeline(client_id, [])

  args = api_timeline.ApiGetCollectedTimelineArgs()
  args.client_id = client_id
  args.flow_id = flow_id
  args.format = api_timeline.ApiGetCollectedTimelineArgs.Format.RAW_GZCHUNKED

  content = b"".join(self.handler.Handle(args).GenerateContent())
  buf = io.BytesIO(content)
  # chunked.Read returns None on end-of-stream, i.e. no chunk was written.
  self.assertIsNone(chunked.Read(buf))