def testCreateFlowFromClientRef(self):
  """Tests that CreateFlow on a client reference creates exactly one flow."""
  client_urn = self.SetupClient(0)
  args = processes.ListProcessesArgs(
      filename_regex="blah", fetch_binaries=True)

  # Precondition: the client has no flows yet, in whichever store is active.
  if data_store.RelationalDBFlowsEnabled():
    flows = data_store.REL_DB.ReadAllFlowObjects(client_urn.Basename())
    self.assertEmpty(flows)
  else:
    children = aff4.FACTORY.Open(client_urn, token=self.token).ListChildren()
    self.assertEmpty(list(children))

  client_ref = self.api.Client(client_id=client_urn.Basename())
  result_flow = client_ref.CreateFlow(
      name=processes.ListProcesses.__name__, args=args.AsPrimitiveProto())

  # Postcondition: exactly one flow exists and it carries the args we sent.
  if data_store.RelationalDBFlowsEnabled():
    flows = data_store.REL_DB.ReadAllFlowObjects(client_urn.Basename())
    self.assertLen(flows, 1)
    self.assertEqual(flows[0].args, args)
  else:
    children = aff4.FACTORY.Open(client_urn, token=self.token).ListChildren()
    self.assertLen(list(children), 1)
    result_flow_obj = aff4.FACTORY.Open(result_flow.data.urn, token=self.token)
    self.assertEqual(result_flow_obj.args, args)
def WriteAllCrashDetails(client_id, crash_details, flow_session_id=None, hunt_session_id=None, token=None): """Updates the last crash attribute of the client.""" # AFF4. if data_store.AFF4Enabled(): with aff4.FACTORY.Create(client_id, aff4_grr.VFSGRRClient, token=token) as client_obj: client_obj.Set(client_obj.Schema.LAST_CRASH(crash_details)) # Duplicate the crash information in a number of places so we can find it # easily. client_urn = rdf_client.ClientURN(client_id) client_crashes = aff4_grr.VFSGRRClient.CrashCollectionURNForCID( client_urn) with data_store.DB.GetMutationPool() as pool: grr_collections.CrashCollection.StaticAdd(client_crashes, crash_details, mutation_pool=pool) # Relational db. if data_store.RelationalDBWriteEnabled(): try: data_store.REL_DB.WriteClientCrashInfo(client_id, crash_details) except db.UnknownClientError: pass if not flow_session_id: return if data_store.RelationalDBFlowsEnabled(): flow_id = flow_session_id.Basename() data_store.REL_DB.UpdateFlow(client_id, flow_id, client_crash_info=crash_details) flow_obj = data_store.REL_DB.ReadFlowObject(client_id, flow_id) if flow_obj.parent_hunt_id: db_compat.ProcessHuntClientCrash(flow_obj, client_crash_info=crash_details) # TODO(amoser): Registering crashes in hunts is currently not implemented for # the relational db. if not data_store.RelationalDBFlowsEnabled(): with aff4.FACTORY.Open(flow_session_id, flow.GRRFlow, mode="rw", age=aff4.NEWEST_TIME, token=token) as aff4_flow: aff4_flow.Set(aff4_flow.Schema.CLIENT_CRASH(crash_details)) hunt_session_id = ExtractHuntId(flow_session_id) if hunt_session_id and hunt_session_id != flow_session_id: hunt_obj = aff4.FACTORY.Open(hunt_session_id, aff4_type=implementation.GRRHunt, mode="rw", token=token) hunt_obj.RegisterCrash(crash_details)
def Run(self):
  """Regression data: ListFlowOutputPlugins for a flow with an email plugin."""
  client_id = self.SetupClient(0)
  email_descriptor = rdf_output_plugin.OutputPluginDescriptor(
      plugin_name=email_plugin.EmailOutputPlugin.__name__,
      plugin_args=email_plugin.EmailOutputPluginArgs(
          email_address="test@localhost", emails_limit=42))

  # Fixed fake time keeps timestamps in the golden output deterministic.
  with test_lib.FakeTime(42):
    if data_store.RelationalDBFlowsEnabled():
      flow_id = flow.StartFlow(
          flow_cls=processes.ListProcesses,
          client_id=client_id.Basename(),
          output_plugins=[email_descriptor])
    else:
      flow_urn = flow.StartAFF4Flow(
          flow_name=processes.ListProcesses.__name__,
          client_id=client_id,
          output_plugins=[email_descriptor],
          token=self.token)
      flow_id = flow_urn.Basename()

  self.Check(
      "ListFlowOutputPlugins",
      args=flow_plugin.ApiListFlowOutputPluginsArgs(
          client_id=client_id.Basename(), flow_id=flow_id),
      # Flow ids are random; replace with a fixed token for stable output.
      replace={flow_id: "W:ABCDEF"})
def Handle(self, args, token=None):
  """Retrieves the clients for a hunt.

  Args:
    args: Request with hunt_id, client_status and offset/count paging.
    token: ACL token used for AFF4 access.

  Returns:
    ApiListHuntClientsResult with the requested page and the total count.
  """
  hunt_urn = args.hunt_id.ToURN()
  hunt = aff4.FACTORY.Open(
      hunt_urn, aff4_type=implementation.GRRHunt, token=token)

  clients_by_status = hunt.GetClientsByStatus()
  hunt_clients = clients_by_status[args.client_status.name]
  total_count = len(hunt_clients)

  # Sort once so paging is deterministic, then slice the requested window.
  ordered = sorted(hunt_clients)
  if args.count:
    page = ordered[args.offset:args.offset + args.count]
  else:
    page = ordered[args.offset:]

  # Legacy AFF4 hunts expose a synthetic "<hunt id>:hunt" flow id; the
  # relational implementation has no equivalent.
  if data_store.RelationalDBFlowsEnabled():
    flow_id = None
  else:
    flow_id = "%s:hunt" % hunt_urn.Basename()

  items = [
      ApiHuntClient(client_id=client_urn.Basename(), flow_id=flow_id)
      for client_urn in page
  ]
  return ApiListHuntClientsResult(items=items, total_count=total_count)
def checkClickingOnDownloadAsStartsDownloadForType(self, mock_method, plugin,
                                                   plugin_display_name):
  """Runs a GetFile flow and checks "Download As" triggers the export API.

  Args:
    mock_method: Mocked API method expected to be called by the download.
    plugin: Export plugin name expected in the API call.
    plugin_display_name: Label to pick in the "plugin-select" dropdown.
  """
  pathspec = rdf_paths.PathSpec(
      path=os.path.join(self.base_path, "test.plist"),
      pathtype=rdf_paths.PathSpec.PathType.OS)
  session_id = flow_test_lib.TestFlowHelper(
      flows_transfer.GetFile.__name__,
      pathspec=pathspec,
      client_mock=self.action_mock,
      client_id=self.client_id,
      token=self.token)
  # AFF4 returns a session URN; reduce it to the bare flow id.
  if not data_store.RelationalDBFlowsEnabled():
    session_id = session_id.Basename()

  self.Open("/#/clients/%s/flows/%s" % (self.client_id, session_id))
  self.Click("link=Results")
  self.Select("id=plugin-select", plugin_display_name)
  self.Click("css=grr-download-collection-as button[name='download-as']")

  def MockMethodIsCalled():
    try:
      # Mock should be called twice: once for HEAD (to check permissions)
      # and once for GET methods.
      mock_method.assert_called_with(
          api_flow.ApiGetExportedFlowResultsArgs(
              client_id=self.client_id,
              flow_id=session_id,
              plugin_name=plugin),
          token=mock.ANY)
      return True
    except AssertionError:
      return False

  self.WaitUntil(MockMethodIsCalled)
def Handle(self, args, token=None):
  """Lists the output plugins (with their state) attached to a flow."""
  if data_store.RelationalDBFlowsEnabled():
    flow_obj = data_store.REL_DB.ReadFlowObject(
        unicode(args.client_id), unicode(args.flow_id))
    output_plugins_states = flow_obj.output_plugins_states
  else:
    flow_urn = args.flow_id.ResolveClientFlowURN(args.client_id, token=token)
    flow_obj = aff4.FACTORY.Open(
        flow_urn, aff4_type=flow.GRRFlow, mode="r", token=token)
    output_plugins_states = flow_obj.GetRunner().context.output_plugins_states

  # Count plugins per type so generated ids are unique per plugin name,
  # e.g. "EmailOutputPlugin_0", "EmailOutputPlugin_1".
  type_indices = {}
  result = []
  for output_plugin_state in output_plugins_states:
    plugin_descriptor = output_plugin_state.plugin_descriptor
    plugin_state = output_plugin_state.plugin_state
    type_index = type_indices.setdefault(plugin_descriptor.plugin_name, 0)
    type_indices[plugin_descriptor.plugin_name] += 1

    # Output plugins states are stored differently for hunts and for flows:
    # as a dictionary for hunts and as a simple list for flows.
    #
    # TODO(user): store output plugins states in the same way for flows
    # and hunts. Until this is done, we can emulate the same interface in
    # the HTTP API.
    api_plugin = api_output_plugin.ApiOutputPlugin(
        id=plugin_descriptor.plugin_name + "_%d" % type_index,
        plugin_descriptor=plugin_descriptor,
        state=plugin_state)
    result.append(api_plugin)

  return ApiListFlowOutputPluginsResult(items=result)
def testShowsNotificationIfArchiveStreamingFailsInProgress(self):
  """Checks a user notification is shown if archive streaming fails mid-way."""
  pathspec = rdf_paths.PathSpec(
      path=os.path.join(self.base_path, "test.plist"),
      pathtype=rdf_paths.PathSpec.PathType.OS)
  session_id = flow_test_lib.TestFlowHelper(
      flows_transfer.GetFile.__name__,
      client_mock=self.action_mock,
      client_id=self.client_id,
      pathspec=pathspec,
      token=self.token)
  # AFF4 returns a session URN; reduce it to the bare flow id.
  if not data_store.RelationalDBFlowsEnabled():
    session_id = session_id.Basename()

  def RaisingStub(*unused_args, **unused_kwargs):
    # Yield some data first so the failure happens after streaming started.
    yield b"foo"
    yield b"bar"
    raise RuntimeError("something went wrong")

  with utils.Stubber(archive_generator.GetCompatClass(), "Generate",
                     RaisingStub):
    self.Open("/#/clients/%s" % self.client_id)

    self.Click("css=a[grrtarget='client.flows']")
    self.Click("css=td:contains('GetFile')")
    self.Click("link=Results")
    self.Click("css=button.DownloadButton")
    self.WaitUntil(self.IsUserNotificationPresent,
                   "Archive generation failed for flow %s" % session_id)
    # There will be no failure message, as we can't get a status from an
    # iframe that triggers the download.
    self.WaitUntilNot(self.IsTextPresent,
                      "Can't generate archive: Unknown error")
def testNotificationPointingToFlowIsShownOnFlowCompletion(self):
  """Checks the "flow completed" notification links to the flow page."""
  self.Open("/")

  pathspec = rdf_paths.PathSpec(
      path=os.path.join(self.base_path, "test.plist"),
      pathtype=rdf_paths.PathSpec.PathType.OS)
  session_id = flow_test_lib.TestFlowHelper(
      flows_transfer.GetFile.__name__,
      client_mock=self.action_mock,
      client_id=self.client_id,
      pathspec=pathspec,
      token=self.token)
  # AFF4 returns a session URN; reduce it to the bare flow id.
  if not data_store.RelationalDBFlowsEnabled():
    session_id = session_id.Basename()

  # Clicking on this should show the notifications table.
  self.Click("css=button[id=notification_button]")
  self.WaitUntil(self.IsTextPresent, "Notifications")

  # Click on the "flow completed" notification.
  self.Click("css=td:contains('Flow GetFile completed')")
  self.WaitUntilNot(self.IsTextPresent, "Notifications")

  # Check that clicking on a notification changes the location and shows
  # the flow page.
  self.WaitUntilEqual(
      "/#/clients/%s/flows/%s" % (self.client_id, session_id),
      self.GetCurrentUrlPath)
  self.WaitUntil(self.IsTextPresent, session_id)
def Handle(self, args, token=None):
  """Returns RUNNING/FINISHED state of an Interrogate operation.

  Raises:
    InterrogateOperationNotFoundError: if the operation id does not exist or
      does not refer to an Interrogate flow.
  """
  if data_store.RelationalDBFlowsEnabled():
    client_id = str(args.client_id)
    flow_id = str(args.operation_id)

    # TODO(user): test both exception scenarios below.
    try:
      flow_obj = data_store.REL_DB.ReadFlowObject(client_id, flow_id)
    except db.UnknownFlowError:
      raise InterrogateOperationNotFoundError(
          "Operation with id %s not found" % args.operation_id)

    # A flow that exists but is not an Interrogate is treated as not found.
    if flow_obj.flow_name != compatibility.GetName(discovery.Interrogate):
      raise InterrogateOperationNotFoundError(
          "Operation with id %s not found" % args.operation_id)

    complete = flow_obj.flow_state != flow_obj.FlowState.RUNNING
  else:
    try:
      flow_obj = aff4.FACTORY.Open(
          args.operation_id, aff4_type=aff4_flows.Interrogate, token=token)
      complete = not flow_obj.GetRunner().IsRunning()
    except aff4.InstantiationError:
      # Opening with a type constraint fails for unknown/mistyped URNs.
      raise InterrogateOperationNotFoundError(
          "Operation with id %s not found" % args.operation_id)

  result = ApiGetInterrogateOperationStateResult()
  if complete:
    result.state = ApiGetInterrogateOperationStateResult.State.FINISHED
  else:
    result.state = ApiGetInterrogateOperationStateResult.State.RUNNING

  return result
def StartFlow(flow_cls, client_id=None, flow_args=None, creator=None, **kwargs):
  """Starts (but not runs) a flow (AFF4/REL_DB compatible).

  Args:
    flow_cls: Flow class to start.
    client_id: Client the flow runs on; a string id or an RDFURN.
    flow_args: Flow-specific args rdfvalue.
    creator: Username recorded as the flow creator; defaults to "test" for
      the AFF4 token.
    **kwargs: Forwarded to flow.StartFlow / flow.StartAFF4Flow.

  Returns:
    The flow id (string) of the started flow.
  """
  if isinstance(client_id, rdfvalue.RDFURN):
    client_id = client_id.Basename()

  if data_store.RelationalDBFlowsEnabled():
    # notify_to_user is an AFF4-only argument; drop it if present so the
    # relational StartFlow does not choke on an unknown keyword.
    kwargs.pop("notify_to_user", None)
    return flow.StartFlow(
        flow_cls=flow_cls,
        client_id=client_id,
        flow_args=flow_args,
        creator=creator,
        **kwargs)
  else:
    flow_urn = flow.StartAFF4Flow(
        flow_name=flow_cls.__name__,
        client_id=client_id,
        token=access_control.ACLToken(username=creator or "test"),
        args=flow_args,
        **kwargs)
    return flow_urn.Basename()
def Run(self):
  """Regression data: GetVfsFileContentUpdateState for several operations."""
  client_urn = self.SetupClient(0)
  client_id = client_urn.Basename()
  acl_test_lib.CreateUser(self.token.username)

  # Create a running mock refresh operation.
  running_flow_id = self.CreateMultiGetFileFlow(
      client_urn, file_path="fs/os/c/bin/bash", token=self.token)

  # Create a mock refresh operation and complete it.
  finished_flow_id = self.CreateMultiGetFileFlow(
      client_urn, file_path="fs/os/c/bin/bash", token=self.token)

  if data_store.RelationalDBFlowsEnabled():
    flow_base.TerminateFlow(client_id, finished_flow_id, reason="Fake Error")

    # Create an arbitrary flow to check on 404s.
    non_update_flow_id = flow.StartFlow(
        client_id=client_id, flow_cls=discovery.Interrogate)
  else:
    finished_flow_urn = client_urn.Add("flows").Add(finished_flow_id)
    with aff4.FACTORY.Open(
        finished_flow_urn, aff4_type=flow.GRRFlow, mode="rw",
        token=self.token) as flow_obj:
      flow_obj.GetRunner().Error("Fake error")

    # Create an arbitrary flow to check on 404s.
    non_update_flow_id = flow.StartAFF4Flow(
        client_id=client_urn,
        flow_name=discovery.Interrogate.__name__,
        token=self.token).Basename()

  # Unknown flow ids should also cause 404s.
  unknown_flow_id = "F:12345678"

  # Check both operations.
  self.Check(
      "GetVfsFileContentUpdateState",
      args=vfs_plugin.ApiGetVfsFileContentUpdateStateArgs(
          client_id=client_id, operation_id=running_flow_id),
      replace={running_flow_id: "W:ABCDEF"})
  self.Check(
      "GetVfsFileContentUpdateState",
      args=vfs_plugin.ApiGetVfsFileContentUpdateStateArgs(
          client_id=client_id, operation_id=finished_flow_id),
      replace={finished_flow_id: "W:ABCDEF"})
  self.Check(
      "GetVfsFileContentUpdateState",
      args=vfs_plugin.ApiGetVfsFileContentUpdateStateArgs(
          client_id=client_id, operation_id=non_update_flow_id),
      replace={non_update_flow_id: "W:ABCDEF"})
  self.Check(
      "GetVfsFileContentUpdateState",
      args=vfs_plugin.ApiGetVfsFileContentUpdateStateArgs(
          client_id=client_id, operation_id=unknown_flow_id),
      replace={unknown_flow_id: "W:ABCDEF"})
def testHuntClientsView(self):
  """Test the detailed client view works."""
  self._CreateHuntWithDownloadedFile()

  # Open up and click on View Hunts then the first Hunt.
  self.Open("/")
  self.WaitUntil(self.IsElementPresent, "client_query")
  self.Click("css=a[grrtarget=hunts]")
  self.WaitUntil(self.IsTextPresent, "GenericHunt")
  self.Click("css=td:contains('GenericHunt')")

  # Click the Overview Tab then the Details Link.
  self.Click("css=li[heading=Overview]")
  self.WaitUntil(self.IsTextPresent, "Hunt ID")

  # Check the Hunt Clients tab.
  self.Click("css=li[heading=Clients]")

  client_id = self.client_ids[0]
  self.WaitUntil(self.IsElementPresent,
                 "css=tr:contains('%s')" % client_id.Basename())

  self.RequestAndGrantClientApproval(client_id)

  # TODO(user): move the code below outside of if as soon as hunt's
  # subflows are properly reported in the REL_DB implementation.
  if not data_store.RelationalDBFlowsEnabled():
    self.Click("css=tr:contains('%s') td:nth-of-type(2) a" %
               client_id.Basename())
    self.WaitUntil(self.IsTextPresent, "Flow Information")
    self.WaitUntil(self.IsTextPresent, self.base_path)
def _TerminateFlow(self, flow_id):
  """Force-terminates the given flow in whichever data store is active."""
  reason = "Because I said so"
  if not data_store.RelationalDBFlowsEnabled():
    # Legacy AFF4 path: flows live under <client>/flows/<flow_id>.
    urn = rdfvalue.RDFURN(self.client_id).Add("flows").Add(flow_id)
    flow.GRRFlow.TerminateAFF4Flow(urn, reason, token=self.token)
  else:
    flow_base.TerminateFlow(self.client_id, flow_id, reason)
def testShowsErrorMessageIfArchiveStreamingFailsBeforeFirstChunkIsSent(
    self):
  """Checks an inline error is shown if streaming fails before any data."""
  pathspec = rdf_paths.PathSpec(
      path=os.path.join(self.base_path, "test.plist"),
      pathtype=rdf_paths.PathSpec.PathType.OS)
  flow_id = flow_test_lib.TestFlowHelper(
      flows_transfer.GetFile.__name__,
      self.action_mock,
      client_id=self.client_id,
      check_flow_errors=False,
      pathspec=pathspec,
      token=self.token)
  # AFF4 returns a session URN; reduce it to the bare flow id.
  if not data_store.RelationalDBFlowsEnabled():
    flow_id = flow_id.Basename()

  def RaisingStub(*unused_args, **unused_kwargs):
    # Fail before yielding anything so no chunk is ever streamed.
    raise RuntimeError("something went wrong")

  with utils.Stubber(archive_generator.GetCompatClass(), "Generate",
                     RaisingStub):
    self.Open("/#/clients/%s" % self.client_id)

    self.Click("css=a[grrtarget='client.flows']")
    self.Click("css=td:contains('GetFile')")
    self.Click("link=Results")
    self.Click("css=button.DownloadButton")
    self.WaitUntil(self.IsTextPresent,
                   "Can't generate archive: Unknown error")
    self.WaitUntil(self.IsUserNotificationPresent,
                   "Archive generation failed for flow %s" % flow_id)
def GetFlowState(client_id, flow_id, token=None):
  """Reads a flow's persisted state from the active data store."""
  if not data_store.RelationalDBFlowsEnabled():
    aff4_flow = aff4.FACTORY.Open(flow_id, mode="r", token=token)
    return aff4_flow.state
  rdf_flow = data_store.REL_DB.ReadFlowObject(client_id.Basename(), flow_id)
  return rdf_flow.persistent_data
def testLaunchBinaryFlowResultsHaveReadableStdOutAndStdErr(self):
  """Checks binary/non-ASCII stdout+stderr render escaped in the results UI."""
  flow_id = flow_test_lib.StartFlow(
      gui_test_lib.RecursiveTestFlow, client_id=self.client_id)

  # Deliberately non-ASCII stderr and NUL-byte stdout to exercise escaping.
  stderr = "Oh, ok, this is just a string 昨"
  stdout = "\00\00\00\00"
  response = rdf_client_action.ExecuteResponse(
      stderr=stderr.encode("utf-8"), stdout=stdout.encode("utf-8"))

  if data_store.RelationalDBFlowsEnabled():
    data_store.REL_DB.WriteFlowResults(
        self.client_id, flow_id,
        [rdf_flow_objects.FlowResult(payload=response)])
  else:
    with data_store.DB.GetMutationPool() as pool:
      flow.GRRFlow.ResultCollectionForFID(
          rdfvalue.RDFURN(self.client_id).Add("flows").Add(flow_id)).Add(
              response, mutation_pool=pool)

  self.Open("/#/clients/%s/flows/%s/results" % (self.client_id, flow_id))

  # jQuery treats the backslash ('\') character as a special one, hence we
  # have to escape it twice: once for Javascript itself and second time
  # for jQuery.
  self.WaitUntil(
      self.IsElementPresent, r"css=grr-flow-inspector:contains('Oh, ok, "
      r"this is just a string \\\\xe6\\\\x98\\\\xa8')")
  self.WaitUntil(
      self.IsElementPresent,
      r"css=grr-flow-inspector:contains('\\\\x00\\\\x00\\\\x00\\\\x00')")
def testHandlerReturnsCorrectStateForFlow(self):
  """Checks the handler reports RUNNING then FINISHED for a refresh flow."""
  # Create a mock refresh operation.
  flow_id = self.CreateMultiGetFileFlow(
      self.client_id, file_path="fs/os/c/bin/bash", token=self.token)

  args = vfs_plugin.ApiGetVfsFileContentUpdateStateArgs(
      client_id=self.client_id, operation_id=flow_id)

  # Flow was started and should be running.
  result = self.handler.Handle(args, token=self.token)
  self.assertEqual(result.state, "RUNNING")

  # Terminate flow.
  if data_store.RelationalDBFlowsEnabled():
    flow_base.TerminateFlow(self.client_id.Basename(), flow_id, "Fake error")
  else:
    flow_urn = self.client_id.Add("flows").Add(flow_id)
    with aff4.FACTORY.Open(
        flow_urn, aff4_type=flow.GRRFlow, mode="rw",
        token=self.token) as flow_obj:
      flow_obj.GetRunner().Error("Fake error")

  # Recheck status and see if it changed.
  result = self.handler.Handle(args, token=self.token)
  self.assertEqual(result.state, "FINISHED")
def _LoadFlows(self, client_id, min_create_time, token):
  """Yields all flows for the given client_id and time range.

  Args:
    client_id: client URN
    min_create_time: minimum creation time (inclusive)
    token: acl token

  Yields:
    flow_objects.Flow objects
  """
  if data_store.RelationalDBFlowsEnabled():
    flow_list = data_store.REL_DB.ReadAllFlowObjects(
        client_id, min_create_time=min_create_time)
    for flow_obj in flow_list:
      # Only top-level flows are reported; nested flows are skipped.
      if not flow_obj.parent_flow_id:
        yield flow_obj
  else:
    now = rdfvalue.RDFDatetime.Now()
    client_id_urn = rdf_client.ClientURN(client_id)
    flows_dir = aff4.FACTORY.Open(client_id_urn.Add("flows"), token=token)
    # Save DB roundtrips by checking both conditions at once.
    flow_list = flows_dir.ListChildren(
        age=(min_create_time.AsMicrosecondsSinceEpoch(),
             now.AsMicrosecondsSinceEpoch()))
    for flow_obj in aff4.FACTORY.MultiOpen(flow_list, token=token):
      # Adapt the AFF4 flow object to the relational Flow rdfvalue shape so
      # callers see a uniform type regardless of the backing store.
      yield rdf_flow_objects.Flow(
          args=flow_obj.args,
          flow_class_name=flow_obj.runner_args.flow_name,
          flow_id=flow_obj.urn.Basename(),
          create_time=flow_obj.context.create_time,
          creator=flow_obj.creator,
      )
def testArtifactSkipping(self): client_mock = action_mocks.ActionMock() # This does not match the Artifact so it will not be collected. client_id = self.SetupClient(0, system="Windows") artifact_list = ["FakeArtifact"] session_id = flow_test_lib.TestFlowHelper( aff4_flows.ArtifactCollectorFlow.__name__, client_mock, artifact_list=artifact_list, use_tsk=False, token=self.token, client_id=client_id) if data_store.RelationalDBFlowsEnabled(): flow_obj = data_store.REL_DB.ReadFlowObject( client_id.Basename(), session_id) state = flow_obj.persistent_data else: flow_obj = aff4.FACTORY.Open(session_id, token=self.token) state = flow_obj.state self.assertLen(state.artifacts_skipped_due_to_condition, 1) self.assertEqual(state.artifacts_skipped_due_to_condition[0], ["FakeArtifact", "os == 'Linux'"])
def Handle(self, args, token=None):
  """Lists log entries for a flow with offset/count/filter paging."""
  if data_store.RelationalDBFlowsEnabled():
    # A zero/absent count means "everything"; REL_DB needs an explicit cap.
    count = args.count or db.MAX_COUNT

    logs = data_store.REL_DB.ReadFlowLogEntries(
        str(args.client_id), str(args.flow_id), args.offset, count,
        args.filter)
    total_count = data_store.REL_DB.CountFlowLogEntries(
        str(args.client_id), str(args.flow_id))
    return ApiListFlowLogsResult(
        items=[
            ApiFlowLog().InitFromFlowLogEntry(log, str(args.flow_id))
            for log in logs
        ],
        total_count=total_count)
  else:
    flow_urn = args.flow_id.ResolveClientFlowURN(args.client_id, token=token)
    logs_collection = flow.GRRFlow.LogCollectionForFID(flow_urn)

    result = api_call_handler_utils.FilterCollection(
        logs_collection, args.offset, args.count, args.filter)

    return ApiListFlowLogsResult(
        items=[ApiFlowLog().InitFromFlowLog(x) for x in result],
        total_count=len(logs_collection))
def testOverviewIsShownForNestedHuntFlows(self):
  """Checks the overview panel for flows nested under a hunt."""
  if data_store.RelationalDBFlowsEnabled():
    # TODO(amoser): Hunts don't spawn relational flows yet.
    return

  with implementation.StartHunt(
      hunt_name=standard.GenericHunt.__name__,
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=gui_test_lib.RecursiveTestFlow.__name__),
      client_rate=0,
      token=self.token) as hunt:
    hunt.Run()

  self.AssignTasksToClients(client_ids=[self.client_id])
  self.RunHunt(client_ids=[self.client_id])

  self.Open("/#/clients/%s" % self.client_id)
  self.Click("css=a[grrtarget='client.flows']")

  # There should be a RecursiveTestFlow in the list. Expand nested flows.
  self.Click("css=tr:contains('RecursiveTestFlow') span.tree_branch")
  # Click on a nested flow.
  self.Click("css=tr:contains('RecursiveTestFlow'):nth(2)")

  # Nested flow should have Depth argument set to 1.
  self.WaitUntil(self.IsElementPresent,
                 "css=td:contains('Depth') ~ td:nth(0):contains('1')")

  # Check that flow id of this flow has forward slash - i.e. consists of
  # 2 components.
  self.WaitUntil(self.IsTextPresent, "Flow ID")
  flow_id = self.GetText("css=dt:contains('Flow ID') ~ dd:nth(0)")
  self.assertIn("/", flow_id)
def Run(self):
  """Regression data: ListFlowOutputPluginErrors with a failing plugin."""
  client_id = self.SetupClient(0)
  failing_descriptor = rdf_output_plugin.OutputPluginDescriptor(
      plugin_name=hunt_test_lib.FailingDummyHuntOutputPlugin.__name__)

  # Fixed fake time keeps timestamps in the golden output deterministic.
  with test_lib.FakeTime(42):
    if data_store.RelationalDBFlowsEnabled():
      flow_id = flow_test_lib.StartAndRunFlow(
          flow_cls=flow_test_lib.DummyFlowWithSingleReply,
          client_id=client_id.Basename(),
          output_plugins=[failing_descriptor])
    else:
      flow_urn = flow.StartAFF4Flow(
          flow_name=flow_test_lib.DummyFlowWithSingleReply.__name__,
          client_id=client_id,
          output_plugins=[failing_descriptor],
          token=self.token)
      flow_id = flow_urn.Basename()
      # AFF4 flows must be driven to completion so the plugin actually runs
      # (and fails).
      flow_test_lib.TestFlowHelper(flow_urn, token=self.token)

  self.Check(
      "ListFlowOutputPluginErrors",
      args=flow_plugin.ApiListFlowOutputPluginErrorsArgs(
          client_id=client_id.Basename(),
          flow_id=flow_id,
          plugin_id="FailingDummyHuntOutputPlugin_0"),
      replace={flow_id: "W:ABCDEF"})
def Handle(self, args, token=None):
  """Renders list of descriptors for all the flows."""
  if data_store.RelationalDBFlowsEnabled():
    flow_iterator = iteritems(registry.FlowRegistry.FLOW_REGISTRY)
  else:
    flow_iterator = iteritems(registry.AFF4FlowRegistry.FLOW_REGISTRY)

  result = []

  for name, cls in sorted(flow_iterator):

    # Flows without a category do not show up in the GUI.
    if not getattr(cls, "category", None):
      continue

    # Only show flows that the user is allowed to start.
    try:
      if self.access_check_fn:
        self.access_check_fn(token.username, name)
    except access_control.UnauthorizedAccess:
      # Unauthorized flows are silently omitted from the listing.
      continue

    result.append(ApiFlowDescriptor().InitFromFlowClass(cls, token=token))

  return ApiListFlowDescriptorsResult(items=result)
def Run(self):
  """Regression data: ListFlowOutputPluginLogs for an email plugin."""
  client_id = self.SetupClient(0)
  email_descriptor = rdf_output_plugin.OutputPluginDescriptor(
      plugin_name=email_plugin.EmailOutputPlugin.__name__,
      plugin_args=email_plugin.EmailOutputPluginArgs(
          email_address="test@localhost", emails_limit=42))

  # Fixed fake time keeps timestamps in the golden output deterministic.
  with test_lib.FakeTime(42):
    if data_store.RelationalDBFlowsEnabled():
      flow_id = flow_test_lib.StartAndRunFlow(
          flow_cls=flow_test_lib.DummyFlowWithSingleReply,
          client_id=client_id.Basename(),
          output_plugins=[email_descriptor])
    else:
      flow_urn = flow.StartAFF4Flow(
          flow_name=flow_test_lib.DummyFlowWithSingleReply.__name__,
          client_id=client_id,
          output_plugins=[email_descriptor],
          token=self.token)
      flow_id = flow_urn.Basename()
      # AFF4 flows must be driven to completion so the plugin logs anything.
      flow_test_lib.TestFlowHelper(flow_urn, token=self.token)

  self.Check(
      "ListFlowOutputPluginLogs",
      args=flow_plugin.ApiListFlowOutputPluginLogsArgs(
          client_id=client_id.Basename(),
          flow_id=flow_id,
          plugin_id="EmailOutputPlugin_0"),
      replace={flow_id: "W:ABCDEF"})
def _RunFlow(self, client_id):
  """Runs a GetFile flow for /tmp/evil.txt and returns its flow id."""
  flow_args = transfer.GetFileArgs(
      pathspec=rdf_paths.PathSpec(
          path="/tmp/evil.txt", pathtype=rdf_paths.PathSpec.PathType.OS))
  client_mock = hunt_test_lib.SampleHuntMock(failrate=2)

  # Fixed fake time makes the flow's creation timestamp deterministic.
  if data_store.RelationalDBFlowsEnabled():
    with test_lib.FakeTime(42):
      return flow_test_lib.StartAndRunFlow(
          transfer.GetFile,
          client_id=client_id,
          client_mock=client_mock,
          flow_args=flow_args)
  else:
    runner_args = rdf_flow_runner.FlowRunnerArgs(
        flow_name=transfer.GetFile.__name__)

    with test_lib.FakeTime(42):
      flow_urn = flow.StartAFF4Flow(
          client_id=client_id,
          args=flow_args,
          runner_args=runner_args,
          token=self.token)
      # AFF4 flows must be driven to completion explicitly.
      flow_test_lib.TestFlowHelper(
          flow_urn,
          client_mock=client_mock,
          client_id=client_id,
          token=self.token)
      return flow_urn.Basename()
def _AddLogToFlow(self, client_id, flow_id, log_string):
  """Appends a log message to the given flow in the active data store."""
  if not data_store.RelationalDBFlowsEnabled():
    # Legacy AFF4 path: open the flow object and log through it.
    urn = rdfvalue.RDFURN(client_id).Add("flows").Add(flow_id)
    with aff4.FACTORY.Open(urn, token=self.token) as flow_fd:
      flow_fd.Log(log_string)
    return
  log_entry = rdf_flow_objects.FlowLogEntry(message=log_string)
  data_store.REL_DB.WriteFlowLogEntries(client_id, flow_id, [log_entry])
def Handle(self, args, token=None):
  """Lists flows for a client via the relational or legacy data store."""
  if not data_store.RelationalDBFlowsEnabled():
    flows_root = args.client_id.ToClientURN().Add("flows")
    return self.BuildFlowList(
        flows_root, args.count, args.offset, token=token)
  return self._BuildRelationalFlowList(
      str(args.client_id), args.offset, args.count)
def Handle(self, args, token=None):
  """Dispatches to the relational or legacy handler implementation.

  Raises:
    ValueError: if the handler was constructed without an attribute_name.
  """
  if not self.attribute_name:
    raise ValueError("attribute_name can't be None")

  if not data_store.RelationalDBFlowsEnabled():
    return self._HandleLegacy(args, token=token)
  return self._HandleRelational(args)
def ProcessFlow():
  """Waits briefly, then force-terminates the flow under test.

  NOTE(review): client_urn, flow_id and self come from the enclosing scope;
  presumably this runs on a background thread while the test polls the flow
  state -- confirm against the caller.
  """
  time.sleep(1)
  if data_store.RelationalDBFlowsEnabled():
    flow_base.TerminateFlow(client_urn.Basename(), flow_id, "")
  else:
    with aff4.FACTORY.Open(
        client_urn.Add("flows").Add(flow_id), mode="rw",
        token=self.token) as fd:
      fd.GetRunner().Error("")
def _StartFlow(self, flow_cls, **kw):
  """Starts a flow of the given class on self.client_id (REL_DB or AFF4)."""
  if not data_store.RelationalDBFlowsEnabled():
    return flow.StartAFF4Flow(
        flow_name=compatibility.GetName(flow_cls),
        client_id=self.client_id,
        token=self.token,
        **kw)
  return flow.StartFlow(flow_cls=flow_cls, client_id=self.client_id, **kw)