def testHuntViewDoesShowsNothingForNonRapidLikeHuntWithClientRateNon0(self):
  """No rapid-hunting note is shown for a non-zero client rate."""
  # Recursive glob plus a non-zero client rate: not a rapid-hunting setup.
  hunt_urn = self.StartHunt(
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=rdf_file_finder.FileFinderArgs(paths=["/tmp/**"]),
      client_rate=42)

  self.Open("/#/hunts/%s" % hunt_urn.Basename())
  self.WaitUntil(self.IsElementPresent, "css=dt:contains('Client Rate')")
  self.WaitUntilNot(
      self.IsElementPresent, "css=dt:contains('Client Rate') + "
      "dd:contains('rapid hunting')")
def Run(self):
  """Starts a GenericHunt built from this cron job's hunt action args."""
  action = self.job.args.hunt_cron_action
  token = access_control.ACLToken(username="******")
  hunt_args = rdf_hunts.GenericHuntArgs(
      flow_args=action.flow_args,
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=action.flow_name))
  with implementation.StartHunt(
      hunt_name=GenericHunt.__name__,
      args=hunt_args,
      runner_args=action.hunt_runner_args,
      token=token) as hunt:
    hunt.Run()
def _CreateHunt(self, description):
  """Creates a GetFile hunt with a TestOutputPlugin attached."""
  output_plugins = [
      output_plugin.OutputPluginDescriptor(plugin_name="TestOutputPlugin")
  ]
  with implementation.StartHunt(
      hunt_name=standard.GenericHunt.__name__,
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=transfer.GetFile.__name__),
      output_plugins=output_plugins,
      description=description,
      client_rate=0,
      token=self.token) as hunt:
    return hunt
def testListScheduledFlows(self, db: abstract_db.Database):
  """Only the caller's flows for the requested client are listed."""
  context = _CreateContext(db)
  client_id1 = db_test_utils.InitializeClient(db)
  client_id2 = db_test_utils.InitializeClient(db)
  handler = flow_plugin.ApiScheduleFlowHandler()

  def _Schedule(client_id):
    # All three scheduled flows are identical except for the client.
    return handler.Handle(
        flow_plugin.ApiCreateFlowArgs(
            client_id=client_id,
            flow=flow_plugin.ApiFlow(
                name=file.CollectSingleFile.__name__,
                args=rdf_file_finder.CollectSingleFileArgs(path="/foo"),
                runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60))),
        context=context)

  sf1 = _Schedule(client_id1)
  sf2 = _Schedule(client_id1)
  _Schedule(client_id2)  # Scheduled for another client; must not be listed.

  handler = flow_plugin.ApiListScheduledFlowsHandler()
  args = flow_plugin.ApiListScheduledFlowsArgs(
      client_id=client_id1, creator=context.username)
  results = handler.Handle(args, context=context)

  self.assertEqual(results.scheduled_flows, [sf1, sf2])
def CreateHunt(self,
               flow_runner_args=None,
               flow_args=None,
               client_rule_set=None,
               original_object=None,
               client_rate=0,
               token=None,
               **kwargs):
  """Creates a test hunt via the relational DB or AFF4, as configured.

  Returns:
    The hunt id (relational path) or the started hunt (AFF4 path).
  """
  # Only initialize default flow_args value if default flow_runner_args value
  # is to be used.
  if not flow_runner_args:
    flow_args = (
        flow_args or transfer.GetFileArgs(
            pathspec=rdf_paths.PathSpec(
                path="/tmp/evil.txt",
                pathtype=rdf_paths.PathSpec.PathType.OS)))

  flow_runner_args = (
      flow_runner_args or
      rdf_flow_runner.FlowRunnerArgs(flow_name=transfer.GetFile.__name__))

  client_rule_set = (client_rule_set or self._CreateForemanClientRuleSet())

  if data_store.RelationalDBReadEnabled():
    token = token or self.token

    hunt_args = rdf_hunt_objects.HuntArguments(
        hunt_type=rdf_hunt_objects.HuntArguments.HuntType.STANDARD,
        standard=rdf_hunt_objects.HuntArgumentsStandard(
            flow_name=flow_runner_args.flow_name, flow_args=flow_args))

    hunt_obj = rdf_hunt_objects.Hunt(
        creator=token.username,
        client_rule_set=client_rule_set,
        original_object=original_object,
        client_rate=client_rate,
        args=hunt_args,
        **kwargs)
    hunt.CreateHunt(hunt_obj)

    return hunt_obj.hunt_id

  return implementation.StartHunt(
      hunt_name=standard.GenericHunt.__name__,
      flow_runner_args=flow_runner_args,
      flow_args=flow_args,
      client_rule_set=client_rule_set,
      client_rate=client_rate,
      original_object=original_object,
      token=token or self.token,
      **kwargs)
def testHuntViewShowsEligibilityNoteForNonRapidHuntWithClientRate0(self):
  """A 'not eligible' note is shown for a non-rapid hunt with rate 0."""
  # CreateHunt sets client rate to 0. Thus we have a non-eligible hunt:
  # FileFinder with a recursive glob expression and client rate 0.
  hunt_id = self.StartHunt(
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=rdf_file_finder.FileFinderArgs(paths=["/tmp/**"]))

  self.Open("/#/hunts/%s" % hunt_id)
  self.WaitUntil(
      self.IsElementPresent, "css=dt:contains('Client Rate') + "
      "dd:contains('is not eligible for rapid hunting')")
  self.assertTrue(
      self.GetText("css=dt:contains('Client Rate') + dd").startswith("0 "))
def testHuntViewDoesNotShowAnythingForRapidLikeHunts(self):
  """No rapid-hunting note appears for an already rapid-like hunt."""
  # CreateHunt sets client rate to 0. Thus we have a rapid-hunting-like hunt:
  # FileFinder without download and client rate 0.
  hunt_id = self.StartHunt(
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=rdf_file_finder.FileFinderArgs(paths=["/tmp/evil.txt"]))

  self.Open("/#/hunts/%s" % hunt_id)
  self.WaitUntil(self.IsElementPresent,
                 "css=dt:contains('Client Rate') + dd:contains(0)")
  self.WaitUntilNot(
      self.IsElementPresent,
      "css=dt:contains('Client Rate') + dd:contains('rapid hunting')")
def InitializeContext(self, args):
  """Initializes the context of this flow."""
  if args is None:
    args = rdf_flow_runner.FlowRunnerArgs()

  output_plugins_states = []
  for plugin_descriptor in args.output_plugins:
    if not args.client_id:
      # Output plugins only make sense for client flows; skip otherwise.
      self.Log(
          "Not initializing output plugin %s as flow does not run on "
          "the client.", plugin_descriptor.plugin_name)
      continue

    output_base_urn = self.session_id.Add(OUTPUT_PLUGIN_BASE_SUFFIX)
    plugin_class = plugin_descriptor.GetPluginClass()
    plugin = plugin_class(
        self.flow_obj.output_urn,
        args=plugin_descriptor.plugin_args,
        output_base_urn=output_base_urn,
        token=self.token)
    try:
      plugin.InitializeState()
      # TODO(amoser): Those do not need to be inside the state, they
      # could be part of the plugin descriptor.
      plugin.state["logs"] = []
      plugin.state["errors"] = []

      output_plugins_states.append(
          rdf_flow_runner.OutputPluginState(
              plugin_state=plugin.state,
              plugin_descriptor=plugin_descriptor))
    except Exception as e:  # pylint: disable=broad-except
      # A broken plugin must not prevent the flow from starting.
      logging.info("Plugin %s failed to initialize (%s), ignoring it.",
                   plugin, e)

  parent_creator = None
  if self.parent_runner:
    parent_creator = self.parent_runner.context.creator

  return rdf_flow_runner.FlowContext(
      create_time=rdfvalue.RDFDatetime.Now(),
      creator=parent_creator or self.token.username,
      current_state="Start",
      output_plugins_states=output_plugins_states,
      state=rdf_flow_runner.FlowContext.State.RUNNING,
  )
def testHuntExpiration(self):
  """This tests that hunts with a client limit terminate correctly."""
  with test_lib.FakeTime(1000):
    with implementation.StartHunt(
        hunt_name=standard.GenericHunt.__name__,
        flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
            flow_name=transfer.GetFile.__name__),
        flow_args=transfer.GetFileArgs(
            pathspec=rdf_paths.PathSpec(
                path="/tmp/evil.txt",
                pathtype=rdf_paths.PathSpec.PathType.OS)),
        client_rule_set=self._CreateForemanClientRuleSet(),
        client_limit=5,
        expiry_time=rdfvalue.Duration("1000s"),
        token=self.token) as hunt:
      hunt.Run()

    # Pretend to be the foreman now and dish out hunting jobs to all the
    # clients (Note we have 10 clients here).
    self.AssignTasksToClients()

    hunt_obj = aff4.FACTORY.Open(
        hunt.session_id, age=aff4.ALL_TIMES, token=self.token)
    self.assertEqual(hunt_obj.Get(hunt_obj.Schema.STATE), "STARTED")

    # Now advance the time such that the hunt expires.
    # NOTE(review): this overrides the FakeTime patch; FakeTime restores
    # time.time on context exit.
    time.time = lambda: 5000

    # Run the hunt.
    client_mock = hunt_test_lib.SampleHuntMock()
    hunt_test_lib.TestHuntHelper(
        client_mock, self.client_ids, check_flow_errors=False,
        token=self.token)

    # No client should be processed since the hunt is expired.
    started, finished, errors = hunt_obj.GetClientsCounts()
    self.assertEqual(started, 0)
    self.assertEqual(finished, 0)
    self.assertEqual(errors, 0)

    hunt_obj = aff4.FACTORY.Open(
        hunt.session_id, age=aff4.ALL_TIMES, token=self.token)

    # Hunts are automatically stopped when they expire.
    self.assertEqual(hunt_obj.Get(hunt_obj.Schema.STATE), "COMPLETED")
def setUp(self):
  """Starts a DummyFlowWithSingleReply hunt and runs it on 5 clients."""
  super(ApiGetExportedHuntResultsHandlerTest, self).setUp()

  self.handler = hunt_plugin.ApiGetExportedHuntResultsHandler()

  self.hunt_id = self.StartHunt(
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=flow_test_lib.DummyFlowWithSingleReply.__name__),
      client_rate=0)

  self.client_ids = self.SetupClients(5)
  # Ensure that clients are processed sequentially - this way the test won't
  # depend on the order of results in the collection (which is normally
  # random).
  for cid in self.client_ids:
    self.RunHunt(client_ids=[cid], failrate=-1)
def Start(self):
  """Starts a periodic Interrogate hunt to keep host info fresh."""
  with hunts_implementation.StartHunt(
      hunt_name=hunts_standard.GenericHunt.__name__,
      client_limit=0,
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=flows_discovery.Interrogate.__name__),
      flow_args=flows_discovery.InterrogateArgs(lightweight=False),
      output_plugins=self.GetOutputPlugins(),
      token=self.token) as hunt:
    runner = hunt.GetRunner()
    runner.runner_args.crash_limit = 500
    runner.runner_args.client_rate = 50
    runner.runner_args.expiry_time = "1w"
    # BUG FIX: the original implicit string concatenation produced
    # "...keep hostinfo fresh." — a space was missing between the parts.
    runner.runner_args.description = ("Interrogate run by cron to keep host "
                                      "info fresh.")
    runner.Start()
def testHuntViewShowsEligibilityNoteForRapidLikeHuntWithClientRate0(self):
  """An 'is eligible' note is shown for a rapid-like hunt with rate 0."""
  # CreateHunt sets client rate to 0. Thus we have a rapid-hunting-like hunt:
  # FileFinder without download action and client rate 0.
  hunt_id = self.StartHunt(
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=rdf_file_finder.FileFinderArgs(paths=["/tmp/evil.txt"]))

  self.Open("/#/hunts/%s" % hunt_id)
  self.WaitUntil(
      self.IsElementPresent, "css=dt:contains('Client Rate') + "
      "dd:contains('is eligible for rapid hunting')")
  self.WaitUntil(
      self.IsElementPresent, "css=dt:contains('Client Rate') + "
      "dd:contains('Client rate set to 0')")
  self.assertTrue(
      self.GetText("css=dt:contains('Client Rate') + dd").startswith("0 "))
def testOverviewIsShownForNestedHuntFlows(self):
  """Nested hunt flows expose their overview (e.g. the Depth argument)."""
  self.StartHunt(
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=gui_test_lib.RecursiveTestFlow.__name__),
      client_rate=0,
      token=self.token)
  self.RunHunt(failrate=2, client_ids=[self.client_id])

  self.Open("/#/clients/%s" % self.client_id)
  self.Click("css=a[grrtarget='client.flows']")

  # There should be a RecursiveTestFlow in the list. Expand nested flows.
  self.Click("css=tr:contains('RecursiveTestFlow') span.tree_branch")
  # Click on a nested flow.
  self.Click("css=tr:contains('RecursiveTestFlow'):nth(2)")

  # Nested flow should have Depth argument set to 1.
  self.WaitUntil(self.IsElementPresent,
                 "css=td:contains('Depth') ~ td:nth(0):contains('1')")
def setUp(self):
  """Runs a downloading FileFinder hunt over 10 clients."""
  super(ApiGetHuntFilesArchiveHandlerTest, self).setUp()

  self.handler = hunt_plugin.ApiGetHuntFilesArchiveHandler()

  self.client_ids = self.SetupClients(10)
  self.hunt_id = self.StartHunt(
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=rdf_file_finder.FileFinderArgs(
          paths=[os.path.join(self.base_path, "test.plist")],
          action=rdf_file_finder.FileFinderAction(action_type="DOWNLOAD"),
      ),
      client_rate=0,
      creator=self.token.username)

  self.RunHunt(
      client_ids=self.client_ids,
      client_mock=action_mocks.FileFinderClientMock())
def testScheduleFlow(self, db: abstract_db.Database):
  """Scheduling a flow returns a fully populated ScheduledFlow."""
  token = _CreateToken(db)
  client_id = db_test_utils.InitializeClient(db)

  handler = flow_plugin.ApiScheduleFlowHandler()
  args = flow_plugin.ApiCreateFlowArgs(
      client_id=client_id,
      flow=flow_plugin.ApiFlow(
          name=file.CollectSingleFile.__name__,
          args=rdf_file_finder.CollectSingleFileArgs(path="/foo"),
          runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60)))
  sf = handler.Handle(args, token=token)

  self.assertEqual(sf.client_id, client_id)
  self.assertEqual(sf.creator, token.username)
  self.assertNotEmpty(sf.scheduled_flow_id)
  self.assertEqual(sf.flow_name, file.CollectSingleFile.__name__)
  self.assertEqual(sf.flow_args.path, "/foo")
  self.assertEqual(sf.runner_args.cpu_limit, 60)
def CreateSampleHunt(self, description, token=None):
  """Creates a paused GetFile hunt with a DummyOutputPlugin attached."""
  self.StartHunt(
      description=description,
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=transfer.GetFile.__name__),
      flow_args=transfer.GetFileArgs(
          pathspec=rdf_paths.PathSpec(
              path="/tmp/evil.txt",
              pathtype=rdf_paths.PathSpec.PathType.NTFS,
          )),
      client_rule_set=self._CreateForemanClientRuleSet(),
      output_plugins=[
          rdf_output_plugin.OutputPluginDescriptor(
              plugin_name="DummyOutputPlugin",
              plugin_args=gui_test_lib.DummyOutputPlugin.args_type(
                  filename_regex="blah!", fetch_binaries=True))
      ],
      client_rate=60,
      paused=True,
      token=token)
def Run(self):
  """Regression check for the CreateFlow API call (AFF4 data store)."""
  client_id = self.SetupClient(0)

  def ReplaceFlowId():
    # The generated flow id is random; map it to a stable placeholder.
    flows_dir_fd = aff4.FACTORY.Open(
        client_id.Add("flows"), token=self.token)
    flow_urn = list(flows_dir_fd.ListChildren())[0]
    return {flow_urn.Basename(): "W:ABCDEF"}

  with test_lib.FakeTime(42):
    self.Check(
        "CreateFlow",
        args=flow_plugin.ApiCreateFlowArgs(
            client_id=client_id.Basename(),
            flow=flow_plugin.ApiFlow(
                name=processes.ListProcesses.__name__,
                args=processes.ListProcessesArgs(
                    filename_regex=".", fetch_binaries=True),
                runner_args=rdf_flow_runner.FlowRunnerArgs(
                    output_plugins=[], notify_to_user=False))),
        replace=ReplaceFlowId)
def testStartScheduledFlowsHandlesErrorInFlowArgsValidation(self):
  """A failing args validation keeps the scheduled flow and records the error."""
  client_id = self.SetupClient(0)
  username = self.SetupUser("u0")

  self.ScheduleFlow(
      client_id=client_id,
      creator=username,
      flow_name=file.CollectSingleFile.__name__,
      flow_args=rdf_file_finder.CollectSingleFileArgs(path="/foo"),
      runner_args=rdf_flow_runner.FlowRunnerArgs(cpu_limit=60))

  with mock.patch.object(
      rdf_file_finder.CollectSingleFileArgs,
      "Validate",
      side_effect=ValueError("foobazzle")):
    flow.StartScheduledFlows(client_id, username)

  # No flow must have been started...
  self.assertEmpty(data_store.REL_DB.ReadAllFlowObjects(client_id))

  # ...and the scheduled flow must remain, carrying the validation error.
  scheduled_flows = flow.ListScheduledFlows(client_id, username)
  self.assertLen(scheduled_flows, 1)
  self.assertIn("foobazzle", scheduled_flows[0].error)
def setUp(self):
  """Starts a DummyFlowWithSingleReply hunt and runs it on 5 clients (AFF4)."""
  super(ApiGetExportedHuntResultsHandlerTest, self).setUp()

  self.handler = hunt_plugin.ApiGetExportedHuntResultsHandler()

  self.hunt = implementation.StartHunt(
      hunt_name=standard.GenericHunt.__name__,
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=flow_test_lib.DummyFlowWithSingleReply.__name__),
      client_rate=0,
      token=self.token)
  self.hunt.Run()

  self.client_ids = self.SetupClients(5)
  # Ensure that clients are processed sequentially - this way the test won't
  # depend on the order of results in the collection (which is normally
  # random).
  for cid in self.client_ids:
    self.AssignTasksToClients(client_ids=[cid])
    client_mock = hunt_test_lib.SampleHuntMock()
    hunt_test_lib.TestHuntHelper(client_mock, [cid], token=self.token)
def setUp(self):
  """Runs a downloading FileFinder hunt over 10 clients (AFF4)."""
  super(ApiGetHuntFilesArchiveHandlerTest, self).setUp()

  self.handler = hunt_plugin.ApiGetHuntFilesArchiveHandler()

  self.hunt = implementation.StartHunt(
      hunt_name=standard.GenericHunt.__name__,
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=rdf_file_finder.FileFinderArgs(
          paths=[os.path.join(self.base_path, "test.plist")],
          action=rdf_file_finder.FileFinderAction(action_type="DOWNLOAD"),
      ),
      client_rate=0,
      token=self.token)
  self.hunt.Run()

  client_ids = self.SetupClients(10)
  self.AssignTasksToClients(client_ids=client_ids)
  action_mock = action_mocks.FileFinderClientMock()
  hunt_test_lib.TestHuntHelper(action_mock, client_ids, token=self.token)
def testCopyHuntPreservesRuleType(self):
  """Copying a hunt keeps its client rule type (OS rule here)."""
  implementation.StartHunt(
      hunt_name=standard.GenericHunt.__name__,
      description="model hunt",
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=transfer.GetFile.__name__),
      flow_args=transfer.GetFileArgs(
          pathspec=rdf_paths.PathSpec(
              path="/tmp/evil.txt",
              pathtype=rdf_paths.PathSpec.PathType.TSK,
          )),
      client_rule_set=foreman_rules.ForemanClientRuleSet(rules=[
          foreman_rules.ForemanClientRule(
              rule_type=foreman_rules.ForemanClientRule.Type.OS,
              os=foreman_rules.ForemanOsClientRule(os_darwin=True))
      ]),
      token=self.token)

  self.Open("/#main=ManageHunts")
  self.Click("css=tr:contains('model hunt')")
  self.Click("css=button[name=CopyHunt]:not([disabled])")

  # Wait until dialog appears.
  self.WaitUntil(self.IsElementPresent,
                 "css=grr-wizard-form:contains('What to run?')")
  # Click on "Next" button
  self.Click("css=grr-new-hunt-wizard-form button.Next")
  self.WaitUntil(self.IsElementPresent,
                 "css=grr-wizard-form:contains('Hunt parameters')")
  # Click on "Next" button.
  self.Click("css=grr-new-hunt-wizard-form button.Next")
  self.WaitUntil(self.IsElementPresent,
                 "css=grr-wizard-form:contains('How to process results')")
  # Click on "Next" button
  self.Click("css=grr-new-hunt-wizard-form button.Next")
  self.WaitUntil(self.IsElementPresent,
                 "css=grr-wizard-form:contains('Where to run?')")

  # The copied hunt must keep the "Os darwin" rule checked.
  self.WaitUntil(
      self.IsElementPresent, "css=grr-new-hunt-wizard-form "
      "label:contains('Os darwin') ~ * input:checked")
def Run(self):
  """Regression check for the CreateFlow API call (relational data store)."""
  client_id = self.SetupClient(0)

  def ReplaceFlowId():
    # The generated flow id is random; map it to a stable placeholder.
    flows = data_store.REL_DB.ReadAllFlowObjects(client_id=client_id)
    self.assertNotEmpty(flows)
    flow_id = flows[0].flow_id
    return api_regression_test_lib.GetFlowTestReplaceDict(client_id, flow_id)

  with test_lib.FakeTime(42):
    self.Check(
        "CreateFlow",
        args=flow_plugin.ApiCreateFlowArgs(
            client_id=client_id,
            flow=flow_plugin.ApiFlow(
                name=processes.ListProcesses.__name__,
                args=processes.ListProcessesArgs(
                    filename_regex=".", fetch_binaries=True),
                runner_args=rdf_flow_runner.FlowRunnerArgs(
                    output_plugins=[], notify_to_user=True))),
        replace=ReplaceFlowId)
def testNotifyAboutEndDoesNothingWhenFlowsRunInsideHunt(self):
  """Flows running inside a hunt must not send end-of-flow notifications."""
  self.CreateUser(self.token.username)

  # Create a user with a custom name to make sure the name is not in the list
  # of system names and that notifications are going to be delivered.
  user_token = access_control.ACLToken(username="******", reason="testing")
  self.CreateUser(user_token.username)

  hunt = implementation.StartHunt(
      hunt_name=standard.GenericHunt.__name__,
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=FlowWithCustomNotifyAboutEnd.__name__),  # pylint: disable=undefined-variable
      client_rate=0,
      token=user_token)
  hunt.Run()

  client_ids = self.SetupClients(5)
  self.AssignTasksToClients(client_ids=client_ids)
  hunt_test_lib.TestHuntHelper(None, client_ids, token=self.token)

  notifications = self.GetUserNotifications(user_token.username)
  self.assertEmpty(notifications)
def Run(self):
  """Starts the configured hunt via the relational DB or the AFF4 path."""
  if data_store.RelationalDBReadEnabled("hunts"):
    hra = self.job.args.hunt_cron_action.hunt_runner_args
    anbpcl = hra.avg_network_bytes_per_client_limit
    expiry_time = rdfvalue.RDFDatetime.Now() + hra.expiry_time
    hunt.CreateAndStartHunt(
        self.job.args.hunt_cron_action.flow_name,
        self.job.args.hunt_cron_action.flow_args,
        "Cron",
        avg_cpu_seconds_per_client_limit=hra
        .avg_cpu_seconds_per_client_limit,
        avg_network_bytes_per_client_limit=anbpcl,
        avg_results_per_client_limit=hra.avg_results_per_client_limit,
        client_limit=hra.client_limit,
        client_rate=hra.client_rate,
        client_rule_set=hra.client_rule_set,
        crash_limit=hra.crash_limit,
        description=hra.description,
        expiry_time=expiry_time,
        original_object=hra.original_object,
        output_plugins=hra.output_plugins,
        per_client_cpu_limit=hra.per_client_cpu_limit,
        # NOTE(review): keyword and source attribute use different word
        # order ("bytes_limit" vs "limit_bytes") — this mirrors the RDF
        # proto field name; confirm before renaming.
        per_client_network_bytes_limit=hra.per_client_network_limit_bytes,
    )
  else:
    action = self.job.args.hunt_cron_action
    token = access_control.ACLToken(username="******")
    hunt_args = rdf_hunts.GenericHuntArgs(
        flow_args=action.flow_args,
        flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
            flow_name=action.flow_name))
    with implementation.StartHunt(
        hunt_name=GenericHunt.__name__,
        args=hunt_args,
        runner_args=action.hunt_runner_args,
        token=token) as hunt_obj:
      hunt_obj.Run()
def testGetPluginArgsHandlesMissingPluginsCorrectly(self):
  """Descriptors for unknown plugins keep their args as serialized bytes."""
  descriptor = output_plugin.OutputPluginDescriptor(
      plugin_name="TestOutputPluginWithArgs",
      plugin_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=transfer.GetFile.__name__))
  serialized = descriptor.SerializeToString()

  # Round-trip with the plugin class available: full equality.
  deserialized = output_plugin.OutputPluginDescriptor()
  deserialized.ParseFromString(serialized)
  self.assertEqual(deserialized, descriptor)
  self.assertEqual(deserialized.GetPluginClass(), TestOutputPluginWithArgs)

  with utils.Stubber(output_plugin.OutputPlugin, "classes", {}):
    deserialized = output_plugin.OutputPluginDescriptor()
    deserialized.ParseFromString(serialized)

    # BUG FIX: was assertTrue(a, b), which treats the second argument as a
    # failure message and always passes. assertEqual performs the intended
    # comparison.
    self.assertEqual(deserialized.GetPluginClass(),
                     output_plugin.UnknownOutputPlugin)
    # UnknownOutputPlugin should just return serialized arguments as bytes.
    self.assertEqual(deserialized.plugin_args,
                     descriptor.plugin_args.SerializeToString())
def setUp(self):
  """Runs a downloading FileFinder hunt collecting test.plist on one client."""
  super().setUp()

  self.handler = hunt_plugin.ApiGetHuntFileHandler()

  self.file_path = os.path.join(self.base_path, "test.plist")
  self.vfs_file_path = "fs/os/%s" % self.file_path

  self.hunt_id = self.StartHunt(
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=rdf_file_finder.FileFinderArgs(
          paths=[self.file_path],
          action=rdf_file_finder.FileFinderAction(action_type="DOWNLOAD"),
      ),
      client_rate=0,
      creator=self.context.username)

  self.client_id = self.SetupClient(0)
  self.RunHunt(
      client_ids=[self.client_id],
      client_mock=action_mocks.FileFinderClientMock())
def RunFlow(self,
            flow_name=None,
            plugins=None,
            flow_args=None,
            client_mock=None):
  """Runs a flow (GetFile by default) with the given output plugins."""
  runner_args = rdf_flow_runner.FlowRunnerArgs(
      flow_name=flow_name or transfer.GetFile.__name__,
      output_plugins=plugins)

  if flow_args is None:
    flow_args = transfer.GetFileArgs(
        pathspec=rdf_paths.PathSpec(
            path="/tmp/evil.txt", pathtype=rdf_paths.PathSpec.PathType.OS))

  if client_mock is None:
    client_mock = hunt_test_lib.SampleHuntMock()

  return flow_test_lib.TestFlowHelper(
      flow_name,
      args=flow_args,
      runner_args=runner_args,
      client_mock=client_mock,
      client_id=self.client_id,
      token=self.token)
def testHuntCollectionLogging(self):
  """This tests running the hunt on some clients."""
  with implementation.StartHunt(
      hunt_name=standard.GenericHunt.__name__,
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=flow_test_lib.DummyLogFlow.__name__),
      client_rate=0,
      token=self.token) as hunt:
    hunt.Run()
    hunt.Log("Log from the hunt itself")

  hunt_urn = hunt.urn

  self.AssignTasksToClients()
  self.RunHunt()

  # Check logs were written to the hunt collection
  hunt_logs = implementation.GRRHunt.LogCollectionForHID(hunt_urn)
  count = 0
  for log in hunt_logs:
    if log.client_id:
      # Client-level entries come from DummyLogFlow and its child flow.
      self.assertIn(log.client_id, self.client_ids)
      self.assertIn(log.log_message, [
          "First", "Second", "Third", "Fourth", "Uno", "Dos", "Tres",
          "Cuatro"
      ])
      self.assertIn(log.flow_name, [
          flow_test_lib.DummyLogFlow.__name__,
          flow_test_lib.DummyLogFlowChild.__name__
      ])
      self.assertIn(str(hunt_urn), str(log.urn))
    else:
      self.assertEqual(log.log_message, "Log from the hunt itself")
      self.assertEqual(log.flow_name, standard.GenericHunt.__name__)
      self.assertEqual(log.urn, hunt_urn)

    count += 1

  # 4 logs for each flow, 2 flow run. One hunt-level log.
  self.assertEqual(count, 8 * len(self.client_ids) + 1)
def CreateHunt(self,
               flow_runner_args=None,
               flow_args=None,
               client_rule_set=None,
               original_object=None,
               client_rate=0,
               duration=None,
               creator=None,
               **kwargs):
  """Creates a standard test hunt in the relational DB.

  Returns:
    The id of the newly created hunt.
  """
  # Only initialize default flow_args value if default flow_runner_args value
  # is to be used.
  if not flow_runner_args:
    flow_args = (
        flow_args or transfer.GetFileArgs(
            pathspec=rdf_paths.PathSpec(
                path="/tmp/evil.txt",
                pathtype=rdf_paths.PathSpec.PathType.OS)))

  flow_runner_args = (
      flow_runner_args or
      rdf_flow_runner.FlowRunnerArgs(flow_name=transfer.GetFile.__name__))

  client_rule_set = (client_rule_set or self._CreateForemanClientRuleSet())

  hunt_args = rdf_hunt_objects.HuntArguments(
      hunt_type=rdf_hunt_objects.HuntArguments.HuntType.STANDARD,
      standard=rdf_hunt_objects.HuntArgumentsStandard(
          flow_name=flow_runner_args.flow_name, flow_args=flow_args))

  hunt_obj = rdf_hunt_objects.Hunt(
      creator=creator,
      client_rule_set=client_rule_set,
      original_object=original_object,
      client_rate=client_rate,
      duration=duration,
      args=hunt_args,
      **kwargs)
  hunt.CreateHunt(hunt_obj)

  return hunt_obj.hunt_id
def CreateSampleHunt(self,
                     path=None,
                     stopped=False,
                     output_plugins=None,
                     client_limit=0,
                     client_count=10,
                     creator=None):
  """Creates a GetFile hunt over `path` with `client_count` test clients.

  Returns:
    The urn/id of the started hunt.
  """
  self.client_ids = self.SetupClients(client_count)

  self.hunt_urn = self.StartHunt(
      flow_runner_args=rdf_flow_runner.FlowRunnerArgs(
          flow_name=compatibility.GetName(transfer.GetFile)),
      flow_args=transfer.GetFileArgs(
          pathspec=rdf_paths.PathSpec(
              path=path or "/tmp/evil.txt",
              pathtype=rdf_paths.PathSpec.PathType.OS,
          )),
      client_rule_set=self._CreateForemanClientRuleSet(),
      output_plugins=output_plugins or [],
      client_rate=0,
      client_limit=client_limit,
      creator=creator or self.test_username,
      paused=stopped)

  return self.hunt_urn