def testFileFinderThrottlingByFlowCountWorks(self):
  """Checks that the throttled router rejects the third FileFinder flow."""
  self.InitRouterConfig(
      self.__class__.FILE_FINDER_THROTTLED_ROUTER_CONFIG % self.token.username)

  flow_args_list = []
  for path in ["tests.plist", "numbers.txt", "numbers.txt.ver2"]:
    flow_args_list.append(
        file_finder.FileFinderArgs(
            action=file_finder.FileFinderAction(action_type="STAT"),
            paths=[path]).AsPrimitiveProto())

  client_ref = self.api.Client(client_id=self.client_id.Basename())

  # The first two flows stay under the configured limit and start normally.
  started_flow = client_ref.CreateFlow(
      name=file_finder.FileFinder.__name__, args=flow_args_list[0])
  self.assertEqual(started_flow.data.state, started_flow.data.RUNNING)

  started_flow = client_ref.CreateFlow(
      name=file_finder.FileFinder.__name__, args=flow_args_list[1])
  self.assertEqual(started_flow.data.state, started_flow.data.RUNNING)

  # The third flow exceeds the per-client flow-count limit and is rejected.
  with self.assertRaisesRegexp(RuntimeError, "2 flows run since"):
    client_ref.CreateFlow(
        name=file_finder.FileFinder.__name__, args=flow_args_list[2])
def setUp(self):
  """Starts a GenericHunt downloading test.plist and runs it on one client."""
  super(ApiGetHuntFileHandlerTest, self).setUp()
  self.handler = hunt_plugin.ApiGetHuntFileHandler()

  self.file_path = os.path.join(self.base_path, "test.plist")

  # A GenericHunt wrapping FileFinder that downloads a single plist file.
  download_action = file_finder.FileFinderAction(action_type="DOWNLOAD")
  self.hunt = hunts.GRRHunt.StartHunt(
      hunt_name="GenericHunt",
      flow_runner_args=rdf_flows.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=file_finder.FileFinderArgs(
          paths=[self.file_path], action=download_action,),
      client_rate=0,
      token=self.token)
  self.hunt.Run()

  self.results_urn = self.hunt.results_collection_urn
  self.aff4_file_path = "fs/os/%s" % self.file_path

  # Process the hunt on a single freshly set up client with mocked actions.
  self.client_id = self.SetupClients(1)[0]
  self.AssignTasksToClients(client_ids=[self.client_id])
  client_mock = action_mocks.FileFinderClientMock()
  test_lib.TestHuntHelper(client_mock, [self.client_id], token=self.token)
def setUp(self):
  """Starts a GenericHunt downloading test.plist and runs it on one client."""
  super(ApiGetHuntFileHandlerTest, self).setUp()
  self.handler = hunt_plugin.ApiGetHuntFileHandler()

  self.file_path = os.path.join(self.base_path, "test.plist")

  # A GenericHunt wrapping FileFinder that downloads a single plist file.
  self.hunt = hunts.GRRHunt.StartHunt(
      hunt_name="GenericHunt",
      flow_runner_args=flow_runner.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=file_finder.FileFinderArgs(
          paths=[self.file_path],
          action=file_finder.FileFinderAction(action_type="DOWNLOAD"),),
      client_rate=0,
      token=self.token)
  self.hunt.Run()

  self.results_urn = self.hunt.state.context.results_collection_urn
  self.aff4_file_path = rdfvalue.RDFURN("os").Add(self.file_path)

  # Process the hunt on a single client using mocked client actions.
  self.client_id = self.SetupClients(1)[0]
  self.AssignTasksToClients(client_ids=[self.client_id])
  client_mock = action_mocks.ActionMock("TransferBuffer", "StatFile",
                                        "HashFile", "HashBuffer")
  test_lib.TestHuntHelper(client_mock, [self.client_id], token=self.token)
def Check(path):
  # Starting a FileFinder flow on this path must be allowed by the router.
  create_args = api_flow.ApiCreateFlowArgs(
      flow=api_flow.ApiFlow(
          name=file_finder.FileFinder.__name__,
          args=file_finder.FileFinderArgs(paths=[path])),
      client_id=self.client_id)
  router.CreateFlow(create_args, token=self.token)
def Handle(self, args, token=None):
  """Starts a throttled, byte-limited FileFinder flow on the target client."""
  client_urn = self.GetClientTarget(args, token=token)

  # Never act on files larger than the caller-provided size limit.
  size_condition = file_finder.FileFinderCondition(
      condition_type=file_finder.FileFinderCondition.Type.SIZE,
      size=file_finder.FileFinderSizeCondition(
          max_file_size=args.max_file_size))
  file_finder_args = file_finder.FileFinderArgs(
      paths=args.paths,
      action=file_finder.FileFinderAction(action_type=args.action),
      conditions=[size_condition])

  # Check our flow throttling limits, will raise if there are problems.
  throttle.FlowThrottler().EnforceLimits(
      client_urn, token.username, file_finder.FileFinder.__name__,
      file_finder_args, token=token)

  # Limit the whole flow to 200MB so if a glob matches lots of small files we
  # still don't have too much impact.
  runner_args = flow_runner.FlowRunnerArgs(
      client_id=client_urn,
      flow_name=file_finder.FileFinder.__name__,
      network_bytes_limit=200 * 1000 * 1000)

  flow_id = flow.GRRFlow.StartFlow(
      runner_args=runner_args, token=token, args=file_finder_args)

  return ApiStartRobotGetFilesOperationResult(
      operation_id=utils.SmartUnicode(flow_id))
def testFileFinderWorkflowWorks(self):
  """Starts a FileFinder flow through the API and runs it to completion."""
  self.InitRouterConfig(self.__class__.FILE_FINDER_ROUTER_CONFIG %
                        self.token.username)

  client_ref = self.api.Client(client_id=self.client_id.Basename())

  target_paths = [
      os.path.join(self.base_path, "test_data", "test.plist"),
      os.path.join(self.base_path, "test_data", "numbers.txt"),
      os.path.join(self.base_path, "test_data", "numbers.txt.ver2")
  ]
  args = file_finder.FileFinderArgs(paths=target_paths).AsPrimitiveProto()

  flow_obj = client_ref.CreateFlow(
      name=file_finder.FileFinder.__name__, args=args)
  self.assertEqual(flow_obj.data.state, flow_obj.data.RUNNING)

  # Now run the flow we just started.
  client_id = rdf_client.ClientURN(flow_obj.client_id)
  flow_urn = client_id.Add("flows").Add(flow_obj.flow_id)
  for _ in test_lib.TestFlowHelper(
      flow_urn,
      client_id=client_id,
      client_mock=action_mocks.FileFinderClientMock(),
      token=self.token):
    pass

  # Refresh flow and check that it finished successfully.
  flow_obj = client_ref.Flow(flow_obj.flow_id).Get()
  self.assertEqual(flow_obj.data.state, flow_obj.data.TERMINATED)
def Check(path):
  # Starting a FileFinder flow on this path must be rejected by the router.
  with self.assertRaises(access_control.UnauthorizedAccess):
    create_args = api_flow.ApiCreateFlowArgs(
        flow=api_flow.ApiFlow(
            name=file_finder.FileFinder.__name__,
            args=file_finder.FileFinderArgs(paths=[path])),
        client_id=self.client_id)
    router.CreateFlow(create_args, token=self.token)
def testNoThrottlingDoneByDefault(self):
  """Checks that a default router config applies no flow throttling."""
  self.InitRouterConfig(self.__class__.FILE_FINDER_ROUTER_CONFIG)

  args = file_finder.FileFinderArgs(
      action=file_finder.FileFinderAction(action_type="STAT"),
      paths=["tests.plist"]).AsPrimitiveProto()

  client_ref = self.api.Client(client_id=self.client_id.Basename())

  # Create 20 identical flows in a row to check that no throttling (neither
  # by duplicate interval nor by flow count) is applied.
  # NOTE(fix): the original comment claimed 60 flows, but the loop only runs
  # 20 iterations; the comment is corrected to match the code.
  for _ in range(20):
    flow_obj = client_ref.CreateFlow(
        name=file_finder.FileFinder.__name__, args=args)
    self.assertEqual(flow_obj.data.state, flow_obj.data.RUNNING)
def testFileFinderThrottlingByDuplicateIntervalWorks(self):
  """Checks that re-running an identical FileFinder flow is rejected."""
  # NOTE(fix): sibling tests interpolate the calling username into the
  # throttled router config template (see
  # testFileFinderThrottlingByFlowCountWorks); without the substitution the
  # template's format placeholder would be left unexpanded.
  self.InitRouterConfig(
      self.__class__.FILE_FINDER_THROTTLED_ROUTER_CONFIG % self.token.username)

  args = file_finder.FileFinderArgs(
      action=file_finder.FileFinderAction(action_type="STAT"),
      paths=["tests.plist"]).AsPrimitiveProto()

  client_ref = self.api.Client(client_id=self.client_id.Basename())

  # The first flow starts normally.
  flow_obj = client_ref.CreateFlow(
      name=file_finder.FileFinder.__name__, args=args)
  self.assertEqual(flow_obj.data.state, flow_obj.data.RUNNING)

  # Starting the very same flow again within the duplicate interval must be
  # rejected by the throttler.
  with self.assertRaisesRegexp(RuntimeError,
                               "Identical FileFinder already run"):
    client_ref.CreateFlow(name=file_finder.FileFinder.__name__, args=args)
def setUp(self):
  """Starts a file-downloading GenericHunt and runs it on ten clients."""
  super(ApiGetHuntFilesArchiveHandlerTest, self).setUp()
  self.handler = hunt_plugin.ApiGetHuntFilesArchiveHandler()

  # A GenericHunt wrapping FileFinder that downloads a single plist file.
  self.hunt = hunts.GRRHunt.StartHunt(
      hunt_name="GenericHunt",
      flow_runner_args=rdf_flows.FlowRunnerArgs(
          flow_name=file_finder.FileFinder.__name__),
      flow_args=file_finder.FileFinderArgs(
          paths=[os.path.join(self.base_path, "test.plist")],
          action=file_finder.FileFinderAction(action_type="DOWNLOAD"),),
      client_rate=0,
      token=self.token)
  self.hunt.Run()

  # Process the hunt on ten freshly set up clients with mocked actions.
  hunt_clients = self.SetupClients(10)
  self.AssignTasksToClients(client_ids=hunt_clients)
  client_mock = action_mocks.FileFinderClientMock()
  test_lib.TestHuntHelper(client_mock, hunt_clients, token=self.token)
def Render(self, args, token=None):
  """Starts a throttled FileFinder flow and renders its status information.

  Returns:
    A dict with the rendered flow id, flow args, runner args and a URL the
    caller can poll for flow status.
  """
  client_urn = self.GetClientTarget(args, token=token)

  # Never act on files larger than the caller-provided size limit.
  size_condition = file_finder.FileFinderCondition(
      condition_type=file_finder.FileFinderCondition.Type.SIZE,
      size=file_finder.FileFinderSizeCondition(
          max_file_size=args.max_file_size))
  file_finder_args = file_finder.FileFinderArgs(
      paths=args.paths,
      action=file_finder.FileFinderAction(action_type=args.action),
      conditions=[size_condition])

  # Check our flow throttling limits, will raise if there are problems.
  # NOTE(consistency): use file_finder.FileFinder.__name__ instead of the
  # "FileFinder" string literal, matching the Handle() implementation; the
  # runtime value is identical.
  throttler = throttle.FlowThrottler()
  throttler.EnforceLimits(client_urn, token.username,
                          file_finder.FileFinder.__name__,
                          file_finder_args, token=token)

  # Limit the whole flow to 200MB so if a glob matches lots of small files we
  # still don't have too much impact.
  runner_args = flow_runner.FlowRunnerArgs(
      client_id=client_urn,
      flow_name=file_finder.FileFinder.__name__,
      network_bytes_limit=200 * 1000 * 1000)

  flow_id = flow.GRRFlow.StartFlow(
      runner_args=runner_args, token=token, args=file_finder_args)

  # Provide a url where the caller can check on the flow status.
  status_url = urlparse.urljoin(
      config_lib.CONFIG["AdminUI.url"],
      "/api/flows/%s/%s/status" % (client_urn.Basename(),
                                   flow_id.Basename()))
  return dict(
      flow_id=api_value_renderers.RenderValue(flow_id),
      flow_args=api_value_renderers.RenderValue(file_finder_args),
      runner_args=api_value_renderers.RenderValue(runner_args),
      status_url=status_url)
def testFlowDuplicateLimit(self):
  """Checks duplicate-flow detection when the daily request limit is off."""
  # Disable the request limit checking by setting it to 0.
  throttler = throttle.FlowThrottler(
      daily_req_limit=0, dup_interval=rdfvalue.Duration("1200s"))

  def EnforceDummyLogFlow():
    # Helper: enforce limits for the simple no-args DummyLogFlow.
    throttler.EnforceLimits(self.client_id, self.token.username,
                            "DummyLogFlow", None, token=self.token)

  # Running the same flow immediately should fail.
  with test_lib.FakeTime(self.BASE_TIME):
    EnforceDummyLogFlow()
    flow.GRRFlow.StartFlow(client_id=self.client_id,
                           flow_name="DummyLogFlow",
                           token=self.token)
    with self.assertRaises(throttle.ErrorFlowDuplicate):
      EnforceDummyLogFlow()

  # Doing the same outside the window should work.
  with test_lib.FakeTime(self.BASE_TIME + 1200 + 1):
    EnforceDummyLogFlow()
    flow.GRRFlow.StartFlow(client_id=self.client_id,
                           flow_name="DummyLogFlow",
                           token=self.token)
    with self.assertRaises(throttle.ErrorFlowDuplicate):
      EnforceDummyLogFlow()

  # Now try a flow with more complicated args.
  ff_args = file_finder.FileFinderArgs(
      paths=["/tmp/1", "/tmp/2"],
      action=file_finder.FileFinderAction(action_type="STAT"))
  with test_lib.FakeTime(self.BASE_TIME):
    throttler.EnforceLimits(self.client_id, self.token.username,
                            "FileFinder", ff_args, token=self.token)
    flow.GRRFlow.StartFlow(
        client_id=self.client_id,
        flow_name="FileFinder",
        token=self.token,
        paths=["/tmp/1", "/tmp/2"],
        action=file_finder.FileFinderAction(action_type="STAT"))
    with self.assertRaises(throttle.ErrorFlowDuplicate):
      throttler.EnforceLimits(self.client_id, self.token.username,
                              "FileFinder", ff_args, token=self.token)

    # Different args should succeed.
    ff_args = file_finder.FileFinderArgs(
        paths=["/tmp/1", "/tmp/3"],
        action=file_finder.FileFinderAction(action_type="STAT"))
    throttler.EnforceLimits(self.client_id, self.token.username,
                            "FileFinder", ff_args, token=self.token)
def testCopyHuntHandlesLiteralExpressionCorrectly(self):
  """Literals are raw bytes. Testing that raw bytes are processed right."""
  literal_match = file_finder.FileFinderContentsLiteralMatchCondition(
      literal="foo\x0d\xc8bar")

  # Create a model hunt with a non-default literal-match condition.
  with self.ACLChecksDisabled():
    hunts.GRRHunt.StartHunt(
        hunt_name="GenericHunt",
        description="model hunt",
        flow_runner_args=rdf_flows.FlowRunnerArgs(
            flow_name=file_finder.FileFinder.__name__),
        flow_args=file_finder.FileFinderArgs(
            conditions=[
                file_finder.FileFinderCondition(
                    condition_type="CONTENTS_LITERAL_MATCH",
                    contents_literal_match=literal_match)
            ],
            paths=["/tmp/evil.txt"]),
        token=self.token)

  self.Open("/#main=ManageHunts")
  self.Click("css=tr:contains('model hunt')")
  self.Click("css=button[name=CopyHunt]:not([disabled])")

  # Wait until dialog appears.
  self.WaitUntil(self.IsTextPresent, "What to run?")
  # Check that non-default values of sample hunt are prefilled.
  self.WaitUntilEqual(
      "foo\\x0d\\xc8bar", self.GetValue, "css=grr-new-hunt-wizard-form "
      "label:contains('Literal') ~ * input:text")

  # Step through the wizard pages via the "Next" button.
  self.Click("css=grr-new-hunt-wizard-form button.Next")
  self.WaitUntil(self.IsTextPresent, "Output Processing")

  self.Click("css=grr-new-hunt-wizard-form button.Next")
  self.WaitUntil(self.IsTextPresent, "Where to run?")

  self.Click("css=grr-new-hunt-wizard-form button.Next")
  self.WaitUntil(self.IsTextPresent, "Review")

  # Check that the arguments summary is present.
  self.WaitUntil(self.IsTextPresent, file_finder.FileFinder.__name__)
  self.WaitUntil(self.IsTextPresent, "foo\\x0d\\xc8bar")

  # Click on "Run" button.
  self.Click("css=grr-new-hunt-wizard-form button.Next")
  self.WaitUntil(self.IsTextPresent, "Created Hunt")

  # Close the window and check that the hunt was created.
  self.Click("css=button.Next")

  hunts_root = aff4.FACTORY.Open("aff4:/hunts", token=self.token)
  all_hunts = sorted(list(hunts_root.ListChildren()), key=lambda x: x.age)
  self.assertEqual(len(all_hunts), 2)

  copied_hunt = aff4.FACTORY.Open(all_hunts[-1], token=self.token)
  # Check that the hunt was created with a correct literal value.
  self.assertEqual(copied_hunt.args.flow_runner_args.flow_name,
                   file_finder.FileFinder.__name__)
  self.assertEqual(
      copied_hunt.args.flow_args.conditions[0].contents_literal_match.literal,
      "foo\x0d\xc8bar")
def testFileFinderWorkflowWorks(self):
  """End-to-end FileFinder run through the API, including archive download."""
  self.InitRouterConfig(self.__class__.FILE_FINDER_ROUTER_CONFIG %
                        self.token.username)

  client_ref = self.api.Client(client_id=self.client_id.Basename())

  target_paths = [
      os.path.join(self.base_path, "test.plist"),
      os.path.join(self.base_path, "numbers.txt"),
      os.path.join(self.base_path, "numbers.txt.ver2")
  ]
  args = file_finder.FileFinderArgs(
      paths=target_paths,
      action=file_finder.FileFinderAction(
          action_type=file_finder.FileFinderAction.Action.DOWNLOAD)
  ).AsPrimitiveProto()

  flow_obj = client_ref.CreateFlow(
      name=file_finder.FileFinder.__name__, args=args)
  self.assertEqual(flow_obj.data.state, flow_obj.data.RUNNING)

  # Now run the flow we just started.
  client_id = rdf_client.ClientURN(flow_obj.client_id)
  flow_urn = client_id.Add("flows").Add(flow_obj.flow_id)
  for _ in test_lib.TestFlowHelper(
      flow_urn,
      client_id=client_id,
      client_mock=action_mocks.FileFinderClientMock(),
      token=self.token):
    pass

  # Refresh flow.
  flow_obj = client_ref.Flow(flow_obj.flow_id).Get()
  self.assertEqual(flow_obj.data.state, flow_obj.data.TERMINATED)

  # Check that we got 3 results (we downloaded 3 files).
  results = list(flow_obj.ListResults())
  self.assertEqual(len(results), 3)
  # We expect results to be FileFinderResult.
  self.assertEqual(
      sorted(
          os.path.basename(r.payload.stat_entry.aff4path) for r in results),
      sorted(["test.plist", "numbers.txt", "numbers.txt.ver2"]))

  # Now downloads the files archive.
  zip_stream = StringIO.StringIO()
  flow_obj.GetFilesArchive().WriteToStream(zip_stream)
  zip_fd = zipfile.ZipFile(zip_stream)

  # Now check that the archive has only "test.plist" file, as it's the
  # only file that matches the whitelist (see FILE_FINDER_ROUTER_CONFIG).
  # There should be 3 items in the archive: the hash of the "test.plist"
  # file, the symlink to this hash and the MANIFEST file.
  archive_names = zip_fd.namelist()
  self.assertEqual(len(archive_names), 3)

  # First component of every path in the archive is the containing folder,
  # we should strip it.
  archive_names = [os.path.join(*n.split(os.sep)[1:]) for n in archive_names]

  with open(os.path.join(self.base_path, "test.plist")) as test_plist_fd:
    test_plist_hash = hashlib.sha256(test_plist_fd.read()).hexdigest()
  self.assertEqual(
      sorted([
          # pyformat: disable
          os.path.join(self.client_id.Basename(), "fs", "os",
                       self.base_path.strip("/"), "test.plist"),
          os.path.join("hashes", test_plist_hash),
          "MANIFEST"
          # pyformat: enable
      ]),
      sorted(archive_names))
"""GRR console script to collect bashrc.""" from grr.lib import flow_utils from grr.lib.flows.general import file_finder action = file_finder.FileFinderAction(action_type="DOWNLOAD") ff_args = file_finder.FileFinderArgs(paths=["/home/*/.bashrc"], action=action) newest_time = "" target_client = None for client in SearchClients("client-ubuntu-trusty-m"): if client[3] > newest_time: newest_time = client[3] target_client = client[0] if target_client: flow_utils.StartFlowAndWait(target_client.urn, token=None, flow_name="FileFinder", args=ff_args)