def testSizeCondition(self):
    """Checks that min/max file-size conditions filter FileFinder results."""
    test_dir = self._PrepareTimestampedFiles()

    # We have one "old" file, auth.log, and two "new" ones, dpkg*.
    # Auth.log is 770 bytes, the other two ~620 each.
    paths = [test_dir + "/{dpkg.log,dpkg_false.log,auth.log}"]

    # With a 700-byte minimum only auth.log should match.
    min_size_condition = rdf_file_finder.FileFinderCondition(
        condition_type="SIZE",
        size=rdf_file_finder.FileFinderSizeCondition(min_file_size=700))
    self.RunAndCheck(
        paths,
        conditions=[min_size_condition],
        expected=["auth.log"],
        unexpected=["dpkg.log", "dpkg_false.log"],
        base_path=test_dir)

    # With a 700-byte maximum only the two dpkg logs should match.
    max_size_condition = rdf_file_finder.FileFinderCondition(
        condition_type="SIZE",
        size=rdf_file_finder.FileFinderSizeCondition(max_file_size=700))
    self.RunAndCheck(
        paths,
        conditions=[max_size_condition],
        expected=["dpkg.log", "dpkg_false.log"],
        unexpected=["auth.log"],
        base_path=test_dir)
class TestFileFinderOSWindows(base.VFSPathContentIsPE):
    """Download a file with FileFinder.

    Exercise globbing, interpolation and filtering.
    """

    platforms = ["Windows"]
    flow = "FileFinder"
    test_output_path = "/fs/os/C:/Windows/System32/notepad.exe"

    # Restrict the download to files of at most 1 MB.
    sizecondition = rdf_file_finder.FileFinderSizeCondition(
        max_file_size=1000000)
    filecondition = rdf_file_finder.FileFinderCondition(
        condition_type=rdf_file_finder.FileFinderCondition.Type.SIZE,
        size=sizecondition)

    download = rdf_file_finder.FileFinderDownloadActionOptions()
    action = rdf_file_finder.FileFinderAction(
        action_type=rdf_file_finder.FileFinderAction.Action.DOWNLOAD,
        download=download)

    args = {
        "paths": ["%%environ_systemroot%%\\System32\\notepad.*"],
        "conditions": filecondition,
        "action": action,
    }
def _CreateHuntFromFlow(self):
    """Starts a FileFinder flow, then builds a hunt referencing it.

    The flow args are deliberately mutated after the flow has started so
    that the resulting hunt's arguments differ from the original flow's.

    Returns:
      A (hunt, flow_urn) tuple, where the hunt's original_object points
      at the started flow.
    """
    self.client_id = self.SetupClient(0)

    flow_args = rdf_file_finder.FileFinderArgs(
        paths=["a/*", "b/*"],
        action=rdf_file_finder.FileFinderAction(action_type="STAT"))
    flow_runner_args = rdf_flows.FlowRunnerArgs(
        flow_name=file_finder.FileFinder.__name__)
    flow_urn = flow.GRRFlow.StartFlow(
        client_id=self.client_id,
        args=flow_args,
        runner_args=flow_runner_args,
        token=self.token)

    ref = rdf_hunts.FlowLikeObjectReference.FromFlowIdAndClientId(
        flow_urn.Basename(), self.client_id.Basename())

    # Modify flow_args so that there are differences.
    flow_args.paths = ["b/*", "c/*"]
    flow_args.action.action_type = "DOWNLOAD"
    flow_args.conditions = [
        rdf_file_finder.FileFinderCondition(
            condition_type="SIZE",
            size=rdf_file_finder.FileFinderSizeCondition(min_file_size=42))
    ]

    hunt = self.CreateHunt(
        flow_args=flow_args,
        flow_runner_args=flow_runner_args,
        original_object=ref)
    return hunt, flow_urn
def testSizeCondition(self):
    """Checks that a min-size condition filters out small registry values."""
    # There are two values, one is 20 bytes, the other 53.
    size_condition = registry.RegistryFinderCondition(
        condition_type=registry.RegistryFinderCondition.Type.SIZE,
        size=rdf_file_finder.FileFinderSizeCondition(min_file_size=50))
    session_id = self.RunFlow([self.runkey], [size_condition])

    results = self.GetResults(session_id)
    # Only the 53-byte value satisfies min_file_size=50.
    self.assertEqual(len(results), 1)
    self.assertGreater(results[0].stat_entry.st_size, 50)
class TestFileFinderOSLinux(base.VFSPathContentIsELF):
    """Download a file with FileFinder."""

    platforms = ["Linux"]
    flow = file_finder.FileFinder.__name__
    test_output_path = "/fs/os/bin/ps"

    # Restrict the download to files of at most 1 MB.
    sizecondition = rdf_file_finder.FileFinderSizeCondition(max_file_size=1000000)
    filecondition = rdf_file_finder.FileFinderCondition(
        condition_type=rdf_file_finder.FileFinderCondition.Type.SIZE,
        size=sizecondition)

    args = {
        "paths": ["/bin/ps"],
        "conditions": filecondition,
        "action": rdf_file_finder.FileFinderAction.Download(),
    }
class TestFileFinderOSLinuxProc(base.VFSPathContentExists):
    """Download a /proc/sys entry with FileFinder."""

    platforms = ["Linux"]
    flow = file_finder.FileFinder.__name__
    test_output_path = "/fs/os/proc/sys/net/ipv4/ip_forward"
    # Requires a client new enough to handle /proc pseudo-files.
    client_min_version = 3007

    # Restrict the download to files of at most 1 MB.
    sizecondition = rdf_file_finder.FileFinderSizeCondition(max_file_size=1000000)
    filecondition = rdf_file_finder.FileFinderCondition(
        condition_type=rdf_file_finder.FileFinderCondition.Type.SIZE,
        size=sizecondition)

    args = {
        "paths": ["/proc/sys/net/ipv4/ip_forward"],
        "conditions": filecondition,
        "action": rdf_file_finder.FileFinderAction.Download(),
    }
def testSizeConditionWithDifferentActions(self):
    """A max-size condition keeps small files for every tested action."""
    expected_files = ["dpkg.log", "dpkg_false.log"]
    non_expected_files = ["auth.log"]

    # Derive the size limit from the fixtures themselves so the test does
    # not depend on hard-coded byte counts.
    sizes = [
        os.stat(os.path.join(self.fixture_path, name)).st_size
        for name in expected_files
    ]
    size_condition = rdf_file_finder.FileFinderCondition(
        condition_type=rdf_file_finder.FileFinderCondition.Type.SIZE,
        size=rdf_file_finder.FileFinderSizeCondition(
            max_file_size=max(sizes) + 1))

    for action in self.CONDITION_TESTS_ACTIONS:
        self.RunFlowAndCheckResults(
            action=action,
            conditions=[size_condition],
            expected_files=expected_files,
            non_expected_files=non_expected_files)
def testSizeAndRegexConditionsWithDifferentActions(self):
    """Combined size+regex conditions exclude all files, in either order."""
    files_over_size_limit = ["auth.log"]
    filtered_files = ["dpkg.log", "dpkg_false.log"]
    # The size condition removes auth.log; the regex condition removes the
    # dpkg logs — together nothing should match.
    expected_files = []
    non_expected_files = files_over_size_limit + filtered_files

    # Derive the limit from the fixtures so it sits just below auth.log.
    sizes = [
        os.stat(os.path.join(self.fixture_path, name)).st_size
        for name in files_over_size_limit
    ]
    size_condition = rdf_file_finder.FileFinderCondition(
        condition_type=rdf_file_finder.FileFinderCondition.Type.SIZE,
        size=rdf_file_finder.FileFinderSizeCondition(
            max_file_size=min(sizes) - 1))

    regex_condition = rdf_file_finder.FileFinderCondition(
        condition_type=(
            rdf_file_finder.FileFinderCondition.Type.CONTENTS_REGEX_MATCH),
        contents_regex_match=rdf_file_finder.
        FileFinderContentsRegexMatchCondition(
            mode=(rdf_file_finder.FileFinderContentsRegexMatchCondition.
                  Mode.ALL_HITS),
            bytes_before=10,
            bytes_after=10,
            regex="session opened for user .*?john"))

    for action in self.CONDITION_TESTS_ACTIONS:
        self.RunFlowAndCheckResults(
            action=action,
            conditions=[size_condition, regex_condition],
            expected_files=expected_files,
            non_expected_files=non_expected_files)

    # Check that order of conditions doesn't influence results.
    for action in self.CONDITION_TESTS_ACTIONS:
        self.RunFlowAndCheckResults(
            action=action,
            conditions=[regex_condition, size_condition],
            expected_files=expected_files,
            non_expected_files=non_expected_files)
class TestFileFinderOSLinux(base.VFSPathContentIsELF):
    """Download a file with FileFinder."""

    platforms = ["Linux"]
    flow = "FileFinder"
    test_output_path = "/fs/os/bin/ps"

    # Restrict the download to files of at most 1 MB.
    sizecondition = rdf_file_finder.FileFinderSizeCondition(
        max_file_size=1000000)
    filecondition = rdf_file_finder.FileFinderCondition(
        condition_type=rdf_file_finder.FileFinderCondition.Type.SIZE,
        size=sizecondition)

    download = rdf_file_finder.FileFinderDownloadActionOptions()
    action = rdf_file_finder.FileFinderAction(
        action_type=rdf_file_finder.FileFinderAction.Action.DOWNLOAD,
        download=download)

    args = {
        "paths": ["/bin/ps"],
        "conditions": filecondition,
        "action": action,
    }
def Handle(self, args, token=None):
    """Starts a size-limited FileFinder flow on the target client.

    Builds FileFinder arguments from the request (paths, action and a
    max-file-size condition), enforces per-user flow throttling, then
    starts the flow with a network-bytes cap.

    Args:
      args: The API request with paths, action and max_file_size.
      token: Access token used for throttling and starting the flow.

    Returns:
      An ApiStartRobotGetFilesOperationResult holding the flow id.
    """
    client_urn = self.GetClientTarget(args, token=token)

    size_condition = rdf_file_finder.FileFinderCondition(
        condition_type=rdf_file_finder.FileFinderCondition.Type.SIZE,
        size=rdf_file_finder.FileFinderSizeCondition(
            max_file_size=args.max_file_size))
    file_finder_args = rdf_file_finder.FileFinderArgs(
        paths=args.paths,
        action=rdf_file_finder.FileFinderAction(action_type=args.action),
        conditions=[size_condition])

    # Check our flow throttling limits, will raise if there are problems.
    throttler = throttle.FlowThrottler(
        daily_req_limit=config_lib.CONFIG.Get("API.DailyFlowRequestLimit"),
        dup_interval=config_lib.CONFIG.Get("API.FlowDuplicateInterval"))
    throttler.EnforceLimits(
        client_urn,
        token.username,
        file_finder.FileFinder.__name__,
        file_finder_args,
        token=token)

    # Limit the whole flow to 200MB so if a glob matches lots of small
    # files we still don't have too much impact.
    runner_args = rdf_flows.FlowRunnerArgs(
        client_id=client_urn,
        flow_name=file_finder.FileFinder.__name__,
        network_bytes_limit=200 * 1000 * 1000)
    flow_id = flow.GRRFlow.StartFlow(
        runner_args=runner_args, token=token, args=file_finder_args)

    return ApiStartRobotGetFilesOperationResult(
        operation_id=utils.SmartUnicode(flow_id))