def testDoesNotFetchDuplicates(self):
  """Two processes sharing one executable should yield a single binary."""
  shared_exe = os.path.join(self.base_path, "test_img.dd")
  creation_time = long(1333718907.167083 * 1e6)
  procs = [
      rdfvalue.Process(pid=2,
                       ppid=1,
                       cmdline=["test_img.dd"],
                       exe=shared_exe,
                       ctime=creation_time),
      rdfvalue.Process(pid=3,
                       ppid=1,
                       cmdline=["test_img.dd", "--arg"],
                       exe=shared_exe,
                       ctime=creation_time),
  ]
  client_mock = ListProcessesMock(procs)
  output_path = "analysis/GetBinariesFlowTest1"

  for _ in test_lib.TestFlowHelper("ListProcesses",
                                   client_mock,
                                   client_id=self.client_id,
                                   fetch_binaries=True,
                                   token=self.token,
                                   output=output_path):
    pass

  # Despite two processes, the shared executable is fetched only once.
  fd = aff4.FACTORY.Open(self.client_id.Add(output_path), token=self.token)
  self.assertEqual(len(fd), 1)
def testWhenFetchingIgnoresMissingFiles(self):
  """Processes whose executables are missing on disk are skipped."""
  creation_time = long(1333718907.167083 * 1e6)
  existing = rdfvalue.Process(
      pid=2,
      ppid=1,
      cmdline=["test_img.dd"],
      exe=os.path.join(self.base_path, "test_img.dd"),
      ctime=creation_time)
  # NOTE(review): reuses pid=2; only the exe path matters for this test.
  missing = rdfvalue.Process(
      pid=2,
      ppid=1,
      cmdline=["file_that_does_not_exist"],
      exe=os.path.join(self.base_path, "file_that_does_not_exist"),
      ctime=creation_time)
  client_mock = ListProcessesMock([existing, missing])
  output_path = "analysis/GetBinariesFlowTest1"

  # check_flow_errors=False: the flow is expected to record a failure for
  # the nonexistent file rather than abort the whole run.
  for _ in test_lib.TestFlowHelper("ListProcesses",
                                   client_mock,
                                   client_id=self.client_id,
                                   fetch_binaries=True,
                                   token=self.token,
                                   check_flow_errors=False,
                                   output=output_path):
    pass

  fd = aff4.FACTORY.Open(self.client_id.Add(output_path), token=self.token)
  binaries = list(fd)
  self.assertEqual(len(binaries), 1)
  self.assertEqual(binaries[0].pathspec.path, existing.exe)
def testDoesNotShowGenerateArchiveButtonForNonExportableRDFValues(self):
  """The archive-download note must be absent for non-exportable results."""
  hunt_results = [rdfvalue.Process(pid=1), rdfvalue.Process(pid=42423)]
  with self.ACLChecksDisabled():
    self.CreateGenericHuntWithCollection(values=hunt_results)

  # Navigate to the hunt's Results tab in the UI.
  self.Open("/")
  self.Click("css=a[grrtarget=ManageHunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")

  # Results render (pid is visible) but the archive offer does not appear.
  self.WaitUntil(self.IsTextPresent, "42423")
  self.WaitUntilNot(self.IsTextPresent,
                    "Results of this hunt can be downloaded as an archive")
def testProcessListingOnly(self): """Test that the ListProcesses flow works.""" client_mock = ListProcessesMock([ rdfvalue.Process(pid=2, ppid=1, cmdline=["cmd.exe"], exe="c:\\windows\\cmd.exe", ctime=long(1333718907.167083 * 1e6)) ]) flow_urn = flow.GRRFlow.StartFlow(client_id=self.client_id, flow_name="ListProcesses", output="Processes", token=self.token) for _ in test_lib.TestFlowHelper(flow_urn, client_mock, client_id=self.client_id, token=self.token): pass # Check the output collection processes = aff4.FACTORY.Open(self.client_id.Add("Processes"), token=self.token) self.assertEqual(len(processes), 1) self.assertEqual(processes[0].ctime, 1333718907167083L) self.assertEqual(processes[0].cmdline, ["cmd.exe"])
def testFetchesAndStoresBinary(self):
  """A process's executable is fetched and its size recorded."""
  target = os.path.join(self.base_path, "test_img.dd")
  process = rdfvalue.Process(pid=2,
                             ppid=1,
                             cmdline=["test_img.dd"],
                             exe=target,
                             ctime=long(1333718907.167083 * 1e6))
  client_mock = ListProcessesMock([process])
  output_path = "analysis/GetBinariesFlowTest1"

  for _ in test_lib.TestFlowHelper("ListProcesses",
                                   client_mock,
                                   client_id=self.client_id,
                                   fetch_binaries=True,
                                   token=self.token,
                                   output=output_path):
    pass

  fd = aff4.FACTORY.Open(self.client_id.Add(output_path), token=self.token)
  binaries = list(fd)
  self.assertEqual(len(binaries), 1)
  # The fetched binary's path and size match the on-disk executable.
  self.assertEqual(binaries[0].pathspec.path, process.exe)
  self.assertEqual(binaries[0].st_size, os.stat(process.exe).st_size)
def testProcessToExportedNetworkConnection(self):
  """Each connection of a process becomes one exported connection."""
  listen_state = rdfvalue.NetworkConnection.State.LISTEN
  stream_type = rdfvalue.NetworkConnection.Type.SOCK_STREAM

  conn1 = rdfvalue.NetworkConnection(
      state=listen_state,
      type=stream_type,
      local_address=rdfvalue.NetworkEndpoint(ip="0.0.0.0", port=22),
      remote_address=rdfvalue.NetworkEndpoint(ip="0.0.0.0", port=0),
      pid=2136,
      ctime=0)
  conn2 = rdfvalue.NetworkConnection(
      state=listen_state,
      type=stream_type,
      local_address=rdfvalue.NetworkEndpoint(ip="192.168.1.1", port=31337),
      remote_address=rdfvalue.NetworkEndpoint(ip="1.2.3.4", port=6667),
      pid=1,
      ctime=0)
  process = rdfvalue.Process(pid=2,
                             ppid=1,
                             cmdline=["cmd.exe"],
                             exe="c:\\windows\\cmd.exe",
                             ctime=long(1333718907.167083 * 1e6),
                             connections=[conn1, conn2])

  converter = export.ProcessToExportedNetworkConnectionConverter()
  results = list(converter.Convert(rdfvalue.ExportedMetadata(),
                                   process,
                                   token=self.token))
  self.assertEqual(len(results), 2)

  # Expected per result: (local ip, local port, remote ip, remote port, pid).
  expected = [("0.0.0.0", 22, "0.0.0.0", 0, 2136),
              ("192.168.1.1", 31337, "1.2.3.4", 6667, 1)]
  for result, (lip, lport, rip, rport, pid) in zip(results, expected):
    self.assertEqual(result.state, listen_state)
    self.assertEqual(result.type, stream_type)
    self.assertEqual(result.local_address.ip, lip)
    self.assertEqual(result.local_address.port, lport)
    self.assertEqual(result.remote_address.ip, rip)
    self.assertEqual(result.remote_address.port, rport)
    self.assertEqual(result.pid, pid)
    self.assertEqual(result.ctime, 0)
def ParseProcess(self, item):
  """Build a Process rdfvalue from an _EPROCESS item's Cybox fields.

  Args:
    item: A dict holding an "_EPROCESS" entry with a nested "Cybox" dict.

  Returns:
    An rdfvalue.Process populated from whichever fields are present;
    absent keys yield None via dict.get.
  """
  cybox = item.get("_EPROCESS", {}).get("Cybox", {})
  return rdfvalue.Process(
      exe=cybox.get("Name"),
      pid=cybox.get("PID"),
      ppid=cybox.get("Parent_PID"),
      num_threads=item.get("thread_count"),
      ctime=item.get("process_create_time", {}).get("epoch"))
def testCSVPluginGeneratesTemporaryNameIfOutputDirIsNotSpecified(self):
  """Without an output_dir the plugin picks a temporary output file."""
  _, plugin = self.RunHunt(responses=[rdfvalue.Process(pid=42)])

  # The plugin registers an output file keyed by the exported type name.
  self.assertTrue("ExportedProcess" in plugin.state.files_by_type)

  output_file = aff4.FACTORY.Open(
      plugin.state.files_by_type["ExportedProcess"].urn,
      aff4_type="AFF4Image",
      token=self.token)
  reader = csv.DictReader(StringIO.StringIO(output_file.Read(sys.maxint)))
  parsed_output = list(reader)
  self.assertEqual(len(parsed_output), 1)
def Parse(self, result, unused_knowledge_base):
  """Parse the key pslist plugin output."""
  for value_dict in self.IterateSections(result, "pslist"):
    process = rdfvalue.Process()
    for key, value in value_dict.iteritems():
      # Ignore columns we have no mapping for.
      if key not in self.mapping:
        continue
      attr = self.mapping[key]
      # String-typed attributes take the rendered value (svalue),
      # everything else takes the raw value.
      if isinstance(getattr(process, attr), basestring):
        setattr(process, attr, value.svalue)
      else:
        setattr(process, attr, value.value)
    yield process
def testCSVPluginWithValuesOfMultipleTypes(self):
  """CSV output plugin writes one CSV file per exported value type."""
  # Run a hunt returning both a StatEntry and a Process so the plugin has
  # to fan the results out into two different per-type CSV files.
  hunt_urn, _ = self.RunHunt(
      plugin_args=rdfvalue.CSVOutputPluginArgs(
          output_dir=rdfvalue.RDFURN("aff4:/tmp/csv")),
      responses=[
          rdfvalue.StatEntry(
              aff4path=self.client_id.Add("/fs/os/foo/bar"),
              pathspec=rdfvalue.PathSpec(path="/foo/bar")),
          rdfvalue.Process(pid=42)
      ],
      process_responses_separately=True)

  # Exactly one file per exported type should appear in the output dir.
  plugin_output_files = sorted(
      list(
          aff4.FACTORY.Open("aff4:/tmp/csv",
                            token=self.token).ListChildren()))
  self.assertListEqual(plugin_output_files, [
      rdfvalue.RDFURN("aff4:/tmp/csv/ExportedFile.csv"),
      rdfvalue.RDFURN("aff4:/tmp/csv/ExportedProcess.csv")
  ])

  # ExportedFile CSV: metadata columns plus the file's URN.
  output_file = aff4.FACTORY.Open(plugin_output_files[0],
                                  aff4_type="AFF4Image",
                                  token=self.token)
  parsed_output = list(
      csv.DictReader(StringIO.StringIO(output_file.Read(sys.maxint))))
  self.assertEqual(len(parsed_output), 1)
  self.assertEqual(parsed_output[0]["metadata.client_urn"], self.client_id)
  self.assertEqual(parsed_output[0]["metadata.hostname"], "Host-0")
  self.assertEqual(parsed_output[0]["metadata.mac_address"], "aabbccddee00")
  self.assertEqual(parsed_output[0]["metadata.source_urn"],
                   hunt_urn.Add("Results"))
  self.assertEqual(parsed_output[0]["urn"],
                   self.client_id.Add("/fs/os/foo/bar"))

  # ExportedProcess CSV: same metadata columns plus the pid (as a string,
  # since CSV values are untyped text).
  output_file = aff4.FACTORY.Open(plugin_output_files[1],
                                  aff4_type="AFF4Image",
                                  token=self.token)
  parsed_output = list(
      csv.DictReader(StringIO.StringIO(output_file.Read(sys.maxint))))
  self.assertEqual(len(parsed_output), 1)
  self.assertEqual(parsed_output[0]["metadata.client_urn"], self.client_id)
  self.assertEqual(parsed_output[0]["metadata.hostname"], "Host-0")
  self.assertEqual(parsed_output[0]["metadata.mac_address"], "aabbccddee00")
  self.assertEqual(parsed_output[0]["metadata.source_urn"],
                   hunt_urn.Add("Results"))
  self.assertEqual(parsed_output[0]["pid"], "42")
def testProcessListingWithFilter(self): """Test that the ListProcesses flow works with filter.""" client_mock = ListProcessesMock([ rdfvalue.Process(pid=2, ppid=1, cmdline=["cmd.exe"], exe="c:\\windows\\cmd.exe", ctime=long(1333718907.167083 * 1e6)), rdfvalue.Process(pid=3, ppid=1, cmdline=["cmd2.exe"], exe="c:\\windows\\cmd2.exe", ctime=long(1333718907.167083 * 1e6)), rdfvalue.Process(pid=4, ppid=1, cmdline=["missing_exe.exe"], ctime=long(1333718907.167083 * 1e6)), rdfvalue.Process(pid=5, ppid=1, cmdline=["missing2_exe.exe"], ctime=long(1333718907.167083 * 1e6))]) flow_urn = flow.GRRFlow.StartFlow(client_id=self.client_id, flow_name="ListProcesses", output="Processes", filename_regex=r".*cmd2.exe", token=self.token) for _ in test_lib.TestFlowHelper( flow_urn, client_mock, client_id=self.client_id, token=self.token): pass # Expect one result that matches regex processes = aff4.FACTORY.Open(self.client_id.Add("Processes"), token=self.token) self.assertEqual(len(processes), 1) self.assertEqual(processes[0].ctime, 1333718907167083L) self.assertEqual(processes[0].cmdline, ["cmd2.exe"]) # Expect two skipped results logs = aff4.FACTORY.Open(flow_urn.Add("Logs"), token=self.token) for log in logs: if "Skipped 2" in log.log_message: return raise RuntimeError("Skipped process not mentioned in logs")
def testEmailPluginSendsEmailPerEveyBatchOfResponses(self):
  """A single batch of responses triggers exactly one notification email."""
  self.ProcessResponses(
      plugin_args=rdfvalue.EmailOutputPluginArgs(
          email_address=self.email_address),
      responses=[rdfvalue.Process(pid=42)])

  self.assertEqual(len(self.email_messages), 1)

  msg = self.email_messages[0]
  self.assertEqual(msg["address"], self.email_address)
  # Title mentions the results collection; body identifies the client.
  expected_title_part = "got a new result in %s" % self.results_urn
  self.assertTrue(expected_title_part in msg["title"])
  self.assertTrue(utils.SmartStr(self.client_id) in msg["message"])
  self.assertTrue(utils.SmartStr(self.hostname) in msg["message"])
def testCSVPluginWithValuesOfMultipleTypes(self):
  """CSV plugin splits mixed-type responses into one stream per type."""
  # Feed both a StatEntry and a Process so the plugin must open two
  # separate CSV output streams, one per exported type.
  streams = self.ProcessResponses(
      plugin_args=rdfvalue.CSVOutputPluginArgs(),
      responses=[
          rdfvalue.StatEntry(
              aff4path=self.client_id.Add("/fs/os/foo/bar"),
              pathspec=rdfvalue.PathSpec(path="/foo/bar")),
          rdfvalue.Process(pid=42)
      ],
      process_responses_separately=True)

  # Streams are keyed by "<ExportedType>.csv" and live under aff4:/foo/bar.
  self.assertEqual(sorted(streams.keys()),
                   sorted(["ExportedFile.csv", "ExportedProcess.csv"]))
  self.assertEqual(streams["ExportedFile.csv"].urn,
                   rdfvalue.RDFURN("aff4:/foo/bar/ExportedFile.csv"))
  self.assertEqual(streams["ExportedProcess.csv"].urn,
                   rdfvalue.RDFURN("aff4:/foo/bar/ExportedProcess.csv"))

  # ExportedFile stream: metadata columns plus the file's URN. Note the
  # multi-value mac_address joined with a newline.
  contents = StringIO.StringIO(streams["ExportedFile.csv"].Read(16384))
  parsed_output = list(csv.DictReader(contents))
  self.assertEqual(len(parsed_output), 1)
  self.assertEqual(parsed_output[0]["metadata.client_urn"], self.client_id)
  self.assertEqual(parsed_output[0]["metadata.hostname"], "Host-0")
  self.assertEqual(parsed_output[0]["metadata.mac_address"],
                   "aabbccddee00\nbbccddeeff00")
  self.assertEqual(parsed_output[0]["metadata.source_urn"],
                   self.results_urn)
  self.assertEqual(parsed_output[0]["urn"],
                   self.client_id.Add("/fs/os/foo/bar"))

  # ExportedProcess stream: same metadata plus the pid (CSV text, so "42").
  contents = StringIO.StringIO(
      streams["ExportedProcess.csv"].Read(16384))
  parsed_output = list(csv.DictReader(contents))
  self.assertEqual(len(parsed_output), 1)
  self.assertEqual(parsed_output[0]["metadata.client_urn"], self.client_id)
  self.assertEqual(parsed_output[0]["metadata.hostname"], "Host-0")
  self.assertEqual(parsed_output[0]["metadata.mac_address"],
                   "aabbccddee00\nbbccddeeff00")
  self.assertEqual(parsed_output[0]["metadata.source_urn"],
                   self.results_urn)
  self.assertEqual(parsed_output[0]["pid"], "42")
def testProcessToExportedOpenFileConverter(self):
  """Each open file of a process yields one exported entry."""
  process = rdfvalue.Process(pid=2,
                             ppid=1,
                             cmdline=["cmd.exe"],
                             exe="c:\\windows\\cmd.exe",
                             ctime=long(1333718907.167083 * 1e6),
                             open_files=["/some/a", "/some/b"])

  converter = export.ProcessToExportedOpenFileConverter()
  results = list(converter.Convert(rdfvalue.ExportedMetadata(),
                                   process,
                                   token=self.token))

  self.assertEqual(len(results), 2)
  # Results preserve the open_files order and carry the owning pid.
  for result, expected_path in zip(results, ["/some/a", "/some/b"]):
    self.assertEqual(result.pid, 2)
    self.assertEqual(result.path, expected_path)
def testProcessToExportedProcessConverter(self):
  """Basic process fields survive conversion to the exported form."""
  creation_time = long(1333718907.167083 * 1e6)
  process = rdfvalue.Process(pid=2,
                             ppid=1,
                             cmdline=["cmd.exe"],
                             exe="c:\\windows\\cmd.exe",
                             ctime=creation_time)

  converter = export.ProcessToExportedProcessConverter()
  results = list(converter.Convert(rdfvalue.ExportedMetadata(),
                                   process,
                                   token=self.token))

  self.assertEqual(len(results), 1)
  exported = results[0]
  self.assertEqual(exported.pid, 2)
  self.assertEqual(exported.ppid, 1)
  # The cmdline list comes back flattened into a single string.
  self.assertEqual(exported.cmdline, "cmd.exe")
  self.assertEqual(exported.exe, "c:\\windows\\cmd.exe")
  self.assertEqual(exported.ctime, creation_time)
def testWhenFetchingFiltersOutProcessesWithoutExeAttribute(self):
  """Processes that carry no exe path produce no fetched binaries."""
  exeless = rdfvalue.Process(pid=2,
                             ppid=1,
                             cmdline=["test_img.dd"],
                             ctime=long(1333718907.167083 * 1e6))
  client_mock = ListProcessesMock([exeless])
  output_path = "analysis/GetBinariesFlowTest1"

  for _ in test_lib.TestFlowHelper("ListProcesses",
                                   client_mock,
                                   fetch_binaries=True,
                                   client_id=self.client_id,
                                   token=self.token,
                                   output=output_path):
    pass

  # No file created since no output matched.
  with self.assertRaises(IOError):
    aff4.FACTORY.Open(self.client_id.Add(output_path),
                      aff4_type="RDFValueCollection",
                      token=self.token)
def testEmailPluginStopsSendingEmailsAfterLimitIsReached(self):
  """No more emails are sent once emails_limit is reached.

  Sends 11 responses with a limit of 10: exactly 10 emails go out, the
  first 9 are plain notifications and the 10th carries the notice that
  email sending is now disabled.
  """
  responses = [rdfvalue.Process(pid=i) for i in range(11)]
  self.ProcessResponses(
      plugin_args=rdfvalue.EmailOutputPluginArgs(
          email_address=self.email_address, emails_limit=10),
      responses=responses,
      process_responses_separately=True)

  # 11 responses against a limit of 10 -> exactly 10 emails.
  self.assertEqual(len(self.email_messages), 10)

  for msg in self.email_messages:
    self.assertEqual(msg["address"], self.email_address)
    self.assertTrue(
        "got a new result in %s" % self.results_urn in msg["title"])
    self.assertTrue(utils.SmartStr(self.client_id) in msg["message"])
    self.assertTrue(utils.SmartStr(self.hostname) in msg["message"])

  # BUG FIX: the original asserted `"..." in msg` on the message DICT,
  # which tests dict keys and is therefore always False, and sliced
  # [:10] (all messages), contradicting the assertion below that the
  # 10th message contains the notice. Check the message bodies of only
  # the first 9 messages instead.
  for msg in self.email_messages[:9]:
    self.assertFalse(
        "sending of emails will be disabled now" in msg["message"])
  self.assertTrue("sending of emails will be disabled now"
                  in self.email_messages[9]["message"])
def Run(self, unused_arg):
  """Enumerate all processes via psutil and send one Process reply each.

  Every psutil query is wrapped in its own try/except so that a process
  that exits mid-enumeration, or one we lack permissions for, only loses
  that field (or that process) instead of aborting the whole listing.
  """
  # psutil will cause an active loop on Windows 2000
  if platform.system() == "Windows" and platform.version().startswith(
      "5.0"):
    raise RuntimeError("ListProcesses not supported on Windows 2000")

  for proc in psutil.process_iter():
    response = rdfvalue.Process()
    process_fields = ["pid", "ppid", "name", "exe", "username", "terminal"]

    # Copy simple scalar fields. Depending on the psutil version these
    # are exposed as attributes or as methods, hence the callable() check.
    for field in process_fields:
      try:
        value = getattr(proc, field)
        if value is None:
          continue

        if callable(value):
          value = value()

        # Integers pass through unchanged; everything else is coerced
        # to unicode for the protobuf field.
        if not isinstance(value, (int, long)):
          value = utils.SmartUnicode(value)

        setattr(response, field, value)
      except (psutil.NoSuchProcess, psutil.AccessDenied, AttributeError):
        pass

    try:
      for arg in proc.cmdline():
        response.cmdline.append(utils.SmartUnicode(arg))
    except (psutil.NoSuchProcess, psutil.AccessDenied):
      pass

    try:
      response.nice = proc.nice()
    except (psutil.NoSuchProcess, psutil.AccessDenied):
      pass

    try:
      # Not available on Windows.
      if hasattr(proc, "uids"):
        (response.real_uid, response.effective_uid,
         response.saved_uid) = proc.uids()
        (response.real_gid, response.effective_gid,
         response.saved_gid) = proc.gids()
    except (psutil.NoSuchProcess, psutil.AccessDenied):
      pass

    try:
      # ctime is recorded in microseconds since the epoch.
      response.ctime = long(proc.create_time() * 1e6)
      response.status = str(proc.status())
    except (psutil.NoSuchProcess, psutil.AccessDenied):
      pass

    try:
      # Not available on OSX.
      if hasattr(proc, "cwd"):
        response.cwd = utils.SmartUnicode(proc.cwd())
    except (psutil.NoSuchProcess, psutil.AccessDenied):
      pass

    try:
      response.num_threads = proc.num_threads()
    except (psutil.NoSuchProcess, psutil.AccessDenied, RuntimeError):
      pass

    try:
      (response.user_cpu_time,
       response.system_cpu_time) = proc.cpu_times()
      # This is very time consuming so we do not collect cpu_percent here.
      # response.cpu_percent = proc.get_cpu_percent()
    except (psutil.NoSuchProcess, psutil.AccessDenied):
      pass

    try:
      response.RSS_size, response.VMS_size = proc.memory_info()
      response.memory_percent = proc.memory_percent()
    except (psutil.NoSuchProcess, psutil.AccessDenied):
      pass

    # Due to a bug in psutil, this function is disabled for now
    # (https://github.com/giampaolo/psutil/issues/340)
    # try:
    #  for f in proc.open_files():
    #    response.open_files.append(utils.SmartUnicode(f.path))
    # except (psutil.NoSuchProcess, psutil.AccessDenied):
    #  pass

    try:
      for c in proc.connections():
        conn = response.connections.Append(family=c.family,
                                           type=c.type,
                                           pid=proc.pid)

        try:
          conn.state = c.status
        except ValueError:
          logging.info("Encountered unknown connection status (%s).",
                       c.status)

        # Newer psutil exposes laddr/raddr; older versions used
        # local_address/remote_address. Fall back on AttributeError.
        try:
          conn.local_address.ip, conn.local_address.port = c.laddr

          # Could be in state LISTEN.
          if c.raddr:
            conn.remote_address.ip, conn.remote_address.port = c.raddr
        except AttributeError:
          conn.local_address.ip, conn.local_address.port = c.local_address

          # Could be in state LISTEN.
          if c.remote_address:
            (conn.remote_address.ip,
             conn.remote_address.port) = c.remote_address
    except (psutil.NoSuchProcess, psutil.AccessDenied):
      pass

    self.SendReply(response)
    # Reading information here is slow so we heartbeat between processes.
    self.Progress()