def testWhenFetchingFiltersOutProcessesWithoutExeAndConnectionState(self):
  """Binary fetching skips processes without an exe or a matching state."""
  # No exe attribute, so there is nothing to fetch for this process.
  proc_no_exe = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["test_img.dd"],
      ctime=long(1333718907.167083 * 1e6))
  # Has an exe, but its connection state is not one we ask for below.
  proc_wrong_state = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["cmd.exe"],
      exe="c:\\windows\\cmd.exe",
      ctime=long(1333718907.167083 * 1e6),
      connections=rdf_client.NetworkConnection(
          family="INET", state="ESTABLISHED"))
  client_mock = ListProcessesMock([proc_no_exe, proc_wrong_state])

  for flow_session in test_lib.TestFlowHelper(
      "ListProcesses",
      client_mock,
      fetch_binaries=True,
      client_id=self.client_id,
      connection_states=["LISTEN"],
      token=self.token):
    session_id = flow_session

  # No output matched.
  results = aff4.FACTORY.Open(
      session_id.Add(flow_runner.RESULTS_SUFFIX),
      aff4_type=sequential_collection.GeneralIndexedCollection,
      token=self.token)
  self.assertEqual(len(results), 0)
def testWhenFetchingFiltersOutProcessesWithoutExeAndConnectionState(self):
  """Binary fetching skips processes without an exe or a matching state."""
  client_id = test_lib.TEST_CLIENT_ID
  # No exe attribute, so there is nothing to fetch for this process.
  proc_no_exe = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["test_img.dd"],
      ctime=long(1333718907.167083 * 1e6))
  # Has an exe, but its connection state is not one we ask for below.
  proc_wrong_state = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["cmd.exe"],
      exe="c:\\windows\\cmd.exe",
      ctime=long(1333718907.167083 * 1e6),
      connections=rdf_client.NetworkConnection(
          family="INET", state="ESTABLISHED"))
  client_mock = action_mocks.ListProcessesMock(
      [proc_no_exe, proc_wrong_state])

  for flow_session in flow_test_lib.TestFlowHelper(
      flow_processes.ListProcesses.__name__,
      client_mock,
      fetch_binaries=True,
      client_id=client_id,
      connection_states=["LISTEN"],
      token=self.token):
    session_id = flow_session

  # No output matched.
  processes = flow.GRRFlow.ResultCollectionForFID(session_id)
  self.assertEqual(len(processes), 0)
def testDoesNotFetchDuplicates(self):
  """Two processes backed by the same binary yield a single fetched file."""
  shared_exe = os.path.join(self.base_path, "test_img.dd")
  proc_a = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["test_img.dd"],
      exe=shared_exe,
      ctime=long(1333718907.167083 * 1e6))
  proc_b = rdf_client.Process(
      pid=3,
      ppid=1,
      cmdline=["test_img.dd", "--arg"],
      exe=shared_exe,
      ctime=long(1333718907.167083 * 1e6))
  client_mock = action_mocks.ListProcessesMock([proc_a, proc_b])

  for flow_session in flow_test_lib.TestFlowHelper(
      flow_processes.ListProcesses.__name__,
      client_mock,
      client_id=test_lib.TEST_CLIENT_ID,
      fetch_binaries=True,
      token=self.token):
    session_id = flow_session

  # Both processes point at the same file, so only one result is expected.
  processes = flow.GRRFlow.ResultCollectionForFID(session_id)
  self.assertEqual(len(processes), 1)
def testWhenFetchingIgnoresMissingFiles(self):
  """Fetching must tolerate (not fail on) binaries missing from disk."""
  existing = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["test_img.dd"],
      exe=os.path.join(self.base_path, "test_img.dd"),
      ctime=long(1333718907.167083 * 1e6))
  missing = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["file_that_does_not_exist"],
      exe=os.path.join(self.base_path, "file_that_does_not_exist"),
      ctime=long(1333718907.167083 * 1e6))
  client_mock = action_mocks.ListProcessesMock([existing, missing])

  # check_flow_errors=False: the nonexistent file causes a client-side
  # error that the flow is expected to tolerate.
  for flow_session in flow_test_lib.TestFlowHelper(
      flow_processes.ListProcesses.__name__,
      client_mock,
      client_id=test_lib.TEST_CLIENT_ID,
      fetch_binaries=True,
      token=self.token,
      check_flow_errors=False):
    session_id = flow_session

  results = flow.GRRFlow.ResultCollectionForFID(session_id)
  binaries = list(results)
  # Only the existing binary is fetched.
  self.assertEqual(len(binaries), 1)
  self.assertEqual(binaries[0].pathspec.path, existing.exe)
def testDoesNotFetchDuplicates(self):
  """Two processes backed by the same binary yield a single fetched file."""
  shared_exe = os.path.join(self.base_path, "test_img.dd")
  proc_a = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["test_img.dd"],
      exe=shared_exe,
      ctime=long(1333718907.167083 * 1e6))
  proc_b = rdf_client.Process(
      pid=3,
      ppid=1,
      cmdline=["test_img.dd", "--arg"],
      exe=shared_exe,
      ctime=long(1333718907.167083 * 1e6))
  client_mock = ListProcessesMock([proc_a, proc_b])

  for flow_session in test_lib.TestFlowHelper(
      "ListProcesses",
      client_mock,
      client_id=self.client_id,
      fetch_binaries=True,
      token=self.token):
    session_id = flow_session

  # Both processes point at the same file, so only one result is expected.
  fd = aff4.FACTORY.Open(
      session_id.Add(flow_runner.RESULTS_SUFFIX), token=self.token)
  self.assertEqual(len(fd), 1)
def testWhenFetchingIgnoresMissingFiles(self):
  """Fetching must tolerate (not fail on) binaries missing from disk."""
  existing = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["test_img.dd"],
      exe=os.path.join(self.base_path, "test_img.dd"),
      ctime=long(1333718907.167083 * 1e6))
  missing = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["file_that_does_not_exist"],
      exe=os.path.join(self.base_path, "file_that_does_not_exist"),
      ctime=long(1333718907.167083 * 1e6))
  client_mock = ListProcessesMock([existing, missing])
  output_path = "analysis/GetBinariesFlowTest1"

  # Drain the flow; check_flow_errors=False tolerates the client-side
  # error for the nonexistent file.
  list(
      test_lib.TestFlowHelper(
          "ListProcesses",
          client_mock,
          client_id=self.client_id,
          fetch_binaries=True,
          token=self.token,
          check_flow_errors=False,
          output=output_path))

  fd = aff4.FACTORY.Open(self.client_id.Add(output_path), token=self.token)
  binaries = list(fd)
  # Only the existing binary is fetched.
  self.assertEqual(len(binaries), 1)
  self.assertEqual(binaries[0].pathspec.path, existing.exe)
def testDoesNotFetchDuplicates(self):
  """Two processes backed by the same binary yield a single fetched file."""
  shared_exe = os.path.join(self.base_path, "test_img.dd")
  proc_a = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["test_img.dd"],
      exe=shared_exe,
      ctime=long(1333718907.167083 * 1e6))
  proc_b = rdf_client.Process(
      pid=3,
      ppid=1,
      cmdline=["test_img.dd", "--arg"],
      exe=shared_exe,
      ctime=long(1333718907.167083 * 1e6))
  client_mock = ListProcessesMock([proc_a, proc_b])
  output_path = "analysis/GetBinariesFlowTest1"

  # Drain the flow generator to completion.
  list(
      test_lib.TestFlowHelper(
          "ListProcesses",
          client_mock,
          client_id=self.client_id,
          fetch_binaries=True,
          token=self.token,
          output=output_path))

  # Both processes point at the same file, so only one result is expected.
  fd = aff4.FACTORY.Open(self.client_id.Add(output_path), token=self.token)
  self.assertEqual(len(fd), 1)
def testExportCommandIsNotShownForNonFileResults(self):
  """The export-command hint must be absent for non-file hunt results."""
  hunt_urn = self.CreateGenericHuntWithCollection(
      values=[rdf_client.Process(pid=1),
              rdf_client.Process(pid=42423)])

  self.Open("/#/hunts/%s/results" % hunt_urn.Basename())
  # Wait for the results table to render before asserting absence.
  self.WaitUntil(self.IsElementPresent,
                 "css=grr-hunt-results:contains('Value')")
  self.WaitUntilNot(self.IsTextPresent, "Show export command")
def testFindNoRunningLogserver(self):
  """CIS-SERVICE-LOGSERVER-RUNNING fires iff no logging daemon is running."""
  chk_id = "CIS-SERVICE-LOGSERVER-RUNNING"
  sym = "Missing attribute: Logging software is not running."
  context = "RAW"
  found = ["Expected state was not found"]
  host_data = self.GenHostData()

  # Without rsyslog the anomaly must be detected.
  results = self.RunChecks(host_data)
  self.assertCheckDetectedAnom(chk_id, results, sym, found)

  # With rsyslog running the anomaly must clear.
  rsyslog = rdf_client.Process(name="rsyslogd", pid=1236)
  host_data["ListProcessesGrr"][context].append(rsyslog)
  results = self.RunChecks(host_data)
  self.assertCheckUndetected(chk_id, results)

  # Check with some problematic real-world data.
  host_data = self.GenHostData()  # Reset the host_data.
  # A non-logger process whose cmdline merely mentions an rsyslogd config
  # file; the check must still raise an anomaly.
  lookalike = rdf_client.Process(
      name="python",
      pid=10554,
      ppid=1,
      exe="/usr/bin/python",
      cmdline=[
          "/usr/bin/python", "-E", "/usr/sbin/foo_agent",
          "/etc/foo/conf.d/rsyslogd.conf", "/etc/foo/foobar.conf"
      ])
  host_data["ListProcessesGrr"][context].append(lookalike)
  results = self.RunChecks(host_data)
  self.assertCheckDetectedAnom(chk_id, results, sym, found)

  # A genuine logging service process clears the anomaly.
  real_logger = rdf_client.Process(
      name="rsyslogd",
      pid=10200,
      ppid=1,
      exe="/sbin/rsyslogd",
      cmdline=["/sbin/rsyslogd", "-i", "/var/run/rsyslogd.pid", "-m", "0"])
  host_data["ListProcessesGrr"][context].append(real_logger)
  results = self.RunChecks(host_data)
  self.assertCheckUndetected(chk_id, results)

  # Yet another non-logger process must not re-raise the anomaly.
  unrelated = rdf_client.Process(
      name="foobar",
      pid=31337,
      ppid=1,
      exe="/usr/local/bin/foobar",
      cmdline=["/usr/local/bin/foobar", "--test", "args"])
  host_data["ListProcessesGrr"][context].append(unrelated)
  results = self.RunChecks(host_data)
  self.assertCheckUndetected(chk_id, results)
def testDoesNotShowGenerateArchiveButtonForNonExportableRDFValues(self):
  """Archive generation must not be offered for non-exportable values."""
  self.CreateGenericHuntWithCollection(
      values=[rdf_client.Process(pid=1),
              rdf_client.Process(pid=42423)])

  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")

  # Wait until the results list is populated before asserting absence.
  self.WaitUntil(self.IsTextPresent, "42423")
  self.WaitUntilNot(
      self.IsTextPresent,
      "Files referenced in this collection can be downloaded")
def testProcessListingWithFilter(self): """Test that the ListProcesses flow works with filter.""" client_id = self.SetupClient(0) client_mock = action_mocks.ListProcessesMock([ rdf_client.Process( pid=2, ppid=1, cmdline=["cmd.exe"], exe="c:\\windows\\cmd.exe", ctime=long(1333718907.167083 * 1e6)), rdf_client.Process( pid=3, ppid=1, cmdline=["cmd2.exe"], exe="c:\\windows\\cmd2.exe", ctime=long(1333718907.167083 * 1e6)), rdf_client.Process( pid=4, ppid=1, cmdline=["missing_exe.exe"], ctime=long(1333718907.167083 * 1e6)), rdf_client.Process( pid=5, ppid=1, cmdline=["missing2_exe.exe"], ctime=long(1333718907.167083 * 1e6)) ]) flow_urn = flow.GRRFlow.StartFlow( client_id=client_id, flow_name=flow_processes.ListProcesses.__name__, filename_regex=r".*cmd2.exe", token=self.token) session_id = flow_test_lib.TestFlowHelper( flow_urn, client_mock, client_id=client_id, token=self.token) # Expect one result that matches regex processes = flow.GRRFlow.ResultCollectionForFID(session_id) self.assertEqual(len(processes), 1) self.assertEqual(processes[0].ctime, 1333718907167083L) self.assertEqual(processes[0].cmdline, ["cmd2.exe"]) # Expect two skipped results logs = flow.GRRFlow.LogCollectionForFID(flow_urn) for log in logs: if "Skipped 2" in log.log_message: return raise RuntimeError("Skipped process not mentioned in logs")
def testDoesNotShowPerFileDownloadButtonForNonExportableRDFValues(self):
  """Per-file download buttons must be absent for non-exportable values."""
  self.CreateGenericHuntWithCollection(
      values=[rdf_client.Process(pid=1),
              rdf_client.Process(pid=42423)])

  self.Open("/")
  self.Click("css=a[grrtarget=hunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")

  # Wait until the results list is populated before asserting absence.
  self.WaitUntil(self.IsTextPresent, "42423")
  self.WaitUntilNot(
      self.IsElementPresent,
      "css=grr-results-collection button:has(span.glyphicon-download)")
def testProcessListingWithFilter(self): """Test that the ListProcesses flow works with filter.""" client_mock = ListProcessesMock([ rdf_client.Process(pid=2, ppid=1, cmdline=["cmd.exe"], exe="c:\\windows\\cmd.exe", ctime=long(1333718907.167083 * 1e6)), rdf_client.Process(pid=3, ppid=1, cmdline=["cmd2.exe"], exe="c:\\windows\\cmd2.exe", ctime=long(1333718907.167083 * 1e6)), rdf_client.Process(pid=4, ppid=1, cmdline=["missing_exe.exe"], ctime=long(1333718907.167083 * 1e6)), rdf_client.Process(pid=5, ppid=1, cmdline=["missing2_exe.exe"], ctime=long(1333718907.167083 * 1e6)) ]) flow_urn = flow.GRRFlow.StartFlow(client_id=self.client_id, flow_name="ListProcesses", output="Processes", filename_regex=r".*cmd2.exe", token=self.token) for _ in test_lib.TestFlowHelper(flow_urn, client_mock, client_id=self.client_id, token=self.token): pass # Expect one result that matches regex processes = aff4.FACTORY.Open(self.client_id.Add("Processes"), token=self.token) self.assertEqual(len(processes), 1) self.assertEqual(processes[0].ctime, 1333718907167083L) self.assertEqual(processes[0].cmdline, ["cmd2.exe"]) # Expect two skipped results logs = aff4.FACTORY.Open(flow_urn.Add("Logs"), token=self.token) for log in logs: if "Skipped 2" in log.log_message: return raise RuntimeError("Skipped process not mentioned in logs")
def GenHostData(self):
  """Build knowledge-base host data with three processes holding sockets."""
  host_data = self.SetKnowledgeBase()
  # Loopback and external listeners on port 6000, IPv4 and IPv6.
  lo_v4 = self.AddListener("127.0.0.1", 6000)
  lo_v6 = self.AddListener("::1", 6000, "INET6")
  ext_v4 = self.AddListener("10.1.1.1", 6000)
  ext_v6 = self.AddListener("fc00::1", 6000, "INET6")
  # x11 only listens on loopback; xorg and sshd also bind externally.
  x11 = rdf_client.Process(name="x11", pid=1233, connections=[lo_v4, lo_v6])
  xorg = rdf_client.Process(
      name="xorg", pid=1234, connections=[lo_v4, lo_v6, ext_v4, ext_v6])
  sshd = rdf_client.Process(
      name="sshd", pid=1235, connections=[lo_v4, lo_v6, ext_v4, ext_v6])
  host_data["ListProcessesGrr"] = self.SetArtifactData(
      parsed=[x11, xorg, sshd])
  return host_data
def testDoesNotShowPerFileDownloadButtonForNonExportableRDFValues(self):
  """Per-file download buttons must be absent for non-exportable values."""
  values = [rdf_client.Process(pid=1), rdf_client.Process(pid=42423)]
  # Hunt creation requires elevated privileges in this test harness.
  with self.ACLChecksDisabled():
    self.CreateGenericHuntWithCollection(values=values)

  self.Open("/")
  self.Click("css=a[grrtarget=ManageHunts]")
  self.Click("css=td:contains('GenericHunt')")
  self.Click("css=li[heading=Results]")

  # Wait until the results list is populated before asserting absence.
  self.WaitUntil(self.IsTextPresent, "42423")
  self.WaitUntilNot(
      self.IsElementPresent,
      "css=grr-results-collection grr-downloadable-urn button")
def testProcessListingOnly(self): """Test that the ListProcesses flow works.""" client_id = test_lib.TEST_CLIENT_ID client_mock = action_mocks.ListProcessesMock([ rdf_client.Process(pid=2, ppid=1, cmdline=["cmd.exe"], exe="c:\\windows\\cmd.exe", ctime=long(1333718907.167083 * 1e6)) ]) flow_urn = flow.GRRFlow.StartFlow( client_id=client_id, flow_name=flow_processes.ListProcesses.__name__, token=self.token) for s in flow_test_lib.TestFlowHelper(flow_urn, client_mock, client_id=client_id, token=self.token): session_id = s # Check the output collection processes = flow.GRRFlow.ResultCollectionForFID(session_id) self.assertEqual(len(processes), 1) self.assertEqual(processes[0].ctime, 1333718907167083L) self.assertEqual(processes[0].cmdline, ["cmd.exe"])
def testExportedFilenamesAndManifestForValuesOfMultipleTypes(self):
  """A mixed-type collection exports one SQL file per exported type.

  Also checks that the MANIFEST reports the per-type export counts.
  """
  zip_fd, prefix = self.ProcessValuesToZip({
      rdf_client.StatEntry: [
          rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
              path="/foo/bar", pathtype="OS"))
      ],
      rdf_client.Process: [rdf_client.Process(pid=42)]
  })
  self.assertEqual(
      set(zip_fd.namelist()), {
          "%s/MANIFEST" % prefix,
          "%s/ExportedFile_from_StatEntry.sql" % prefix,
          "%s/ExportedProcess_from_Process.sql" % prefix
      })

  # The manifest is plain YAML, so safe_load suffices; unlike yaml.load it
  # cannot construct arbitrary Python objects from the archive's content.
  parsed_manifest = yaml.safe_load(zip_fd.read("%s/MANIFEST" % prefix))
  self.assertEqual(
      parsed_manifest, {
          "export_stats": {
              "StatEntry": {
                  "ExportedFile": 1
              },
              "Process": {
                  "ExportedProcess": 1
              }
          }
      })
def testFetchesAndStoresBinary(self):
  """The fetched binary is stored with matching path and size."""
  process = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["test_img.dd"],
      exe=os.path.join(self.base_path, "test_img.dd"),
      ctime=long(1333718907.167083 * 1e6))
  client_mock = ListProcessesMock([process])
  output_path = "analysis/GetBinariesFlowTest1"

  # Drain the flow generator to completion.
  list(
      test_lib.TestFlowHelper(
          "ListProcesses",
          client_mock,
          client_id=self.client_id,
          fetch_binaries=True,
          token=self.token,
          output=output_path))

  fd = aff4.FACTORY.Open(self.client_id.Add(output_path), token=self.token)
  binaries = list(fd)
  self.assertEqual(len(binaries), 1)
  self.assertEqual(binaries[0].pathspec.path, process.exe)
  # The stored stat entry must reflect the on-disk size.
  self.assertEqual(binaries[0].st_size, os.stat(process.exe).st_size)
def testProcessListingOnly(self): """Test that the ListProcesses flow works.""" client_mock = ListProcessesMock([ rdf_client.Process(pid=2, ppid=1, cmdline=["cmd.exe"], exe="c:\\windows\\cmd.exe", ctime=long(1333718907.167083 * 1e6)) ]) flow_urn = flow.GRRFlow.StartFlow(client_id=self.client_id, flow_name="ListProcesses", output="Processes", token=self.token) for _ in test_lib.TestFlowHelper(flow_urn, client_mock, client_id=self.client_id, token=self.token): pass # Check the output collection processes = aff4.FACTORY.Open(self.client_id.Add("Processes"), token=self.token) self.assertEqual(len(processes), 1) self.assertEqual(processes[0].ctime, 1333718907167083L) self.assertEqual(processes[0].cmdline, ["cmd.exe"])
def testListResultsForListProcessesFlow(self):
  """Flow results listed via the API match the process the client sent."""
  process = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["cmd.exe"],
      exe="c:\\windows\\cmd.exe",
      ctime=long(1333718907.167083 * 1e6),
      RSS_size=42)

  client_urn = self.SetupClients(1)[0]
  client_mock = processes_test.ListProcessesMock([process])

  flow_urn = flow.GRRFlow.StartFlow(
      client_id=client_urn,
      flow_name=processes.ListProcesses.__name__,
      token=self.token)
  # Drain the flow generator to completion.
  list(
      flow_test_lib.TestFlowHelper(
          flow_urn, client_mock, client_id=client_urn, token=self.token))

  result_flow = self.api.Client(
      client_id=client_urn.Basename()).Flow(flow_urn.Basename())
  results = list(result_flow.ListResults())

  self.assertEqual(len(results), 1)
  # The API payload round-trips to the original process proto.
  self.assertEqual(process.AsPrimitiveProto(), results[0].payload)
def testCSVPluginWithValuesOfMultipleTypes(self):
  """CSV export of a mixed collection yields one CSV per exported type.

  Verifies archive layout, MANIFEST counts, and that per-row metadata is
  limited to client_urn/source_urn (hostname and MAC are left empty).
  """
  zip_fd, prefix = self.ProcessValuesToZip({
      rdf_client.StatEntry: [
          rdf_client.StatEntry(
              aff4path=self.client_id.Add("/fs/os/foo/bar"),
              pathspec=rdf_paths.PathSpec(path="/foo/bar"))
      ],
      rdf_client.Process: [rdf_client.Process(pid=42)]
  })
  self.assertEqual(
      set(zip_fd.namelist()),
      set([
          "%s/MANIFEST" % prefix,
          "%s/ExportedFile/from_StatEntry.csv" % prefix,
          "%s/ExportedProcess/from_Process.csv" % prefix
      ]))

  # The manifest is plain YAML, so safe_load suffices; unlike yaml.load it
  # cannot construct arbitrary Python objects from the archive's content.
  parsed_manifest = yaml.safe_load(zip_fd.read("%s/MANIFEST" % prefix))
  self.assertEqual(
      parsed_manifest, {
          "export_stats": {
              "StatEntry": {
                  "ExportedFile": 1
              },
              "Process": {
                  "ExportedProcess": 1
              }
          }
      })

  parsed_output = list(
      csv.DictReader(
          zip_fd.open("%s/ExportedFile/from_StatEntry.csv" % prefix)))
  self.assertEqual(len(parsed_output), 1)

  # Only the client_urn is filled in by the plugin. Doing lookups for
  # all the clients metadata is possible but expensive. It doesn't seem to
  # be worth it.
  self.assertEqual(parsed_output[0]["metadata.client_urn"], self.client_id)
  self.assertEqual(parsed_output[0]["metadata.hostname"], "")
  self.assertEqual(parsed_output[0]["metadata.mac_address"], "")
  self.assertEqual(parsed_output[0]["metadata.source_urn"], self.results_urn)
  self.assertEqual(parsed_output[0]["urn"],
                   self.client_id.Add("/fs/os/foo/bar"))

  parsed_output = list(
      csv.DictReader(
          zip_fd.open("%s/ExportedProcess/from_Process.csv" % prefix)))
  self.assertEqual(len(parsed_output), 1)
  self.assertEqual(parsed_output[0]["metadata.client_urn"], self.client_id)
  self.assertEqual(parsed_output[0]["metadata.hostname"], "")
  self.assertEqual(parsed_output[0]["metadata.mac_address"], "")
  self.assertEqual(parsed_output[0]["metadata.source_urn"], self.results_urn)
  self.assertEqual(parsed_output[0]["pid"], "42")
def testCSVPluginWithValuesOfMultipleTypes(self):
  """CSV export of a mixed collection yields one CSV per exported type.

  Verifies archive layout, MANIFEST counts, and that per-row metadata is
  fully populated (hostname and MAC addresses of the test client).
  """
  zip_fd, prefix = self.ProcessValuesToZip({
      rdf_client.StatEntry: [
          rdf_client.StatEntry(pathspec=rdf_paths.PathSpec(
              path="/foo/bar", pathtype="OS"))
      ],
      rdf_client.Process: [rdf_client.Process(pid=42)]
  })
  self.assertEqual(
      set(zip_fd.namelist()),
      set([
          "%s/MANIFEST" % prefix,
          "%s/ExportedFile/from_StatEntry.csv" % prefix,
          "%s/ExportedProcess/from_Process.csv" % prefix
      ]))

  # The manifest is plain YAML, so safe_load suffices; unlike yaml.load it
  # cannot construct arbitrary Python objects from the archive's content.
  parsed_manifest = yaml.safe_load(zip_fd.read("%s/MANIFEST" % prefix))
  self.assertEqual(
      parsed_manifest, {
          "export_stats": {
              "StatEntry": {
                  "ExportedFile": 1
              },
              "Process": {
                  "ExportedProcess": 1
              }
          }
      })

  parsed_output = list(
      csv.DictReader(
          zip_fd.open("%s/ExportedFile/from_StatEntry.csv" % prefix)))
  self.assertEqual(len(parsed_output), 1)

  # Make sure metadata is filled in.
  self.assertEqual(parsed_output[0]["metadata.client_urn"], self.client_id)
  self.assertEqual(parsed_output[0]["metadata.hostname"], "Host-0")
  self.assertEqual(parsed_output[0]["metadata.mac_address"],
                   "aabbccddee00\nbbccddeeff00")
  self.assertEqual(parsed_output[0]["metadata.source_urn"], self.results_urn)
  self.assertEqual(parsed_output[0]["urn"],
                   self.client_id.Add("/fs/os/foo/bar"))

  parsed_output = list(
      csv.DictReader(
          zip_fd.open("%s/ExportedProcess/from_Process.csv" % prefix)))
  self.assertEqual(len(parsed_output), 1)
  self.assertEqual(parsed_output[0]["metadata.client_urn"], self.client_id)
  self.assertEqual(parsed_output[0]["metadata.hostname"], "Host-0")
  self.assertEqual(parsed_output[0]["metadata.mac_address"],
                   "aabbccddee00\nbbccddeeff00")
  self.assertEqual(parsed_output[0]["metadata.source_urn"], self.results_urn)
  self.assertEqual(parsed_output[0]["pid"], "42")
def GenProcessData(self, processes):
  """Create some process-based host data."""
  host_data = self.SetKnowledgeBase()
  # Build one rdf Process per (name, pid, cmdline) tuple.
  procs = [
      rdf_client.Process(name=name, pid=pid, cmdline=cmdline)
      for (name, pid, cmdline) in processes
  ]
  host_data["ListProcessesGrr"] = self.SetArtifactData(parsed=procs)
  return host_data
def GenProcessData(self, processes, **kwargs):
  """Create some process-based host data."""
  host_data = self.SetKnowledgeBase(**kwargs)
  # Build one rdf Process per (name, pid, cmdline) tuple.
  procs = [
      rdf_client.Process(name=name, pid=pid, cmdline=cmdline)
      for (name, pid, cmdline) in processes
  ]
  # ListProcessesGrr is a flow artifact, thus it needs stored as raw.
  host_data["ListProcessesGrr"] = self.SetArtifactData(raw=procs)
  return host_data
def ParseProcess(self, item):
  """Build an rdf Process from the item's _EPROCESS/Cybox sub-dicts.

  Missing keys simply leave the corresponding Process field unset.
  """
  cybox = item.get("_EPROCESS", {}).get("Cybox", {})
  return rdf_client.Process(
      exe=cybox.get("Name"),
      pid=cybox.get("PID"),
      ppid=cybox.get("Parent_PID"),
      num_threads=item.get("thread_count"),
      ctime=item.get("process_create_time", {}).get("epoch"))
def testProcessListingFilterConnectionState(self):
  """Only processes in the requested connection states are returned."""
  closed_proc = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["cmd.exe"],
      exe="c:\\windows\\cmd.exe",
      ctime=long(1333718907.167083 * 1e6),
      connections=rdf_client.NetworkConnection(
          family="INET", state="CLOSED"))
  listen_proc = rdf_client.Process(
      pid=3,
      ppid=1,
      cmdline=["cmd2.exe"],
      exe="c:\\windows\\cmd2.exe",
      ctime=long(1333718907.167083 * 1e6),
      connections=rdf_client.NetworkConnection(
          family="INET", state="LISTEN"))
  established_proc = rdf_client.Process(
      pid=4,
      ppid=1,
      cmdline=["missing_exe.exe"],
      ctime=long(1333718907.167083 * 1e6),
      connections=rdf_client.NetworkConnection(
          family="INET", state="ESTABLISHED"))
  client_mock = ListProcessesMock(
      [closed_proc, listen_proc, established_proc])

  flow_urn = flow.GRRFlow.StartFlow(
      client_id=self.client_id,
      flow_name="ListProcesses",
      connection_states=["ESTABLISHED", "LISTEN"],
      token=self.token)
  for flow_session in test_lib.TestFlowHelper(
      flow_urn, client_mock, client_id=self.client_id, token=self.token):
    session_id = flow_session

  processes = aff4.FACTORY.Open(
      session_id.Add(flow_runner.RESULTS_SUFFIX), token=self.token)
  # The CLOSED process is filtered out; the other two remain.
  self.assertEqual(len(processes), 2)
  states = set(
      str(process.connections[0].state) for process in processes)
  self.assertItemsEqual(states, ["ESTABLISHED", "LISTEN"])
def testProcessListingFilterConnectionState(self):
  """Only processes in the requested connection states are returned."""
  client_id = test_lib.TEST_CLIENT_ID
  closed_proc = rdf_client.Process(
      pid=2,
      ppid=1,
      cmdline=["cmd.exe"],
      exe="c:\\windows\\cmd.exe",
      ctime=long(1333718907.167083 * 1e6),
      connections=rdf_client.NetworkConnection(
          family="INET", state="CLOSED"))
  listen_proc = rdf_client.Process(
      pid=3,
      ppid=1,
      cmdline=["cmd2.exe"],
      exe="c:\\windows\\cmd2.exe",
      ctime=long(1333718907.167083 * 1e6),
      connections=rdf_client.NetworkConnection(
          family="INET", state="LISTEN"))
  established_proc = rdf_client.Process(
      pid=4,
      ppid=1,
      cmdline=["missing_exe.exe"],
      ctime=long(1333718907.167083 * 1e6),
      connections=rdf_client.NetworkConnection(
          family="INET", state="ESTABLISHED"))
  client_mock = action_mocks.ListProcessesMock(
      [closed_proc, listen_proc, established_proc])

  flow_urn = flow.GRRFlow.StartFlow(
      client_id=client_id,
      flow_name=flow_processes.ListProcesses.__name__,
      connection_states=["ESTABLISHED", "LISTEN"],
      token=self.token)
  for flow_session in flow_test_lib.TestFlowHelper(
      flow_urn, client_mock, client_id=client_id, token=self.token):
    session_id = flow_session

  processes = flow.GRRFlow.ResultCollectionForFID(session_id)
  # The CLOSED process is filtered out; the other two remain.
  self.assertEqual(len(processes), 2)
  states = set(
      str(process.connections[0].state) for process in processes)
  self.assertItemsEqual(states, ["ESTABLISHED", "LISTEN"])
def testEmailPluginSendsEmailPerEveyBatchOfResponses(self):
  """Processing one batch of responses produces exactly one email."""
  self.ProcessResponses(
      plugin_args=email_plugin.EmailOutputPluginArgs(
          email_address=self.email_address),
      responses=[rdf_client.Process(pid=42)])

  self.assertEqual(len(self.email_messages), 1)

  msg = self.email_messages[0]
  self.assertEqual(msg["address"], self.email_address)
  # Title references the results collection; body references the client.
  self.assertTrue(
      "got a new result in %s" % self.results_urn in msg["title"])
  self.assertTrue(utils.SmartStr(self.client_id) in msg["message"])
  self.assertTrue(utils.SmartStr(self.hostname) in msg["message"])
def testFindNoRunningLogserver(self):
  """The logserver check fires only while no rsyslogd process exists."""
  chk_id = "CIS-SERVICE-LOGSERVER-RUNNING"
  exp = "Missing attribute: Logging software is not running."
  found = ["Expected state was not found"]
  host_data = self.GenHostData()

  # Without rsyslog the anomaly must be detected.
  results = self.RunChecks(host_data)
  self.assertCheckDetectedAnom(chk_id, results, exp, found)

  # With rsyslog running the anomaly must clear.
  rsyslog = rdf_client.Process(name="rsyslogd", pid=1236)
  host_data["ListProcessesGrr"]["PARSER"].append(rsyslog)
  results = self.RunChecks(host_data)
  self.assertCheckUndetected(chk_id, results)
def testProcessListingOnlyFleetspeak(self):
  """Test that the ListProcesses flow works with Fleetspeak."""
  client_mock = action_mocks.ListProcessesMock([
      rdf_client.Process(
          pid=2,
          ppid=1,
          cmdline=["cmd.exe"],
          exe=r"c:\windows\cmd.exe",
          ctime=1333718907167083L)
  ])
  # Messages delivered through the fake Fleetspeak service end up here,
  # where the client mock picks them up as its task queue.
  client_mock.mock_task_queue = []

  def SendCallback(fs_msg):
    # Unwrap the Fleetspeak envelope back into a GrrMessage and queue it
    # for the mocked client.
    pb_msg = jobs_pb2.GrrMessage()
    fs_msg.data.Unpack(pb_msg)
    msg = rdf_flows.GrrMessage.FromSerializedString(
        pb_msg.SerializeToString())
    client_mock.mock_task_queue.append(msg)

  service_name = "GRR"
  fake_service_client = _FakeGRPCServiceClient(
      service_name, send_callback=SendCallback)

  # Point the global Fleetspeak connector at the fake service client.
  # Reset() first so Init() does not see a stale connection from an
  # earlier test.
  fleetspeak_connector.Reset()
  fleetspeak_connector.Init(service_client=fake_service_client)

  # Wrap InsertMessage as a spy (wraps= keeps the real behavior) so we
  # can assert that the flow's messages actually went via Fleetspeak.
  with mock.patch.object(
      fake_service_client.outgoing,
      "InsertMessage",
      wraps=fake_service_client.outgoing.InsertMessage):
    flow_urn = flow.GRRFlow.StartFlow(
        client_id=self.client_id,
        flow_name=flow_processes.ListProcesses.__name__,
        token=self.token)
    for s in flow_test_lib.TestFlowHelper(
        flow_urn, client_mock, client_id=self.client_id, token=self.token):
      session_id = s

    fleetspeak_connector.CONN.outgoing.InsertMessage.assert_called()

  # Check the output collection
  processes = flow.GRRFlow.ResultCollectionForFID(session_id)
  self.assertEqual(len(processes), 1)
  process, = processes
  self.assertEqual(process.ctime, 1333718907167083L)
  self.assertEqual(process.cmdline, ["cmd.exe"])