def test_clear(self):
    process = Process(tme=1, pid=2)
    process2 = Process(tme=2, pid=2)
    process3 = Process(tme=0, pid=2)
    process4 = Process(tme=0, pid=3)
    self.object_cache.add_data(data=process)
    self.object_cache.add_data(data=process2)
    self.object_cache.add_data(data=process3)
    self.object_cache.add_data(data=process4)
    self.assertEqual(
        len(self.object_cache.object_cache), 2,
        "object cache should contain two different categories")
    self.assertEqual(len(self.object_cache.faulty_nodes), 0,
                     "object cache should not have faulty nodes")
    self.assertEqual(len(self.object_cache.unfound), 0,
                     "object cache should not have unfound nodes")

    self.object_cache.unfound.add(process)
    self.object_cache.clear()
    self.assertEqual(len(self.object_cache.object_cache), 0,
                     "object cache should be empty")
    self.assertEqual(len(self.object_cache.faulty_nodes), 0,
                     "faulty nodes should be empty")
    self.assertEqual(len(self.object_cache.unfound), 0,
                     "unfound should be empty")

def _add_piece(self, process_dict=None):
    self._changed = True
    _process_cache = self._process_cache
    if int(process_dict.get("gpid", 0)) > 0:
        if "exit" in process_dict.get("state", None):
            try:
                matching_process = _process_cache.get_data(
                    # we are in an exit event, so tme means exit_tme
                    value=int(process_dict.get("tme", 0)),
                    key=int(process_dict.get("pid", 0)))
            except DataNotInCacheException:
                _process_cache.add_data(
                    data=Process.from_process_event(**process_dict))
            else:
                if matching_process is not None:
                    try:
                        matching_process.addProcessEvent(**process_dict)
                    except ProcessMismatchException as exception:
                        logging.getLogger(
                            self.__class__.__name__).warning(exception)
                        _process_cache.add_data(
                            data=Process.from_process_event(**process_dict))
                    else:
                        _process_cache.remove_data(
                            data=matching_process, key=matching_process.pid)
                        is_finished, job = self._finish_process(
                            process=matching_process)
                        if not is_finished and job is None:
                            _process_cache.unfound.add(matching_process)
                        elif is_finished:
                            self._data.remove_data(data=job, key=job.gpid)
                            return job
                else:
                    process = Process.from_process_event(**process_dict)
                    logging.getLogger(self.__class__.__name__).warning(
                        "received exit event of process before actual start event: %s"
                        % process)
                    _process_cache.add_data(data=process)
        else:
            process = Process.from_process_event(**process_dict)
            if self.job_root_name in process.name:
                # create a new dummy job
                self._data.add_data(
                    data=Job(workernode=self.workernode,
                             run=self.run,
                             tme=process.tme,
                             gpid=process.gpid,
                             job_id=process.batchsystemId,
                             configuration=self.configuration,
                             data_source=self.data_source),
                    key=process.gpid,
                    value=process.tme)
            _process_cache.add_data(data=process)

def test_last_tme(self):
    self.assertIsNone(self.empty_job.last_tme)
    self.empty_job.add_process(process=Process(
        tme=1, exit_tme=3, name="sge_shepherd", cmd="sge_shepherd",
        pid="3", ppid="0", gpid=3))
    self.assertEqual(self.empty_job.last_tme, 3)
    self.empty_job.add_process(process=Process(
        name="test", pid="1", ppid="0", gpid="3", tme=2, exit_tme=4))
    self.assertEqual(self.empty_job.last_tme, 4)

def test_updateIndex(self):
    process = Process(tme=1, pid=2, name="old")
    process2 = Process(tme=1, pid=2, name="new")
    self.object_cache.add_data(data=process)
    index = self.object_cache.data_index(value=process.tme, key=process.pid)
    self.object_cache.object_cache[process.pid][index] = process2
    newProcess = self.object_cache.get_data(value=process.tme, key=process.pid)
    self.assertEqual(process2.name, newProcess.name)

def test_complete_job(self):
    process = Process(name="sge_shepherd", cmd="sge_shepherd",
                      pid="1", ppid="0", gpid="1")
    process2 = Process(name="test", pid="2", ppid="1", gpid="1")
    self.empty_job.add_process(process)
    self.assertEqual(True, self.empty_job.is_complete())

    job2 = Job()
    job2.add_process(process2)
    job2.add_process(process)
    self.assertEqual(True, job2.is_complete())

def test_removeObject(self):
    process = Process(tme=1, pid=2)
    process2 = Process(tme=2, pid=2)
    process3 = Process(tme=0, pid=2)
    process4 = Process(tme=0, pid=3)
    self.object_cache.add_data(data=process)
    self.assertEqual(len(self.object_cache.object_cache), 1,
                     "object cache should not be empty")
    self.object_cache.remove_data(data=process)
    self.assertEqual(len(self.object_cache.object_cache), 0,
                     "object cache should be empty")
    self.object_cache.add_data(data=process2)

def test_incomplete_job(self):
    process_root = Process(name="sge_shepherd", cmd="sge_shepherd",
                           pid="3", ppid="0", gpid=3)
    process = Process(name="test", pid="1", ppid="0", gpid="3")
    process2 = Process(name="test2", pid="2", ppid="1", gpid="3")
    self.empty_job.add_process(process=process)
    self.empty_job.add_process(process=process2)
    self.assertEqual(False, self.empty_job.is_complete())

    job2 = Job()
    job2.add_process(process=process2)
    job2.add_process(process=process_root)
    self.assertEqual(False, job2.is_complete())

def test_update(self):
    process = Process(tme=1, pid=2)
    self.object_cache.add_data(data=process)
    theProcess = self.object_cache.get_data(value=process.tme, key=process.pid)
    theProcess.name = "test"
    newProcess = self.object_cache.get_data(value=process.tme, key=process.pid)
    self.assertEqual("test", newProcess.name, "name is not identical")

def test_insertRemove(self):
    process = Process(tme=1, pid=2)
    process2 = Process(tme=2, pid=2)
    process3 = Process(tme=0, pid=2)
    process4 = Process(tme=0, pid=3)

    self.assertEqual(len(self.object_cache.object_cache), 0,
                     "object cache not empty")
    self.object_cache.add_data(data=process)
    self.assertEqual(len(self.object_cache.object_cache), 1,
                     "object cache should contain one process")
    loadedProcess = self.object_cache.get_data(value=process.tme, key=process.pid)
    self.assertIsNotNone(loadedProcess, "No object loaded from cache")
    self.assertEqual(process, loadedProcess, "objects should be identical")
    self.object_cache.remove_data(data=process)
    self.assertEqual(len(self.object_cache.object_cache), 0,
                     "object cache not empty")

    self.object_cache.add_data(data=process)
    self.object_cache.add_data(data=process2)
    self.object_cache.add_data(data=process3)
    self.object_cache.add_data(data=process4)
    self.assertEqual(
        len(self.object_cache.object_cache), 2,
        "object cache should contain two different categories")

    loadedProcess = self.object_cache.get_data(value=process2.tme, key=process2.pid)
    self.assertEqual(process2, loadedProcess, "objects should be identical")
    loadedProcess = self.object_cache.get_data(value=process3.tme, key=process3.pid)
    self.assertEqual(process3, loadedProcess, "objects should be identical")
    loadedProcess = self.object_cache.get_data(value=process.tme, key=process.pid)
    self.assertEqual(process, loadedProcess, "objects should be identical")
    loadedProcess = self.object_cache.get_data(value=process4.tme, key=process4.pid)
    self.assertEqual(process4, loadedProcess, "objects should be identical")

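# Illustrative sketch only: the cache tests above imply a container that groups
# objects by a key (the pid) and looks a matching entry up again via a value
# (the tme). The class below is an assumption-based stand-in that makes this
# contract explicit; it is NOT the project's ObjectCache implementation, and the
# name SimpleObjectCache is hypothetical.
class SimpleObjectCache(object):
    def __init__(self):
        self.object_cache = {}    # key (pid) -> list of cached objects
        self.faulty_nodes = set()
        self.unfound = set()

    def add_data(self, data=None):
        self.object_cache.setdefault(data.pid, []).append(data)

    def data_index(self, value=None, key=None):
        # index of the entry whose tme lies closest to the requested value
        bucket = self.object_cache[key]
        return min(range(len(bucket)), key=lambda i: abs(bucket[i].tme - value))

    def get_data(self, value=None, key=None):
        return self.object_cache[key][self.data_index(value=value, key=key)]

    def remove_data(self, data=None, key=None):
        key = key if key is not None else data.pid
        bucket = self.object_cache[key]
        bucket.remove(data)
        if not bucket:
            del self.object_cache[key]

    def clear(self):
        self.object_cache.clear()
        self.faulty_nodes.clear()
        self.unfound.clear()
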
def test_cache_deletion(self):
    job = Job()
    process_root = Process(name="sge_shepherd", cmd="sge_shepherd",
                           pid="3", ppid="0", gpid=3)
    job.add_process(process=process_root)
    self.assertEqual(1, job.process_count())
    job.clear_caches()
    self.assertEqual(0, job.process_count())

def test_twoTrees(self):
    tree1_process1 = Process(tme=1, pid=1, ppid=0, name="sge_shepherd",
                             cmd="sge_shepherd", valid=1)
    tree1_process2 = Process(tme=1, pid=2, ppid=1, valid=1)
    tree1_process3 = Process(tme=1, pid=3, ppid=1, valid=1)
    tree1_process4 = Process(tme=1, pid=4, ppid=2, valid=1)
    tree2_process1 = Process(tme=1, pid=10, ppid=0, valid=1)
    tree2_process2 = Process(tme=1, pid=11, ppid=10, valid=1)
    tree2_process3 = Process(tme=1, pid=12, ppid=10, valid=1)

    self.jobParser.add_piece(piece=tree1_process1)
    self.jobParser.add_piece(piece=tree1_process2)
    self.jobParser.add_piece(piece=tree1_process3)
    self.jobParser.add_piece(piece=tree1_process4)
    self.jobParser.add_piece(piece=tree2_process1)
    self.jobParser.add_piece(piece=tree2_process2)
    self.jobParser.add_piece(piece=tree2_process3)
    self.assertIsNone(self.jobParser.data.tree,
                      "JobParser should not return tree")

def _piece_from_dict(self, data_dict=None):
    return Process(**data_dict)

class TestProcessFunctions(unittest.TestCase):
    def setUp(self):
        self.process = Process()
        self.header = {
            "tme": 0,
            "pid": 1,
            "ppid": 2,
            "uid": 3,
            "name": 4,
            "cmd": 5,
            "exit_code": 6,
            "state": 7,
            "gpid": 8
        }

    def test_setUp(self):
        self.assertEqual(False, self.process.valid)
        self.assertEqual(self.process.pid, 0)
        self.assertEqual(self.process.ppid, 0)
        self.assertEqual(self.process.gpid, 0)
        self.assertEqual(self.process.uid, 0)
        self.assertEqual(self.process.tme, 0)
        self.assertEqual(self.process.traffic, [])

    def test_exit_code(self):
        process = Process(exit_code=1)
        self.assertEqual(process.exit_code, 0)  # TODO: why is here an output at all?
        self.assertEqual(process.error_code, 0)
        self.assertEqual(process.signal, 1)

        process = Process(exit_code="256")
        self.assertEqual(process.error_code, 1)
        self.assertEqual(process.signal, 0)

        process = Process(exit_code=257)
        self.assertEqual(process.error_code, 1)
        self.assertEqual(process.signal, 1)

    def test_process_events(self):
        row_start = [
            "1406555483", "9939", "9881", "0", "(sge_shepherd)",
            "sge_shepherd-5073566", "0", ".", "9939"
        ]
        row_valid_start = [
            "1406555483", "9939", "9881", "0", "(sge_shepherd)",
            "sge_shepherd-5073566", "0", "fork", "9939"
        ]
        row_end = [
            "1406556210", "9940", "9881", "0", "(sge_shephrd)",
            "sge_shepherd-5073567", "0", "exit", "9939"
        ]
        row_end2 = [
            "1406556210", "9940", "9881", "0", "(sge_shephrd)",
            "sge_shepherd-5073566", "0", "exit", "9939"
        ]
        row_valid_end = [
            "1406556210", "9939", "9881", "0", "(sge_shepherd)",
            "sge_shepherd-5073566", "0", "exit", "9939"
        ]

        data_start_dict = {}
        data_valid_start_dict = {}
        data_end_dict = {}
        data_end_dict2 = {}
        data_valid_end_dict = {}
        for key in self.header:
            data_start_dict[key] = row_start[self.header[key]]
            data_valid_start_dict[key] = row_valid_start[self.header[key]]
            data_end_dict[key] = row_end[self.header[key]]
            data_end_dict2[key] = row_end2[self.header[key]]
            data_valid_end_dict[key] = row_valid_end[self.header[key]]

        self.process.addProcessEvent(**data_start_dict)
        self.process.addProcessEvent(**data_valid_end_dict)
        self.assertEqual(False, self.process.valid)

        process = Process()
        process.addProcessEvent(**data_valid_end_dict)
        self.assertEqual(process.name, data_valid_end_dict["name"])
        process.addProcessEvent(**data_start_dict)
        self.assertEqual(False, process.valid)

        valid_process = Process()
        valid_process.addProcessEvent(**data_valid_start_dict)
        valid_process.addProcessEvent(**data_valid_end_dict)
        self.assertEqual(True, valid_process.valid)

        valid_process2 = Process()
        valid_process2.addProcessEvent(**data_valid_end_dict)
        valid_process2.addProcessEvent(**data_valid_start_dict)
        self.assertEqual(True, valid_process2.valid)

        process = Process()
        process.addProcessEvent(**data_valid_start_dict)
        self.assertRaises(ProcessMismatchException,
                          process.addProcessEvent, **data_end_dict)

        process = Process()
        process.addProcessEvent(**data_valid_start_dict)
        self.assertRaises(ProcessMismatchException,
                          process.addProcessEvent, **data_end_dict2)

    def test_event_conversion(self):
        row_string = "1405011331,1405065581,30726,7733,30726,0,(sge_shepherd)," \
                     "sge_shepherd-4165419,0,0,1,,,,,0,,,exit"
        row_header = "tme,exit_tme,pid,ppid,gpid,uid,name,cmd,error_code,signal,valid," \
                     "int_in_volume,int_out_volume,ext_in_volume,ext_out_volume,tree_depth," \
                     "process_type,color,state"
        process = Process(
            **dict(zip(row_header.split(","), row_string.split(","))))
        self.assertEqual(
            process.toProcessEvent(), {
                "tme": 1405065581,
                "name": "(sge_shepherd)",
                "cmd": "sge_shepherd-4165419",
                "pid": 30726,
                "ppid": 7733,
                "uid": 0,
                "gpid": 30726,
                "state": "exit",
                "exit_code": 0
            })

        row_string = "1405011331,,30726,7733,30726,0,(sge_shepherd)," \
                     "sge_shepherd-4165419,0,0,1,,,,,0,,,."
        process = Process(
            **dict(zip(row_header.split(","), row_string.split(","))))
        self.assertEqual(
            process.toProcessEvent(), {
                "tme": 1405011331,
                "name": "(sge_shepherd)",
                "cmd": "sge_shepherd-4165419",
                "pid": 30726,
                "ppid": 7733,
                "uid": 0,
                "gpid": 30726,
                "state": ".",
            })

    def test_process_from_row(self):
        row_string = "1405011331,1405065581,30726,7733,30726,0,(sge_shepherd)," \
                     "sge_shepherd-4165419,1,1,1,,,,,0,,,exit"
        row_header = "tme,exit_tme,pid,ppid,gpid,uid,name,cmd,error_code,signal,valid," \
                     "int_in_volume,int_out_volume,ext_in_volume,ext_out_volume,tree_depth," \
                     "process_type,color,state"
        process = Process.from_dict(
            dict(zip(row_header.split(","), row_string.split(","))))
        self.assertIsNotNone(process)
        self.assertEqual(process.getDuration(), 54250)
        self.assertEqual(process.getHeader(), row_header)
        self.assertEqual(process.getRow(), row_string)
        self.assertEqual(process.error_code, 1)
        self.assertEqual(process.signal, 1)
        self.assertEqual(process.exit_code, 257)

        row_string = "1405011331,,30726,7733,30726,0,(sge_shepherd)," \
                     "sge_shepherd-4165419,0,0,1,,,,,0,,,exit"
        process = Process.from_dict(
            dict(zip(row_header.split(","), row_string.split(","))))
        self.assertIsNotNone(process)
        self.assertEqual(process.getDuration(), 0)
        self.assertEqual(process.getHeader(), row_header)
        self.assertEqual(
            process.getRow(),
            '1405011331,0,30726,7733,30726,0,(sge_shepherd),sge_shepherd-4165419,0,0,1,,,,,0,,,exit'
        )

        row_string = "1405011331,1405065581,30726,7733,30726,0,(sge_shepherd)," \
                     "sge_shepherd-4165419,,,1,,,,,0,,,exit"
        process = Process.from_dict(
            dict(zip(row_header.split(","), row_string.split(","))))
        self.assertEqual(process.signal, 0)
        self.assertEqual(process.error_code, 0)

        row_string = "1405011331,1405065581,30726,7733,30726,0,(sge_shepherd)," \
                     "sge_shepherd-4165419,sth,,1,,,,,0,,,exit"
        self.assertRaises(
            ValueError, Process.from_dict,
            dict(zip(row_header.split(","), row_string.split(","))))

        row_string = "1405011331,1405065581,30726,7733,30726,0,(sge_shepherd)," \
                     "sge_shepherd-4165419,,,1,,,,,0,,,exit"
        row_header = "tme,exit_tme,pid,ppid,gpid,uid,name,cmd,error_code,signal,valid," \
                     "int_in_volume,int_out_volume,ext_in_volume,ext_out_volume,tree_depth," \
                     "process_type,color,states"
        self.assertRaises(
            ArgumentNotDefinedException, Process.from_dict,
            dict(zip(row_header.split(","), row_string.split(","))))

    def test_traffic(self):
        # TODO: test traffic stuff
        pass

    def test_batchsystem_id(self):
        row_string = "1405011331,,30726,7733,30726,0,(sge_shepherd)," \
                     "sge_shepherd-4165419,0,0,1,,,,,0,,,exit"
        row_header = "tme,exit_tme,pid,ppid,gpid,uid,name,cmd,error_code,signal,valid," \
                     "int_in_volume,int_out_volume,ext_in_volume,ext_out_volume,tree_depth," \
                     "process_type,color,state"
        process = Process(
            **dict(zip(row_header.split(","), row_string.split(","))))
        self.assertEqual("sge_shepherd-4165419", process.cmd)
        self.assertEqual(4165419, process.batchsystemId)

        row_string = "1405011331,,30726,7733,30726,0,(sge_shepherd)," \
                     ",0,0,1,,,,,0,,,exit"
        process = Process(
            **dict(zip(row_header.split(","), row_string.split(","))))
        self.assertIsNone(process.batchsystemId)

    def test_repr(self):
        row_string = "1405011331,,30726,7733,30726,0,(sge_shepherd)," \
                     "sge_shepherd-4165419,0,0,1,,,,,0,,,exit"
        row_header = "tme,exit_tme,pid,ppid,gpid,uid,name,cmd,error_code,signal,valid," \
                     "int_in_volume,int_out_volume,ext_in_volume,ext_out_volume,tree_depth," \
                     "process_type,color,state"
        process = Process(
            **dict(zip(row_header.split(","), row_string.split(","))))
        self.assertEqual(
            "Process: name ((sge_shepherd)), cmd (sge_shepherd-4165419), pid (30726), "
            "ppid (7733), uid (0), gpid (30726), valid (1), tme (1405011331), "
            "exit_tme (0), state (exit), error_code (0), signal (0), job_id (None), "
            "tree_depth (0), process_type (), color (), int_in_volume (), "
            "int_out_volume (), ext_in_volume (), ext_out_volume ()",
            process.__repr__())

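# Illustrative sketch only: test_exit_code and test_process_from_row above
# imply that Process stores its exit status in the POSIX wait-status layout,
# i.e. exit_code = error_code * 256 + signal. The helper below merely
# demonstrates that decomposition; split_exit_code is a hypothetical name and
# not part of the library.
def split_exit_code(exit_code):
    """Return (error_code, signal) for a raw exit code, as the tests expect."""
    exit_code = int(exit_code)
    return exit_code // 256, exit_code % 256

# Mirrors the assertions in test_exit_code:
assert split_exit_code(1) == (0, 1)
assert split_exit_code("256") == (1, 0)
assert split_exit_code(257) == (1, 1)
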
def _create_process(self, row=None, header_cache=None):
    process = Process.from_dict(row=dict(zip(header_cache, row)))
    self._process_cache.add_data(data=process)
    return process

def test_oneNodeWithRoot(self):
    process = Process(tme=1, pid=2, ppid=1, name="sge_shepherd",
                      cmd="sge_shepherd", valid=1)
    self.jobParser.add_piece(piece=process)
    self.assertTrue(self.jobParser.data.is_valid(),
                    "JobParser should return True")
    self.assertIsNotNone(self.jobParser.data.tree,
                         "Tree should not be None")

def parseRow(self, row=None, headerCache=None, tme=None):
    if "state" in headerCache:
        if "exit" in row[headerCache['state']]:
            # load the process and set its exit arguments,
            # afterwards remove it from the cache
            pid = int(row[headerCache['pid']])
            process_index = self._process_cache.data_index(value=tme, key=pid)
            try:
                process = self._process_cache.object_cache[pid][process_index]
                if (row[headerCache['name']] not in process.name and
                        row[headerCache['cmd']] not in process.cmd):
                    # wrong process selected!
                    logging.getLogger(self.__class__.__name__).warning(
                        "process %s has not been logged", row)
                    process = Process(name=row[headerCache['name']],
                                      cmd=row[headerCache['cmd']],
                                      pid=row[headerCache['pid']],
                                      ppid=row[headerCache['ppid']],
                                      uid=row[headerCache['uid']])
                    self._process_cache.add_data(data=process)
                    process_index = self._process_cache.data_index(
                        value=tme, key=pid)
            except KeyError:
                # exit event received first
                process = Process()
                process.addProcessEvent(
                    name=row[headerCache['name']],
                    cmd=row[headerCache['cmd']],
                    pid=row[headerCache['pid']],
                    ppid=row[headerCache['ppid']],
                    uid=row[headerCache['uid']],
                    tme=row[headerCache['tme']],
                    exit_code=row[headerCache['exit_code']],
                    gpid=row[headerCache['gpid']],
                    state=row[headerCache['state']])
                self._process_cache.add_data(data=process)
                process_index = self._process_cache.data_index(value=tme, key=pid)
            else:
                process.addProcessEvent(
                    tme=row[headerCache['tme']],
                    exit_code=row[headerCache['exit_code']],
                    state=row[headerCache['state']])
            try:
                job = self._operator.getJob(tme=tme,
                                            gpid=int(row[headerCache['gpid']]))
            except BasicException:
                # the job is currently not known, so remember as unknown
                self._process_cache.unfound.add(process)
                self._process_cache.remove_data(data=process, key=pid)
            except Exception:
                # the job is currently not known, so remember as unknown
                self._process_cache.unfound.add(process)
                self._process_cache.remove_data(data=process, key=pid)
            else:
                # the job has been found, so save the current data
                self._finish_process(job=job, process=process)
        else:
            # a new process becomes known: it has just been started,
            # so create and remember it
            process = self._create_process(row=row, header_cache=headerCache)
            if "sge_shepherd" in process.cmd:
                # a new pilot is starting
                try:
                    job = self._operator.getJob(
                        tme=tme,
                        gpid=int(row[headerCache['gpid']]),
                        batchsystemId=process.batchsystemId)
                    if job.exit_tme and (int(job.exit_tme) < int(tme)):
                        self._operator.createJob(
                            tme=tme,
                            gpid=int(row[headerCache['gpid']]),
                            batchsystemId=process.batchsystemId)
                    else:
                        logging.getLogger(self.__class__.__name__).error(
                            "ATTENTION: job was not created as it already seems to be "
                            "existent - job_id from DB %d vs CSV %d",
                            job.job_id, process.batchsystemId)
                except Exception:
                    self._operator.createJob(
                        tme=tme,
                        gpid=int(row[headerCache['gpid']]),
                        batchsystemId=process.batchsystemId)
    else:
        # no state column: just load the object
        self._create_process(row=row, header_cache=headerCache)

def test_oneNodeWithoutRoot(self):
    process = Process(tme=1, pid=2, ppid=1, valid=1)
    self.jobParser.add_piece(piece=process)
    self.assertFalse(self.jobParser.data.is_valid(),
                     "JobParser should return False")
    self.assertEqual(None, self.jobParser.data.tree, "Tree should be None")