def test_a_InstallAndBasicUsage(self):
    """Run the install-and-basic-usage documentation snippets end to end.

    The START/STOP marker comments are parsed by the doc tooling — keep
    their text exactly as-is.
    """
    from Ganga.GPI import Job, jobs

    # -- INSTALLANDBASICUSAGE HELP START
    help(Job)
    # -- INSTALLANDBASICUSAGE HELP STOP

    # -- INSTALLANDBASICUSAGE SUBMIT START
    j = Job()
    j.submit()
    # -- INSTALLANDBASICUSAGE SUBMIT STOP

    # -- INSTALLANDBASICUSAGE JOBS START
    jobs(0)
    # -- INSTALLANDBASICUSAGE JOBS STOP

    # -- INSTALLANDBASICUSAGE JOBSAPP START
    jobs(0).application
    # -- INSTALLANDBASICUSAGE JOBSAPP STOP

    # -- INSTALLANDBASICUSAGE EXECFILE START
    # FIX: use a context manager so the script is flushed and closed before
    # execfile() reads it back; the original leaked the handle and relied on
    # CPython refcounting to close it in time.
    with open('submit.py', 'w') as script_file:
        script_file.write("""
j = Job()
j.submit()
""")
    execfile('submit.py')
def test_d_TestCorruptXML(self):
    """Corrupt a subjob XML file on disk and check nothing repairs it by accident."""
    # Corrupt the XML file
    from Ganga.GPI import jobs, Job
    assert isinstance(jobs(0).subjobs(0), Job)
    XMLFileName = getSJXMLFile(jobs(0).subjobs(0))
    unlink(XMLFileName)
    assert not path.isfile(XMLFileName)
    # Overwrite the subjob XML with known-bad content
    with open(XMLFileName, 'w') as handler:
        handler.write(badStr)
        handler.flush()
    # Keep an identical copy of the bad content for the comparison below
    from tempfile import NamedTemporaryFile
    with NamedTemporaryFile(delete=False) as myTempfile:
        myTempfile.write(badStr)
        myTempfile.flush()
        myTempName = myTempfile.name
    from Ganga.GPIDev.Base.Proxy import stripProxy
    # Neither object may be dirty: a dirty object would be flushed and would
    # silently repair the corrupted file before the next test reads it.
    assert stripProxy(jobs(0).subjobs(0))._dirty is False
    assert stripProxy(jobs(0))._dirty is False
    # The corrupt file must still hold exactly the bad content we wrote
    assert open(XMLFileName, 'r').read() == open(myTempName, 'r').read()
    unlink(myTempName)
    global global_AutoStartReg
    global_AutoStartReg = True
def test_Savannah31691(self):
    """Workspace dirs appear for new jobs and vanish when a job is removed."""
    from Ganga.GPI import config, Job, jobs

    config['Configuration']['autoGenerateJobWorkspace'] = True

    import Ganga.Runtime.Workspace_runtime
    localDir = Ganga.Runtime.Workspace_runtime.getLocalRoot()

    # Create 5 default jobs, then list content of local workspace
    for _ in range(5):
        Job()
    workspace_entries = os.listdir(localDir)
    for job_id in range(5):
        assert str(job_id) in workspace_entries

    # Delete job 0, then try again to list content of local workspace
    jobs(0).remove()
    workspace_entries = os.listdir(localDir)
    for job_id in range(1, 5):
        assert str(job_id) in workspace_entries
    assert '0' not in workspace_entries
def test_Savannah14799(self): from Ganga.GPI import Job, jobtree, jobs from Ganga.GPIDev.Base.Proxy import stripProxy j = Job() jobtree.add(j) self.assertNotEqual( stripProxy(j)._getRegistry(), stripProxy(jobtree)._getRegistry()) self.assertTrue(str(j.id) in jobtree.listjobs()) jt2 = jobtree.copy() self.assertTrue(str(j.id) in jt2.listjobs()) jobs(j.id).remove() jt2.cleanlinks() self.assertFalse(str(j.id) in jobtree.listjobs()) print jt2.listjobs() print jt2 self.assertFalse(str(j.id) in jt2.listjobs()) jt3 = jobtree.copy() l1 = jobtree.listjobs() l3 = jt3.listjobs() l1.sort() l3.sort() self.assertEqual(l1, l3)
def test_d_GetNonSchemaAttr(self):
    """ Don't load a job looking at non-Schema objects"""
    from Ganga.GPI import jobs
    from Ganga.GPIDev.Base.Proxy import stripProxy
    raw_j = stripProxy(jobs(0))
    # Obtaining the raw object must not trigger a load from disk
    assert raw_j._getRegistry().has_loaded(raw_j) is False
    dirty_status = raw_j._dirty
    assert dirty_status is False
    # Reading _dirty on the raw object must not trigger a load either
    assert raw_j._getRegistry().has_loaded(raw_j) is False
    try:
        # _dirty is not in the schema, so the GPI proxy must refuse it
        non_object = jobs(0)._dirty
        raise Exception("Shouldn't be here")
    except AttributeError:
        pass
    assert raw_j._getRegistry().has_loaded(raw_j) is False
    # printSummaryTree needs the full job data, so it finally forces the load
    raw_j.printSummaryTree()
    assert raw_j._getRegistry().has_loaded(raw_j) is True
def test_b_jobResubmit(self):
    """here for testing a re-submit"""
    from Ganga.GPI import jobs
    from GangaTest.Framework.utils import sleep_until_completed

    # Resubmit the first job and block until it finishes again
    first_job = jobs(0)
    first_job.resubmit()
    sleep_until_completed(first_job)
def test_d_loadSubmit(self):
    """here for testing a loaded submit"""
    from Ganga.GPI import jobs
    from GangaTest.Framework.utils import sleep_until_completed

    # Submit the freshly loaded second job and wait for completion
    loaded_job = jobs(1)
    loaded_job.submit()
    sleep_until_completed(loaded_job)
def test_a_JobConstruction(self):
    """ First construct the Job object (singular)"""
    from Ganga.Utility.Config import getConfig
    # Auto-cleanup must be off or the jobs would be wiped between tests
    self.assertFalse(getConfig('TestingFramework')['AutoCleanup'])

    from Ganga.GPI import Job, jobs
    # Build the required number of default jobs
    [Job() for _ in range(numJobs)]
    self.assertEqual(len(jobs), numJobs)

    # Don't really gain anything from assertEqual...
    jobs(9).submit()
def test_d_JobRemoval(self):
    """ Fourth make sure that we get rid of the jobs safely"""
    from Ganga.GPI import jobs
    from Ganga.Utility.Config import setConfigOption

    # Exactly one job should be left; remove it and confirm the count
    self.assertEqual(len(jobs), 1)
    jobs(0).remove()
    self.assertEqual(len(jobs), 0)

    # Re-enable automatic cleanup for whatever runs next
    setConfigOption('TestingFramework', 'AutoCleanup', 'True')
def test_d_JobRemoval(self):
    """ Fourth make sure that we get rid of the jobs safely"""
    from Ganga.GPI import jobs
    from Ganga.Utility.Config import setConfigOption

    # One job is expected from the earlier tests; remove it
    self.assertEqual(len(jobs), 1)
    jobs(0).remove()
    self.assertEqual(len(jobs), 0)

    # Restore the cleanup option to the value captured at module start
    setConfigOption('TestingFramework', 'AutoCleanup', default_CleanUp)
def test_b_SJCompleted(self):
    """ Test the subjobs complete """
    from Ganga.GPI import jobs
    from GangaTest.Framework.utils import sleep_until_completed

    assert len(jobs) == 1
    assert len(jobs(0).subjobs) == TestSJSubmit.n_subjobs

    # Block until the master job (and therefore all subjobs) finishes
    sleep_until_completed(jobs(0))

    # Every subjob must have reached the 'completed' state
    assert all(sj.status in ['completed'] for sj in jobs(0).subjobs)
def test_f_testXMLIndex(self):
    """The on-disk index must equal a freshly computed index cache."""
    # Check XML Index content
    from Ganga.Core.GangaRepository.PickleStreamer import to_file, from_file
    from Ganga.GPI import jobs
    from Ganga.GPIDev.Base.Proxy import stripProxy, getName

    j = jobs(0)
    assert path.isfile(getIndexFile(j))

    # Unpickle the stored (category, class, cache) tuple from disk
    with open(getIndexFile(j)) as handler:
        obj, errs = from_file(handler)
    assert isinstance(obj, tuple)

    # Rebuild the expected tuple from the live registry
    raw_j = stripProxy(j)
    index_cache = raw_j._getRegistry().getIndexCache(raw_j)
    assert isinstance(index_cache, dict)
    expected_index = (raw_j._category, getName(raw_j), index_cache)

    assert expected_index == obj
def test_c_XMLAutoUpdated(self):
    """Dirtying a job attribute must make the auto-flusher rewrite its XML."""
    # Check they get updated
    from Ganga.GPI import jobs
    from Ganga.Utility.Config import getConfig

    j = jobs(0)
    XMLFileName = getXMLFile(j)
    last_update = stat(XMLFileName)

    # Dirty the job so the auto-flusher has something to write
    j.name = testStr

    flush_timeout = getConfig('Registry')['AutoFlusherWaitTime']

    # Poll for up to twice the flusher period until the file mtime advances
    total_time = 0.
    new_update = 0
    lst_update = last_update.st_mtime
    while total_time < 2. * flush_timeout and new_update <= lst_update:
        total_time += 1.
        time.sleep(1.)
        try:
            new_update = stat(XMLFileName).st_mtime
        except OSError:
            # FIX: was a bare 'except:' which also swallowed KeyboardInterrupt;
            # only a stat failure (file mid-rewrite) should mean "not yet".
            new_update = 0.

    newest_update = stat(XMLFileName)
    assert newest_update.st_mtime > last_update.st_mtime
def testSetParentOnLoad(self):
    """ Test that the parents are set correctly on load """
    from Ganga.GPI import jobs, queues, Executable, Local
    from Ganga.GPIDev.Base.Proxy import isType, stripProxy

    def flush_full_job():
        # Dirty the master job with a random comment, then force a registry
        # flush; run concurrently below to race against subjob loading.
        mj = jobs(0)
        mj.comment = "Make sure I'm dirty " + ''.join(
            random.choice(string.ascii_uppercase) for _ in range(5))
        stripProxy(mj)._getRegistry()._flush([stripProxy(mj)])

    # Make sure the main job is fully loaded
    j = jobs(0)
    assert isType(j.application, Executable)
    assert isType(j.backend, Local)
    assert j.application.exe == "sleep"

    # fire off a load of threads to flush
    for i in range(0, 20):
        queues.add(flush_full_job)

    # Now loop over and force the load of all the subjobs
    for sj in j.subjobs:
        assert sj.splitter is None
        assert isType(sj.application, Executable)
        assert isType(sj.backend, Local)
        assert sj.application.exe == "sleep"
        assert sj.application.args == ['400']
        # Each loaded subjob (and its components) must be rooted at the master
        assert stripProxy(sj)._getRoot() is stripProxy(j)
        assert stripProxy(sj.application)._getRoot() is stripProxy(j)
def test_c_JobLoaded(self):
    """ Third do something to trigger a loading of a Job and then test if it's loaded"""
    from Ganga.GPI import jobs
    self.assertEqual(len(jobs), 1)
    j = jobs(0)
    from Ganga.GPIDev.Base.Proxy import stripProxy
    raw_j = stripProxy(j)
    ## ANY COMMAND TO LOAD A JOB CAN BE USED HERE
    raw_j.printSummaryTree()
    has_loaded_job = raw_j._getRegistry().has_loaded(raw_j)
    # The master job is loaded, but none of its subjobs should be yet
    for i in range(len(j.subjobs)):
        self.assertFalse(raw_j.subjobs.isLoaded(i))
    self.assertTrue(has_loaded_job)
    # Touching subjob 0 must load it — and only it
    stripProxy(j.subjobs(0)).printSummaryTree()
    self.assertTrue(raw_j.subjobs.isLoaded(0))
    for i in range(1, len(j.subjobs)):
        self.assertFalse(raw_j.subjobs.isLoaded(i))
def test_h_testXMLIndex(self):
    """Compare the persisted job index with one computed from the registry."""
    # Check index of job
    from Ganga.Core.GangaRepository.PickleStreamer import to_file, from_file
    from Ganga.GPI import jobs
    from Ganga.GPIDev.Base.Proxy import stripProxy, getName

    j = jobs(0)
    assert path.isfile(getIndexFile(j))

    # Read back the pickled index tuple stored on disk
    with open(getIndexFile(j)) as handler:
        obj, errs = from_file(handler)
    assert isinstance(obj, tuple)

    # Recompute what the index ought to contain
    raw_j = stripProxy(j)
    index_cache = raw_j._getRegistry().getIndexCache(raw_j)
    assert isinstance(index_cache, dict)
    this_index_cache = (raw_j._category, getName(raw_j), index_cache)

    print("just-built index: %s" % str(this_index_cache))
    print("from disk: %s" % str(obj))

    assert this_index_cache == obj
def test_i_testSJXMLIndex(self):
    """Check the subjob index on disk against freshly computed caches."""
    # Check index of all sj
    from Ganga.Core.GangaRepository.PickleStreamer import to_file, from_file
    from Ganga.GPI import jobs

    assert len(jobs) == 2

    j = jobs(0)
    # Load the pickled {subjob-id: cache} dict from disk
    with open(getSJXMLIndex(j)) as handler:
        obj, errs = from_file(handler)
    assert isinstance(obj, dict)

    from Ganga.GPIDev.Base.Proxy import stripProxy, getName
    raw_j = stripProxy(j)

    # Rebuild the expected index cache for every subjob
    new_dict = {}
    for sj in j.subjobs:
        raw_sj = stripProxy(sj)
        temp_index = raw_sj._getRegistry().getIndexCache(raw_sj)
        new_dict[sj.id] = temp_index
        assert raw_sj._category == raw_j._category

    # Compare entry by entry; 'modified' is a timestamp, so it is skipped
    for k, v in new_dict.iteritems():
        for k1, v1 in v.iteritems():
            if k1 != 'modified':
                assert obj[k][k1] == new_dict[k][k1]
def test_update(self):
    """Exercise LHCbTransform.updateQuery(), including its required failure modes."""
    from Ganga.GPI import LHCbTask, LHCbTransform, jobs

    t = LHCbTask()
    tr = LHCbTransform(application=DaVinci(), backend=Dirac())
    t.appendTransform(tr)

    # updateQuery() with no query attached must raise.
    # FIX: the original put "assert false" (a NameError!) inside the try
    # block; the bare except swallowed it, so this check could never fail.
    # An explicit flag keeps the check outside the except path.
    threw = False
    try:
        bkQueryList = [BKTestQuery(stripping20up)]
        tr.updateQuery()
    except:
        threw = True
    assert threw, 'Should have thrown exception if updated with no query'

    tr.addQuery(BKTestQuery(stripping20down))
    # Check some new data added
    assert len(tr.inputdata), 'No data added after call to update'

    # Shouldn't allow a second update before processed the data in
    # toProcess_dataset
    threw = False
    try:
        tr.updateQuery()
    except:
        threw = True
    assert threw, 'Should have thrown an error if updated with files already to process'

    # run so can update again with a removed dataset recall that jobs with the
    # old dataset only created when run called.
    t.run()
    assert len(tr.getJobs()), "No Jobs created upon run()"
    job = jobs(int(tr.getJobs()[0].fqid.split('.')[0]))
    sleep_until_state(job, 300, 'submitted')
    del tr._impl.query.dataset.files[0]
    tr.update(True)

    # Check the dead dataset is picked up
    assert len(tr._impl.removed_data.files), "Didn\'t Pick up loss of a dataset"
    job.remove()
def get_subjobs_in_time_range(jobid, fromDate=None, toDate=None):
    """Return the subjobs of job *jobid* created within the given time range.

    Both bounds are inclusive and optional: a missing bound leaves that side
    unbounded, and with no bounds at all every subjob is returned.

    :param jobid: id of the master job in the ``jobs`` registry
    :param fromDate: earliest creation timestamp to accept, or None
    :param toDate: latest creation timestamp to accept, or None
    :return: list of matching subjob proxies
    """
    subjobs = []
    for subjob in jobs(jobid).subjobs:
        timeCreated = subjob.time.timestamps['new']
        if fromDate is None and toDate is None:
            subjobs.append(subjob)
        elif fromDate is not None and toDate is not None:
            if fromDate <= timeCreated <= toDate:
                subjobs.append(subjob)
        elif fromDate is not None:
            if timeCreated >= fromDate:
                subjobs.append(subjob)
        elif toDate is not None:
            # FIX: the original had no branch for a toDate-only filter and
            # silently returned an empty list in that case.
            if timeCreated <= toDate:
                subjobs.append(subjob)
    return subjobs
def testSetParentOnLoad(self):
    """ Test that the parents are set correctly on load """
    from Ganga.GPI import jobs, queues, Executable, Local
    from Ganga.GPIDev.Base.Proxy import isType

    def flush_full_job():
        # Dirty the master job with a random comment, then flush it.
        # FIX: the original flushed [j] — a late-binding closure over the
        # proxy defined *after* this function — instead of the job it had
        # just dirtied; flush the raw implementation of mj, matching the
        # sibling testSetParentOnLoad which flushes stripProxy(mj).
        mj = jobs(0)
        mj.comment = "Make sure I'm dirty " + ''.join(random.choice(string.ascii_uppercase) for _ in range(5))
        mj._impl._getRegistry()._flush([mj._impl])

    # Make sure the main job is fully loaded
    j = jobs(0)
    assert isType(j.application, Executable)
    assert isType(j.backend, Local)
    assert j.application.exe == "sleep"

    # fire off a load of threads to flush
    for i in range(0, 20):
        queues.add(flush_full_job)

    # Now loop over and force the load of all the subjobs
    for sj in j.subjobs:
        assert sj.splitter is None
        assert isType(sj.application, Executable)
        assert isType(sj.backend, Local)
        assert sj.application.exe == "sleep"
        assert sj.application.args == ['400']
        # Each subjob's raw object must be rooted at the master job
        assert sj._impl._getRoot() is j._impl
def test_d_XMLUpdated(self):
    """Submitting a job must advance the mtime of its XML file on disk."""
    # check they get updated elsewhere
    from Ganga.GPI import jobs, disableMonitoring, enableMonitoring

    # Disable monitoring so submit() is the only thing writing the XML
    disableMonitoring()

    j = jobs(0)
    XMLFileName = getXMLFile(j)

    last_update = stat(XMLFileName)
    j.submit()
    newest_update = stat(XMLFileName)

    from GangaTest.Framework.utils import sleep_until_completed
    enableMonitoring()

    # Only wait for completion if the job is still in flight
    can_assert = False
    if j.status in ['submitted', 'running']:
        can_assert = True
        sleep_until_completed(j, 60)

    # NOTE(review): final_update is never asserted against; presumably a
    # monitoring-driven mtime check was intended — confirm before removing.
    final_update = stat(XMLFileName)

    assert newest_update.st_mtime > last_update.st_mtime
def test_i_testSJXMLIndex(self):
    """Check the subjob index on disk against freshly computed caches."""
    # Check index of all sj
    from Ganga.Core.GangaRepository.PickleStreamer import to_file, from_file
    from Ganga.GPI import jobs

    assert len(jobs) == 2

    j = jobs(0)
    # Load the pickled {subjob-id: cache} dict from disk
    with open(getSJXMLIndex(j)) as handler:
        obj, errs = from_file(handler)
    assert isinstance(obj, dict)

    from Ganga.GPIDev.Base.Proxy import stripProxy, getName
    raw_j = stripProxy(j)

    # Rebuild the expected index cache for every subjob
    new_dict = {}
    for sj in j.subjobs:
        raw_sj = stripProxy(sj)
        temp_index = raw_sj._getRegistry().getIndexCache(raw_sj)
        new_dict[sj.id] = temp_index
        assert raw_sj._category == raw_j._category

    # Compare entry by entry; 'modified' is a timestamp, so it is skipped
    for k, v in new_dict.iteritems():
        for k1, v1 in v.iteritems():
            if k1 != 'modified':
                assert obj[k][k1] == new_dict[k][k1]
def test_b_ModifyJob(self):
    """Change the stored job's backend and executable, then re-verify it."""
    from Ganga.GPI import jobs, Batch

    modified_job = jobs(0)
    modified_job.backend = Batch()
    modified_job.application.exe = 'myexecutable'

    self.check_job(modified_job)
def test_Jobs(self):
    """Neither job creation nor registry lookup may leak a raw GangaObject."""
    from Ganga.GPI import Job, jobs
    from Ganga.GPIDev.Base.Objects import GangaObject

    created = Job()
    fetched = jobs(created.id)

    # Both handles must be GPI proxies, never raw GangaObjects
    assert not isinstance(created, GangaObject)
    assert not isinstance(fetched, GangaObject)
def test_b_Savannah32342(self):
    """After splitting, every subjob must know its parent job."""
    from Ganga.GPI import jobs

    j = jobs(0)
    assert j, 'job should not be null'
    assert len(j.subjobs) == 2, 'Splitting must have occured'

    # The raw object behind each subjob must point back to a parent
    for subjob in j.subjobs:
        assert subjob._impl._getParent(), 'Parent must be set'
def test_c_SelectSJTests(self):
    """Select subjobs of a split job by status and by id."""
    from Ganga.GPI import jobs, Job, ArgSplitter
    from GangaTest.Framework.utils import sleep_until_completed

    # One subjob per entry in job_names
    j = Job(splitter=ArgSplitter(args=[[name] for name in job_names]))
    j.submit()
    assert sleep_until_completed(j, 60)

    # Status selection should return every (completed) subjob
    completed_slice = jobs(j.id).subjobs.select(status="completed")
    assert len(completed_slice) == len(job_names)

    # id selection yields a one-entry slice, addressed by subjob id
    id_slice = jobs(j.id).subjobs.select(id=2)
    assert len(id_slice) == 1
    assert id_slice[2].id == 2
def test_c_SelectSJTests(self):
    """Check subjob slice selection by status and by id."""
    from Ganga.GPI import jobs, Job, ArgSplitter
    from GangaTest.Framework.utils import sleep_until_completed

    split_job = Job(splitter=ArgSplitter(args=[[n] for n in job_names]))
    split_job.submit()
    assert sleep_until_completed(split_job, 60)

    # All subjobs should be selectable via their 'completed' status
    assert len(jobs(split_job.id).subjobs.select(status="completed")) == len(job_names)

    # Selecting on id gives a single-entry slice indexed by that id
    by_id = jobs(split_job.id).subjobs.select(id=2)
    assert len(by_id) == 1
    assert by_id[2].id == 2
def test_Jobs(self):
    """Job creation and registry lookup must both return proxy objects."""
    from Ganga.GPI import Job, jobs
    from Ganga.GPIDev.Base.Objects import GangaObject

    new_job = Job()
    looked_up = jobs(new_job.id)

    # GPI handles are proxies; raw GangaObjects must never escape
    for handle in (new_job, looked_up):
        assert not isinstance(handle, GangaObject)
def test_g_testSJXMLContent(self):
    """Each subjob XML must match the master's once volatile fields are ignored."""
    # Check SJ content
    from Ganga.Core.GangaRepository.VStreamer import to_file, from_file
    from Ganga.GPI import jobs
    from tempfile import NamedTemporaryFile
    from Ganga.GPIDev.Base.Proxy import stripProxy

    # Fields that legitimately differ between master job and subjobs
    ignore_subs = [
        'subjobs', 'time', 'backend', 'id', 'splitter', 'info', 'application',
        'inputdata'
    ]

    # Serialise the master job (minus ignored fields) for comparison below
    with NamedTemporaryFile(delete=False) as new_temp_file_a:
        temp_name_a = new_temp_file_a.name
        j = jobs(0)
        to_file(stripProxy(j), new_temp_file_a, ignore_subs)
        new_temp_file_a.flush()

    counter = 0
    for sj in j.subjobs:
        XMLFileName = getSJXMLFile(sj)
        assert path.isfile(XMLFileName)
        # Round-trip the stored subjob XML and check it is the right subjob
        with open(XMLFileName) as handler:
            tmpobj, errs = from_file(handler)
        assert hasattr(tmpobj, 'id')
        assert tmpobj.id == counter
        # Serialise this subjob the same way as the master
        with NamedTemporaryFile(delete=False) as new_temp_file:
            temp_name = new_temp_file.name
            to_file(stripProxy(sj), new_temp_file, ignore_subs)
            new_temp_file.flush()
        #import filecmp
        #assert filecmp.cmp(XMLFileName, temp_name)
        # Stripped of the ignored fields, master and subjob must serialise alike
        assert open(temp_name_a).read() == open(temp_name).read()
        unlink(temp_name)
        counter += 1

    # Make sure the loop actually covered every subjob
    assert counter == len(jobs(0).subjobs)
    unlink(temp_name_a)
def Savannah31691(self):
    """Workspace directory count tracks job creation and removal."""
    from Ganga.GPI import config, Job, jobs

    config['Configuration']['autoGenerateJobWorkspace'] = True

    import Ganga.Runtime.Workspace_runtime
    localDir = Ganga.Runtime.Workspace_runtime.getLocalRoot()

    # Create 5 default jobs, then list content of local workspace
    for _ in range(5):
        Job()
    self.assertEqual(len(os.listdir(localDir)), 5)

    # Delete job 0, then try again to list content of local workspace
    jobs(0).remove()
    self.assertEqual(len(os.listdir(localDir)), 4)
def test_b_EnableMonitoring(self):
    """With monitoring enabled, a submitted job must progress past 'submitted'."""
    from Ganga.GPI import enableMonitoring, Job, jobs

    enableMonitoring()

    monitored_job = Job()
    monitored_job.submit()

    # Give the monitoring loop time to act on the job
    dummySleep(monitored_job)

    self.assertNotEqual(jobs(0).status, 'submitted')
def test_b_EnableMonitoring(self):
    """Monitoring should move a freshly submitted job out of 'submitted'."""
    from Ganga.GPI import enableMonitoring, Job, jobs

    enableMonitoring()

    j = Job()
    j.submit()
    dummySleep(j)

    # By now the monitoring thread must have advanced the state
    self.assertNotEqual(jobs(0).status, 'submitted')
def test_g_testSJXMLContent(self):
    """Each subjob XML must match the master's once volatile fields are ignored."""
    # Check SJ content
    from Ganga.Core.GangaRepository.VStreamer import to_file, from_file
    from Ganga.GPI import jobs
    from tempfile import NamedTemporaryFile
    from Ganga.GPIDev.Base.Proxy import stripProxy

    # Fields that legitimately differ between master job and subjobs
    ignore_subs = ['subjobs', 'time', 'backend', 'id', 'splitter', 'info', 'application', 'inputdata']

    # Serialise the master job (minus ignored fields) for comparison below
    with NamedTemporaryFile(delete=False) as new_temp_file_a:
        temp_name_a = new_temp_file_a.name
        j = jobs(0)
        to_file(stripProxy(j), new_temp_file_a, ignore_subs)
        new_temp_file_a.flush()

    counter = 0
    for sj in j.subjobs:
        XMLFileName = getSJXMLFile(sj)
        assert path.isfile(XMLFileName)
        # Round-trip the stored subjob XML and check it is the right subjob
        with open(XMLFileName) as handler:
            tmpobj, errs = from_file(handler)
        assert hasattr(tmpobj, 'id')
        assert tmpobj.id == counter
        # Serialise this subjob the same way as the master
        with NamedTemporaryFile(delete=False) as new_temp_file:
            temp_name = new_temp_file.name
            to_file(stripProxy(sj), new_temp_file, ignore_subs)
            new_temp_file.flush()
        #import filecmp
        #assert filecmp.cmp(XMLFileName, temp_name)
        # Stripped of the ignored fields, master and subjob must serialise alike
        assert open(temp_name_a).read() == open(temp_name).read()
        unlink(temp_name)
        counter += 1

    # Make sure the loop actually covered every subjob
    assert counter == len(jobs(0).subjobs)
    unlink(temp_name_a)
def test_b_TestRemoveSJXML(self):
    """Delete a subjob XML file so loading must fall back to the '~' backup."""
    # Remove XML force to use backup
    from Ganga.GPI import jobs

    xml_path = getSJXMLFile(jobs(0).subjobs(0))
    unlink(xml_path)

    # The primary file is gone, but its backup must still exist
    assert not path.isfile(xml_path)
    assert path.isfile(xml_path + '~')

    global global_AutoStartReg
    global_AutoStartReg = True
def test_e_TestCorruptLoad(self):
    """Loading from backup after corruption must dirty the objects and reflush clean XML."""
    # Test loading of backup when corrupt
    from Ganga.GPI import jobs

    assert len(jobs) == 1
    assert len(jobs(0).subjobs) == 2

    # Trigger a load; with the primary XML corrupt this must use the backup
    backend2 = jobs(0).subjobs(0).backend
    assert backend2 is not None

    XMLFileName = getSJXMLFile(jobs(0).subjobs(0))

    # Keep a copy of the bad content for the final comparison
    from tempfile import NamedTemporaryFile
    with NamedTemporaryFile(delete=False) as myTempfile:
        myTempfile.write(badStr)
        myTempfile.flush()
        myTempName = myTempfile.name

    from Ganga.GPIDev.Base.Proxy import stripProxy
    # Recovering from backup must mark both the subjob and the master dirty
    assert stripProxy(jobs(0).subjobs(0))._dirty is True
    assert stripProxy(jobs(0))._dirty is True

    # Flushing rewrites the XML, so it must now differ from the bad content
    stripProxy(jobs(0).subjobs(0))._getRegistry().flush_all()
    assert open(XMLFileName).read() != open(myTempName).read()
    unlink(myTempName)
def test_e_TestCorruptLoad(self):
    """Loading a corrupt job must fall back to the backup and reflush clean XML."""
    # Test loading of backup when corrupt
    from Ganga.GPI import jobs, Job

    assert len(jobs) == 1

    # Trigger a load of the job; the corrupt XML forces use of the backup
    backend2 = jobs(0).backend
    assert isinstance(jobs(0), Job)
    assert backend2 is not None

    XMLFileName = getXMLFile(0)

    from Ganga.GPIDev.Base.Proxy import stripProxy
    print("%s" % stripProxy(jobs(0)).__dict__)

    # Recovery from backup must have dirtied the job; flushing rewrites the XML
    assert stripProxy(jobs(0))._dirty is True
    stripProxy(jobs(0))._setDirty()
    stripProxy(jobs(0))._getRegistry().flush_all()

    # Snapshot the bad content, then check the flushed XML differs from it
    from tempfile import NamedTemporaryFile
    with NamedTemporaryFile(delete=False) as myTempfile:
        myTempfile.write(badStr)
        myTempfile.flush()
        myTempName = myTempfile.name

    assert open(XMLFileName).read() != open(myTempName).read()
    unlink(myTempName)
def test_e_TestCorruptLoad(self):
    """Loading a corrupt job must fall back to the backup and reflush clean XML."""
    # Test loading of backup when corrupt
    from Ganga.GPI import jobs, Job

    assert len(jobs) == 1

    # Trigger a load of the job; the corrupt XML forces use of the backup
    backend2 = jobs(0).backend
    assert isinstance(jobs(0), Job)
    assert backend2 is not None

    XMLFileName = getXMLFile(0)

    from Ganga.GPIDev.Base.Proxy import stripProxy
    print("%s" % stripProxy(jobs(0)).__dict__)

    # Recovery from backup must have dirtied the job; flushing rewrites the XML
    assert stripProxy(jobs(0))._dirty is True
    stripProxy(jobs(0))._setDirty()
    stripProxy(jobs(0))._getRegistry().flush_all()

    # Snapshot the bad content, then check the flushed XML differs from it
    from tempfile import NamedTemporaryFile
    with NamedTemporaryFile(delete=False) as myTempfile:
        myTempfile.write(badStr)
        myTempfile.flush()
        myTempName = myTempfile.name

    assert open(XMLFileName).read() != open(myTempName).read()
    unlink(myTempName)
def test_e_TestCorruptLoad(self):
    """Loading from backup after corruption must dirty the objects and reflush clean XML."""
    # Test loading of backup when corrupt
    from Ganga.GPI import jobs

    assert len(jobs) == 1
    assert len(jobs(0).subjobs) == 2

    # Trigger a load; with the primary XML corrupt this must use the backup
    backend2 = jobs(0).subjobs(0).backend
    assert backend2 is not None

    XMLFileName = getSJXMLFile(jobs(0).subjobs(0))

    # Keep a copy of the bad content for the final comparison
    from tempfile import NamedTemporaryFile
    with NamedTemporaryFile(delete=False) as myTempfile:
        myTempfile.write(badStr)
        myTempfile.flush()
        myTempName = myTempfile.name

    from Ganga.GPIDev.Base.Proxy import stripProxy
    # Recovering from backup must mark both the subjob and the master dirty
    assert stripProxy(jobs(0).subjobs(0))._dirty is True
    assert stripProxy(jobs(0))._dirty is True

    # Flushing rewrites the XML, so it must now differ from the bad content
    stripProxy(jobs(0).subjobs(0))._getRegistry().flush_all()
    assert open(XMLFileName).read() != open(myTempName).read()
    unlink(myTempName)
def test_c_TestBackupLoad(self):
    """A job must still load when only its backup XML file is present."""
    # Test loading from backup
    from Ganga.GPI import jobs

    assert len(jobs) == 1

    ## trigger load
    # Accessing the backend forces a load from disk (i.e. from the backup)
    loaded_backend = jobs(0).backend
    assert loaded_backend is not None

    global global_AutoStartReg
    global_AutoStartReg = False
def test_d_GetNonSchemaAttr(self):
    """ Don't load a job looking at non-Schema objects"""
    from Ganga.GPI import jobs

    raw_j = stripProxy(jobs(0))
    # Merely obtaining the raw object must not load the job from disk
    assert not raw_j._getRegistry().has_loaded(raw_j)

    dirty_status = raw_j._dirty
    assert not dirty_status
    # Reading _dirty on the raw object must not trigger a load either
    assert not raw_j._getRegistry().has_loaded(raw_j)

    # _dirty is not part of the schema, so the GPI proxy must reject it
    with pytest.raises(AttributeError):
        _ = jobs(0)._dirty
    assert not raw_j._getRegistry().has_loaded(raw_j)

    # printSummaryTree requires the full job data, finally forcing the load
    raw_j.printSummaryTree()
    assert raw_j._getRegistry().has_loaded(raw_j)
def test_b_JobNotLoaded(self):
    """Second get the job and check that getting it via jobs doesn't cause it to be loaded"""
    from Ganga.GPI import jobs

    assert len(jobs) == 1

    # Fetching the proxy and stripping it must not read the job's data
    j = jobs(0)
    raw_j = stripProxy(j)

    has_loaded_job = raw_j._getRegistry().has_loaded(raw_j)
    assert not has_loaded_job
def testMergeRemoval(self):
    """A running job with a failing merger can still be removed cleanly."""
    from Ganga.GPI import Job, Executable, Local, LocalFile, jobs

    # see Savannah 33710
    j = Job()
    jobID = j.id

    # job will run for at least 20 seconds
    j.application = Executable(exe="sh", args=["-c", "sleep 20; echo foo > out.txt"])
    j.backend = Local()
    j.outputfiles = [LocalFile("out.txt")]
    j.splitter = CopySplitter()
    j.postprocessors = MergerTester(files=["out.txt"])

    # Configure the merger to always fail but be tolerated
    j.postprocessors[0].ignorefailed = True
    j.postprocessors[0].alwaysfail = True
    j.postprocessors[0].wait = 10

    j.submit()
    run_until_state(j, state="running")
    j.remove()

    # After removal the registry must no longer know this id
    with pytest.raises(KeyError):
        jobs(jobID)
def test_e_testXMLContent(self):
    """Stored XML must match a fresh serialisation of an equivalent job."""
    # Check content of XML is as expected
    from Ganga.Core.GangaRepository.VStreamer import to_file, from_file
    from Ganga.GPI import jobs, Job
    from Ganga.GPIDev.Base.Proxy import stripProxy
    from tempfile import NamedTemporaryFile

    j = jobs(0)
    assert path.isfile(getXMLFile(j))

    # Round-trip the stored XML and check the persisted name survived
    with open(getXMLFile(j)) as handler:
        tmpobj, errs = from_file(handler)
    assert hasattr(tmpobj, 'name')
    assert tmpobj.name == testStr

    # Fields expected to differ between two otherwise-identical jobs
    ignore_subs = [
        'time', 'subjobs', 'info', 'application', 'backend', 'id'
    ]

    # Serialise the loaded job without the volatile fields
    with NamedTemporaryFile(delete=False) as new_temp_file:
        temp_name = new_temp_file.name
        to_file(stripProxy(j), new_temp_file, ignore_subs)
        new_temp_file.flush()

    # Build and run a second job with the same name, then serialise it too
    with NamedTemporaryFile(delete=False) as new_temp_file2:
        temp_name2 = new_temp_file2.name
        j2 = Job()
        j2.name = testStr
        j2.submit()
        from GangaTest.Framework.utils import sleep_until_completed
        sleep_until_completed(j2)
        to_file(stripProxy(j2), new_temp_file2, ignore_subs)
        new_temp_file2.flush()

    #import filecmp
    #assert filecmp.cmp(handler.name, new_temp_file.name)
    #assert not filecmp.cmp(new_temp_file.name, new_temp_file2.name)
    #assert open(getXMLFile(j)).read() == open(temp_name).read()

    # The two stripped serialisations must be identical
    assert open(temp_name).read() == open(temp_name2).read()
    unlink(temp_name)
    unlink(temp_name2)
def testMergeRemoval(self):
    """Removing a mid-run job with a failing merger must purge it (Savannah 33710)."""
    from Ganga.GPI import Job, Executable, Local, LocalFile, jobs

    # see Savannah 33710
    removal_target = Job()
    jobID = removal_target.id

    # job will run for at least 20 seconds
    removal_target.application = Executable(exe='sh', args=['-c', 'sleep 20; echo foo > out.txt'])
    removal_target.backend = Local()
    removal_target.outputfiles = [LocalFile('out.txt')]
    removal_target.splitter = CopySplitter()
    removal_target.postprocessors = MergerTester(files=['out.txt'])

    # The merger always fails but the failure is tolerated
    removal_target.postprocessors[0].ignorefailed = True
    removal_target.postprocessors[0].alwaysfail = True
    removal_target.postprocessors[0].wait = 10

    removal_target.submit()
    run_until_state(removal_target, state='running')
    removal_target.remove()

    # Looking the job up after removal must raise KeyError
    with pytest.raises(KeyError):
        jobs(jobID)
def test_b_JobNotLoaded(self):
    """ Second get the job and check that getting it via jobs doesn't cause it to be loaded"""
    from Ganga.GPI import jobs

    self.assertEqual(len(jobs), 1)
    print("len: %s" % str(len(jobs)))

    # Fetching the proxy and stripping it must not read the job's data
    j = jobs(0)
    from Ganga.GPIDev.Base.Proxy import stripProxy
    raw_j = stripProxy(j)

    has_loaded_job = raw_j._getRegistry().has_loaded(raw_j)
    self.assertFalse(has_loaded_job)