def test_Savannah96158(self):
    from Ganga.GPI import Job, jobs

    # The first two tests check the new functionality, the remainder just check
    # that we didn't break existing functionality with this bug-fix
    a = Job()
    a.name = 'TestName'

    tmpList = jobs.select(name='*stN*')
    self.assertEqual(len(tmpList), 1, 'Test 1: jobs.select using wildcard returned unexpected number of results')

    a = Job()
    a.name = 'ekdicjsheeksoawoq1'
    a = Job()
    a.name = 'ekdicjsheeksoawoq2'
    a = Job()
    a.name = 'ekdicjsheeksoawoq3'
    a = Job()
    a.name = 'ekdicjsheeksoawoq4'

    tmpList = jobs.select(name='ekdicjsheeksoawoq?')
    self.assertEqual(len(tmpList), 4, 'Test 2: jobs.select using wildcard returned unexpected number of results')

    jobs.select(1)
    jobs.select(1, 4)
    jobs.select(status='new')
    jobs.select(backend='Local')
    jobs.select(application='Executable')
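# A brief aside on the pattern syntax exercised above: '*' and '?' behave like
# shell-style (fnmatch/glob) wildcards, where '*' matches any run of characters
# and '?' matches exactly one. The sketch below illustrates those semantics with
# the standard-library fnmatch module only; this is an illustrative assumption,
# not necessarily how jobs.select performs the matching internally.
from fnmatch import fnmatchcase

# '*stN*' matches because 'TestName' contains the substring 'stN'.
assert fnmatchcase('TestName', '*stN*')
# '?' matches exactly one trailing character, so the numbered names above match...
assert fnmatchcase('ekdicjsheeksoawoq3', 'ekdicjsheeksoawoq?')
# ...but a two-character suffix does not.
assert not fnmatchcase('ekdicjsheeksoawoq42', 'ekdicjsheeksoawoq?')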
def test_Savannah8111(self):
    from Ganga.GPI import Job

    j1 = Job()
    j1.name = 'Gauss Job'
    jj = j1.copy()
    self.assertEqual(jj.name, 'Gauss Job')
def test_a_JobConstruction(self): """ First construct the Job object (singular)""" from Ganga.Utility.Config import getConfig self.assertFalse(getConfig('TestingFramework')['AutoCleanup']) from Ganga.GPI import Job, jobs j = Job() assert len(jobs) == 1 j.name = 'modified_name'
def test_a_JobConstruction(self): """ First construct the Job object (singular)""" from Ganga.Utility.Config import getConfig self.assertFalse(getConfig('TestingFramework')['AutoCleanup']) from Ganga.GPI import Job, jobs j=Job() assert len(jobs) == 1 j.name = 'modified_name'
def test_a_JobConstruction(self): """ First construct the Job object (singular)""" from Ganga.Utility.Config import getConfig self.assertFalse(getConfig('TestingFramework')['AutoCleanup']) from Ganga.GPI import Job, jobs for i in range(job_num): j = Job() j.name = job_names[i] self.assertEqual(len(jobs), job_num) # Don't really gain anything from assertEqual...
def submitpilots(n=1, doTerm=True): """Submit a number of pilotjobs""" if n <= 0: return from Ganga.GPI import Job, Executable, File j = Job() j.application = Executable(exe=File(config['PilotScript']), args=[]) j.name = 'LGIpilot' if not doTerm: j.name = 'LGIpilot@' j.inputsandbox = [File(config['PilotDist'])] j.application.env['LGI_IS_PILOTJOB'] = '1' if doTerm: j.application.env['SCHED_WAIT_TERM'] = str(config['WaitTerm']) if config['MaxRuntime'] is not None: j.application.env['SCHED_TERM_AFTER'] = str(config['MaxRuntime']) j.submit() for i in range(1, n-1): j = j.copy() j.submit() # returns last job return j
def test_e_testXMLContent(self):
    # Check content of XML is as expected
    from Ganga.Core.GangaRepository.VStreamer import to_file, from_file
    from Ganga.GPI import jobs, Job
    from Ganga.GPIDev.Base.Proxy import stripProxy
    from tempfile import NamedTemporaryFile

    j = jobs(0)
    assert path.isfile(getXMLFile(j))

    with open(getXMLFile(j)) as handler:
        tmpobj, errs = from_file(handler)

        assert hasattr(tmpobj, 'name')
        assert tmpobj.name == testStr

        ignore_subs = ['time', 'subjobs', 'info', 'application', 'backend', 'id']

        with NamedTemporaryFile(delete=False) as new_temp_file:
            temp_name = new_temp_file.name
            to_file(stripProxy(j), new_temp_file, ignore_subs)
            new_temp_file.flush()

        with NamedTemporaryFile(delete=False) as new_temp_file2:
            temp_name2 = new_temp_file2.name
            j2 = Job()
            j2.name = testStr
            j2.submit()

            from GangaTest.Framework.utils import sleep_until_completed
            sleep_until_completed(j2)

            to_file(stripProxy(j2), new_temp_file2, ignore_subs)
            new_temp_file2.flush()

        #import filecmp
        #assert filecmp.cmp(handler.name, new_temp_file.name)
        #assert not filecmp.cmp(new_temp_file.name, new_temp_file2.name)

        #assert open(getXMLFile(j)).read() == open(temp_name).read()
        assert open(temp_name).read() == open(temp_name2).read()

        unlink(temp_name)
        unlink(temp_name2)
def testJobCopy(self): """Test that a job copy copies everything properly""" from Ganga.GPI import Job, ARC, GenericSplitter, GangaDataset, LocalFile, FileChecker from Ganga.GPIDev.Base.Proxy import isType j = Job() j.application.exe = "sleep" j.application.args = ['myarg'] j.backend = ARC() j.backend.CE = "my.ce" j.inputdata = GangaDataset() j.inputdata.files = [ LocalFile("*.txt") ] j.inputfiles = [ LocalFile("*.txt") ] j.name = "testname" j.outputfiles = [ LocalFile("*.txt") ] j.postprocessors = FileChecker(files=['stdout'], searchStrings = ['my search']) j.splitter = GenericSplitter() j.splitter.attribute = "application.args" j.splitter.values = ['arg 1', 'arg 2', 'arg 3'] j2 = j.copy() # test the copy has worked self.assertTrue( isType(j2, Job) ) self.assertEqual( j2.application.exe, "sleep" ) self.assertEqual( j2.application.args, ["myarg"] ) self.assertTrue( isType(j2.backend, ARC) ) self.assertEqual( j2.backend.CE, "my.ce" ) self.assertTrue( isType(j2.inputdata, GangaDataset) ) self.assertEqual( len(j2.inputdata.files), 1 ) self.assertTrue( isType(j2.inputdata.files[0], LocalFile) ) self.assertEqual( j2.inputdata.files[0].namePattern, "*.txt" ) self.assertEqual( len(j2.inputfiles), 1 ) self.assertTrue( isType(j2.inputfiles[0], LocalFile) ) self.assertEqual( j2.inputfiles[0].namePattern, "*.txt" ) self.assertEqual( j2.name, "testname" ) self.assertEqual( len(j2.outputfiles), 1 ) self.assertTrue( isType(j2.outputfiles[0], LocalFile) ) self.assertEqual( j2.outputfiles[0].namePattern, "*.txt" ) self.assertEqual( len(j2.postprocessors), 1 ) self.assertTrue( isType(j2.postprocessors[0], FileChecker) ) self.assertEqual( j2.postprocessors[0].files, ["stdout"] ) self.assertEqual( j2.postprocessors[0].searchStrings, ["my search"] ) self.assertTrue( isType(j2.splitter, GenericSplitter) ) self.assertEqual( j2.splitter.attribute, "application.args" ) self.assertEqual( j2.splitter.values, ['arg 1', 'arg 2', 'arg 3'])
def test_f_testXMLContent(self):
    # Check their content
    from Ganga.Core.GangaRepository.VStreamer import to_file, from_file
    from Ganga.GPI import jobs, Job
    from Ganga.GPIDev.Base.Proxy import stripProxy
    from tempfile import NamedTemporaryFile

    j = jobs(0)

    XMLFileName = getXMLFile(j)
    assert path.isfile(XMLFileName)

    with open(XMLFileName) as handler:
        tmpobj, errs = from_file(handler)

        assert hasattr(tmpobj, 'name')
        assert tmpobj.name == testStr

        ignore_subs = ['status', 'subjobs', 'time', 'backend', 'id', 'splitter', 'info', 'application']

        with NamedTemporaryFile(delete=False) as new_temp_file:
            temp_name = new_temp_file.name
            to_file(stripProxy(j), new_temp_file, ignore_subs)
            new_temp_file.flush()

        with NamedTemporaryFile(delete=False) as new_temp_file2:
            temp_name2 = new_temp_file2.name
            j2 = Job()
            j2.name = testStr
            to_file(stripProxy(j2), new_temp_file2, ignore_subs)
            new_temp_file2.flush()

        #assert open(XMLFileName).read() == open(temp_name).read()
        assert open(temp_name).read() == open(temp_name2).read()

        unlink(temp_name)
        unlink(temp_name2)
    polarity, year, mode
))
j.comment = (
    '{1} {2} MC {0} ntuple creation for k3pi mixing measurement.'
    .format(event_type, year, polarity)
)
j.application = DaVinci(version='v41r3')
j.application.optsfile = [s.format(path=base, year=year) for s in OPTIONS]

if args.test:
    # If testing, run over a couple of files locally,
    # saving the results to the sandbox
    j.inputdata = dataset[0:1]
    j.backend = Local()
    # Prepend test string to job name
    j.name = 'TEST_{0}'.format(j.name)
    j.outputfiles = [LocalFile(tfn)]
else:
    # If not testing, run over everything on the grid, splitting jobs
    # into groups of 5 files, notifying me on job completion/subjob failure,
    # and save the results on the grid storage
    j.inputdata = dataset
    j.backend = Dirac()
    j.backend.settings['CPUTime'] = 60*60*24*7
    j.do_auto_resubmit = True
    j.splitter = SplitByFiles(filesPerJob=5, ignoremissing=True)
    j.postprocessors = [Notifier(address=email)]
    j.outputfiles = [DiracFile(tfn)]

if not args.inspect_job:
    queues.add(j.submit)  # noqa