def test_d_RunningExecutables(self):
    """Run an already-existing executable, then a freshly written script.

    Mirrors the 'Running Executables' documentation example: submit
    /bin/ls with arguments, then generate a small shell script, mark it
    executable and submit it wrapped in a File object.
    """
    from Ganga.GPI import Job, File, Executable

    # -- RUNNINGEXECUTABLES EXAMPLE START
    # Already existing Exe
    j = Job()
    j.application = Executable()
    j.application.exe = '/bin/ls'
    j.application.args = ['-l', '-h']
    j.submit()

    # Wait for completion
    j.peek("stdout")

    # Send a script.  Use a context manager so the handle is closed
    # deterministically (the original `open(...).write(...)` leaked it).
    script_body = """#!/bin/bash
echo 'Current dir: ' `pwd`
echo 'Contents:'
ls -ltr
echo 'Args: ' $@
"""
    with open('my_script.sh', 'w') as script:
        script.write(script_body)

    import os
    import stat
    # chmod +x without spawning a shell (replaces os.system('chmod +x ...')).
    mode = os.stat('my_script.sh').st_mode
    os.chmod('my_script.sh', mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

    j = Job()
    j.application = Executable()
    j.application.exe = File('my_script.sh')
    j.submit()

    # Wait for completion
    j.peek("stdout")
def testInternal(self):
    """Exercise GaudiRun construction: opts-file accessors, directory
    accessor, assignment onto a Job, and swapping the opts for a DiracFile."""
    from Ganga.GPI import GaudiRun, Job, LocalFile, DiracFile

    # Build a scratch directory holding a trivial options file.
    test_dir = path.join(gettempdir(), 'GaudiRunTest')
    shutil.rmtree(test_dir, ignore_errors=True)
    makedirs(test_dir)

    opts_path = path.join(test_dir, 'testOpts.py')
    with open(opts_path, 'w+') as opts_handle:
        opts_handle.write("print('hello')")

    app = GaudiRun(directory=test_dir, myOpts=LocalFile(opts_path))
    raw_app = stripProxy(app)

    assert isinstance(raw_app.getOptsFile(), stripProxy(LocalFile))
    assert raw_app.getDir()

    # The stored LocalFile must point back at the file we just wrote.
    opts_file = raw_app.getOptsFile()
    stored_path = path.join(opts_file.localDir, opts_file.namePattern)
    assert open(stored_path).read() == "print('hello')"
    assert raw_app.getDir() == test_dir

    holder = Job()
    holder.application = app
    assert isinstance(holder.application, GaudiRun)

    # myOpts also accepts a grid file.
    remote = DiracFile(lfn='/not/some/file')
    app.myOpts = remote
    assert app.myOpts.lfn == remote.lfn

    shutil.rmtree(test_dir, ignore_errors=True)
def test_a_CreateJob(self):
    """Clear the registry and construct a bare Executable/Local job."""
    from Ganga.GPI import jobs, Job, Executable, Local

    # Start from an empty repository so later tests see a known state.
    jobs.remove()

    new_job = Job()
    new_job.application = Executable()
    new_job.backend = Local()
def test_c_onlyCreate(self):
    """here for testing job create"""
    from Ganga.GPI import Job, Executable, ArgSplitter, MassStorageFile

    job = Job()
    job.application = Executable(exe='touch')
    job.splitter = ArgSplitter(args=[['abc.txt'], ['def.txt']])

    # One MassStorageFile collecting every .txt produced by the subjobs.
    mass_file = MassStorageFile(outputfilenameformat='/test/{sjid}-{fname}',
                                namePattern='*.txt')
    job.outputfiles = [mass_file]
def test_Savannah13459(self):
    """Regression test for Savannah #13459: list-type application args
    must support both wholesale assignment and per-item assignment."""
    # The original imported `config` (never used) and imported Job twice;
    # a single import line is sufficient.
    from Ganga.GPI import Job, Executable

    j = Job()
    j.application = Executable()

    j.application.args = ['1', '2', '3']
    self.assertEqual(j.application.args, ['1', '2', '3'])

    # Item assignment must be reflected in the stored args list.
    j.application.args[0] = '0'
    self.assertEqual(j.application.args, ['0', '2', '3'])
def test_Savannah8529(self):
    """Check _auto__init__ runs for every construction route (Savannah #8529)."""
    from Ganga.GPI import Job, TestApplication

    # Build the application four different ways: attribute assignment by
    # instance and by name, and constructor keyword by instance and by name.
    by_attr_instance = Job()
    by_attr_instance.application = TestApplication()
    by_attr_name = Job()
    by_attr_name.application = "TestApplication"
    by_kwarg_instance = Job(application=TestApplication())
    by_kwarg_name = Job(application="TestApplication")

    all_jobs = [by_attr_instance, by_attr_name, by_kwarg_instance, by_kwarg_name]

    # Every route must produce the same derived_value.
    for first, second in zip(all_jobs, all_jobs[1:]):
        self.assertEqual(first.application.derived_value,
                         second.application.derived_value)

    self.assertNotEqual(by_attr_instance.application.derived_value, None)
    self.assertNotEqual(
        by_attr_instance.application.derived_value.find(
            by_attr_instance.application.exe), -1)
def test_c_onlyCreate(self):
    """here for testing job create"""
    from Ganga.GPI import Job, Executable, ArgSplitter, MassStorageFile

    # Assemble the pieces first, then attach them to the job.
    touch_app = Executable(exe='touch')
    two_way_split = ArgSplitter(args=[['abc.txt'], ['def.txt']])
    storage = MassStorageFile(outputfilenameformat='/test/{sjid}-{fname}',
                              namePattern='*.txt')

    j = Job()
    j.application = touch_app
    j.splitter = two_way_split
    j.outputfiles = [storage]
def test_a_jobSubmit(self):
    """here for testing a submit"""
    from Ganga.GPI import Job, Executable, ArgSplitter, MassStorageFile
    from GangaTest.Framework.utils import sleep_until_completed

    job = Job()
    job.application = Executable(exe='touch')
    job.splitter = ArgSplitter(args=[['abc.txt'], ['def.txt']])
    job.outputfiles = [MassStorageFile(outputfilenameformat='/test/{sjid}-{fname}',
                                       namePattern='*.txt')]
    job.submit()

    # Block until the job (and its subjobs) have finished.
    sleep_until_completed(job)
def test_Savannah44116(self):
    """A job whose app postprocess hook marks it failed must end 'failed'."""
    from Ganga.GPI import Job, TestApplication, TestSubmitter
    from GangaTest.Framework.utils import sleep_until_state

    app = TestApplication()
    app.postprocess_mark_as_failed = True

    j = Job()
    j.application = app
    j.backend = TestSubmitter()
    j.backend.time = 1

    j.submit()

    self.assertTrue(sleep_until_state(j, 10, 'failed'),
                    'Job is not marked as failed despite app.postprocess() hook')
def testMergeThatAlwaysFailsOverwrite(self):
    """A failing merger with overwrite=True: job fails, summary file written."""
    from Ganga.GPI import Job, Executable, Local, LocalFile

    failing_job = Job()
    failing_job.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
    failing_job.backend = Local()
    failing_job.outputfiles = [LocalFile('out.txt')]
    failing_job.splitter = CopySplitter()
    failing_job.postprocessors = MergerTester(files=['out.txt'], overwrite=True)

    failing_job.submit()
    assert run_until_state(failing_job, 'failed', timeout=60)

    # The merger must still leave its summary next to the outputs.
    summary = os.path.join(failing_job.outputdir, 'out.txt.merge_summary')
    assert os.path.exists(summary), 'Summary file should be created'
def test_Savannah15630(self):
    """An output file declared via LocalFile must appear in the outputdir."""
    from Ganga.GPI import Job, Executable, Local, LocalFile
    from GangaTest.Framework.utils import sleep_until_completed
    import os.path

    j = Job()
    j.application = Executable(exe='touch', args=['out.dat'])
    j.backend = Local()
    j.outputfiles = [LocalFile('out.dat')]
    j.submit()

    self.assertTrue(sleep_until_completed(j, 60),
                    'Timeout on job submission: job is still not finished')

    # The touched file must have been shipped back to the job output dir.
    expected = os.path.join(j.outputdir, j.application.args[0])
    self.assertTrue(os.path.exists(expected))
def test_Savannah47814(self):
    """Submitting a non-existent executable must drive the job to 'failed'."""
    from Ganga.GPI import Job, Executable
    from GangaTest.Framework.utils import sleep_until_state, file_contains
    import os.path

    j = Job()
    j.application = Executable(exe='ThisScriptDoesNotExist')
    j.submit()

    reached_failed = sleep_until_state(
        j, 60, state='failed',
        break_states=['new', 'killed', 'completed', 'unknown', 'removed'])
    self.assertTrue(
        reached_failed,
        'Job with illegal script should fail. Instead it went into the state %s' % j.status)

    # The backend status file should record the missing-file error.
    status_file = os.path.join(j.outputdir, '__jobstatus__')
    self.assertTrue(file_contains(status_file, 'No such file or directory'),
                    '__jobstatus__ file should contain error')
def testMergeThatAlwaysFailsFlagsSet(self):
    """With ignorefailed and overwrite both set, a failing merger still
    leaves the job 'failed' and writes the merge summary."""
    from Ganga.GPI import Job, Executable, Local, LocalFile

    merge_job = Job()
    merge_job.application = Executable(exe="sh", args=["-c", "echo foo > out.txt"])
    merge_job.backend = Local()
    merge_job.outputfiles = [LocalFile("out.txt")]
    merge_job.splitter = CopySplitter()
    merge_job.postprocessors = MergerTester(files=["out.txt"],
                                            ignorefailed=True,
                                            overwrite=True)
    merge_job.submit()

    run_until_completed(merge_job, timeout=60)

    assert merge_job.status == "failed"
    summary_file = os.path.join(merge_job.outputdir, "out.txt.merge_summary")
    assert os.path.exists(summary_file), "Summary file should be created"
def test_a_jobSubmit(self):
    """here for testing a submit"""
    from Ganga.GPI import Job, Executable, ArgSplitter, MassStorageFile

    # Configure application, splitter and output storage up front.
    app = Executable(exe='touch')
    splitter = ArgSplitter(args=[['abc.txt'], ['def.txt']])
    outfile = MassStorageFile(outputfilenameformat='/test/{sjid}-{fname}',
                              namePattern='*.txt')

    j = Job()
    j.application = app
    j.splitter = splitter
    j.outputfiles = [outfile]
    j.submit()

    from GangaTest.Framework.utils import sleep_until_completed
    sleep_until_completed(j)
def testMergeThatAlwaysFailsIgnoreFailed(self):
    """A merger that always fails with ignorefailed=True: the job still
    ends up 'failed' but the merge summary is produced."""
    from Ganga.GPI import Job, Executable, Local, LocalFile

    test_job = Job()
    test_job.application = Executable(exe='sh', args=['-c', 'echo foo > out.txt'])
    test_job.backend = Local()
    test_job.outputfiles = [LocalFile('out.txt')]
    test_job.splitter = CopySplitter()
    test_job.postprocessors = MergerTester(files=['out.txt'], ignorefailed=True)

    test_job.submit()
    sleep_until_completed(test_job, 60)

    assert test_job.status == 'failed'
    summary = os.path.join(test_job.outputdir, 'out.txt.merge_summary')
    assert os.path.exists(summary), 'Summary file should be created'
def test_Savannah44116(self):
    """postprocess_mark_as_failed must push the job into the 'failed' state."""
    from Ganga.GPI import Job, TestApplication, TestSubmitter
    from GangaTest.Framework.utils import sleep_until_state

    marked_job = Job()
    marked_job.application = TestApplication()
    marked_job.application.postprocess_mark_as_failed = True

    marked_job.backend = TestSubmitter()
    marked_job.backend.time = 1

    marked_job.submit()

    reached = sleep_until_state(marked_job, 10, 'failed')
    self.assertTrue(reached,
                    'Job is not marked as failed despite app.postprocess() hook')
def test_Savannah15630(self):
    """The 'touch'-ed output file must be returned to the job outputdir."""
    from Ganga.GPI import Job, Executable, Local, LocalFile
    from GangaTest.Framework.utils import sleep_until_completed
    import os.path

    touch_job = Job()
    touch_job.application = Executable(exe='touch', args=['out.dat'])
    touch_job.backend = Local()
    touch_job.outputfiles = [LocalFile('out.dat')]
    touch_job.submit()

    finished = sleep_until_completed(touch_job, 60)
    self.assertTrue(finished,
                    'Timeout on job submission: job is still not finished')

    out_path = os.path.join(touch_job.outputdir, touch_job.application.args[0])
    self.assertTrue(os.path.exists(out_path))
def testInterfaceLookFeel(self):
    # Just testing that the job construction works
    from Ganga.GPI import Job, Im3ShapeApp, Im3ShapeSplitter, DiracFile, LocalFile, GangaDataset, Dirac

    # The Im3Shape application: grid tarball plus local config/blacklist.
    im3_app = Im3ShapeApp(
        im3_location=DiracFile(lfn='/lsst/y1a1-v2-z/software/2016-02-24/im3shape-grid.tar.gz'),
        ini_location=LocalFile('/afs/cern.ch/user/r/rcurrie/cmtuser/GANGA/GANGA_LSST/install/ganga/python/params_disc.ini'),
        blacklist=LocalFile('/afs/cern.ch/user/r/rcurrie/cmtuser/GANGA/GANGA_LSST/install/ganga/python/blacklist-y1.txt'))

    # A one-file grid dataset as input.
    dataset = GangaDataset()
    dataset.append(DiracFile(lfn='/lsst/DES0005+0043-z-meds-y1a1-gamma.fits.fz'))

    lsst_job = Job()
    lsst_job.application = im3_app
    lsst_job.backend = Dirac()
    lsst_job.inputdata = dataset
    lsst_job.splitter = Im3ShapeSplitter(size=20)
    lsst_job.outputfiles = [DiracFile('*.main.txt'), DiracFile('*.epoch.txt')]
def testInterfaceLookFeel(self):
    """Check deepcopy-on-assignment vs reference semantics for backends."""
    from Ganga.GPI import Job, LSF, Executable, DaVinci

    job_a = Job(name='my', application='DaVinci')
    job_b = Job(application=DaVinci())

    job_a.backend = LSF()
    job_a.backend.queue = '8nm'

    # Assigning a backend taken from another job deep-copies it...
    job_b.backend = job_a.backend
    job_b.backend.queue = '16nh'

    # ...while reading the attribute yields a live reference.
    backend_ref = job_b.backend
    assert job_b.backend.queue == '16nh'
    backend_ref.queue = '100nh'
    assert job_b.backend.queue == '100nh'

    exe_app = Executable()
    job_a.application = exe_app  # deepcopy
def testInternal(self):
    """GaudiExec internals: options files, Job assignment, DiracFile options."""
    from Ganga.GPI import GaudiExec, Job, LocalFile, DiracFile

    work_dir = path.join(gettempdir(), 'GaudiExecTest')
    shutil.rmtree(work_dir, ignore_errors=True)
    makedirs(work_dir)

    opts_name = path.join(work_dir, 'testOpts.py')
    with open(opts_name, 'w+') as handle:
        handle.write("print('hello')")
    assert path.exists(opts_name)

    app = GaudiExec(directory=work_dir, options=[LocalFile(opts_name)])

    first_opts = stripProxy(app).getOptsFiles()[0]
    assert isinstance(first_opts, stripProxy(LocalFile))

    # The stored LocalFile must reconstruct to the path we wrote.
    rebuilt = path.join(first_opts.localDir, first_opts.namePattern)
    assert rebuilt == opts_name
    assert open(rebuilt).read() == "print('hello')"

    container = Job()
    container.application = app
    assert isinstance(container.application, GaudiExec)

    # options also accepts grid files.
    lfn_file = DiracFile(lfn='/not/some/file')
    app.options = [lfn_file]
    assert app.options[0].lfn == lfn_file.lfn

    shutil.rmtree(work_dir, ignore_errors=True)
def testInterfaceLookFeel(self):
    """
    This test tests the Executable app and that the DaVinci are assignable
    """
    from Ganga.GPI import Job, LSF, Executable, DaVinci

    named_job = Job(name='my', application='DaVinci')
    davinci_job = Job(application=DaVinci())

    named_job.backend = LSF()
    named_job.backend.queue = '8nm'

    # Backend assignment between jobs deep-copies the object...
    davinci_job.backend = named_job.backend
    davinci_job.backend.queue = '16nh'

    # ...whereas attribute access hands back a reference to the live object.
    live_backend = davinci_job.backend
    assert davinci_job.backend.queue == '16nh'
    live_backend.queue = '100nh'
    assert davinci_job.backend.queue == '100nh'

    ap = Executable()
    named_job.application = ap  # deepcopy
def testFailure(self):
    """
    Check a simple job fails and raises the correct exception
    """
    from Ganga.GPI import Job, Dirac, Executable
    import time

    j = Job(backend=Dirac())
    # 'ech' is deliberately not a valid executable.
    j.application = Executable(exe='ech')
    j.application.args = ['Hello World']
    j.submit()
    assert run_until_state(j, 'failed', 220)

    filepath = os.path.join(j.outputdir, 'Ganga_Executable.log')
    # Poll for the log file (up to ~50s).  The original opened the file
    # unconditionally after the polling loop, so a log that never appeared
    # raised IOError and masked the real assertion failure.
    for _ in range(10):
        if os.path.exists(filepath):
            break
        time.sleep(5)
    assert os.path.exists(filepath), 'Ganga_Executable.log never appeared'

    # The wrapper must have logged the failed exec attempt.
    with open(filepath, 'r') as f:
        found = any(
            "Exception occured in running process: ['ech', 'Hello World']" in line
            for line in f)
    assert found
def testFailure(self):
    """
    Check a simple job fails and raises the correct exception
    """
    from Ganga.GPI import Job, Dirac, Executable
    import time

    bad_job = Job(backend=Dirac())
    bad_job.application = Executable(exe='ech')
    bad_job.application.args = ['Hello World']
    bad_job.submit()
    assert run_until_state(bad_job, 'failed', 220)

    log_path = os.path.join(bad_job.outputdir, 'Ganga_Executable.log')

    # Give the log up to ~50s to appear.
    attempts = 0
    while attempts < 10 and not os.path.exists(log_path):
        attempts += 1
        time.sleep(5)

    expected_line = "Exception occured in running process: ['ech', 'Hello World']"
    with open(log_path, 'r') as log_file:
        found = any(expected_line in line for line in log_file)
    assert found
def testMergeRemoval(self):
    """Removing a running job with a pending merger must purge it from the
    registry (see Savannah 33710)."""
    from Ganga.GPI import Job, Executable, Local, LocalFile, jobs

    doomed = Job()
    saved_id = doomed.id

    # Keep the job alive for at least 20 seconds so we can remove it mid-run.
    doomed.application = Executable(exe="sh",
                                    args=["-c", "sleep 20; echo foo > out.txt"])
    doomed.backend = Local()
    doomed.outputfiles = [LocalFile("out.txt")]
    doomed.splitter = CopySplitter()
    doomed.postprocessors = MergerTester(files=["out.txt"])
    doomed.postprocessors[0].ignorefailed = True
    doomed.postprocessors[0].alwaysfail = True
    doomed.postprocessors[0].wait = 10

    doomed.submit()
    run_until_state(doomed, state="running")
    doomed.remove()

    # The registry must no longer resolve the old id.
    with pytest.raises(KeyError):
        jobs(saved_id)
def Savannah47814(self):
    """A job pointing at a missing script must end 'failed' and record the
    error in __jobstatus__.

    NOTE(review): the method name lacks the 'test_' prefix, so standard
    runners will not discover it — confirm whether that is intentional
    (a prefixed copy of this test exists elsewhere in this file).
    """
    from Ganga.GPI import Job, Executable
    from GangaTest.Framework.utils import sleep_until_state, file_contains
    import os.path

    broken = Job()
    broken.application = Executable(exe='ThisScriptDoesNotExist')
    broken.submit()

    went_failed = sleep_until_state(
        broken, 60, state='failed',
        break_states=['new', 'killed', 'completed', 'unknown', 'removed'])
    self.assertTrue(
        went_failed,
        'Job with illegal script should fail. Instead it went into the state %s' % broken.status)

    status_path = os.path.join(broken.outputdir, '__jobstatus__')
    self.assertTrue(file_contains(status_path, 'No such file or directory'),
                    '__jobstatus__ file should contain error')
def submitpilots(n=1, doTerm=True):
    """Submit a number of pilotjobs.

    Builds one LGI pilot job configured from the module-level `config`
    mapping, submits it, then submits copies until `n` jobs have been
    submitted in total.

    :param n: total number of pilot jobs to submit; n <= 0 submits nothing
    :param doTerm: if True the pilot is told to terminate when idle
                   (SCHED_WAIT_TERM); if False the job name is marked
                   with a trailing '@'
    :return: the last submitted Job, or None when n <= 0
    """
    if n <= 0:
        return
    from Ganga.GPI import Job, Executable, File

    j = Job()
    j.application = Executable(exe=File(config['PilotScript']), args=[])
    j.name = 'LGIpilot'
    if not doTerm:
        j.name = 'LGIpilot@'
    j.inputsandbox = [File(config['PilotDist'])]
    j.application.env['LGI_IS_PILOTJOB'] = '1'

    if doTerm:
        j.application.env['SCHED_WAIT_TERM'] = str(config['WaitTerm'])
    if config['MaxRuntime'] is not None:
        j.application.env['SCHED_TERM_AFTER'] = str(config['MaxRuntime'])

    j.submit()

    # Submit n-1 copies so n jobs are submitted in total.
    # BUGFIX: the original looped over range(1, n-1), which yields n-2
    # iterations and therefore submitted one job too few for every n >= 2
    # (e.g. n=2 submitted only one job).
    for _ in range(n - 1):
        j = j.copy()
        j.submit()
    # returns last job
    return j
def testInternal(self):
    """Build a GaudiExec from a temp options file and verify its accessors."""
    from Ganga.GPI import GaudiExec, Job, LocalFile, DiracFile

    scratch = path.join(gettempdir(), 'GaudiExecTest')
    shutil.rmtree(scratch, ignore_errors=True)
    makedirs(scratch)

    opts_file = path.join(scratch, 'testOpts.py')
    with open(opts_file, 'w+') as out:
        out.write("print('hello')")
    assert path.exists(opts_file)

    gaudi_app = GaudiExec(directory=scratch, options=[LocalFile(opts_file)])
    raw = stripProxy(gaudi_app)

    assert isinstance(raw.getOptsFiles()[0], stripProxy(LocalFile))

    # Round-trip the stored LocalFile back into a filesystem path.
    stored = raw.getOptsFiles()[0]
    stored_path = path.join(stored.localDir, stored.namePattern)
    assert stored_path == opts_file
    assert open(stored_path).read() == "print('hello')"

    wrapper = Job()
    wrapper.application = gaudi_app
    assert isinstance(wrapper.application, GaudiExec)

    dirac_opt = DiracFile(lfn='/not/some/file')
    gaudi_app.options = [dirac_opt]
    assert gaudi_app.options[0].lfn == dirac_opt.lfn

    shutil.rmtree(scratch, ignore_errors=True)
def testMergeRemoval(self):
    """Removing a job mid-run (with a slow, always-failing merger attached)
    must remove it from the registry — Savannah 33710."""
    from Ganga.GPI import Job, Executable, Local, LocalFile, jobs

    running_job = Job()
    removed_id = running_job.id

    # Run long enough (>= 20s) that removal happens while still executing.
    running_job.application = Executable(
        exe='sh', args=['-c', 'sleep 20; echo foo > out.txt'])
    running_job.backend = Local()
    running_job.outputfiles = [LocalFile('out.txt')]
    running_job.splitter = CopySplitter()
    running_job.postprocessors = MergerTester(files=['out.txt'])

    tester = running_job.postprocessors[0]
    tester.ignorefailed = True
    tester.alwaysfail = True
    tester.wait = 10

    running_job.submit()
    run_until_state(running_job, state='running')
    running_job.remove()

    with pytest.raises(KeyError):
        jobs(removed_id)
f.write(('from helpers import davinci\n\n' "davinci.configure(year={1}, mc=False, input_type='{0}'," "n_events={2}, root={3}, tfn='{4}')\n").format( input_type, year, 10000 if args.test else -1, ROOT, tfn)) # Make the file for setting the access functions for ntuple settings with open('{0}/configs.py'.format(basedir), 'w+') as f: f.write(configs_file.format(modes=MODES, turbo=not args.twotag, mc='False')) j = Job(name=JNAME.format(polarity, year)) # j.application = make_exec_app(version='v41r2p1') # j.application.options = [path.format(basedir) for path in OPTIONS] # Old submission method j.application = DaVinci(version='v41r2p1') # j.application = DaVinci() j.application.optsfile = [path.format(basedir) for path in OPTIONS] # If testing, run over a couple of files locally, saving # the results to the sandbox. # Else, run over everything on the grid, splitting jobs into groups of 10 # files, notifying me on job completion/subjob failure, # and save the results on the grid storage if args.test: j.inputdata = dataset[0:1] j.backend = Local() # Prepend test string to job name j.name = 'TEST_{0}'.format(j.name) j.outputfiles = [LocalFile(tfn)] else:
'from helpers import davinci\n\n' "davinci.configure(year={1}, mc=True, input_type='{0}'," "n_events={2}, root={3}, tfn='{4}')\n" ).format(input_type, year, 10000 if args.test else -1, ROOT, tfn)) print('Created all helper files for the options.') print('Options files:' + ' '.join([s.format(path=base, year=year) for s in OPTIONS])) # NOQA j = Job(name=JNAME.format( polarity, year, mode )) j.comment = ( '{1} {2} MC {0} ntuple creation for k3pi mixing measurement.' .format(event_type, year, polarity) ) j.application = DaVinci(version='v41r3') j.application.optsfile = [s.format(path=base, year=year) for s in OPTIONS] if args.test: # If testing, run over a couple of files locally, # saving the results to the sandbox j.inputdata = dataset[0:1] j.backend = Local() # Prepend test string to job name j.name = 'TEST_{0}'.format(j.name) j.outputfiles = [LocalFile(tfn)] else: # If not testing, run over everything on the grid, splitting jobs # into groups of 10 files, notifying me on job completion/subjob failure, # and save the results on the grid storage j.inputdata = dataset
def makeIPResolutionsJob( jobName, dataFile, brunelVersion="", dataType = '2012',
                          extraFiles = [], ddDBTag = None, condDBTag = None ) :
    """Call this method to make a job that will run Brunel with the IP resolutions
    ntupling algorithm, using the given config file (data type, etc) and data file.
    Add this method to your ~/.ganga.py file to have it automatically loaded whenever
    you run ganga."""
    # NOTE(review): legacy Python 2 code (print statements).  Also note the
    # mutable default `extraFiles=[]` — safe only as long as callers never
    # mutate it; confirm before modernising.

    print "Creating an IP resolutions monitoring job named \'%s\'" % jobName

    # Resolve environment variables and require the data file to exist
    # before doing anything else.
    dataFile = os.path.expandvars(dataFile)
    if not os.path.exists(dataFile) :
        print "Could not find the data file \"%s\"!" % dataFile
        return None
    dataFile = os.path.abspath(dataFile)
    print "Using data file \'%s\'" % dataFile

    # Scan the bookkeeping step records in the data file for options files
    # and DB tags.  parseDataFileForSteps is defined elsewhere in this file.
    print "Parsing data file for options."
    stepInfos = parseDataFileForSteps(dataFile)
    productionOptsFiles = None
    if len(stepInfos) > 0 :
        for step in stepInfos :
            # Get any additional options files used for the Brunel step.
            if step['ApplicationName'] == "Brunel" :
                productionOptsFiles = []
                for optsFile in step['OptionFiles'].split(";") :
                    productionOptsFiles.append(optsFile.replace(" \n",""))
            # Get the DB tags used for the Gauss step. This should be the
            # same as for the Brunel step but it seems there's a bug in bkk.
            if step['ApplicationName'] == "Gauss" or step['ApplicationName'] == 'DaVinci' or step['ApplicationName'] == 'Brunel' :
                # Explicit arguments win; only fill tags still unset.
                if ddDBTag == None :
                    ddDBTag = step['DDDB']
                if condDBTag == None :
                    condDBTag = step['CONDDB']

    # Without DB tags and without extra options files the job cannot be
    # configured meaningfully.
    if None == ddDBTag and len(extraFiles) == 0 :
        print "The DB tags could not be retrieved from the data file and no extra options\
 files have been specified! The job cannot be made."
        return None

    j = Job( name = jobName )
    # Empty version string means "use the default Brunel release".
    if brunelVersion == "" :
        j.application = Brunel()
    else :
        j.application = Brunel(version=brunelVersion)

    # Base Brunel configuration appended as literal option lines.
    j.application.extraopts = 'Brunel().OutputLevel = 5\nBrunel().PrintFreq = 10000\nBrunel().DataType = "{0}"\n'.format(dataType)
    if None != ddDBTag :
        print "Using DDDBTag \"%s\" and CondDBTag \"%s\"" % (ddDBTag, condDBTag)
        j.application.extraopts += "\nBrunel().DDDBtag = \"%s\"\nBrunel().CondDBtag = \"%s\"\n" % (ddDBTag, condDBTag)
        # A 'sim' DDDB tag indicates simulated data.
        if 'sim' in ddDBTag.lower() :
            j.application.extraopts += '\nBrunel().Simulation = True\n'
            #j.application.extraopts += '\nBrunel().WithMC = True\n'
    else :
        print "The DB tags could not be retrieved from the data file."
        print "If they are not defined in one of the extra options files default values will be used."

    # mainIPConfigFile is a module-level options file defined elsewhere
    # in this file.
    j.application.optsfile = [ mainIPConfigFile ] + extraFiles
    print "Using options files:"
    for optsFile in j.application.optsfile :
        print optsFile.name
    # Pull in any options files recorded for the production's Brunel step.
    if None != productionOptsFiles and len(productionOptsFiles) > 0 :
        for optsFile in productionOptsFiles :
            print optsFile
            j.application.extraopts += "\nimportOptions(\"%s\")\n" % optsFile

    print "Reading in data ..."
    j.inputdata = j.application.readInputData( dataFile )
    print "Data read. %s files found." % len(j.inputdata.files)
    # Derive the input type (e.g. DST) from the first file's extension.
    if len(j.inputdata.files) > 0 :
        j.application.extraopts += '\nBrunel().InputType = "{0}"\n'.format(j.inputdata.files[0].name.split('.')[-1].upper())

    j.splitter = SplitByFiles( filesPerJob = 10 )
    j.backend = Dirac()
    j.outputfiles = [DiracFile('*.root')]
    return j