def defGridJob(jobName, jobGroup, inputFile):
    """Build a grid UserJob for the top-asymmetry analysis.

    Configures name, group, CPU limit, banned sites, input data and
    sandboxes, and registers the ROOT output file on CERN-DST-EOS.

    :param jobName: base name of the job; also used to derive output file names
    :param jobGroup: job group label; doubles as the output directory on the SE
    :param inputFile: input data LFN(s) for the job
    :returns: tuple of (job object, slcio output file name, root output file name)
    """
    from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob

    # Output file names are derived from the job name.
    outputFile = "%s.slcio" % jobName
    rootFile = "%s.root" % jobName

    # Sites excluded from submission (historically unreliable for these jobs).
    problematicSites = [
        'LCG.IN2P3-CC.fr',
        'OSG.UConn.us',
        'LCG.Cracow.pl',
        'OSG.MIT.us',
        'LCG.Glasgow.uk',
        'OSG.CIT.us',
        'OSG.BNL.us',
        'LCG.Brunel.uk',
    ]

    job = UserJob()
    job.setName(jobName)
    job.setJobGroup(jobGroup)
    job.setCPUTime(86400)
    job.dontPromptMe()
    job.setBannedSites(problematicSites)
    job.setInputData(inputFile)
    # The "LFN:" prefix is only needed for the InputSandbox.
    job.setInputSandbox(['LFN:/ilc/user/l/lstroem/topasymmetry/lib_ilcsoft_2017-05-30_gcc62.tar.gz'])
    job.setOutputSandbox(["*.out", "*.log"])
    # job.setOutputData([outputFile, rootFile], jobGroup, "CERN-DST-EOS") #keep some files on the GRID for further processing
    job.setOutputData([rootFile], jobGroup, "CERN-DST-EOS")
    return job, outputFile, rootFile
def subDDSim():
    """Submit a DDSim simulation job through DIRAC.

    Reads the job parameters from the module-level ``_clip`` option holder
    (number of events, input/output file, output directory, local-mode flag),
    builds a UserJob wrapping a DDSim application and submits it either
    locally or to the grid.

    Exits the process with status -1 when no input file was given.
    """
    # Decide parameters for a job
    # NOTE: the original code assigned "KEK-SRM" here and immediately
    # overwrote it with "KEK-DISK"; the dead assignment has been removed.
    outputSE = "KEK-DISK"

    isLocal = _clip.isLocal
    nbevts = 10 if _clip.numberOfEvents == 0 else _clip.numberOfEvents
    outputFile = "ddsim_example.slcio" if _clip.outputFile == "" else _clip.outputFile
    outputDir = _clip.outputDir

    inputFile = _clip.inputFile
    if inputFile == "":
        gLogger.error("Input file for ddsim is not given.")
        exit(-1)

    # Create DIRAC objects for job submission
    dIlc = DiracILC()

    job = UserJob()
    job.setJobGroup("myddsimjob")
    job.setName("myddsim")
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    job.setILDConfig("v02-00-02")

    # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"])
    # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"])  # job submission destination
    job.setBannedSites(["LCG.UKI-SOUTHGRID-RALPP.uk"])  # a list of sites not to submit job
    # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units )

    ddsim = DDSim()
    ddsim.setVersion("ILCSoft-02-00-02_gcc49")
    ddsim.setDetectorModel("ILD_l5_v02")
    ddsim.setInputFile(inputFile)
    ddsim.setNumberOfEvents(nbevts)
    extraCLIArguments = " --steeringFile ddsim_steer.py "
    extraCLIArguments += " --outputFile %s " % outputFile
    extraCLIArguments += " --vertexSigma 0.0 0.0 0.1968 0.0 --vertexOffset 0.0 0.0 0.0 0.0 "
    ddsim.setExtraCLIArguments(extraCLIArguments)
    # ddsim.setRandomSeed(1234565)
    # ddsim.setStartFrom(20)  # Number of events to skip before starting ddsim

    job.append(ddsim)

    # Register the output on the SE only when an output directory was requested.
    if outputDir != "":
        job.setOutputData([outputFile], OutputPath=outputDir, OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
def create_job(inputData, saveName, outputDir, dontPromptMe):
    """Create and submit a Marlin reconstruction job.

    Removes pre-existing output files on the SE (after prompting unless
    ``dontPromptMe``), configures a UserJob running the FullChain Marlin
    steering and submits it.

    :param inputData: list of input LFNs (with "LFN:" prefix)
    :param saveName: base name for the .slcio / .root output files
    :param outputDir: output path on the storage element
    :param dontPromptMe: when true, skip all confirmation prompts
    :returns: False after submission, or None when the gear file is missing
    """
    slcioFile = saveName + '.slcio'
    rootFile = saveName + '.root'

    # Clear any previous outputs so the new job can register its files.
    if check_file_existence(outputDir, slcioFile, dontPromptMe):
        remove_file(outputDir, slcioFile, dontPromptMe)
    if check_file_existence(outputDir, rootFile, dontPromptMe):
        remove_file(outputDir, rootFile, dontPromptMe)

    dIlc = DiracILC()

    job = UserJob()
    job.setOutputSandbox(['*.out', '*.log', '*.sh', '*.py', '*.xml'])
    if SAVE_SLCIO:
        job.setOutputData([slcioFile, rootFile], OutputPath=outputDir, OutputSE=STORAGE_SE)
    else:
        job.setOutputData(rootFile, OutputPath=outputDir, OutputSE=STORAGE_SE)
    job.setJobGroup('myMarlinRun1')
    job.setName('MyMarlinJob1')
    # job.setBannedSites(['LCG.IN2P3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk'])
    job.setInputSandbox(['LFN:/ilc/user/a/amaier/mylibs.tar.gz'])

    marl = Marlin()
    marl.setVersion('ILCSoft-2017-11-15_gcc62')
    marl.setInputFile(inputData)
    # BUG FIX: the original used x.lstrip('LFN:'), which strips any leading
    # run of the characters 'L', 'F', 'N', ':' rather than the literal
    # "LFN:" prefix and can mangle paths. Remove the prefix explicitly.
    job.setInputData([x[len('LFN:'):] if x.startswith('LFN:') else x for x in inputData])
    # marl.setInputFile(['LFN:/ilc/prod/clic/1.4tev/qq_ln/ILD/DST/00003249/010/qq_ln_dst_3249_10000.slcio'])
    marl.setSteeringFile('marlin/FullChain.xml')
    # marl.setSteeringFile('marlin/FullChainNewDetModel.xml')
    marl.setOutputFile(slcioFile)

    gearFile = '/afs/cern.ch/user/a/amaier/projects/CLIC_analysis/grid/marlin/clic_ild_cdr.gear'
    if not os.path.isfile(gearFile):
        print('Error: gear file', gearFile, 'does not exist! Abort submission.')
        return
    marl.setGearFile(gearFile)
    marl.setExtraCLIArguments(
        "--MyNtupleMaker.OutputFileName={rootOutfile}".format(rootOutfile=rootFile))
    # marl.setNumberOfEvents(1000)

    job.append(marl)
    if dontPromptMe:
        job.dontPromptMe()
    job.submit(dIlc)
    return False
def getJob(jobid, jobpara):
    """Build a UserJob running Marlin reconstruction with aa_lowpt overlay.

    :param jobid: zero-based job index; job id+1 is used in names/files
    :param jobpara: dict of job parameters; keys used here:
        "setting_file", "n_events_per_job", "BXOverlay", "GGToHadInt500"
    :returns: the configured UserJob (not submitted)

    NOTE(review): relies on module-level ``energy`` and ``simlists``
    (and the UserJob/OverlayInput/Marlin imports) defined elsewhere.
    """
    iser = jobid + 1
    # Output file names carry the 1-based, zero-padded job number.
    outdst = "toto-ovl-%5.5i.dst.slcio" % iser
    outrec = "toto-ovl-%5.5i.rec.slcio" % iser

    ###In case one wants a loop: comment the folowing.
    #for i in range(2):
    j = UserJob()
    j.setJobGroup("Tutorial")
    j.setName("MarlinOverlayParametric%i" % iser)
    j.setInputSandbox(jobpara["setting_file"])

    ## Define the overlay
    ov = OverlayInput()
    ov.setMachine("ilc_dbd")
    ov.setEnergy(energy)
    ov.setNumberOfSignalEventsPerJob(int(jobpara["n_events_per_job"]))
    ov.setBXOverlay(int(jobpara["BXOverlay"]))
    ov.setGGToHadInt(float(jobpara["GGToHadInt500"]))
    ov.setBkgEvtType("aa_lowpt")
    # ov.setBackgroundType("aa_lowpt")
    ov.setDetectorModel("ILD_o1_v05")
    res = j.append(ov)
    if not res['OK']:
        print res['Message']
        exit(1)

    ## Define Marlin job
    ma = Marlin()
    ma.setDebug()
    ma.setVersion("ILCSoft-01-17-09")
    ma.setSteeringFile("marlin_ovl_stdreco.xml")
    ma.setGearFile("GearOutput.xml")
    # ma.setInputFile(simfile)
    # Input simulation file is picked from the module-level list by job id.
    ma.setInputFile(simlists[jobid])
    ma.setOutputDstFile(outdst)
    ma.setOutputRecFile(outrec)
    res = j.append(ma)
    if not res['OK']:
        print res['Message']
        exit(1)

    # Register both DST and REC outputs on PNNL-SRM.
    j.setOutputData([outdst, outrec], "myprod2/test", "PNNL-SRM")
    j.setOutputSandbox(["*.log", "*.xml", "*.sh", "TaggingEfficiency.root", "PfoAnalysis.root"])
    j.setCPUTime(10000)
    j.dontPromptMe()
    return j
def subWhizard2():
    """Submit a Whizard2 event-generation job through DIRAC.

    Reads job parameters from the module-level ``_clip`` option holder
    (number of events, output file, output directory, local-mode flag),
    builds a UserJob wrapping a Whizard2 application and submits it.
    """
    from ILCDIRAC.Interfaces.API.DiracILC import DiracILC
    from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
    from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin, Whizard2

    # Decide parameters for a job
    outputSE = "KEK-SRM"

    isLocal = _clip.isLocal
    nbevts = 50 if _clip.numberOfEvents == 0 else _clip.numberOfEvents
    outputFile = "E500.P2f_bB.GWhizard2.I100000.e0.p0.n001.slcio" if _clip.outputFile == "" else _clip.outputFile
    outputDir = _clip.outputDir

    # Create DIRAC objects for job submission
    dIlc = DiracILC()

    job = UserJob()
    job.setInputSandbox(["pythia6-parameters.sin", "P2f_qqbar.sin"])
    job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml'])
    # NOTE(review): setOutputData is called again below with OutputPath/
    # OutputSE when outputDir is set; this unconditional call looks
    # redundant and may be overridden — confirm against the UserJob API.
    job.setOutputData([outputFile])
    job.setJobGroup("mywhiz2")
    job.setName("mywhizard2")

    whiz = Whizard2()
    whiz.setVersion("2.7.0")
    whiz.setNumberOfEvents(nbevts)
    whiz.setEvtType("P2f_bB")
    whiz.setProcessVariables("P2f_bB")
    # whiz.setRandomSeed(15)
    whiz.setSinFile("P2f_qqbar.sin")
    whiz.setOutputFile(outputFile)
    job.append(whiz)

    if outputDir != "":
        print " outputDir = " + outputDir
        print " outputSE = " + outputSE
        job.setOutputData([outputFile], OutputPath=outputDir, OutputSE=outputSE)

    if isLocal:
        job.submit(dIlc, mode="local")
    else:
        job.submit(dIlc)
# Tutorial script: run a single Marlin reconstruction job on the grid.
from ILCDIRAC.Interfaces.API.NewInterface.UserJob import UserJob
from ILCDIRAC.Interfaces.API.NewInterface.Applications import Marlin
from ILCDIRAC.Interfaces.API.DiracILC import DiracILC

# DiracILC(True, "repo.rep") keeps a local repository file of submitted jobs.
d = DiracILC(True, "repo.rep")

###In case one wants a loop: comment the folowing.
#for i in range(2):
j = UserJob()
j.setJobGroup("Tutorial")
j.setName("example")#%i)

# Marlin reconstruction on a CLIC ILD simulation file.
ma = Marlin()
ma.setVersion("v0111Prod")
ma.setSteeringFile("clic_ild_cdr_steering.xml")
ma.setGearFile("clic_ild_cdr.gear")
ma.setInputFile("LFN:/ilc/prod/clic/3tev/gghad/ILD/SIM/00000187/000/gghad_sim_187_97.slcio")

outdst = "toto.dst.slcio" #% i
outrec = "toto.rec.slcio" #% i
ma.setOutputDstFile(outdst)
ma.setOutputRecFile(outrec)

res = j.append(ma)
if not res['OK']:
    print res['Message']
    exit(1)

# Register DST and REC outputs on KEK-SRM under "some/path".
j.setOutputData([outdst, outrec], "some/path", "KEK-SRM")
j.setOutputSandbox("*.log")
j.dontPromptMe()
j.submit(d)
###In case one wants a loop: comment the folowing. #for i in range(2): j = UserJob() j.setJobGroup("Tutorial") j.setName("MarlinExample")#%i) ma = Marlin() ma.setDebug() # ma.setLogLevel("verbose") # ma.setILDConfig("v01-16-p05_500") ma.setVersion("v01-16-02") ma.setSteeringFile("marlin_stdreco.xml") ma.setGearFile(gearfile) ma.setInputFile([simfile, pandoraLikelihoodData, bg_aver]) ma.setOutputDstFile(outdst) ma.setOutputRecFile(outrec) res = j.append(ma) if not res['OK']: print res['Message'] exit(1) j.setOutputData(["myprod2/test/dst/"+outdst,"myprod2/test/rec/"+outrec],OutputSE="PNNL-SRM") j.setOutputSandbox(["*.log","*.xml","*.sh"]) j.dontPromptMe() j.submit(d)
# Fill the Marlin steering template: substitute this job's ROOT file name
# for the template placeholder.
# BUG FIX: the original used open(...).write(...) without closing the
# handle; a context manager guarantees the file is flushed and closed.
with open(nameSteeringMarlin, "w") as steeringOut:
    steeringOut.write(f.read().replace(templateOutRoot, rootFile))
#
#####################################################################
#####################################################################
# job definition
job = UserJob()  # use UserJob unless recommended differently
job.setName(nameJob)
job.setJobGroup(nameJobGroup)
job.setCPUTime(86400)
# Sites excluded from submission (historically unreliable for these jobs).
job.setBannedSites(['LCG.UKI-LT2-IC-HEP.uk','LCG.KEK.jp','LCG.IN2P3-CC.fr','LCG.Tau.il','Weizmann.il','LCG.Weizmann.il','OSG.MIT.us','OSG.FNAL_FERMIGRID.us','OSG.GridUNESP_CENTRAL.br','OSG.SPRACE.br'])
# Ship the generated steering file plus the pre-built libraries/settings.
job.setInputSandbox([nameSteeringMarlin,'LFN:/ilc/user/o/oviazlo/FCCee_o5/ilcsoft_2017-06-21/lcgeo_28_06_2017_v3.tgz',detectorModel,'LFN:/ilc/user/o/oviazlo/PandoraSettings.tar.gz','LFN:/ilc/user/o/oviazlo/FCCee_o5/marlin_lib_simHits_v3.tgz'])
job.setOutputSandbox(["*.log"])  # files that should be brought back when retrieving the job outputs
job.setOutputData([rootFile], nameDir, "CERN-DST-EOS")
#####################################################################
#####################################################################
# ddsim
ddsim = DDSim()
ddsim.setVersion(ddsimVersion)
ddsim.setDetectorModel(detectorModel)
ddsim.setOutputFile(outputFile)
ddsim.setSteeringFile("/afs/cern.ch/work/v/viazlo/gridSubmission/FCCee_o5/files/clic_steer.py")
ddsim.setNumberOfEvents(nEvts)
res = job.append(ddsim)
# NOTE(review): fragment — the opening of the setInputSandbox([ ... ] call
# whose tail these two LFNs close lies outside this excerpt.
"LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/clic_ild_cdr.gear",
"LFN:/ilc/user/k/kacarevic/hgamgam/PandoraPFA/MarlinRecoRootFiles/lib.tar.gz"
])
# One input file per job, indexed by the loop variable ``i``.
job.setInputData("LFN:/ilc/user/k/kacarevic/hgamgam/Marlin/newPandora/aa/aa_%d.slcio" % i)
# NOTE(review): the trailing space in "*.steer " looks unintentional —
# the pattern would not match *.steer files; confirm and fix upstream.
job.setOutputSandbox(["*.log", "*.sh", "*.py", "*.out", "*.xml", "*.steer "])
job.setJobGroup("myRoot")
job.setName("root_aa_%d" % i)

marl = Marlin()
marl.setVersion('ILCSoft-2016-09-27_gcc48')
marl.setInputFile([
    "LFN:/ilc/user/k/kacarevic/hgamgam/Marlin/newPandora/aa/aa_%d.slcio" % i
])
# marl.setNumberOfEvents(10)
marl.setSteeringFile("steering.xml")
marl.setGearFile("clic_ild_cdr.gear")
# The processor writes its ntuple to the per-job ROOT file name.
marl.setExtraCLIArguments("--MyProcessor.RootFilename=%s" % lcoutput)

res = job.append(marl)
if not res['OK']:
    print res['Message']
    quit()  #do something, like quit

job.setOutputData(lcoutput, "hgamgam/Marlin/rootFiles/aa/workingVersion", "CERN-SRM")
print lcoutput
job.dontPromptMe()
print job.submit(dIlc)
# Small pause between submissions to avoid hammering the service.
time.sleep(0.5)
def all_jobs(name): d = DiracILC(True, "repo.rep") ################################################ j = UserJob() j.setJobGroup("PM1") j.setName("Exec1") banned_sites = [ "OSG.BNL.us", "LCG.UKI-NORTHGRID-LIV-HEP.uk", "OSG.UCSDT2.us", "LCG.SCOTGRIDDURHAM.uk", "LCG.NIKHEF.nl", "LCG.UKI-SOUTHGRID-RALPP.uk", "LCG.GRIF.fr", "LCG.Manchester.uk", "LCG.UKI-LT2-IC-HEP.uk", "LCG.Weizmann.il" ] j.setBannedSites(banned_sites) caindir = name #print('Cain directory is ',caindir) indata = [ 'LFN:/ilc/user/a/amustahid/cain.exe', str(caindir), 'LFN:/ilc/user/a/amustahid/runcain.sh', 'LFN:/ilc/user/a/amustahid/convert_pairs_lcio.py', 'LFN:/ilc/user/a/amustahid/pyLCIO.tar.gz', '/home/belle2/mustahid/useful/my.sh', './splitInput.py', './subddsim.py', './ddsim_steer_July26.py', './ILD_l5_v05.xml', './my2.sh', './dbd_500GeV.nung_1.xml', 'LFN:/ilc/user/a/amustahid/myProcessors.tar.gz', './create_dir.py', './conf.py', './util.py', './testcain.sh', './beam_250.i' ] j.setInputSandbox(indata) ################################################ #app = GenericApplication() #app.setScript("create_dir.py") #app.setInputFile("testcain.sh") #logf = 'create_dir.log' #app.setLogFile(logf) #app.setDebug(debug=True) #create_dirname = 'create_dir' #app.setName(create_dirname) #res=j.append(app) #if not res['OK']: # print res['Message'] # exit(1) ################################################ appre = GenericApplication() name = name.split('/') #print(name) cain_name = name[-1] subdir = name[-2] dirname = name[-3] #print('Cain file name ', cain_name) appre.setScript("LFN:/ilc/user/a/amustahid/runcain.sh") #appre.setScript("testcain.sh") ifile = cain_name.split('.') ifile = ifile[0] + '.' + ifile[1] + '.' 
+ ifile[2] #print('ifile ',ifile) appre.setArguments(ifile) #direc = 'LFN:/ilc/user/a/amustahid/' #appre.setInputFile(ifile+".i") #appre.setArguments("This is input arguments") logf = ifile + '_' + subdir + '.log' appre.setLogFile(logf) appre.setDebug(debug=True) name = 'CAIN' appre.setName(name) res = j.append(appre) if not res['OK']: print res['Message'] exit(1) ################################################ ################################################ #appost = GenericApplication() #appost.setScript("myanal.sh") #appost.setArguments("This is my analysis step") #res=j.append(appost) #if not res['OK']: # print res['Message'] # exit(1) ap = GenericApplication() ap.setScript('my.sh') logf = 'my.log' ap.setLogFile(logf) ap.setDebug(debug=True) name = 'my' ap.setName(name) res = j.append(ap) if not res['OK']: print res['Message'] exit(1) outfile = 'incoherent_pair.dat' appre.setOutputFile(outfile) ################################################ direc = 'incoherent_pair' inputFile = direc + '/' + 'inco_pair_split.slcio' # global variables to hold command line parameters # ###################################### base = '.' #outdir=base+'/'+dirname+'/slcio_test_2ndrun' outdir = base + '/' + dirname + '/Run_7' #print('outdir'+' '+str(outdir)) geant_name = ifile outputFile = geant_name + '_' + subdir + '.slcio' #_clip = _Params(False,1,inputFile,outputFile,outdir) nbevents = 100 clip = _Params(nbevents, inputFile, outputFile, outdir) ddsim = subDDSim(clip) ################################################ res = j.append(ddsim) if not res['OK']: print res['Message'] exit(1) j.setOutputData(outputFile, outdir, "KEK-SRM") j.setOutputSandbox(["*.log", "*.dat", "*.slcio"]) j.dontPromptMe() res = j.submit(d) #res = j.submit(d, mode='local') if res['OK']: print str(res["Value"]) #print "Dirac job, "+str(res["Value"])+", was submitted." else: print "Failed to submit Dirac job. return message was as follows." pprint.pprint(res)
outpath="MyProd_" + ILDConfigVer + "/E250-TDR_ws/" + chann + "/" +ireq+ "/mrg" jobname="m" + idin + "_" + str(mrgix1) print jobname job = UserJob() job.setName(jobname) job.setJobGroup(jobGrName) job.setILDConfig(ILDConfigVer) job.setCPUTime(6400) job.setInputSandbox(["runDSTmerge_Tmp.py"]) job.setOutputSandbox( ["*.log","*.sh","*.py"] ) job.setInputData(mergeList) #job.setInputData(mergeList[0]) #job.setOutputData( lcoutputMRG, outpath, "CERN-SRM" ) job.setOutputData( lcoutputMRG, outpath, "IN2P3-SRM" ) job.dontPromptMe() slcioconcat = SLCIOConcatenate() slcioconcat.setInputFile(mergeList) #slcioconcat.setInputFile(mergeList[0]) slcioconcat.setNumberOfEvents(evtsPerMrg) slcioconcat.setOutputFile( lcoutputMRG, outpath) slcioconcat.setLogFile("merge.log") mergejob = job.append(slcioconcat) if not mergejob['OK']: print mergejob['Not ok appending slcioconcat to job'] quit() print job.submit(dirac)
def main():
    """Drive the SiD simulation/reconstruction chain on the grid.

    Parses command-line arguments, then for each slice of ``args.events``
    (of size ``args.split``) builds a UserJob chaining, per ``args.chain``:
    1=SLIC, 2=overlay, 3=lcsim digi/tracking, 4=slicPandora,
    5=Marlin vertexing + lcsim DST, 6=Marlin flavortag; registers the
    output matching the highest requested stage and submits the job.

    NOTE(review): indentation of the nested stages was reconstructed from
    a whitespace-mangled source — verify against the original file.
    """
    # Take the input arguments from the argument parser, and check they exist...
    args = parse_args()
    if not args:
        print 'Invalid Arguments'
        sys.exit(1)

    print args.chain[0]

    # softVersions = ["v3r0p3", "3.0-SNAPSHOT", "ILC_DBD", "0116"]
    softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "0116"]  # Working (recommended)
    # softVersions = ["v3r0p3", "2.5", "ILC_DBD", "0116"] # Working
    # softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "ILCSoft-01-17-07"] # Working
    # softVersions = ["v3r0p3", "HEAD", "ILCSoft-01-17-08", "0116"]

    check_events_arguments(args.events, args.split)

    detector = args.detector
    alias_properties(detector)

    outputPath, outputBase, repoName = input_output(args.Input, detector, args.chain, args.digiSteering)
    inputSandbox, outputSandbox = setup_sandboxes(args.macFile)

    dirac = DiracILC(True, repoName)

    # Prepares values for the job loop...
    # NOTE(review): args.split == 0 leaves nInputEvents/nOutputEvents
    # undefined and would raise NameError below — presumably rejected by
    # check_events_arguments; confirm.
    if args.split < 0:
        nInputEvents = int(args.events)
        nOutputEvents = int(args.events)
    if args.split > 0:
        nInputEvents = int(args.events)
        nOutputEvents = int(args.split)

    # Loop that runs through the required number of jobs to be executed...
    for startEvent in range(0, nInputEvents, nOutputEvents):
        ################## Job Initialise ########################################
        job = UserJob()
        job.setName(outputBase)
        job.setJobGroup('JobGroup')
        job.setInputSandbox(inputSandbox)
        # Sequential file index (Python 2 integer division).
        fileNumber = startEvent/nOutputEvents
        print "Job ---> ", fileNumber
        ################## SLIC ##################################################
        if 1 in args.chain:
            slic = SLIC()
            slic.setVersion(softVersions[0])
            slic.setSteeringFile(args.macFile)
            # slic.setInputFile(lfn)
            slic.setOutputFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_sim.slcio'))
            slic.setDetectorModel(detector)
            slic.setNumberOfEvents(nOutputEvents)
            slic.setStartFrom(startEvent)
            #print slic.listAttributes()
            result = job.append(slic)
            if not result['OK']:
                print result['Message']
                sys.exit(2)
        ################## Overlay ###############################################
        if 2 in args.chain:
            '''
            #Add the gghad background overlay.
            gghad = OverlayInput()
            #gghad.setProdID(1767)
            gghad.setEnergy(500.0)
            gghad.setBXOverlay('args.bunches')
            gghad.setGGToHadInt( 4.1 )
            gghad.setNbSigEvtsPerJob(nOutputEvents)
            gghad.setMachine('ilc_dbd')
            gghad.setDetectorModel('sidloi3')
            gghad.setBkgEvtType('aa_lowpt')
            result = job.append( gghad )
            if not result['OK']:
                print result['Message']
                sys.exit(2)
            #Add the pair background overlay.
            pairs = OverlayInput()
            pairs.setProdID(2)
            pairs.setEnergy(500.0)
            pairs.setBXOverlay('args.bunches')
            pairs.setGGToHadInt(1.)
            pairs.setNbSigEvtsPerJob(nOutputEvents)
            pairs.setMachine('ilc_dbd')
            pairs.setDetectorModel('sidloi3')
            pairs.setBkgEvtType('eepairs')
            result = job.append( pairs )
            if not result['OK']:
                print result['Message']
                sys.exit(2)
            '''
            # gamma-gamma -> hadrons background overlay from user files.
            gghad = OverlayInput()
            gghad.setPathToFiles('/ilc/user/j/jstrube/gghadron_lowpt/sidloi3/')
            gghad.setBXOverlay(int(args.bunches))
            gghad.setGGToHadInt( 4.1 )
            gghad.setNbSigEvtsPerJob(nOutputEvents)
            gghad.setBkgEvtType('aa_lowpt')
            result = job.append( gghad )
            if not result['OK']:
                print result['Message']
                sys.exit(2)
            '''
            pairs = OverlayInput()
            pairs.setPathToFiles('/ilc/user/j/jstrube/GuineaPig/sidloi3/')
            pairs.setBXOverlay(int(args.bunches))
            pairs.setGGToHadInt(1.)
            pairs.setBkgEvtType('eepairs')
            pairs.setNbSigEvtsPerJob(nOutputEvents)
            result = job.append( pairs )
            if not result['OK']:
                print result['Message']
                sys.exit(2)
            '''
        ################## lcsim (digitization and tracking) #####################
        if 3 in args.chain:
            lcsim = LCSIM()
            lcsim.setVersion(softVersions[1])
            lcsim.setSteeringFile(args.digiSteering)  # Another version is included in /steeringFiles
            if 1 in args.chain:
                lcsim.getInputFromApp(slic)
            lcsim.setTrackingStrategy('steeringFiles/sidloi3_trackingStrategies_default.xml')
            lcsim.setAliasProperties('steeringFiles/alias.properties')
            lcsim.setDetectorModel(detector+".zip")
            #lcsim.setOutputFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_digiTracking.slcio'))
            lcsim.setOutputDstFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_DST.slcio'))  #NEED TO CHANGE!!!
            lcsim.setNumberOfEvents(nOutputEvents)
            #print lcsim.listAttributes()
            result = job.append(lcsim)
            if not result['OK']:
                print result['Message']
                sys.exit(2)
        ################## slicPandora ###########################################
        if 4 in args.chain:
            slicPandora = SLICPandora()
            slicPandora.setVersion(softVersions[2])
            slicPandora.setDetectorModel(detector)
            slicPandora.getInputFromApp(lcsim)
            slicPandora.setOutputFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_pandora.slcio'))
            slicPandora.setPandoraSettings('pandoraSettings.xml')
            slicPandora.setNumberOfEvents(nOutputEvents)
            #print slicPandora.listAttributes()
            result = job.append(slicPandora)
            if not result['OK']:
                print result['Message']
                sys.exit(2)
        ################## Marlin, LCFIPlus Vertexing ############################
        if 5 in args.chain:
            vertexing = Marlin()
            vertexing.setVersion(softVersions[3])
            vertexing.setSteeringFile('steeringFiles/sid_dbd_vertexing.xml')
            vertexing.setGearFile('steeringFiles/' + detector + '.gear')
            vertexing.getInputFromApp(slicPandora)
            vertexing.setOutputFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_vertexing.slcio'))
            vertexing.setNumberOfEvents(nOutputEvents)
            #print vertexing.listAttributes()
            result = job.append(vertexing)
            if not result['OK']:
                print result['Message']
                sys.exit(2)
            ################## lcsim (DST production) ################################
            lcsimDst = LCSIM()
            lcsimDst.setVersion(softVersions[1])
            lcsimDst.setSteeringFile('steeringFiles/sid_dbd_postPandora.xml')
            lcsimDst.getInputFromApp(vertexing)
            lcsimDst.setNumberOfEvents(nOutputEvents)
            lcsimDst.setAliasProperties('steeringFiles/alias.properties')
            lcsimDst.setDetectorModel(detector+".zip")
            lcsimDst.setOutputRecFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_Rec.slcio'))
            lcsimDst.setOutputDstFile(outputBase.replace('.slcio', '_' + str(fileNumber) + '_DST.slcio'))
            #print lcsimDst.listAttributes()
            result = job.append(lcsimDst)
            if not result['OK']:
                print result['Message']
                sys.exit(2)
        ################## Marlin, LCFIPlus flavortag ############################
        if 6 in args.chain:
            flavortag = Marlin()
            flavortag.setVersion(softVersions[3])
            flavortag.setSteeringFile('steeringFiles/sid_dbd_flavortag.xml')
            flavortag.setGearFile('steeringFiles/' + detector + '.gear')
            # NOTE(review): ``lcsimDstOutput`` is not defined anywhere in
            # this excerpt — this line would raise NameError; confirm the
            # intended source (probably lcsimDst's DST file).
            flavortag.setInputFile(lcsimDstOutput)
            # NOTE(review): the '_' + '_flavortag' concatenation omits
            # fileNumber (double underscore) — looks like a bug; confirm.
            flavortag.setOutputFile(outputBase.replace('.slcio', '_' + '_flavortag.slcio'))
            flavortag.setNumberOfEvents(nOutputEvents)
            #print flavortag.listAttributes()
            result = job.append(flavortag)
            if not result['OK']:
                print result['Message']
                sys.exit(2)
        ################## Job Finalise ##########################################
        # List of banned sites that the job shall not be sent too. These are sites that jobs tend to fail on,
        # This list is likely to change.
        job.setBannedSites(['LCG.IN2P3-CC.fr', 'LCG.RAL-LCG2.uk', 'LCG.DESY-HH.de', 'LCG.DESYZN.de', 'LCG.KEK.jp', 'OSG.PNNL.us','OSG.UConn.us','OSG.GridUNESP_CENTRAL.br','LCG.SCOTGRIDDURHAM.uk', 'LCG.TECHNIONself.il','LCG.UKI-SOUTHGRID-RALPP.uk','OSG.FNAL_FERMIGRID.us','LCG.UKI-LT2-IC-HEP.uk'])
        job.setCPUTime(50000)
        job.setPlatform('x86_64-slc5-gcc43-opt')
        # Sets the output data file according to if -f is selcted, ships ouput to your /ilc/user/a/aPerson/
        # directory on the grid.
        outputLevel = max(args.chain)
        if outputLevel == 1:
            job.setOutputData(outputBase.replace('.slcio', '_' + str(fileNumber) + '_sim.slcio'), outputPath, 'CERN-SRM')
        if outputLevel == 3:
            #job.setOutputData(outputBase.replace('.slcio', '_' + str(fileNumber) + '_digiTracking.slcio'), outputPath, 'CERN-SRM')
            job.setOutputData(outputBase.replace('.slcio', '_' + str(fileNumber) + '_DST.slcio'), outputPath, 'CERN-SRM')
        if outputLevel == 4:
            job.setOutputData(outputBase.replace('.slcio', '_' + str(fileNumber) + '_pandora.slcio'), outputPath, 'CERN-SRM')
        if outputLevel == 5:
            job.setOutputData(outputBase.replace('.slcio', '_' + str(fileNumber) + '_DST.slcio'), outputPath, 'CERN-SRM')
        if outputLevel == 6:
            job.setOutputData(outputBase.replace('.slcio', '_' + str(fileNumber) + '_flavortag.slcio'), outputPath, 'CERN-SRM')
        job.setOutputSandbox(outputSandbox)
        job.setInputData(args.Input)
        if args.dontPromptMe:
            job.dontPromptMe()
        # Submits Job!!!
        job.submit()
    return 0;
RECoutput.append(lcoutputREC) job = UserJob() job.setName(jobname) job.setJobGroup(jobGrName) job.setILDConfig(ILDConfigVer) job.setCPUTime(86400) job.setInputData([lcinputREC]) job.setInputSandbox(["runRecoSplit_all_Tmp.py"]) job.setOutputSandbox(["*.log","*.sh","MarlinStdRecoParsed.xml","marlin*.xml","*.py "]) #job.setOutputSandbox(["*.log","*.sh","MarlinStdRecoParsed.xml","marlin*.xml","*.py ","*.root"]) #job.setDestinationCE('lyogrid07.in2p3.fr') job.dontPromptMe() job.setBannedSites(['LCG.QMUL.uk']) #job.setBannedSites(['LCG.IN2P3-CC.fr','LCG.DESYZN.de','LCG.DESY-HH.de','LCG.KEK.jp','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.RAL-LCG2.uk','LCG.Oxford.uk','OSG.UCSDT2.us']) # run Malrin reco jobs mares = job.append(ma) if not mares['OK']: print mares['Not ok appending Marlin to job'] quit() #job.setOutputData( RECoutput,"MyProd_" + ILDConfigVer + "/E250-TDR_ws/" + chann + "/" +ireq+ "/rec","CERN-SRM") job.setOutputData( RECoutput,"MyProd_" + ILDConfigVer + "/E250-TDR_ws/" + chann + "/" +ireq+ "/rec","IN2P3-SRM") print RECoutput #print job.submit(dirac)
class UserJobTestCase(unittest.TestCase):
  """Base class for the UserJob test cases.

  Each test builds a UserJob with getProxyInfo patched out, then exercises
  submission, sandbox/data setters, and the job-splitting machinery against
  mocked DIRAC return structures (S_OK / S_ERROR).
  """

  def setUp(self):
    """Set up the objects."""
    self.log_mock = Mock(name="SubMock")
    with patch('%s.getProxyInfo' % MODULE_NAME, new=Mock(return_value=None)):
      self.ujo = UserJob()

  def test_submit_noproxy( self ):
    """Submission must fail outright when no proxy info is available."""
    self.ujo.proxyinfo = S_ERROR()
    assertDiracFailsWith( self.ujo.submit(),
                          "Not allowed to submit a job, you need a ['ilc_user', 'calice_user'] proxy", self )

  def test_submit_wrongproxygroup( self ):
    """Submission must fail when the proxy group is not in the allowed list."""
    self.ujo.proxyinfo = S_OK( { 'group' : 'my_test_group.notInallowed_list' } )
    assertDiracFailsWith( self.ujo.submit(),
                          "Not allowed to submit job, you need a ['ilc_user', 'calice_user'] proxy", self )

  def test_submit_noproxygroup( self ):
    """Submission must fail when the proxy info carries no 'group' key at all."""
    self.ujo.proxyinfo = S_OK( { 'some_key' : 'Value', True : 1, False : [], 135 : {} } )
    assertDiracFailsWith( self.ujo.submit(), 'Could not determine group, you do not have the right proxy', self )

  def test_submit_addtoworkflow_fails( self ):
    """A failing _addToWorkflow aborts submission (ilc_user proxy)."""
    self.ujo.proxyinfo = S_OK( { 'group' : 'ilc_user' } )
    with patch('%s.UserJob._addToWorkflow' % MODULE_NAME, new=Mock(return_value=S_ERROR('workflow_testadd_error'))):
      assertDiracFailsWith( self.ujo.submit(), 'workflow_testadd_error', self )

  def test_submit_addtoworkflow_fails_2( self ):
    """A failing _addToWorkflow aborts submission (calice_user proxy)."""
    self.ujo.proxyinfo = S_OK( { 'group' : 'calice_user' } )
    with patch('%s.UserJob._addToWorkflow' % MODULE_NAME, new=Mock(return_value=S_ERROR('err_workflow_testadd'))):
      assertDiracFailsWith( self.ujo.submit(), 'err_workflow_testadd', self )

  def test_submit_createnew_dirac_instance( self ):
    """When no dirac instance is passed, submit() creates its own DiracILC."""
    ilc_mock = Mock()
    ilc_mock().submitJob.return_value = S_OK('test_submission_successful')
    self.ujo.proxyinfo = S_OK( { 'group' : 'ilc_user' } )
    with patch('%s.UserJob._addToWorkflow' % MODULE_NAME, new=Mock(return_value=S_OK())), \
         patch('%s.DiracILC' % MODULE_NAME, new=ilc_mock):
      assertDiracSucceedsWith_equals( self.ujo.submit(), 'test_submission_successful', self )
    ilc_mock().submitJob.assert_called_once_with(self.ujo, 'wms')
    assert self.ujo.oktosubmit

  def test_submit_existing_dirac_instance(self):
    """Test submit with dirac instance."""
    ilc_mock = Mock()
    ilc_mock.submitJob.return_value = S_OK('test_submission_successful')
    self.ujo.proxyinfo = S_OK({'group': 'ilc_user'})
    with patch('%s.UserJob._addToWorkflow' % MODULE_NAME, new=Mock(return_value=S_OK())):
      assertDiracSucceedsWith_equals(self.ujo.submit(diracinstance=ilc_mock), 'test_submission_successful', self)
    ilc_mock.submitJob.assert_called_once_with(self.ujo, 'wms')
    assert self.ujo.oktosubmit

  def test_setinputdata_failed( self ):
    """setInputData rejects anything that is not a string or list of LFNs."""
    assertDiracFailsWith( self.ujo.setInputData( { '/mylfn1' : True, '/mylfn2' : False } ),
                          'expected lfn string or list of lfns for input data', self )

  def test_setinputdata(self):
    """Test setting input data."""
    # 'LFN:' prefixes are stripped before the value is stored in the workflow.
    assertDiracSucceeds(self.ujo.setInputData(['LFN:/mylfn1', 'LFN:/mylfn2']), self)
    self.assertEqual(self.ujo.workflow.parameters.find('InputData').getValue(), '/mylfn1;/mylfn2')
    assertDiracSucceeds(self.ujo.setInputData('/mylfn1'), self)
    self.assertEqual(self.ujo.workflow.parameters.find('InputData').getValue(), '/mylfn1')

  def test_inputsandbox( self ):
    """setInputSandbox extends the sandbox list with the given file."""
    self.ujo.inputsandbox = Mock()
    assertDiracSucceeds( self.ujo.setInputSandbox( 'LFN:/ilc/user/u/username/libraries.tar.gz' ), self )
    self.ujo.inputsandbox.extend.assert_called_once_with( [ 'LFN:/ilc/user/u/username/libraries.tar.gz' ] )

  def test_inputsandbox_dictpassed( self ):
    """setInputSandbox rejects a dict argument."""
    assertDiracFailsWith( self.ujo.setInputSandbox( { '/some/file' : True, '/my/dict' : True } ),
                          'File passed must be either single file or list of files', self )

  def test_setOutputData(self):
    """Test setting output data."""
    assertDiracSucceeds(self.ujo.setOutputData(['/myFile1', '/myFile2']), self)
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputData').getValue(), '/myFile1;/myFile2')
    assertDiracSucceeds(self.ujo.setOutputData('/myFile2'), self)
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputData').getValue(), '/myFile2')
    # OutputSE may be a single string ...
    assertDiracSucceeds(self.ujo.setOutputData('/myFile2', OutputSE="MY-SE"), self)
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputData').getValue(), '/myFile2')
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputSE').getValue(), 'MY-SE')
    # ... or a list of SEs, joined with ';' in the workflow parameter.
    assertDiracSucceeds(self.ujo.setOutputData('/myFile2', OutputSE=["MY-SE", 'YOUR-SE']), self)
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputData').getValue(), '/myFile2')
    self.assertEqual(self.ujo.workflow.parameters.find('UserOutputSE').getValue(), 'MY-SE;YOUR-SE')

  def test_setoutputdata_dictpassed( self ):
    """setOutputData rejects a dict argument."""
    assertDiracFailsWith( self.ujo.setOutputData( { '/mydict' : True } ),
                          'Expected file name string or list of file names for output data', self )

  def test_setoutputdata_nolistse( self ):
    """OutputSE must be a string or a list; the data parameter is still set first."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith(self.ujo.setOutputData(['mylfn1', 'other_lfn', 'last___lfn'], OutputSE={'mydict': True}),
                           'Expected string or list for OutputSE', self)
      addparam_mock.assert_called_once_with(wf_mock, 'UserOutputData', 'JDL',
                                            'mylfn1;other_lfn;last___lfn', 'List of output data files')

  def test_setoutputdata_outputpath_nostring( self ):
    """OutputPath must be a string; the data parameter is still set first."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith( self.ujo.setOutputData( [ 'mylfn1', 'other_lfn', 'last___lfn' ],
                                                    OutputPath = { 'mydict' : True } ),
                            'Expected string for OutputPath', self )
      addparam_mock.assert_called_once_with( wf_mock, 'UserOutputData', 'JDL',
                                             'mylfn1;other_lfn;last___lfn', 'List of output data files' )

  def test_setoutputdata_invalid_outputpath_1( self ):
    """OutputPath must not start with /ilc/user/ (it is appended automatically)."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith( self.ujo.setOutputData( [ 'mylfn1', 'other_lfn', 'last___lfn' ],
                                                    OutputPath = '//ilc/user/somedir/output.xml' ),
                            'Output path contains /ilc/user/ which is not what you want', self )
      addparam_mock.assert_called_once_with( wf_mock, 'UserOutputData', 'JDL',
                                             'mylfn1;other_lfn;last___lfn', 'List of output data files' )

  def test_setoutputdata_invalid_outputpath_2( self ):
    """OutputPath must not contain /ilc/user/ anywhere in the path."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith( self.ujo.setOutputData( [ 'mylfn1', 'other_lfn', 'last___lfn' ],
                                                    OutputPath = '//some/dir/ilc/user/somedir/output.xml' ),
                            'Output path contains /ilc/user/ which is not what you want', self )
      addparam_mock.assert_called_once_with( wf_mock, 'UserOutputData', 'JDL',
                                             'mylfn1;other_lfn;last___lfn', 'List of output data files' )

  def test_setoutputdata( self ):
    """A valid OutputPath is stored with leading slashes stripped."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracSucceeds( self.ujo.setOutputData( [ 'mylfn1', 'other_lfn', 'last___lfn' ],
                                                   OutputPath = '//some/dir/somedir/output.xml' ), self )
      assertMockCalls( addparam_mock,
                       [ ( wf_mock, 'UserOutputData', 'JDL', 'mylfn1;other_lfn;last___lfn',
                           'List of output data files' ),
                         ( wf_mock, 'UserOutputPath', 'JDL', 'some/dir/somedir/output.xml',
                           'User specified Output Path' ) ], self )

  def test_setoutputsandbox( self ):
    """setOutputSandbox stores a single file in the OutputSandbox parameter."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracSucceeds( self.ujo.setOutputSandbox( '/my/dir/myfile.txt' ), self )
      addparam_mock.assert_called_once_with( wf_mock, 'OutputSandbox', 'JDL',
                                             '/my/dir/myfile.txt', 'Output sandbox file' )

  def test_setoutputsandbox_successes(self):
    """Test setting output sandbox."""
    assertDiracSucceeds(self.ujo.setOutputSandbox(['myfile.txt', 'myfile.doc']), self)
    self.assertEqual(self.ujo.workflow.parameters.find('OutputSandbox').getValue(), 'myfile.txt;myfile.doc')

  def test_setoutputsandbox_dictpassed( self ):
    """setOutputSandbox rejects a dict and does not touch the workflow."""
    wf_mock = Mock()
    self.ujo.workflow = wf_mock
    with patch('%s.UserJob._addParameter' % MODULE_NAME, new=Mock()) as addparam_mock:
      assertDiracFailsWith( self.ujo.setOutputSandbox( { 'mydict' : True } ),
                            'Expected file string or list of files for output sandbox contents', self )
      self.assertFalse( addparam_mock.called )

  def test_configs(self):
    """Test setting different config packages."""
    assertDiracSucceeds(self.ujo.setILDConfig('123.4'), self)
    self.assertEqual(self.ujo.workflow.parameters.find('ILDConfigPackage').getValue(), 'ILDConfig123.4')
    assertDiracSucceeds(self.ujo.setCLICConfig('567.8'), self)
    self.assertEqual(self.ujo.workflow.parameters.find('ClicConfigPackage').getValue(), 'ClicConfig567.8')
    # Both packages must be registered in the software-packages parameter.
    self.assertIn('ildconfig', self.ujo.workflow.parameters.find('SoftwarePackages').getValue())
    self.assertIn('clicconfig', self.ujo.workflow.parameters.find('SoftwarePackages').getValue())

  def test_submit_split(self):
    """Test submitting with automatic splitting."""
    self.ujo._splittingOption = True
    self.ujo._split = Mock(return_value=S_OK())
    self.ujo.proxyinfo = S_OK({'group': 'ilc_user'})
    ilc_mock = Mock()
    ilc_mock.submitJob.return_value = S_OK('test_submission_successful')
    assertDiracSucceeds(self.ujo.submit(diracinstance=ilc_mock), self)
    ilc_mock.submitJob.assert_called_once_with(self.ujo, 'wms')
    # A failing _split must abort submission with the split error.
    self.ujo._splittingOption = True
    self.ujo._split = Mock(return_value=S_ERROR("Splitting InValid"))
    assertDiracFailsWith(self.ujo.submit(), "Splitting InValid", self)

  @patch("%s._checkSplitConsistency" % MIXIN_CLASS, new=Mock(return_value=S_OK()))
  def test_split_bydata(self):
    """Test splitting by data."""
    self.ujo._eventsPerJob = "1"
    self.ujo._numberOfJobs = "1"
    self.ujo._splittingOption = "byData"
    self.ujo._switch['byData'] = Mock(return_value=[("InputData", ["/ilc/user/u/username/data1"], True)])
    with patch("%s.UserJob.setParameterSequence" % MODULE_NAME) as mock_parametric, \
         patch('%s.LOG' % MIXIN_MODULE, new=self.log_mock):
      info_message = "Job splitting successful"
      assertDiracSucceeds(self.ujo._split(), self)
      self.log_mock.notice.assert_called_with(info_message)
      mock_parametric.assert_any_call("InputData", ["/ilc/user/u/username/data1"], True)

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=1))
  @patch("%s.UserJob._checkSplitConsistency" % MODULE_NAME, new=Mock(return_value=S_OK()))
  def test_split_byevents(self):
    """Test splitting by events."""
    self.ujo._splittingOption = "byEvents"
    self.ujo._switch['byEvents'] = Mock(return_value=[('NumberOfEvents', [1, 2], 'NbOfEvts')])
    with patch("%s.UserJob.setParameterSequence" % MODULE_NAME) as mock_parametric, \
         patch('%s.LOG' % MIXIN_MODULE, new=self.log_mock):
      info_message = "Job splitting successful"
      assertDiracSucceeds(self.ujo._split(), self)
      self.log_mock.notice.assert_called_with(info_message)
      mock_parametric.assert_any_call('NumberOfEvents', [1, 2], 'NbOfEvts')

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=1))
  @patch("%s.UserJob._checkSplitConsistency" % MODULE_NAME, new=Mock(return_value=S_OK()))
  def test_split_atomicsubmission(self):
    """Test splitting atomic."""
    # No splitting option set: _split still succeeds (single atomic job).
    self.ujo._splittingOption = None
    info_message = "Job splitting successful"
    with patch('%s.LOG' % MIXIN_MODULE, new=self.log_mock):
      assertDiracSucceeds(self.ujo._split(), self)
    self.log_mock.notice.assert_called_with(info_message)

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=False))
  def test_split_inputparameters_failed(self):
    """Test splitting input parameters with failure."""
    assertDiracFailsWith( self.ujo._split(), "Splitting: Invalid values for splitting", self )

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=1))
  @patch("%s._checkSplitConsistency" % MIXIN_CLASS, new=Mock(return_value=S_ERROR('failed')))
  def test_split_checkSplitConsistency_failed(self):
    """Test splitting check consistency with failure."""
    assertDiracFailsWith(self.ujo._split(), 'failed', self)

  @patch("%s.toInt" % MIXIN_MODULE, new=Mock(return_value=1))
  @patch("%s._checkSplitConsistency" % MIXIN_CLASS, new=Mock(return_value=S_OK()))
  def test_split_sequencer_fails(self):
    """Test splitting when the sequencer fails."""
    # An empty sequence from the split function means no parameter sequence is set.
    self.ujo._splittingOption = "bySequence"
    self.ujo._switch['bySequence'] = Mock(return_value=[])
    self.ujo.setParameterSequence = Mock()
    self.ujo._split()
    self.ujo.setParameterSequence.assert_not_called()

  def test_checkSplitconsistency(self):
    """Test splitting consistency check."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._switch = {"byEvents": lambda x: x}
    self.ujo._splittingOption = "byEvents"
    self.assertTrue(self.ujo._checkSplitConsistency())

  def test_checkjobconsistency_bad_split_parameter(self):
    """Test splitting consistency check with bad split parameters."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._switch = {"byEvents": lambda x: x }
    self.ujo._splittingOption = "byHand"
    self.assertFalse(self.ujo._checkSplitConsistency()['OK'])
    self.assertIn('_checkSplitConsistency', self.ujo.errorDict)

  def test_checkjobconsistency_no_same_events( self ):
    """Consistency check fails when applications disagree on event counts."""
    app1 = Fcc()
    app2 = Fcc()
    app1.numberOfEvents = 1
    app2.numberOfEvents = 2
    self.ujo.applicationlist = [app1, app2]
    self.ujo._switch = {"byEvents": lambda x: x }
    self.ujo._splittingOption = "byEvents"
    with patch('%s.LOG' % MIXIN_MODULE, new=self.log_mock):
      resCheck = self.ujo._checkSplitConsistency()
    self.assertFalse(resCheck['OK'])
    self.assertIn("have the same number", resCheck['Message'])

  def test_checkjobconsistency_negative_events( self ):
    """-1 events (i.e. 'all events') is accepted by the consistency check."""
    app1 = Fcc()
    app2 = Fcc()
    app1.numberOfEvents = app2.numberOfEvents = -1
    self.ujo.applicationlist = [app1, app2]
    self.ujo._switch = ["byEvents"]
    self.ujo._splittingOption = "byEvents"
    self.assertTrue(self.ujo._checkSplitConsistency())

  def test_splitbydata( self ):
    """_splitByData produces one parametric input-data entry per data file."""
    self.ujo._data = ['data1', 'data2']
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    assertEqualsImproved(self.ujo._splitByData(),
                         [("InputData", [['data1'], ['data2']], 'ParametricInputData')], self)

  def test_splitbydata_no_data(self):
    """Test splitting without data."""
    self.ujo._data = None
    self.assertFalse(self.ujo._splitByData())
    self.assertIn('_splitByData', self.ujo.errorDict)

  def test_splitbydata_incorrectparameter(self):
    """Test splitting with data."""
    # More files per job than data files available: _splitByData must fail.
    self.ujo._data = ["/path/to/data1","/path/to/data2"]
    self.ujo._numberOfFilesPerJob = 3
    self.assertFalse(self.ujo._splitByData())
    self.assertIn('_splitByData', self.ujo.errorDict)

  def test_splitbyevents_1st_case(self):
    """Test splitting by events."""
    # eventsPerJob + numberOfJobs given: each job gets eventsPerJob events.
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._eventsPerJob = 2
    self.ujo._numberOfJobs = 3
    map_event_job = [2, 2, 2]
    assertEqualsImproved(self.ujo._splitByEvents(),
                         [('NumberOfEvents', map_event_job, 'NbOfEvts')], self)

  def test_splitbyevents_2nd_case( self ):
    """eventsPerJob + totalNumberOfEvents: last job takes the remainder."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._eventsPerJob = 3
    self.ujo._totalNumberOfEvents = 5
    map_event_job = [3, 2]
    assertEqualsImproved(self.ujo._splitByEvents(),
                         [('NumberOfEvents', map_event_job, 'NbOfEvts')], self)

  def test_splitbyevents_2nd_case_failed( self ):
    """eventsPerJob larger than totalNumberOfEvents is rejected."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._eventsPerJob = 3
    self.ujo._totalNumberOfEvents = 2
    self.assertFalse(self.ujo._splitByEvents())

  def test_splitbyevents_3rd_case(self):
    """Test splitting by events case 3."""
    # numberOfJobs + totalNumberOfEvents: events are distributed evenly,
    # with the remainder trimmed from the last job.
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._numberOfJobs = 2
    self.ujo._totalNumberOfEvents = 2
    map_event_job = [1, 1]
    assertEqualsImproved(self.ujo._splitByEvents(),
                         [('NumberOfEvents', map_event_job, 'NbOfEvts')], self)
    self.ujo._numberOfJobs = 3
    self.ujo._totalNumberOfEvents = 5
    map_event_job = [2, 2, 1]
    assertEqualsImproved(self.ujo._splitByEvents(),
                         [('NumberOfEvents', map_event_job, 'NbOfEvts')], self)

  def test_splitbyevents_3rd_case_failed(self):
    """Test splitting by events case 3 fails."""
    app1 = Fcc()
    app2 = Fcc()
    self.ujo.applicationlist = [app1, app2]
    self.ujo._numberOfJobs = 2
    self.ujo._totalNumberOfEvents = None
    self.assertFalse(self.ujo._splitByEvents())

  def test_setsplitevents(self):
    """Test splitting set split events."""
    self.ujo.setSplitEvents(42, 42, 126)
    assertEqualsImproved(self.ujo._totalNumberOfEvents, 126, self)
    assertEqualsImproved(self.ujo._eventsPerJob, 42, self)
    assertEqualsImproved(self.ujo._numberOfJobs, 42, self)
    assertEqualsImproved(self.ujo._splittingOption, "byEvents", self)

  def test_setsplitInputdata(self):
    """Test set split input data."""
    input_data = ["/path/to/data1", "/path/to/data2"]
    self.ujo.setSplitInputData(input_data)
    for data in input_data:
      self.assertIn(data, self.ujo._data)
    assertEqualsImproved(self.ujo._splittingOption, "byData", self)

  def test_setSplitFiles(self):
    """Test set split files over jobs."""
    self.ujo.setSplitFilesAcrossJobs('myLFN', 20, 20)
    self.assertEqual(self.ujo._data, ['myLFN'])
    self.assertEqual(self.ujo._eventsPerFile, 20)
    self.assertEqual(self.ujo._eventsPerJob, 20)

  def test_splitBySkip(self):
    """Test set split with skip."""
    # 13 events per file, 5 per job: 3 jobs per file, last one short (3 events).
    self.ujo._eventsPerFile = 13
    self.ujo._eventsPerJob = 5
    self.ujo._data = ['lfn_%d' % d for d in [1, 2]]
    result = self.ujo._splitBySkip()
    self.assertEqual([('InputData', ['lfn_1', 'lfn_1', 'lfn_1', 'lfn_2', 'lfn_2', 'lfn_2'], 'InputData'),
                      ('startFrom', [0, 5, 10, 0, 5, 10], 'startFrom'),
                      ('NumberOfEvents', [5, 5, 3, 5, 5, 3], 'NbOfEvts')], result)
    # 15 events per file divides evenly: all jobs process 5 events.
    self.ujo._eventsPerFile = 15
    self.ujo._eventsPerJob = 5
    self.ujo._data = ['lfn_%d' % d for d in [1, 2]]
    result = self.ujo._splitBySkip()
    self.assertEqual([('InputData', ['lfn_1', 'lfn_1', 'lfn_1', 'lfn_2', 'lfn_2', 'lfn_2'], 'InputData'),
                      ('startFrom', [0, 5, 10, 0, 5, 10], 'startFrom'),
                      ('NumberOfEvents', [5, 5, 5, 5, 5, 5], 'NbOfEvts')], result)

  def test_setSplittingStartIndex(self):
    """Test setting start index."""
    res = self.ujo.setSplittingStartIndex(111)
    self.assertTrue(res['OK'])
    self.assertEqual(self.ujo._startJobIndex, 111)
    # Negative indices are rejected and leave the start index unchanged.
    self.ujo._startJobIndex = 0
    res = self.ujo.setSplittingStartIndex(-111)
    self.assertFalse(res['OK'])
    self.assertIn('setSplittingStartIndex', self.ujo.errorDict)
    self.assertEqual(self.ujo._startJobIndex, 0)

  def test_doNotAlter(self):
    """Test setting not altering the output."""
    self.ujo.setSplitDoNotAlterOutputFilename()
    self.assertIsNotNone(self.ujo.workflow.parameters.find('DoNotAlterOutputData'))
    self.assertEqual(self.ujo.workflow.parameters.find('DoNotAlterOutputData').getValue(), "True")
    self.ujo.setSplitDoNotAlterOutputFilename(False)
    self.assertIsNotNone(self.ujo.workflow.parameters.find('DoNotAlterOutputData'))
    self.assertEqual(self.ujo.workflow.parameters.find('DoNotAlterOutputData').getValue(), "False")

  def test_setSplitJobIndexList(self):
    """Test the setSplitJobIndexList function."""
    res = self.ujo.setSplitJobIndexList(range(0, 7, 3))
    self.assertTrue(res['OK'])
    self.assertEqual([0, 3, 6], self.ujo._jobIndexList)
    # A set is not an accepted argument type; the previous list is kept.
    res = self.ujo.setSplitJobIndexList(set(range(1, 7, 3)))
    self.assertFalse(res['OK'])
    self.assertIn('Invalid argument type', res['Message'])
    self.assertEqual([0, 3, 6], self.ujo._jobIndexList)
# Build one SLIC simulation user job per (macro, output, nEvts) tuple.
# NOTE(review): fragment — jobParams and the rest of the loop body (job.append,
# job.submit) are outside this excerpt.
for macro, output, nEvts in jobParams:
    job = UserJob()
    job.setName("ssetru_dirac_test1")
    job.setJobGroup("tests")
    job.setCPUTime(86400)
    #below ten mb, specified local path
    #larger input files are put into a grid storage unit, specified with grid path
    #job.setInputSandbox(["newDetector.zip"])
    job.setInputSandbox(["alias.properties"])
    #'/afs/cern.ch/user/s/ssetru/www/newDetector.zip'
    #job.setInputSandbox.append('/afs/cern.ch/user/s/ssetru/www/newDetector.zip')
    #has log files, also may want to specify *.xml, generally short term data
    job.setOutputSandbox(["*.log", "*.mac", "*.xml"])
    #stored forever, in grid storage until you delete, path specified goes after your user directory in dirac
    job.setOutputData(output, "test_vtx_matbudghalf_nonsensitivelayer", "CERN-SRM")

    #for index in xrange(0,len(slicNumEvents)):
    slic = SLIC()
    slic.setVersion('v3r0p3')
    #can loop over stdhep files, takes precedence over what is in macro
    #slic.setInputFile("some_file.stdhep")
    #need not loop over macro
    #slic.setInputFile("newDetector.zip")
    slic.setSteeringFile(macro)
    slic.setNumberOfEvents(nEvts)  #also overwrites macro
    #two options
    #1) specify lcdd filename (newDetector.lcdd) local or grid
    #2) specify detector name (like below). looks up detector in org.lcsim detector page and downloads the tarball
# Configure and submit one Marlin job, optionally with an overlay step.
# NOTE(review): fragment from inside a loop (`continue` below) — ov, ovi, j,
# lfn and d are defined outside this excerpt; if ov is falsy, steeringf is
# presumably assigned earlier in the loop — confirm against the full script.
if ov:
    steeringf = "clic_ild_cdr_steering_overlay.xml"
    res = j.append(ovi)
    if not res['OK']:
        print(res['Message'])
        continue
ma = Marlin()
ma.setVersion("v0111Prod")
ma.setGearFile("clic_ild_cdr.gear")
ma.setSteeringFile(steeringf)
ma.setInputFile("LFN:"+lfn)
ma.setNbEvts(10)
ma.setEnergy(500.)
ma.setOutputRecFile("myrec_overlay_%s.slcio"%ov)
ma.setOutputDstFile("mydst_overlay_%s.slcio"%ov)
res =j.append(ma)
if not res['OK']:
    print(res['Message'])
    exit()
j.setCPUTime(86400)
j.setOutputData("myrec_overlay_%s.slcio"%ov,"some/path")
j.setName("SomeName")
j.setJobGroup("SomeGroup")
# Validate job parameters before submitting.
res = d.checkparams(j)
if not res['OK']:
    print(res['Message'])
    exit()
j.submit(d)
# Configure the DDSim application and submit the job either locally or to the
# grid.  NOTE(review): fragment — ddsim, j, d, genfile, steeringfile, simfile
# and localjob are defined earlier in the script.
ddsim.setDetectorModel("ILD_o1_v05")
ddsim.setInputFile(genfile)
ddsim.setRandomSeed(12345)
# ddsim.setStartFrom(1)
ddsim.setNumberOfEvents(5)  # Number of events should not exceed number of events in file.
                            # Otherwise, G4exception is thrown
# ddsim.setDebug()
ddsim.setSteeringFile(steeringfile)
ddsim.setOutputFile(simfile)
res = j.append(ddsim)
if not res['OK']:
    print res['Message']
    exit(1)
j.setOutputSandbox(["*.log","*.xml","*.sh","*.root"])
j.dontPromptMe()
if localjob :
    # Local run: no output data registration, job executes on this machine.
    j.submit(d, mode="local")
else:
    simdir="testjob"
    j.setOutputData(simfile,simdir,"PNNL-SRM")
    res=j.submit(d)
    if res['OK']:
        print "Dirac job, "+str(res["Value"])+", was submitted."
    else:
        print "Failed to submit dirac job. "
        print res
def main(): # Take the input arguments from the argument parser, and check they exist... args = parse_args() if not args: print 'Invalid Arguments' sys.exit(1) #### Software Versions #### softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "0116"] # Working (recommended) # softVersions = ["v3r0p3", "2.5", "ILC_DBD", "0116"] # Working # softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "ILCSoft-01-17-07"] # Working # Check the --runs and --split arguments to make sure they are compatible, if not exit... if not check_events_arguments(args.events, args.split): sys.exit(1) # Check the input LFN given by user, it needs to have .stdhep extension and should not have LFN: at the beginning... lfn_check, lfn = check_input_LFN(args.stdhepInput) if not lfn_check: sys.exit(1) # Call when you begin ILC-DIRAC jobs, the true indicates a repository file is included... dirac = DiracILC(True, setup_repository_name(args.stdhepInput, args.detector)) # Prepares the input and output sandboxes, if -f, then adds the files required for flavortagging, # into the input sandbox inputSandbox, outputSandbox = setup_sandboxes(args.macFile, args.flavortag) # Prepares values for the job loop... if args.split < 0: nInputEvents = int(args.events) nOutputEvents = int(args.events) if args.split > 0: nInputEvents = int(args.events) nOutputEvents = int(args.split) # Loop that runs through the required number of jobs to be executed... 
for startEvent in range(0, nInputEvents, nOutputEvents): ################## Job Initialise ######################################## job = UserJob() job.setName(path.basename(args.stdhepInput)) job.setJobGroup('JobGroup') job.setInputSandbox(inputSandbox) fileNumber = startEvent/nOutputEvents print "Job ", fileNumber outputFiles = setup_output_dict(args.stdhepInput, args.detector, fileNumber, args.outputPath, softVersions) slicOutput=outputFiles['slicOutput'] prePandoraOutput=outputFiles['prePandoraOutput'] pandoraOutput=outputFiles['pandoraOutput'] vertexingOutput=outputFiles['vertexingOutput'] lcsimRecOutput=outputFiles['lcsimRecOutput'] lcsimDstOutput=outputFiles['lcsimDstOutput'] flavortagOutput=outputFiles['flavortagOutput'] diracOutput=outputFiles['diracOutput'] ################## SLIC ################################################## slic = SLIC() slic.setVersion(softVersions[0]) slic.setSteeringFile(args.macFile) # slic.setInputFile(lfn) slic.setOutputFile(slicOutput) slic.setDetectorModel(args.detector) slic.setNumberOfEvents(nOutputEvents) slic.setStartFrom(startEvent) #print slic.listAttributes() result = job.append(slic) if not result['OK']: print result['Message'] sys.exit(2) ################## lcsim (digitization and tracking) ##################### lcsim = LCSIM() lcsim.setVersion(softVersions[1]) lcsim.setSteeringFile('steeringFiles/sid_dbd_prePandora_noOverlay_v22.xml') # Another version is included in /steeringFiles lcsim.getInputFromApp(slic) lcsim.setTrackingStrategy('steeringFiles/sidloi3_trackingStrategies_default.xml') # lcsim.setAliasProperties('alias.properties') lcsim.setDetectorModel('geometryFiles/sidloi3.zip') lcsim.setOutputFile(prePandoraOutput) lcsim.setNumberOfEvents(nOutputEvents) #print lcsim.listAttributes() result = job.append(lcsim) if not result['OK']: print result['Message'] sys.exit(2) ################## slicPandora ########################################### slicPandora = SLICPandora() slicPandora.setVersion(softVersions[2]) 
slicPandora.setDetectorModel(args.detector) slicPandora.getInputFromApp(lcsim) slicPandora.setOutputFile(pandoraOutput) slicPandora.setPandoraSettings('pandoraSettings.xml') slicPandora.setNumberOfEvents(nOutputEvents) #print slicPandora.listAttributes() result = job.append(slicPandora) if not result['OK']: print result['Message'] sys.exit(2) ################## Marlin, LCFIPlus Vertexing ############################ vertexing = Marlin() vertexing.setVersion(softVersions[3]) vertexing.setSteeringFile('steeringFiles/sid_dbd_vertexing.xml') vertexing.setGearFile('steeringFiles/sidloi3.gear') vertexing.getInputFromApp(slicPandora) vertexing.setOutputFile(vertexingOutput) vertexing.setNumberOfEvents(nOutputEvents) #print vertexing.listAttributes() result = job.append(vertexing) if not result['OK']: print result['Message'] sys.exit(2) ################## lcsim (DST production) ################################ lcsimDst = LCSIM() lcsimDst.setVersion(softVersions[1]) lcsimDst.setSteeringFile('steeringFiles/sid_dbd_postPandora.xml') lcsimDst.getInputFromApp(vertexing) lcsimDst.setNumberOfEvents(nOutputEvents) # lcsimDst.setAliasProperties('alias.properties') lcsimDst.setDetectorModel('geometryFiles/sidloi3.zip') lcsimDst.setOutputRecFile(lcsimRecOutput) lcsimDst.setOutputDstFile(lcsimDstOutput) #print lcsimDst.listAttributes() result = job.append(lcsimDst) if not result['OK']: print result['Message'] sys.exit(2) ################## Marlin, LCFIPlus flavortag ############################ if args.flavortag: flavortag = Marlin() flavortag.setVersion(softVersions[3]) flavortag.setSteeringFile('steeringFiles/sid_dbd_flavortag.xml') flavortag.setGearFile('steeringFiles/sidloi3.gear') flavortag.setInputFile(lcsimDstOutput) flavortag.setOutputFile(flavortagOutput) flavortag.setNumberOfEvents(nOutputEvents) #print flavortag.listAttributes() result = job.append(flavortag) if not result['OK']: print result['Message'] sys.exit(2) ################## Job Finalise 
########################################## # List of banned sites that the job shall not be sent too. These are sites that jobs tend to fail on, # This list is likely to change. job.setBannedSites(['LCG.IN2P3-CC.fr', 'LCG.RAL-LCG2.uk', 'LCG.DESY-HH.de', 'LCG.DESYZN.de', 'LCG.KEK.jp', 'OSG.PNNL.us',]) job.setCPUTime(50000) job.setPlatform('x86_64-slc5-gcc43-opt') # Sets the output data file according to if -f is selcted, ships ouput to your /ilc/user/a/aPerson/ # directory on the grid. if args.flavortag: job.setOutputData(flavortagOutput, diracOutput, args.SE) else: job.setOutputData(lcsimDstOutput, diracOutput, args.SE) job.setOutputSandbox(outputSandbox) job.setInputData(lfn) if args.dontPromptMe: job.dontPromptMe() # Submits Job!!! job.submit() return 0;
def main(): # Take the input arguments from the argument parser, and check they exist... args = parse_args() if not args: print 'Invalid Arguments' sys.exit(1) #### Software Versions #### softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "0116"] # Working (recommended) # softVersions = ["v3r0p3", "2.5", "ILC_DBD", "0116"] # Working # softVersions = ["v3r0p3", "HEAD", "ILC_DBD", "ILCSoft-01-17-07"] # Working # Check the --runs and --split arguments to make sure they are compatible, if not exit... if not check_events_arguments(args.events, args.split): sys.exit(1) # Check the input LFN given by user, it needs to have .stdhep extension and should not have LFN: at the beginning... lfn_check, lfn = check_input_LFN(args.stdhepInput) if not lfn_check: sys.exit(1) # Call when you begin ILC-DIRAC jobs, the true indicates a repository file is included... dirac = DiracILC(True, setup_repository_name(args.stdhepInput, args.detector)) # Prepares the input and output sandboxes, if -f, then adds the files required for flavortagging, # into the input sandbox inputSandbox, outputSandbox = setup_sandboxes(args.macFile, args.flavortag) # Prepares values for the job loop... if args.split < 0: nInputEvents = int(args.events) nOutputEvents = int(args.events) if args.split > 0: nInputEvents = int(args.events) nOutputEvents = int(args.split) # Loop that runs through the required number of jobs to be executed... 
for startEvent in range(0, nInputEvents, nOutputEvents): ################## Job Initialise ######################################## job = UserJob() job.setName(path.basename(args.stdhepInput)) job.setJobGroup('JobGroup') job.setInputSandbox(inputSandbox) fileNumber = startEvent / nOutputEvents print "Job ", fileNumber outputFiles = setup_output_dict(args.stdhepInput, args.detector, fileNumber, args.outputPath, softVersions) slicOutput = outputFiles['slicOutput'] prePandoraOutput = outputFiles['prePandoraOutput'] pandoraOutput = outputFiles['pandoraOutput'] vertexingOutput = outputFiles['vertexingOutput'] lcsimRecOutput = outputFiles['lcsimRecOutput'] lcsimDstOutput = outputFiles['lcsimDstOutput'] flavortagOutput = outputFiles['flavortagOutput'] diracOutput = outputFiles['diracOutput'] ################## SLIC ################################################## slic = SLIC() slic.setVersion(softVersions[0]) slic.setSteeringFile(args.macFile) # slic.setInputFile(lfn) slic.setOutputFile(slicOutput) slic.setDetectorModel(args.detector) slic.setNumberOfEvents(nOutputEvents) slic.setStartFrom(startEvent) #print slic.listAttributes() result = job.append(slic) if not result['OK']: print result['Message'] sys.exit(2) ################## lcsim (digitization and tracking) ##################### lcsim = LCSIM() lcsim.setVersion(softVersions[1]) lcsim.setSteeringFile( 'steeringFiles/sid_dbd_prePandora_noOverlay_v22.xml' ) # Another version is included in /steeringFiles lcsim.getInputFromApp(slic) lcsim.setTrackingStrategy( 'steeringFiles/sidloi3_trackingStrategies_default.xml') # lcsim.setAliasProperties('alias.properties') lcsim.setDetectorModel('geometryFiles/sidloi3.zip') lcsim.setOutputFile(prePandoraOutput) lcsim.setNumberOfEvents(nOutputEvents) #print lcsim.listAttributes() result = job.append(lcsim) if not result['OK']: print result['Message'] sys.exit(2) ################## slicPandora ########################################### slicPandora = SLICPandora() 
slicPandora.setVersion(softVersions[2]) slicPandora.setDetectorModel(args.detector) slicPandora.getInputFromApp(lcsim) slicPandora.setOutputFile(pandoraOutput) slicPandora.setPandoraSettings('pandoraSettings.xml') slicPandora.setNumberOfEvents(nOutputEvents) #print slicPandora.listAttributes() result = job.append(slicPandora) if not result['OK']: print result['Message'] sys.exit(2) ################## Marlin, LCFIPlus Vertexing ############################ vertexing = Marlin() vertexing.setVersion(softVersions[3]) vertexing.setSteeringFile('steeringFiles/sid_dbd_vertexing.xml') vertexing.setGearFile('steeringFiles/sidloi3.gear') vertexing.getInputFromApp(slicPandora) vertexing.setOutputFile(vertexingOutput) vertexing.setNumberOfEvents(nOutputEvents) #print vertexing.listAttributes() result = job.append(vertexing) if not result['OK']: print result['Message'] sys.exit(2) ################## lcsim (DST production) ################################ lcsimDst = LCSIM() lcsimDst.setVersion(softVersions[1]) lcsimDst.setSteeringFile('steeringFiles/sid_dbd_postPandora.xml') lcsimDst.getInputFromApp(vertexing) lcsimDst.setNumberOfEvents(nOutputEvents) # lcsimDst.setAliasProperties('alias.properties') lcsimDst.setDetectorModel('geometryFiles/sidloi3.zip') lcsimDst.setOutputRecFile(lcsimRecOutput) lcsimDst.setOutputDstFile(lcsimDstOutput) #print lcsimDst.listAttributes() result = job.append(lcsimDst) if not result['OK']: print result['Message'] sys.exit(2) ################## Marlin, LCFIPlus flavortag ############################ if args.flavortag: flavortag = Marlin() flavortag.setVersion(softVersions[3]) flavortag.setSteeringFile('steeringFiles/sid_dbd_flavortag.xml') flavortag.setGearFile('steeringFiles/sidloi3.gear') flavortag.setInputFile(lcsimDstOutput) flavortag.setOutputFile(flavortagOutput) flavortag.setNumberOfEvents(nOutputEvents) #print flavortag.listAttributes() result = job.append(flavortag) if not result['OK']: print result['Message'] sys.exit(2) ################## 
Job Finalise ########################################## # List of banned sites that the job shall not be sent too. These are sites that jobs tend to fail on, # This list is likely to change. job.setBannedSites([ 'LCG.IN2P3-CC.fr', 'LCG.RAL-LCG2.uk', 'LCG.DESY-HH.de', 'LCG.DESYZN.de', 'LCG.KEK.jp', 'OSG.PNNL.us', ]) job.setCPUTime(50000) job.setPlatform('x86_64-slc5-gcc43-opt') # Sets the output data file according to if -f is selcted, ships ouput to your /ilc/user/a/aPerson/ # directory on the grid. if args.flavortag: job.setOutputData(flavortagOutput, diracOutput, args.SE) else: job.setOutputData(lcsimDstOutput, diracOutput, args.SE) job.setOutputSandbox(outputSandbox) job.setInputData(lfn) if args.dontPromptMe: job.dontPromptMe() # Submits Job!!! job.submit() return 0
###In case one wants a loop: comment the folowing. #for i in range(2): j = UserJob() j.setJobGroup("Tutorial") j.setName("MarlinExample")#%i) ma = Marlin() ma.setDebug() # ma.setLogLevel("verbose") # ma.setILDConfig("v01-16-p05_500") ma.setVersion("v01-16-02") ma.setSteeringFile("marlin_stdreco.xml") ma.setGearFile(gearfile) ma.setInputFile([simfile, pandoraLikelihoodData, bg_aver]) ma.setOutputDstFile(outdst) ma.setOutputRecFile(outrec) res = j.append(ma) if not res['OK']: print res['Message'] exit(1) j.setOutputData([outdst,outrec],"myprod2/test","PNNL-SRM") j.setOutputSandbox(["*.log","*.xml","*.sh"]) j.dontPromptMe() j.submit(d)
def subOverlay(): # Decide parameters for a job outputSE = "KEK-SRM" isLocal = _clip.isLocal nbevts = 50 if _clip.numberOfEvents == 0 else _clip.numberOfEvents nbevts = 0 # To analize all input events outputFilePrefix = "overlay_example" if _clip.outputFilePrefix == "" else _clip.outputFilePrefix outputDir = _clip.outputDir inputFile = _clip.inputFile if inputFile == "": gLogger.error("Input file for ddsim does not given.") exit(-1) recfile = outputFilePrefix + ".rec.slcio" dstfile = outputFilePrefix + ".dst.slcio" detector_model = "ILD_l5_o1_v02" key = detector_model.split('_') sim_detectorModel = "_".join([key[0], key[1], key[3]]) # Create DIRAC objects for job submission dIlc = DiracILC() job = UserJob() job.setJobGroup("myoverlayjob") job.setName("myoverlay") job.setOutputSandbox(['*.log', '*.sh', '*.py', '*.xml']) job.setILDConfig("v02-00-02") # job.setInputSandbox(["a6-parameters.sin", "P2f_qqbar.sin"]) # job.setDestination(["LCG.KEK.jp", "LCG.DESY-HH.de"]) # job submission destination # job.setBannedSites([]) # a list of sites not to submit job # job.setCPUTime( cputime_limit_in_seconds_by_dirac_units ) # Create Overlay application ovldata = [{ "ProcessorName": "BgOverlayWW", "evttype": "aa_lowpt_WW", "ProdID": 10237, "expBG": 0.211, "subdir": "000" }, { "ProcessorName": "BgOverlayWB", "evttype": "aa_lowpt_WB", "ProdID": 10241, "expBG": 0.24605, "subdir": "000" }, { "ProcessorName": "BgOverlayBW", "evttype": "aa_lowpt_BW", "ProdID": 10239, "expBG": 0.243873, "subdir": "000" }, { "ProcessorName": "BgOverlayBB", "evttype": "aa_lowpt_BB", "ProdID": 10235, "expBG": 0.35063, "subdir": "000" }, { "ProcessorName": "PairBgOverlay", "evttype": "seeablepairs", "ProdID": 10233, "expBG": 1.0, "subdir": "100" }] BXOverlay = 1 NbSigEvtsPerJob = 100 numberOfSignalEvents = NbSigEvtsPerJob basebkgpath = "/ilc/prod/ilc/mc-opt-3/ild/sim/500-TDR_ws" energy = "500" for ovl in ovldata: print "### OverlayInput ... 
" + ovl["ProcessorName"] ovlapp = OverlayInput() ovlpath = "%s/%s/%s/v02-00-01/%8.8d/%s" % \ ( basebkgpath, ovl["evttype"], sim_detectorModel, ovl["ProdID"] , ovl["subdir"] ) print " OverlayPath ... " + ovlpath ovlapp.setMachine("ilc_dbd") # ovlapp.setEnergy(energy) # ovlapp.setDetectorModel(sim_detectorModel) ovlapp.setProcessorName(ovl["ProcessorName"]) ovlapp.setBkgEvtType(ovl["evttype"]) ovlapp.setPathToFiles(ovlpath) ovlapp.setGGToHadInt(ovl["expBG"]) ovlapp.setBXOverlay(BXOverlay) ovlapp.setNbSigEvtsPerJob(NbSigEvtsPerJob) ovlapp.setNumberOfSignalEventsPerJob(numberOfSignalEvents) res = job.append(ovlapp) if not res['OK']: print res['Message'] exit(1) # Create Marlin application marlin = Marlin() marlin.setVersion("ILCSoft-02-00-02_gcc49") marlin.setDetectorModel(detector_model) marlin.setSteeringFile("MarlinStdReco.xml") marlin.setInputFile(inputFile) marlin.setNumberOfEvents(nbevts) marlin.setOutputDstFile(dstfile) marlin.setOutputRecFile(recfile) extraCLIArguments = " --constant.DetectorModel=%s " % detector_model extraCLIArguments += " --constant.RunOverlay=true --constant.CMSEnergy=%s " % str( energy) extraCLIArguments += " --global.Verbosity=MESSAGE " marlin.setExtraCLIArguments(extraCLIArguments) job.append(marlin) if outputDir != "": job.setOutputData([dstfile, recfile], OutputPath=outputDir, OutputSE=outputSE) if isLocal: job.submit(dIlc, mode="local") else: job.submit(dIlc)
# NOTE(review): this is the body of a loop whose header is not shown here --
# `ov`, `ovi`, `lfn`, `j` (UserJob) and `d` (DiracILC) come from the enclosing
# scope, and the `continue` below targets that loop.  Confirm against the
# full script before moving this code.
if ov:
    # Overlay run: switch to the overlay steering file and attach the
    # pre-built OverlayInput application first.
    steeringf = "clic_ild_cdr_steering_overlay.xml"
    res = j.append(ovi)
    if not res['OK']:
        print res['Message']
        continue

# Marlin reconstruction over the (possibly overlaid) input LFN.
ma = Marlin()
ma.setVersion("v0111Prod")
ma.setGearFile("clic_ild_cdr.gear")
ma.setSteeringFile(steeringf)
ma.setInputFile("LFN:" + lfn)
ma.setNbEvts(10)
ma.setEnergy(500.)
ma.setOutputRecFile("myrec_overlay_%s.slcio" % ov)
ma.setOutputDstFile("mydst_overlay_%s.slcio" % ov)

res = j.append(ma)
if not res['OK']:
    print res['Message']
    exit()

j.setCPUTime(86400)
j.setOutputData("myrec_overlay_%s.slcio" % ov, "some/path")
j.setName("SomeName")
j.setJobGroup("SomeGroup")

# Validate the job parameters before submission.
res = d.checkparams(j)
if not res['OK']:
    print res['Message']
    exit()

j.submit(d)
def main(argv): # Input arguments ildconfig_version = "$ILDCONFIGVER" ilcsoft_version = "$ILCSOFTVER" evts_per_run = $EVTSPERRUN detector_model = "$DETECTOR" sim_input = "$SIMINPUT" process_name = "$PROCESS" index = $IND sim_input = diracpath_from_pnfspath( sim_input ) sim_detector_model = detector_model_wo_option( detector_model ) job_group = ilcsoft_version + "_" + ildconfig_version + "_" + process_name + "_" + detector_model dirac = DiracILC(True,job_group+".rep") # outputs to be saved onto grid SE RECoutput = [] # DDSim evtStart = (index-1)*evts_per_run evtEnd = index*evts_per_run - 1 RandSeed = random.randrange(11623, 99999) lcinputSIM = "LFN:" + sim_input lcoutputSIM = ilcsoft_version + ".ILDConfig_" + ildconfig_version + ".E1000." + process_name + ".eLpR.evt%s-%s_SIM.slcio"%(str(evtStart),(str)(evtEnd)) sim = DDSim() sim.setVersion(ilcsoft_version) sim.setDetectorModel(sim_detector_model) sim.setInputFile(lcinputSIM) sim.setSteeringFile("ddsim_steer.py") sim.setNumberOfEvents(evts_per_run) sim.setRandomSeed(RandSeed) sim.setEnergy(1000) sim.setStartFrom(evtStart) sim.setOutputFile(lcoutputSIM) # Marlin lcoutputDST = ilcsoft_version + ".ILDConfig_" + ildconfig_version + ".E1000." 
+ process_name + ".eLpR.evt%s-%s_DST.slcio"%(str(evtStart),(str)(evtEnd)) ma = Marlin() ma.setVersion(ilcsoft_version) ma.setDetectorModel(detector_model) ma.setSteeringFile("MarlinStdReco.xml") ma.setExtraCLIArguments( "--constant.lcgeo_DIR=$lcgeo_DIR --constant.DetectorModel={} --global.MaxRecordNumber=0".format(detector_model) ) ma.setLogFile("marlin.log") ma.getInputFromApp(sim) ma.setEnergy(1000) ma.setOutputDstFile(lcoutputDST) RECoutput.append(lcoutputDST) # ILCDirac user job job = UserJob() job.setName("user_sim_reco") job.setJobGroup(job_group) job.setILDConfig(ildconfig_version) job.setCPUTime(86400) tmp_file_name = process_name + "_sim_reco_job_tmp.py" job.setInputSandbox([tmp_file_name]) job.setOutputSandbox(["*.log","MarlinStdRecoParsed.xml","marlin*.xml","*.py "]) job.dontPromptMe() job.setBannedSites(['LCG.IN2P3-CC.fr','OSG.UConn.us','LCG.Cracow.pl','OSG.MIT.us','LCG.Glasgow.uk','OSG.CIT.us','OSG.BNL.us','LCG.Brunel.uk','LCG.RAL-LCG2.uk','LCG.Oxford.uk','OSG.UCSDT2.us']) # run simulation job simres = job.append(sim) if not simres['OK']: print simres['Not ok appending ddsim to job'] quit() # run Malrin reco jobs mares = job.append(ma) if not mares['OK']: print mares['Not ok appending Marlin to job'] quit() job.setOutputData(RECoutput,"ILDPerformance/WWZZSeparation/{}_ILDConfig_{}_{}".format(ilcsoft_version,ildconfig_version,detector_model),"DESY-SRM") print RECoutput submit_output = job.submit(dirac) print submit_output