# Dispatch on the first CLI argument: '0' = submit efficiency-map jobs to
# Condor, '1' = hadd the per-mass-point histograms and plot, '2' = compute
# the model-independent acceptance.
# NOTE(review): the leading sys.exit(0) is presumably the tail of a branch
# that starts before this chunk — confirm against the full file.
sys.exit(0)
if len(sys.argv) == 1:
    print "Please pass in argument a number between 0 and 2"
    sys.exit()
elif sys.argv[1] == '0':
    print 'Build Efficiency maps'
    FarmDirectory = "FARM"
    JobName = "HscpBuildEffMaps"
    LaunchOnCondor.Jobs_RunHere = 1  # run on the submission cluster itself
    LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
    # one FWLITE job per single-stau mass point, reading the sample from EOS
    for m in SStauMassPoints:
        LaunchOnCondor.SendCluster_Push([
            "FWLITE",
            os.getcwd() + "/GetEfficiencyMaps.C",
            '"SStau' + str(m) + '"',
            "1",
            '"root://eoscms//eos/cms/store/user/querten/ModelIndepSept/ModelIndep_SingleStau'
            + str(m) + '.root"'
        ])
    LaunchOnCondor.SendCluster_Submit()
elif sys.argv[1] == '1':
    print 'Merge efficiencies'
    # build the space-separated input list, then hadd into a single file
    fileList = ''
    for m in SStauMassPoints:
        fileList += ' pictures/Histos_SStau' + str(m) + '.root'
    os.system('hadd -f pictures/Histos.root' + fileList)
    os.system('root MakePlot.C++ -l -b -q')
elif sys.argv[1] == '2':
    print 'Compute model independent acceptance'
    FarmDirectory = "FARM"
index += 1 if (line.startswith('#')): continue vals = line.split(',') if (int(vals[1]) == 2): continue if int(vals[1]) == 0 and str(vals[2]).find("Data13TeV16") == -1: continue if (UseRemoteSamples and int(vals[1]) == 0 and vals[3].find('2016')): LaunchOnCondor.Jobs_InitCmds = [ 'ulimit -c 0', 'export HOME=%s' % os.environ['HOME'], 'export X509_USER_PROXY=%s/x509_user_proxy/x509_proxy; voms-proxy-init --noregen;' % os.environ['HOME'], 'export REMOTESTORAGEPATH=' + RemoteStorageDir.replace('/storage/data/cms/store/', '/store/') ] else: LaunchOnCondor.Jobs_InitCmds = ['ulimit -c 0'] LaunchOnCondor.SendCluster_Push([ "BASH", "sh " + os.getcwd() + "/StabilityCheck.sh " + os.getcwd() + "/pictures " + str(index) + " " + str(1) ]) f.close() LaunchOnCondor.SendCluster_Submit() elif sys.argv[1] == '2': os.system( 'find pictures/Histos_*.root -type f -size +1024c | xargs hadd -f pictures/Histos.root' ) if sys.argv[1] == '3': os.system('sh MakePlot.sh')
# Per-run merge of crab outputs (this chunk starts inside the loop over runs):
# collect the output directories of the three primary datasets, build the
# to-merge list, and submit one CMSSW merge job per run.
paths = ["%s/DoubleMuon/crab_Run%s_DoubleMuon/*/0000/" % (StorageDir, run),
         "%s/MET/crab_Run%s_MET/*/0000/" % (StorageDir, run),
         "%s/SingleMuon/crab_Run%s_SingleMuon/*/0000/" % (StorageDir, run)]
createToMergeList(paths)
# worker-node setup: HOME plus grid proxy for remote reads
LaunchOnCondor.Jobs_InitCmds = ['export HOME=%s' % os.environ['HOME'],
                                'export X509_USER_PROXY=$HOME/x509_user_proxy/x509_proxy']
# remove any stale merged file for this run before writing the new one
LaunchOnCondor.Jobs_FinalCmds = ['rm -f %s/Run2016_%s.root' % (EndPath, run)]
if TransferDirectlyToStorage:
    # copy the merged file straight to the SRM storage element, then clean up
    LaunchOnCondor.Jobs_FinalCmds += ["lcg-cp -v -n 10 -D srmv2 -b file://${PWD}/Run2016_%s.root srm://ingrid-se02.cism.ucl.ac.be:8444/srm/managerv2\?SFN=%s/%s/Run2016_%s.root && rm -f Run2016_%s.root" % (run, EndPath, run, run)] # if you do not use zsh, change '\?' to '?'
else:
    LaunchOnCondor.Jobs_FinalCmds += ["mv Run2016_%s.root %s" % (run, EndPath)]
LaunchOnCondor.Jobs_Queue = '8nh'  # 8-hour batch queue
LaunchOnCondor.SendCluster_Push(["CMSSW", MergeTemplateName, 'XXX_SAVEPATH_XXX', 'Run2016_%s.root' % run])
LaunchOnCondor.SendCluster_Submit()
os.system("rm -f %s" % MergeTemplateName)
if sys.argv[1] == '3':
    # step 3: validate merged content and estimate integrated luminosity
    print "Validating the content and estimating the total integrated luminosity..."
    EndPath = ""
    if TransferDirectlyToStorage:
        EndPath = "%s/HSCP2016" % StorageDir
    else:
        FarmDirectory = "MERGECrab"
        EndPath = "%s/%s/outputs" % (os.getcwd(), FarmDirectory)
    runs = getRunList(AllLumisFile)
    f = open ("Analysis_Samples_tmp.txt", 'w')
# Tail of the per-chunk cfg writer (the loop over file chunks starts before
# this view): finish the input-file list, include the common producer config,
# and queue one CMSSW job per chunk.
for inFile in inFileList:
    f.write("'" + inFile + "',\n")
f.write(")\n")
f.write("\n")
f.write("#main EDM tuple cfg that depends on the above parameters\n")
f.write("execfile( os.path.expandvars('${CMSSW_BASE}/src/SUSYBSMAnalysis/HSCP/test/MakeEDMtuples/HSCParticleProducer_cfg.py') )\n")
# move the job output into out/<run>/ with a unique per-chunk index
LaunchOnCondor.Jobs_FinalCmds = [
    "mv out.root %s/out/%i/%s_HSCP_%i.root" % (os.getcwd(), RUN, DATASET.split('/')[1], INDEX)
]
LaunchOnCondor.SendCluster_Push(["CMSSW", ["HSCParticleProducer_Data_Template_cfg.py"]])
INDEX += 1
# LaunchOnCondor.SendCluster_Submit()
if sys.argv[1] == '2':
    # step 2: merge the per-run EDM files on Condor
    FarmDirectory = "MERGE"
    LaunchOnCondor.SendCluster_Create(FarmDirectory, "HSCPEdmMerge")
    LaunchOnCondor.Jobs_Queue = '8nh'  # 8-hour batch queue
    for RUN in goodLumis:
        LaunchOnCondor.Jobs_InitCmds = ['export HOME=%s' % os.environ['HOME']]
        # after merging, dump the lumi sections of the merged file to JSON
        LaunchOnCondor.Jobs_FinalCmds = [
            "edmLumisInFiles.py Run2016_%i.root --output=%s/out/Run2016_%i.json" % (RUN, os.getcwd(), RUN)
        ]
        #in the first step also to add
        LaunchOnCondor.Jobs_FinalCmds += [
# (Tail of a Jobs_InitCmds list whose opening bracket is outside this view:
# grid-proxy setup for the worker node.)
'export X509_USER_PROXY=~/x509_user_proxy/x509_proxy; voms-proxy-init --noregen;'
]
# Generate a self-contained HSCPEDM_cfg.py for this dataset, then queue it.
f = open("HSCPEDM_cfg.py", "w")
f.write("import sys, os\n")
f.write("import FWCore.ParameterSet.Config as cms\n")
f.write("\n")
if ('HSCP' in DATASET):
    # a dataset name containing 'HSCP' is treated as signal MC
    f.write("isSignal = True\n")
    f.write("isBckg = False\n")
else:
    f.write("isSignal = False\n")
    f.write("isBckg = True\n")
f.write("isData = False\n")
f.write("isSkimmedSample = False\n")
f.write("GTAG = 'MCRUN2_74_V9'\n")
f.write("OUTPUTFILE = 'XXX_OUTPUT_XXX.root'\n")
f.write("InputFileList = cms.untracked.vstring()\n")
f.write("\n")
# one extend() call per input file
for inFile in FILELIST:
    f.write("InputFileList.extend(['" + inFile + "'])\n")
f.write("\n")
f.write("#main EDM tuple cfg that depends on the above parameters\n")
f.write("execfile( os.path.expandvars('${CMSSW_BASE}/src/SUSYBSMAnalysis/HSCP/test/MakeEDMtuples/HSCParticleProducer_cfg.py') )\n")
f.close()
LaunchOnCondor.SendCluster_Push(["CMSSW", "HSCPEDM_cfg.py"])
LaunchOnCondor.SendCluster_Submit()
# Step '1': submit the GEN-SIM -> RECO -> EDM chain for sample S, one job
# per block of Jobs_NEvent events.
JobName = S[0] + "_SIMEDM"
FarmDirectory = "FARM_" + JobName
LaunchOnCondor.Jobs_NEvent = S[3]  # events per job (from the sample tuple)
LaunchOnCondor.Jobs_Skip = 0
LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
LaunchOnCondor.Jobs_InitCmds = [
    'export X509_USER_PROXY=~/x509_user_proxy/x509_proxy; voms-proxy-init --noregen;'
]
for i in range(0, S[2]):  # S[2] = number of jobs for this sample
    LaunchOnCondor.Jobs_Count = i
    # each job skips the events already covered by the previous jobs
    LaunchOnCondor.Jobs_Skip += LaunchOnCondor.Jobs_NEvent
    LaunchOnCondor.SendCluster_Push([
        "CMSSW",
        ["GEN_SIM_Template_cfg.py", "RECO_Template_cfg.py", "HSCPEDM_Template_cfg.py"]
    ])
# drop the intermediate step outputs, keeping only the final EDM file
# NOTE(review): Jobs_FinalCmds is set after the Push loop here — confirm
# whether it was intended to apply per job (i.e. before the loop).
LaunchOnCondor.Jobs_FinalCmds = ['rm step1.root; rm step2.root']
LaunchOnCondor.SendCluster_Submit()
elif sys.argv[1] == '2':  #MergeAll EDM files into one
    for S in samples:
        # list the produced EDM files of this sample's farm output directory
        InputFiles = LaunchOnCondor.GetListOfFiles('"file:', os.getcwd() + "/FARM_" + S[0] + "_SIMEDM/outputs/*.root", '"')
        # split into S[4] merge chunks (integer division, Python 2)
        chunks = getChunksFromList(InputFiles, (len(InputFiles) / S[4]) + 1)
        for I in range(0, S[4]):
            if (I >= len(chunks)):
                continue  # fewer chunks than requested merge jobs
            JobName = S[0]
# Tail of the per-input-file dEdxSkim cfg writer (the enclosing loop starts
# before this view): finish the cfg, pick the output transfer mode, queue.
f.write("process.source.fileNames.extend(['" + inFile[1] + "'])\n")
f.write("\n")
f.write("#import PhysicsTools.PythonAnalysis.LumiList as LumiList\n")
f.write("#process.source.lumisToProcess = LumiList.LumiList(filename = '" + JSON + "').getVLuminosityBlockRange()")
f.write("\n")
if ("/ALCARECO" in DATASET):
    # ALCARECO format stores the tracks in a dedicated collection
    f.write("\n")
    f.write("process.tracksForDeDx.src = cms.InputTag('ALCARECOSiStripCalMinBias') #for SiStripCalMinBias ALCARECO format\n")
    f.write("\n")
f.close()
if not TransferDirectlyToStorage:
    # keep the skim locally under out/, tagged by dataset and job index
    LaunchOnCondor.Jobs_FinalCmds = [
        "cp dEdxSkim.root " + os.getcwd() + "/out/dEdxSkim_%s_%i.root && rm dEdxSkim.root" % (inFile[0], LaunchOnCondor.Jobs_Count)
    ]
else:
    # copy the skim straight to the SRM storage element
    LaunchOnCondor.Jobs_FinalCmds = [
        "lcg-cp -v -n 10 -D srmv2 -b file://${PWD}/dEdxSkim.root srm://ingrid-se02.cism.ucl.ac.be:8444/srm/managerv2\?SFN=%s/%s/dEdxSkim_%s_%i.root && rm -f dEdxSkim.root" % (EndPath, inFile[0], inFile[0], LaunchOnCondor.Jobs_Count)
    ] # if you do not use zsh, change '\?' to '?'
LaunchOnCondor.SendCluster_Push(["CMSSW", "dEdxSkimmer_cfg.py"])
os.system("rm -f dEdxSkimmer_cfg.py")
LaunchOnCondor.SendCluster_Submit()
# dE/dx thresholds for the parameter scan.
DEDX = [2.7, 3.0, 3.3]
if sys.argv[1] == '1':
    # Step 1: one skim-efficiency job per (sample, pT, #hits, dE/dx) point.
    JobName = "SkimEff"
    FarmDirectory = "FARM_SkimEff"
    LaunchOnCondor.Jobs_NEvent = 100000  # events processed per job
    LaunchOnCondor.Jobs_Skip = 0
    LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
    # the XXX_* placeholders are substituted into HSCPSkim_cfg.py per job
    for s in SAMPLES:
        for p in PT:
            for n in NH:
                for d in DEDX:
                    LaunchOnCondor.SendCluster_Push([
                        "CMSSW", "HSCPSkim_cfg.py",
                        "XXX_SAMPLE_XXX", str(s),
                        "XXX_PT_XXX", str(p),
                        "XXX_NH_XXX", str(n),
                        "XXX_DEDX_XXX", str(d)
                    ])
    LaunchOnCondor.SendCluster_Submit()
if sys.argv[1] == '2':
    # Step 2: analyze the skim output over the same parameter grid.
    #LaunchOnCondor.runInteractively = True
    LaunchOnCondor.LSFlog = False  # do not keep LSF log files
    JobName = "Analyze"
    FarmDirectory = "FARM_Analyze"
    LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
    for s in SAMPLES:
        for p in PT:
            for n in NH:
                for d in DEDX:
# Submit the 2017 HSCParticleProducer for the matched datasets of one run
# (this chunk sits inside a per-run branch whose header is outside the view).
print matchedDatasets
FilesByRun = []
for dataset in matchedDatasets:
    filesFromDaset(dataset)
if (not ISLOCAL):
    # remote reads need HOME and a valid grid proxy on the worker node
    LaunchOnCondor.Jobs_InitCmds = [
        'export HOME=%s' % os.environ['HOME'],
        'export X509_USER_PROXY=~/x509_user_proxy/x509_proxy; voms-proxy-init --noregen;'
    ]
os.system("mkdir -p out/" + str(runN))
# move the producer output into out/<run>/
LaunchOnCondor.Jobs_FinalCmds = [
    "mv %s %s/out/%s/HSCP_%s.root" % (OUTPUTFILE, os.getcwd(), runN, runN)
]
# NOTE(review): os.getcwd() + lumiFile has no '/' separator — confirm that
# lumiFile starts with one.
LaunchOnCondor.SendCluster_Push(
    ["CMSSW", ["HSCParticleProducer2017_cfg.py"]],
    "GTAG=%s OUTPUTFILE=%s SAMPLE=%s isSkimmedSample=%s LUMITOPROCESS=%s inputFiles=%s" % (args.GTAG, OUTPUTFILE, args.SAMPLE, args.isSkimmedSample, os.getcwd() + lumiFile, FilesByRun),
    index='%s_' % runN)
print '-------------'
print lumiFile
print FilesByRun
print '-------------'
LaunchOnCondor.SendCluster_Submit()
# NOTE(review): this else pairs with a step-selection if outside the view.
else:
    print "choose step 0 or 1"
# (Tail of a Jobs_InitCmds list whose opening bracket is outside this view:
# grid-proxy setup for the worker node.)
'export X509_USER_PROXY=~/x509_user_proxy/x509_proxy; voms-proxy-init --noregen;'
]
print DATASET + " : "
# one BASH job per chunk of 5 input files, per run
for RUN in FILELIST:
    print str(RUN) + " --> %i files to process" % len(FILELIST[RUN])
    INDEX = 0
    for inFileList in getChunksFromList(FILELIST[RUN], 5):
        # build a comma-separated input list for MuonTimingStudy.sh
        InputListCSV = ''
        for inFile in inFileList:
            InputListCSV += inFile + ','
        InputListCSV = InputListCSV[:-1] #remove the last duplicated comma
        LaunchOnCondor.SendCluster_Push([
            "BASH",
            "sh " + os.getcwd() + "/MuonTimingStudy.sh " + InputListCSV
            + " out.root " + str(RUN) + "; mv out.root " + os.getcwd()
            + "/out/Histos_%i_%i.root" % (RUN, INDEX)
        ])
        INDEX += 1
LaunchOnCondor.SendCluster_Submit()
if sys.argv[1] == '2':
    # merge all per-chunk histograms above 1 KiB into one file
    os.system(
        'find out/Histos_*.root -type f -size +1024c | xargs hadd -f Histos.root'
    )
if sys.argv[1] == '3':
    os.system('sh MakePlot.sh')
if sys.argv[1] == '4':
# List the dataset's files on the SRM storage element, keep only .root files,
# and convert them to xrootd URLs (this sits inside an if-branch whose header
# is outside the view).
print commands.getstatusoutput(initCommand+'lcg-ls -b -D srmv2 "srm://ingrid-se02.cism.ucl.ac.be:8444/srm/managerv2?SFN='+DATASET[1]+'" | xargs -I {} basename {}')
LocalFileList = commands.getstatusoutput(initCommand+'lcg-ls -b -D srmv2 "srm://ingrid-se02.cism.ucl.ac.be:8444/srm/managerv2?SFN='+DATASET[1]+'" | xargs -I {} basename {}')[1].split('\n')
for f in LocalFileList:
    if(f[-5:].find('.root')==-1):continue #only .root file considered
    FILELIST += ["root://cms-xrd-global.cern.ch/"+DATASET[1].replace('/storage/data/cms/store/','/store/')+f]
# NOTE(review): this else pairs with the off-screen if above, not the for loop.
else: #file path is an HSCP sample name, use the name to run the job
    FILELIST += [DATASET[1]]
print FILELIST
for inFileList in getChunksFromList(FILELIST,max(1,len(FILELIST)/50)): #50 jobs, this is a trade off between hadding time and processing time
    # comma-separated input list for DeDxStudy.sh
    InputListCSV = ''
    for inFile in inFileList:
        InputListCSV+= inFile + ','
    InputListCSV = InputListCSV[:-1] #remove the last duplicated comma
    LaunchOnCondor.SendCluster_Push (["BASH", "sh " + os.getcwd() + "/DeDxStudy.sh " + InputListCSV + " out.root; mv out.root " + outdir+"dEdxHistos_%i.root" % LaunchOnCondor.Jobs_Count ])
LaunchOnCondor.SendCluster_Submit()
elif sys.argv[1]=='2':
    # step 2: per-dataset merge of the produced histograms (files > 1 KiB)
    for DATASET in datasetList :#+signalList :
        indir = os.getcwd() + "/Histos/"+DATASET[0]+'/'
        os.system('rm -f Histos_'+DATASET[0]+'.root')
        os.system('find ' + indir + '*.root -type f -size +1024c | xargs hadd -f Histos_'+DATASET[0]+'.root')
    # finally merge all the runs into the histogram with data
    #os.system('rm -f Histos_Data.root')
    #os.system('hadd -f Histos_Data.root Histos_Run*.root')
elif sys.argv[1]=='3':
    os.system('sh MakePlot.sh')
else:
# Interior of the per-sample FWLITE submission loop (the enclosing if/loop
# headers begin before this chunk): remote-storage worker setup, then one
# Analysis_Step1 job per accepted sample line.
LaunchOnCondor.Jobs_InitCmds = [
    'ulimit -c 0',  # no core dumps on the worker node
    'export HOME=%s' % os.environ['HOME'],
    # a valid grid proxy is required to read the remote storage element
    'export X509_USER_PROXY=$HOME/x509_user_proxy/x509_proxy; voms-proxy-init --noregen;',
    'export REMOTESTORAGESERVER=' + RemoteServer,
    # NOTE(review): '//store/' (double slash) differs from the '/store/' used
    # by the analogous replace elsewhere in these scripts — confirm intended.
    'export REMOTESTORAGEPATH=' + RemoteStorageDir.replace(
        '/storage/data/cms/store/', '//store/')
]
else:
    LaunchOnCondor.Jobs_InitCmds = ['ulimit -c 0']
if (int(vals[1]) >= 2 and skipSamples(Type, vals[2]) == True):
    continue  # signal samples filtered by the skip list for this analysis type
# if(int(vals[1])==0):continue
LaunchOnCondor.SendCluster_Push([
    "FWLITE",
    os.getcwd() + "/Analysis_Step1_EventLoop.C",
    '"ANALYSE_' + str(index) + '_to_' + str(index) + '"',
    Type,
    vals[2].rstrip()
])
f.close()
LaunchOnCondor.SendCluster_Submit()
elif sys.argv[1] == '2':
    # step 2: merge step-1 outputs and run the background prediction
    print 'MERGING FILE AND PREDICTING BACKGROUNDS'
    FarmDirectory = "FARM"
    JobName = "HscpPred"
    LaunchOnCondor.Jobs_RunHere = 1  # run on the submission cluster itself
    LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
    for Type in AnalysesToRun:
        Path = "Results/Type" + str(Type) + "/"
        os.system('rm -f ' + Path + 'Histos.root')
        #os.system('hadd -f ' + Path + 'Histos.root ' + Path + '*.root')