            dtag = origdtag

            xsec = getByLabel(procData, 'xsec', -1)
            br = getByLabel(procData, 'br', [])
            suffix = str(getByLabel(procData, 'suffix', procSuffix))
            split = getByLabel(procData, 'split', -1)
            if opt.onlytag != 'all' and dtag.find(opt.onlytag) < 0: continue
            filt = ''
            if mctruthmode != 0: filt = '_filt' + str(mctruthmode)
            if (xsec > 0 and not isdata):
                for ibr in br:
                    xsec = xsec * ibr

            if (opt.resubmit == False):
                FileList = getByLabel(procData, 'dset', ['"UnknownDataset"'])
                LaunchOnCondor.SendCluster_Create(FarmDirectory,
                                                  JobName + '_' + dtag + filt)
                if (LaunchOnCondor.subTool != 'crab'):
                    FileList = getFileList(procData, int(opt.NFile))

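                # loop over the input files and set up the per-job init/final commands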
                for s in range(0, len(FileList)):
                    LaunchOnCondor.Jobs_FinalCmds = []
                    LaunchOnCondor.Jobs_InitCmds = ['ulimit -c 0;']
                    if (not isLocalSample):
                        LaunchOnCondor.Jobs_InitCmds += [initialCommand]

                    #create the cfg file
                    eventsFile = FileList[s]
                    eventsFile = eventsFile.replace('?svcClass=default', '')
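                    # if requested, cache the local input file and run on the cached copy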
                    if (doCacheInputs and isLocalSample):
                        result = CacheInputs(eventsFile)
                        eventsFile = result[0]
Example #2
#!/usr/bin/env python

import string
import os
import sys
import LaunchOnCondor

FarmDirectory = "FARM"
JobName = "lltautauToysFirstTry"
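# rebuild the local Analyzer binary before creating the farm jobs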
os.system("make clean; make")
LaunchOnCondor.Jobs_RunHere = 0
LaunchOnCondor.SendCluster_Create(FarmDirectory, JobName)
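# one job per index: run the Analyzer toys and move the output directory back to the working area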
for i in range(0,11):
   LaunchOnCondor.Jobs_FinalCmds = ["mv justForDebug_"+str(i) + " " + os.getcwd()+"/." ]
   LaunchOnCondor.SendCluster_Push(["BASH", os.getcwd()+"/Analyzer  --File1 "+os.getcwd()+"/plotter_2016_02_09.root --File2 "+os.getcwd()+"/plotter_2016_02_09.root --Index "+str(i)+" --Ntoy 100 --Data --Dir justForDebug_"+str(i)+" --NoXserver" ])
   #LaunchOnCondor.SendCluster_Push(["BASH", os.getcwd()+"/Analyzer  --File1 "+os.getcwd()+"/../plotter_Approval.root --File2 "+os.getcwd()+"/../plotter_Approval_tree.root --Index "+str(i)+" --Ntoy 1 --Dir justForDebug_"+str(i)+" --NoXserver" ])
LaunchOnCondor.SendCluster_Submit()

Example #3
    #prepare the output
    OUT = CWD + '/JOBS/' + OUTName[iConf] + signalSuffix + binSuffix + '/'
    os.system('mkdir -p ' + OUT)

    #get the cuts
    file = ROOT.TFile(inUrl)
    cutsH = file.Get('WZ/all_optim_cut')

    ###################################################
    ##   OPTIMIZATION LOOP                           ##
    ###################################################

    if (phase == 1):
        print '# RUN LIMITS FOR ALL POSSIBLE CUTS  for ' + DataCardsDir + '#\n'
        LaunchOnCondor.SendCluster_Create(
            FarmDirectory,
            JobName + "_" + signalSuffix + binSuffix + OUTName[iConf])

        FILE = open(OUT + "/LIST.txt", "w")
        i = 1
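        # write one submission script per optimization cut bin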
        while (i < cutsH.GetXaxis().GetNbins() + 1):
            shapeCutMin_ = 0
            shapeCutMax_ = 9999
            SCRIPT = open(
                OUT + 'script_' + str(i) + '_' + str(shapeCutMin_) + '_' +
                str(shapeCutMax_) + '.sh', "w")
            SCRIPT.writelines('cd ' + CMSSW_BASE + '/src;\n')
            SCRIPT.writelines("export SCRAM_ARCH=" +
                              os.getenv("SCRAM_ARCH", "slc6_amd64_gcc491") +
                              ";\n")
            SCRIPT.writelines("eval `scram r -sh`;\n")