Example #1
parser.add_argument('--no-sub', dest='noSub', action='store_true', help='Do not submit jobs')
parser.add_argument('--proxy', dest="proxy", help="Path to proxy", default=os.environ["X509_USER_PROXY"])
parser.add_argument('--executable', help='Path to executable that should be run', 
        default = script_dir+'/runPythiaJob.sh')
args = parser.parse_args()

proc = args.proc
inDir = args.inDir
slha = args.slha
executable = args.executable
qcutRange = range(args.qcutRange[0], args.qcutRange[1]+1, args.qcutStep)
qcutList = args.qcutList
nJetMax = args.nJetMax

#get files
inFilesList = ["root://cmsxrootd.fnal.gov/"+fl.split("/hadoop/cms")[-1] for fl in glob.glob(inDir+'/*.lhe')]
infile = ','.join(inFilesList)

out_dir='/hadoop/cms/store/user/'+os.environ['USER']+'/mcProduction/RAWSIM'

print "Will use Pythia to shower LHE events from files:",infile

if len(qcutList)>0: qcutRange=qcutList

for qcut in qcutRange:
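    #one shower job is submitted per qcut value; the LHE inputs are read via xrootd, so only the SLHA fragment is handed to submitCondorJob (presumably as the file to transfer)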
    outdir = out_dir+'/'+proc
    outfile = '_'.join(['GEN',proc,str(qcut)+'.root'])
    options = [proc, os.path.basename(slha), str(qcut), outdir,infile,str(nJetMax)]
    submitCondorJob(proc, executable, options, slha, label=str(qcut), #outputToTransfer=outfile,
            submit=(not args.noSub), proxy=args.proxy)
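
All of the examples on this page drive the same helper, submitCondorJob, imported from the repository's submitLHECondorJob module (visible in Example #7). The helper itself is not shown here. As a reading aid only, the following is a minimal sketch of the interface implied by the calls on this page; the submit-file contents, the logs/ paths, and any parameter not passed as a keyword above are assumptions inferred from usage, not the actual implementation.

import subprocess

def submitCondorJob(proc, executable, options, infiles, label="job",
                    outputToTransfer=None, submit=True, proxy=None,
                    isGridpackJob=False):
    #Assumed interface only: write an HTCondor submit file that runs
    #`executable` with `options` as arguments and transfers `infiles`
    #(a comma-separated string) along with the job.
    submitFile = 'condor_submit_{0}_{1}.cmd'.format(proc, label)
    with open(submitFile, 'w') as f:
        f.write('Universe = vanilla\n')
        f.write('Executable = {0}\n'.format(executable))
        f.write('Arguments = {0}\n'.format(' '.join(options)))
        f.write('should_transfer_files = YES\n')
        f.write('transfer_input_files = {0}\n'.format(infiles))
        if outputToTransfer:
            f.write('transfer_output_files = {0}\n'.format(outputToTransfer))
        if proxy:
            f.write('x509userproxy = {0}\n'.format(proxy))
        #isGridpackJob presumably adjusts resource requests in the real
        #helper; it is ignored in this sketch
        f.write('Log = logs/{0}/job_{1}.log\n'.format(proc, label))
        f.write('Output = logs/{0}/job_{1}.out\n'.format(proc, label))
        f.write('Error = logs/{0}/job_{1}.err\n'.format(proc, label))
        f.write('Queue 1\n')
    if submit:
        subprocess.call(['condor_submit', submitFile])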

Example #2
#gridpack generation script and misc scripts
infile_list = [script_dir + '/gridpack_generation.sh']  #use modified gridpack generation script
infile_list.append(genproductions_dir +
                   '/bin/MadGraph5_aMCatNLO/runcmsgrid_LO.sh')
infile_list.append(genproductions_dir +
                   '/bin/MadGraph5_aMCatNLO/cleangridmore.sh')
#patches needed by gridpack generation script
infile_list.append(
    script_dir +
    '/ucsd.patch')  #use the patch committed in this repository
infile_list.append(genproductions_dir +
                   '/bin/MadGraph5_aMCatNLO/patches/mgfixes.patch')
infile_list.append(genproductions_dir +
                   '/bin/MadGraph5_aMCatNLO/patches/models.patch')
#madgraph cards
infile_list += glob.glob(cards_dir + '/*.dat')

infile = ','.join(infile_list)

options = [proc, out_dir]
submitCondorJob(proc,
                executable,
                options,
                infile,
                label="gridpack",
                submit=(not args.noSub),
                proxy=args.proxy,
                isGridpackJob=True)

Example #3
proc = args.proc
infile = args.infile
fragment = args.fragment
nevents = args.nevents
njobs = args.njobs

script_dir = os.path.dirname(os.path.realpath(__file__))
executable = script_dir + "/runLHEPythiaJob.sh"
out_dir = "/hadoop/cms/store/user/" + os.environ["USER"] + "/mcProduction/LHERAWSIM"
print "Will generate LHE events using tarball", infile, "and shower them using Pythia"

# need to transfer input tarball and gen fragment
infiles = infile + "," + fragment
fragfile = os.path.basename(fragment)

outdir = out_dir + "/" + proc
options = [proc, str(nevents), fragfile, outdir]
print "Options:", (" ".join(options))
for j in range(0, njobs):
    rseed = str(500 + j)
    print "Random seed", rseed
    submitCondorJob(
        proc,
        executable,
        options + [rseed],
        infiles,
        label=rseed + (fragfile.replace(".py", "")),
        submit=(not args.noSub),
    )
    print "Will generate LHE events using tarball and shower them using Pythia"

    #need to transfer gen fragment
    fragfile = os.path.basename(fragment)

    logDir = os.path.join("logs",proc)
    if not os.path.isdir(logDir):
        os.makedirs(logDir)
    else:
        shutil.rmtree(logDir)
        os.makedirs(logDir)


    outdir = out_dir+'/'+proc
    if not os.path.isdir(outdir):
      os.makedirs(outdir)

    if len(qcutList)>0: qcutRange=qcutList

    for qcut in qcutRange:
        print "QCut", qcut
        for j in range(0,njobs):
            rseed = str(rseedStart+j)
            print "Random seed",rseed
            if mass:
                options = [proc, str(nevents), fragfile, str(qcut), str(nJetMax), str(mass), outdir, str(j+1)]
            else:
                options = [proc, str(nevents), fragfile, str(qcut), str(nJetMax), "0.00000", outdir, str(j+1)]
            print "Options:",(' '.join(options))
            submitCondorJob(proc, executable, options+[rseed], fragment, label=str(qcut)+'_'+rseed, submit=(not args.noSub), proxy=args.proxy)

Example #5
script_dir = os.path.dirname(os.path.realpath(__file__))

parser = argparse.ArgumentParser()
parser.add_argument('--model', dest="model", help='Name of model, plus a batch number if the model is split into multiple batches', required=True)
parser.add_argument('--tag', dest="tag", help='In case of multiple sets of fragments per model')
parser.add_argument('--executable', help='Path to executable that should be run', 
        default = script_dir+'/runFragmentValidation.sh')
parser.add_argument('--full', dest='full', action='store_true', help="Run also FSPremix and MiniAOD step.")
parser.add_argument('--fragment', dest='fragment', help="Path to fragment", required=True)
parser.add_argument('--njobs', dest="njobs", type=int, help='Number of condor jobs', required=True)
parser.add_argument('--nevents', dest="nevents", type=int, help='Number of events per job', required=True)
parser.add_argument('--no-sub', dest='noSub', action='store_true', help='Do not submit jobs')
parser.add_argument('--proxy', dest="proxy", help="Path to proxy", default='/tmp/x509up_u31156')
args = parser.parse_args()

out_dir='/hadoop/cms/store/user/'+os.environ['USER']+'/mcProduction/AODSIM'

exefile = args.executable
if (args.full): exefile = script_dir+'/runFullValidation.sh'

for ijob in range(args.njobs):
    outdir = out_dir+'/'+args.model
    if args.tag:
        options = [str(ijob+1), args.model, outdir, str(args.nevents),"condor",args.tag]
        submitCondorJob(args.model, exefile, options, args.fragment, label=args.tag+"_batch"+str(ijob+1),submit=(not args.noSub), proxy=args.proxy)
    else:
        options = [str(ijob+1), args.model, outdir, str(args.nevents),"condor"]
        submitCondorJob(args.model, exefile, options, args.fragment, label="batch"+str(ijob+1),submit=(not args.noSub), proxy=args.proxy)
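
As a usage illustration, a script built around the argument parser in Example #5 would be invoked roughly as follows; the script name and all argument values below are hypothetical, and only the flags come from the add_argument calls above.

python submitFragmentValidation.py --model MyModel_batch1 \
    --fragment fragments/MyModel_fragment.py \
    --njobs 10 --nevents 500 --full --no-sub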
            
Example #6
parser.add_argument('--no-sub', dest='noSub', action='store_true', help='Do not submit jobs')
parser.add_argument('--proxy', dest="proxy", help="Path to proxy", default=os.environ["X509_USER_PROXY"])
args = parser.parse_args()

proc = args.proc
cards_dir = args.cardsDir
genproductions_dir = args.genproductionsDir

script_dir = os.path.dirname(os.path.realpath(__file__))
executable = script_dir+'/runGridpackGeneration.sh'
out_dir = '/hadoop/cms/store/user/'+os.environ['USER']+'/mcProduction/GRIDPACKS/'+proc

#gridpack generation script and misc scripts
infile_list = [script_dir+'/gridpack_generation.sh'] #use modified gridpack generation script
infile_list.append(genproductions_dir+'/bin/MadGraph5_aMCatNLO/runcmsgrid_LO.sh')
infile_list.append(genproductions_dir+'/bin/MadGraph5_aMCatNLO/cleangridmore.sh')
#patches needed by gridpack generation script
#infile_list.append(script_dir+'/ucsd.patch') #use the patch committed in this repository
infile_list.append(genproductions_dir+'/bin/MadGraph5_aMCatNLO/patches/mgfixes.patch')
infile_list.append(genproductions_dir+'/bin/MadGraph5_aMCatNLO/patches/models.patch')
#madgraph cards
infile_list += glob.glob(cards_dir+'/*.dat')

if not os.path.isdir("logs/%s"%proc):
    os.makedirs("logs/%s"%proc)

infile = ','.join(infile_list)

options = [proc, out_dir]
submitCondorJob(proc, executable, options, infile, label="gridpack", submit=(not args.noSub), proxy=args.proxy, isGridpackJob=True)

Example #7
### Dustin Anderson

import os
import sys
import argparse
import glob

from submitLHECondorJob import submitCondorJob

script_dir = os.path.dirname(os.path.realpath(__file__))

parser = argparse.ArgumentParser()
parser.add_argument('--dataset', dest="dataset", help='Name of dataset, plus a batch number if the model has multiple datasets', required=True)
parser.add_argument('--executable', help='Path to executable that should be run', 
        default = script_dir+'/runFragmentValidation.sh')
parser.add_argument('--fragment', dest='fragment', help="Path to fragment", required=True)
parser.add_argument('--njobs', dest="njobs", type=int, help='Number of condor jobs', required=True)
parser.add_argument('--nevents', dest="nevents", type=int, help='Number of events per job', required=True)
parser.add_argument('--no-sub', dest='noSub', action='store_true', help='Do not submit jobs')
parser.add_argument('--proxy', dest="proxy", help="Path to proxy", default='/tmp/x509up_u31156')
args = parser.parse_args()


out_dir='/hadoop/cms/store/user/'+os.environ['USER']+'/mcProduction/AODSIM'

for ijob in range(args.njobs):
    outdir = out_dir+'/'+args.dataset
    options = [str(ijob+1), args.dataset, outdir, str(args.nevents)]
    submitCondorJob(args.dataset, args.executable, options, args.fragment, label="batch"+str(ijob+1),
            submit=(not args.noSub), proxy=args.proxy)

Example #8
parser.add_argument('proc', help="Name of physics model")
parser.add_argument('--in-dir', '-i', dest='inDir', help="Path to input file directory", required=True)
parser.add_argument('--slha', help="Path to slha file/fragment", required=True)
parser.add_argument('--qcut-range', dest='qcutRange', nargs=2, type=int, default=[50,100], 
        help="Range of qcuts to scan over")
parser.add_argument('--no-sub', dest='noSub', action='store_true', help='Do not submit jobs')
parser.add_argument('--proxy', dest="proxy", help="Path to proxy", default='/tmp/x509up_u31156')
args = parser.parse_args()

proc = args.proc
inDir = args.inDir
slha = args.slha
qcutRange = range(args.qcutRange[0], args.qcutRange[1]+1, 2)

#get files
inFilesList = glob.glob(inDir+'/*.lhe')
infile = ','.join(inFilesList)+','+slha

script_dir = os.path.dirname(os.path.realpath(__file__))
executable = script_dir+'/runPythiaJob.sh'
out_dir='/hadoop/cms/store/user/'+os.environ['USER']+'/mcProduction/RAWSIM'

print "Will use Pythia to shower LHE events from files:",infile

for qcut in qcutRange:
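    #one shower job per qcut value; here the LHE files themselves (plus the SLHA fragment) are passed to submitCondorJob rather than being read via xrootd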
    outdir = out_dir+'/'+proc
    outfile = '_'.join(['GEN',proc,str(qcut)+'.root'])
    options = [proc, os.path.basename(slha), str(qcut), outdir]
    submitCondorJob(proc, executable, options, infile, label=str(qcut), #outputToTransfer=outfile,
            submit=(not args.noSub), proxy=args.proxy)

Example #9
fragment = args.fragment
nevents = args.nevents
njobs = args.njobs
rseedStart = args.rseedStart
executable = args.executable
qcutRange = range(args.qcutRange[0], args.qcutRange[1]+1, args.qcutStep)
qcutList = args.qcutList
nJetMax = args.nJetMax

script_dir = os.path.dirname(os.path.realpath(__file__))
out_dir='/hadoop/cms/store/user/'+os.environ['USER']+'/mcProduction/RAWSIM'
print "Will generate LHE events using tarball and shower them using Pythia"

#need to transfer gen fragment
fragfile = os.path.basename(fragment)

outdir = out_dir+'/'+proc

if len(qcutList)>0: qcutRange=qcutList

for qcut in qcutRange:
    print "QCut", qcut
    for j in range(0,njobs):
        rseed = str(rseedStart+j)
        print "Random seed",rseed
        options = [proc, str(nevents), fragfile, str(qcut), str(nJetMax), outdir, str(j+1)]
        print "Options:",(' '.join(options))
        submitCondorJob(proc, executable, options+[rseed], fragment,
            label=str(qcut)+'_'+rseed, submit=(not args.noSub), proxy=args.proxy)

Example #10
                    default='/tmp/x509up_u31156')
parser.add_argument('--rseed-start',
                    dest='rseedStart',
                    help='Initial value for random seed',
                    type=int,
                    default=10000)
args = parser.parse_args()

executable = args.exe
infile = args.gridpack
nevents = args.nevents
njobs = args.njobs
outdir = args.outdir
outdirEos = args.outdirEos
rseedStart = args.rseedStart

print "Will run", njobs, "jobs with", nevents, "events each"
print "Running this executable:", executable

for j in range(njobs):
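    #one job per random seed, with the gridpack passed to submitCondorJob as the input to transfer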
    rseed = str(rseedStart + j)
    print "Random seed", rseed
    options = [str(nevents), str(rseed), outdir, outdirEos]
    submitCondorJob('miniaod',
                    executable,
                    options,
                    infile,
                    label=str(rseed),
                    submit=(not args.noSub),
                    proxy=args.proxy)

Example #11
parser.add_argument('--njobs', dest="njobs", type=int, help='Number of condor jobs', required=True)
parser.add_argument('--nevents', dest="nevents", type=int, help='Number of events per job', required=True)
parser.add_argument('--out-dir', help="Output directory", dest='outdir',
        default='/hadoop/cms/store/user/'+os.environ['USER']+'/mcProduction/MINIAODSIM')
parser.add_argument('--out-dir-eos', help="Output directory (EOS)", dest='outdirEos',
        default="")
parser.add_argument('--no-sub', dest='noSub', action='store_true', help='Do not submit jobs')
parser.add_argument('--proxy', dest="proxy", help="Path to proxy", default='/tmp/x509up_u31156')
parser.add_argument('--rseed-start', dest='rseedStart', help='Initial value for random seed', 
        type=int, default=1)
args = parser.parse_args()

exefile = args.executable
nevents = args.nevents
njobs = args.njobs
outdir=args.outdir
outdirEos=args.outdirEos
rseedStart = args.rseedStart

out_dir='/hadoop/cms/store/user/'+os.environ['USER']+'/mcProduction/MINIAODSIM'
print "Will run",njobs,"jobs with",nevents,"events each"
for ijob in range(2000,2000+args.njobs):
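    #batch indices for this submission start at 2000; each job's random seed is offset from rseedStart by the batch index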
    outdir = out_dir+'/'+args.model
    rseed = str(rseedStart+ijob)
    if args.tag:
        options = [str(ijob+1),args.model, outdir, str(args.nevents),rseed,"condor",args.tag]
        submitCondorJob(args.model, exefile, options, args.fragment, label=args.tag+"_batch"+str(ijob+1),submit=(not args.noSub), proxy=args.proxy)
    else:
        options = [str(ijob+1), args.model, outdir, str(args.nevents),rseed,"condor"]
        submitCondorJob(args.model, exefile, options, args.fragment, label="batch"+str(ijob+1),submit=(not args.noSub), proxy=args.proxy)

Example #12
        default='/hadoop/cms/store/user/'+os.environ['USER']+'/mcProduction/MINIAODSIM')
parser.add_argument('--out-dir-eos', help="Output directory (EOS)", dest='outdirEos',
        default="")
parser.add_argument('--no-sub', dest='noSub', action='store_true', help='Do not submit jobs')
parser.add_argument('--pu-input', dest="puOpt", help="Specify how to retrieve the PU dataset: 'dbs', 'local_safe'", default='dbs')
parser.add_argument('--proxy', dest="proxy", help="Path to proxy", default='/tmp/x509up_u31156')
args = parser.parse_args()

executable = args.exe
infile = args.gridpack
nevents = args.nevents
njobs = args.njobs
outdir=args.outdir
outdirEos=args.outdirEos

print "Will run",njobs,"jobs with",nevents,"events each"
print "Running this executable:",executable

for j in range(njobs):
    rseed = str(10000+j)
    print "Random seed",rseed
    if args.puOpt == "dbs": puInputStr = "dbs:/MinBias_TuneCUETP8M1_13TeV-pythia8/RunIIWinter15GS-MCRUN2_71_V1-v1/GEN-SIM"
    elif args.puOpt == "local_safe": 
        puFilesAll = glob.glob("/hadoop/cms/phedex/store/mc/RunIIWinter15GS/MinBias_TuneCUETP8M1_13TeV-pythia8/GEN-SIM/MCRUN2_71_V1-v1/*/*")
        puFiles = random.sample(puFilesAll,10)
        puFiles = ["file:"+i for i in puFiles]
        puInputStr = "{0}".format(",".join(puFiles))
    options = [str(nevents), str(rseed), outdir, puInputStr, outdirEos]
    submitCondorJob('miniaod', executable, options, infile, label=str(rseed), 
            submit=(not args.noSub),proxy=args.proxy)

Example #13
args = parser.parse_args()

proc = args.proc
infile = args.infile
fragment = args.fragment
nevents = args.nevents
njobs = args.njobs

script_dir = os.path.dirname(os.path.realpath(__file__))
executable = script_dir + '/runLHEPythiaJob.sh'
out_dir = '/hadoop/cms/store/user/' + os.environ['USER'] + '/mcProduction/LHERAWSIM'
print "Will generate LHE events using tarball", infile, "and shower them using Pythia"

#need to transfer input tarball and gen fragment
infiles = infile + ',' + fragment
fragfile = os.path.basename(fragment)

outdir = out_dir + '/' + proc
options = [proc, str(nevents), fragfile, outdir]
print "Options:", (' '.join(options))
for j in range(0, njobs):
    rseed = str(500 + j)
    print "Random seed", rseed
    submitCondorJob(proc,
                    executable,
                    options + [rseed],
                    infiles,
                    label=rseed + (fragfile.replace('.py', '')),
                    submit=(not args.noSub))