Example #1
def phadd(batch_name, dataset, samples, output_fn='phadd.root'):
    meat = '''
job=$(<cs_job)
fns=$(python -c 'from cs_filelist import get; print " ".join(get('$job'))')
cmd="./hadd.py __OUTPUT_FN__ $fns"

if false; then
  echo $cmd
  meatexit=0
  touch __OUTPUT_FN__
else
  $cmd 2>&1
  meatexit=$?
fi
'''.replace('__OUTPUT_FN__', output_fn)

    cs = CondorSubmitter(batch_name = batch_name,
                         dataset = dataset,
                         meat = meat,
                         pset_template_fn = '',
                         input_files = [cmssw_base('src/JMTucker/Tools/scripts/hadd.py')],
                         output_files = [output_fn, output_fn + '.haddlog'],
                         stageout_files = 'all',
                         )
    cs.submit_all(samples)
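# a minimal usage sketch for phadd (hypothetical driver, not from the original
# source: the batch and dataset names are placeholders, and Samples.data_samples
# is assumed to exist as in the later examples)
if __name__ == '__main__' and hasattr(sys, 'argv') and 'submit' in sys.argv:
    import JMTucker.Tools.Samples as Samples
    phadd('PHaddTest', 'ntuplev11', Samples.data_samples)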
Example #2
    def submit(self, samples):
        self.normalize()

        crab_samples, condor_samples = [], []
        for s in samples:
            s.set_curr_dataset(self.common.dataset)
            # an explicit override wins; otherwise route by the sample's condor flag
            if self.override == 'condor' or (self.override != 'crab' and s.condor):
                condor_samples.append(s)
            else:
                crab_samples.append(s)

        if self.testing:
            print 'MetaSubmitter: crab samples ='
            for s in crab_samples:
                print s.name
            print 'MetaSubmitter: condor samples ='
            for s in condor_samples:
                print s.name

        if crab_samples:
            args = dict(self.common.__dict__)
            args.update(self.crab.__dict__)
            cs = CRABSubmitter(self.batch_name, **args)
            cs.submit_all(crab_samples)
        if condor_samples:
            args = dict(self.common.__dict__)
            args.update(self.condor.__dict__)
            cs = CondorSubmitter(self.batch_name, **args)
            cs.submit_all(condor_samples)
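# the args-merging idiom above, in isolation: the common settings are copied,
# then updated with the backend-specific ones, so backend keys win on collision
# (a standalone sketch with made-up values, not part of the original code)
common = {'dataset': 'ntuplev11', 'ex': 2016}
condor = {'ex': 2017, 'stageout_files': 'all'}
args = dict(common)
args.update(condor)
assert args == {'dataset': 'ntuplev11', 'ex': 2017, 'stageout_files': 'all'}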
Example #3
names = set(s.name for s in samples)
allowed = [arg for arg in sys.argv if arg in names]

for sample in samples:
    if allowed and sample.name not in allowed:
        continue

    print sample.isample, sample.name,
    isample = sample.isample  # for locals use below

    batch_dir = os.path.join(batch_root, 'signal_%05i' % sample.isample)
    os.mkdir(batch_dir)
    open(os.path.join(batch_dir, 'nice_name'), 'wt').write(sample.name)

    run_fn = os.path.join(batch_dir, 'run.sh')
    open(run_fn, 'wt').write(script_template % locals())

    open(os.path.join(batch_dir, 'cs_dir'), 'wt')
    open(os.path.join(batch_dir, 'cs_jobmap'),
         'wt').write('\n'.join(str(i) for i in xrange(njobs)) + '\n')
    open(os.path.join(batch_dir, 'cs_submit.jdl'),
         'wt').write(jdl_template % locals())
    open(os.path.join(batch_dir, 'cs_njobs'), 'wt').write(str(njobs))
    open(os.path.join(batch_dir, 'cs_outputfiles'),
         'wt').write('observed.root expected.root combine_output.txtgz')

    CondorSubmitter._submit(batch_dir, njobs)
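    # at this point each signal_%05i directory holds nice_name, run.sh, cs_dir,
    # cs_jobmap, cs_submit.jdl, cs_njobs, and cs_outputfiles, i.e. the files
    # CondorSubmitter evidently expects, built by hand so that _submit can be
    # called directly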

# zcat signal_*/combine_output* | sort | uniq | egrep -v '^median expected limit|^mean   expected limit|^Observed|^Limit: r|^Generate toy|^Done in|random number generator seed is|^   ..% expected band|^DATACARD:' | tee /tmp/duh
Example #4
process.mfvGenParticleFilterGeo2ddist = process.mfvGenParticleFilter.clone(min_njets = 4, min_jet_pt = 20, min_jet_ht40 = 1000, min_rho0 = 0.01, min_rho1 = 0.01, max_rho0 = 2.0, max_rho1 = 2.0)
process.mfvGenGeo2ddist = mfvTheoristRecipe.clone()
process.pGenGeo2ddist = cms.Path(process.common * process.mfvGenParticleFilterGeo2ddist * process.mfvGenGeo2ddist)

process.mfvGenParticleFilterSumpt350 = process.mfvGenParticleFilter.clone(min_njets = 4, min_jet_pt = 20, min_jet_ht40 = 1000, min_rho0 = 0.01, min_rho1 = 0.01, max_rho0 = 2.0, max_rho1 = 2.0, min_sumpt = 350, bquarkpt_fraction = 0.65)
process.mfvGenSumpt350 = mfvTheoristRecipe.clone()
process.pGenSumpt350 = cms.Path(process.common * process.mfvGenParticleFilterSumpt350 * process.mfvGenSumpt350)

process.mfvGenParticleFilterDvv400um = process.mfvGenParticleFilter.clone(min_njets = 4, min_jet_pt = 20, min_jet_ht40 = 1000, min_rho0 = 0.01, min_rho1 = 0.01, max_rho0 = 2.0, max_rho1 = 2.0, min_sumpt = 350, bquarkpt_fraction = 0.65, min_dvv = 0.04)
process.mfvGenDvv400um = mfvTheoristRecipe.clone()
process.pGenDvv400um = cms.Path(process.common * process.mfvGenParticleFilterDvv400um * process.mfvGenDvv400um)
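# the three filters above share the same base cuts; a dict of common kwargs
# would keep them in sync (a hypothetical, behavior-identical refactor, shown
# commented out since it is not part of the original configuration):
#
#   common_cuts = dict(min_njets = 4, min_jet_pt = 20, min_jet_ht40 = 1000,
#                      min_rho0 = 0.01, min_rho1 = 0.01, max_rho0 = 2.0, max_rho1 = 2.0)
#   process.mfvGenParticleFilterGeo2ddist = process.mfvGenParticleFilter.clone(**common_cuts)
#   process.mfvGenParticleFilterSumpt350 = process.mfvGenParticleFilter.clone(min_sumpt = 350, bquarkpt_fraction = 0.65, **common_cuts)
#   process.mfvGenParticleFilterDvv400um = process.mfvGenParticleFilter.clone(min_sumpt = 350, bquarkpt_fraction = 0.65, min_dvv = 0.04, **common_cuts)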


if __name__ == '__main__' and hasattr(sys, 'argv') and 'submit' in sys.argv:
    from JMTucker.Tools.Year import year
    from JMTucker.Tools import Samples
    if year == 2015:
        raise NotImplementedError("samples don't have dataset")
        samples = Samples.all_signal_samples_2015
    elif year == 2016:
        samples = Samples.all_signal_samples

    from JMTucker.Tools.CondorSubmitter import CondorSubmitter
    from JMTucker.Tools.MetaSubmitter import secondary_files_modifier
    cs = CondorSubmitter('TheoristRecipeV44',
                         ex = year,
                         dataset = dataset,
                         pset_modifier = secondary_files_modifier('main')
                         )
    cs.submit_all(samples)
Example #5
for mn, mx in (3, 3), (3, 4), (4, 4):
    vtx_name = 'vtx%i%i' % (mn, mx)
    obj_name = 'byrun%i%i' % (mn, mx)
    pth_name = 'pth%i%i' % (mn, mx)

    vtx = process.mfvSelectedVerticesTight.clone(min_ntracks=mn,
                                                 max_ntracks=mx)
    obj = process.mfvByNpu.clone(vertex_src=vtx_name)
    pth = cms.Path(process.mfvAnalysisCuts * vtx * obj)
    setattr(process, vtx_name, vtx)
    setattr(process, obj_name, obj)
    setattr(process, pth_name, pth)

    obj_noana = obj.clone()
    pth_noana = cms.Path(vtx * obj_noana)
    setattr(process, obj_name + 'noana', obj_noana)
    setattr(process, pth_name + 'noana', pth_noana)
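# for reference, the mn,mx = 3,3 iteration of the loop above is equivalent to
# writing out by hand:
#
#   process.vtx33 = process.mfvSelectedVerticesTight.clone(min_ntracks=3, max_ntracks=3)
#   process.byrun33 = process.mfvByNpu.clone(vertex_src='vtx33')
#   process.pth33 = cms.Path(process.mfvAnalysisCuts * process.vtx33 * process.byrun33)
#   process.byrun33noana = process.byrun33.clone()
#   process.pth33noana = cms.Path(process.vtx33 * process.byrun33noana)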

if __name__ == '__main__' and hasattr(sys, 'argv') and 'submit' in sys.argv:
    import JMTucker.Tools.Samples as Samples
    samples = [
        Samples.qcdht1500, Samples.qcdht1500ext, Samples.qcdht2000,
        Samples.qcdht2000ext
    ]
    for sample in samples:
        sample.files_per = 10

    from JMTucker.Tools.CondorSubmitter import CondorSubmitter
    cs = CondorSubmitter('ByNpuV11_16', dataset='ntuplev11')
    cs.submit_all(samples)
Example #6
    from JMTucker.Tools.Year import year
    import JMTucker.Tools.Samples as Samples 
    if year == 2015:
        samples = Samples.data_samples_2015 + Samples.ttbar_samples_2015 + Samples.qcd_samples_2015 + Samples.qcd_samples_ext_2015
    elif year == 2016:
        samples = Samples.data_samples + Samples.ttbar_samples + Samples.qcd_samples + Samples.qcd_samples_ext

    dataset = 'pick1vtxv14'
    for sample in samples:
        sample.datasets[dataset].files_per = 100000

    from JMTucker.Tools.CondorSubmitter import CondorSubmitter
    cs = CondorSubmitter('Pick1VtxV14_merge',
                         pset_template_fn = '$CMSSW_BASE/src/JMTucker/Tools/python/Merge_cfg.py',
                         ex = year,
                         dataset = dataset,
                         publish_name = 'pick1vtxv14_merge',
                         stageout_files = 'all'
                         )
    cs.submit_all(samples)


'''
# little zsh (bash?) for-loop to help figure out the job splitting
for x in eventlist.*.gz; do
z=${x/eventlist./}
sample=${z/.gz/}
nsel=$(zcat $x | wc -l)
nevt=$(samples nevents $sample main)
nfile=$(samples file $sample main 10000000 | grep root | wc -l)
filesper=$(python -c "from math import ceil; nevtarget=100.; filesmax=100; print min(int(ceil(${nfile}*nevtarget/${nsel})), filesmax)")  # nevtarget must be a float, else python-2 integer division defeats the ceil
echo "$sample: nsel=$nsel nevt=$nevt nfile=$nfile files_per=$filesper"
done
'''
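# the splitting arithmetic from the loop above in plain python (a sketch of the
# same formula, not code from the original source): aim for roughly nevtarget
# selected events per job, capped at filesmax input files per job
from math import ceil

def files_per_job(nsel, nfile, nevtarget=100., filesmax=100):
    return min(int(ceil(nfile * nevtarget / nsel)), filesmax)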
Example #7
file_event_from_argv(process)

process.load('JMTucker.MFVNeutralino.VertexSelector_cfi')
process.load('JMTucker.MFVNeutralino.AnalysisCuts_cfi')

process.mfvAnalysisCuts.apply_vertex_cuts = False

process.m = cms.EDAnalyzer(
    'MFVSignalMatch',
    vertex_src=cms.InputTag('mfvSelectedVerticesTight'),
    mevent_src=cms.InputTag('mfvEvent'),
    max_dist=cms.double(0.0084),
)

process.m100 = process.m.clone(max_dist=0.01)
process.m200 = process.m.clone(max_dist=0.02)

process.p = cms.Path(process.mfvSelectedVerticesSeq * process.mfvAnalysisCuts *
                     process.m * process.m100 * process.m200)

if __name__ == '__main__' and hasattr(sys, 'argv') and 'submit' in sys.argv:
    import JMTucker.Tools.Samples as Samples
    samples = Samples.mfv_signal_samples + Samples.mfv_ddbar_samples

    for sample in samples:
        sample.datasets[dataset].files_per = 1000

    from JMTucker.Tools.CondorSubmitter import CondorSubmitter
    cs = CondorSubmitter('SignalMatch', dataset=dataset)
    cs.submit_all(samples)
Example #8
def submit(config,
           name,
           scanpack_or_todo,
           todo_rawhlt=None,
           todo_reco=None,
           todo_ntuple=None):
    global nevents
    global events_per

    # avoid mutable default arguments: these lists are appended to below, so a
    # shared [] default would accumulate entries across successive calls
    todo_rawhlt = list(todo_rawhlt or [])
    todo_reco = list(todo_reco or [])
    todo_ntuple = list(todo_ntuple or [])

    if not any(name.endswith('_%s' % y) for y in (2015, 2016, 2017, 2018)):
        name += '_%s' % year

    config.General.requestName = name
    config.Data.outputPrimaryDataset = name

    if isinstance(scanpack_or_todo, scanpackbase):
        scanpack, todo = scanpack_or_todo, None
    else:
        scanpack, todo = None, scanpack_or_todo

    if scanpack:
        nevents = config.Data.totalUnits = scanpack.nevents
        events_per = config.Data.unitsPerJob = scanpack.events_per_job

    dummy_for_hash = int(time() * 1e6)
    steering = [
        'MAXEVENTS=%i' % events_per,
        'EXPECTEDEVENTS=%i' % ceil(events_per * expected_events_frac),
        'USETHISCMSSW=%i' % use_this_cmssw,
        'TRIGFILTER=%i' % trig_filter,
        'PREMIX=%i' % premix,
        'export DUMMYFORHASH=%i' % dummy_for_hash,  # exported so the python script executed in cmsRun can just get it from os.environ instead of parsing argv like we do the rest
        'OUTPUTLEVEL=%s' % output_level,
        'PYTHIA8240=%i' % pythia8240,
    ]

    if todo:
        steering.append('TODO=todo=' + todo)

    if scanpack:
        steering.append('SCANPACK=scanpack=%s,%s' %
                        (scanpack.name, scanpack.ibatch))

    salt = fixed_salt
    if not fixed_salt:
        salt = '%s %s' % (name, todo)
        if scanpack:
            salt += ' %s %s' % (scanpack.batch_name, year)

    if hip:
        assert type(hip) in (float, int)
        todo_rawhlt.append('hip_simulation,%f' % float(hip))
        for t in todo_reco, todo_ntuple:
            t.append('hip_mitigation')

    todo2s = ('RAWHLT', todo_rawhlt), ('RECO', todo_reco), ('NTUPLE', todo_ntuple)
    for todo2_name, todo2 in todo2s:
        if todo2:
            todo2 = ' '.join('todo=%s' % x for x in todo2)
            steering.append('TODO%s="%s"' % (todo2_name, todo2))

            if not fixed_salt:
                salt += ' ' + todo2

    salt = salt.replace(' ', '_').replace('=', 'EQ')
    steering.append('SALT="%s"' % salt)

    open(steering_fn, 'wt').write('\n'.join(steering) + '\n')

    if condor:
        cs = CondorSubmitter(
            batch_name=os.path.basename(config.General.workArea),
            meat=dedent('''
                                           ./nstep.sh $((job+1)) 2>&1
                                           meatexit=$?
                                           mv FrameworkJobReport.xml ${workdir}/fjr_${job}.xml
                                           '''),
            pset_template_fn=config.JobType.psetName,
            input_files=['nstep.sh'] + config.JobType.inputFiles,
            stageout_files=config.JobType.outputFiles,
            publish_name=config.Data.outputDatasetTag,
            jdl_extras='request_memory = 3000',
        )
        sample = MCSample(config.General.requestName,
                          '/%s/None/None' % config.General.requestName,
                          config.Data.totalUnits,
                          filenames=['dummy'],
                          split_by='events',
                          events_per=config.Data.unitsPerJob)
        cs.submit(sample)

    else:
        if not testing:
            try:
                output = crab_command('submit', config=config)
            except CRABConfigException:
                output = 'problem'
            open(
                os.path.join(config.General.workArea,
                             'crab_%s' % config.General.requestName, 'cs_ex'),
                'wt').write(gitstatus_dir)
            print colors.boldwhite(name)
            pprint(output)
            print
        else:
            print 'crab config:'
            print config
            print 'steering.sh:'
            os.system('cat ' + steering_fn)
    os.remove(steering_fn)
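# for concreteness, a steering.sh written by the function above might look like
# this (all values illustrative, not real output):
#
#   MAXEVENTS=500
#   EXPECTEDEVENTS=450
#   USETHISCMSSW=1
#   TRIGFILTER=0
#   PREMIX=1
#   export DUMMYFORHASH=1500000000000000
#   OUTPUTLEVEL=full
#   PYTHIA8240=1
#   TODORAWHLT="todo=hip_simulation,1.000000"
#   SALT="myscan_2017_todoEQhip_simulation,1.000000"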
Example #9
process.mfvResolutionsFullSelByDistCutJets = mfvResolutions.clone(which_mom = 1)
process.p *= process.mfvResolutionsFullSelByDistCutJets

process.mfvResolutionsFullSelByDistCutTrksJets = mfvResolutions.clone(which_mom = 2)
process.p *= process.mfvResolutionsFullSelByDistCutTrksJets


if __name__ == '__main__' and hasattr(sys, 'argv') and 'submit' in sys.argv:
    import JMTucker.Tools.Samples as Samples
    samples = Samples.mfv_signal_samples + Samples.mfv_ddbar_samples

    for sample in samples:
        sample.datasets[dataset].files_per = 1000

    from JMTucker.Tools.CondorSubmitter import CondorSubmitter
    cs = CondorSubmitter('ResolutionsV16', dataset = dataset)
    cs.submit_all(samples)

elif __name__ == '__main__' and hasattr(sys, 'argv') and 'derivecut' in sys.argv:
    from math import pi
    from JMTucker.Tools.ROOTTools import *
    set_style()
    ps = plot_saver(plot_dir('resolutionsv14'), size=(800,500))
    for fn in sys.argv[1:]:
        if fn.endswith('.root'):
            print fn
            f = ROOT.TFile(fn)
            h = f.Get('mfvResolutionsFullSelByDistCutTrksJets/h_dist3d')
            h.Draw()
            ps.save(os.path.basename(fn).replace('.root', ''))
            integ = lambda a,b: get_integral(h, a, b, include_last_bin=False)
Example #10
def submit(config, name, scanpack_or_todo, todo_rawhlt=None, todo_reco=None, todo_ntuple=None):
    global nevents
    global events_per

    # avoid mutable default arguments: these lists are appended to below, so a
    # shared [] default would accumulate entries across successive calls
    todo_rawhlt = list(todo_rawhlt or [])
    todo_reco = list(todo_reco or [])
    todo_ntuple = list(todo_ntuple or [])

    config.General.requestName = name
    config.Data.outputPrimaryDataset = name

    if isinstance(scanpack_or_todo, scanpackbase):
        scanpack, todo = scanpack_or_todo, None
    else:
        scanpack, todo = None, scanpack_or_todo

    if scanpack:
        nevents = config.Data.totalUnits = scanpack.nevents
        events_per = config.Data.unitsPerJob = scanpack.events_per_job

    dummy_for_hash = int(time()*1e6)
    steering = [
        'MAXEVENTS=%i' % events_per,
        'EXPECTEDEVENTS=%i' % ceil(events_per*expected_events_frac),
        'USETHISCMSSW=%i' % use_this_cmssw,
        'TRIGFILTER=%i' % trig_filter,
        'PREMIX=%i' % premix,
        'export DUMMYFORHASH=%i' % dummy_for_hash,  # exported so the python script executed in cmsRun can just get it from os.environ instead of parsing argv like we do the rest
        'OUTPUTLEVEL=%s' % output_level,
        ]

    if todo:
        steering.append('TODO=todo=' + todo)

    if scanpack:
        steering.append('SCANPACK=scanpack=%s,%s' % (scanpack.name, scanpack.ibatch))

    salt = fixed_salt
    if not fixed_salt:
        salt = '%s %s' % (name, todo)
        if scanpack:
            salt += ' ' + scanpack.batch_name

    if hip_simulation:
        assert type(hip_simulation) in (float,int)
        todo_rawhlt.append('hip_simulation,%f' % float(hip_simulation))

    if hip_mitigation:
        assert hip_simulation
        todo_reco  .append('hip_mitigation')
        todo_ntuple.append('hip_mitigation')

    todo2s = ('RAWHLT', todo_rawhlt), ('RECO', todo_reco), ('NTUPLE', todo_ntuple)
    for todo2_name, todo2 in todo2s:
        if todo2:
            todo2 = ' '.join('todo=%s' % x for x in todo2)
            steering.append('TODO%s="%s"' % (todo2_name, todo2))

            if not fixed_salt:
                salt += ' ' + todo2

    salt = salt.replace(' ', '_').replace('=','EQ')
    steering.append('SALT="%s"' % salt)

    open(steering_fn, 'wt').write('\n'.join(steering) + '\n')

    if condor:
        cs = CondorSubmitter(batch_name = os.path.basename(config.General.workArea),
                             meat = dedent('''
                                           ./nstep.sh $((job+1)) 2>&1
                                           meatexit=$?
                                           mv FrameworkJobReport.xml ${workdir}/fjr_${job}.xml
                                           '''),
                             pset_template_fn = config.JobType.psetName,
                             input_files = ['nstep.sh'] + config.JobType.inputFiles,
                             stageout_files = config.JobType.outputFiles,
                             publish_name = config.Data.outputDatasetTag,
                             jdl_extras = 'request_memory = 3000',
                             )
        sample = MCSample(config.General.requestName,
                          '/%s/None/None' % config.General.requestName,
                          config.Data.totalUnits,
                          filenames = ['dummy'],
                          split_by = 'events',
                          events_per = config.Data.unitsPerJob
                          )
        cs.submit(sample)

    else:
        if not testing:
            try:
                output = crab_command('submit', config=config)
            except CRABConfigException:
                output = 'problem'
            open(os.path.join(config.General.workArea, 'crab_%s' % config.General.requestName, 'cs_ex'), 'wt').write(gitstatus_dir)
            print colors.boldwhite(name)
            pprint(output)
            print
        else:
            print 'crab config:'
            print config
            print 'steering.sh:'
            os.system('cat ' + steering_fn)
    os.remove(steering_fn)
Example #11
process.p = cms.Path(process.mfvWeight * process.mfvSelectedVerticesTight *
                     process.mfvAnalysisCuts)

for min_dbv in [0., 0.02, 0.05, 0.1]:
    ana = cms.EDAnalyzer(
        'MFVClusterTracksHistos',
        event_src=cms.InputTag('mfvEvent'),
        vertex_src=cms.InputTag('mfvSelectedVerticesTight'),
        weight_src=cms.InputTag('mfvWeight'),
        min_dbv=cms.double(min_dbv),
    )
    name = 'mfvClusterTracksMindBV0p%02i' % int(min_dbv * 100)
    setattr(process, name, ana)
    process.p *= ana
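# for reference, the loop above creates analyzers named mfvClusterTracksMindBV0p00,
# mfvClusterTracksMindBV0p02, mfvClusterTracksMindBV0p05, and mfvClusterTracksMindBV0p10
# (the suffix is int(min_dbv * 100), zero-padded to two digits)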

if __name__ == '__main__' and hasattr(sys, 'argv') and 'submit' in sys.argv:
    from JMTucker.Tools.CondorSubmitter import CondorSubmitter
    import JMTucker.Tools.Samples as Samples

    samples = Samples.ttbar_samples + Samples.qcd_samples + Samples.qcd_samples_ext + \
        [Samples.mfv_neu_tau00100um_M0800, Samples.mfv_neu_tau00300um_M0800, Samples.mfv_neu_tau01000um_M0800, Samples.mfv_neu_tau10000um_M0800] + \
        [Samples.xx4j_tau00001mm_M0300, Samples.xx4j_tau00010mm_M0300, Samples.xx4j_tau00001mm_M0700, Samples.xx4j_tau00010mm_M0700]

    for sample in samples:
        sample.files_per = 50
        if not sample.is_mc:
            sample.json = 'ana_10pc.json'

    cs = CondorSubmitter('ClusterTracksV10', dataset='ntuplev10')
    cs.submit_all(samples)
Example #12
from JMTucker.Tools.Merge_cfg import *

process.out.maxSize = cms.untracked.int32(2**19) # in kB, ~537 MB


if __name__ == '__main__' and hasattr(sys, 'argv') and 'submit' in sys.argv:
    from JMTucker.Tools.MetaSubmitter import *

    from JMTucker.MFVNeutralino.NtupleCommon import ntuple_version_use as version, dataset
    batch_name = 'Ntuple%s_sigs_merge' % version
    publish_name = 'Ntuple%s_%s' % (version, year)

    samples = pick_samples(dataset, all_signal='only')

    for sample in samples:
        sample.set_curr_dataset(dataset)
        sample.split_by = 'files'
        sample.files_per = -1000000 # hope we never have more than 1M files

    from JMTucker.Tools.CondorSubmitter import CondorSubmitter
    cs = CondorSubmitter(batch_name,
                         ex = year,
                         dataset = dataset,
                         publish_name = publish_name,
                         skip_output_files = ['merge.root'], # don't autodetect it
                         stageout_files = ['merge*.root'], # let the wrapper script glob merge.root, merge001.root, ...
                         )
    cs.submit_all(samples)

# for now, can publish output with mpublish --partial --no-coderep
Example #13
import JMTucker.Tools.SimpleTriggerEfficiency_cfi as SimpleTriggerEfficiency
SimpleTriggerEfficiency.setup_endpath(process)

if __name__ == '__main__' and hasattr(sys, 'argv') and 'submit' in sys.argv:
    from JMTucker.Tools.CondorSubmitter import CondorSubmitter
    import JMTucker.Tools.Samples as Samples

    samples = Samples.mfv_signal_samples + \
        Samples.mfv_signal_samples_glu + \
        Samples.mfv_signal_samples_gluddbar + \
        Samples.xx4j_samples

    for sample in samples:
        sample.files_per = 100

    CondorSubmitter('L1SigEff').submit_all(samples)

elif __name__ == '__main__' and hasattr(sys, 'argv') and 'ana' in sys.argv:
    from JMTucker.Tools.ROOTTools import ROOT
    for fn in sys.argv[1:]:
        f = ROOT.TFile(fn)
        h = f.Get('SimpleTriggerEfficiency/triggers_pass_num')

        num0 = None
        for i in xrange(1, h.GetNbinsX() + 1):
            path = h.GetXaxis().GetBinLabel(i)
            num = h.GetBinContent(i)
            if num0 is None:
                num0 = num
            else:
                if num - num0 > 0.01: