示例#1
0
    def filenames(self):
        """Return the list of input file names for the currently selected dataset.

        Resolution order: the list already cached on the dataset object, then
        the JMTucker.Tools.SampleFiles registry, then a DBS query whose result
        is cached back onto the dataset object for subsequent calls.
        """
        fns = self.datasets[self.curr_dataset].filenames
        if not fns:
            try:
                # Registry entry is (expected_count, filenames) or None.
                import JMTucker.Tools.SampleFiles as sfns
                x = sfns.get(self.name, self.curr_dataset)
                if x is not None:
                    nfns, fns = x
                    if len(fns) != nfns:
                        # Registry is internally inconsistent; refuse to use it.
                        raise ValueError('problem with JMTucker.Tools.SampleFiles')
            except ImportError:
                pass

            if not fns:
                # Slow path: query DBS and cache the result on the dataset object.
                print 'hitting DBS for filenames for', self.name, self.curr_dataset, self.dataset
                fns = self.datasets[self.curr_dataset].filenames = DBS.files_in_dataset(self.dataset, self.dbs_inst)
        return fns
示例#2
0
    def filenames(self):
        fns = self.datasets[self.curr_dataset].filenames
        if not fns:
            try:
                import JMTucker.Tools.SampleFiles as sfns
                x = sfns.get(self.name, self.curr_dataset)
                if x is not None:
                    nfns, fns = x
                    if len(fns) != nfns:
                        raise ValueError('problem with JMTucker.Tools.SampleFiles')
            except ImportError:
                pass

            if not fns:
                print 'hitting DBS for filenames for', self.name, self.curr_dataset, self.dataset
                fns = self.datasets[self.curr_dataset].filenames = DBS.files_in_dataset(self.dataset, self.dbs_inst)
        return fns
示例#3
0
def sample_files(process, sample, dataset, n=-1):
    """Attach the registered files of (sample, dataset) to *process*.

    Thin wrapper around SampleFiles.set_process; *n* is passed through
    unchanged (presumably a file-count limit, -1 meaning no limit -- confirm
    against SampleFiles.set_process).
    """
    from JMTucker.Tools import SampleFiles
    SampleFiles.set_process(process, sample, dataset, n)
# cmsRun configuration: fill JMTGenParticleHistos from the 'genParticles'
# collection of the 'testqcdht2000' sample.
from JMTucker.Tools.BasicAnalyzer_cfg import *

import JMTucker.Tools.SampleFiles as sf
#sf.set_process(process, 'qcdht2000', 'main', 4)
sf.set_process(process, 'testqcdht2000', 'main')  # attach the registered input files
process.TFileService.fileName = 'genparticle_histos.root'

add_analyzer(process, 'JMTGenParticleHistos', src=cms.InputTag('genParticles'))

process.maxEvents.input = 11688  # NOTE(review): magic event count -- presumably the full sample size; confirm

process.source.duplicateCheckMode = cms.untracked.string('noDuplicateCheck')  # skip duplicate run/lumi/event filtering
示例#5
0
import sys
from JMTucker.Tools.BasicAnalyzer_cfg import *
process.setName_('Mini')
del process.TFileService  # this job writes an EDM file, not a histogram file

import JMTucker.Tools.Samples as Samples
s = Samples.qcdht1000  # the single sample this config runs on

from JMTucker.Tools import SampleFiles
SampleFiles.setup(process, 'MFVNtupleV18', s.name, 50000)  # NOTE(review): 50000 presumably an event limit; confirm against SampleFiles.setup

process.load('JMTucker.MFVNeutralino.VertexSelector_cfi')
process.load('JMTucker.MFVNeutralino.AnalysisCuts_cfi')
process.load('JMTucker.MFVNeutralino.WeightProducer_cfi')

process.mfvAnalysisCuts.min_nvertex = 1
process.mfvWeight.histos = cms.untracked.bool(False)  # weight product only, no histograms

# Per-event sample bookkeeping: sample name, event count, cross section and
# target integrated luminosity, plus the weight from mfvWeight.
process.mfvSampleInfo = cms.EDProducer('SampleInfoProducer',
                                       extra_weight_src = cms.InputTag('mfvWeight'),
                                       sample = cms.string(s.name),
                                       num_events = cms.int32(s.nevents),
                                       cross_section = cms.double(s.cross_section),
                                       int_lumi = cms.double(20000),
                                       )

process.p = cms.Path(process.mfvSelectedVerticesTight * process.mfvAnalysisCuts * process.mfvWeight * process.mfvSampleInfo)

process.out = cms.OutputModule('PoolOutputModule',
                               fileName = cms.untracked.string('minintuple.root'),
                               SelectEvents = cms.untracked.PSet(SelectEvents = cms.vstring('p')),
示例#6
0
# on (MINI)AOD use e.g. dasgo "file,lumi dataset="$(samples ds qcdht1000_2017 miniaod) | grep 833,
# run with | egrep -v '^File </tmp/tmp'

# Find which registered file(s) of a sample contain a given (run, lumi, event).

import sys, os
from pprint import pprint
from JMTucker.Tools import eos, SampleFiles
from JMTucker.Tools.ROOTTools import ROOT, detree

if len(sys.argv) < 6:
    sys.exit('usage: %s dataset sample run lumi event\n  where dataset and sample are as registered in SampleFiles. sample can be "*" to mean all samples having the dataset.' % sys.argv[0])

dataset = sys.argv[1]
sample = sys.argv[2]
rle = int(sys.argv[3]), int(sys.argv[4]), int(sys.argv[5])  # target (run, lumi, event)

fns = SampleFiles.get_fns(sample, dataset)
nfound = 0  # number of files found to contain the target event

for fn in fns:
    assert fn.endswith('.root')
    if not eos.exists(fn):
        raise IOError('does not exist on eos: %r' % fn)

    f = ROOT.TFile.Open(eos.canon(fn))
    t = f.Get('Events')
    # Scan every event's (run, lumi, event) triple in this file.
    for x in sorted(detree(t, 'EventAuxiliary.id().run():EventAuxiliary.luminosityBlock():EventAuxiliary.id().event()', xform=int)):
        if x == rle:
            print fn
            nfound += 1

if nfound != 1:
示例#7
0
def sample_files(process, sample, dataset, n=-1):
    """Attach the registered files of (sample, dataset) to *process*.

    A trailing '_year' in *sample* is resolved using the module-level *year*
    (e.g. 'foo_year' -> 'foo_2017': the literal 'year' is dropped, the
    underscore kept). Delegates to SampleFiles.set_process; *n* is passed
    through unchanged.
    """
    if sample.endswith('_year'):
        sample = '%s%s' % (sample[:-len('year')], year)
    from JMTucker.Tools import SampleFiles
    SampleFiles.set_process(process, sample, dataset, n)
# Fragment of a file-picking script. NOTE(review): 'dataset', 'SampleFiles',
# 'fnmatch' and 'random' must be defined/imported earlier in the full script.
sample = sys.argv[2]
pattern = sys.argv[3]
nfiles = int(sys.argv[4])
interactive = True
verbosity = 0

if '-f' in sys.argv:
    interactive = False  # -f: non-interactive mode

# Each -v on the command line raises verbosity by one.
while '-v' in sys.argv:
    verbosity += 1
    sys.argv.remove('-v')

# The sample argument may be a glob; expand it against the registered
# (sample, dataset) keys.
if '*' in sample or '?' in sample:
    samples = []
    for s, ds in SampleFiles.keys():
        if ds == dataset and fnmatch(s, sample):
            samples.append(s)
else:
    samples = [sample]
samples.sort()

infos = []

for sample in samples:
    if verbosity >= 1:
        print sample
    fns = SampleFiles.get_fns(sample, dataset)
    fns = random.sample(fns, nfiles)  # pick nfiles at random without replacement
    
    total_size = 0
# cmsRun configuration: fill JMTGenParticleHistos from the 'genParticles'
# collection of the 'testqcdht2000' sample.
from JMTucker.Tools.BasicAnalyzer_cfg import *

import JMTucker.Tools.SampleFiles as sf
#sf.set_process(process, 'qcdht2000', 'main', 4)
sf.set_process(process, 'testqcdht2000', 'main')  # attach the registered input files
process.TFileService.fileName = 'genparticle_histos.root'

add_analyzer(process, 'JMTGenParticleHistos', src = cms.InputTag('genParticles'))

process.maxEvents.input = 11688  # NOTE(review): magic event count -- presumably the full sample size; confirm

process.source.duplicateCheckMode = cms.untracked.string('noDuplicateCheck')  # skip duplicate run/lumi/event filtering
import sys, os
from math import ceil
from JMTucker.Tools.ROOTTools import *
from JMTucker.Tools import SampleFiles as sf

# For each input histogram file, estimate how many miniaod files per sample
# are needed so that roughly target_nevents preselected events reach the
# vertexer per job.
used_half_mc = False  # set True if only half of the MC statistics were processed
target_nevents = 50000 # the number of presel events that make it to the vertexer per job

print '%30s %12s/%12s = %10s -> %10s (%10s)' % ('sample','npresel','nmcstat','frac','targnfns','evsfromtgt')
for fn in sys.argv[1:]:
    sname = os.path.basename(fn).replace('.root','')  # sample name taken from the file name
    f = ROOT.TFile(fn)
    mcstat = f.Get('mcStat/h_sums').GetBinContent(1)  # MC statistics counter (bin 1 of mcStat/h_sums)
    if used_half_mc and not sname.startswith('JetHT'):
        mcstat /= 2  # JetHT samples are exempt from the halving
    npresjet = f.Get('mfvEventHistosJetPreSel/h_npv').GetEntries()  # events passing the jet preselection
    #npreslep = f.Get('mfvEventHistosLeptonPreSel/h_npv').GetEntries()
    nfns = len(sf.get_fns(sname, 'miniaod')) if sf.has(sname, 'miniaod') else -1  # -1 if not registered
    tfns = min(int(ceil(target_nevents / npresjet * nfns)), 50)  # target file count, capped at 50
    evsfromtfns = (2 if used_half_mc else 1) * mcstat * tfns / nfns  # MC events those files correspond to
    print '%30s %12.0f/%12.0f = %10.6f -> %4i/%5i (%10i)' % (sname, npresjet, mcstat, npresjet/mcstat, tfns, nfns, evsfromtfns)    #(lep %12.0f -> %10.6f)' % (, npreslep, npreslep/mcstat)
示例#11
0
import sys
from JMTucker.Tools.BasicAnalyzer_cfg import cms, process
from JMTucker.Tools import SampleFiles

# Deliberately disabled: everything below is unreachable until this raise is removed.
raise NotImplementedError('V15 samples have trigger selection already')

use_weights = True

process.options.wantSummary = True
SampleFiles.setup(process, 'MFVNtupleV15', 'qcdht0250', 10000)
process.TFileService.fileName = 'events_cutplay.root'

# Clone the analysis cuts (vertex cuts off) with varying trigger / jet /
# lepton requirements.
from JMTucker.MFVNeutralino.AnalysisCuts_cfi import mfvAnalysisCuts as cuts
cuts.apply_vertex_cuts = False

process.trignjets = cuts.clone()
process.trignjetsht500 = cuts.clone(min_ht=500)
process.trignjetsht750 = cuts.clone(min_ht=750)
process.trigmu = cuts.clone(trigger_bit=3, min_4th_jet_pt=20)
process.trigmuht500 = cuts.clone(trigger_bit=3, min_4th_jet_pt=20, min_ht=500)
process.trigtopmu = cuts.clone(trigger_bit=4, min_4th_jet_pt=20)
process.trigtopmunmu = cuts.clone(trigger_bit=4,
                                  min_4th_jet_pt=20,
                                  min_nsemilepmuons=1)
process.trigtopmunmuht500 = cuts.clone(trigger_bit=4,
                                       min_4th_jet_pt=20,
                                       min_nsemilepmuons=1,
                                       min_ht=500)
process.trignjetsslep = cuts.clone(min_nsemileptons=1)
process.trigonly = cuts.clone(min_njets=0, min_4th_jet_pt=0)
import sys, os
from math import ceil
from JMTucker.Tools.ROOTTools import *
from JMTucker.Tools import SampleFiles as sf

# For each input histogram file, estimate how many miniaod files per sample
# are needed so that roughly target_nevents preselected events reach the
# vertexer per job.
used_half_mc = False  # set True if only half of the MC statistics were processed
target_nevents = 50000  # the number of presel events that make it to the vertexer per job

print '%30s %12s/%12s = %10s -> %10s (%10s)' % (
    'sample', 'npresel', 'nmcstat', 'frac', 'targnfns', 'evsfromtgt')
for fn in sys.argv[1:]:
    sname = os.path.basename(fn).replace('.root', '')  # sample name taken from the file name
    f = ROOT.TFile(fn)
    mcstat = f.Get('mcStat/h_sums').GetBinContent(1)  # MC statistics counter (bin 1 of mcStat/h_sums)
    if used_half_mc and not sname.startswith('JetHT'):
        mcstat /= 2  # JetHT samples are exempt from the halving
    npresjet = f.Get('mfvEventHistosJetPreSel/h_npv').GetEntries()  # events passing the jet preselection
    #npreslep = f.Get('mfvEventHistosLeptonPreSel/h_npv').GetEntries()
    nfns = len(sf.get_fns(sname, 'miniaod')) if sf.has(sname,
                                                       'miniaod') else -1  # -1 if not registered
    tfns = min(int(ceil(target_nevents / npresjet * nfns)), 50)  # target file count, capped at 50
    evsfromtfns = (2 if used_half_mc else 1) * mcstat * tfns / nfns  # MC events those files correspond to
    print '%30s %12.0f/%12.0f = %10.6f -> %4i/%5i (%10i)' % (
        sname, npresjet, mcstat, npresjet / mcstat, tfns, nfns, evsfromtfns
    )  #(lep %12.0f -> %10.6f)' % (, npreslep, npreslep/mcstat)
示例#13
0
import sys, os
from pprint import pprint
from JMTucker.Tools import eos, SampleFiles
from JMTucker.Tools.ROOTTools import ROOT, detree

# Find which registered file(s) of a sample contain a given (run, lumi, event).
if len(sys.argv) < 6:
    sys.exit(
        'usage: %s dataset sample run lumi event\n  where dataset and sample are as registered in SampleFiles. sample can be "*" to mean all samples having the dataset.'
        % sys.argv[0])

dataset = sys.argv[1]
sample = sys.argv[2]
rle = int(sys.argv[3]), int(sys.argv[4]), int(sys.argv[5])  # target (run, lumi, event)

fns = SampleFiles.get_fns(sample, dataset)
nfound = 0  # number of files found to contain the target event

for fn in fns:
    assert fn.endswith('.root')
    if not eos.exists(fn):
        raise IOError('does not exist on eos: %r' % fn)

    f = ROOT.TFile.Open(eos.canon(fn))
    t = f.Get('Events')
    for x in sorted(
            detree(
                t,
                'EventAuxiliary.id().run():EventAuxiliary.luminosityBlock():EventAuxiliary.id().event()',
                xform=int)):
        if x == rle:
示例#14
0
import sys
from JMTucker.Tools.BasicAnalyzer_cfg import cms, process
from JMTucker.Tools import SampleFiles

# Run the ABCD histogramming sequence on one ntuple sample; with 'submit' on
# the command line, submit the same config over many samples via CRAB instead.
SampleFiles.setup(process, 'MFVNtupleV18', 'mfv_neutralino_tau1000um_M0400', 500)
process.TFileService.fileName = 'abcd_histos.root'

process.load('JMTucker.MFVNeutralino.WeightProducer_cfi')
process.load('JMTucker.MFVNeutralino.VertexSelector_cfi')
process.load('JMTucker.MFVNeutralino.AnalysisCuts_cfi')
process.load('JMTucker.MFVNeutralino.ABCDHistos_cfi')

process.p = cms.Path(process.mfvWeight * process.mfvSelectedVerticesSeq * process.mfvAnalysisCuts * process.mfvAbcdHistosSeq)

if __name__ == '__main__' and hasattr(sys, 'argv') and 'submit' in sys.argv:
    import JMTucker.Tools.Samples as Samples
    # Signal points plus ttbar and QCD samples, filtered via the command line.
    samples = Samples.from_argv([Samples.mfv_neutralino_tau0100um_M0400,
                                 Samples.mfv_neutralino_tau1000um_M0400,
                                 Samples.mfv_neutralino_tau0300um_M0400,
                                 Samples.mfv_neutralino_tau9900um_M0400] + Samples.ttbar_samples + Samples.qcd_samples)

    from JMTucker.Tools.CRABSubmitter import CRABSubmitter
    cs = CRABSubmitter('ABCDHistosV18',
                       job_control_from_sample = True,
                       use_ana_dataset = True,
                       run_half_mc = True,
                       )
    cs.submit_all(samples)
示例#15
0
import sys
from JMTucker.Tools.BasicAnalyzer_cfg import cms, process
from JMTucker.Tools import SampleFiles

# Deliberately disabled: everything below is unreachable until this raise is removed.
raise NotImplementedError('V15 samples have trigger selection already')

use_weights = True

process.options.wantSummary = True
SampleFiles.setup(process, 'MFVNtupleV15', 'qcdht0250', 10000)
process.TFileService.fileName = 'events_cutplay.root'

# Clone the analysis cuts (vertex cuts off) with varying trigger / jet /
# lepton requirements; one Path per clone is created below.
from JMTucker.MFVNeutralino.AnalysisCuts_cfi import mfvAnalysisCuts as cuts
cuts.apply_vertex_cuts = False

process.trignjets = cuts.clone()
process.trignjetsht500 = cuts.clone(min_ht = 500)
process.trignjetsht750 = cuts.clone(min_ht = 750)
process.trigmu = cuts.clone(trigger_bit = 3, min_4th_jet_pt = 20)
process.trigmuht500 = cuts.clone(trigger_bit = 3, min_4th_jet_pt = 20, min_ht = 500)
process.trigtopmu = cuts.clone(trigger_bit = 4, min_4th_jet_pt = 20)
process.trigtopmunmu = cuts.clone(trigger_bit = 4, min_4th_jet_pt = 20, min_nsemilepmuons = 1)
process.trigtopmunmuht500 = cuts.clone(trigger_bit = 4, min_4th_jet_pt = 20, min_nsemilepmuons = 1, min_ht = 500)
process.trignjetsslep = cuts.clone(min_nsemileptons = 1)
process.trigonly = cuts.clone(min_njets = 0, min_4th_jet_pt = 0)

# One Path per filter clone, named 'p' + the filter's name.
for name in process.filters.keys():
    setattr(process, 'p' + name, cms.Path(getattr(process,name)))

if use_weights:
    process.load('JMTucker.MFVNeutralino.WeightProducer_cfi')
示例#16
0
#!/usr/bin/env python

import sys
from JMTucker.Tools.BasicAnalyzer_cfg import *

# Scan L1 HTT thresholds on a signal sample: one MFVL1HTTFilter path per
# threshold, each behind the HLT_PFHT800 requirement.
import JMTucker.Tools.SampleFiles as sf
process.source.fileNames = sf.get('mfv_neu_tau01000um_M0300', 'main')[1][:1]  # first registered file only

process.TFileService.fileName = 'l1sigeff.root'

process.load('HLTrigger.HLTfilters.hltHighLevel_cfi')
process.hltHighLevel.HLTPaths = ['HLT_PFHT800_v*']

# One filter + path per HT threshold.
hts = range(75, 201, 25) + [240, 250, 255, 280, 300, 320]
for ht in hts:
    l1 = cms.EDFilter('MFVL1HTTFilter', threshold = cms.double(ht))
    p = cms.Path(process.hltHighLevel * l1)
    setattr(process, 'l1%i' % ht, l1)
    setattr(process, 'p%i'  % ht, p)

import JMTucker.Tools.SimpleTriggerEfficiency_cfi as SimpleTriggerEfficiency
SimpleTriggerEfficiency.setup_endpath(process)

if __name__ == '__main__' and hasattr(sys, 'argv') and 'submit' in sys.argv:
    from JMTucker.Tools.CondorSubmitter import CondorSubmitter
    import JMTucker.Tools.Samples as Samples 

    # All signal sample variants plus xx4j for batch submission.
    samples = Samples.mfv_signal_samples + \
        Samples.mfv_signal_samples_glu + \
        Samples.mfv_signal_samples_gluddbar + \
        Samples.xx4j_samples
# Fragment of a file-picking script. NOTE(review): 'dataset', 'SampleFiles',
# 'fnmatch' and 'random' must be defined/imported earlier in the full script.
sample = sys.argv[2]
pattern = sys.argv[3]
nfiles = int(sys.argv[4])
interactive = True
verbosity = 0

if '-f' in sys.argv:
    interactive = False  # -f: non-interactive mode

# Each -v on the command line raises verbosity by one.
while '-v' in sys.argv:
    verbosity += 1
    sys.argv.remove('-v')

# The sample argument may be a glob; expand it against the registered
# (sample, dataset) keys.
if '*' in sample or '?' in sample:
    samples = []
    for s, ds in SampleFiles.keys():
        if ds == dataset and fnmatch(s, sample):
            samples.append(s)
else:
    samples = [sample]
samples.sort()

infos = []

for sample in samples:
    if verbosity >= 1:
        print sample
    fns = SampleFiles.get_fns(sample, dataset)
    fns = random.sample(fns, nfiles)  # pick nfiles at random without replacement

    total_size = 0
示例#18
0
#!/usr/bin/env python

import sys
from JMTucker.Tools.BasicAnalyzer_cfg import *

# Scan L1 HTT thresholds on a signal sample: one MFVL1HTTFilter path per
# threshold, each behind the HLT_PFHT800 requirement.
import JMTucker.Tools.SampleFiles as sf
process.source.fileNames = sf.get('mfv_neu_tau01000um_M0300', 'main')[1][:1]  # first registered file only

process.TFileService.fileName = 'l1sigeff.root'

process.load('HLTrigger.HLTfilters.hltHighLevel_cfi')
process.hltHighLevel.HLTPaths = ['HLT_PFHT800_v*']

# One filter + path per HT threshold.
hts = range(75, 201, 25) + [240, 250, 255, 280, 300, 320]
for ht in hts:
    l1 = cms.EDFilter('MFVL1HTTFilter', threshold=cms.double(ht))
    p = cms.Path(process.hltHighLevel * l1)
    setattr(process, 'l1%i' % ht, l1)
    setattr(process, 'p%i' % ht, p)

import JMTucker.Tools.SimpleTriggerEfficiency_cfi as SimpleTriggerEfficiency
SimpleTriggerEfficiency.setup_endpath(process)

if __name__ == '__main__' and hasattr(sys, 'argv') and 'submit' in sys.argv:
    from JMTucker.Tools.CondorSubmitter import CondorSubmitter
    import JMTucker.Tools.Samples as Samples

    # All signal sample variants plus xx4j for batch submission.
    samples = Samples.mfv_signal_samples + \
        Samples.mfv_signal_samples_glu + \
        Samples.mfv_signal_samples_gluddbar + \
        Samples.xx4j_samples