def main(argv=None): if argv is None: argv = sys.argv[1:] histName = 'pileup' fileName = 'pileup/pileup.root' if getCMSSWVersion() == '76X': # 76X samples with pileup matching data from SimGeneral.MixingModule.mix_2015_25ns_FallMC_matchData_PoissonOOTPU_cfi import mix pileupDist = [float(x) for x in mix.input.nbPileupEvents.probValue] elif getCMSSWVersion() == '80X': # 80X sample with startup pileup #from SimGeneral.MixingModule.mix_2016_25ns_SpringMC_PUScenarioV1_PoissonOOTPU_cfi import mix # 80X moriond pileup from SimGeneral.MixingModule.mix_2016_25ns_Moriond17MC_PoissonOOTPU_cfi import mix pileupDist = [float(x) for x in mix.input.nbPileupEvents.probValue] else: from SimGeneral.MixingModule.mix_2017_25ns_WinterMC_PUScenarioV1_PoissonOOTPU_cfi import mix pileupDist = [float(x) for x in mix.input.nbPileupEvents.probValue] rootfile = ROOT.TFile(fileName,'recreate') # create mc pileup dist histmc = ROOT.TH1D(histName+'_MC',histName+'_MC',len(pileupDist),0,len(pileupDist)) for b,val in enumerate(pileupDist): histmc.SetBinContent(b+1,val) histmc.Scale(1./histmc.Integral()) histmc.Write() # read data for datatype in ['','_up','_down']:#,'_60000','_61000','_62000','_63000','_64000','_65000','_66000','_67000','_68000','_69000','_70000','_71000','_72000','_73000','_74000','_75000','_76000','_77000','_78000','_79000','_80000']: dataFileName = 'pileup/PileUpData{0}.root'.format(datatype) datafile = ROOT.TFile(dataFileName) histdata = datafile.Get(histName) histdata.SetTitle(histName+'_Data' + datatype) histdata.SetName(histName+'_Data'+datatype) histdata.Scale(1./histdata.Integral()) rootfile.cd() histdata.Write() # now use to get scalefactors numbins = min([histdata.GetNbinsX(),histmc.GetNbinsX()]) histscale = ROOT.TH1D(histName+'_scale'+datatype,histName+'_scale'+datatype,numbins,0,numbins) for b in range(numbins): d = histdata.GetBinContent(b+1) m = histmc.GetBinContent(b+1) sf = float(d)/m if m else 0. 
histscale.SetBinContent(b+1,sf) histscale.Write() rootfile.Write() rootfile.Close()
def getTestFiles(analysis, sample, n=1, version=None):
    '''Return up to n ntuple files for a short-named test sample of an analysis.'''
    if not version:
        version = getCMSSWVersion()
    # the 76X campaign used different dataset names for the signal samples
    legacy = version == '76X'
    sampleMap = {
        'wz'            : 'WZTo3LNu_TuneCUETP8M1_13TeV-powheg-pythia8',
        'zz'            : 'ZZTo4L_13TeV_powheg_pythia8',
        'data'          : 'DoubleMuon',
        'hpp'           : 'HPlusPlusHMinusMinusHTo4L_M-500_13TeV-pythia8' if legacy else 'HPlusPlusHMinusMinusHTo4L_M-500_TuneCUETP8M1_13TeV_pythia8',
        'hpp4l'         : 'HPlusPlusHMinusMinusHTo4L_M-500_13TeV-pythia8' if legacy else 'HPlusPlusHMinusMinusHTo4L_M-500_TuneCUETP8M1_13TeV_pythia8',
        'hppr4l'        : 'HPlusPlusHMinusMinusHRTo4L_M-500_13TeV-pythia8' if legacy else 'HPlusPlusHMinusMinusHRTo4L_M-500_TuneCUETP8M1_13TeV-pythia8',
        'hpp3l'         : 'HPlusPlusHMinusHTo3L_M-500_TuneCUETP8M1_13TeV_calchep-pythia8' if legacy else 'HPlusPlusHMinusHTo3L_M-500_13TeV-calchep-pythia8',
        'haa'           : 'SUSYGluGluToHToAA_AToMuMu_AToTauTau_M-15_TuneCUETP8M1_13TeV_madgraph_pythia8',
        'dy'            : 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8',
        'w'             : 'WJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8',
        'qcd'           : 'QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8',
        'SingleMuon'    : 'SingleMuon',
        'SingleElectron': 'SingleElectron',
        'DoubleMuon'    : 'DoubleMuon',
        'DoubleEG'      : 'DoubleEG',
        'MuonEG'        : 'MuonEG',
        'Tau'           : 'Tau',
    }
    if sample not in sampleMap:
        return []
    sampleDir = '{0}/{1}'.format(getNtupleDirectory(analysis, version=version), sampleMap[sample])
    files = get_hdfs_root_files(sampleDir)
    limit = min(n, len(files))
    return files[:limit]
def getNewProjectionHistograms(analysis, sample, version=getCMSSWVersion(), shift='', base='newflat'):
    '''Stub: the real lookup (getProjectionHistograms with the newflat base) is
    currently disabled; a placeholder file name is always returned.'''
    return 'dummy.root'
def getTestFiles(sample, n=1, version=None):
    '''Return up to n local ntuple files for a short-named test sample.'''
    if not version:
        version = getCMSSWVersion()
    # 76X signal datasets were named differently
    legacy = version == '76X'
    sampleMap = {
        'wz'            : 'WZTo3LNu_TuneCUETP8M1_13TeV-powheg-pythia8',
        'zz'            : 'ZZTo4L_13TeV_powheg_pythia8',
        'data'          : 'DoubleMuon',
        'hpp'           : 'HPlusPlusHMinusMinusHTo4L_M-500_13TeV-pythia8' if legacy else 'HPlusPlusHMinusMinusHTo4L_M-500_TuneCUETP8M1_13TeV_pythia8',
        'hpp4l'         : 'HPlusPlusHMinusMinusHTo4L_M-500_13TeV-pythia8' if legacy else 'HPlusPlusHMinusMinusHTo4L_M-500_TuneCUETP8M1_13TeV_pythia8',
        'hppr4l'        : 'HPlusPlusHMinusMinusHRTo4L_M-500_13TeV-pythia8' if legacy else 'HPlusPlusHMinusMinusHRTo4L_M-500_TuneCUETP8M1_13TeV_pythia8',
        'hpp3l'         : 'HPlusPlusHMinusHTo3L_M-500_TuneCUETP8M1_13TeV_calchep-pythia8' if legacy else 'HPlusPlusHMinusHTo3L_M-500_13TeV-calchep-pythia8',
        'dy'            : 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8',
        'w'             : 'WJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8',
        'SingleMuon'    : 'SingleMuon',
        'SingleElectron': 'SingleElectron',
        'DoubleMuon'    : 'DoubleMuon',
        'DoubleEG'      : 'DoubleEG',
        'MuonEG'        : 'MuonEG',
        'Tau'           : 'Tau',
    }
    if sample not in sampleMap:
        return []
    pattern = '{0}/{1}/*/*/*/*.root'.format(getNtupleDirectory(version=version), sampleMap[sample])
    # strip the /hdfs mount prefix so the paths work through xrootd
    files = [f.replace('/hdfs', '') for f in glob.glob(pattern)]
    if sample == 'wz':
        # temporary hack to get a better WZ sample (Summer16 MC)
        return files[1:min(n + 1, len(files) - 1)]
    return files[:min(n, len(files))]
def getFlatHistograms(analysis, sample, version=getCMSSWVersion(), shift=''):
    '''Return the flat histogram file for a sample.

    Defaults to the local flat/<analysis>/<sample>.root path; when the requested
    shift is registered in latestHistograms, the latest non-projection root file
    on hdfs wins instead.
    '''
    result = 'flat/{0}/{1}.root'.format(analysis, sample)
    registered = latestHistograms.get(version, {}).get(analysis, {})
    if shift in registered:
        searchDir = os.path.join('/hdfs/store/user/dntaylor', registered[shift], sample)
        for candidate in glob.glob('{0}/*.root'.format(searchDir)):
            if 'projection' not in candidate:
                result = candidate
    return result
def parse_command_line(argv):
    '''Build and apply the argument parser for the sample dump tool.'''
    parser = argparse.ArgumentParser(description='Dump samples available')
    # note: the default version is resolved when the module is imported
    parser.add_argument('--version', type=str, default=getCMSSWVersion(), help='Samples to dump')
    parser.add_argument('--analysis', type=str, default='', help='Analysis to use for detailed information')
    parser.add_argument('--selection', type=str, default='1', help='Selection to apply to tree')
    parser.add_argument('--verbose', action='store_true', help='Display detailed sample information')
    return parser.parse_args(argv)
def parse_command_line(argv):
    '''Parse command-line options for dumping the available samples.'''
    parser = argparse.ArgumentParser(description='Dump samples available')
    # the three string-valued options, registered in a uniform way
    stringOpts = [
        ('--version', getCMSSWVersion(), 'Samples to dump'),
        ('--analysis', '', 'Analysis to use for detailed information'),
        ('--selection', '1', 'Selection to apply to tree'),
    ]
    for flag, default, helpText in stringOpts:
        parser.add_argument(flag, type=str, default=default, help=helpText)
    parser.add_argument('--verbose', action='store_true', help='Display detailed sample information')
    return parser.parse_args(argv)
def getLumi(version=getCMSSWVersion(), run=''):
    '''Get the integrated luminosity to scale monte carlo'''
    # an explicit per-run override takes precedence
    if run in runMap:
        return runMap[run]
    # 2318: moriond golden json for 76X; 35867.060: full 2016 for moriond
    return 2318 if version == '76X' else 35867.060
def getProjectionHistograms(analysis, sample, version=getCMSSWVersion(), shift='', base='projections'):
    '''Return the projection histogram file for a sample.

    Prefers the latest hdfs copy when the shift is registered; an unknown
    non-empty shift logs a warning and yields the 'dummy.root' placeholder.
    '''
    proj = '{}/{}/{}.root'.format(base, analysis, sample)
    registered = latestHistograms.get(version, {}).get(analysis, {})
    if shift in registered:
        searchDir = os.path.join('/hdfs/store/user/dntaylor', registered[shift], sample)
        for candidate in glob.glob('{0}/*.root'.format(searchDir)):
            if 'projection' in candidate:
                proj = candidate
    elif shift:
        logging.warning('Shift {} provided but not found'.format(shift))
        proj = 'dummy.root'
    return proj
def getTestFiles(analysis, sample, n=1, version=None):
    '''Look up test ntuple files on hdfs for a short sample alias; at most n paths.'''
    if not version:
        version = getCMSSWVersion()
    old = version == '76X'  # older campaign used different signal dataset names
    sampleMap = {
        'wz': 'WZTo3LNu_TuneCUETP8M1_13TeV-powheg-pythia8',
        'zz': 'ZZTo4L_13TeV_powheg_pythia8',
        'data': 'DoubleMuon',
        'hpp': 'HPlusPlusHMinusMinusHTo4L_M-500_13TeV-pythia8' if old else 'HPlusPlusHMinusMinusHTo4L_M-500_TuneCUETP8M1_13TeV_pythia8',
        'hpp4l': 'HPlusPlusHMinusMinusHTo4L_M-500_13TeV-pythia8' if old else 'HPlusPlusHMinusMinusHTo4L_M-500_TuneCUETP8M1_13TeV_pythia8',
        'hppr4l': 'HPlusPlusHMinusMinusHRTo4L_M-500_13TeV-pythia8' if old else 'HPlusPlusHMinusMinusHRTo4L_M-500_TuneCUETP8M1_13TeV-pythia8',
        'hpp3l': 'HPlusPlusHMinusHTo3L_M-500_TuneCUETP8M1_13TeV_calchep-pythia8' if old else 'HPlusPlusHMinusHTo3L_M-500_13TeV-calchep-pythia8',
        'haa': 'SUSYGluGluToHToAA_AToMuMu_AToTauTau_M-15_TuneCUETP8M1_13TeV_madgraph_pythia8',
        'dy': 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8',
        'w': 'WJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8',
        'qcd': 'QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8',
        'SingleMuon': 'SingleMuon',
        'SingleElectron': 'SingleElectron',
        'DoubleMuon': 'DoubleMuon',
        'DoubleEG': 'DoubleEG',
        'MuonEG': 'MuonEG',
        'Tau': 'Tau',
    }
    try:
        dataset = sampleMap[sample]
    except KeyError:
        return []
    files = get_hdfs_root_files('{0}/{1}'.format(getNtupleDirectory(analysis, version=version), dataset))
    return files[:min(n, len(files))]
def getNtupleDirectory(analysis, local=False, version=getCMSSWVersion(), shift=''):
    '''Locate the ntuple directory for an analysis.

    Order of preference: local ntuples/ area (when requested and present), a
    shifted hdfs production, then the latest nominal hdfs production.
    Implicitly returns None when nothing is registered.
    '''
    if local:
        # first grab the local one
        localDir = 'ntuples/{0}'.format(analysis)
        if os.path.exists(localDir):
            return localDir
    # if not, read from hdfs
    hdfsBase = '/hdfs/store/user/dntaylor'
    if shift:
        shifted = latestShifts[version]
        if analysis in shifted and shift in shifted[analysis]:
            return os.path.join(hdfsBase, shifted[analysis][shift])
    nominal = latestNtuples[version]
    if analysis in nominal and nominal[analysis]:
        return os.path.join(hdfsBase, nominal[analysis])
def getSkimPickle(analysis, sample, version=getCMSSWVersion(), shift=''):
    '''Return the skim pickle for a sample.

    Falls back to the local pickles/ path; a registered shift redirects to the
    latest hdfs copy. A missing hdfs path is only warned about, not fatal.
    '''
    pfile = 'pickles/{0}/skims/{1}.pkl'.format(analysis, sample)
    registered = latestSkims.get(version, {}).get(analysis, {})
    if shift and shift in registered:
        searchDir = os.path.join('/hdfs/store/user/dntaylor', registered[shift], sample)
        matches = glob.glob('{0}/*.root'.format(searchDir))
        if not matches:
            logging.warning('No such path {0}'.format(searchDir))
        for match in matches:
            if 'pkl' in match:
                pfile = match
    return pfile
def getLumi(version=getCMSSWVersion(), run=''):
    '''Get the integrated luminosity to scale monte carlo'''
    if run in runMap:
        # explicit per-run value wins
        return runMap[run]
    lumiByVersion = {
        '76X': 2318,       # moriond golden json
        '80X': 35867.060,  # full 2016 for moriond
        '94X': 41370,
    }
    return lumiByVersion.get(version, 0)
def getSkimPickle(analysis, sample, version=getCMSSWVersion(), shift=''):
    '''Return the skim pickle for a sample.

    Defaults to the local pickles/ path; a registered shift redirects to the
    latest hdfs copy. Raises Exception when the registered hdfs path is empty.
    '''
    pfile = 'pickles/{0}/skims/{1}.pkl'.format(analysis, sample)
    registered = latestSkims.get(version, {}).get(analysis, {})
    if shift and shift in registered:
        searchDir = os.path.join('/hdfs/store/user/dntaylor', registered[shift], sample)
        matches = glob.glob('{0}/*.root'.format(searchDir))
        if not matches:
            raise Exception('No such path {0}'.format(searchDir))
        for match in matches:
            if 'pkl' in match:
                pfile = match
    return pfile
def getSkimJson(analysis, sample, version=getCMSSWVersion(), shift=''):
    '''Return the skim json for a sample.

    Defaults to the local jsons/ path; a registered shift redirects to the
    latest hdfs copy. A missing hdfs path is only warned about, not fatal.
    '''
    jfile = 'jsons/{0}/skims/{1}.json'.format(analysis, sample)
    registered = latestSkims.get(version, {}).get(analysis, {})
    if shift and shift in registered:
        searchDir = os.path.join('/hdfs/store/user/dntaylor', registered[shift], sample)
        matches = glob.glob('{0}/*.root'.format(searchDir))
        if not matches:
            logging.warning('No such path {0}'.format(searchDir))
        for match in matches:
            if 'json' in match:
                jfile = match
    return jfile
def getNtupleDirectory(analysis, local=False, version=getCMSSWVersion(), shift=''):
    '''Resolve the ntuple directory for an analysis: local area first (when
    asked), then a shifted hdfs production, then the latest nominal one.
    Returns None implicitly when nothing matches.'''
    # first grab the local one
    if local:
        candidate = 'ntuples/{0}'.format(analysis)
        if os.path.exists(candidate):
            return candidate
    # if not, read from hdfs
    base = '/hdfs/store/user/dntaylor'
    if shift:
        byAnalysis = latestShifts[version]
        if analysis in byAnalysis and shift in byAnalysis[analysis]:
            return os.path.join(base, byAnalysis[analysis][shift])
    current = latestNtuples[version]
    if analysis in current and current[analysis]:
        return os.path.join(base, current[analysis])
def getProjectionHistograms(analysis, sample, version=getCMSSWVersion(), shift='', base='projections'):
    '''Find the projection histogram file for a sample.

    A registered shift picks the newest hdfs projection file; an unregistered
    non-empty shift is warned about and maps to 'dummy.root'.
    '''
    fallback = '{}/{}/{}.root'.format(base, analysis, sample)
    known = latestHistograms.get(version, {}).get(analysis, {})
    if shift in known:
        hdfsDir = os.path.join('/hdfs/store/user/dntaylor', known[shift], sample)
        for rootFile in glob.glob('{0}/*.root'.format(hdfsDir)):
            if 'projection' in rootFile:
                fallback = rootFile
        return fallback
    if shift:
        logging.warning('Shift {} provided but not found'.format(shift))
        return 'dummy.root'
    return fallback
def getTestFiles(sample, n=1, version=None):
    '''Return up to n test ntuple files for a short sample alias, including the
    full H->aa signal mass grid (haa_<h>_<a> and haa_<h>_<a>_new aliases).'''
    if not version:
        version = getCMSSWVersion()
    legacy = version == '76X'  # older signal dataset naming
    sampleMap = {
        'wz'            : 'WZTo3LNu_TuneCUETP8M1_13TeV-powheg-pythia8',
        'zz'            : 'ZZTo4L_13TeV_powheg_pythia8',
        'data'          : 'DoubleMuon',
        'hpp'           : 'HPlusPlusHMinusMinusHTo4L_M-500_13TeV-pythia8' if legacy else 'HPlusPlusHMinusMinusHTo4L_M-500_TuneCUETP8M1_13TeV_pythia8',
        'hpp4l'         : 'HPlusPlusHMinusMinusHTo4L_M-500_13TeV-pythia8' if legacy else 'HPlusPlusHMinusMinusHTo4L_M-500_TuneCUETP8M1_13TeV_pythia8',
        'hppr4l'        : 'HPlusPlusHMinusMinusHRTo4L_M-500_13TeV-pythia8' if legacy else 'HPlusPlusHMinusMinusHRTo4L_M-500_TuneCUETP8M1_13TeV-pythia8',
        'hpp3l'         : 'HPlusPlusHMinusHTo3L_M-500_TuneCUETP8M1_13TeV_calchep-pythia8' if legacy else 'HPlusPlusHMinusHTo3L_M-500_13TeV-calchep-pythia8',
        'dy'            : 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8',
        'w'             : 'WJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8',
        'qcd'           : 'QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8',
        'gjet'          : 'GJet_Pt-40toInf_DoubleEMEnriched_MGG-80toInf_TuneCUETP8M1_13TeV_Pythia8',
        'SingleMuon'    : 'SingleMuon',
        'SingleElectron': 'SingleElectron',
        'DoubleMuon'    : 'DoubleMuon',
        'DoubleEG'      : 'DoubleEG',
        'MuonEG'        : 'MuonEG',
        'Tau'           : 'Tau',
        'haa'           : 'SUSYGluGluToHToAA_AToMuMu_AToTauTau_M-15_TuneCUETP8M1_13TeV_madgraph_pythia8',
        'hzz'           : 'GluGluHToZZTo4L_M125_13TeV_powheg2_JHUGenV7011_pythia8',
        'hgg'           : 'GluGluHToGG_M-125_13TeV_powheg_pythia8',
    }
    # signal grid: h=125 datasets omit the H mass in the (old) name;
    # str.format ignores the unused {h} keyword in that case
    oldTemplate = 'SUSYGluGluToHToAA_AToMuMu_AToTauTau_M-{a}_TuneCUETP8M1_13TeV_madgraph_pythia8'
    newTemplate = 'SUSYGluGluToHToAA_AToMuMu_AToTauTau_M-{h}_M-{a}_TuneCUETP8M1_13TeV_madgraph_pythia8'
    for h in [125, 300, 750]:
        for a in ['3p6', 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17, 19, 21]:
            template = oldTemplate if h == 125 else newTemplate
            sampleMap['haa_{h}_{a}'.format(h=h, a=a)] = template.format(h=h, a=a)
            sampleMap['haa_{h}_{a}_new'.format(h=h, a=a)] = newTemplate.format(h=h, a=a)
    if sample not in sampleMap:
        return []
    files = get_hdfs_root_files('{0}/{1}'.format(getNtupleDirectory(version=version), sampleMap[sample]))
    return files[:min(n, len(files))]
def getTestFiles(sample, n=1, version=None):
    '''Look up test ntuple files for a short alias; includes the generated
    haa_<h>_<a>[_new] signal grid aliases. Returns at most n file paths.'''
    if not version:
        version = getCMSSWVersion()
    is76X = version == '76X'
    aliases = {
        'wz'            : 'WZTo3LNu_TuneCUETP8M1_13TeV-powheg-pythia8',
        'zz'            : 'ZZTo4L_13TeV_powheg_pythia8',
        'data'          : 'DoubleMuon',
        'hpp'           : 'HPlusPlusHMinusMinusHTo4L_M-500_13TeV-pythia8' if is76X else 'HPlusPlusHMinusMinusHTo4L_M-500_TuneCUETP8M1_13TeV_pythia8',
        'hpp4l'         : 'HPlusPlusHMinusMinusHTo4L_M-500_13TeV-pythia8' if is76X else 'HPlusPlusHMinusMinusHTo4L_M-500_TuneCUETP8M1_13TeV_pythia8',
        'hppr4l'        : 'HPlusPlusHMinusMinusHRTo4L_M-500_13TeV-pythia8' if is76X else 'HPlusPlusHMinusMinusHRTo4L_M-500_TuneCUETP8M1_13TeV-pythia8',
        'hpp3l'         : 'HPlusPlusHMinusHTo3L_M-500_TuneCUETP8M1_13TeV_calchep-pythia8' if is76X else 'HPlusPlusHMinusHTo3L_M-500_13TeV-calchep-pythia8',
        'dy'            : 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8',
        'w'             : 'WJetsToLNu_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8',
        'qcd'           : 'QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8',
        'gjet'          : 'GJet_Pt-40toInf_DoubleEMEnriched_MGG-80toInf_TuneCUETP8M1_13TeV_Pythia8',
        'SingleMuon'    : 'SingleMuon',
        'SingleElectron': 'SingleElectron',
        'DoubleMuon'    : 'DoubleMuon',
        'DoubleEG'      : 'DoubleEG',
        'MuonEG'        : 'MuonEG',
        'Tau'           : 'Tau',
        'haa'           : 'SUSYGluGluToHToAA_AToMuMu_AToTauTau_M-15_TuneCUETP8M1_13TeV_madgraph_pythia8',
        'hzz'           : 'GluGluHToZZTo4L_M125_13TeV_powheg2_JHUGenV7011_pythia8',
        'hgg'           : 'GluGluHToGG_M-125_13TeV_powheg_pythia8',
    }
    # expand the H->aa mass grid; for h=125 the historical dataset name has no
    # H mass field ({h} is then an unused format kwarg, which format() ignores)
    shortName = 'SUSYGluGluToHToAA_AToMuMu_AToTauTau_M-{a}_TuneCUETP8M1_13TeV_madgraph_pythia8'
    longName = 'SUSYGluGluToHToAA_AToMuMu_AToTauTau_M-{h}_M-{a}_TuneCUETP8M1_13TeV_madgraph_pythia8'
    for h in [125, 300, 750]:
        for a in ['3p6', 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 17, 19, 21]:
            chosen = shortName if h == 125 else longName
            aliases['haa_{h}_{a}'.format(h=h, a=a)] = chosen.format(h=h, a=a)
            aliases['haa_{h}_{a}_new'.format(h=h, a=a)] = longName.format(h=h, a=a)
    if sample not in aliases:
        return []
    location = '{0}/{1}'.format(getNtupleDirectory(version=version), aliases[sample])
    files = get_hdfs_root_files(location)
    return files[:min(n, len(files))]
def getNtupleDirectory(version=None):
    '''Return the hdfs directory holding the latest ntuples for a CMSSW
    version; implicitly None when the version is unknown.'''
    if not version:
        version = getCMSSWVersion()
    if version in latestNtuples:
        return os.path.join('/hdfs/store/user/dntaylor', latestNtuples[version])
def __init__(self,**kwargs):
    '''
    Set up the skimmer: resolve the input file list, scan it for the total
    entry count and CMSSW version, load the correction helpers, and open the
    output file with a cloned (empty) copy of the input tree.

    Keyword arguments consumed: inputFileNames, inputTreeDirectory,
    inputTreeName, outputFileName, outputTreeName, progressbar.
    '''
    inputFileNames = kwargs.pop('inputFileNames',[])
    inputTreeDirectory = kwargs.pop('inputTreeDirectory','')
    inputTreeName = kwargs.pop('inputTreeName','AnalysisTree')
    outputFileName = kwargs.pop('outputFileName','analysisTree.root')
    outputTreeName = kwargs.pop('outputTreeName','AnalysisTree')
    self.outputTreeName = outputTreeName
    if hasProgress:
        self.pbar = kwargs.pop('progressbar',ProgressBar(widgets=['{0}: '.format(outputTreeName),' ',SimpleProgress(),' events ',Percentage(),' ',Bar(),' ',ETA()]))
    # input files
    self.fileNames = []
    if os.path.isfile('PSet.py'):
        # grab input files from crab pset
        import PSet
        self.fileNames = list(PSet.process.source.fileNames)
    elif isinstance(inputFileNames, basestring):
        # inputFileNames is a single file name
        if os.path.isfile(inputFileNames):
            if inputFileNames[-4:] == 'root':
                # file is a root file
                self.fileNames += [inputFileNames]
            else:
                # file is a text list of files, one per line
                with open(inputFileNames,'r') as f:
                    for line in f:
                        self.fileNames += [line.strip()]
    else:
        # already a python list or a cms.untracked.vstring()
        self.fileNames = inputFileNames
    if not isinstance(outputFileName, basestring):
        # its a cms.string(), get value
        outputFileName = outputFileName.value()
    # test for hdfs (currently forced off: always read via xrootd)
    #self.hasHDFS = os.path.exists('/hdfs/store/user')
    self.hasHDFS = False
    # input tchain: tree lives in a subdirectory only when one was given
    self.treename = '{0}/{1}'.format(inputTreeDirectory,inputTreeName) if inputTreeDirectory else inputTreeName
    self.totalEntries = 0
    self.numLumis = 0
    self.numEvents = 0
    self.summedWeights = 0
    logging.info('Getting information')
    if len(self.fileNames)==0: logging.warning('No files to process')
    if len(self.fileNames)>1: logging.warning('More than one file requested, only processing the first file')
    # NOTE(review): this loop still scans every file for entry counts even
    # though only the first file is processed below — confirm intended.
    for f,fName in enumerate(self.fileNames):
        if fName.startswith('/store'):
            # LFN: prefix with the hdfs mount or the xrootd redirector
            fName = '{0}/{1}'.format('/hdfs' if self.hasHDFS else 'root://cmsxrootd.hep.wisc.edu/',fName)
        tfile = ROOT.TFile.Open(fName)
        tree = tfile.Get(self.treename)
        self.totalEntries += tree.GetEntries()
        # determine the CMSSW version once, from provenance when available
        if not hasattr(self,'version'):
            tree.GetEntry(1)
            if hasattr(tree,'provenance'):
                ver = tree.provenance[0].split('_')
                self.version = ''.join([ver[1],ver[2],'X'])
            else:
                self.version = getCMSSWVersion()
        tfile.Close('R')
    logging.info('Analysis is running with version {0}'.format(self.version))
    self.flush()
    if not len(self.fileNames): raise Exception
    # other input files: correction helpers keyed on the detected version
    self.pileupWeights = PileupWeights(self.version)
    self.fakeRates = FakeRates(self.version)
    self.leptonScales = LeptonScales(self.version)
    self.triggerScales = TriggerScales(self.version)
    self.triggerPrescales = TriggerPrescales(self.version)
    self.zptGenWeight = ZptGenWeight(self.version)
    self.zzGenWeight = ZZGenWeight(self.version)
    # tfile: reopen the first input file and clone an empty output tree
    fName = self.fileNames[0]
    if fName.startswith('/store'):
        fName = '{0}/{1}'.format('/hdfs' if self.hasHDFS else 'root://cmsxrootd.hep.wisc.edu/',fName)
    self.tfile = ROOT.TFile.Open(fName,'READ')
    self.oldtree = self.tfile.Get(self.treename)
    self.outfile = ROOT.TFile(outputFileName,"recreate")
    self.tree = self.oldtree.CloneTree(0)
    summedWeights = self.tfile.Get('summedWeights')
    self.summedWeights = summedWeights.GetBinContent(1)
def __init__(self,**kwargs):
    '''
    Set up the analysis: resolve the input file list, accumulate lumi/event
    bookkeeping from the LumiTree, detect the CMSSW version, load the
    correction helpers, and declare the common output branches.

    Keyword arguments consumed: inputFileNames, inputTreeDirectory,
    inputTreeName, inputLumiName, outputFileName, outputTreeName, shift,
    progressbar.
    '''
    inputFileNames = kwargs.pop('inputFileNames',[])
    inputTreeDirectory = kwargs.pop('inputTreeDirectory','miniTree')
    inputTreeName = kwargs.pop('inputTreeName','MiniTree')
    # BUGFIX: this previously popped 'inputTreeName' a second time (already
    # consumed above), so a caller-supplied 'inputLumiName' was silently
    # ignored and the default 'LumiTree' was always used.
    inputLumiName = kwargs.pop('inputLumiName','LumiTree')
    outputFileName = kwargs.pop('outputFileName','analysisTree.root')
    outputTreeName = kwargs.pop('outputTreeName','AnalysisTree')
    self.shift = kwargs.pop('shift','')
    self.outputTreeName = outputTreeName
    if hasProgress:
        self.pbar = kwargs.pop('progressbar',ProgressBar(widgets=['{0}: '.format(outputTreeName),' ',SimpleProgress(),' events ',Percentage(),' ',Bar(),' ',ETA()]))
    # preselection (a subclass may set it before delegating here)
    if not hasattr(self,'preselection'):
        self.preselection = '1'
    # input files
    self.fileNames = []
    if os.path.isfile('PSet.py'):
        # grab input files from crab pset
        import PSet
        self.fileNames = list(PSet.process.source.fileNames)
    elif isinstance(inputFileNames, basestring):
        # inputFileNames is a single file name
        if os.path.isfile(inputFileNames):
            if inputFileNames[-4:] == 'root':
                # file is a root file
                self.fileNames += [inputFileNames]
            else:
                # file is a text list of files, one per line
                with open(inputFileNames,'r') as f:
                    for line in f:
                        self.fileNames += [line.strip()]
    else:
        # already a python list or a cms.untracked.vstring()
        self.fileNames = inputFileNames
    if not isinstance(outputFileName, basestring):
        # its a cms.string(), get value
        outputFileName = outputFileName.value()
    # test for hdfs (currently forced off: always read via xrootd)
    #self.hasHDFS = os.path.exists('/hdfs/store/user')
    self.hasHDFS = False
    # input tree names (event tree and lumi bookkeeping tree)
    self.treename = '{0}/{1}'.format(inputTreeDirectory,inputTreeName)
    luminame = '{0}/{1}'.format(inputTreeDirectory,inputLumiName)
    self.totalEntries = 0
    self.numLumis = 0
    self.numEvents = 0
    self.summedWeights = 0
    logging.info('Getting Lumi information')
    for f,fName in enumerate(self.fileNames):
        if fName.startswith('/store'):
            # LFN: prefix with the hdfs mount or the xrootd redirector
            fName = '{0}/{1}'.format('/hdfs' if self.hasHDFS else 'root://cmsxrootd.hep.wisc.edu/',fName)
        tfile = ROOT.TFile.Open(fName)
        tree = tfile.Get(self.treename)
        self.totalEntries += tree.GetEntries()
        # determine the CMSSW version once, from provenance when available
        if not hasattr(self,'version'):
            tree.GetEntry(1)
            if hasattr(tree,'provenance'):
                ver = tree.provenance[0].split('_')
                self.version = ''.join([ver[1],ver[2],'X'])
            else:
                self.version = getCMSSWVersion()
        # accumulate lumi/event/weight sums from the lumi tree
        lumitree = tfile.Get(luminame)
        for entry in lumitree:
            self.numLumis += 1
            self.numEvents += lumitree.nevents
            self.summedWeights += lumitree.summedWeights
        tfile.Close('R')
    # NOTE(review): with an empty file list, self.version is unset and the
    # next line raises AttributeError before the explicit check below fires.
    logging.info('Analysis is running with version {0}'.format(self.version))
    logging.info("Will process {0} lumi sections with {1} events ({2}).".format(self.numLumis,self.numEvents,self.summedWeights))
    self.flush()
    if not len(self.fileNames): raise Exception('No input files provided')
    # other input files: correction helpers keyed on the detected version
    self.pileupWeights = PileupWeights(self.version)
    self.fakeRates = FakeRates(self.version)
    self.leptonScales = LeptonScales(self.version)
    self.triggerScales = TriggerScales(self.version)
    self.triggerPrescales = TriggerPrescales(self.version)
    self.zptGenWeight = ZptGenWeight(self.version)
    self.zzGenWeight = ZZGenWeight(self.version)
    # output file, cut flow tree, and analysis tree
    self.outfile = ROOT.TFile(outputFileName,"recreate")
    self.cutTree = CutTree()
    self.tree = AnalysisTree(outputTreeName)
    self.eventsStored = 0
    # samples that need special gen-level weights
    dysamples = [
        'DY1JetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8',
        'DY2JetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8',
        'DY3JetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8',
        'DY4JetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8',
        'DYJetsToLL_M-10to50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8',
        'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8',
    ]
    qqzzsamples = [
        'ZZTo4L_13TeV_powheg_pythia8',
        'ZZTo4L_13TeV-amcatnloFXFX-pythia8',
    ]
    # pileup
    self.tree.add(lambda cands: self.pileupWeights.weight(self.event)[0], 'pileupWeight', 'F')
    self.tree.add(lambda cands: self.pileupWeights.weight(self.event)[1], 'pileupWeightUp', 'F')
    self.tree.add(lambda cands: self.pileupWeights.weight(self.event)[2], 'pileupWeightDown', 'F')
    self.tree.add(lambda cands: self.event.vertices_count(), 'numVertices', 'I')
    self.tree.add(lambda cands: self.event.rho(), 'rho', 'F')
    # gen
    self.tree.add(lambda cands: self.event.nTrueVertices(), 'numTrueVertices', 'I')
    self.tree.add(lambda cands: self.event.NUP(), 'NUP', 'I')
    self.tree.add(lambda cands: self.event.isData(), 'isData', 'I')
    self.tree.add(lambda cands: self.event.genWeight(), 'genWeight', 'F')
    # NOTE(review): fName here is the last file from the loop above — the
    # sample-specific branches key off that file's name.
    if any([x in fName for x in dysamples]):
        self.tree.add(lambda cands: self.zptGenWeight.weight(self.gen), 'zPtWeight', 'F')
    if any([x in fName for x in qqzzsamples]):
        self.tree.add(lambda cands: self.zzGenWeight.weight(self.gen), 'qqZZkfactor', 'F')
    self.tree.add(lambda cands: self.event.numGenJets(), 'numGenJets', 'I')
    self.tree.add(lambda cands: self.event.genHT(), 'genHT', 'I')
    # QCD scale (muR/muF) variations, indexed by LHE weight position
    weightMap = {
        0: {'muR':1.0, 'muF':1.0},
        1: {'muR':1.0, 'muF':2.0},
        2: {'muR':1.0, 'muF':0.5},
        3: {'muR':2.0, 'muF':1.0},
        4: {'muR':2.0, 'muF':2.0},
        5: {'muR':2.0, 'muF':0.5},
        6: {'muR':0.5, 'muF':1.0},
        7: {'muR':0.5, 'muF':2.0},
        8: {'muR':0.5, 'muF':0.5},
    }
    # the nine branches below were previously written out by hand; i=i binds
    # the index at definition time (avoids the late-binding closure pitfall)
    for i in range(9):
        self.tree.add(
            lambda cands, i=i: 0. if self.event.isData() else (self.event.genWeights()[i] if len(self.event.genWeights())>i else 0.),
            'genWeight_muR{muR:3.1f}_muF{muF:3.1f}'.format(**weightMap[i]),
            'F')
def getRunRange(version=getCMSSWVersion(), run=''):
    '''Return the [first, last] run numbers for a run period; the full range
    when the period is unknown.'''
    try:
        return runRange[run]
    except KeyError:
        return [0, 999999]
def getNewFlatHistograms(analysis, sample, version=getCMSSWVersion(), shift='', base='newflat'):
    '''Thin wrapper: flat histogram lookup rooted at the "newflat" area.'''
    return getFlatHistograms(analysis, sample, version, shift, base)
def getNtupleDirectory(version=None):
    '''Resolve the latest hdfs ntuple area for a CMSSW version (defaults to the
    current one); implicitly None when unregistered.'''
    ver = version if version else getCMSSWVersion()
    if ver in latestNtuples:
        return os.path.join('/hdfs/store/user/dntaylor', latestNtuples[ver])
def getRunRange(version=getCMSSWVersion(), run=''):
    '''Look up the run-number window for a run period; the full window when
    the period is unknown.'''
    fullRange = [0, 999999]
    return runRange.get(run, fullRange) if hasattr(runRange, 'get') else (runRange[run] if run in runRange else fullRange)
def getNewFlatHistograms(analysis, sample, version=getCMSSWVersion(), shift='', base='newflat'):
    '''Delegate to getFlatHistograms, rooted at the "newflat" area.'''
    return getFlatHistograms(analysis, sample, version, shift, base)
from copy import deepcopy from itertools import product, combinations_with_replacement from DevTools.Plotter.utilities import ZMASS, addChannels, getLumi, getRunRange from DevTools.Utilities.utilities import getCMSSWVersion version = getCMSSWVersion() def buildZTauFakeRate(selectionParams,sampleSelectionParams,projectionParams,sampleProjectionParams,histParams,sampleHistParams): histParams['ZTauFakeRate'] = { 'count' : {'xVariable': '1', 'xBinning': [1,0,2], }, # just a count of events passing selection 'numVertices' : {'xVariable': 'numVertices', 'xBinning': [40,0,40], }, 'numLooseMuons' : {'xVariable': 'numLooseMuons', 'xBinning': [4,0,4], }, 'numTightMuons' : {'xVariable': 'numTightMuons', 'xBinning': [4,0,4], }, 'met' : {'xVariable': 'met_pt', 'xBinning': [500, 0, 500], }, 'metPhi' : {'xVariable': 'met_phi', 'xBinning': [500, -3.14159, 3.14159],}, # z 'zMass' : {'xVariable': 'z_mass', 'xBinning': [120, 60, 120], }, 'zPt' : {'xVariable': 'z_pt', 'xBinning': [500, 0, 500], }, 'zDeltaR' : {'xVariable': 'z_deltaR', 'xBinning': [500, 0, 5], }, 'zLeadingLeptonPt' : {'xVariable': 'z1_pt', 'xBinning': [1000, 0, 1000], }, 'zLeadingLeptonEta' : {'xVariable': 'z1_eta', 'xBinning': [500, -2.5, 2.5], }, 'zLeadingLeptonIso' : {'xVariable': 'z1_isolation', 'xBinning': [500, 0, 0.5], }, 'zSubLeadingLeptonPt' : {'xVariable': 'z2_pt', 'xBinning': [1000, 0, 1000], }, 'zSubLeadingLeptonEta' : {'xVariable': 'z2_eta', 'xBinning': [500, -2.5, 2.5], }, 'zSubLeadingLeptonIso' : {'xVariable': 'z2_isolation', 'xBinning': [500, 0, 0.5], }, # t 'wtMt' : {'xVariable': 'w_mt', 'xBinning': [500, 0, 500], }, 'wtPt' : {'xVariable': 'w_pt', 'xBinning': [500, 0, 500], },
def __init__(self, **kwargs):
    """Set up the analysis: resolve input files, scan the first pass for
    bookkeeping (entry count, CMSSW version), load correction helpers,
    and open the input/output trees.

    Keyword arguments (all optional):
        inputFileNames     -- python list / cms vstring of files, a single
                              root file path, or a text file listing one
                              input file per line
        inputTreeDirectory -- TDirectory inside the file holding the tree
        inputTreeName      -- name of the input TTree (default 'AnalysisTree')
        outputFileName     -- output root file to create
        outputTreeName     -- name used for the cloned output TTree
    """
    inputFileNames = kwargs.pop('inputFileNames', [])
    inputTreeDirectory = kwargs.pop('inputTreeDirectory', '')
    inputTreeName = kwargs.pop('inputTreeName', 'AnalysisTree')
    outputFileName = kwargs.pop('outputFileName', 'analysisTree.root')
    outputTreeName = kwargs.pop('outputTreeName', 'AnalysisTree')
    self.outputTreeName = outputTreeName
    # Progress bar only when the progressbar package was importable
    # (hasProgress is a module-level flag -- defined outside this view).
    if hasProgress:
        self.pbar = kwargs.pop(
            'progressbar',
            ProgressBar(widgets=[
                '{0}: '.format(outputTreeName), ' ', SimpleProgress(),
                ' events ', Percentage(), ' ', Bar(), ' ', ETA()
            ]))
    # input files
    self.fileNames = []
    if os.path.isfile('PSet.py'):
        # running under crab: grab input files from the crab pset
        import PSet
        self.fileNames = list(PSet.process.source.fileNames)
    elif isinstance(inputFileNames, basestring):
        # a single string: either a root file or a text list of files
        if os.path.isfile(inputFileNames):
            if inputFileNames[-4:] == 'root':
                # single root file
                self.fileNames += [inputFileNames]
            else:
                # text file with one input file per line
                with open(inputFileNames, 'r') as f:
                    for line in f:
                        self.fileNames += [line.strip()]
    else:
        # already a python list or a cms.untracked.vstring()
        self.fileNames = inputFileNames
    if not isinstance(outputFileName, basestring):
        # its a cms.string(), get value
        outputFileName = outputFileName.value()
    # test for hdfs
    #self.hasHDFS = os.path.exists('/hdfs/store/user')
    # NOTE: direct /hdfs access deliberately disabled; always use xrootd.
    self.hasHDFS = False
    # input tchain
    self.treename = '{0}/{1}'.format(
        inputTreeDirectory, inputTreeName) if inputTreeDirectory else inputTreeName
    self.totalEntries = 0
    self.numLumis = 0
    self.numEvents = 0
    self.summedWeights = 0
    logging.info('Getting information')
    if len(self.fileNames) == 0:
        logging.warning('No files to process')
    if len(self.fileNames) > 1:
        logging.warning(
            'More than one file requested, only processing the first file')
    # First pass: count entries and detect the CMSSW version from provenance.
    for f, fName in enumerate(self.fileNames):
        if fName.startswith('/store'):
            # LFN: prefix with the xrootd redirector (or /hdfs when enabled)
            fName = '{0}/{1}'.format(
                '/hdfs' if self.hasHDFS else 'root://cmsxrootd.hep.wisc.edu/',
                fName)
        tfile = ROOT.TFile.Open(fName)
        tree = tfile.Get(self.treename)
        self.totalEntries += tree.GetEntries()
        if not hasattr(self, 'version'):
            tree.GetEntry(1)
            if hasattr(tree, 'provenance'):
                # Builds e.g. '80X' from provenance; assumes the string is
                # shaped like 'CMSSW_8_0_...' -- TODO confirm.
                ver = tree.provenance[0].split('_')
                self.version = ''.join([ver[1], ver[2], 'X'])
            else:
                self.version = getCMSSWVersion()
        tfile.Close('R')
    logging.info('Analysis is running with version {0}'.format(
        self.version))
    self.flush()
    if not len(self.fileNames):
        raise Exception
    # other input files: correction/weight helpers for this version
    self.pileupWeights = PileupWeights(self.version)
    self.fakeRates = FakeRates(self.version)
    self.leptonScales = LeptonScales(self.version)
    self.triggerScales = TriggerScales(self.version)
    self.triggerPrescales = TriggerPrescales(self.version)
    self.zptGenWeight = ZptGenWeight(self.version)
    self.zzGenWeight = ZZGenWeight(self.version)
    # tfile: open only the first input file for processing
    fName = self.fileNames[0]
    if fName.startswith('/store'):
        fName = '{0}/{1}'.format(
            '/hdfs' if self.hasHDFS else 'root://cmsxrootd.hep.wisc.edu/',
            fName)
    self.tfile = ROOT.TFile.Open(fName, 'READ')
    self.oldtree = self.tfile.Get(self.treename)
    self.outfile = ROOT.TFile(outputFileName, "recreate")
    # clone structure only (0 entries); events are filled later
    self.tree = self.oldtree.CloneTree(0)
    summedWeights = self.tfile.Get('summedWeights')
    self.summedWeights = summedWeights.GetBinContent(1)
import sys import logging from itertools import product, combinations_with_replacement from DevTools.Plotter.Plotter import Plotter from DevTools.Utilities.utilities import ZMASS, getCMSSWVersion from copy import deepcopy import ROOT logging.basicConfig( level=logging.INFO, stream=sys.stderr, format='%(asctime)s.%(msecs)03d %(levelname)s %(name)s: %(message)s', datefmt='%Y-%m-%d %H:%M:%S') version = getCMSSWVersion() blind = False plotter = Plotter('MuMuTauFakeRate') ######################### ### Define categories ### ######################### sigMap = { 'Z': [ 'DYJetsToLL_M-10to50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8', 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8', ], 'QCD': [
def getNewProjectionHistograms(analysis, sample, version=None, shift='', base='newflat'):
    '''Stub: projection histograms are currently disabled.

    All arguments are accepted for interface compatibility but unused;
    the real implementation (getProjectionHistograms) is commented out.
    The old version default called getCMSSWVersion() once at import
    time; since the value is never read, a None default is equivalent
    and avoids the def-time evaluation.

    Returns:
        The placeholder filename 'dummy.root'.
    '''
    #return getProjectionHistograms(analysis,sample,version,shift,base)
    return 'dummy.root'
def __init__(self, **kwargs):
    """Set up the analysis over a NanoAOD-style 'Events' tree: resolve
    input files, accumulate lumi/event/weight bookkeeping from every
    file, then create the output AnalysisTree with the always-needed
    branches (pileup, gen info, LHE scale variations).

    Keyword arguments (all optional):
        inputFileNames     -- python list / cms vstring of files, a single
                              root file path (or /store LFN), or a text
                              file listing one input file per line
        inputTreeDirectory -- accepted but unused here
        inputTreeName      -- name of the input TTree (default 'Events')
        outputFileName     -- output root file to create
        outputTreeName     -- name of the output AnalysisTree
        shift              -- systematic shift label stored on self

    Raises:
        ValueError: when no input files could be resolved.
    """
    inputFileNames = kwargs.pop('inputFileNames', [])
    inputTreeDirectory = kwargs.pop('inputTreeDirectory', '')
    inputTreeName = kwargs.pop('inputTreeName', 'Events')
    outputFileName = kwargs.pop('outputFileName', 'analysisTree.root')
    outputTreeName = kwargs.pop('outputTreeName', 'AnalysisTree')
    self.shift = kwargs.pop('shift', '')
    self.events = []
    self.outputTreeName = outputTreeName
    # preselection: subclasses may define one before calling this __init__
    if not hasattr(self, 'preselection'):
        self.preselection = '1'
    # input files
    self.fileNames = []
    if os.path.isfile('PSet.py'):
        # running under crab: grab input files from the crab pset
        import PSet
        self.fileNames = list(PSet.process.source.fileNames)
    elif isinstance(inputFileNames, basestring):
        if inputFileNames.startswith('/store'):
            # LFN for xrootd access
            self.fileNames += [inputFileNames]
        if os.path.isfile(inputFileNames):
            if inputFileNames[-4:] == 'root':
                # single root file
                self.fileNames += [inputFileNames]
            else:
                # text file with one input file per line
                with open(inputFileNames, 'r') as f:
                    for line in f:
                        self.fileNames += [line.strip()]
    else:
        # already a python list or a cms.untracked.vstring()
        self.fileNames = inputFileNames
    if not isinstance(outputFileName, basestring):
        # its a cms.string(), get value
        outputFileName = outputFileName.value()
    if not self.fileNames:
        logging.error('No files found')
        raise ValueError
    # direct /hdfs access deliberately disabled; always use xrootd
    #self.hasHDFS = os.path.exists('/hdfs/store/user')
    self.hasHDFS = False
    # input tree bookkeeping
    self.treename = inputTreeName
    self.totalEntries = 0
    self.numLumis = 0
    self.numEvents = 0
    self.summedWeights = 0
    self.summedWeightsLHEScale = [0]*9
    logging.info('Getting Lumi information')
    for f, fName in enumerate(self.fileNames):
        if fName.startswith('/store/user'):
            # user area: wisconsin redirector (or /hdfs when enabled)
            fName = '{0}/{1}'.format('/hdfs' if self.hasHDFS else 'root://cmsxrootd.hep.wisc.edu/', fName)
        elif fName.startswith('/store'):
            # central datasets: global fnal redirector
            fName = '{0}/{1}'.format('root://cmsxrootd.fnal.gov/', fName)
        tfile = ROOT.TFile.Open(fName)
        tree = tfile.Get(self.treename)
        self.totalEntries += tree.GetEntries()
        if not hasattr(self, 'version'):
            tree.GetEntry(1)
            if hasattr(tree, 'provenance'):
                # Builds e.g. '80X' from provenance; assumes the string is
                # shaped like 'CMSSW_8_0_...' -- TODO confirm.
                ver = tree.provenance[0].split('_')
                self.version = ''.join([ver[1], ver[2], 'X'])
            else:
                self.version = getCMSSWVersion()
        lumitree = tfile.Get('LuminosityBlocks')
        for entry in lumitree:
            self.numLumis += 1
        runtree = tfile.Get('Runs')
        for entry in runtree:
            if hasattr(runtree, 'genEventCount'):
                # MC only: accumulate generator-level bookkeeping
                self.numEvents += runtree.genEventCount
                self.summedWeights += runtree.genEventSumw
                for i in range(9):
                    if len(runtree.LHEScaleSumw) > i:
                        self.summedWeightsLHEScale[i] += runtree.LHEScaleSumw[i]
        tfile.Close('R')
    logging.info('Analysis is running with version {0}'.format(self.version))
    logging.info("Will process {0} lumi sections with {1} events ({2}).".format(self.numLumis, self.numEvents, self.summedWeights))
    self.flush()
    if not len(self.fileNames):
        # unreachable in practice: an empty list already raised ValueError above
        raise Exception
    # tfile
    self.outfile = ROOT.TFile(outputFileName, "recreate")
    # cut tree
    self.cutTree = CutTree()
    # analysis tree
    self.tree = AnalysisTree(outputTreeName)
    self.eventsStored = 0
    # some things we always need:
    # pileup
    self.tree.add(lambda cands: self.event.nOtherPV()+1, 'numVertices', 'I')
    self.tree.add(lambda cands: self.event.fixedGridRhoFastjetAll(), 'rho', 'F')
    # gen
    self.tree.add(lambda cands: 0 if self.event.isData() else self.event.Pileup_nPU(), 'numTrueVertices', 'I')
    self.tree.add(lambda cands: self.event.isData(), 'isData', 'I')
    self.tree.add(lambda cands: 0 if self.event.isData() else self.event.genWeight(), 'genWeight', 'F')
    # scale shifts: LHEScaleWeight index -> (muR, muF) variation
    weightMap = {
        0: {'muR': 1.0, 'muF': 1.0},
        1: {'muR': 1.0, 'muF': 2.0},
        2: {'muR': 1.0, 'muF': 0.5},
        3: {'muR': 2.0, 'muF': 1.0},
        4: {'muR': 2.0, 'muF': 2.0},
        5: {'muR': 2.0, 'muF': 0.5},
        6: {'muR': 0.5, 'muF': 1.0},
        7: {'muR': 0.5, 'muF': 2.0},
        8: {'muR': 0.5, 'muF': 0.5},
    }
    # One branch per scale variation (replaces nine copy-pasted lambdas).
    # 'i=i' binds the loop index at definition time; a bare closure would
    # late-bind and make every branch read index 8.
    for i in range(9):
        self.tree.add(
            lambda cands, i=i: 0. if self.event.isData()
            else self.event.LHEScaleWeight()[i]
            if hasattr(self.event, 'LHEScaleWeight') and len(self.event.LHEScaleWeight()) > i
            else 0.,
            'genWeight_muR{muR:3.1f}_muF{muF:3.1f}'.format(**weightMap[i]),
            'F')
def getNewProjectionHistograms(analysis, sample, version=None, shift=''):
    '''Return the "newflat" projection histogram path for a sample.

    Args:
        analysis: analysis name (first path component).
        sample: sample name (file stem).
        version: unused; kept for backward compatibility. The old
            default called getCMSSWVersion() once at import time (a
            def-time default pitfall); since the value is never read,
            a None default is equivalent.
        shift: unused; kept for backward compatibility.

    Returns:
        Relative path 'newflat/<analysis>/<sample>.root'.
    '''
    return 'newflat/{0}/{1}.root'.format(analysis, sample)