def runAsymptoticLimit(mass, cardname, year):
    """Run combine's AsymptoticLimits method for a single HNL mass point.

    mass     -- HNL mass hypothesis in GeV
    cardname -- base name of the datacard (without '.txt')
    year     -- a single year ('2016') or a dash-joined combination ('2016-2017')
    """
    # A dash-joined year string means per-year cards must be prepared first.
    if '-' in year:
        split_year = year.split('-')
    else:
        split_year = [year]
    for y in split_year:
        # Prepare the per-year datacard and combine its category cards.
        datacard = getDataCard(mass, cardname, y)
        combineSets(mass, y)
    # Merge the per-year cards into one multi-year card (no-op for one year).
    combineYears(mass, cardname)
    datacard = getDataCard(mass, cardname, year)
    output_folder = datacard.replace('dataCards', 'output').rsplit('/', 1)[0] + '/asymptotic/' + cardname
    print 'Running Combine for mass', str(mass), 'GeV'
    if args.blind:
        runCombineCommand('combine -M AsymptoticLimits ' + datacard + ' --run blind')
    else:
        runCombineCommand('combine -M AsymptoticLimits ' + datacard)
    # makeDirIfNeeded expects a file path, hence the dummy '/x' component.
    makeDirIfNeeded(output_folder + '/x')
    # Move combine's root output from the shared tmp area into this card's folder.
    os.system('scp ' + os.path.expandvars('$CMSSW_BASE/src/HNL/Stat/data/output/tmp/*root') + ' ' + output_folder + '/.')
    os.system('rm ' + os.path.expandvars('$CMSSW_BASE/src/HNL/Stat/data/output/tmp/*root'))
    print 'Finished running asymptotic limits for mass', str(mass), 'GeV'
    return
def printSelections(in_file_path, out_file_path):
    """Dump the names of all cutflow selections in a ROOT file to a text file.

    in_file_path  -- path to the input ROOT file containing a 'cutflow' dir
    out_file_path -- path of the text file to write (one selection per line);
                     entries containing 'Total' are skipped
    """
    in_file = TFile(in_file_path)
    key_names = [k[0] for k in rootFileContent(in_file, '/', starting_dir = 'cutflow')]
    # Fix: the input ROOT file was previously never closed.
    in_file.Close()
    makeDirIfNeeded(out_file_path)
    out_file = open(out_file_path, 'w')
    try:
        for k in key_names:
            if 'Total' in k:
                continue
            out_file.write(k.split('/')[-1] + '\n')
    finally:
        # Fix: the output file was previously never closed (leaked handle,
        # possibly unflushed data).
        out_file.close()
    return
def getOutputName(region):
    """Return the per-sample output file path for *region*, creating its directory."""
    full_path = os.path.join(getOutputBase(region), sample.output)
    # Child subjobs write into a per-sample temporary subdirectory.
    if args.isChild:
        full_path = '/'.join([full_path, 'tmp_' + sample.output])
    full_path = '/'.join([full_path, sample.name + '_events_' + subjobAppendix + '.root'])
    makeDirIfNeeded(full_path)
    return full_path
def combineYears(mass, cardname):
    """Merge the per-year datacards of one mass point into a multi-year card.

    Does nothing when only a single year is being processed. Otherwise runs
    combineCards.py over every year whose card exists and writes the result
    under the dash-joined year directory (e.g. '2016-2017').
    """
    if len(args.year) == 1:
        return
    datacard_massbase = os.path.join(datacards_base('-'.join(args.year)), 'HNL-' + args.flavor + '-m' + str(mass), 'shapes')
    datacard_path = lambda year: os.path.join(datacards_base(year), 'HNL-' + args.flavor + '-m' + str(mass), 'shapes', cardname + '.txt')
    # Bug fix: the existence check previously called datacard_path(year) with
    # an undefined name 'year' instead of the loop variable 'y', which raised
    # a NameError (or silently used a stale global).
    categories = [datacard_path(y) for y in args.year if os.path.isfile(datacard_path(y))]
    makeDirIfNeeded(datacard_massbase + '/' + cardname + '.txt')
    runCombineCommand('combineCards.py ' + ' '.join(categories) + ' > ' + datacard_massbase + '/' + cardname + '.txt')
def write(self, path, append=False):
    """Write self.hist into a directory named after self.name in the file at *path*.

    With append=True and a valid existing file, the file is updated in place;
    otherwise it is recreated.
    """
    mode = 'update' if (append and isValidRootFile(path)) else 'recreate'
    makeDirIfNeeded(path)
    out = ROOT.TFile(path, mode)
    out.mkdir(self.name)
    out.cd(self.name)
    self.hist.Write()
    out.Close()
def mergeSmallFile(merge_file):
    """hadd all root subfiles of *merge_file* into one file next to it.

    In normal mode the temporary directory is removed after merging; in test
    mode the merged file goes into a 'testArea' subdirectory and the inputs
    are kept.
    """
    path, name = merge_file.rsplit('/', 1)
    # Strip the 'tmp_' style prefix to get the final file name.
    merged_name = name.split('_', 1)[1] + '.root'
    if args.isTest:
        target = path + '/testArea/' + merged_name
        makeDirIfNeeded(target)
        os.system('hadd -f -v ' + target + ' ' + merge_file + '/*root')
    else:
        os.system('hadd -f -v ' + path + '/' + merged_name + ' ' + merge_file + '/*root')
        os.system('rm -r -f ' + merge_file)
def getOutputName():
    """Return the per-sample output file path, creating its directory."""
    parts = [getOutputBase(), sample.output]
    # Child subjobs write into a per-sample temporary subdirectory.
    if args.isChild:
        parts.append('tmp_' + sample.output)
    parts.append(sample.name + '_events_' + subjobAppendix + '.root')
    full_path = '/'.join(parts)
    makeDirIfNeeded(full_path)
    return full_path
def write(self, append=False):
    """Write all efficiency/misid numerator and denominator histograms.

    They are stored in a directory named after self.name inside the file at
    self.path; append=True updates a valid existing file instead of
    recreating it.
    """
    mode = 'update' if (append and isValidRootFile(self.path)) else 'recreate'
    makeDirIfNeeded(self.path)
    out = TFile(self.path, mode)
    out.mkdir(self.name)
    out.cd(self.name)
    for component in (self.eff_numerator, self.eff_denominator,
                      self.misid_numerator, self.misid_denominator):
        component.getHist().Write()
    out.Close()
def makeRunScript(script, arguments, i, condor_base):
    """Create the i-th condor run script from the runOnCondor.sh template.

    Substitutes the literal placeholders '$1' and '$2' in the template with
    arguments[0] and arguments[1] and writes the result to
    <condor_base>/runscripts/<i>.sh. Returns the new script's path.

    NOTE(review): the *script* parameter is unused here (kept for interface
    compatibility) — the template path is hard-coded.
    """
    original_script = os.path.expandvars(
        os.path.join('$CMSSW_BASE', 'src', 'HNL', 'Tools', 'scripts', 'runOnCondor.sh'))
    new_script_name = os.path.realpath(
        os.path.join(condor_base, 'runscripts', str(i) + '.sh'))
    makeDirIfNeeded(new_script_name)
    # Fix: use context managers so both file handles are always closed
    # (the template file was previously opened in the loop and never closed).
    with open(original_script, 'r') as template, open(new_script_name, 'w') as new_script:
        for line in template:
            new_line = line.replace('$1', arguments[0]).replace('$2', arguments[1])
            new_script.write(new_line)
    return new_script_name
def getOutputName(prompt_str):
    """Return the per-sample output file path for *prompt_str*, creating its directory."""
    base_parts = [os.getcwd(), 'data']
    # Test runs are redirected into a separate 'testArea' tree.
    if args.isTest:
        base_parts.append('testArea')
    base_parts.extend([__file__.split('.')[0], args.selection, args.region,
                       sample.output, prompt_str])
    output_name = os.path.join(*base_parts)
    if args.isChild:
        output_name += '/tmp_' + sample.output
    output_name += '/' + sample.name + '_events_' + subjobAppendix + '.root'
    makeDirIfNeeded(output_name)
    return output_name
def mergeLargeFile(merge_file):
    """Merge a large set of subfiles in two stages to keep hadd calls small.

    Subfiles are grouped into batches of ~20, each batch is hadd-ed into a
    'tmp_batches' file, and the batch files are then hadd-ed into the final
    output. In test mode everything goes under a 'testArea' subdirectory and
    the inputs are kept; otherwise the inputs and tmp area are removed.
    """
    path, name = merge_file.rsplit('/', 1)
    sub_files = sorted(glob.glob(merge_file + '/*'))
    split_list = []
    tmp_list = []
    # makeDirIfNeeded expects a file path, hence the dummy 'x' component.
    if args.isTest:
        makeDirIfNeeded(
            os.path.join(path, 'testArea', name, 'tmp_batches', 'x'))
    else:
        makeDirIfNeeded(os.path.join(path, name, 'tmp_batches', 'x'))
    # Chop sub_files into batches: close a batch every 20 files or at the
    # last index.
    # NOTE(review): if there is exactly one subfile, no batch is ever closed
    # (i == 0 fails the condition) and that file is silently not merged;
    # likewise leftover files after the last multiple of 20 rely on the
    # 'i == len(sub_files) - 1' clause — confirm this edge case is intended.
    for i, file_list in enumerate(sub_files):
        if i != 0 and (i % 20 == 0 or i == len(sub_files) - 1):
            tmp_list.append(file_list)
            split_list.append([x for x in tmp_list])
            tmp_list = []
        else:
            tmp_list.append(file_list)
    # Stage 1: hadd each batch; in normal mode delete the inputs afterwards.
    for j, file_list in enumerate(split_list):
        if not args.isTest:
            os.system('hadd -f -v ' + path + '/' + name + '/tmp_batches/batch_' + str(j) + '.root ' + ' '.join(file_list))
            for f in file_list:
                os.system('rm ' + f)
        else:
            os.system('hadd -f -v ' + path + '/testArea/' + name + '/tmp_batches/batch_' + str(j) + '.root ' + ' '.join(file_list))
    # Stage 2: hadd all batch files into the final output (name without the
    # leading 'tmp_'-style prefix).
    if args.isTest:
        os.system('hadd -f -v ' + path + '/testArea/' + name.split('_', 1)[1] + '.root ' + path + '/testArea/' + name + '/tmp_batches/*root')
    else:
        os.system('hadd -f -v ' + path + '/' + name.split('_', 1)[1] + '.root ' + path + '/' + name + '/tmp_batches/*root')
    os.system('rm -rf ' + path + '/' + name)
def write(self, append=False, name=None, subdirs=None):
    """Write numerator, denominator and efficiency histograms to self.path.

    name    -- optional suffix base for the written objects
    subdirs -- optional list of nested directory names to write into;
               defaults to a single directory named self.name
    append=True updates a valid existing file instead of recreating it.
    """
    mode = 'update' if (append and isValidRootFile(self.path)) else 'recreate'
    makeDirIfNeeded(self.path)
    out = ROOT.TFile(self.path, mode)
    if subdirs is None:
        out.mkdir(self.name)
        out.cd(self.name)
    else:
        nested = ''.join(d + '/' for d in subdirs)
        out.mkdir(nested)
        out.cd(nested)
    if name is not None:
        self.efficiency_num.getHist().Write(name + '_num')
        self.efficiency_denom.getHist().Write(name + '_denom')
        self.getEfficiency().Write(name + '_efficiency')
    else:
        self.efficiency_num.getHist().Write()
        self.efficiency_denom.getHist().Write()
        self.getEfficiency().Write()
    out.Close()
def write(self, append = False, name=None, is_test=None):
    """Write numerator/denominator histograms to self.path (or a test copy).

    name    -- optional suffix base for the written objects
    is_test -- when not None, first writes to a mirror location under
               $HOME/Testing/Latest/.../<is_test>/, then recurses once with
               is_test=None to also write the normal output
    append=True updates a valid existing file instead of recreating it.
    """
    append_string = 'recreate'
    if append and isValidRootFile(self.path):
        append_string = 'update'
    if is_test is None:
        path_to_use = self.path
    else:
        # Rebase everything after the 'testArea' component onto the test tree
        # and insert the is_test label before the file name.
        split_path = self.path.split('/')
        index_to_use = split_path.index('testArea') + 1
        path_to_use = os.path.expandvars("$HOME/Testing/Latest/" + '/'.join(split_path[index_to_use:-1]) + '/' + is_test + '/' + split_path[-1])
    makeDirIfNeeded(path_to_use)
    output_file = ROOT.TFile(path_to_use, append_string)
    if self.subdirs is None:
        # Deliberately writes into the file's top level (directory creation
        # disabled below).
        pass
        # output_file.mkdir(self.name)
        # output_file.cd(self.name)
    else:
        nomo = ''
        for d in self.subdirs:
            nomo += d + '/'
        output_file.mkdir(nomo)
        output_file.cd(nomo)
    if name is not None:
        self.efficiency_num.getHist().Write(name + '_num')
        self.efficiency_denom.getHist().Write(name + '_denom')
        # self.getEfficiency().Write(name+'_efficiency')
    else:
        self.efficiency_num.getHist().Write()
        self.efficiency_denom.getHist().Write()
        # self.getEfficiency().Write()
    output_file.Close()
    # After the test copy has been written, write the regular output too.
    if is_test is not None:
        self.write(append=append, name=name, is_test=None)
def savePlot(self, destination, message=None):
    """Save the canvas as .pdf/.png/.root at *destination* and mirror it.

    If the path contains a 'src' component (after stripping timestamp
    components), the plot is also copied to the public_html web area (with an
    index.php) and to a central per-CMSSW-version backup directory.
    message -- optional text written next to the plot via pt.writeMessage
    """
    makeDirIfNeeded(destination)
    destination_components = destination.split('/')
    cleaned_components = [
        x for x in destination_components if not isTimeStampFormat(x)
    ]
    try:
        index_for_php = cleaned_components.index('src')
    # Fix: catch only the exception .index() actually raises instead of a
    # bare except.
    except ValueError:
        index_for_php = None
    # Fix: test 'is not None' — index 0 is a valid position but falsy, so the
    # old truthiness test wrongly skipped the mirror copies in that case.
    if index_for_php is not None:
        php_destination = '/user/lwezenbe/public_html/'
        php_destination += '/'.join([
            comp for comp in cleaned_components[index_for_php + 1:]
            if (comp != 'data' and comp != 'Results')
        ])
        makeDirIfNeeded(php_destination)
        os.system('cp -rf $CMSSW_BASE/src/HNL/Tools/php/index.php ' +
                  php_destination.rsplit('/', 1)[0] + '/index.php')
        cmssw_version = os.path.expandvars('$CMSSW_BASE').rsplit('/', 1)[-1]
        central_destination = '/user/lwezenbe/private/Backup/' + cmssw_version + '/'
        central_destination += '/'.join(
            [comp for comp in destination_components[index_for_php + 1:]])
        makeDirIfNeeded(central_destination)
    self.canvas.SaveAs(destination + ".pdf")
    self.canvas.SaveAs(destination + ".png")
    self.canvas.SaveAs(destination + ".root")
    #Clean out the php directory you want to write to if it is already filled, otherwise things go wrong with updating the file on the website
    #os.system("rm "+php_destination.rsplit('/')[0]+"/*")
    if index_for_php is not None:
        self.canvas.SaveAs(php_destination + ".pdf")
        self.canvas.SaveAs(php_destination + ".png")
        self.canvas.SaveAs(php_destination + ".root")
    #
    # Save to a central, local backup that you can return to also after you switched CMSSW
    #
    if index_for_php is not None:
        self.canvas.SaveAs(central_destination + ".pdf")
        self.canvas.SaveAs(central_destination + ".png")
        self.canvas.SaveAs(central_destination + ".root")
    if message is not None:
        pt.writeMessage(destination.rsplit('/', 1)[0], message)
def clearLogs(base):
    """Rotate logs: mirror the 'Latest' tree into 'Previous', then wipe 'Latest'."""
    previous = base.replace('Latest', 'Previous')
    # makeDirIfNeeded expects a file path, hence the dummy '/x' component.
    makeDirIfNeeded(previous + '/x')
    for command in ('rm -r ' + previous + '/*',
                    'scp -r ' + base + '/* ' + previous + '/.',
                    'rm -r ' + base):
        os.system(command)
cutter = Cutter(chain=chain) # # Create new reduced tree (except if it already exists and overwrite option is not used) # output_base = os.path.expandvars( os.path.join('/user/$USER/public/ntuples/HNL') ) if not args.isTest else os.path.expandvars( os.path.join('$CMSSW_BASE', 'src', 'HNL', 'TMVA', 'data', 'testArea')) signal_str = 'Signal' if chain.is_signal else 'Background' output_name = os.path.join( output_base, 'TMVA', str(args.year), args.region + '-' + args.selection, signal_str, 'tmp_' + sample.output, sample.name + '_' + sample.output + '_' + str(args.subJob) + '.root') makeDirIfNeeded(output_name) # if not args.isTest and isValidRootFile(output_name): # log.info('Finished: valid outputfile already exists') # exit(0) output_file = ROOT.TFile(output_name, "RECREATE") # # Switch off unused branches and create outputTree # output_tree = {} for prompt_str in ['prompt', 'nonprompt']: output_tree[prompt_str] = {} for c in SUPER_CATEGORIES.keys(): output_tree[prompt_str][c] = ROOT.TTree('trainingtree',
# Special variable: efficiency as a function of the generated HNL mass.
var['HNLmass'] = (lambda c : c.HNLmass, np.array(mass_range), ('m_{N} [GeV]', 'Efficiency'))

from HNL.Tools.efficiency import Efficiency
from HNL.EventSelection.eventCategorization import returnCategoryTriggers
from HNL.Triggers.triggerSelection import applyCustomTriggers

# Book one Efficiency object per (category, variable, trigger) — with an
# extra per-trigger index when triggers are evaluated separately.
eff = {}
for c in categories:
    for v in {k for k in var.keys()}:
        for t in category_map[c]:
            if args.separateTriggers is None or args.separateTriggers == 'full':
                name = str(c) + '_' + v + '_' + t
                # NOTE(review): both branches of this if/else are identical;
                # the HNLmass special case seems to have been dropped —
                # confirm whether a different subdir was intended.
                if v != 'HNLmass':
                    eff[(c, v, t)] = Efficiency(name, var[v][0], var[v][2], getOutputName(v), var[v][1], subdirs=['efficiency_' + str(c), v, 'allTriggers'])
                else:
                    eff[(c, v, t)] = Efficiency(name, var[v][0], var[v][2], getOutputName(v), var[v][1], subdirs=['efficiency_' + str(c), v, 'allTriggers'])
                makeDirIfNeeded(getOutputName(v))
            else:
                # NOTE(review): category_triggers and returnCategoryTriggerNames
                # are not imported in this snippet (only returnCategoryTriggers
                # is) — verify they are in scope elsewhere in the file.
                for i, trigger in enumerate(category_triggers(chain, c)):
                    name = str(c) + '_' + v + '_' + t + '_' + str(i)
                    # NOTE(review): identical branches here as well.
                    if v != 'HNLmass':
                        eff[(c, v, t, i)] = Efficiency(name, var[v][0], var[v][2], getOutputName(v), var[v][1], subdirs=['efficiency_' + str(c), v, str(returnCategoryTriggerNames(c)[i])])
                    else:
                        eff[(c, v, t, i)] = Efficiency(name, var[v][0], var[v][2], getOutputName(v), var[v][1], subdirs=['efficiency_' + str(c), v, str(returnCategoryTriggerNames(c)[i])])
                    makeDirIfNeeded(getOutputName(v))

#
# Set event range
#
if args.isTest:
    # Cap test runs at 20k events (or fewer if the subjob has fewer).
    max_events = 20000
    event_range = xrange(max_events) if max_events < len(sample.getEventRange(args.subJob)) else sample.getEventRange(args.subJob)
input_files = glob.glob(os.getcwd() + '/data/calcTriggerEff/*/' + args.separateTriggers + '/*.root') sample_names = {in_file_name.split('/')[-3] for in_file_name in input_files} f_names = {f.split('/')[-1].split('.')[0] for f in input_files} #Prepare output dir timestamp = time.strftime("%Y%m%d_%H%M%S") for f_name in f_names: if args.isTest: out_dir = os.getcwd( ) + '/data/testArea/Results/' + args.separateTriggers + '/' + f_name + '/' + timestamp else: out_dir = os.getcwd( ) + '/data/Results/' + args.separateTriggers + '/' + f_name + '/' + timestamp makeDirIfNeeded(out_dir + '/x') # def isUniqueElement(l, item): # list_of_truths = [e == item for e in l] # return not any(list_of_truths) import ROOT from HNL.Tools.helpers import rootFileContent, getObjFromFile from HNL.Plotting.plot import Plot from HNL.Plotting.plottingTools import extraTextFormat from HNL.EventSelection.eventCategorization import CATEGORIES, CATEGORY_TEX_NAMES, returnCategoryTriggerNames, TRIGGER_CATEGORIES from HNL.Tools.efficiency import Efficiency # #TODO: Can this be made general? # def makeNameCompList(name_list):
# Child subjobs write into a per-sample temporary subdirectory.
output_name += '/tmp_' + sample.output

print 'Getting things ready to start the event loop'

#Initialize histograms
from HNL.Tools.ROC import ROC
from HNL.Tools.efficiency import Efficiency

# One ROC object per tau-ID algorithm, each with its own output file.
list_of_roc = []
list_of_var_hist = {'efficiency': {}, 'fakerate': {}}
for algo in algos:
    algo_wp = algos[algo]
    tot_name = output_name + '/' + sample.name + '_' + algo + '-ROC-' + str(
        args.flavor) + '_' + args.subJob + '.root'
    makeDirIfNeeded(tot_name)
    list_of_roc.append(ROC(algo, tot_name, working_points=algo_wp))

#Efficiency histograms for variables
# Each entry: [value lambda, bin edges, (x title, y title)].
# NOTE(review): the eta axis title says '[GeV]' — looks like a copy-paste
# from pt; confirm before changing the label.
var_hist = {
    'pt': [
        lambda c: c.pt_l,
        np.arange(0., 210., 10.),
        ('p_T^{#tau}(offline) [GeV]', 'Efficiency')
    ],
    'eta': [
        lambda c: c.eta_l,
        np.arange(-2.5, 3., 0.5),
        ('#eta^{#tau}(offline) [GeV]', 'Efficiency')
    ]
}
else: os.system(command + ' > ' + out_file_name(signal).split('.root')[0] + '.txt') exit(0) # # List the input variables here # from HNL.TMVA.mvaVariables import getVariableList print 'Processing', args.signalname if not args.skipTraining: input_var = getVariableList(args.signalname) makeDirIfNeeded(out_file_name(args.signalname)) out_file = ROOT.TFile(out_file_name(args.signalname), 'RECREATE') factory = ROOT.TMVA.Factory("factory", out_file, "") if args.writeInTree: if args.signalname.split('_')[-1] in ['e', 'mu']: name = 'NoTau/trainingtree' else: name = 'Total/trainingtree' in_tree = ih.getTree(args.signalname, name=name).CloneTree() in_tree.Write('backuptree') loader = ih.getLoader(args.signalname, input_var) preselection = ROOT.TCut("") # options = ""
#Merges subfiles if needed if args.batchSystem != 'HTCondor': merge_files = glob.glob('/storage_mnt/storage/user/' + os.path.expandvars('$USER') + '/public/ntuples/HNL/' + skim_selection_string + '/' + args.year + '/' + args.skimName + '/tmp*') else: merge_files = glob.glob(pnfs_base + '/tmp*') merge_files = sorted(merge_files) if args.batchSystem != 'HTCondor': os.system('gfal-mkdir ' + pnfs_base) os.system('gfal-mkdir ' + pnfs_backup_base) else: makeDirIfNeeded(pnfs_base) makeDirIfNeeded(pnfs_backup_base) for mf in merge_files: if 'Data' in mf: continue path, name = mf.rsplit('/', 1) new_name = name.split('_', 1)[1] + '.root' if not args.isTest: if args.batchSystem != 'HTCondor': os.system('gfal-rm ' + pnfs_backup_base + '/' + new_name) os.system('gfal-copy ' + pnfs_base + '/' + new_name + ' ' + pnfs_backup_base + '/' + new_name) os.system('gfal-rm ' + pnfs_base + '/' + new_name) else: if isValidRootFile(pnfs_base + '/' + new_name):
def makeDataCard(bin_name, flavor, year, obs_yield, sig_name, bkgr_names, sig_yield=None, bkgr_yields=None, shapes=False, coupling_sq=1e-4):
    """Write a combine datacard for a single search bin.

    bin_name     -- name of the bin / category (also the output file name)
    flavor, year -- coupling flavor and data-taking year (output directory)
    obs_yield    -- observed yield (cut-and-count mode only; -1 for shapes)
    sig_name     -- signal process name
    bkgr_names   -- list of background process names
    sig_yield / bkgr_yields -- yields for cut-and-count mode
    shapes       -- write a shape-based card (rates -1, shapes file linked)
    coupling_sq  -- signal coupling squared, recorded as a comment
    """
    if not shapes and len(bkgr_yields) != len(bkgr_names):
        raise RuntimeError(
            "length of background yields and names is inconsistent")
    if not shapes:
        out_name = os.path.join(os.path.expandvars('$CMSSW_BASE'), 'src',
                                'HNL', 'Stat', 'data', 'dataCards', str(year),
                                flavor, sig_name, 'cutAndCount',
                                bin_name + '.txt')
    else:
        out_name = os.path.join(os.path.expandvars('$CMSSW_BASE'), 'src',
                                'HNL', 'Stat', 'data', 'dataCards', str(year),
                                flavor, sig_name, 'shapes', bin_name + '.txt')
    makeDirIfNeeded(out_name)
    out_file = open(out_name, 'w')
    out_file.write('# coupling squared = ' + str(coupling_sq) + ' \n \n')
    out_file.write('imax 1 number of bins \n')
    out_file.write('jmax * number of processes\n')
    out_file.write('kmax * \n')
    out_file.write('-' * 400 + '\n')
    if shapes:
        shapes_path = os.path.join(os.path.expandvars('$CMSSW_BASE'), 'src',
                                   'HNL', 'Stat', 'data', 'shapes', str(year),
                                   flavor, sig_name,
                                   bin_name + '.shapes.root')
        # Fix: terminate the shapes directive with a newline so the separator
        # below starts on its own line (the card was malformed before).
        out_file.write('shapes * * \t' + shapes_path +
                       ' $PROCESS $PROCESS_SYSTEMATIC\n')
        out_file.write('-' * 400 + '\n')
    out_file.write('bin ' + bin_name + ' \n')
    if shapes:
        # In shape mode combine reads data_obs from the shapes file.
        out_file.write('observation -1 \n')
        # out_file.write('observation '+str(obs_yield)+ ' \n')
    else:
        out_file.write('observation ' + str(obs_yield) + ' \n')
    out_file.write('-' * 400 + '\n')
    out_file.write(tab(['bin', ''] + [bin_name] * (len(bkgr_names) + 1)))
    out_file.write(tab(['process', ''] + bkgr_names + [sig_name]))
    out_file.write(
        tab(['process', ''] +
            [str(i) for i in xrange(1, len(bkgr_names) + 1)] + ['0']))
    if shapes:
        out_file.write(tab(['rate', ''] + ['-1'] * (len(bkgr_names) + 1)))
    else:
        # Fix: clip negative yields to a formatted '0.000000' string instead
        # of mixing a float 0.0 into a list of strings.
        out_file.write(
            tab(['rate', ''] +
                ['%4.6f' % by if by >= 0 else '0.000000' for by in bkgr_yields] +
                ['%4.6f' % sig_yield]))
    out_file.write('-' * 400 + '\n')
    # For now no systematics, just lumi as example
    out_file.write(tab(['lumi_13TeV', 'lnN'] + [1.025] * (len(bkgr_names) + 1)))
    #autoMCstats
    if shapes:
        out_file.write('* autoMCStats 3 1 1')
    out_file.close()
import glob, os
from HNL.Tools.helpers import makeDirIfNeeded, progress

# Primary datasets to filter for duplicate events.
list_of_datafiles = [
    'SingleMuon', 'SingleElectron', 'DoubleMuon', 'DoubleEG', 'MuonEG'
]

import argparse
argParser = argparse.ArgumentParser(description="Argument parser")
argParser.add_argument('--year', action='store', default='2016')
argParser.add_argument('--skim', action='store', default='Reco')
args = argParser.parse_args()

output_folder = '/storage_mnt/storage/user/lwezenbe/public/ntuples/HNL/OldAnalysis/' + args.year + '/' + args.skim + '/tmp_DataFiltered'
makeDirIfNeeded(output_folder)

# Identifiers of events seen so far, used to drop duplicates across datasets.
# NOTE(review): exact tuple contents not visible here — verify against the
# (commented-out) filtering loop below.
event_information_set = set()

# for f_name in list_of_datafiles:
#     file_list = glob.glob('/storage_mnt/storage/user/lwezenbe/public/ntuples/HNL/OldAnalysis/'+args.year+ '/' + args.skim +'/tmp_'+f_name+'/*.root')
#     for i, sub_f_name in enumerate(file_list):
#         print sub_f_name
#         f = ROOT.TFile(sub_f_name)
#         c = f.Get('blackJackAndHookers/blackJackAndHookersTree')
#         try:
#             c.GetEntry()
#         except:
#             continue
#         output_file = ROOT.TFile(output_folder +'/'+ sub_f_name.split('/')[-1], 'recreate')
print 'Chain initialized'

#Set output dir
#Since number of subjobs was set to be 1 (HNL samples are small), this name was chosen since no overlap possible
#If this changes, this needs to be changed as well
from HNL.Tools.helpers import makeDirIfNeeded
if not args.isTest:
    output_name = os.path.join(os.getcwd(), 'data',
                               os.path.basename(__file__).split('.')[0],
                               sample.output)
else:
    # Test runs are redirected into a separate 'testArea' tree.
    output_name = os.path.join(os.getcwd(), 'data', 'testArea',
                               os.path.basename(__file__).split('.')[0],
                               sample.output)
output_name += '/' + sample.name + '.root'
makeDirIfNeeded(output_name)

print 'Getting things ready to start the event loop'

#Initialize histograms
# pT spectra of the three hardest leptons (100 bins, 0-100 GeV).
pt_hist = [
    ROOT.TH1D('leading_pt', sample.name, 100, 0, 100),
    ROOT.TH1D('subleading_pt', sample.name, 100, 0, 100),
    ROOT.TH1D('trailing_pt', sample.name, 100, 0, 100)
]
# Extra tau pT histograms for tau-enriched samples only.
if args.plotSoftTau and 'tau' in sample.name:
    pt_hist.append(ROOT.TH1D('softest_tau', sample.name, 100, 0, 100))
if args.plotSoftTau and 'tau' in sample.name:
    pt_hist.append(ROOT.TH1D('subleading_tau', sample.name, 100, 0, 100))

#Determine if testrun so it doesn't need to calculate the number of events in the getEventRange
default='baseline', type=str, help='What region do you want to select for?', choices=['baseline', 'lowMassSR', 'highMassSR']) argParser.add_argument( '--selection', action='store', default='default', help='Select the type of selection for objects', choices=['leptonMVAtop', 'AN2017014', 'default', 'Luka', 'TTT']) args = argParser.parse_args() ih = InputHandler(args.year, args.region, args.selection) #use getTree and scramble background for signal in ih.signal_names: if signal.split('_')[-1] in ['e', 'mu']: name = 'NoTau/trainingtree' else: name = 'Total/trainingtree' signal_tree = ih.getTree(signal, name=name, signal_only=True).CloneTree() bkgr_tree = ih.getTree(signal, name=name, bkgr_only=True).CloneTree() fname = '/storage_mnt/storage/user/lwezenbe/public/ntuples/HNL/TMVA/' + str( args.year ) + '/' + args.region + '-' + args.selection + '/ForExternalTraining/' + signal + '.root' makeDirIfNeeded(fname) out_file = ROOT.TFile(fname, 'recreate') signal_tree.Write('signaltree') bkgr_tree.Write('backgroundtree') out_file.Close()
if __name__ == "__main__":
    # DeepTau working points per discriminator.
    iso_wp = ['vvvloose', 'vvloose', 'vloose', 'loose', 'medium', 'tight', 'vtight', 'vvtight']
    ele_wp = ['vvloose', 'vloose', 'loose', 'medium', 'tight', 'vtight', 'vvtight']
    mu_wp = ['vloose', 'loose', 'medium', 'tight']
    import numpy as np
    import os
    from HNL.Tools.helpers import makeDirIfNeeded
    from HNL.Tools.logger import getLogger, closeLogger
    log = getLogger('INFO')
    pt_range = np.arange(30, 70, 10)
    out_name_tex = os.path.join(os.path.expandvars('$CMSSW_BASE'), 'src', 'HNL', 'Weights', 'data', 'Results', __file__.split('.')[0], 'deeptauVSjets.txt')
    makeDirIfNeeded(out_name_tex)
    # LaTeX table output is currently disabled (kept for reference).
    # out_file_tex = open(out_name_tex, 'w')
    # out_file_tex.write('\\begin{table}[] \n')
    column_str = '|'.join(['c' for _ in pt_range])
    # out_file_tex.write("\\begin{tabular}{|c|"+column_str+"|} \n")
    # out_file_tex.write("\hline \n")
    # out_file_tex.write('& '+'&'.join(['$p_T = $'+str(m)+ ' GeV' for m in pt_range])+' \\\\ \n')
    # out_file_tex.write("\hline \n")
    # Scan the iso working points; ele/mu WPs fixed at their loosest value.
    # NOTE(review): TauSF is not imported in this snippet — verify the import
    # exists elsewhere in the file.
    for iso in iso_wp:
        tausftool = TauSF(2016, 'deeptauVSjets', iso, ele_wp[0], mu_wp[0])
        # out_file_tex.write(iso)
        for pt in pt_range:
            # Scale factor with up/down variations expressed as differences.
            sf = tausftool.sftool_iso.getSFvsPT(pt)
            up = tausftool.sftool_iso.getSFvsPT(pt, unc='Up') - tausftool.sftool_iso.getSFvsPT(pt)
            down = tausftool.sftool_iso.getSFvsPT(pt) - tausftool.sftool_iso.getSFvsPT(pt, unc='Down')
# # Save histograms # if args.isTest: continue subjobAppendix = '_subJob' + args.subJob if args.subJob else '' output_name = os.path.join(os.getcwd(), 'data', 'plotTau', reco_or_gen_str, sample.output) if args.isChild: output_name += '/tmp_' + sample.output + '/' + sample.name + '_' else: output_name += '/' makeDirIfNeeded(output_name + 'variables' + subjobAppendix + '.root') output_file = TFile( output_name + 'variables' + subjobAppendix + '.root', 'RECREATE') for v in var.keys(): output_file.mkdir(v) output_file.cd(v) list_of_hist[sample.output][v].getHist().Write() output_file.Close() #If the option to not run over the events again is made, load in the already created histograms here else: import glob hist_list = glob.glob(os.getcwd() + '/data/plotTau/' + reco_or_gen_str +
import os

# Directories and files excluded from the backup sweep.
skipDirs = ['documents', 'log', '.git', 'virenv', 'utils']
skipFiles = []
extensions_to_copy = ['root', 'pdf', 'png']

# Collect glob patterns for every directory that contains output files.
file_paths = []
base_path = os.path.join(os.path.expandvars('$CMSSW_BASE'), 'src', 'HNL')
for root, dirs, files in os.walk(base_path, topdown=True):
    # Prune skipped directories in place so os.walk does not descend into them.
    dirs[:] = [d for d in dirs if d not in skipDirs]
    root_files = [
        os.path.join(root, f) for f in files
        if f.split('.')[-1] in extensions_to_copy and f not in skipFiles
    ]
    if len(root_files) != 0:
        for ext in extensions_to_copy:
            file_paths.append(os.path.join(root, '*.' + ext))

cmssw_version = os.path.expandvars('$CMSSW_BASE').rsplit('/', 1)[-1]
central_destination = makePathTimeStamped('/user/lwezenbe/private/Backup/' +
                                          cmssw_version + '/AllOutput')

for rf in file_paths:
    try:
        index_for_backup = rf.split('/').index('src')
    # Fix: catch the specific exception raised by .index() instead of a bare
    # except.
    except ValueError:
        index_for_backup = None
    # Fix: previously a missing 'src' component fell through to
    # rf.split('/')[None + 1:-1], raising TypeError; skip such paths instead.
    if index_for_backup is None:
        continue
    backup_path = '/'.join([central_destination] +
                           rf.split('/')[index_for_backup + 1:-1])
    makeDirIfNeeded(backup_path + '/x')
    os.system('scp ' + rf + ' ' + backup_path + '/.')
default='HTCondor', help='choose batchsystem', choices=['local', 'HTCondor', 'Cream02']) args = argParser.parse_args() if args.batchSystem == 'Cream02': input_folder = '/storage_mnt/storage/user/lwezenbe/public/ntuples/HNL/' + args.skimSelection + '/' + args.year + '/' + args.skim + '/tmp_Data/' output_folder = '/storage_mnt/storage/user/lwezenbe/public/ntuples/HNL/' + args.skimSelection + '/' + args.year + '/' + args.skim + '/tmp_DataFiltered' else: pnfs_base = os.path.join('/pnfs/iihe/cms/store/user', os.path.expandvars('$USER'), 'skimmedTuples/HNL', args.skimSelection, args.year, args.skim) input_folder = pnfs_base + '/tmp_Data' output_folder = pnfs_base + '/tmp_DataFiltered' makeDirIfNeeded(output_folder + '/x') event_information_set = set() file_list = glob.glob(input_folder + '/*.root') for i, sub_f_name in enumerate(file_list): progress(i, len(file_list)) f = ROOT.TFile(sub_f_name) c = f.Get('blackJackAndHookers/blackJackAndHookersTree') try: c.GetEntry() except: continue output_file = ROOT.TFile(output_folder + '/' + sub_f_name.split('/')[-1],
shape_hist[sample_name] = ROOT.TH1D( sample_name, sample_name, n_search_regions, 0.5, n_search_regions + 0.5) for sr in xrange(1, n_search_regions + 1): shape_hist[sample_name].SetBinContent( sr, list_of_values['bkgr'][sample_name][ac][sr]) shape_hist[sample_name].SetBinError( sr, list_of_errors['bkgr'][sample_name][ac][sr]) for sample_name in list_of_values['signal'].keys(): out_path = os.path.join(os.path.expandvars('$CMSSW_BASE'), 'src', 'HNL', 'Stat', 'data', 'shapes', str(args.year), args.flavor, sample_name, ac + '.shapes.root') makeDirIfNeeded(out_path) out_shape_file = ROOT.TFile(out_path, 'recreate') n_search_regions = srm[ args.region].getNumberOfSearchRegions() shape_hist[sample_name] = ROOT.TH1D( sample_name, sample_name, n_search_regions, 0.5, n_search_regions + 0.5) for sr in xrange(1, n_search_regions + 1): shape_hist[sample_name].SetBinContent( sr, list_of_values['signal'][sample_name][ac][sr]) shape_hist[sample_name].SetBinError( sr, list_of_errors['signal'][sample_name][ac][sr]) shape_hist[sample_name].Write(sample_name) bkgr_names = [] # for bkgr_sample_name in list_of_values['bkgr'].keys()+['data_obs']: for bkgr_sample_name in background_collection + [