def createSummaryTasks(opt):
    #get the files to process from an EOS /store path or a local directory
    tasklist = []
    if not opt.ma:
        if opt.input.find('/store') >= 0:
            inputFiles = fillFromStore(opt.input)
        else:
            inputFiles = os.listdir(opt.input)
        for filename in inputFiles:
            if not os.path.splitext(filename)[1] == '.root':
                continue
            isData = 'Data' in filename
            tasklist.append((opt.var, filename, isData, opt.output))

        #loop over tasks: in parallel if requested, sequentially otherwise
        if opt.jobs > 0:
            print ' Submitting jobs in %d threads' % opt.jobs
            import multiprocessing as MP
            pool = MP.Pool(opt.jobs)
            pool.map(createSummaryPacked, tasklist)
        else:
            for var, filename, isData, outDir in tasklist:
                createSummary(var=var, filename=filename, isData=isData, outDir=outDir)
    else:
        #masstrees, massfiles = getMassTrees(opt.input, verbose=True)
        #masspoints = sorted(list(set([mass for mass,_ in masstrees.keys()])))

        # Create an array with the different masses
        # (a bit of hardcoding until I understand Benjamin's code)
        mass = [166, 169, 171, 173, 175, 178]
        direct = 'root://eoscms//eos/cms/store/cmst3/group/top/summer2015/treedir_bbbcb36/ttbar/mass_scan/'
        for m in mass:
            m = str(m)
            output = opt.output + m
            print 'Creating new directory %s' % output
            os.system('mkdir -p %s' % output)

            #start a fresh task list for this mass point, otherwise earlier
            #mass points would be resubmitted on every iteration
            tasklist = []
            for filename in fillFromStore(opt.input):
                #keep only the single-top and ttbar samples of this mass point
                if filename not in (direct + 'MC8TeV_SingleT_tW_' + m + 'v5.root',
                                    direct + 'MC8TeV_SingleT_t_' + m + 'v5.root',
                                    direct + 'MC8TeV_SingleTbar_tW_' + m + 'v5.root',
                                    direct + 'MC8TeV_SingleTbar_t_' + m + 'v5.root',
                                    direct + 'MC8TeV_TTJets_' + m + 'v5.root',
                                    direct + 'MC8TeV_TTJets_MSDecays_' + m + 'v5.root'):
                    continue
                print 'Going to analyze %s' % filename
                isData = 'Data' in filename
                tasklist.append((opt.var, filename, isData, output))

            print ' Submitting jobs in %d threads for %s' % (opt.jobs, m)
            import multiprocessing as MP
            pool = MP.Pool(opt.jobs)
            pool.map(createSummaryPacked, tasklist)

    return 0
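# createSummaryPacked is used with Pool.map above but is not defined in this
# section. Pool.map hands each worker a single object, so the wrapper is
# presumably just unpacking the (var, filename, isData, outDir) tuple and
# forwarding it to createSummary; a minimal sketch under that assumption
# (the error handling below is illustrative, not the actual implementation):
def createSummaryPacked(task):
    var, filename, isData, outDir = task
    try:
        return createSummary(var=var, filename=filename, isData=isData, outDir=outDir)
    except Exception, e:
        #report and skip per-file failures instead of aborting the whole pool
        print 'Problem with %s: %s' % (filename, e)
        return None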
def main(args, options):
    #prepare output directory
    if args[0] == options.outDir:
        options.outDir += '/singleTop'
        print 'Warning output directory is the same, renaming as %s' % options.outDir
    os.system('mkdir -p %s' % options.outDir)

    #prepare one task per file to process
    taskList = []
    try:
        treefiles = {}  # procname -> [filename1, filename2, ...]
        if args[0].find('/store') >= 0:
            inputFiles = fillFromStore(args[0])
        else:
            inputFiles = os.listdir(args[0])
        for filename in inputFiles:
            if not os.path.splitext(filename)[1] == '.root':
                continue
            isData, pname, splitno = resolveFilename(os.path.basename(filename))
            if pname not in treefiles:
                treefiles[pname] = []
            taskList.append((filename, isData, options.outDir))
    except IndexError:
        print "Please provide a valid input directory"
        return -1

    #submit tasks in parallel, if required, or run sequentially
    if options.jobs > 0:
        print ' Submitting jobs in %d threads' % options.jobs
        import multiprocessing as MP
        pool = MP.Pool(options.jobs)
        pool.map(runSingleTopAnalysisPacked, taskList)
    else:
        for filename, isData, outDir in taskList:
            runSingleTopAnalysis(filename=filename, isData=isData, outDir=outDir)

    #EDIT: append the current selection cuts to the sig/bkg summary file
    sigbkg = open('sig_bkg.txt', 'a')
    sigbkg.write('#####################The Cuts:\n')
    sigbkg.write('Must be an e or mu Event\n')
    sigbkg.write('Exactly 1 Forward Jet with eta<3.2 or eta>4.7\n')
    sigbkg.write('1 < Num. Central Jets < 3\n')
    sigbkg.write('Exactly 1 Sec Vertex\n')
    sigbkg.write('MT > 50\n')
    sigbkg.close()

    return 0
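# runSingleTopAnalysisPacked, referenced in main() above, is likewise not
# defined in this section; assuming it mirrors the packed-wrapper pattern
# used for createSummaryPacked, a minimal sketch (again, the error handling
# is illustrative only):
def runSingleTopAnalysisPacked(task):
    filename, isData, outDir = task
    try:
        return runSingleTopAnalysis(filename=filename, isData=isData, outDir=outDir)
    except Exception, e:
        #report and skip per-file failures instead of aborting the whole pool
        print 'Problem with %s: %s' % (filename, e)
        return None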