def __init__(self, inputfile, year, ijob, njobs):
    """Set up the analyzer for one job's share of the given input.

    A '.txt' input is treated as a list of files that gets split across
    njobs jobs; anything else is passed to the analyzer directly.
    """
    if inputfile.endswith('.txt'):
        infiles = SplitUp(inputfile, njobs)[ijob - 1]
    else:
        infiles = inputfile
    self.a = analyzer(infiles)

    # The set name lives at a different position in the file name
    # depending on whether the input was a .txt list or a single file.
    basename = inputfile.split('/')[-1]
    if inputfile.endswith('.txt'):
        self.setname = basename.split('_')[0]
    else:
        self.setname = basename.split('_')[1]

    self.year = year
    self.ijob = ijob
    self.njobs = njobs
    self.config = OpenJSON('THconfig.json')
    self.cuts = self.config['CUTS']
    # Per-year HLT trigger lists.
    self.trigs = {
        16: ['HLT_PFHT800', 'HLT_PFHT900'],
        17: ['HLT_PFHT1050', 'HLT_AK8PFJet500'],
        18: ['HLT_AK8PFJet400_TrimMass30',
             'HLT_AK8PFHT850_TrimMass50',
             'HLT_PFHT1050']
    }
    self.a.isData = 'Data' in inputfile
def CombineCommonSets(groupname, doStudies=False, modstr=''):
    '''Stitch together the pieces of a common set with `hadd`, per year:
    either the QCD HT bins or ttbar (ttbar-allhad + ttbar-semilep).

    @param groupname (str): "QCD" or "ttbar".
    @param doStudies (bool, optional): Combine the "studies" rootfiles
        instead of "selection". Defaults to False.
    @param modstr (str, optional): Extra tag inserted into the file names.
        Defaults to ''.

    Raises:
        ValueError: If groupname is not "QCD" or "ttbar".
    '''
    if groupname not in ["QCD", "ttbar"]:
        raise ValueError('Can only combine QCD or ttbar')
    # NOTE: removed an unused `config = OpenJSON('THconfig.json')` load —
    # the config was never read in this function.
    for y in ['16', '17', '18']:
        # Placeholders: {0}=setname, {1}=year, {2}=modstr, {3}=variation tag.
        baseStr = 'rootfiles/TH%s_{0}{2}_{1}{3}.root' % ('studies' if doStudies else 'selection')
        if groupname == 'ttbar':
            # Systematic variations only exist for the selection output.
            to_loop = [''] if doStudies else ['', 'JES', 'JER', 'JMS', 'JMR']
            for v in to_loop:
                if v == '':
                    ExecuteCmd('hadd -f %s %s %s' % (
                        baseStr.format('ttbar', y, modstr, ''),
                        baseStr.format('ttbar-allhad', y, modstr, ''),
                        baseStr.format('ttbar-semilep', y, modstr, '')))
                else:
                    for v2 in ['up', 'down']:
                        v3 = '_%s_%s' % (v, v2)
                        ExecuteCmd('hadd -f %s %s %s' % (
                            baseStr.format('ttbar', y, modstr, v3),
                            baseStr.format('ttbar-allhad', y, modstr, v3),
                            baseStr.format('ttbar-semilep', y, modstr, v3)))
        elif groupname == 'QCD':
            ExecuteCmd('hadd -f %s %s %s %s %s' % (
                baseStr.format('QCD', y, modstr, ''),
                baseStr.format('QCDHT700', y, modstr, ''),
                baseStr.format('QCDHT1000', y, modstr, ''),
                baseStr.format('QCDHT1500', y, modstr, ''),
                baseStr.format('QCDHT2000', y, modstr, '')))
def getNormFactor(setname, year, configPath, genEventCount):
    '''Compute the MC normalization factor (xsec * lumi) / genEventCount.

    @param setname (str): Sample name; key into config["XSECS"].
    @param year (str): Year key. Kept for interface compatibility — the
        luminosity and cross section used here are not year-indexed.
    @param configPath (str or dict): Path to a JSON config file, or an
        already-loaded config dict with "lumi" and "XSECS" keys.
    @param genEventCount (int): Number of generated events in the sample.

    Returns:
        float: (xsec * lumi) / genEventCount

    Raises:
        KeyError: If setname is not present in config["XSECS"].
    '''
    # Config loading - accepts either a path or a pre-loaded dict.
    if isinstance(configPath, str):
        config = OpenJSON(configPath)
    else:
        config = configPath
    # NOTE: removed unused local `cuts = config['CUTS'][year]` — it was
    # never read.
    lumi = config['lumi']
    try:
        xsec = config['XSECS'][setname]
    except KeyError:
        raise KeyError('Key "%s" does not exist in config["XSECS"]' % setname)
    return (xsec * lumi) / genEventCount
def getNormFactor(setname, year, configPath):
    '''Compute the MC normalization factor (xsec * lumi) / genEventCount,
    taking the per-year luminosity and generated-event count from the config.

    @param setname (str): Sample name; key into config["XSECS"]. "ttbar" is
        remapped to "ttbar-allhad" for years '17'/'18'.
    @param year (str or int): Year; selects "lumi<year>" and
        config["NEVENTS"][year].
    @param configPath (str or dict): Path to a JSON config file, or an
        already-loaded config dict.

    Returns:
        float: (xsec * lumi) / genEventCount

    Raises:
        KeyError: If the (possibly remapped) setname is not in
        config["XSECS"].
    '''
    # Config loading - accepts either a path or a pre-loaded dict.
    if isinstance(configPath, str):
        config = OpenJSON(configPath)
    else:
        config = configPath
    lumi = config['lumi' + str(year)]
    # BUG FIX: this was assigned to a misspelled `geneEventCount`, so the
    # division below raised NameError on an undefined `genEventCount`.
    genEventCount = config['NEVENTS'][str(year)]
    # Deal with unique ttbar cases: only the all-hadronic sample exists for
    # 2017/2018. (The former `year == '16'` branch was a no-op and is gone.)
    if setname == 'ttbar' and (year == '17' or year == '18'):
        setname = 'ttbar-allhad'
    try:
        xsec = config['XSECS'][setname]
    except KeyError:
        raise KeyError('Key "%s" does not exist in config["XSECS"]' % setname)
    return (xsec * lumi) / genEventCount
default=False,
                  dest='select',
                  help='Whether to run the selection. If False, will attempt to recycle previous run histograms.')
# NOTE(review): the opening parser.add_option( call for the arguments above
# is outside this chunk's view.
(options, args) = parser.parse_args()

###########################################
# Establish some global variables for use #
###########################################
plotdir = 'plots/'  # this is where we'll save your plots
if not os.path.exists(plotdir):
    os.makedirs(plotdir)

# Remote xrootd location of the pre-made input rootfiles.
rootfile_path = 'root://cmsxrootd.fnal.gov///store/user/cmsdas/2021/long_exercises/BstarTW/rootfiles'
config = OpenJSON('bstar_config.json')
cuts = config['CUTS'][options.year]  # cuts for the chosen year

CompileCpp("TIMBER/Framework/include/common.h")
CompileCpp('bstar.cc')  # has the c++ functions we need when looping over the RDataFrame

# Sets we want to process and some nice naming for our plots
signal_names = ['signalLH%s' % (mass) for mass in [2000]]  # range(1400,4200,600)]
bkg_names = [
    'singletop_tW', 'singletop_tWB', 'ttbar',
    'QCDHT700', 'QCDHT1000', 'QCDHT1500', 'QCDHT2000'
]
##########MY ASSIGNED BACKGROUNDS##############
import json
from TIMBER.Tools.Common import OpenJSON, DictCopy


def polyStrGen(xorder, yorder):
    """Build a 2D polynomial formula string with sequentially numbered
    parameters ('@N'), of the form max(0, 0.1*(x-poly)*(y-poly)).

    @param xorder (int): Highest power of x.
    @param yorder (int): Highest power of y.

    Returns:
        tuple: (formula string, number of parameters counted by the loops)
    """
    xparts = ['@0']   # constant term of the x polynomial
    yparts = ['1']    # y polynomial is normalized to a leading 1
    totalParams = 0
    # NOTE(review): totalParams starts at 0 while '@0' is already used as
    # the constant term above, so the first x term is also '@0*x**1' —
    # confirm this parameter-index reuse is intended (a start value of 1
    # may have been meant).
    for i in range(1, xorder + 1):
        xparts.append('@{0}*x**{1}'.format(totalParams, i))
        totalParams += 1
    for i in range(1, yorder + 1):
        yparts.append('@{0}*y**{1}'.format(totalParams, i))
        totalParams += 1
    return 'max(0,0.1*(%s)*(%s))' % ('+'.join(xparts), '+'.join(yparts)), totalParams


template = OpenJSON('template.json')
# Default settings applied to each generated fit parameter.
parameterDict = {
    "NOMINAL": 0.1,
    "MIN": -10.0,
    "MAX": 10.0,
    "ERROR": 0.05
}
parameterDict0 = DictCopy(parameterDict)
parameterDict0['MIN'] = -10.0  # currently identical to parameterDict's MIN; kept as an independent copy
# Short labels used in output naming for each tagger.
shortNames = {
    'deepTag': 'DAK8',
    'particleNet': 'PN'
}
for tagger in ['deepTag', 'particleNet']:
default='',
                    help='"Keep and drop" file for NanoAOD-tools')
# NOTE(review): the opening parser.add_argument( call for the argument above
# is outside this chunk's view.
parser.add_argument('--payload', metavar='IN', dest='payload',
                    default='',
                    help='Payload which bypasses other command line options')
parser.add_argument('--dry-run', action='store_true', dest='dryrun',
                    default=False,
                    help='Dry-run will not save to database')
args = parser.parse_args()

# A payload JSON overrides any matching command-line option already on args.
if args.payload != '':
    payload = OpenJSON(args.payload)
    for o in payload.keys():
        if hasattr(args, o):
            setattr(args, o, payload[o])

# Skeleton record for this run; the None fields are filled in after processing.
run_data = {
    "timestamp": GetTimeStamp(),
    "conditions": args.tag,
    "process_time": None,
    "process_maxmem": None,
    "rootfile": None
}

start_time = time.time()
if args.framework == "TIMBER":