class puProfile:
    """Cache-backed builder of MC pileup (true-interaction) profiles.

    Histograms of ``Pileup_nTrueInt`` are drawn from ``source_sample`` and
    persisted in a DirDB cache under ``cacheDir`` so that repeated requests
    with the same (selection, weight) are served from disk.
    """

    def __init__(self, source_sample, cacheDir=os.path.join(cache_directory, "puProfiles")):
        # Make sure the cache directory exists before DirDB touches it.
        if not os.path.isdir(cacheDir):
            os.makedirs(cacheDir)
        self.source_sample = source_sample
        self.cacheDir = cacheDir
        self.initCache(cacheDir)
        # 100 bins spanning 0..100 true interactions.
        self.binning = [100, 0, 100]
        self.draw_string = "Pileup_nTrueInt"

    def initCache(self, cacheDir):
        """Open (or create) the DirDB store that holds the PU profiles."""
        self.cache = DirDB(os.path.join(cacheDir, 'puProfilesDirDBCache'))

    def uniqueKey(self, *arg):
        '''No dressing required'''
        return arg

    def cachedTemplate(self, selection, weight='(1)', save=True, overwrite=False):
        """Return the PU profile for (selection, weight), using the cache.

        With ``overwrite=True`` the profile is always recomputed; with
        ``save=True`` a freshly computed profile is written back to the cache
        (the cache's return value is what gets returned in that case).
        """
        key = (selection, weight, self.source_sample.name)
        if (self.cache and self.cache.contains(key)) and not overwrite:
            # Cache hit: serve the stored histogram.
            result = self.cache.get(key)
            logger.info("Loaded MC PU profile from %s" % (self.cacheDir))
            logger.debug("Key used: %s result: %r" % (key, result))
        elif self.cache:
            # Cache miss (or forced overwrite): compute, then store.
            logger.info("Obtain PU profile for %s" % (key,))
            result = self.makeTemplate(selection=selection, weight=weight)
            if result:
                result = self.cache.add(key, result, overwrite=save)
                logger.info("Adding PU profile to cache for %s : %r" % (key, result))
            else:
                logger.warning("Couldn't create PU profile to cache for %s : %r" % (key, result))
        else:
            # No cache available at all: compute on the fly.
            result = self.makeTemplate(selection=selection, weight=weight)
        return result

    def makeTemplate(self, selection, weight='(1)'):
        """Draw the Pileup_nTrueInt histogram for ``selection``/``weight``.

        Returns the histogram normalized to unit integral. If the histogram
        is empty it is returned unscaled instead of raising ZeroDivisionError
        (the original code divided by Integral() unconditionally).
        """
        logger.info("Make PU profile for sample %s and selection %s and weight %s",
                    self.source_sample.name, selection, weight)
        h_source = self.source_sample.get1DHistoFromDraw(
            self.draw_string, self.binning,
            selectionString=selection, weightString=weight)
        integral = h_source.Integral()
        logger.info("PU histogram contains %s weighted events", integral)
        # Guard: Scale(1./0) would raise on an empty histogram.
        if integral > 0:
            h_source.Scale(1. / integral)
        else:
            logger.warning("PU histogram for %s is empty; returning unnormalized histogram.",
                           self.source_sample.name)
        return h_source
max_events = 30000 # Text on the plots tex = ROOT.TLatex() tex.SetNDC() tex.SetTextSize(0.04) tex.SetTextAlign(11) # align right # fire up the cache cache_dir_ = os.path.join(cache_dir, 'fake_cache') dirDB = DirDB(cache_dir_) pu_key = (triggerSelection, leptonSelection, jetSelection, args.era, args.small, "pu") if dirDB.contains(pu_key) and not args.overwrite: reweight_histo = dirDB.get(pu_key) logger.info("Found PU reweight in cache %s", cache_dir_) else: logger.info("Didn't find PU reweight histo %r. Obtaining it now.", pu_key) data_nvtx_histo = data_sample.get1DHistoFromDraw( "PV_npvsGood", [100 / 5, 0, 100], selectionString=data_preselectionString, weightString="weight") data_nvtx_histo.Scale(1. / data_nvtx_histo.Integral()) mc_histos = [ s.get1DHistoFromDraw("PV_npvsGood", [100 / 5, 0, 100], selectionString=mc_preselectionString, weightString="weight*reweightBTag_SF") for s in mc ]
#sample.scale /= sample.normalization max_events = 30000 # Text on the plots tex = ROOT.TLatex() tex.SetNDC() tex.SetTextSize(0.04) tex.SetTextAlign(11) # align right # fire up the cache cache_dir_ = os.path.join(cache_dir, 'fake_pu_cache') dirDB = DirDB(cache_dir_) pu_key = ( triggerSelection, leptonSelection, jetSelection, args.era, args.small) if dirDB.contains( pu_key ) and not args.overwrite: reweight_histo = dirDB.get( pu_key ) logger.info( "Found PU reweight in cache %s", cache_dir_ ) else: logger.info( "Didn't find PU reweight histo %r. Obtaining it now.", pu_key) data_selectionString = "&&".join([getFilterCut(isData=True, year=year), triggerSelection, leptonSelection, jetSelection]) data_nvtx_histo = data_sample.get1DHistoFromDraw( "PV_npvsGood", [100, 0, 100], selectionString=data_selectionString, weightString = "weight" ) data_nvtx_histo.Scale(1./data_nvtx_histo.Integral()) mc_selectionString = "&&".join([getFilterCut(isData=False, year=year), triggerSelection, leptonSelection, jetSelection]) mc_histos = [ s.get1DHistoFromDraw( "PV_npvsGood", [100, 0, 100], selectionString=mc_selectionString, weightString = "weight*reweightBTag_SF") for s in mc] mc_histo = mc_histos[0] for h in mc_histos[1:]: mc_histo.Add( h ) mc_histo.Scale(1./mc_histo.Integral())
# Tail of a Plot(...) constructor (call opens before this chunk) defining
# the MC MET distribution.
name = "met_mc", texX = 'p_{T}^{miss} (GeV)', texY = 'Number of Events / 20 GeV' if args.normalizeBinWidth else "Number of Events",
    binning = [400/20,0,400],
    stack = stack_mc,
    # attribute = TreeVariable.fromString('met_pt/F'),
    # For jet-related systematic variations read the varied MET branch,
    # otherwise fall back to the nominal met_pt.
    attribute = TreeVariable.fromString( "met_pt_%s/F" % args.variation ) if args.variation in jet_systematics else TreeVariable.fromString('met_pt/F'),
    selectionString = selectionModifier(cutInterpreter.cutString(args.selection)) if selectionModifier is not None else None,
    weight = mc_weight )
plots.append( met_mc )
############################################################################
# Check DB for existing plots
if args.variation is not None:
    key = (args.era, mode, args.variation)
    if dirDB.contains(key) and not args.overwrite:
        # Cache hit: restore normalisations and the per-plot histograms.
        normalisation_mc, normalisation_data, histos = dirDB.get( key )
        for i_p, h_s in enumerate(histos):
            plots[i_p].histos = h_s
        logger.info( "Loaded normalisations and histograms for %s in mode %s from cache.", args.era, mode)
    else:
        logger.info( "Obtain normalisations and histograms for %s in mode %s.", args.era, mode)
        # Per-sample MC yields for the (possibly systematically modified)
        # selection, weighted with the variation under study.
        normalization_selection_string = selectionModifier(cutInterpreter.cutString(args.selection))
        mc_normalization_weight_string = MC_WEIGHT(variations[args.variation], returntype='string')
        normalisation_mc = {s.name :s.scale*s.getYieldFromDraw(selectionString = normalization_selection_string, weightString = mc_normalization_weight_string)['val'] for s in mc}
        # Python 2 print statement (debug output of the per-sample yields).
        for s in mc: print normalisation_mc[s.name]
        # Data yield is only needed for the central (non-varied) configuration.
        if args.variation == 'central':
            normalisation_data = data_sample.scale*data_sample.getYieldFromDraw( selectionString = normalization_selection_string, weightString = data_weight_string)['val']