def trigger_selection(selection, df, cfg):
    """Add MET-filter and trigger decisions for this chunk to *selection*.

    Registers the cuts 'filt_met', 'trig_met', 'trig_ele', 'trig_photon',
    'trig_mu' plus one cut per HT gamma-efficiency trigger, then returns
    the same selection object.
    """
    everything = np.zeros(df.size) == 0   # all-True event mask
    nothing = ~everything                 # all-False event mask
    dataset = df['dataset']

    if cfg.RUN.SYNC:
        # Synchronization mode: accept every event for every trigger cut.
        for cut_name in ('filt_met', 'trig_met', 'trig_ele', 'trig_mu', 'trig_photon'):
            selection.add(cut_name, everything)
        return selection

    is_real_data = df['is_data']

    # MET filters differ between data and simulation.
    filter_list = cfg.FILTERS.DATA if is_real_data else cfg.FILTERS.MC
    selection.add('filt_met', mask_and(df, filter_list))
    selection.add('trig_met', mask_or(df, cfg.TRIGGERS.MET))

    # Electron trigger, with overlap removal between the primary datasets.
    main_ele = cfg.TRIGGERS.ELECTRON.SINGLE
    backup_ele = cfg.TRIGGERS.ELECTRON.SINGLE_BACKUP
    if not is_real_data:
        # MC: simple OR of main and backup triggers.
        trig_ele = mask_or(df, backup_ele) | mask_or(df, main_ele)
    elif "SinglePhoton" in dataset:
        # Backup photon trigger, but not main electron trigger
        trig_ele = mask_or(df, backup_ele) & (~mask_or(df, main_ele))
    elif "SingleElectron" in dataset:
        # Main electron trigger, no check for backup
        trig_ele = mask_or(df, main_ele)
    elif "EGamma" in dataset:
        # 2018 has everything in one stream, so simple OR
        trig_ele = mask_or(df, backup_ele) | mask_or(df, main_ele)
    else:
        trig_ele = nothing
    selection.add('trig_ele', trig_ele)

    # Photon trigger: only meaningful in MC or in the photon/EGamma streams.
    if is_real_data and not (('SinglePhoton' in dataset) or ('EGamma' in dataset)):
        trig_photon = nothing
    else:
        trig_photon = mask_or(df, cfg.TRIGGERS.PHOTON.SINGLE)
    selection.add('trig_photon', trig_photon)

    # HT triggers used for the photon trigger efficiency measurement:
    # real decision only in MC or the JetHT stream, pass-through otherwise.
    for trgname in cfg.TRIGGERS.HT.GAMMAEFF:
        if is_real_data and ('JetHT' not in dataset):
            selection.add(trgname, np.ones(df.size) == 1)
        else:
            selection.add(trgname, mask_or(df, [trgname]))

    # Muon trigger
    selection.add('trig_mu', mask_or(df, cfg.TRIGGERS.MUON.SINGLE))

    return selection
def process(self, df):
    """Fill sigma-ieta-ieta distributions for photon purity studies.

    For each event passing the MET filters, lumi mask, jet requirement,
    single-photon trigger and MET < 60 GeV, fills the 'sieie' histogram
    for three photon ID categories: medium, medium without the sieie cut,
    and medium without sieie with inverted isolation.

    :param df: coffea-style dataframe for one chunk of events.
    :returns: the filled accumulator.
    :raises ValueError: for a data dataset whose year has no lumi JSON.
    """
    self._configure(df)
    output = self.accumulator.identity()
    dataset = df['dataset']

    # Lumi mask (golden-JSON certification) — data only.
    year = extract_year(dataset)
    if is_data(dataset):
        # Renamed from `json` to avoid shadowing the stdlib module name.
        if year == 2016:
            json_path = bucoffea_path(
                'data/json/Cert_271036-284044_13TeV_ReReco_07Aug2017_Collisions16_JSON.txt'
            )
        elif year == 2017:
            json_path = bucoffea_path(
                'data/json/Cert_294927-306462_13TeV_EOY2017ReReco_Collisions17_JSON_v1.txt'
            )
        elif year == 2018:
            json_path = bucoffea_path(
                'data/json/Cert_314472-325175_13TeV_17SeptEarlyReReco2018ABC_PromptEraD_Collisions18_JSON.txt'
            )
        else:
            # Previously this fell through with `json_path` unassigned and
            # crashed later with an opaque NameError; fail loudly instead.
            raise ValueError(
                f'No lumi JSON available for dataset {dataset} (year {year})'
            )
        lumi_mask = LumiMask(json_path)(df['run'], df['luminosityBlock'])
    else:
        lumi_mask = np.ones(df.size) == 1  # MC: all events certified

    # MET filters (cfg is presumably the module-level config set up by
    # self._configure — NOTE(review): confirm, it is not read from self).
    if is_data(dataset):
        filt_met = mask_and(df, cfg.FILTERS.DATA)
    else:
        filt_met = mask_and(df, cfg.FILTERS.MC)

    # Single-photon trigger: threshold changed between 2016 and 2017/18.
    if year == 2016:
        trigger = 'HLT_Photon175'
    else:
        trigger = 'HLT_Photon200'

    photons = setup_photons(df)

    # Tight, hard, central jets not overlapping a photon.
    ak4 = setup_jets(df)
    ak4 = ak4[
        object_overlap(ak4, photons) \
        & ak4.tightId \
        & (ak4.pt > 100) \
        & (ak4.abseta < 2.4)
    ]

    # Event-level selection: filters, certification, >=1 good jet,
    # trigger fired, and low MET (purity control region).
    event_mask = filt_met \
        & lumi_mask \
        & (ak4.counts > 0) \
        & df[trigger] \
        & (df['MET_pt'] < 60)

    # Generator weight (unit weight for data).
    weights = processor.Weights(size=df.size, storeIndividual=True)
    if is_data(dataset):
        weights.add('gen', np.ones(df.size))
    else:
        weights.add('gen', df['Generator_weight'])

    photon_kinematics = (photons.pt > 200) & (photons.barrel)

    # One fill per photon ID category; hoisted into a loop so each ID
    # mask is evaluated once instead of twice (order of fills preserved).
    categories = (
        ('medium', photons.mediumId),
        ('medium_nosieie', medium_id_no_sieie(photons)),
        ('medium_nosieie_invertiso', medium_id_no_sieie_inv_iso(photons)),
    )
    for cat, id_mask in categories:
        selected = photons[photon_kinematics & id_mask]
        vals = selected.sieie[event_mask]
        pt = selected.pt[event_mask]
        # NOTE(review): coffea's Hist.fill reserved keyword is `weight`;
        # `weights=` is kept as in the original — verify against the
        # 'sieie' histogram definition.
        output['sieie'].fill(dataset=dataset,
                             cat=cat,
                             sieie=vals.flatten(),
                             pt=pt.flatten(),
                             weights=weight_shape(
                                 vals, weights.weight()[event_mask]))

    # Keep track of weight sum for MC normalization.
    if not is_data(dataset):
        output['sumw'][dataset] += df['genEventSumw']
        output['sumw2'][dataset] += df['genEventSumw2']

    return output