def remove_jets(parts, jets):
    """Return a fj.vectorPJ with the particles of `parts` that are NOT
    constituents of any jet in `jets`.

    Particles are identified by their fastjet user_index(); constituents
    with a negative user_index (e.g. ghosts/background markers) are never
    added to the rejection set.

    :param parts: iterable of fj.PseudoJet (the full particle list)
    :param jets: iterable of fj.PseudoJet jets whose constituents to remove
    :return: fj.vectorPJ of the surviving particles
    """
    psjv = fj.vectorPJ()
    # FIX: use a set for O(1) membership tests (was a list -> O(n*m)),
    # and a plain loop instead of a list comprehension used for side effects.
    reject_indexes = {
        p.user_index()
        for j in jets
        for p in j.constituents()
        if p.user_index() >= 0
    }
    # print ('indexes leading jets:', len(reject_indexes), sorted(reject_indexes))
    for p in parts:
        if p.user_index() not in reject_indexes:
            psjv.push_back(p)
    return psjv
def __init__(self, **kwargs):
    """Set up the Boltzmann background-event generator.

    Defaults (overridable via kwargs): mean_pt=0.7, multiplicity=1,
    max_eta=1, max_pt=100, min_pt=0.15. Builds the pT sampling function
    and QA histograms (detached from any ROOT directory).
    """
    self.configure_from_args(mean_pt=0.7, multiplicity=1, max_eta=1, max_pt=100, min_pt=0.15)
    super(BoltzmannEvent, self).__init__(**kwargs)
    # a negative minimum pT makes no sense - clamp to zero
    if self.min_pt < 0:
        self.min_pt = 0
    self.particles = fj.vectorPJ()
    # Boltzmann-like pT spectrum; parameter [0] is the mean pT
    self.funbg = ROOT.TF1("funbg", "2. / [0] * x * TMath::Exp(-(2. / [0]) * x)", self.min_pt, self.max_pt, 1)
    self.funbg.SetParameter(0, self.mean_pt)
    self.funbg.SetNpx(1000)
    self.ROOT_random = ROOT.TRandom()
    # QA histograms for the generated particles
    _pi = ROOT.TMath.Pi()
    self.histogram_pt = ROOT.TH1F("BoltzmannEvent_pt", "BoltzmannEvent_pt;p_{T} (GeV/c)", 100, logbins(1e-1, self.max_pt, 100))
    self.histogram_eta = ROOT.TH1F("BoltzmannEvent_eta", "BoltzmannEvent_eta;#eta", 100, -self.max_eta, self.max_eta)
    self.histogram_phi = ROOT.TH1F("BoltzmannEvent_phi", "BoltzmannEvent_phi;#varphi (rad)", 100, -_pi, _pi)
    # detach from the current ROOT file so they are not auto-written/deleted
    for _h in (self.histogram_pt, self.histogram_eta, self.histogram_phi):
        _h.SetDirectory(0)
    self.nEvent = 0
def load_event(self, offset=0):
    """Load the next event from the current file and return its particles.

    Each particle gets user_index = offset + position-in-event. When the
    current file is exhausted, it is dropped and the call recurses so that
    the next file is opened transparently.

    :param offset: base value for the particles' user_index
    :return: fj.vectorPJ of particles, or None if no file could be opened
    """
    self.particles = None
    if self.file_io is None:
        self.open_file()
    if self.file_io is None:
        print('[e] unable to load the data file')
        return None
    # current file exhausted: rewind the counter, force a re-open and retry
    if self.current_event_in_file >= self.current_file_number_of_events():
        self.current_event_in_file = 0
        self.file_io = None
        return self.load_event(offset=offset)
    self.event = self.file_io.df_events[self.current_event_in_file]
    self.current_event_in_file += 1
    self.particles = fj.vectorPJ()
    for ip, p in enumerate(self.event.particles):
        # tag each particle so it can be traced back after embedding
        p.set_user_index(offset + ip)
        self.particles.push_back(p)
    return self.particles
def example():
    """Demonstrate RTreeWriter with all the supported branch value types:
    scalars, lists (enumerated), tuples, PseudoJets, vectorPJ and dicts."""
    tw = RTreeWriter()
    print(tw)
    # same scalar branch filled twice (int then float)
    tw.fill_branch('b', 10)
    tw.fill_branch('b', 12.)
    # enumerated list -> one sub-branch per element
    tw.fill_branch('bl', [1, 2, 3], do_enumerate=True)
    tw.fill_branch('bt', (10, 20, 30.))
    # a PseudoJet expands into pt/phi/eta sub-branches
    psj = fj.PseudoJet()
    for _ in range(2):
        tw.fill_branch('jet', psj)
    # a vectorPJ of three identical jets
    v = fj.vectorPJ()
    _v = fj.PseudoJet(1, 2, 3, 4)
    for _ in range(3):
        v.push_back(_v)
    tw.fill_branch('jets', v)
    # dict -> one sub-branch per key
    tw.fill_branch('bd', {'x': 10, 'y': 20, 'z': 30.})
    tw.fill_tree()
    tw.write_and_close()
def load_event_with_loc(self, run_number=-1, ev_id=-1, offset=0):
    """Load the event uniquely identified by (run_number, ev_id).

    Returns the event's particles as fj.vectorPJ with
    user_index = offset + position-in-event, or None when no file io is
    set or the (run_number, ev_id) pair does not match exactly one event.
    """
    self.particles = None
    if self.file_io is None:
        print('[e] unable to load the data because no file io is set')
        return None
    _events_match = [
        e for e in self.file_io.df_events
        if e.ev_id == ev_id and e.run_number == run_number
    ]
    # require exactly one match - anything else is reported and rejected
    if len(_events_match) != 1:
        print('[w] requested ev_id:', ev_id, "run_number:", run_number, 'number of matches', len(_events_match))
        return None
    self.event = _events_match[0]
    self.particles = fj.vectorPJ()
    for ip, p in enumerate(self.event.particles):
        # tag each particle so it can be traced back after embedding
        p.set_user_index(offset + ip)
        self.particles.push_back(p)
    return self.particles
def jets_as_psj_vector(self):
    """Return self.jets repackaged as a fj.vectorPJ (also cached on
    self.psj_jet_vector).

    :return: fj.vectorPJ containing all jets in self.jets
    """
    self.psj_jet_vector = fj.vectorPJ()
    # _tmp = [self.psj_jet_vector.push_back(j) for j in self.jets if not j.is_pure_ghost()]
    # FIX: plain loop instead of a list comprehension used only for side effects
    for j in self.jets:
        self.psj_jet_vector.push_back(j)
    return self.psj_jet_vector
def main():
    # NOTE(review): source arrived with all formatting collapsed; this is a
    # faithful re-indentation - tokens and logic unchanged.
    #
    # Generates PYTHIA8 events on the fly, clusters charged final-state
    # particles into anti-kt jets, embeds the selected signal jets into a
    # background event (data-driven or Boltzmann-generated), optionally runs
    # constituent subtraction (event-wise or jet-by-jet), and writes
    # data/matched jet trees to a ROOT file.
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    parser.add_argument('--ignore-mycfg', help="ignore some settings hardcoded here", default=False, action='store_true')
    parser.add_argument('--output', default="output.root", type=str)
    parser.add_argument('--alpha', default=0, type=float)
    parser.add_argument('--dRmax', default=0.0, type=float)
    parser.add_argument('--zcut', default=0.1, type=float)
    parser.add_argument('--overwrite', help="overwrite output", default=False, action='store_true')
    parser.add_argument('--embed', help='run embedding from a file list', default='', type=str)
    parser.add_argument('--SDsignal', help='embed only SD signal prongs', default=False, action='store_true')
    parser.add_argument('--SDsignal-single', help='embed only SD signal - only leading prong!', default=False, action='store_true')
    parser.add_argument('--efficiency', help='apply charged particle efficiency', default=False, action='store_true')
    parser.add_argument('--benchmark', help='benchmark pthat setting - 80 GeV', default=False, action='store_true')
    parser.add_argument('--csjet', help='constituent subtration jet-by-jet', default=False, action='store_true')
    args = parser.parse_args()

    # Derive an output name encoding the settings, but only when the user
    # kept the default --output value.
    if args.output == 'output.root':
        args.output = 'output_alpha_{}_dRmax_{}_SDzcut_{}.root'.format(args.alpha, args.dRmax, args.zcut)
        if args.py_seed >= 0:
            args.output = 'output_alpha_{}_dRmax_{}_SDzcut_{}_seed_{}.root'.format(args.alpha, args.dRmax, args.zcut, args.py_seed)
        if args.embed:
            args.output = args.output.replace('.root', '_emb.root')
        if args.efficiency:
            args.output = args.output.replace('.root', '_effi.root')
        if args.SDsignal:
            args.output = args.output.replace('.root', '_SDsignal.root')
        if args.SDsignal_single:
            args.output = args.output.replace('.root', '_SDsignal_single.root')
        if args.csjet:
            args.output = args.output.replace('.root', '_csjet.root')

    # refuse to clobber an existing output unless --overwrite was given
    if os.path.isfile(args.output):
        if not args.overwrite:
            print('[i] output', args.output, 'exists - use --overwrite to do just that...')
            return

    print(args)

    # alice specific
    max_eta = 0.9

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    jet_R0 = 0.4
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    print(jet_def)

    mycfg = []
    if args.benchmark:
        # fixed pthat window for benchmarking; jets restricted to 80-100 GeV
        mycfg = ['PhaseSpace:pThatMin = 80', 'PhaseSpace:pThatMax = -1']
        jet_selector = fj.SelectorPtMin(80.0) & fj.SelectorPtMax(100.0) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
        # jet_selector_cs = fj.SelectorPtMin(50.0) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
    else:
        # pt-biased event generation; looser jet selection
        args.py_biaspow = 4
        args.py_biasref = 10
        jet_selector = fj.SelectorPtMin(20) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
        # jet_selector_cs = fj.SelectorPtMin(50.0) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)

    if args.ignore_mycfg:
        mycfg = []

    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if not pythia:
        print("[e] pythia initialization failed.")
        return

    # SoftDrop groomer with beta=0 and the configured zcut
    sd_zcut = args.zcut
    sd = fjcontrib.SoftDrop(0, sd_zcut, jet_R0)

    jarho = JetAnalysisWithRho(jet_R=jet_R0, jet_algorithm=fj.antikt_algorithm, particle_eta_max=max_eta)
    # NOTE(review): `ja` appears unused in the rest of this function - kept as-is.
    ja = JetAnalysis(jet_R=jet_R0, jet_algorithm=fj.antikt_algorithm, particle_eta_max=max_eta)

    # background source: embed from data files when --embed is given,
    # otherwise generate a Boltzmann background event
    be = None
    embd = None
    if len(args.embed) > 0:
        embd = DataBackgroundIO(file_list=args.embed)
        print(embd)
    else:
        be = BoltzmannEvent(mean_pt=0.6, multiplicity=2000 * max_eta * 2, max_eta=max_eta, max_pt=100)
        print(be)

    # event-wise constituent subtractor (enabled via --dRmax > 0)
    cs = None
    if args.dRmax > 0:
        cs = CEventSubtractor(alpha=args.alpha, max_distance=args.dRmax, max_eta=max_eta, bge_rho_grid_size=0.25, max_pt_correct=100)
        print(cs)

    # jet-by-jet constituent subtractor (enabled via --csjet)
    csjet = None
    if args.csjet:
        csjet = CSubtractorJetByJet(max_eta=max_eta, bge_rho_grid_size=0.25)
        print(csjet)

    parts_selector = fj.SelectorAbsEtaMax(max_eta)

    if args.nev < 1:
        args.nev = 1

    # output file with two trees: 't' (matched) and 'te' (per-event data)
    outf = ROOT.TFile(args.output, 'recreate')
    outf.cd()
    t = ROOT.TTree('t', 't')
    tw = RTreeWriter(tree=t)
    te = ROOT.TTree('te', 'te')
    twe = RTreeWriter(tree=te)

    # effi_pp = AliceChargedParticleEfficiency(csystem='pp')
    effi_PbPb = None
    if args.efficiency:
        effi_PbPb = AliceChargedParticleEfficiency(csystem='PbPb')
        print(effi_PbPb)

    ### EVENT LOOP STARTS HERE
    for iev in tqdm.tqdm(range(args.nev)):
        if not pythia.next():
            continue
        # charged final-state particles within |eta| < max_eta
        parts_pythia = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged])
        parts_gen = parts_selector(parts_pythia)
        # optionally apply the PbPb charged-particle efficiency
        if effi_PbPb:
            parts = effi_PbPb.apply_efficiency(parts_gen)
        else:
            parts = parts_gen
        signal_jets = fj.sorted_by_pt(jet_selector(jet_def(parts)))
        if len(signal_jets) < 1:
            continue
        for sjet in signal_jets:
            if args.SDsignal or args.SDsignal_single:
                # replace the signal jet by its SoftDrop prong(s)
                sd_sjet = sd.result(sjet)
                pe1 = fj.PseudoJet()
                pe2 = fj.PseudoJet()
                has_parents = sd_sjet.has_parents(pe1, pe2)
                if has_parents:
                    jparts = fj.vectorPJ()
                    pe1.set_user_index(0)
                    pe2.set_user_index(1)
                    if args.SDsignal_single:
                        # keep only the harder of the two prongs
                        if pe1.pt() > pe2.pt():
                            jparts.push_back(pe1)
                        else:
                            jparts.push_back(pe2)
                    else:
                        jparts.push_back(pe1)
                        jparts.push_back(pe2)
                    # recluster the kept prong(s); require exactly one jet
                    sjets = fj.sorted_by_pt(jet_selector(jet_def(jparts)))
                    if len(sjets) == 1:
                        sjet = sjets[0]
                    else:
                        continue
                else:
                    continue
            # background particles carry user_index >= 10000 so signal
            # constituents (indices < 10000) remain identifiable
            if embd:
                bg_parts = embd.load_event(offset=10000)
                # for p in bg_parts:
                # 	print(p.user_index())
            else:
                bg_parts = be.generate(offset=10000)
                # for p in bg_parts:
                # 	print(p.user_index())
            # embed: append the signal-jet constituents to the background
            full_event = bg_parts
            tmp = [full_event.push_back(psj) for psj in sjet.constituents()]
            if cs:
                # event-wise constituent subtraction, then re-cluster
                cs_parts = cs.process_event(full_event)
                rho = cs.bge_rho.rho()
                jarho.analyze_event(cs_parts)
                tmp = [fill_tree_data(ej, twe, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen()) for ej in jarho.jets]
                tmp = [fill_tree_matched(sjet, ej, tw, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen()) for ej in jarho.jets]
            else:
                jarho.analyze_event(full_event)
                rho = jarho.rho
                if csjet:
                    # jet-by-jet constituent subtraction
                    #_csjet = fjcontrib.ConstituentSubtractor(jarho.bg_estimator)
                    # subtr_jets = [_csjet.result(ej) for ej in jarho.jets]
                    csjet.set_event_particles(full_event)
                    #subtr_jets = [csjet.process_jet(ej) for ej in jarho.jets]
                    #print ('jbyj cs', len(subtr_jets), 'from', len(jarho.jets))
                    #subtr_jets_wconstits = [_j for _j in subtr_jets if _j.has_constituents()]
                    #for _j in subtr_jets_wconstits:
                    #	print(len(_j.constituents()))
                    subtr_jets_wconstits = csjet.process_jets(jarho.jets)
                    japerjet = JetAnalysisPerJet(jet_R=jet_R0, jet_algorithm=fj.antikt_algorithm, particle_eta_max=max_eta, input_jets=subtr_jets_wconstits)
                    # for _j in japerjet.jets:
                    # 	for _c in _j.constituents():
                    # 		if _c.user_index() >= 0:
                    # 			print('user index kept?', _c.user_index())
                    # 	# else:
                    # 	# 	print('user index kept?', _c.user_index(), _c.pt())
                    # 	_sd_j = sd.result(_j)
                    # https://phab.hepforge.org/source/fastjetsvn/browse/contrib/contribs/RecursiveTools/trunk/Recluster.cc L 270
                    # tmp = [fill_tree_matched(sjet, ej, tw, sd, rho, iev, pythia.info.sigmaGen()) for ej in subtr_jets_wcs]
                    tmp = [fill_tree_data(ej, twe, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen()) for ej in japerjet.jets]
                    tmp = [fill_tree_matched(sjet, ej, tw, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen()) for ej in japerjet.jets]
                else:
                    # no subtraction: fill from the rho-aware analysis directly
                    tmp = [fill_tree_data(ej, twe, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen()) for ej in jarho.jets]
                    tmp = [fill_tree_matched(sjet, ej, tw, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen()) for ej in jarho.jets]

    pythia.stat()

    outf.Write()
    outf.Close()
    print('[i] written', outf.GetName())
class RTreeWriter(MPBase):
    """Write arbitrarily-typed python values into ROOT TTree branches.

    Every branch is backed by a std::vector<float>. fill_branch() dispatches
    on the value's type: scalars fill directly; tuples/lists/fj.vectorPJ are
    flattened (or enumerated into indexed sub-branches); dicts become
    '<bname>_<key>' sub-branches; fastjet PseudoJet and LundDeclustering
    objects are expanded into their kinematic components. Call fill_tree()
    once per entry and write_and_close() at the end.
    """

    # type objects cached once at class-definition time for dispatch
    _fj_psj_type = type(fj.PseudoJet())
    _fj_psj_vector_type = type(fj.vectorPJ())
    _fj_LundDeclustering_type = get_LundDeclusteringType()
    # _fj_sdinfo = type(fjcontrib.SDinfo())

    def __init__(self, **kwargs):
        """Configure the writer.

        Keyword args: tree=, tree_name=, name=, file_name=, fout=.
        If no tree is given, a TFile is used (a new one named `file_name`
        unless `fout` is provided) and a TTree 't' + name is created in it.
        """
        self.configure_from_args(tree=None, tree_name=None, name="RTreeWriter", file_name="RTreeWriter.root", fout=None)
        super(RTreeWriter, self).__init__(**kwargs)
        self._warnings = []
        if self.tree is None:
            if self.fout is None:
                print('[i] new file {}'.format(self.file_name))
                self.fout = ROOT.TFile(self.file_name, 'recreate')
                self.fout.cd()
            else:
                self.name = self.fout.GetName()
                self.file_name = self.name
                self.fout.cd()
            if self.tree_name is None:
                self.tree_name = 't' + self.name
            self.tree = ROOT.TTree(self.tree_name, self.tree_name)
        # bname -> std::vector('float') buffer bound to the branch
        self.branch_containers = {}

    def add_warning(self, s):
        """Record a warning once; all warnings are printed in __del__."""
        if s not in self._warnings:
            self._warnings.append(s)

    def _fill_branch(self, bname, value):
        """Append one float to branch `bname`, creating the branch lazily."""
        b = self.tree.GetBranch(bname)
        if not b:
            print('[i] RTreeWriter {} tree {}: creating branch [{}]'.format(self.name, self.tree.GetName(), bname))
            self.branch_containers[bname] = ROOT.std.vector('float')()
            b = self.tree.Branch(bname, self.branch_containers[bname])
        if b:
            # print('filling branch:', bname, 'at', b)
            self.branch_containers[bname].push_back(value)

    def fill_branches_attribs(self, o, attr_list=None, prefix=''):
        """Fill one branch per attribute of `o` (all of o.__dict__ if no
        attr_list is given), branch names prefixed with `prefix`."""
        # FIX: mutable default argument ([]) replaced by a None sentinel;
        # an explicitly passed empty list behaves exactly as before.
        if not attr_list:
            attr_list = o.__dict__
        for a in attr_list:
            self.fill_branch(prefix + a, getattr(o, a))

    def fill_branches(self, **kwargs):
        """Fill one branch per keyword argument (name=value)."""
        for a in kwargs:
            self.fill_branch(bname=a, value=kwargs[a])

    def fill_branch(self, bname, value, do_enumerate=False):
        """Fill branch `bname` from `value`, dispatching on its type.

        :param do_enumerate: for sequences, write indexed sub-branches
            '<bname>_<i>' instead of repeated fills of the same branch.
        Unknown types are converted with float() when possible; otherwise
        a warning is recorded and the value is ignored.
        """
        # print("FILL:", self.tree_name, bname, value)
        if float == type(value) or int == type(value):
            self._fill_branch(bname, value)
            return
        if type(value) in [tuple, list, self._fj_psj_vector_type]:
            if do_enumerate:
                r = [self.fill_branch('{}_{}'.format(bname, i), x) for i, x in enumerate(value)]
            else:
                r = [self.fill_branch(bname, x) for x in value]
            return
        if dict == type(value):
            # one sub-branch per dictionary key
            r = [self.fill_branch('{}_{}'.format(bname, i), x) for i, x in value.items()]
            return
        if self._fj_psj_type == type(value):
            # PseudoJet -> pt/phi/eta (+ area when available)
            if value.has_area():
                self.fill_branch(bname, {'pt': value.pt(), 'phi': value.phi(), 'eta': value.eta(), 'a': value.area()})
            else:
                self.fill_branch(bname, {'pt': value.pt(), 'phi': value.phi(), 'eta': value.eta()})
            return
        if self._fj_LundDeclustering_type == type(value):
            # Lund-plane splitting variables; 'tf' = z * Delta^2
            self.fill_branch(bname, {'m': value.m(), 'z': value.z(), 'Delta': value.Delta(), 'kt': value.kt(), 'kappa': value.kappa(), 'psi': value.psi(), 'p': value.pair(), 's1': value.harder(), 's2': value.softer(), 'tf': value.z() * value.Delta() * value.Delta()})
            return
        # note: bool is checked after int on purpose would not trigger there -
        # type(True) is bool, not int, so it lands here
        if bool == type(value):
            self._fill_branch(bname, value)
            return
        # last resort: anything float()-convertible (e.g. numpy scalars)
        try:
            _val = float(value)
            self._fill_branch(bname, _val)
            self.add_warning('converted {} to float for branch {}'.format(type(value), bname))
            return
        except (TypeError, ValueError):
            # FIX: was a bare `except:` - narrowed to what float() raises
            pass
        self.add_warning('do not know how to fill tree {} branch {} for type {} - ignored'.format(self.tree_name, bname, type(value)))

    def clear(self):
        """Reset all per-entry branch buffers."""
        for k in self.branch_containers:
            self.branch_containers[k].clear()

    def fill_tree(self):
        """Commit the current entry and clear the buffers."""
        self.tree.Fill()
        self.clear()

    def write_and_close(self):
        """Write the output file and close it."""
        print('[i] writing {}'.format(self.fout.GetName()))
        self.fout.Write()
        self.fout.Purge()
        self.fout.Close()

    def __del__(self):
        # FIX: getattr guard - __del__ can run on a partially-constructed
        # instance where _warnings was never assigned
        for w in getattr(self, '_warnings', []):
            pwarning(self.tree_name, ':', w)
def main():
    """Study of the leading-8-constituent pT fraction of anti-kt jets.

    Generates PYTHIA8 events, clusters charged particles into R=0.6 jets,
    and for each jet fills the fraction of its pT carried by its 8 hardest
    constituents. Each jet is then embedded into a background event
    (data-driven via --embed, otherwise Boltzmann-generated), re-clustered,
    and the same quantities are filled for the pT-matched embedded jet.
    Results are written to pythia_8jet.root.
    """
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    parser.add_argument('--embed', help='run embedding from a file list', default='', type=str)
    args = parser.parse_args()

    if args.nev < 1:
        args.nev = 1

    mycfg = []
    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    part_selection = [pythiafjext.kFinal, pythiafjext.kCharged]

    max_eta = 1.
    # background source: data embedding or Boltzmann generator
    be = None
    embd = None
    if len(args.embed) > 0:
        embd = DataBackgroundIO(file_list=args.embed)
        print(embd)
    else:
        be = BoltzmannEvent(mean_pt=0.6, multiplicity=2000 * max_eta * 2, max_eta=max_eta, max_pt=100)
        print(be)

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    jet_R0 = 0.6
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    jet_def_emb = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    jet_selector = fj.SelectorPtMin(10.0) & fj.SelectorAbsEtaMax(1)

    fout = ROOT.TFile('pythia_8jet.root', 'recreate')
    fout.cd()
    # profiles and 2D histograms for pp jets and for embedded jets
    hfraction = ROOT.TProfile('hfraction', 'hfraction', 10, 0, 100)
    hdpt = ROOT.TProfile('hdpt', 'hdpt', 10, 0, 100)
    hfraction2D = ROOT.TH2F('hfraction2D', 'hfraction', 10, 0, 100, 20, 0, 1)
    hdpt2D = ROOT.TH2F('hdpt2D', 'hdpt', 10, 0, 100, 20, -1, 0)
    hfraction_emb = ROOT.TProfile('hfraction_emb', 'hfraction', 10, 0, 100)
    hdpt_emb = ROOT.TProfile('hdpt_emb', 'hdpt', 10, 0, 100)
    hfraction2D_emb = ROOT.TH2F('hfraction2D_emb', 'hfraction', 10, 0, 100, 20, 0, 1)
    hdpt2D_emb = ROOT.TH2F('hdpt2D_emb', 'hdpt', 10, 0, 100, 20, -1, 0)

    for iev in tqdm.tqdm(range(args.nev)):
        if not pythia.next():
            continue
        # FIX: removed dead `parts = []` that was immediately overwritten
        parts = pythiafjext.vectorize_select(pythia, part_selection, 0, False)
        jets = fj.sorted_by_pt(jet_selector(jet_def(parts)))
        # background particles carry user_index >= 10000
        if embd:
            bg_parts = embd.load_event(offset=10000)
        else:
            bg_parts = be.generate(offset=10000)
        for j in jets:
            _sum_all, _sum_top_n, _fraction_pt = calc_n_lead(j, 8)
            hfraction.Fill(j.pt(), _fraction_pt)
            hdpt.Fill(j.pt(), (_sum_top_n - _sum_all) / _sum_all)
            hfraction2D.Fill(j.pt(), _fraction_pt)
            hdpt2D.Fill(j.pt(), (_sum_top_n - _sum_all) / _sum_all)
            # embed this jet's constituents into the background and recluster
            full_event = fj.vectorPJ()
            for psj in bg_parts:
                full_event.push_back(psj)
            for psj in j.constituents():
                full_event.push_back(psj)
            embd_jets = fj.sorted_by_pt(jet_selector(jet_def_emb(full_event)))
            for jemb in embd_jets:
                # require that at least half of the embedded jet's pt comes
                # from the original jet
                mpt = fjtools.matched_pt(jemb, j)
                if mpt < 0.5:
                    continue
                _sum_all_emb, _sum_top_n_emb, _fraction_pt_emb = calc_n_lead(jemb, 8)
                # fraction of the ORIGINAL jet's pt sum recovered by the
                # 8 hardest embedded constituents
                _fraction_pt_emb = _sum_top_n_emb / _sum_all
                # FIX: the *_emb histograms were filled with the pp-jet
                # _fraction_pt while _fraction_pt_emb was computed and never
                # used - fill with the embedded-jet fraction instead
                hfraction_emb.Fill(jemb.pt(), _fraction_pt_emb)
                hdpt_emb.Fill(jemb.pt(), (_sum_top_n_emb - _sum_all) / _sum_all)
                hfraction2D_emb.Fill(jemb.pt(), _fraction_pt_emb)
                hdpt2D_emb.Fill(jemb.pt(), (_sum_top_n_emb - _sum_all) / _sum_all)

    # fit the 2D distributions slice-by-slice with gaussians
    fg = ROOT.TF1('fg', 'gaus', 0, 1)
    fg.SetParameter(0, 1)
    fg.SetParameter(1, 0.8)
    fg.SetParameter(2, 0.1)
    hfraction2D.FitSlicesY(fg)
    hfraction2D_emb.FitSlicesY(fg)
    fgdpt = ROOT.TF1('fgdpt', 'gaus', -1, 0)
    fgdpt.SetParameter(0, 1)
    fgdpt.SetParameter(1, -0.2)
    fgdpt.SetParameter(2, 0.1)
    hdpt2D.FitSlicesY(fgdpt)
    hdpt2D_emb.FitSlicesY(fgdpt)

    fout.Write()
    fout.Close()
def main():
    # NOTE(review): source arrived with all formatting collapsed; this is a
    # faithful re-indentation - tokens and logic unchanged.
    #
    # Groomer test driver: iterates over a pp (detector-level Pythia) file
    # list and fills groomed-jet branches; when --datalistAA is given, each
    # pp event is additionally embedded into a PbPb event selected by an
    # npart centrality window, optionally constituent-subtracted, and
    # prong-matching branches are filled.
    parser = argparse.ArgumentParser(description='test groomers',
                                     prog=os.path.basename(__file__))
    parser.add_argument('-o', '--output-filename', default="output.root", type=str)
    parser.add_argument('datalistpp', help='run through a file list', default='', type=str)
    parser.add_argument('--datalistAA', help='run through a file list - embedding mode', default='', type=str)
    parser.add_argument('--jetR', default=0.4, type=float)
    parser.add_argument('--alpha', default=0, type=float)
    parser.add_argument('--dRmax', default=0.25, type=float)
    parser.add_argument('--overwrite', help="overwrite output", default=False, action='store_true')
    parser.add_argument('--jetptcut', help='remove jets below the cut', default=50., type=float)
    parser.add_argument('--nev', help='number of events to run', default=0, type=int)
    parser.add_argument('--max-eta', help='max eta for particles', default=0.9, type=float)
    parser.add_argument('--npart-cut', help='npart cut on centrality low,high hint:' + npart_cents, default='325,450', type=str)
    args = parser.parse_args()

    # parse the 'low,high' npart centrality window
    try:
        npart_min = int(args.npart_cut.split(',')[0])
        npart_max = int(args.npart_cut.split(',')[1])
    except:
        # NOTE(review): bare except kept as in the original; it catches any
        # parse failure (missing comma, non-integer) and aborts with code 1
        perror('unable to parse npart centrality selection - two integer numbers with a coma in-between needed - specified:', args.npart_cut)
        return 1

    # initialize constituent subtractor
    cs = None
    if args.dRmax > 0:
        cs = CEventSubtractor(alpha=args.alpha, max_distance=args.dRmax, max_eta=args.max_eta, bge_rho_grid_size=0.25, max_pt_correct=100)

    pp_data = DataIO(name='Sim Pythia Detector level', file_list=args.datalistpp, random_file_order=False, tree_name='tree_Particle_gen')
    ja_pp = JetAnalysis(jet_R=args.jetR, jet_algorithm=fj.antikt_algorithm, jet_pt_min=50., particle_eta_max=args.max_eta)

    # embedding mode: PbPb data source plus analyzers for the AA event and
    # for the embedded (pp+AA) event
    if args.datalistAA:
        aa_data = DataBackgroundIO(name='PbPb', file_list=args.datalistAA, tree_name='tree_Particle_gen')
        ja_emb = JetAnalysis(jet_R=args.jetR, jet_algorithm=fj.antikt_algorithm, jet_pt_min=50., particle_eta_max=args.max_eta)
        ja_aa = JetAnalysis(jet_R=args.jetR, jet_algorithm=fj.antikt_algorithm, jet_pt_min=50., particle_eta_max=args.max_eta)

    # selector used to estimate dN/deta within |eta| < 1
    dndeta_selector = fj.SelectorAbsEtaMax(1.)

    # tg = thg.ThermalGenerator()
    print(cs)

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()

    gout = GroomerOutput(args.output_filename, enable_aa_trees=bool(args.datalistAA))

    delta_t = 0
    start_t = time.time()
    iev = 1
    while pp_data.load_event(offset=0):
        iev = iev + 1
        if args.nev > 0:
            if iev > args.nev:
                iev = iev - 1
                break
        if iev % 1000 == 0:
            delta_t = time.time() - start_t
            pinfo('processing event', iev, ' - ev/sec =', iev / delta_t, 'elapsed =', delta_t)

        # find jets on detector level
        if len(pp_data.particles) < 1:
            pwarning(iev, 'pp event skipped N parts', len(pp_data.particles))
            continue
        ja_pp.analyze_event(pp_data.particles)
        if len(ja_pp.jets) < 1:
            continue

        # pinfo('n particles', len(pp_data.particles))
        # syst=0: pp (detector-level) jets; dndeta = N(|eta|<1) / 2
        dndeta0 = dndeta_selector(pp_data.particles)
        [gout.fill_branches(j, syst=0, dndeta=len(dndeta0) / 2.) for j in ja_pp.jets]
        # pinfo('n jets', len(ja_pp.jets))

        if args.datalistAA:
            # draw PbPb events until one falls inside the npart window and
            # has at least one particle
            # NOTE(review): if the AA source runs out, load_event presumably
            # returns a falsy value while aa_data.event keeps its previous
            # content - this loop relies on that; confirm against DataBackgroundIO
            while True:
                aa_loaded = aa_data.load_event(offset=10000)
                if aa_data.event.npart < npart_min or aa_data.event.npart >= npart_max:
                    continue
                else:
                    if len(aa_data.particles) < 1:
                        pwarning(iev, 'AA event skipped N parts', len(aa_data.particles))
                        continue
                    else:
                        break
            if aa_loaded:
                # syst=1: AA-only jets
                ja_aa.analyze_event(aa_data.particles)
                dndeta1 = dndeta_selector(aa_data.particles)
                if len(ja_aa.jets) > 0:
                    [gout.fill_branches(j, syst=1, dndeta=len(dndeta1) / 2.) for j in ja_aa.jets]
                else:
                    # pwarning('no jets in AA event?', len(ja_aa.jets), 'while dndeta=', len(dndeta1)/2.)
                    pass
                # embed: combine pp and AA particles into one event
                emb_event = fj.vectorPJ()
                [emb_event.push_back(p) for p in pp_data.particles]
                [emb_event.push_back(p) for p in aa_data.particles]
                rho = 0
                if cs:
                    # event-wise constituent subtraction before clustering
                    cs_parts = cs.process_event(emb_event)
                    rho = cs.bge_rho.rho()
                    ja_emb.analyze_event(cs_parts)
                else:
                    ja_emb.analyze_event(emb_event)
                # matches = [[jpp, jemb] for jpp in ja_pp.jets for jemb in ja_emb.jets if fjtools.matched_pt(jemb, jpp) > 0.5]
                # for mj in matches:
                # 	gout.fill_branches(mj[0], syst=2, dndeta=len(dndeta1)/2., rho=rho)
                # 	gout.fill_branches(mj[1], syst=3)
                # fill prong-matching branches for every (pp jet, embedded jet) pair
                [gout.fill_branches_prong_matching(j_pp, j_emb, dndeta=len(dndeta1) / 2., rho=rho) for j_pp in ja_pp.jets for j_emb in ja_emb.jets]

    delta_t = time.time() - start_t
    pinfo('processed events', iev, ' - ev/sec =', iev / delta_t, 'elapsed =', delta_t)
    gout.write()