def load_file(self, path):
    if not os.path.exists(path):
        pwarning('[w] file', path, 'does not exist.')
        return False
    self.event_df_orig = self.pd_tree(path, self.event_tree_name)
    if self.event_df_orig is None:
        return False
    # select only run number, event id, z_vtx and event-rejected info
    self.event_df = self.event_df_orig[[
        'run_number', 'ev_id', 'ev_id_ext', 'z_vtx_reco', 'is_ev_rej'
    ]].copy()
    self.event_df.reset_index(drop=True, inplace=True)
    self.d0_df = self.pd_tree(path, self.d0_tree_name)
    if self.d0_df is None:
        return False
    self.track_df = self.pd_tree(path, self.track_tree_name)
    if self.track_df is None:
        return False
    self.d0_gen_df = self.pd_tree(path, self.d0_gen_tree_name)
    if self.d0_gen_df is None:
        return False
    self.track_gen_df = self.pd_tree(path, self.track_gen_tree_name)
    if self.track_gen_df is None:
        return False
    return True
def pd_tree(self, path, tname, squery=None):
    try:
        tree = uproot.open(path)[tname]
    except Exception:
        pwarning('error getting', tname, 'from file:', path)
        return None
    if not tree:
        perror('Tree {} not found in file {}'.format(tname, path))
        return None
    df = tree.pandas.df()
    if squery:
        df = df.query(squery)
    df.reset_index(drop=True, inplace=True)
    return df
def pd_tree(self, path, tname, squery=None):
    try:
        tree = uproot.open(path)[tname]
    except Exception:
        pwarning('error getting', tname, 'from file:', path)
        return None
    if not tree:
        perror('Tree {} not found in file {}'.format(tname, path))
        return None
    df = uproot.concatenate(tree, library="pd")
    if squery:
        # df.query(squery, inplace=True)
        df = df.query(squery)
    df.reset_index(drop=True, inplace=True)
    return df
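# --- hedged usage sketch (not part of the class above) -----------------------
# Shows how a single tree can be read into a pandas DataFrame with the uproot4
# API and filtered with an optional query string, mirroring what pd_tree() does.
# The function name, file name and tree/branch names are illustrative
# assumptions, not code from this repository.
import uproot  # assumes uproot >= 4

def read_tree_df(path, tname, squery=None):
    tree = uproot.open(path)[tname]
    df = tree.arrays(library="pd")  # returns a pandas DataFrame
    if squery:
        df = df.query(squery)
    return df.reset_index(drop=True)

# example call (placeholder names):
# df = read_tree_df('AnalysisResults.root', 'PWGHF_TreeCreator/tree_Particle',
#                   squery='ParticlePt > 0.15')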
def load_file(self, path):
    if not os.path.exists(path):
        pwarning('[w] file', path, 'does not exist.')
        return False
    self.event_df = self.pd_tree(path, self.event_tree_name)
    if self.event_df is None:
        return False
    self.d0_df = self.pd_tree(path, self.d0_tree_name)
    if self.d0_df is None:
        return False
    self.track_df = self.pd_tree(path, self.track_tree_name)
    if self.track_df is None:
        return False
    return True
def load_file(self, path):
    if not os.path.exists(path):
        pwarning('[w] file', path, 'does not exist.')
        return False
    try:
        event_tree = uproot.open(path)[self.event_tree_name]
    except Exception:
        pwarning('error getting', self.event_tree_name, 'from file:', path)
        return False
    if not event_tree:
        perror('Tree {} not found in file {}'.format(self.event_tree_name, path))
        return False
    event_df_orig = event_tree.pandas.df(['run_number', 'ev_id', 'z_vtx_reco', 'is_ev_rej'])
    event_df_orig.reset_index(drop=True, inplace=True)
    event_df = event_df_orig.query('is_ev_rej == 0')
    event_df.reset_index(drop=True, inplace=True)
    # Load track tree into dataframe
    try:
        track_tree = uproot.open(path)[self.tree_name]
    except Exception:
        pwarning('error getting', self.tree_name, 'from file:', path)
        return False
    if not track_tree:
        perror('Tree {} not found in file {}'.format(self.tree_name, path))
        return False
    track_df_orig = track_tree.pandas.df()
    # Merge event info into track tree
    track_df = pd.merge(track_df_orig, event_df, on=['run_number', 'ev_id'])
    self.track_df_grouped = track_df.groupby(['run_number', 'ev_id'])
    # (ii) Transform the DataFrameGroupBy object to a SeriesGroupBy of fastjet particles
    # (see the sketch after this function)
    return True
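# --- hedged sketch for step (ii) above (not part of the original class) -------
# One way to turn the (run_number, ev_id)-grouped track DataFrame into per-event
# lists of fastjet PseudoJets. The column names ParticlePt/ParticleEta/ParticlePhi
# and the helper name are assumptions based on the trees streamed elsewhere in
# this code; a zero mass hypothesis is used so rapidity equals pseudorapidity.
import fastjet as fj

def group_to_pseudojets(df):
    pjs = []
    for pt, eta, phi in zip(df['ParticlePt'], df['ParticleEta'], df['ParticlePhi']):
        pj = fj.PseudoJet()
        pj.reset_PtYPhiM(pt, eta, phi, 0.0)
        pjs.append(pj)
    return pjs

# usage against the grouped object built above (illustrative):
# particles_per_event = self.track_df_grouped.apply(group_to_pseudojets)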
def run(self):
    # need to change this for data to drive...
    delta_t = 0
    start_t = time.time()
    iev = 1
    # while self.det_sim.load_event() and self.part_sim.load_event():
    while self.det_sim.load_event():
        iev = iev + 1
        if self.nev > 0:
            if iev > self.nev:
                iev = iev - 1
                break
        if iev % 1000 == 0:
            delta_t = time.time() - start_t
            pinfo('processing event', iev, ' - ev/sec =', iev / delta_t, 'elapsed =', delta_t)
        # find jets on detector level
        if len(self.det_sim.particles) < 1:
            pwarning(iev, 'event skipped N detector parts', len(self.det_sim.particles))
            continue
        self.ja_det.analyze_event(self.det_sim.particles)
        _jets_det = self.ja_det.jets
        # _x = [pdebug(' -d ', j) for j in _jets_det]
        if len(_jets_det) < 1:
            continue
        _too_high_pt = [p.pt() for j in _jets_det for p in j.constituents() if p.pt() > 100.]
        if len(_too_high_pt) > 0:
            pwarning(iev, 'a likely fake high pT particle(s)', _too_high_pt, '- skipping whole event')
            continue
        _output_fname = os.path.expanduser(os.path.expandvars(self.det_sim.file_io.file_input))
        _output_fname = _output_fname.replace("/", "_")
        self.output.initialize_output(_output_fname)
        self.output.fill_det_level(iev, _jets_det)
        # load the corresponding event on particle level
        self.part_sim.open_afile(afile=self.det_sim.file_io.file_input)
        if not self.part_sim.load_event_with_loc(self.det_sim.event.run_number, self.det_sim.event.ev_id, 0):
            perror('unable to load partL event run#:', self.det_sim.event.run_number,
                   'ev_id:', self.det_sim.event.ev_id)
            continue
        if self.det_sim.event.run_number != self.part_sim.event.run_number:
            perror('run# mismatch detL:', self.det_sim.event.run_number,
                   'partL:', self.part_sim.event.run_number)
            continue
        if self.det_sim.event.ev_id != self.part_sim.event.ev_id:
            perror('ev_id# mismatch detL:', self.det_sim.event.ev_id,
                   'partL:', self.part_sim.event.ev_id)
            continue
        # find jets on particle level
        if len(self.part_sim.particles) < 1:
            pwarning(iev, 'event skipped N particle parts', len(self.part_sim.particles))
            continue
        self.ja_part.analyze_event(self.part_sim.particles)
        _jets_part = self.ja_part.jets
        # _x = [pdebug(' -p ', j) for j in _jets_part]
        if len(_jets_part) < 1:
            continue
        # match in pp simulations
        _det_part_matches = []
        _n_matches = 0
        _part_psjv = self.ja_part.jets_as_psj_vector()
        for j_det in _jets_det:
            _matches_pp = fjtools.matched_Reta(j_det, _part_psjv, 0.6 * self.jetR)
            # _matches_pp = fjtools.matched_Ry(j_det, _part_psjv, 0.6 * self.jetR)
            _n_matches = _n_matches + len(_matches_pp)
            if len(_matches_pp) > 1:
                pwarning('event:', iev, 'jet pt=', j_det.pt(),
                         'more than one match in pp jets', [i for i in _matches_pp])
            if len(_matches_pp) == 1:
                j_part = _part_psjv[_matches_pp[0]]
                # pinfo('j_det', j_det, 'j_part', j_part)
                _det_part_matches.append([j_det, j_part])
                self.output.fill_pp_pairs(iev, [j_det, j_part])
        if _n_matches < 1:
            pwarning('event:', iev, '- no matched jets in simulation!?', len(_det_part_matches))
        # here embedding to PbPb data
        _offset = 10000
        while _offset < len(self.det_sim.particles):
            _offset = _offset + 1000
            pwarning('increasing bg index offset to', _offset)
        _PbPb_loaded = 0
        while _PbPb_loaded == 0:
            if not self.dataPbPb.load_event(offset=_offset):
                perror('unable to load next PbPb event')
                _PbPb_loaded = -1
            else:
                _hybrid_event = self.dataPbPb.particles
                _nparts_hybrid_no_emb = len(_hybrid_event)
                if _nparts_hybrid_no_emb < 1:
                    pwarning('hybrid event with no particles! trying another one')
                    _PbPb_loaded = 0
                else:
                    _PbPb_loaded = 1
        if _PbPb_loaded < 0:
            perror('unable to load PbPb event - permanent - bailing out here.')
            break
        _tmp = [_hybrid_event.push_back(p) for p in self.det_sim.particles]
        if self.cs:
            cs_parts = self.cs.process_event(_hybrid_event)
            rho = self.cs.bge_rho.rho()
            self.ja_hybrid.analyze_event(cs_parts)
        else:
            self.ja_hybrid.analyze_event(_hybrid_event)
        _hybrid_matches = []
        _hybrid_psjv = self.ja_hybrid.jets_as_psj_vector()
        for m in _det_part_matches:
            j_det = m[0]
            j_part = m[1]
            _matches_hybrid = fjtools.matched_Reta(j_det, _hybrid_psjv, 0.6 * self.jetR)
            if len(_matches_hybrid) > 1:
                pwarning('event:', iev, 'jet pt=', j_det.pt(),
                         'more than one match in hybrid jets', [i for i in _matches_hybrid])
            if len(_matches_hybrid) == 1:
                # m.append(_hybrid_psjv[_matches_hybrid[0]])
                j_hybr = _hybrid_psjv[_matches_hybrid[0]]
                # pdebug('L302', 'j_det', j_det, 'j_part', j_part, 'j_hybr', j_hybr)
                _hybrid_matches.append([j_det, j_part, j_hybr])
                self.output.fill_emb_3(iev, [j_det, j_part, j_hybr])
        _n_matches_hybrid = len(_hybrid_matches)
        if _n_matches_hybrid < 1:
            pwarning('event:', iev, '- no matched jets in embedding!?', _n_matches_hybrid)

    delta_t = time.time() - start_t
    pinfo('processed events', iev, ' - ev/sec =', iev / delta_t, 'elapsed =', delta_t)
    self.output.close()
def main():
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    _default_output_filename = os.path.basename(__file__).replace(".py", "") + "_output.root"
    parser.add_argument('--output', default=_default_output_filename, type=str)
    parser.add_argument('--debug', default=0, type=int)
    args = parser.parse_args()

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    jet_R0 = 0.6
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    jet_selector = fj.SelectorPtMin(2.0) & fj.SelectorAbsEtaMax(2)
    print(jet_def)

    jet_def_lund = fj.JetDefinition(fj.cambridge_algorithm, jet_R0)
    lund_gen = fjcontrib.LundGenerator(jet_def_lund)
    print(jet_def_lund)
    print(lund_gen)

    outf = ROOT.TFile(args.output, 'recreate')
    outf.cd()
    t = ROOT.TTree('t', 't')
    tw = RTreeWriter(tree=t)

    # mycfg = ['PhaseSpace:pThatMin = 100']
    mycfg = []
    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if args.nev < 100:
        args.nev = 100
    for i in tqdm.tqdm(range(args.nev)):
        if not pythia.next():
            continue
        if args.debug:
            pwarning('-- event', i)
        # parts = pythiafjext.vectorize(pythia, True, -1, 1, False)
        parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True)
        if args.debug > 5:
            parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kHadron], 0, True)
        if args.debug > 10:
            parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kAny], 0, True)
        if args.debug > 0:
            for p in parts:
                pypart = pythiafjext.getPythia8Particle(p)
                if pypart.name()[:2] == 'D0':
                    pinfo(pypart.name(), pypart.id(), pypart.status(), 'final =?', pypart.isFinal())
        jets = jet_selector(jet_def(parts))
        for j in jets:
            isD0_lead = False
            lead_part = fj.sorted_by_E(j.constituents())[0]
            pypart = pythiafjext.getPythia8Particle(lead_part)
            if args.debug:
                pinfo('leading id is', pypart.id(), pypart.name(), 'jet', j)
            if abs(pypart.id()) == 421:
                # pinfo('leading D0')
                isD0_lead = True
            l = lund_gen.result(j)
            if len(l) > 0:
                tw.fill_branch('Epair', [s.pair().e() for s in l])
                tw.fill_branch('z', [s.z() for s in l])
                tw.fill_branch('kt', [s.kt() for s in l])
                tw.fill_branch('delta', [s.Delta() for s in l])
                tw.fill_branch('D0lead', isD0_lead)
                tw.fill_branch('lead_id', pypart.id())
                tw.fill_tree()
            else:
                if args.debug:
                    pwarning("len of a lund is less than 1?", len(l), l)

    pythia.stat()
    outf.Write()
    outf.Close()
    print('[i] written', outf.GetName())
def main():
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    parser.add_argument('--nw', help="no warn", default=True, action='store_true')
    parser.add_argument('--ignore-mycfg', help="ignore some settings hardcoded here",
                        default=False, action='store_true')
    parser.add_argument('--enable-background', help="enable background calc",
                        default=False, action='store_true')
    parser.add_argument('--output', help="output file name",
                        default='leadsj_vs_zloss.root', type=str)
    parser.add_argument('--jetptmin', help="jet pt minimum", default=-1, type=float)
    parser.add_argument('--jetptmax', help="jet pt maximum", default=1e6, type=float)
    parser.add_argument('--eta', help="jet eta max", default=2.4, type=float)
    parser.add_argument('--kt', help="use kT algorithm instead of anti-kT for the subjets",
                        default=False, action='store_true')
    args = parser.parse_args()

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    jet_R0 = 0.4
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    jet_selector = fj.SelectorPtMin(args.py_pthatmin) & fj.SelectorPtMax(1000.0) & fj.SelectorAbsEtaMax(args.eta - jet_R0)

    mycfg = []
    if args.jetptmin > 0:
        mycfg = ['PhaseSpace:pThatMin = {}'.format(args.jetptmin)]
        jet_selector = fj.SelectorPtMin(args.jetptmin) & fj.SelectorPtMax(args.jetptmax) & fj.SelectorAbsEtaMax(args.eta - jet_R0)
    print(jet_def)

    if args.ignore_mycfg:
        mycfg = []
    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if not pythia:
        perror("pythia initialization failed.")
        return

    nbins = 20
    # sjrs = [0.001 + x * 0.04 for x in range(0, nbins)]
    sjrs = logbins(0.001, jet_R0, nbins)
    print(sjrs)
    print('log(1/r) :', [ROOT.TMath.Log(1 / r) for r in sjrs])
    sjdefs = dict()
    for sjr in sjrs:
        if args.kt:
            _jet_def = fj.JetDefinition(fj.kt_algorithm, sjr)
        else:
            _jet_def = fj.JetDefinition(fj.antikt_algorithm, sjr)
        sjdefs[sjr] = _jet_def

    # tw = treewriter.RTreeWriter(name = 'lsjvsx', file_name = 'leadsj_vs_x.root')
    tw = treewriter.RTreeWriter(name='lsjvszloss', file_name=args.output)
    tw.fout.cd()

    h_zloss_r_q = dict()
    h_zloss_r_g = dict()
    for sjr in sjrs:
        sname = 'h_zloss_glue_{}'.format(sjr)
        _h_zloss_r_g = ROOT.TH1F(sname, sname, len(sjrs), 0., 1.)
        h_zloss_r_g[sjr] = _h_zloss_r_g
        sname = 'h_zloss_quark_{}'.format(sjr)
        _h_zloss_r_q = ROOT.TH1F(sname, sname, len(sjrs), 0., 1.)
        h_zloss_r_q[sjr] = _h_zloss_r_q

    lbins = logbins(ROOT.TMath.Log(1. / jet_R0), ROOT.TMath.Log(1. / sjrs[0]), nbins)
    print('lbins:', lbins)

    sname = 'prof_zloss_vs_r_any'
    prof_a = ROOT.TProfile(sname, sname, nbins, 0, jet_R0)
    prof_a_log = ROOT.TProfile(sname + '_log', sname + '_log', nbins, lbins)
    sname = 'prof_zloss_vs_r_glue'
    prof_g = ROOT.TProfile(sname, sname, nbins, 0, jet_R0)
    prof_g_log = ROOT.TProfile(sname + '_log', sname + '_log', nbins, lbins)
    sname = 'prof_zloss_vs_r_quark'
    prof_q = ROOT.TProfile(sname, sname, nbins, 0, jet_R0)
    prof_q_log = ROOT.TProfile(sname + '_log', sname + '_log', nbins, lbins)
    # prof_q_log = ROOT.TProfile(sname+'_log', sname+'_log', nbins, ROOT.TMath.Log(1./jet_R0), ROOT.TMath.Log(1./sjrs[0]))

    sname = 'h2_zloss_vs_r_glue'
    h2_zloss_r_g = ROOT.TH2F(sname, sname, nbins, 0., jet_R0, len(sjrs), 0., 1.)
    sname = 'h2_zloss_vs_r_quark'
    h2_zloss_r_q = ROOT.TH2F(sname, sname, nbins, 0., jet_R0, len(sjrs), 0., 1.)

    # loop
    if args.nev < 100:
        args.nev = 100
    for i in tqdm.tqdm(range(args.nev)):
        if not pythia.next():
            continue
        # parts = pythiafjext.vectorize(pythia, True, -1, 1, False)
        partons = pythiafjext.vectorize_select(pythia, [pythiafjext.kParton], 0, True)
        parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, False)
        # parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, False)
        jets = jet_selector(jet_def(parts))
        # for j in tqdm.tqdm(jets):
        for j in jets:
            j_type = match_dR(j, partons, jet_R0 / 2.)
            if j_type[0] is None:
                if args.nw:
                    continue
                pwarning('Jet with no parton label')
                continue
            tw.fill_branch("j", j)
            for sjr in sjrs:
                rc_jets = fj.sorted_by_pt(sjdefs[sjr](j.constituents()))
                tw.fill_branch("sjr{}".format(sjr), rc_jets[0])
                zloss = 1. - rc_jets[0].perp() / j.perp()
                tw.fill_branch("sjr{}_zloss".format(sjr), zloss)
                tw.fill_branch("ppid", j_type[0])
                tw.fill_branch("pquark", j_type[1])
                tw.fill_branch("pglue", j_type[2])
                prof_a.Fill(sjr, zloss)
                prof_a_log.Fill(ROOT.TMath.Log(1. / sjr), zloss)
                if j_type[1]:
                    h_zloss_r_q[sjr].Fill(zloss)
                    h2_zloss_r_q.Fill(sjr, zloss)
                    prof_q.Fill(sjr, zloss)
                    prof_q_log.Fill(ROOT.TMath.Log(1. / sjr), zloss)
                if j_type[2]:
                    h_zloss_r_g[sjr].Fill(zloss)
                    h2_zloss_r_g.Fill(sjr, zloss)
                    prof_g.Fill(sjr, zloss)
                    prof_g_log.Fill(ROOT.TMath.Log(1. / sjr), zloss)
            tw.fill_tree()

    pythia.stat()
    tw.write_and_close()
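# --- hedged sketch of the logbins() helper used above -------------------------
# logbins() is defined elsewhere in the repository; a typical implementation
# returns logarithmically spaced bin edges packed into an array that ROOT
# variable-bin constructors accept. This version is an assumption, not the
# repository's exact code.
import array
import numpy as np

def logbins(xmin, xmax, nbins):
    # nbins intervals -> nbins + 1 edges, equally spaced in log10(x)
    edges = np.logspace(np.log10(xmin), np.log10(xmax), nbins + 1)
    return array.array('d', edges)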
def main():
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    parser.add_argument('--nw', help="no warn", default=False, action='store_true')
    parser.add_argument('--ignore-mycfg', help="ignore some settings hardcoded here",
                        default=False, action='store_true')
    parser.add_argument('--enable-background', help="enable background calc",
                        default=False, action='store_true')
    parser.add_argument('--output', help="output file name",
                        default='leadsj_vs_x_output.root', type=str)
    # for background
    parser.add_argument('--cent-bin', help="centrality bin 0 is the 0-5 percent most central bin",
                        type=int, default=0)
    parser.add_argument('--seed', help="pr gen seed", type=int, default=1111)
    parser.add_argument('--harmonics',
                        help="set harmonics flag (0 : v1 - v5) , (1 : v2 - v5) , (2: v3 - v5) , (3: v1 - v4) , (4: v1 - v3) , (5: uniform dN/dphi no harmonics) , (6 : v1 - v2 , v4 - v5) , (7 : v1 - v3 , v5) , (8 : v1 , v3 - v5) , (9 : v1 only) , (10 : v2 only) , (11 : v3 only) , (12 : v4 only) , (13 : v5 only)",
                        type=int, default=5)
    parser.add_argument('--eta',
                        help="set eta range must be uniform (e.g. abs(eta) < 0.9, which is ALICE TPC fiducial acceptance)",
                        type=float, default=0.9)
    parser.add_argument('--qa', help="PrintOutQAHistos", default=False, action='store_true')
    parser.add_argument('--dRmax', default=0.25, type=float)
    parser.add_argument('--alpha', default=0, type=float)
    args = parser.parse_args()

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    jet_R0 = 0.4
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    jet_selector = fj.SelectorPtMin(args.py_pthatmin) & fj.SelectorPtMax(1000.0) & fj.SelectorAbsEtaMax(args.eta - jet_R0)
    # jet_selector = fj.SelectorPtMin(40.0) & fj.SelectorPtMax(200.0) & fj.SelectorAbsEtaMax(1)
    print(jet_def)

    all_jets = []

    # mycfg = ['PhaseSpace:pThatMin = 80']
    # mycfg = ['PhaseSpace:pThatMin = 40']
    mycfg = ['']
    if args.ignore_mycfg:
        mycfg = []
    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if not pythia:
        perror("pythia initialization failed.")
        return

    jet_def_lund = fj.JetDefinition(fj.cambridge_algorithm, 1.0)
    lund_gen = fjcontrib.LundGenerator(jet_def_lund)
    print(lund_gen.description())
    dy_groomer = fjcontrib.DynamicalGroomer(jet_def_lund)
    print(dy_groomer.description())

    # sd = fjcontrib.SoftDrop(0, 0.1, 1.0)
    sd01 = fjcontrib.SoftDrop(0, 0.1, 1.0)
    print(sd01)
    sd02 = fjcontrib.SoftDrop(0, 0.2, 1.0)
    print(sd02)

    # jet_def_rc01 = fj.JetDefinition(fj.cambridge_algorithm, 0.1)
    # jet_def_rc02 = fj.JetDefinition(fj.cambridge_algorithm, 0.2)
    # print (jet_def_rc01)
    # print (jet_def_rc02)
    # rc = fjcontrib.Recluster(jet_def_rc, True)
    jet_def_rc01 = fj.JetDefinition(fj.antikt_algorithm, 0.1)
    jet_def_rc02 = fj.JetDefinition(fj.antikt_algorithm, 0.2)
    print(jet_def_rc01)
    print(jet_def_rc02)
    # rc = fjcontrib.Recluster(jet_def_rc, True)

    # tw = treewriter.RTreeWriter(name = 'lsjvsx', file_name = 'leadsj_vs_x.root')
    tw = treewriter.RTreeWriter(name='lsjvsx', file_name=args.output)

    tgbkg = None
    be = None
    if args.enable_background:
        # ROOT.gSystem.Load("libpyjetty_TennGen.dylib")
        # tgbkg = ROOT.TennGen()  # constructor
        # tgbkg.SetCentralityBin(args.cent_bin)  # centrality bin 0 is the 0-5 % most central bin
        # tgbkg.SetRandomSeed(args.seed)  # setting the seed
        # tgbkg.SetHarmonics(args.harmonics)  # set harmonics flag (0 : v1 - v5) , (1 : v2 - v5) , (2: v3 - v5) , (3: v1 - v4) , (4: v1 - v3) , (5: uniform dN/dphi no harmonics) , (6 : v1 - v2 , v4 - v5) , (7 : v1 - v3 , v5) , (8 : v1 , v3 - v5) , (9 : v1 only) , (10 : v2 only) , (11 : v3 only) , (12 : v4 only) , (13 : v5 only)
        # tgbkg.SetEtaRange(args.eta)  # set eta range must be uniform (e.g. |eta| < 0.9, which is ALICE TPC fiducial acceptance)
        # tgbkg.PrintOutQAHistos(args.qa)
        # tgbkg.InitializeBackground()
        from pyjetty.mputils import BoltzmannEvent
        be = BoltzmannEvent(mean_pt=0.7, multiplicity=2000 * args.eta * 2,
                            max_eta=args.eta, max_pt=100)
        print(be)

    from pyjetty.mputils import CEventSubtractor, CSubtractorJetByJet
    cs = CEventSubtractor(alpha=args.alpha, max_distance=args.dRmax, max_eta=args.eta,
                          bge_rho_grid_size=0.25, max_pt_correct=100)
    print(cs)

    if args.nev < 100:
        args.nev = 100
    for i in tqdm.tqdm(range(args.nev)):
        if not pythia.next():
            continue
        # parts = pythiafjext.vectorize(pythia, True, -1, 1, False)
        partons = pythiafjext.vectorize_select(pythia, [pythiafjext.kParton], 0, True)
        parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, False)
        # parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, False)
        jets = jet_selector(jet_def(parts))
        # for j in tqdm.tqdm(jets):
        for j in jets:
            j_type = match_dR(j, partons, jet_R0 / 2.)
            if j_type[0] is None:
                if args.nw:
                    continue
                pwarning('Jet with no parton label')
                continue
            j_sd02 = sd02.result(j)
            sd02_info = fjcontrib.get_SD_jet_info(j_sd02)
            j_sd01 = sd01.result(j)
            sd01_info = fjcontrib.get_SD_jet_info(j_sd01)
            rc_sjets01 = fj.sorted_by_pt(jet_def_rc01(j.constituents()))
            rc_sjets02 = fj.sorted_by_pt(jet_def_rc02(j.constituents()))
            tw.fill_branches(j=j,
                             lund=[ls for ls in lund_gen.result(j)],
                             dyg1=dy_groomer.result(j, 1),
                             sd01=j_sd01,
                             sd01_z=sd01_info.z,
                             sd01_mu=sd01_info.mu,
                             sd01_Delta=sd01_info.dR,
                             sd02=j_sd02,
                             sd02_z=sd02_info.z,
                             sd02_mu=sd02_info.mu,
                             sd02_Delta=sd02_info.dR,
                             # breaking compatibility
                             # sd=j_sd,
                             # sd_z=sd_info.z,
                             # sd_mu=sd_info.mu,
                             # sd_Delta=sd_info.dR,
                             lsjet01=rc_sjets01[0],
                             nsjet01=len(rc_sjets01),
                             sjet01=rc_sjets01,
                             lsjet02=rc_sjets02[0],
                             nsjet02=len(rc_sjets02),
                             sjet02=rc_sjets02,
                             ppid=j_type[0],
                             pquark=j_type[1],
                             pglue=j_type[2],
                             # this is redundancy
                             pycode=pythia.info.code(),
                             pysigmagen=pythia.info.sigmaGen(),
                             pysigmaerr=pythia.info.sigmaErr(),
                             pyid1=pythia.info.id1pdf(),
                             pyid2=pythia.info.id2pdf(),
                             pyx1=pythia.info.x1pdf(),
                             pyx2=pythia.info.x2pdf(),
                             pypdf1=pythia.info.pdf1(),
                             pyQfac=pythia.info.QFac(),
                             pyalphaS=pythia.info.alphaS(),
                             pypthat=pythia.info.pTHat(),
                             pymhat=pythia.info.mHat())
            if be:
                bg_parts = be.generate(offset=10000)
                full_event = bg_parts
                tmp = [full_event.push_back(psj) for psj in j.constituents()]
                if cs:
                    cs_parts = cs.process_event(full_event)
                    rho = cs.bge_rho.rho()
                    bg_jets = fj.sorted_by_pt(jet_def(cs_parts))
                    for bj in bg_jets:
                        if fjtools.matched_pt(bj, j) > 0.5:
                            pass
            tw.fill_tree()

    pythia.stat()
    tw.write_and_close()
def load_file(self, path):
    if not os.path.exists(path):
        pwarning('[w] file', path, 'does not exist.')
        return False
    try:
        event_tree = uproot.open(path)[self.event_tree_name]
    except Exception:
        pwarning('error getting', self.event_tree_name, 'from file:', path)
        return False
    if not event_tree:
        perror('Tree {} not found in file {}'.format(self.event_tree_name, path))
        return False
    event_df_orig = event_tree.pandas.df(['run_number', 'ev_id', 'z_vtx_reco', 'is_ev_rej'])
    event_df_orig.reset_index(drop=True, inplace=True)
    event_df = event_df_orig.query('is_ev_rej == 0')
    event_df.reset_index(drop=True, inplace=True)
    # Load gen tree into df
    try:
        gen_tree = uproot.open(path)[self.gen_tree_name]
    except Exception:
        pwarning('error getting', self.gen_tree_name, 'from file:', path)
        return False
    if not gen_tree:
        perror('Tree {} not found in file {}'.format(self.gen_tree_name, path))
        return False
    gen_df_orig = gen_tree.pandas.df(['run_number', 'ev_id', 'pt_cand', 'eta_cand', 'cand_type'])
    gen_df_orig.sort_values(by=['run_number', 'ev_id'], inplace=True)
    df_genruns = gen_df_orig[['run_number', 'ev_id']].copy()
    # Load track tree into dataframe
    try:
        track_tree = uproot.open(path)[self.tree_name]
    except Exception:
        pwarning('error getting', self.tree_name, 'from file:', path)
        return False
    if not track_tree:
        perror('Tree {} not found in file {}'.format(self.tree_name, path))
        return False
    track_df_orig = track_tree.pandas.df([
        'run_number', 'ev_id', 'inv_mass', 'pt_cand', 'pt_prong0', 'pt_prong1', 'dca',
        'cos_t_star', 'imp_par_prod', 'cos_p', 'cand_type', 'imp_par_prong0',
        'imp_par_prong1', 'norm_dl_xy', 'eta_cand', 'nsigTPC_Pi_0', 'nsigTOF_Pi_0',
        'nsigTPC_K_1', 'nsigTOF_K_1', 'nsigTPC_Pi_1', 'nsigTOF_Pi_1', 'nsigTPC_K_0',
        'nsigTOF_K_0'
    ])
    # Merge event info into track tree
    track_df = pd.merge(track_df_orig, event_df, on=['run_number', 'ev_id'])
    track_df.sort_values(by=['run_number', 'ev_id'], inplace=True)
    df_d0runs = track_df[['run_number', 'ev_id']].copy()
    df_runs = pd.merge(df_d0runs, df_genruns, on=['run_number', 'ev_id'])
    df_runs.drop_duplicates(keep='first', inplace=True)
    gen_df_orig = pd.merge(gen_df_orig, df_runs, on=['run_number', 'ev_id'])
    track_df = pd.merge(track_df, df_runs, on=['run_number', 'ev_id'])
    self.d0_gen = gen_df_orig.groupby(['run_number', 'ev_id'])
    self.track_df_grouped = track_df.groupby(['run_number', 'ev_id'])
    return True
def __del__(self):
    for w in self._warnings:
        pwarning(self.tree_name, ':', w)
def main():
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    _default_output_filename = os.path.basename(__file__).replace(".py", "") + "_output.root"
    parser.add_argument('--output', default=_default_output_filename, type=str)
    parser.add_argument('--user-seed', help='pythia seed', default=1111, type=int)
    parser.add_argument('--debug', default=0, type=int)
    args = parser.parse_args()

    # jets
    # print the banner first
    # fj.ClusterSequence.print_banner()
    # print()
    # # set up our jet definition and a jet selector
    jet_R0 = 1.0
    # jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    # print(jet_def)

    # acceptance
    # hadron level acceptance
    max_eta_hadron = 10.
    from pyjetty.mputils import pwarning
    pwarning('max eta for particles after hadronization set to', max_eta_hadron)
    parts_selector_h = fj.SelectorAbsEtaMax(max_eta_hadron)
    jet_selector = fj.SelectorPtMin(1.0) & fj.SelectorPtMax(100.0) & fj.SelectorAbsEtaMax(max_eta_hadron - 1.05 * jet_R0)
    # parton level acceptance
    max_eta_parton = max_eta_hadron + 3. * jet_R0
    pwarning('max eta for partons set to', max_eta_parton)
    parts_selector_p = fj.SelectorAbsEtaMax(max_eta_parton)

    # initialize pythia
    # mZ = pythia8.Pythia().particleData.m0(23)
    mZ = 91.188
    beams_eCM = "Beams:eCM={}".format(mZ)
    mycfg = [
        "PDF:lepton = off",  # Allow no substructure in e+- beams: normal for corrected LEP data.
        "WeakSingleBoson:ffbar2gmZ = on",  # Process selection.
        "23:onMode = off",  # Switch off all Z0 decays and then switch back on those to quarks.
        "23:onIfAny = 1 2 3 4 5",
        "Beams:idA = 11",
        "Beams:idB = -11",
        beams_eCM,  # LEP1 initialization at Z0 mass.
        "HadronLevel:all=off",  # parton level first
        "PhaseSpace:bias2Selection=off"  # this is ON by default in pyconf - not OK for these settings
    ]
    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if not pythia:
        return

    # initialize ROOT output
    outf = ROOT.TFile(args.output, 'recreate')
    outf.cd()
    tdf = ROOT.TDirectoryFile('PWGHF_TreeCreator', 'PWGHF_TreeCreator')
    tdf.cd()
    t_p = ROOT.TNtuple(
        'tree_Particle_P', 'tree_Particle_P',
        'run_number:ev_id:ParticlePt:ParticleEta:ParticlePhi:ParticleID:ParticleIDabs:ParticleCharge:isGluon:isQuark')
    # t_p = ROOT.TNtuple('tree_Particle_gen', 'tree_Particle_gen', 'run_number:ev_id:ParticlePt:ParticleEta:ParticlePhi:ParticleID:ParticleIDabs:ParticleCharge')
    t_h = ROOT.TNtuple(
        'tree_Particle_H', 'tree_Particle_H',
        'run_number:ev_id:ParticlePt:ParticleEta:ParticlePhi:ParticleID:ParticleIDabs:ParticleCharge:isHadron:isLepton:isVisible')
    t_e = ROOT.TNtuple('tree_event_char', 'tree_event_char',
                       'run_number:ev_id:z_vtx_reco:is_ev_rej')

    if args.nev < 100:
        args.nev = 100
    run_number = args.user_seed
    # main loop
    for i in tqdm.tqdm(range(args.nev)):
        if not pythia.next():
            continue
        if args.debug:
            pwarning('-- event', i)
        # select particles
        parts_pythia_p = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True)
        parts_pythia_p_selected = parts_selector_p(parts_pythia_p)
        # hadronize
        hstatus = pythia.forceHadronLevel()
        if not hstatus:
            pwarning('forceHadronLevel false event', i)
            continue
        parts_pythia_h = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, True)
        parts_pythia_h_selected = parts_selector_h(parts_pythia_h)
        # charged hadrons/particles only
        # parts_pythia_h = pythiafjext.vectorize_select(pythia, [pythiafjext.kHadron, pythiafjext.kCharged])
        # parts_pythia_hch = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, True)
        # parts_pythia_hch_selected = parts_selector_h(parts_pythia_hch)
        # stream to trees
        ev_id = i
        t_e.Fill(run_number, ev_id, 0, 0)
        _tmp = [
            t_p.Fill(run_number, ev_id, p.perp(), p.eta(), p.phi(),
                     pythiafjext.getPythia8Particle(p).id(),
                     pythiafjext.getPythia8Particle(p).idAbs(),
                     pythiafjext.getPythia8Particle(p).charge(),
                     pythiafjext.getPythia8Particle(p).isGluon(),
                     pythiafjext.getPythia8Particle(p).isQuark())
            for p in parts_pythia_p
        ]
        _tmp = [
            t_h.Fill(run_number, ev_id, p.perp(), p.eta(), p.phi(),
                     pythiafjext.getPythia8Particle(p).id(),
                     pythiafjext.getPythia8Particle(p).idAbs(),
                     pythiafjext.getPythia8Particle(p).charge(),
                     pythiafjext.getPythia8Particle(p).isHadron(),
                     pythiafjext.getPythia8Particle(p).isLepton(),
                     pythiafjext.getPythia8Particle(p).isVisible())
            for p in parts_pythia_h
        ]

    pythia.stat()
    outf.Write()
    outf.Close()
def main():
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    parser.add_argument('--ignore-mycfg', help="ignore some settings hardcoded here",
                        default=False, action='store_true')
    parser.add_argument('--output', help="output file name",
                        default="test_hjet_parton.root", type=str)
    parser.add_argument('--no-tt', help="do not require TT to accept the event",
                        default=False, action='store_true')
    parser.add_argument('--charged', help="analyze only the charged particles of the FS",
                        default=False, action='store_true')
    parser.add_argument('--max-jet-pt', help="maximum jet pT to consider",
                        type=float, default=100.)
    args = parser.parse_args()

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    hadron_eta_max = 2.0
    jet_R0 = 0.4
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    jet_selector = fj.SelectorPtMin(10.0) & fj.SelectorPtMax(args.max_jet_pt) & fj.SelectorAbsEtaMax(hadron_eta_max - jet_R0)
    # jet_selector = fj.SelectorPtMin(40.0) & fj.SelectorPtMax(200.0) & fj.SelectorAbsEtaMax(hadron_eta_max - jet_R0)
    hTT6_selector = fj.SelectorPtMin(6) & fj.SelectorPtMax(7) & fj.SelectorAbsEtaMax(hadron_eta_max)
    hTT12_selector = fj.SelectorPtMin(12) & fj.SelectorPtMax(50) & fj.SelectorAbsEtaMax(hadron_eta_max)
    hTT20_selector = fj.SelectorPtMin(20) & fj.SelectorPtMax(50) & fj.SelectorAbsEtaMax(hadron_eta_max)

    pythia_fs_part_selection = [pythiafjext.kFinal]
    if args.charged is True:
        pwarning('running with charged particles in the final state')
        pythia_fs_part_selection.append(pythiafjext.kCharged)
    print(jet_def)

    all_jets = []

    # mycfg = ['PhaseSpace:pThatMin = 80']
    # mycfg = ['PhaseSpace:pThatMin = 6']
    # mycfg = ['PhaseSpace:pThatMin = 12']
    # mycfg = ['PhaseSpace:pThatMin = 40']
    mycfg = []
    if args.ignore_mycfg:
        mycfg = []
    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if not pythia:
        perror("pythia initialization failed.")
        return

    jet_def_lund = fj.JetDefinition(fj.cambridge_algorithm, 1.0)
    lund_gen = fjcontrib.LundGenerator(jet_def_lund)
    print(lund_gen.description())
    dy_groomer = fjcontrib.DynamicalGroomer(jet_def_lund)
    print(dy_groomer.description())

    # sd = fjcontrib.SoftDrop(0, 0.1, 1.0)
    sd = fjcontrib.SoftDrop(0, 0.2, 1.0)
    print(sd)

    # jet_def_rc01 = fj.JetDefinition(fj.cambridge_algorithm, 0.1)
    # jet_def_rc02 = fj.JetDefinition(fj.cambridge_algorithm, 0.2)
    # print (jet_def_rc01)
    # print (jet_def_rc02)
    # rc = fjcontrib.Recluster(jet_def_rc, True)
    jet_def_rc01 = fj.JetDefinition(fj.antikt_algorithm, 0.1)
    jet_def_rc02 = fj.JetDefinition(fj.antikt_algorithm, 0.2)
    print(jet_def_rc01)
    print(jet_def_rc02)
    # rc = fjcontrib.Recluster(jet_def_rc, True)

    # tw = treewriter.RTreeWriter(name = 'lsjvsx', file_name = 'leadsj_vs_x.root')
    tw = treewriter.RTreeWriter(name='lsjvsx', file_name=args.output)

    zero_psj = fj.PseudoJet(0, 0, 10, 10)

    if args.nev < 100:
        args.nev = 100
    t = tqdm.tqdm(total=args.nev)
    while t.n < args.nev:
        if not pythia.next():
            continue
        # information about the leading process
        # print(pythia.info.code(), pythia.info.nameProc(pythia.info.code()))
        # continue
        # parts = pythiafjext.vectorize(pythia, True, -1, 1, False)
        partons = pythiafjext.vectorize_select(pythia, [pythiafjext.kParton], 0, True)
        # parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, False)
        # parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, False)
        parts = pythiafjext.vectorize_select(pythia, pythia_fs_part_selection, 0, False)

        hTT6 = zero_psj
        hTT6s = fj.sorted_by_pt(hTT6_selector(parts))
        if len(hTT6s) > 0:
            hTT6 = hTT6s[0]
        hTT12 = zero_psj
        hTT12s = fj.sorted_by_pt(hTT12_selector(parts))
        if len(hTT12s) > 0:
            hTT12 = hTT12s[0]
        hTT20 = zero_psj
        hTT20s = fj.sorted_by_pt(hTT20_selector(parts))
        if len(hTT20s) > 0:
            hTT20 = hTT20s[0]

        if args.no_tt is False:
            if hTT12.perp() < 1 and hTT6.perp() < 1 and hTT20.perp() < 1:
                continue

        jets = jet_selector(jet_def(parts))
        # for j in tqdm.tqdm(jets):
        for j in jets:
            t.update(1)
            j_type = match_dR(j, partons, jet_R0 / 2.)
            if j_type[0] is None:
                continue
            j_sd = sd.result(j)
            sd_info = fjcontrib.get_SD_jet_info(j_sd)
            rc_sjets01 = fj.sorted_by_pt(jet_def_rc01(j.constituents()))
            rc_sjets02 = fj.sorted_by_pt(jet_def_rc02(j.constituents()))
            tw.fill_branches(j=j,
                             mult=len(parts),
                             lund=[ls for ls in lund_gen.result(j)],
                             dyg1=dy_groomer.result(j, 1),
                             sd=j_sd,
                             sd_z=sd_info.z,
                             sd_mu=sd_info.mu,
                             sd_Delta=sd_info.dR,
                             lsjet01=rc_sjets01[0],
                             nsjet01=len(rc_sjets01),
                             sjet01=rc_sjets01,
                             lsjet02=rc_sjets02[0],
                             nsjet02=len(rc_sjets02),
                             sjet02=rc_sjets02,
                             hTT6=hTT6,
                             hTT12=hTT12,
                             hTT20=hTT20,
                             dphi6=j.delta_phi_to(hTT6),
                             dphi12=j.delta_phi_to(hTT12),
                             dphi20=j.delta_phi_to(hTT20),
                             ppid=j_type[0],
                             pquark=j_type[1],
                             pglue=j_type[2],
                             # this is redundancy
                             pycode=pythia.info.code(),
                             pysigmagen=pythia.info.sigmaGen(),
                             pysigmaerr=pythia.info.sigmaErr(),
                             pyid1=pythia.info.id1pdf(),
                             pyid2=pythia.info.id2pdf(),
                             pyx1=pythia.info.x1pdf(),
                             pyx2=pythia.info.x2pdf(),
                             pypdf1=pythia.info.pdf1(),
                             pyQfac=pythia.info.QFac(),
                             pyalphaS=pythia.info.alphaS(),
                             pypthat=pythia.info.pTHat(),
                             pymhat=pythia.info.mHat())
            tw.fill_tree()

    t.close()
    pythia.stat()
    tw.write_and_close()
def main():
    parser = argparse.ArgumentParser(description='test groomers',
                                     prog=os.path.basename(__file__))
    parser.add_argument('-o', '--output-filename', default="output.root", type=str)
    parser.add_argument('datalistpp', help='run through a file list', default='', type=str)
    parser.add_argument('--datalistAA', help='run through a file list - embedding mode',
                        default='', type=str)
    parser.add_argument('--jetR', default=0.4, type=float)
    parser.add_argument('--alpha', default=0, type=float)
    parser.add_argument('--dRmax', default=0.25, type=float)
    parser.add_argument('--overwrite', help="overwrite output", default=False, action='store_true')
    parser.add_argument('--jetptcut', help='remove jets below the cut', default=50., type=float)
    parser.add_argument('--nev', help='number of events to run', default=0, type=int)
    parser.add_argument('--max-eta', help='max eta for particles', default=0.9, type=float)
    parser.add_argument('--npart-cut', help='npart cut on centrality low,high hint:' + npart_cents,
                        default='325,450', type=str)
    args = parser.parse_args()

    try:
        npart_min = int(args.npart_cut.split(',')[0])
        npart_max = int(args.npart_cut.split(',')[1])
    except Exception:
        perror('unable to parse npart centrality selection - two integer numbers with a comma in-between needed - specified:',
               args.npart_cut)
        return 1

    # initialize constituent subtractor
    cs = None
    if args.dRmax > 0:
        cs = CEventSubtractor(alpha=args.alpha, max_distance=args.dRmax, max_eta=args.max_eta,
                              bge_rho_grid_size=0.25, max_pt_correct=100)

    pp_data = DataIO(name='Sim Pythia Detector level', file_list=args.datalistpp,
                     random_file_order=False, tree_name='tree_Particle_gen')
    ja_pp = JetAnalysis(jet_R=args.jetR, jet_algorithm=fj.antikt_algorithm,
                        jet_pt_min=50., particle_eta_max=args.max_eta)

    if args.datalistAA:
        aa_data = DataBackgroundIO(name='PbPb', file_list=args.datalistAA,
                                   tree_name='tree_Particle_gen')
        ja_emb = JetAnalysis(jet_R=args.jetR, jet_algorithm=fj.antikt_algorithm,
                             jet_pt_min=50., particle_eta_max=args.max_eta)
        ja_aa = JetAnalysis(jet_R=args.jetR, jet_algorithm=fj.antikt_algorithm,
                            jet_pt_min=50., particle_eta_max=args.max_eta)

    dndeta_selector = fj.SelectorAbsEtaMax(1.)

    # tg = thg.ThermalGenerator()
    print(cs)

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()

    gout = GroomerOutput(args.output_filename, enable_aa_trees=bool(args.datalistAA))

    delta_t = 0
    start_t = time.time()
    iev = 1
    while pp_data.load_event(offset=0):
        iev = iev + 1
        if args.nev > 0:
            if iev > args.nev:
                iev = iev - 1
                break
        if iev % 1000 == 0:
            delta_t = time.time() - start_t
            pinfo('processing event', iev, ' - ev/sec =', iev / delta_t, 'elapsed =', delta_t)

        # find jets on detector level
        if len(pp_data.particles) < 1:
            pwarning(iev, 'pp event skipped N parts', len(pp_data.particles))
            continue
        ja_pp.analyze_event(pp_data.particles)
        if len(ja_pp.jets) < 1:
            continue
        # pinfo('n particles', len(pp_data.particles))
        dndeta0 = dndeta_selector(pp_data.particles)
        [gout.fill_branches(j, syst=0, dndeta=len(dndeta0) / 2.) for j in ja_pp.jets]
        # pinfo('n jets', len(ja_pp.jets))

        if args.datalistAA:
            while True:
                aa_loaded = aa_data.load_event(offset=10000)
                if aa_data.event.npart < npart_min or aa_data.event.npart >= npart_max:
                    continue
                else:
                    if len(aa_data.particles) < 1:
                        pwarning(iev, 'AA event skipped N parts', len(aa_data.particles))
                        continue
                    else:
                        break
            if aa_loaded:
                ja_aa.analyze_event(aa_data.particles)
                dndeta1 = dndeta_selector(aa_data.particles)
                if len(ja_aa.jets) > 0:
                    [gout.fill_branches(j, syst=1, dndeta=len(dndeta1) / 2.) for j in ja_aa.jets]
                else:
                    # pwarning('no jets in AA event?', len(ja_aa.jets), 'while dndeta=', len(dndeta1)/2.)
                    pass
                emb_event = fj.vectorPJ()
                [emb_event.push_back(p) for p in pp_data.particles]
                [emb_event.push_back(p) for p in aa_data.particles]
                rho = 0
                if cs:
                    cs_parts = cs.process_event(emb_event)
                    rho = cs.bge_rho.rho()
                    ja_emb.analyze_event(cs_parts)
                else:
                    ja_emb.analyze_event(emb_event)
                # matches = [[jpp, jemb] for jpp in ja_pp.jets for jemb in ja_emb.jets if fjtools.matched_pt(jemb, jpp) > 0.5]
                # for mj in matches:
                #     gout.fill_branches(mj[0], syst=2, dndeta=len(dndeta1)/2., rho=rho)
                #     gout.fill_branches(mj[1], syst=3)
                [gout.fill_branches_prong_matching(j_pp, j_emb, dndeta=len(dndeta1) / 2., rho=rho)
                 for j_pp in ja_pp.jets for j_emb in ja_emb.jets]

    delta_t = time.time() - start_t
    pinfo('processed events', iev, ' - ev/sec =', iev / delta_t, 'elapsed =', delta_t)
    gout.write()
def main():
    parser = argparse.ArgumentParser(description='test duplicate entries',
                                     prog=os.path.basename(__file__))
    parser.add_argument('fname', help='input file', default='', type=str)
    args = parser.parse_args()

    event_tree_name = 'PWGHF_TreeCreator/tree_event_char'
    event_tree = uproot.open(args.fname)[event_tree_name]
    if not event_tree:
        perror('Tree {} not found in file {}'.format(event_tree_name, args.fname))
        return False
    pinfo(args.fname)

    event_df_orig = event_tree.pandas.df()
    len_event_df_orig = len(event_df_orig)
    df_event_accepted = event_df_orig.query('is_ev_rej == 0')
    df_event_accepted.reset_index(drop=True, inplace=True)
    len_event_df_accepted = len(df_event_accepted)
    event_df_nodup = df_event_accepted.drop_duplicates()
    len_event_df_nodup = len(event_df_nodup)
    if len_event_df_accepted != len_event_df_nodup:
        perror('original event length:', len_event_df_orig, 'accepted:', len_event_df_accepted,
               'nodup:', len_event_df_nodup)
    else:
        pindent('original event length:', len_event_df_orig, 'accepted:', len_event_df_accepted,
                'nodup:', len_event_df_nodup)

    track_tree_name = 'PWGHF_TreeCreator/tree_Particle'
    track_tree = uproot.open(args.fname)[track_tree_name]
    if not track_tree:
        perror('Tree {} not found in file {}'.format(track_tree_name, args.fname))
        return False
    track_df_orig = track_tree.pandas.df()
    track_df = pd.merge(track_df_orig, event_df_nodup, on=['run_number', 'ev_id'])
    len_track_df = len(track_df)
    track_df_nodup = track_df.drop_duplicates()
    len_track_df_nodup = len(track_df_nodup)
    if len_track_df_nodup < len_track_df:
        perror('track+event rows:', len_track_df, 'nodup:', len_track_df_nodup)
    else:
        pindent('track+event rows:', len_track_df, 'nodup:', len_track_df_nodup)

    track_df_grouped = track_df.groupby(['run_number', 'ev_id'])
    len_track_df_grouped = len(track_df_grouped)
    if len_track_df_grouped <= len_event_df_nodup:
        pindent('track+event length grouped:', len_track_df_grouped)
    else:
        perror('track+event length grouped:', len_track_df_grouped)
    # track_df_nodup = track_df_grouped.drop_duplicates()
    # print('track+event length no dup:', len(track_df_nodup))

    # from James
    # Check if there are duplicated tracks in an event.
    duplicate_selection = ['run_number', 'ev_id', 'ParticlePt', 'ParticleEta', 'ParticlePhi']
    # if use_ev_id_ext:
    #     duplicate_selection.append('ev_id_ext')
    duplicate_rows_df = track_df.duplicated(duplicate_selection)
    for i, row in duplicate_rows_df.iteritems():
        if row:
            print(i, row)
    # for r in duplicate_rows_df:
    #     print(type(r))
    n_duplicates = sum(duplicate_rows_df)
    pindent('2nd pass: using duplicate selection ', duplicate_selection)
    if n_duplicates > 0:
        perror('2nd pass: there appear to be {} duplicate particles in the dataframe'.format(n_duplicates))
        perror('this is: {:.2} of all tracks'.format(n_duplicates / len_track_df))
        track_df_nodup = track_df.drop_duplicates(duplicate_selection, inplace=False)
        pwarning('new count rows for particles:', len(track_df_nodup), 'old count:', len_track_df)
    else:
        pindent('no duplicate particles found')
def main():
    parser = argparse.ArgumentParser(description='test groomers',
                                     prog=os.path.basename(__file__))
    parser.add_argument('-o', '--output-filename', default="centrality_output.root", type=str)
    parser.add_argument('datalist', help='run through a file list', default='', type=str)
    parser.add_argument('--overwrite', help="overwrite output", default=False, action='store_true')
    parser.add_argument('--nev', help='number of events to run', default=0, type=int)
    parser.add_argument('--max-eta', help='max eta for particles', default=0.9, type=float)
    parser.add_argument('--thermal', help='enable thermal generator', action='store_true', default=False)
    parser.add_argument('--thermal-default', help='enable thermal generator', action='store_true', default=False)
    parser.add_argument('--particles', help='stream particles', action='store_true', default=False)
    parser.add_argument('--npart-cut', help='npart cut on centrality low,high hint:' + npart_cents,
                        default='325,450', type=str)
    parser.add_argument('--nch-cut', help='nch cut on centrality low,high hint:' + nch_cents,
                        default='18467,50000', type=str)
    args = parser.parse_args()

    try:
        npart_min = int(args.npart_cut.split(',')[0])
        npart_max = int(args.npart_cut.split(',')[1])
    except Exception:
        perror('unable to parse npart centrality selection - two integer numbers with a comma in-between needed - specified:',
               args.npart_cut)
        return 1

    try:
        nch_min = int(args.nch_cut.split(',')[0])
        nch_max = int(args.nch_cut.split(',')[1])
    except Exception:
        perror('unable to parse nch centrality selection - two integer numbers with a comma in-between needed - specified:',
               args.nch_cut)
        return 1

    outf = ROOT.TFile(args.output_filename, 'recreate')
    outf.cd()
    t = ROOT.TTree('t', 't')
    tw = RTreeWriter(tree=t)
    hpt_antyr = ROOT.TH1F('hpt_antyr', 'hpt_antyr', 100, 0, 100)
    hpt_antyr_c = ROOT.TH1F('hpt_antyr_c', 'hpt_antyr_c', 100, 0, 100)
    hpt_therm = ROOT.TH1F('hpt_therm', 'hpt_therm', 100, 0, 100)
    hpt_therm_c = ROOT.TH1F('hpt_therm_c', 'hpt_therm_c', 100, 0, 100)

    data = DataIO(name='Sim Pythia Detector level', file_list=args.datalist,
                  random_file_order=False, tree_name='tree_Particle_gen')
    dndeta_selector = fj.SelectorAbsEtaMax(abs(args.max_eta)) & fj.SelectorPtMin(0.15)

    tg_default = None
    if args.thermal_default:
        tg_default = thg.ThermalGenerator()
        print(tg_default)
    tg_central = None
    if args.thermal:
        tg_central = thg.ThermalGenerator(beta=0.5, N_avg=3000, sigma_N=500)
        print(tg_central)

    delta_t = 0
    start_t = time.time()
    iev = 1
    while data.load_event(offset=0):
        iev = iev + 1
        if args.nev > 0:
            if iev > args.nev:
                iev = iev - 1
                break
        if iev % 1000 == 0:
            delta_t = time.time() - start_t
            pinfo('processing event', iev, ' - ev/sec =', iev / delta_t, 'elapsed =', delta_t)

        # find jets on detector level
        if len(data.particles) < 1:
            pwarning(iev, 'pp event skipped N parts', len(data.particles))
            continue
        # print(data.event)
        dndeta0_parts = dndeta_selector(data.particles)
        dndeta0 = len(dndeta0_parts) / (abs(args.max_eta * 2.))
        [hpt_antyr.Fill(p.perp()) for p in dndeta0_parts]
        if args.particles:
            tw.fill_branches(dndeta=dndeta0, p=data.particles)
        else:
            tw.fill_branches(dndeta=dndeta0)
        tw.fill_branches_attribs(data.event, ['sigma', 'npart', 'nch', 'nchfwd', 'nchselect'],
                                 prefix='antyr_')
        if data.event.npart < npart_min or data.event.npart >= npart_max:
            tw.fill_branches(cent10npart=0)
        else:
            tw.fill_branches(cent10npart=1)
            [hpt_antyr_c.Fill(p.perp()) for p in dndeta0_parts]
        if data.event.nch < nch_min or data.event.nch >= nch_max:
            tw.fill_branches(cent10nch=0)
        else:
            tw.fill_branches(cent10nch=1)
        if tg_default:
            thg_particles = tg_default.load_event()
            dndetathg_default = dndeta_selector(thg_particles)
            if args.particles:
                tw.fill_branches(dndeta_thg_0=len(dndetathg_default) / (abs(args.max_eta * 2.)),
                                 p_thg_0=thg_particles)
            else:
                tw.fill_branches(dndeta_thg_0=len(dndetathg_default) / (abs(args.max_eta * 2.)))
        if tg_central:
            thg_parts_central = tg_central.load_event()
            dndetathg_central = dndeta_selector(thg_parts_central)
            [hpt_therm_c.Fill(p.perp()) for p in dndetathg_central]
            if args.particles:
                tw.fill_branches(dndeta_thg_c=len(dndetathg_central) / (abs(args.max_eta * 2.)),
                                 p_thg_c=thg_parts_central)
            else:
                tw.fill_branches(dndeta_thg_c=len(dndetathg_central) / (abs(args.max_eta * 2.)))
        tw.fill_tree()

    delta_t = time.time() - start_t
    pinfo('processed events', iev, ' - ev/sec =', iev / delta_t, 'elapsed =', delta_t)
    outf.Write()
    outf.Close()