Пример #1
0
    def analysis(self, df):
        """Fill D0-candidate and D0-jet trees for one event.

        df: per-event DataFrame of D0 candidates with columns pt_cand,
        eta_cand, phi_cand, inv_mass and prong0/prong1 kinematics.
        Charged tracks are read from self.df_tracks. Writes all candidates
        to self.tw and per-jet candidate observables to self.twjc.
        Returns True.
        """

        # matcher pairs charged tracks with D0 candidates and their daughters
        djmm = fjtools.DJetMatchMaker()
        djmm.set_ch_pt_eta_phi(self.df_tracks['ParticlePt'].values,
                               self.df_tracks['ParticleEta'].values,
                               self.df_tracks['ParticlePhi'].values)
        djmm.set_Ds_pt_eta_phi_m(df['pt_cand'].values, df['eta_cand'].values,
                                 df['phi_cand'].values, df['inv_mass'].values)
        djmm.set_daughters0_pt_eta_phi(df['pt_prong0'].values,
                                       df['eta_prong0'].values,
                                       df['phi_prong0'].values)
        djmm.set_daughters1_pt_eta_phi(df['pt_prong1'].values,
                                       df['eta_prong1'].values,
                                       df['phi_prong1'].values)

        # record all D0 candidates of this event
        self.tw.fill_branches(dpsj=djmm.Ds)
        self.tw.fill_tree()

        for id0, d0 in enumerate(djmm.Ds):
            # presumably returns the event tracks with this candidate's
            # daughters matched/removed (match radius 0.005) -- TODO confirm
            # against DJetMatchMaker documentation
            _parts_and_ds = djmm.match(0.005, id0)
            _parts_and_ds.push_back(d0)
            ja = jet_analysis.JetAnalysis(jet_R=0.4,
                                          particle_eta_max=0.9,
                                          jet_pt_min=2.0)
            ja.analyze_event(_parts_and_ds)
            if len(ja.jets) < 1:
                continue
            jets = ja.jets_as_psj_vector()
            # keep only jets that contain the D0 candidate
            djets = djmm.filter_D0_jets(jets)
            if len(djets) > 0:
                j = djets[0]
                dcand = djmm.get_Dcand_in_jet(j)
                # recluster the jet constituents into R=0.1 subjets
                sja = jet_analysis.JetAnalysis(jet_R=0.1,
                                               particle_eta_max=0.9,
                                               jet_pt_min=2.0)
                sja.analyze_event(j.constituents())
                lsj = fj.sorted_by_pt(sja.jets_as_psj_vector())
                if len(lsj) < 1:
                    continue
                sj_dcand = djmm.get_Dcand_in_jet(lsj[0])
                # flag whether the leading subjet carries the D0 candidate
                is_Dsj = 0
                if len(sj_dcand) > 0:
                    # if sj_dcand[0].m() == dcand[0].m() and sj_dcand[0].perp() == dcand[0].perp():
                    if sj_dcand[0].delta_R(dcand[0]) == 0.0:
                        is_Dsj = 1
                # angularities a10..a30 with varying exponents for jet R=0.4
                self.twjc.fill_branches(jet=j,
                                        dR=j.delta_R(dcand[0]),
                                        D=dcand[0],
                                        lsj=lsj[0],
                                        Dsj=is_Dsj,
                                        a10=fjext.angularity(j, 1.0, 0.4),
                                        a15=fjext.angularity(j, 0.5, 0.4),
                                        a20=fjext.angularity(j, 0.0, 0.4),
                                        a30=fjext.angularity(j, -1.0, 0.4))
                self.twjc.fill_tree()

            if len(djets) > 1:
                perror("more than one jet per D candidate?")

        return True
Пример #2
0
	def load_file(self, path):
		"""Load the event and track trees from the ROOT file at `path`.

		Builds self.track_df_grouped, the track DataFrame (merged with the
		accepted-event info) grouped by (run_number, ev_id).
		Returns True on success, False on a read error, None if the file
		does not exist.
		"""
		if not os.path.exists(path):
			pwarning('[w] file', path, 'does not exists.')
			return
		try:
			event_tree = uproot.open(path)[self.event_tree_name]
		except Exception:
			pwarning('error getting', self.event_tree_name, 'from file:', path)
			return False
		if not event_tree:
			perror('Tree {} not found in file {}'.format(self.event_tree_name, path))
			return False
		event_df_orig = event_tree.pandas.df(['run_number', 'ev_id', 'z_vtx_reco','is_ev_rej'])
		# reset_index returns a new frame; reassign so the reset takes effect
		event_df_orig = event_df_orig.reset_index(drop=True)
		# keep only accepted events
		event_df = event_df_orig.query('is_ev_rej == 0')
		event_df = event_df.reset_index(drop=True)
		# Load track tree into dataframe
		try:
			track_tree = uproot.open(path)[self.tree_name]
		except Exception:
			pwarning('error getting', self.tree_name, 'from file:', path)
			return False
		if not track_tree:
			# bug fix: was bare `tree_name` (NameError); use the attribute
			perror('Tree {} not found in file {}'.format(self.tree_name, path))
			return False
		track_df_orig = track_tree.pandas.df()
		# Merge event info into track tree
		track_df = pd.merge(track_df_orig, event_df, on=['run_number', 'ev_id'])
		self.track_df_grouped = track_df.groupby(['run_number','ev_id'])
		# (ii) Transform the DataFrameGroupBy object to a SeriesGroupBy of fastjet particles
		return True
Пример #3
0
 def pd_tree(self, path, tname, squery=None):
     """Read tree `tname` from the ROOT file at `path` into a DataFrame.

     squery: optional pandas query string applied to the result.
     Returns the DataFrame, or None when the tree cannot be read.
     """
     try:
         tree = uproot.open(path)[tname]
     except Exception:
         pwarning('error getting', tname, 'from file:', path)
         return None
     if not tree:
         perror('Tree {} not found in file {}'.format(tname, path))
         return None
     df = tree.pandas.df()
     if squery:
         df = df.query(squery)
         # reset_index returns a new frame; reassign so the reset takes effect
         df = df.reset_index(drop=True)
     return df
Пример #4
0
    def pd_tree(self, path, tname, squery=None):
        """Read tree `tname` from the ROOT file at `path` into a DataFrame.

        Uses the uproot4 `concatenate` interface with the pandas library
        backend. squery: optional pandas query string applied to the result.
        Returns the DataFrame, or None when the tree cannot be read.
        """
        try:
            tree = uproot.open(path)[tname]
        except Exception:
            pwarning('error getting', tname, 'from file:', path)
            return None
        if not tree:
            perror('Tree {} not found in file {}'.format(tname, path))
            return None
        df = uproot.concatenate(tree, library="pd")

        if squery:
            #df.query(squery, inplace=True)
            df = df.query(squery)
            # reset_index returns a new frame; reassign so the reset takes effect
            df = df.reset_index(drop=True)
        return df
Пример #5
0
 def execute_analyses_on_file_list(self, file_list, nfiles=0):
     """Run the configured analyses over every file named in `file_list`.

     file_list: text file with one input path per line.
     nfiles: if > 0, only the first `nfiles` entries are processed.
     """
     print()
     if not os.path.exists(file_list):
         perror('file list does not exist', file_list)
         pinfo('done.')
         return
     with open(file_list) as flist:
         file_names = [line.strip('\n') for line in flist.readlines()]
     if int(nfiles) > 0:
         file_names = file_names[:nfiles]
     # announce the work list up front ...
     for fname in file_names:
         pinfo('+file:', fname)
     # ... then process each file with a progress bar
     for fname in tqdm.tqdm(file_names):
         pinfo('file:', fname)
         if self.load_file(fname):
             self.execute_analyses()
     pinfo('done.')
Пример #6
0
    def analysis_gen(self, df):
        """Generator-level counterpart of the reco analysis: build D0 jets
        from generated tracks and fill self.twjc_gen.

        df: per-event DataFrame of generated D0 candidates (pt_cand,
        eta_cand, phi_cand, cand_type). Tracks come from
        self.df_gen_tracks. Returns True.
        """

        # assign the charged-pion mass (0.1396 GeV/c^2) to every track
        m_gen_array = np.full((self.df_gen_tracks['ParticlePt'].values.size),
                              0.1396)

        djmm_gen = fjtools.DJetMatchMaker()
        djmm_gen.set_ch_pt_eta_phi_m(self.df_gen_tracks['ParticlePt'].values,
                                     self.df_gen_tracks['ParticleEta'].values,
                                     self.df_gen_tracks['ParticlePhi'].values,
                                     m_gen_array)
        djmm_gen.set_Ds_pt_eta_phi(df['pt_cand'].values, df['eta_cand'].values,
                                   df['phi_cand'].values)

        for id0, d0 in enumerate(djmm_gen.Ds):
            # NOTE(review): this binds a reference to the shared track
            # container rather than a copy; if push_back mutates djmm_gen.ch
            # in place, every later iteration also sees the candidates
            # appended before it. The reco-level analysis uses
            # djmm.match(...), which returns a fresh vector -- confirm
            # whether a copy is intended here.
            _parts_and_ds = djmm_gen.ch
            _parts_and_ds.push_back(d0)
            ja_gen = jet_analysis.JetAnalysis(
                jet_R=0.4,
                jet_RecombinationScheme=fj.E_scheme,
                particle_eta_max=0.9,
                jet_pt_min=5.0)
            ja_gen.analyze_event(_parts_and_ds)
            if len(ja_gen.jets) < 1:
                continue

            jets = ja_gen.jets_as_psj_vector()
            # keep only jets that contain the D0 candidate
            djets = djmm_gen.filter_D0_jets(jets)

            if len(djets) > 0:
                j = djets[0]
                dcand = djmm_gen.get_Dcand_in_jet(j)
                D_cand_type = df['cand_type'].values
                self.twjc_gen.fill_branches(jet=j,
                                            dR=j.delta_R(dcand[0]),
                                            D=dcand[0],
                                            Dmeson_cand_type=float(
                                                D_cand_type[id0]))
                self.twjc_gen.fill_tree()

            if len(djets) > 1:
                perror("more than one jet per D candidate?")

        return True
Пример #7
0
	def execute_analyses_on_file_list(self, file_list, nfiles=0):
		"""Run the configured analyses over every file named in `file_list`.

		file_list: text file with one input path per line.
		nfiles: if > 0, only the first `nfiles` entries are processed.
		Hooks each analysis' callback to self.update_status so progress is
		reported through a secondary tqdm bar.
		"""
		# secondary progress bar driven by the analyses' status callbacks
		self.pbar2 = tqdm.tqdm(mininterval=20, maxinterval=60)
		self.pbar2_mark = None
		for a in self.analyses:
			a.callback = self.update_status
		print()
		if os.path.exists(file_list):
			with open(file_list) as f:
				files = f.readlines()
			if int(nfiles) > 0:
				files = files[:nfiles]
			# announce the work list first, then process with a progress bar
			for f in files:
				fn = f.strip('\n')
				pinfo('+file:', fn)
			for f in tqdm.tqdm(files):
				fn = f.strip('\n')
				if self.load_file(fn):
					self.execute_analyses()
			# NOTE(review): pbar2 is only closed on this branch; it remains
			# open when file_list does not exist
			self.pbar2.close()
		else:
			perror('file list does not exist', file_list)
		pinfo('done.')
Пример #8
0
    def analysis(self, df):
        """Fill the D0-jet tree for one event, including WTA-axis observables.

        df: per-event DataFrame of D0 candidates (pt_cand, eta_cand,
        phi_cand, inv_mass, prong kinematics, and topological selection
        variables). Charged tracks come from self.df_tracks; output is
        written to self.twjc. Returns True.
        """
        #print(df)

        # assign the charged-pion mass (0.1396 GeV/c^2) to every track
        m_array = np.full((self.df_tracks['ParticlePt'].values.size), 0.1396)

        djmm = fjtools.DJetMatchMaker()
        djmm.set_ch_pt_eta_phi_m(self.df_tracks['ParticlePt'].values,
                                 self.df_tracks['ParticleEta'].values,
                                 self.df_tracks['ParticlePhi'].values, m_array)
        djmm.set_Ds_pt_eta_phi_m(df['pt_cand'].values, df['eta_cand'].values,
                                 df['phi_cand'].values, df['inv_mass'].values)
        djmm.set_daughters0_pt_eta_phi(df['pt_prong0'].values,
                                       df['eta_prong0'].values,
                                       df['phi_prong0'].values)
        djmm.set_daughters1_pt_eta_phi(df['pt_prong1'].values,
                                       df['eta_prong1'].values,
                                       df['phi_prong1'].values)

        #run for each D candidate to build jet
        for id0, d0 in enumerate(djmm.Ds):

            #daughter tracks matching
            _parts_and_ds = djmm.match(0.005, id0)
            #replacing daughter tracks with matched D0 candidate
            #including D0
            _parts_and_ds.push_back(d0)

            #jet reconstruction with D0 and charged particle
            jetR = 0.4
            ja = jet_analysis.JetAnalysis(jet_R=jetR,
                                          jet_RecombinationScheme=fj.E_scheme,
                                          particle_eta_max=0.9,
                                          jet_pt_min=5.0)
            ja.analyze_event(_parts_and_ds)
            if len(ja.jets) < 1:
                continue
            jets = ja.jets_as_psj_vector()

            #filtering D0 jets
            djets = djmm.filter_D0_jets(jets)

            if len(djets) > 0:
                j = djets[0]
                dcand = djmm.get_Dcand_in_jet(j)

                #number of constitutents > 1
                #if len(j.constituents())<=1:
                #	continue

                #jets with the winner take all axis################################
                # recluster with Cambridge/Aachen at R = 2*jetR using the
                # WTA pt recombination scheme to obtain the WTA jet axis
                jet_def_wta = fj.JetDefinition(fj.cambridge_algorithm,
                                               2 * jetR)
                jet_def_wta.set_recombination_scheme(fj.WTA_pt_scheme)
                #print('WTA jet definition is:', jet_def_wta)
                reclusterer_wta = fjcontrib.Recluster(jet_def_wta)
                jet_wta = reclusterer_wta.result(j)
                ################################

                # topological selection variables for the candidate at id0
                D_cosp = df['cos_p'].values
                D_cosTStar = df['cos_t_star'].values
                D_NormalisedDecayLength = df['norm_dl_xy'].values
                D_ImpactParameterProduct = df['imp_par_prod'].values
                D_cand_type = df['cand_type'].values

                self.twjc.fill_branches(
                    jet=j,
                    jetWta=jet_wta,
                    dR=j.delta_R(dcand[0]),
                    dRWTA=jet_wta.delta_R(dcand[0]),
                    D=dcand[0],
                    cos_p=float(D_cosp[id0]),
                    D_cos_t_star=float(D_cosTStar[id0]),
                    D_norm_dlxy=float(D_NormalisedDecayLength[id0]),
                    D_imp_par_prod=float(D_ImpactParameterProduct[id0]),
                    Dmeson_cand_type=float(D_cand_type[id0]))
                self.twjc.fill_tree()

            if len(djets) > 1:
                perror("more than one jet per D candidate?")

        return True
Пример #9
0
def main():
    """Generate pythia8 events on the fly and write groomed-jet observables.

    Clusters charged final-state particles into anti-kt R=0.4 jets in a
    hard-coded 80-100 GeV pt window, applies SoftDrop, dynamical grooming
    and Lund-plane analyses plus R=0.1/0.2 anti-kt subjet reclustering,
    and writes one tree entry per jet to leadsj_vs_x_bias80.root.
    """
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    parser.add_argument('--ignore-mycfg',
                        help="ignore some settings hardcoded here",
                        default=False,
                        action='store_true')

    args = parser.parse_args()

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    jet_R0 = 0.4
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    jet_selector = fj.SelectorPtMin(80.0) & fj.SelectorPtMax(
        100.0) & fj.SelectorAbsEtaMax(1)
    # jet_selector = fj.SelectorPtMin(40.0) & fj.SelectorPtMax(200.0) &fj.SelectorAbsEtaMax(1)
    print(jet_def)

    all_jets = []

    # bias the hard-process generation to match the jet pt window above
    mycfg = ['PhaseSpace:pThatMin = 80']
    # mycfg = ['PhaseSpace:pThatMin = 40']
    if args.ignore_mycfg:
        mycfg = []
    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if not pythia:
        perror("pythia initialization failed.")
        return

    # Lund-plane generator and dynamical groomer on a CA R=1.0 reclustering
    jet_def_lund = fj.JetDefinition(fj.cambridge_algorithm, 1.0)
    lund_gen = fjcontrib.LundGenerator(jet_def_lund)
    print(lund_gen.description())
    dy_groomer = fjcontrib.DynamicalGroomer(jet_def_lund)
    print(dy_groomer.description())
    # sd = fjcontrib.SoftDrop(0, 0.1, 1.0)
    sd = fjcontrib.SoftDrop(0, 0.2, 1.0)
    print(sd)

    # jet_def_rc01 = fj.JetDefinition(fj.cambridge_algorithm, 0.1)
    # jet_def_rc02 = fj.JetDefinition(fj.cambridge_algorithm, 0.2)
    # print (jet_def_rc01)
    # print (jet_def_rc02)
    # rc = fjcontrib.Recluster(jet_def_rc, True)

    # anti-kt definitions used to recluster jet constituents into subjets
    jet_def_rc01 = fj.JetDefinition(fj.antikt_algorithm, 0.1)
    jet_def_rc02 = fj.JetDefinition(fj.antikt_algorithm, 0.2)
    print(jet_def_rc01)
    print(jet_def_rc02)
    #rc = fjcontrib.Recluster(jet_def_rc, True)

    # tw = treewriter.RTreeWriter(name = 'lsjvsx', file_name = 'leadsj_vs_x.root')
    tw = treewriter.RTreeWriter(name='lsjvsx',
                                file_name='leadsj_vs_x_bias80.root')

    # enforce a minimum number of generated events
    if args.nev < 100:
        args.nev = 100
    for i in tqdm.tqdm(range(args.nev)):
        if not pythia.next():
            continue
        # parts = pythiafjext.vectorize(pythia, True, -1, 1, False)
        partons = pythiafjext.vectorize_select(pythia, [pythiafjext.kParton],
                                               0, True)
        parts = pythiafjext.vectorize_select(
            pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, False)
        # parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, False)
        jets = jet_selector(jet_def(parts))

        # for j in tqdm.tqdm(jets):
        for j in jets:
            # label the jet by nearby partons within R0/2
            j_type = match_dR(j, partons, jet_R0 / 2.)
            # NOTE(review): unlike the background-enabled variant of this
            # script, j_type[0] is not checked for None here -- confirm
            # match_dR always yields a usable label at this pt selection
            j_sd = sd.result(j)
            sd_info = fjcontrib.get_SD_jet_info(j_sd)
            rc_sjets01 = fj.sorted_by_pt(jet_def_rc01(j.constituents()))
            rc_sjets02 = fj.sorted_by_pt(jet_def_rc02(j.constituents()))
            tw.fill_branches(
                j=j,
                lund=[ls for ls in lund_gen.result(j)],
                dyg1=dy_groomer.result(j, 1),
                sd=j_sd,
                sd_z=sd_info.z,
                sd_mu=sd_info.mu,
                sd_Delta=sd_info.dR,
                lsjet01=rc_sjets01[0],
                nsjet01=len(rc_sjets01),
                sjet01=rc_sjets01,
                lsjet02=rc_sjets02[0],
                nsjet02=len(rc_sjets02),
                sjet02=rc_sjets02,
                ppid=j_type[0],
                pquark=j_type[1],
                pglue=j_type[2]  # this is redundancy
            )
            tw.fill_tree()

    pythia.stat()

    tw.write_and_close()
Пример #10
0
def main():
	"""Pythia8 + FastJet on-the-fly generation with optional thermal background.

	Clusters charged final-state particles into anti-kt R=0.4 jets, labels
	them by the nearest parton, applies Lund-plane, dynamical-grooming and
	two SoftDrop settings plus R=0.1/0.2 subjet reclustering, and writes one
	tree entry per jet. With --enable-background, each jet is also embedded
	into a Boltzmann thermal event and constituent subtraction is run.
	"""
	parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly', prog=os.path.basename(__file__))
	pyconf.add_standard_pythia_args(parser)
	parser.add_argument('--nw', help="no warn", default=False, action='store_true')
	parser.add_argument('--ignore-mycfg', help="ignore some settings hardcoded here", default=False, action='store_true')
	parser.add_argument('--enable-background', help="enable background calc", default=False, action='store_true')
	parser.add_argument('--output', help="output file name", default='leadsj_vs_x_output.root', type=str)

	# for background
	parser.add_argument('--cent-bin', help="centraility bin 0 is the  0-5 percent most central bin", type=int, default=0)
	parser.add_argument('--seed', help="pr gen seed", type=int, default=1111)
	parser.add_argument('--harmonics', help="set harmonics flag (0 : v1 - v5) , (1 : v2 - v5) , (2: v3 - v5) , (3: v1 - v4) , (4: v1 - v3) , (5: uniform dN/dphi no harmonics) , (6 : v1 - v2 , v4 - v5) , (7 : v1 - v3 , v5) , (8 : v1 , v3 - v5) , (9 : v1 only) , (10 : v2 only) , (11 : v3 only) , (12 : v4 only) , (13 : v5 only)", 
						type=int, default=5)
	parser.add_argument('--eta', help="set eta range must be uniform (e.g. abs(eta) < 0.9, which is ALICE TPC fiducial acceptance)",
						type=float, default=0.9)
	parser.add_argument('--qa', help="PrintOutQAHistos", default=False, action='store_true')

	parser.add_argument('--dRmax', default=0.25, type=float)
	parser.add_argument('--alpha', default=0, type=float)

	args = parser.parse_args()

	# print the banner first
	fj.ClusterSequence.print_banner()
	print()
	# set up our jet definition and a jet selector; keep jets fully inside
	# the tracking acceptance (|eta_jet| < eta - R)
	jet_R0 = 0.4
	jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
	jet_selector = fj.SelectorPtMin(args.py_pthatmin) & fj.SelectorPtMax(1000.0) & fj.SelectorAbsEtaMax(args.eta - jet_R0)
	print(jet_def)

	mycfg = ['']	
	if args.ignore_mycfg:
		mycfg = []
	pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
	if not pythia:
		perror("pythia initialization failed.")
		return

	# Lund-plane generator and dynamical groomer on a CA R=1.0 reclustering
	jet_def_lund = fj.JetDefinition(fj.cambridge_algorithm, 1.0)
	lund_gen = fjcontrib.LundGenerator(jet_def_lund)
	print (lund_gen.description())
	dy_groomer = fjcontrib.DynamicalGroomer(jet_def_lund)
	print (dy_groomer.description())

	# SoftDrop with beta=0 and two zcut settings
	sd01 = fjcontrib.SoftDrop(0, 0.1, 1.0)
	print (sd01)
	sd02 = fjcontrib.SoftDrop(0, 0.2, 1.0)
	print (sd02)

	# anti-kt definitions used to recluster jet constituents into subjets
	jet_def_rc01 = fj.JetDefinition(fj.antikt_algorithm, 0.1)
	jet_def_rc02 = fj.JetDefinition(fj.antikt_algorithm, 0.2)
	print (jet_def_rc01)
	print (jet_def_rc02)

	tw = treewriter.RTreeWriter(name = 'lsjvsx', file_name = args.output)

	be = None
	cs = None
	if args.enable_background:
		from pyjetty.mputils import BoltzmannEvent
		# bug fix: was max_eta=max_eta (undefined name); use the --eta argument
		be = BoltzmannEvent(mean_pt=0.7, multiplicity=2000 * args.eta * 2, max_eta=args.eta, max_pt=100)
		print(be)

		from pyjetty.mputils import CEventSubtractor, CSubtractorJetByJet
		cs = CEventSubtractor(alpha=args.alpha, max_distance=args.dRmax, max_eta=args.eta, bge_rho_grid_size=0.25, max_pt_correct=100)
		print(cs)

	# enforce a minimum number of generated events
	if args.nev < 100:
		args.nev = 100
	for i in tqdm.tqdm(range(args.nev)):
		if not pythia.next():
			continue
		partons = pythiafjext.vectorize_select(pythia, [pythiafjext.kParton], 0, True)
		parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, False)
		jets = jet_selector(jet_def(parts))

		for j in jets:
			# label the jet by nearby partons within R0/2
			j_type = match_dR(j, partons, jet_R0 / 2.)
			if j_type[0] is None:
				if args.nw:
					continue
				pwarning('Jet with no parton label')
				continue

			j_sd02 = sd02.result(j)
			sd02_info = fjcontrib.get_SD_jet_info(j_sd02)
			j_sd01 = sd01.result(j)
			sd01_info = fjcontrib.get_SD_jet_info(j_sd01)

			rc_sjets01 = fj.sorted_by_pt(jet_def_rc01(j.constituents()))
			rc_sjets02 = fj.sorted_by_pt(jet_def_rc02(j.constituents()))
			tw.fill_branches(
				j=j,
				lund=[ls for ls in lund_gen.result(j)],
				dyg1=dy_groomer.result(j, 1),

				sd01=j_sd01,
				sd01_z=sd01_info.z,
				sd01_mu=sd01_info.mu,
				sd01_Delta=sd01_info.dR,

				sd02=j_sd02,
				sd02_z=sd02_info.z,
				sd02_mu=sd02_info.mu,
				sd02_Delta=sd02_info.dR,

				lsjet01=rc_sjets01[0],
				nsjet01=len(rc_sjets01),
				sjet01=rc_sjets01,
				lsjet02=rc_sjets02[0],
				nsjet02=len(rc_sjets02),
				sjet02=rc_sjets02,

				ppid=j_type[0],
				pquark=j_type[1],
				pglue=j_type[2],  # this is redundancy

				pycode=pythia.info.code(),
				pysigmagen=pythia.info.sigmaGen(),
				pysigmaerr=pythia.info.sigmaErr(),
				pyid1=pythia.info.id1pdf(),
				# bug fix: pyid2 previously read id1pdf() (copy-paste); use id2pdf()
				pyid2=pythia.info.id2pdf(),
				pyx1=pythia.info.x1pdf(),
				pyx2=pythia.info.x2pdf(),
				pypdf1=pythia.info.pdf1(),
				pyQfac=pythia.info.QFac(),
				pyalphaS=pythia.info.alphaS(),

				pypthat=pythia.info.pTHat(),
				pymhat=pythia.info.mHat())
			if be:
				# embed the pythia jet into a thermal background event and
				# run constituent subtraction on the combined event
				bg_parts = be.generate(offset=10000)
				full_event = bg_parts
				for psj in j.constituents():
					full_event.push_back(psj)
				if cs:
					cs_parts = cs.process_event(full_event)
					rho = cs.bge_rho.rho()
					bg_jets = fj.sorted_by_pt(jet_def(cs_parts))
					for bj in bg_jets:
						if fjtools.matched_pt(bj, j) > 0.5:
							pass

			tw.fill_tree()

	pythia.stat()

	tw.write_and_close()
Пример #11
0
def main():
    """Check a PWGHF tree file for duplicate event and track entries.

    Reads the event and particle trees from the file given on the command
    line, merges them on (run_number, ev_id), and reports duplicates both
    by full-row comparison and by a per-particle kinematic key.
    Returns False on a missing tree; otherwise None.
    """
    parser = argparse.ArgumentParser(description='test duplicate entries',
                                     prog=os.path.basename(__file__))
    parser.add_argument('fname', help='input file', default='', type=str)
    args = parser.parse_args()

    event_tree_name = 'PWGHF_TreeCreator/tree_event_char'
    event_tree = uproot.open(args.fname)[event_tree_name]
    if not event_tree:
        perror('Tree {} not found in file {}'.format(event_tree_name,
                                                     args.fname))
        return False

    pinfo(args.fname)
    event_df_orig = event_tree.pandas.df()
    len_event_df_orig = len(event_df_orig)

    # keep only accepted events; reset_index returns a new frame, so reassign
    df_event_accepted = event_df_orig.query('is_ev_rej == 0')
    df_event_accepted = df_event_accepted.reset_index(drop=True)
    len_event_df_accepted = len(df_event_accepted)

    event_df_nodup = df_event_accepted.drop_duplicates()
    len_event_df_nodup = len(event_df_nodup)

    if len_event_df_accepted != len_event_df_nodup:
        perror('original event length:', len_event_df_orig, 'accepted:',
               len_event_df_accepted, 'nodup:', len_event_df_nodup)
    else:
        pindent('original event length:', len_event_df_orig, 'accepted:',
                len_event_df_accepted, 'nodup:', len_event_df_nodup)

    track_tree_name = 'PWGHF_TreeCreator/tree_Particle'
    track_tree = uproot.open(args.fname)[track_tree_name]
    if not track_tree:
        # bug fix: was bare `tree_name` (NameError); use track_tree_name
        perror('Tree {} not found in file {}'.format(track_tree_name,
                                                     args.fname))
        return False
    track_df_orig = track_tree.pandas.df()
    track_df = pd.merge(track_df_orig,
                        event_df_nodup,
                        on=['run_number', 'ev_id'])
    len_track_df = len(track_df)
    track_df_nodup = track_df.drop_duplicates()
    len_track_df_nodup = len(track_df_nodup)
    if len_track_df_nodup < len_track_df:
        perror('track+event rows:', len_track_df, 'nodup:', len_track_df_nodup)
    else:
        pindent('track+event rows:', len_track_df, 'nodup:',
                len_track_df_nodup)
    # one group per (run, event): grouped count must not exceed event count
    track_df_grouped = track_df.groupby(['run_number', 'ev_id'])
    len_track_df_grouped = len(track_df_grouped)
    if len_track_df_grouped <= len_event_df_nodup:
        pindent('track+event length grouped:', len_track_df_grouped)
    else:
        perror('track+event length grouped:', len_track_df_grouped)

    # from James
    # Check if there are duplicated tracks in an event.
    duplicate_selection = [
        'run_number', 'ev_id', 'ParticlePt', 'ParticleEta', 'ParticlePhi'
    ]
    # if use_ev_id_ext:
    # duplicate_selection.append('ev_id_ext')
    duplicate_rows_df = track_df.duplicated(duplicate_selection)
    # Series.items() replaces iteritems(), which was removed in pandas 2.0
    for i, row in duplicate_rows_df.items():
        if row:
            print(i, row)
    n_duplicates = sum(duplicate_rows_df)
    pindent('2nd pass: using duplicate selection ', duplicate_selection)
    if n_duplicates > 0:
        perror(
            '2nd pass: there appear to be {} duplicate particles in the dataframe'
            .format(n_duplicates))
        perror('this is: {:.2} of all tracks'.format(n_duplicates /
                                                     len_track_df))
        track_df_nodup = track_df.drop_duplicates(duplicate_selection,
                                                  inplace=False)
        pwarning('new count rows for particles:', len(track_df_nodup),
                 'old count:', len_track_df)
    else:
        pindent('no duplicate particles found')
Пример #12
0
	def process_d0s(self, df):
		"""Build D0 jets for the candidates of a single event.

		df: DataFrame of D0 candidates belonging to one event (first row's
		run_number/ev_id identify it). Tracks are selected from
		self.track_df; candidates go to self.tw, jet observables to
		self.twjc. Returns True (None if the event has no candidates).
		"""
		self.pbar.update(1)
		_n_d0s = len(df)
		if _n_d0s < 1:
			return
		# select this event's tracks; use ev_id_ext when the event tree has it
		if 'ev_id_ext' in list(self.event_df):
			_ev_query = "run_number == {} & ev_id == {} & ev_id_ext == {}".format(df['run_number'].values[0], df['ev_id'].values[0], df['ev_id_ext'].values[0])
		else:
			_ev_query = "run_number == {} & ev_id == {}".format(df['run_number'].values[0], df['ev_id'].values[0])
		_df_tracks = self.track_df.query(_ev_query)
		# reset_index returns a new frame; reassign so the reset takes effect
		_df_tracks = _df_tracks.reset_index(drop=True)

		djmm = fjtools.DJetMatchMaker()
		djmm.set_ch_pt_eta_phi(_df_tracks['ParticlePt'].values, _df_tracks['ParticleEta'].values, _df_tracks['ParticlePhi'].values)
		djmm.set_Ds_pt_eta_phi_m(df['pt_cand'].values, df['eta_cand'].values, df['phi_cand'].values, df['inv_mass'].values)
		djmm.set_daughters0_pt_eta_phi(df['pt_prong0'].values, df['eta_prong0'].values, df['phi_prong0'].values)
		djmm.set_daughters1_pt_eta_phi(df['pt_prong1'].values, df['eta_prong1'].values, df['phi_prong1'].values)

		# record all D0 candidates of this event
		self.tw.fill_branches(dpsj = djmm.Ds)
		self.tw.fill_tree()

		for id0, d0 in enumerate(djmm.Ds):
			_parts_and_ds = djmm.match(0.005, id0)
			_parts_and_ds.push_back(d0)
			ja = jet_analysis.JetAnalysis(jet_R = 0.4, particle_eta_max=0.9, jet_pt_min=2.0)
			ja.analyze_event(_parts_and_ds)
			if len(ja.jets) < 1:
				continue
			jets = ja.jets_as_psj_vector()
			# keep only jets that contain the D0 candidate
			djets = djmm.filter_D0_jets(jets)
			if len(djets) > 0:
				j = djets[0]
				dcand = djmm.get_Dcand_in_jet(j)

				# recluster the jet constituents into R=0.1 subjets
				sja = jet_analysis.JetAnalysis(jet_R = 0.1, particle_eta_max=0.9, jet_pt_min=2.0)
				sja.analyze_event(j.constituents())
				lsj = fj.sorted_by_pt(sja.jets_as_psj_vector())
				# bug fix: guard against events with no subjet above threshold
				# (lsj[0] would raise IndexError); mirrors the sibling analysis()
				if len(lsj) < 1:
					continue
				sj_dcand = djmm.get_Dcand_in_jet(lsj[0])
				# flag whether the leading subjet carries the D0 candidate
				is_Dsj = 0
				if len(sj_dcand) > 0:
					# if sj_dcand[0].m() == dcand[0].m() and sj_dcand[0].perp() == dcand[0].perp():
					if sj_dcand[0].delta_R(dcand[0]) == 0.0:
						is_Dsj = 1
				self.twjc.fill_branches(jet = j, dR = j.delta_R(dcand[0]), D = dcand[0], lsj = lsj[0], Dsj = is_Dsj
										, a10 = fjext.angularity(j,  1.0, 0.4)
										, a15 = fjext.angularity(j,  0.5, 0.4)
										, a20 = fjext.angularity(j,  0.0, 0.4)
										, a30 = fjext.angularity(j, -1.0, 0.4))
				self.twjc.fill_tree()
			if len(djets) > 1:
				perror("more than one jet per D candidate?")

		return True
Пример #13
0
 def load_file(self, path):
     """Load event, generator-level D0, and reconstructed D0 trees from `path`.

     Builds self.d0_gen and self.track_df_grouped, both grouped by
     (run_number, ev_id) and restricted to events present in both the
     generated and reconstructed samples. Returns True on success, False
     on a read error, None if the file does not exist.
     """
     if not os.path.exists(path):
         pwarning('[w] file', path, 'does not exists.')
         return
     try:
         event_tree = uproot.open(path)[self.event_tree_name]
     except Exception:
         pwarning('error getting', self.event_tree_name, 'from file:', path)
         return False
     if not event_tree:
         perror('Tree {} not found in file {}'.format(
             self.event_tree_name, path))
         return False
     event_df_orig = event_tree.pandas.df(
         ['run_number', 'ev_id', 'z_vtx_reco', 'is_ev_rej'])
     # reset_index returns a new frame; reassign so the reset takes effect
     event_df_orig = event_df_orig.reset_index(drop=True)
     # keep only accepted events
     event_df = event_df_orig.query('is_ev_rej == 0')
     event_df = event_df.reset_index(drop=True)
     # Load gen tree into df
     try:
         gen_tree = uproot.open(path)[self.gen_tree_name]
     except Exception:
         pwarning('error getting', self.gen_tree_name, 'from file:', path)
         return False
     if not gen_tree:
         # bug fix: was bare `gen_tree_name` (NameError); use the attribute
         perror('Tree {} not found in file {}'.format(self.gen_tree_name, path))
         return False
     gen_df_orig = gen_tree.pandas.df(
         ['run_number', 'ev_id', 'pt_cand', 'eta_cand', 'cand_type'])
     gen_df_orig.sort_values(by=['run_number', 'ev_id'], inplace=True)
     df_genruns = gen_df_orig[['run_number', 'ev_id']].copy()
     # Load track tree into dataframe
     try:
         track_tree = uproot.open(path)[self.tree_name]
     except Exception:
         pwarning('error getting', self.tree_name, 'from file:', path)
         return False
     if not track_tree:
         # bug fix: was bare `tree_name` (NameError); use the attribute
         perror('Tree {} not found in file {}'.format(self.tree_name, path))
         return False
     track_df_orig = track_tree.pandas.df([
         'run_number', 'ev_id', 'inv_mass', 'pt_cand', 'pt_prong0',
         'pt_prong1', 'dca', 'cos_t_star', 'imp_par_prod', 'cos_p',
         'cand_type', 'imp_par_prong0', 'imp_par_prong1', 'norm_dl_xy',
         'eta_cand', 'nsigTPC_Pi_0', 'nsigTOF_Pi_0', 'nsigTPC_K_1',
         'nsigTOF_K_1', 'nsigTPC_Pi_1', 'nsigTOF_Pi_1', 'nsigTPC_K_0',
         'nsigTOF_K_0'
     ])
     # Merge event info into track tree
     track_df = pd.merge(track_df_orig,
                         event_df,
                         on=['run_number', 'ev_id'])
     track_df.sort_values(by=['run_number', 'ev_id'], inplace=True)
     # intersect the (run, event) keys of the reco and gen samples so both
     # groupbys below cover the same events
     df_d0runs = track_df[['run_number', 'ev_id']].copy()
     df_runs = pd.merge(df_d0runs, df_genruns, on=['run_number', 'ev_id'])
     df_runs.drop_duplicates(keep='first', inplace=True)
     gen_df_orig = pd.merge(gen_df_orig,
                            df_runs,
                            on=['run_number', 'ev_id'])
     track_df = pd.merge(track_df, df_runs, on=['run_number', 'ev_id'])
     self.d0_gen = gen_df_orig.groupby(['run_number', 'ev_id'])
     self.track_df_grouped = track_df.groupby(['run_number', 'ev_id'])
     return True
Пример #14
0
def main():
	"""Embed PYTHIA jets into a TennGen heavy-ion background and stream results.

	Generates hard PYTHIA events (pT-hat > 100 GeV/c unless --ignore-mycfg),
	overlays a TennGen thermal background, finds anti-kT R=0.4 jets with area
	determination in both the pure-PYTHIA and hybrid events, estimates the
	background density rho with a kT-jet median estimator, and writes
	particles, jets, rho-corrected jet pT and delta-pT into
	'test_TennGen.root' via an RTreeWriter.
	"""
	parser = argparse.ArgumentParser(description='test the TennGen', prog=os.path.basename(__file__))
	pyconf.add_standard_pythia_args(parser)
	parser.add_argument('--ignore-mycfg', help="ignore some settings hardcoded here", default=False, action='store_true')
	parser.add_argument('--cent-bin', help="centraility bin 0 is the  0-5 % most central bin", type=int, default=0)
	parser.add_argument('--seed', help="pr gen seed", type=int, default=1111)
	parser.add_argument('--harmonics', help="set harmonics flag (0 : v1 - v5) , (1 : v2 - v5) , (2: v3 - v5) , (3: v1 - v4) , (4: v1 - v3) , (5: uniform dN/dphi no harmonics) , (6 : v1 - v2 , v4 - v5) , (7 : v1 - v3 , v5) , (8 : v1 , v3 - v5) , (9 : v1 only) , (10 : v2 only) , (11 : v3 only) , (12 : v4 only) , (13 : v5 only)", 
						type=int, default=5)
	parser.add_argument('--eta', help="set eta range must be uniform (e.g. |eta| < 0.9, which is ALICE TPC fiducial acceptance)",
						type=float, default=0.9)
	parser.add_argument('--qa', help="PrintOutQAHistos", default=False, action='store_true')

	args = parser.parse_args()

	# force a hard pT-hat floor unless the user asked for their own config
	args.py_pthatmin = 100
	mycfg = ['PhaseSpace:pThatMin = {}'.format(args.py_pthatmin)]
	if args.ignore_mycfg:
		mycfg = []
	pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
	if not pythia:
		perror("pythia initialization failed.")
		return

	# configure the TennGen heavy-ion background generator
	tgbkg = ROOT.TennGen()  # constructor
	tgbkg.SetCentralityBin(args.cent_bin)  # centrality bin 0 is the 0-5% most central bin
	tgbkg.SetRandomSeed(args.seed)  # setting the seed
	tgbkg.SetHarmonics(args.harmonics)  # harmonics flag - see --harmonics help text
	tgbkg.SetEtaRange(args.eta)  # uniform eta range (e.g. |eta| < 0.9, ALICE TPC fiducial acceptance)
	tgbkg.PrintOutQAHistos(args.qa)
	tgbkg.InitializeBackground()

	jet_R0 = 0.4
	jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
	jet_selector_pythia = fj.SelectorPtMin(args.py_pthatmin) & fj.SelectorPtMax(1000.0) & fj.SelectorAbsEtaMax(args.eta - jet_R0)
	jet_selector_hybrid = fj.SelectorPtMin(10) & fj.SelectorPtMax(1000.0) & fj.SelectorAbsEtaMax(args.eta - jet_R0)
	parts_selector = fj.SelectorAbsEtaMax(args.eta)
	print(jet_def)

	# event-invariant area / background-estimator setup - hoisted out of the
	# event loop (the originals were re-created on every event)
	jet_area_def = fj.AreaDefinition(fj.active_area, fj.GhostedAreaSpec(args.eta))
	bg_rho_range = fj.SelectorAbsEtaMax(args.eta * 1.1)
	bg_jet_def = fj.JetDefinition(fj.kt_algorithm, jet_R0)
	# explicit ghosts required for the median background estimator
	bg_area_def = fj.AreaDefinition(fj.active_area_explicit_ghosts, fj.GhostedAreaSpec(args.eta))
	bg_estimator = fj.JetMedianBackgroundEstimator(bg_rho_range, bg_jet_def, bg_area_def)

	tw = treewriter.RTreeWriter(name = 'tparts', file_name = 'test_TennGen.root')

	if args.nev < 100:
		args.nev = 100
	pbar = tqdm.tqdm(total = args.nev)
	while pbar.n < args.nev:
		if not pythia.next():
			continue

		# pythia final-state particles within the eta acceptance
		_py_fj_parts = parts_selector(pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, False))

		# pythia jets with area determination
		cs = fj.ClusterSequenceArea(_py_fj_parts, jet_def, jet_area_def)
		pythia_jets = jet_selector_pythia(cs.inclusive_jets())
		if len(pythia_jets) < 1:
			continue
		pbar.update(1)
		tw.fill_branches(pyj = pythia_jets)

		# now generate the background event
		bg_tclones = tgbkg.GetBackground()
		# columns: pt, eta, phi, KF code - only particles with positive energy
		_parts = [[], [], [], []]
		_ = [[_parts[0].append(p[0].Pt()), _parts[1].append(p[0].Eta()), _parts[2].append(p[0].Phi()), _parts[3].append(p[1])] for p in [[tlv_from_tmcparticle(_p), _p.GetKF()] for _p in bg_tclones if _p.GetEnergy()>0]]
		_bg_fj_parts = fjext.vectorize_pt_eta_phi(_parts[0], _parts[1], _parts[2], 1000) #bg particles with index > 1000

		# hybrid event = pythia + background
		_fj_parts = []
		_ = [_fj_parts.append(_p) for _p in _py_fj_parts]
		_ = [_fj_parts.append(_p) for _p in _bg_fj_parts]
		# BUGFIX: the original tested 'len(_fj_parts) < 0' which can never be
		# true (len() is non-negative) - the guard never fired
		if len(_fj_parts) < 1:
			perror('no particles in the hybrid event?')
			continue

		# stream all particles of the hybrid event
		_ = [tw.fill_branches(part_pt = _pfj.perp(), part_eta = _pfj.eta(), part_phi = _pfj.phi(), part_idx=_pfj.user_index()) for _pfj in _fj_parts]

		# find jets in the hybrid event (with area)
		cs_hybrid = fj.ClusterSequenceArea(_fj_parts, jet_def, jet_area_def)
		jets = jet_selector_hybrid(cs_hybrid.inclusive_jets())
		# stream jets from the hybrid event
		tw.fill_branches(j = jets)

		# estimate the background density and correct the jet pT
		bg_estimator.set_particles(_fj_parts)
		rho = bg_estimator.rho()
		sigma = bg_estimator.sigma()
		corr_jet_pt = [j.pt() - j.area() * rho for j in jets]
		delta_pt = [delta_pt_matched(j, pythia_jets, rho) for j in jets]
		tw.fill_branches(j_corr_pt = corr_jet_pt, dpt = delta_pt)
		tw.fill_branches(rho = rho, rho_sigma = sigma)

		tw.fill_tree()
		bg_tclones.Clear()

	pbar.close()

	tgbkg.CloserFunction()
	tw.write_and_close()
Example #15
0
def main():
    """Scan subjet radii and measure z-loss of the leading subjet.

    For each anti-kT R=0.4 PYTHIA jet matched to a parton, reclusters the
    jet constituents with a range of (anti-)kT subjet radii and records
    zloss = 1 - pT(leading subjet)/pT(jet) into histograms, profiles and a
    TTree (quark- and gluon-tagged separately).
    """
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    # NOTE(review): store_true with default=True means --nw is always True and
    # the 'no parton label' warning below is unreachable - confirm intent
    parser.add_argument('--nw',
                        help="no warn",
                        default=True,
                        action='store_true')
    parser.add_argument('--ignore-mycfg',
                        help="ignore some settings hardcoded here",
                        default=False,
                        action='store_true')
    parser.add_argument('--enable-background',
                        help="enable background calc",
                        default=False,
                        action='store_true')
    parser.add_argument('--output',
                        help="output file name",
                        default='leadsj_vs_zloss.root',
                        type=str)
    parser.add_argument('--jetptmin',
                        help="jet pt minimum",
                        default=-1,
                        type=float)
    parser.add_argument('--jetptmax',
                        help="jet pt maximum",
                        default=1e6,
                        type=float)
    parser.add_argument('--eta', help="jet eta max", default=2.4, type=float)
    parser.add_argument(
        '--kt',
        help="use kT algorithm instead of anti-kT for the subjets",
        default=False,
        action='store_true')

    args = parser.parse_args()

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    jet_R0 = 0.4
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    jet_selector = fj.SelectorPtMin(args.py_pthatmin) & fj.SelectorPtMax(
        1000.0) & fj.SelectorAbsEtaMax(args.eta - jet_R0)
    mycfg = []
    # if an explicit jet pT window is requested, align pT-hat and the selector
    if args.jetptmin > 0:
        mycfg = ['PhaseSpace:pThatMin = {}'.format(args.jetptmin)]
        jet_selector = fj.SelectorPtMin(args.jetptmin) & fj.SelectorPtMax(
            args.jetptmax) & fj.SelectorAbsEtaMax(args.eta - jet_R0)
    print(jet_def)

    if args.ignore_mycfg:
        mycfg = []
    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if not pythia:
        perror("pythia initialization failed.")
        return

    # log-spaced subjet radii from 0.001 up to the full jet radius
    nbins = 20
    # sjrs = [0.001 + x * 0.04 for x in range(0, nbins)]
    sjrs = logbins(0.001, jet_R0, nbins)
    print(sjrs)
    print('log(1/r) :', [ROOT.TMath.Log(1 / r) for r in sjrs])
    # one reclustering jet definition per subjet radius
    sjdefs = dict()
    for sjr in sjrs:
        if args.kt:
            _jet_def = fj.JetDefinition(fj.kt_algorithm, sjr)
        else:
            _jet_def = fj.JetDefinition(fj.antikt_algorithm, sjr)
        sjdefs[sjr] = _jet_def

    # tw = treewriter.RTreeWriter(name = 'lsjvsx', file_name = 'leadsj_vs_x.root')
    tw = treewriter.RTreeWriter(name='lsjvszloss', file_name=args.output)
    tw.fout.cd()
    # per-radius zloss histograms, quark- and gluon-tagged
    h_zloss_r_q = dict()
    h_zloss_r_g = dict()
    for sjr in sjrs:
        sname = 'h_zloss_glue_{}'.format(sjr)
        _h_zloss_r_g = ROOT.TH1F(sname, sname, len(sjrs), 0., 1.)
        h_zloss_r_g[sjr] = _h_zloss_r_g
        sname = 'h_zloss_quark_{}'.format(sjr)
        _h_zloss_r_q = ROOT.TH1F(sname, sname, len(sjrs), 0., 1.)
        h_zloss_r_q[sjr] = _h_zloss_r_q

    # log-spaced binning in log(1/r) for the *_log profiles
    lbins = logbins(ROOT.TMath.Log(1. / jet_R0), ROOT.TMath.Log(1. / sjrs[0]),
                    nbins)
    print('lbins:', lbins)

    sname = 'prof_zloss_vs_r_any'
    prof_a = ROOT.TProfile(sname, sname, nbins, 0, jet_R0)
    prof_a_log = ROOT.TProfile(sname + '_log', sname + '_log', nbins, lbins)

    sname = 'prof_zloss_vs_r_glue'
    prof_g = ROOT.TProfile(sname, sname, nbins, 0, jet_R0)
    prof_g_log = ROOT.TProfile(sname + '_log', sname + '_log', nbins, lbins)

    sname = 'prof_zloss_vs_r_quark'
    prof_q = ROOT.TProfile(sname, sname, nbins, 0, jet_R0)
    prof_q_log = ROOT.TProfile(sname + '_log', sname + '_log', nbins, lbins)
    # prof_q_log = ROOT.TProfile(sname+'_log', sname+'_log', nbins, ROOT.TMath.Log(1./jet_R0), ROOT.TMath.Log(1./sjrs[0]))

    sname = 'h2_zloss_vs_r_glue'
    h2_zloss_r_g = ROOT.TH2F(sname, sname, nbins, 0., jet_R0, len(sjrs), 0.,
                             1.)
    sname = 'h2_zloss_vs_r_quark'
    h2_zloss_r_q = ROOT.TH2F(sname, sname, nbins, 0., jet_R0, len(sjrs), 0.,
                             1.)

    # event loop

    if args.nev < 100:
        args.nev = 100
    for i in tqdm.tqdm(range(args.nev)):
        if not pythia.next():
            continue
        # parts = pythiafjext.vectorize(pythia, True, -1, 1, False)
        # partons for flavor tagging; charged final-state particles for jets
        partons = pythiafjext.vectorize_select(pythia, [pythiafjext.kParton],
                                               0, True)
        parts = pythiafjext.vectorize_select(
            pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, False)
        # parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, False)
        jets = jet_selector(jet_def(parts))

        # for j in tqdm.tqdm(jets):
        for j in jets:
            # match the jet to a parton within R0/2 for quark/gluon tagging
            j_type = match_dR(j, partons, jet_R0 / 2.)
            if j_type[0] is None:
                if args.nw:
                    continue
                pwarning('Jet with no parton label')
                continue

            tw.fill_branch("j", j)
            for sjr in sjrs:
                # recluster the jet constituents with the subjet radius
                rc_jets = fj.sorted_by_pt(sjdefs[sjr](j.constituents()))
                tw.fill_branch("sjr{}".format(sjr), rc_jets[0])
                # momentum fraction lost by keeping only the leading subjet
                zloss = 1. - rc_jets[0].perp() / j.perp()
                tw.fill_branch("sjr{}_zloss".format(sjr), zloss)
                tw.fill_branch("ppid", j_type[0])
                tw.fill_branch("pquark", j_type[1])
                tw.fill_branch("pglue", j_type[2])

                prof_a.Fill(sjr, zloss)
                prof_a_log.Fill(ROOT.TMath.Log(1. / sjr), zloss)

                if j_type[1]:
                    h_zloss_r_q[sjr].Fill(zloss)
                    h2_zloss_r_q.Fill(sjr, zloss)
                    prof_q.Fill(sjr, zloss)
                    prof_q_log.Fill(ROOT.TMath.Log(1. / sjr), zloss)
                if j_type[2]:
                    h_zloss_r_g[sjr].Fill(zloss)
                    h2_zloss_r_g.Fill(sjr, zloss)
                    prof_g.Fill(sjr, zloss)
                    prof_g_log.Fill(ROOT.TMath.Log(1. / sjr), zloss)

            tw.fill_tree()

    pythia.stat()
    tw.write_and_close()
Example #16
0
def main():
    """Run groomer studies on pp data, optionally embedded into PbPb data.

    Finds anti-kT jets on pp detector-level events; when --datalistAA is
    given, also loads centrality-selected PbPb events, embeds the pp event,
    optionally applies constituent subtraction, and fills prong-matching
    branches for pp-vs-embedded jet pairs via GroomerOutput.
    """
    parser = argparse.ArgumentParser(description='test groomers',
                                     prog=os.path.basename(__file__))
    parser.add_argument('-o',
                        '--output-filename',
                        default="output.root",
                        type=str)
    parser.add_argument('datalistpp',
                        help='run through a file list',
                        default='',
                        type=str)
    parser.add_argument('--datalistAA',
                        help='run through a file list - embedding mode',
                        default='',
                        type=str)
    parser.add_argument('--jetR', default=0.4, type=float)
    parser.add_argument('--alpha', default=0, type=float)
    parser.add_argument('--dRmax', default=0.25, type=float)
    parser.add_argument('--overwrite',
                        help="overwrite output",
                        default=False,
                        action='store_true')
    parser.add_argument('--jetptcut',
                        help='remove jets below the cut',
                        default=50.,
                        type=float)
    parser.add_argument('--nev',
                        help='number of events to run',
                        default=0,
                        type=int)
    parser.add_argument('--max-eta',
                        help='max eta for particles',
                        default=0.9,
                        type=float)
    parser.add_argument('--npart-cut',
                        help='npart cut on centrality low,high hint:' +
                        npart_cents,
                        default='325,450',
                        type=str)

    args = parser.parse_args()

    # parse the 'low,high' centrality selection on npart
    try:
        npart_min = int(args.npart_cut.split(',')[0])
        npart_max = int(args.npart_cut.split(',')[1])
    except:
        perror(
            'unable to parse npart centrality selection - two integer numbers with a coma in-between needed - specified:',
            args.npart_cut)
        return 1

    # initialize constituent subtractor (only when a subtraction radius is set)
    cs = None
    if args.dRmax > 0:
        cs = CEventSubtractor(alpha=args.alpha,
                              max_distance=args.dRmax,
                              max_eta=args.max_eta,
                              bge_rho_grid_size=0.25,
                              max_pt_correct=100)

    pp_data = DataIO(name='Sim Pythia Detector level',
                     file_list=args.datalistpp,
                     random_file_order=False,
                     tree_name='tree_Particle_gen')
    ja_pp = JetAnalysis(jet_R=args.jetR,
                        jet_algorithm=fj.antikt_algorithm,
                        jet_pt_min=50.,
                        particle_eta_max=args.max_eta)

    # embedding mode: PbPb background data plus jet finders for AA and hybrid
    if args.datalistAA:
        aa_data = DataBackgroundIO(name='PbPb',
                                   file_list=args.datalistAA,
                                   tree_name='tree_Particle_gen')
        ja_emb = JetAnalysis(jet_R=args.jetR,
                             jet_algorithm=fj.antikt_algorithm,
                             jet_pt_min=50.,
                             particle_eta_max=args.max_eta)
        ja_aa = JetAnalysis(jet_R=args.jetR,
                            jet_algorithm=fj.antikt_algorithm,
                            jet_pt_min=50.,
                            particle_eta_max=args.max_eta)

    # used to estimate dN/deta within |eta| < 1
    dndeta_selector = fj.SelectorAbsEtaMax(1.)

    # tg = thg.ThermalGenerator()
    print(cs)

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()

    gout = GroomerOutput(args.output_filename,
                         enable_aa_trees=bool(args.datalistAA))

    delta_t = 0
    start_t = time.time()
    iev = 1
    while pp_data.load_event(offset=0):
        iev = iev + 1
        if args.nev > 0:
            if iev > args.nev:
                iev = iev - 1
                break
        if iev % 1000 == 0:
            delta_t = time.time() - start_t
            pinfo('processing event', iev, ' - ev/sec =', iev / delta_t,
                  'elapsed =', delta_t)

        # find jets on detector level
        if len(pp_data.particles) < 1:
            pwarning(iev, 'pp event skipped N parts', len(pp_data.particles))
            continue
        ja_pp.analyze_event(pp_data.particles)
        if len(ja_pp.jets) < 1:
            continue

        # pinfo('n particles', len(pp_data.particles))
        dndeta0 = dndeta_selector(pp_data.particles)
        [
            gout.fill_branches(j, syst=0, dndeta=len(dndeta0) / 2.)
            for j in ja_pp.jets
        ]
        # pinfo('n jets', len(ja_pp.jets))

        if args.datalistAA:
            # draw PbPb events until one passes the npart centrality cut and
            # has particles.
            # NOTE(review): if load_event keeps failing or no event passes the
            # cut this loop does not terminate - confirm DataBackgroundIO
            # wraps around / eventually satisfies the selection
            while True:
                aa_loaded = aa_data.load_event(offset=10000)
                if aa_data.event.npart < npart_min or aa_data.event.npart >= npart_max:
                    continue
                else:
                    if len(aa_data.particles) < 1:
                        pwarning(iev, 'AA event skipped N parts',
                                 len(aa_data.particles))
                        continue
                    else:
                        break
            if aa_loaded:
                ja_aa.analyze_event(aa_data.particles)
                dndeta1 = dndeta_selector(aa_data.particles)
                if len(ja_aa.jets) > 0:
                    [
                        gout.fill_branches(j, syst=1, dndeta=len(dndeta1) / 2.)
                        for j in ja_aa.jets
                    ]
                else:
                    # pwarning('no jets in AA event?', len(ja_aa.jets), 'while dndeta=', len(dndeta1)/2.)
                    pass
                # build the hybrid event: pp detector level + PbPb background
                emb_event = fj.vectorPJ()
                [emb_event.push_back(p) for p in pp_data.particles]
                [emb_event.push_back(p) for p in aa_data.particles]
                rho = 0
                if cs:
                    # constituent subtraction; rho from its grid estimator
                    cs_parts = cs.process_event(emb_event)
                    rho = cs.bge_rho.rho()
                    ja_emb.analyze_event(cs_parts)
                else:
                    ja_emb.analyze_event(emb_event)
                # matches = [[jpp, jemb] for jpp in ja_pp.jets for jemb in ja_emb.jets if fjtools.matched_pt(jemb, jpp) > 0.5]
                # for mj in matches:
                # 	gout.fill_branches(mj[0], syst=2, dndeta=len(dndeta1)/2., rho=rho)
                # 	gout.fill_branches(mj[1], syst=3)
                [
                    gout.fill_branches_prong_matching(j_pp,
                                                      j_emb,
                                                      dndeta=len(dndeta1) / 2.,
                                                      rho=rho)
                    for j_pp in ja_pp.jets for j_emb in ja_emb.jets
                ]

    delta_t = time.time() - start_t
    pinfo('processed events', iev, ' - ev/sec =', iev / delta_t, 'elapsed =',
          delta_t)
    gout.write()
Example #17
0
def main():
    """h+jet analysis on PYTHIA events with trigger-track (TT) selection.

    Finds anti-kT R=0.4 jets matched to partons, requires (unless --no-tt) a
    trigger track in one of the TT{6,7}, TT{12,50}, TT{20,50} pT windows,
    grooms each jet (SoftDrop, dynamical grooming, Lund plane), reclusters
    with R=0.1/0.2 anti-kT, and streams jet + event-level PYTHIA info to a
    TTree.
    """
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    parser.add_argument('--ignore-mycfg',
                        help="ignore some settings hardcoded here",
                        default=False,
                        action='store_true')
    parser.add_argument('--output',
                        help="output file name",
                        default="test_hjet_parton.root",
                        type=str)
    parser.add_argument('--no-tt',
                        help="do not require TT to accept the event",
                        default=False,
                        action='store_true')
    parser.add_argument('--charged',
                        help="analyze only the charged particles of the FS",
                        default=False,
                        action='store_true')
    parser.add_argument('--max-jet-pt',
                        help="maximum jet pT to consider",
                        type=float,
                        default=100.)
    args = parser.parse_args()

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    hadron_eta_max = 2.0
    jet_R0 = 0.4
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    jet_selector = fj.SelectorPtMin(10.0) & fj.SelectorPtMax(
        args.max_jet_pt) & fj.SelectorAbsEtaMax(hadron_eta_max - jet_R0)
    # trigger-track selectors: 6-7, 12-50 and 20-50 GeV/c windows
    hTT6_selector = fj.SelectorPtMin(6) & fj.SelectorPtMax(
        7) & fj.SelectorAbsEtaMax(hadron_eta_max)
    hTT12_selector = fj.SelectorPtMin(12) & fj.SelectorPtMax(
        50) & fj.SelectorAbsEtaMax(hadron_eta_max)
    hTT20_selector = fj.SelectorPtMin(20) & fj.SelectorPtMax(
        50) & fj.SelectorAbsEtaMax(hadron_eta_max)

    pythia_fs_part_selection = [pythiafjext.kFinal]
    if args.charged is True:
        pwarning('running with charged particles in the final state')
        pythia_fs_part_selection.append(pythiafjext.kCharged)
    print(jet_def)

    all_jets = []

    # mycfg = ['PhaseSpace:pThatMin = 80']
    mycfg = []
    if args.ignore_mycfg:
        mycfg = []
    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if not pythia:
        perror("pythia initialization failed.")
        return

    # groomers: Lund plane generator, dynamical groomer, SoftDrop(beta=0, zcut=0.2)
    jet_def_lund = fj.JetDefinition(fj.cambridge_algorithm, 1.0)
    lund_gen = fjcontrib.LundGenerator(jet_def_lund)
    print(lund_gen.description())
    dy_groomer = fjcontrib.DynamicalGroomer(jet_def_lund)
    print(dy_groomer.description())
    sd = fjcontrib.SoftDrop(0, 0.2, 1.0)
    print(sd)

    # reclustering definitions for R=0.1 / R=0.2 subjets
    jet_def_rc01 = fj.JetDefinition(fj.antikt_algorithm, 0.1)
    jet_def_rc02 = fj.JetDefinition(fj.antikt_algorithm, 0.2)
    print(jet_def_rc01)
    print(jet_def_rc02)

    tw = treewriter.RTreeWriter(name='lsjvsx', file_name=args.output)

    # placeholder pseudojet used when no trigger track is found in a window
    zero_psj = fj.PseudoJet(0, 0, 10, 10)

    if args.nev < 100:
        args.nev = 100
    t = tqdm.tqdm(total=args.nev)
    while t.n < args.nev:
        if not pythia.next():
            continue

        # partons for flavor tagging; configurable final-state selection
        partons = pythiafjext.vectorize_select(pythia, [pythiafjext.kParton],
                                               0, True)
        parts = pythiafjext.vectorize_select(pythia, pythia_fs_part_selection,
                                             0, False)

        # leading trigger track in each TT window (placeholder when absent)
        hTT6 = zero_psj
        hTT6s = fj.sorted_by_pt(hTT6_selector(parts))
        if len(hTT6s) > 0:
            hTT6 = hTT6s[0]

        hTT12 = zero_psj
        hTT12s = fj.sorted_by_pt(hTT12_selector(parts))
        if len(hTT12s) > 0:
            hTT12 = hTT12s[0]

        hTT20 = zero_psj
        hTT20s = fj.sorted_by_pt(hTT20_selector(parts))
        if len(hTT20s) > 0:
            hTT20 = hTT20s[0]

        # reject the event when no TT found in any window (unless --no-tt)
        if args.no_tt is False:
            if hTT12.perp() < 1 and hTT6.perp() < 1 and hTT20.perp() < 1:
                continue

        jets = jet_selector(jet_def(parts))

        for j in jets:
            t.update(1)
            # parton match within R0/2 -> (pid, is_quark, is_gluon)
            j_type = match_dR(j, partons, jet_R0 / 2.)
            if j_type[0] is None:
                continue
            j_sd = sd.result(j)
            sd_info = fjcontrib.get_SD_jet_info(j_sd)
            rc_sjets01 = fj.sorted_by_pt(jet_def_rc01(j.constituents()))
            rc_sjets02 = fj.sorted_by_pt(jet_def_rc02(j.constituents()))
            tw.fill_branches(
                j=j,
                mult=len(parts),
                lund=[ls for ls in lund_gen.result(j)],
                dyg1=dy_groomer.result(j, 1),
                sd=j_sd,
                sd_z=sd_info.z,
                sd_mu=sd_info.mu,
                sd_Delta=sd_info.dR,
                lsjet01=rc_sjets01[0],
                nsjet01=len(rc_sjets01),
                sjet01=rc_sjets01,
                lsjet02=rc_sjets02[0],
                nsjet02=len(rc_sjets02),
                sjet02=rc_sjets02,
                hTT6=hTT6,
                hTT12=hTT12,
                hTT20=hTT20,
                dphi6=j.delta_phi_to(hTT6),
                dphi12=j.delta_phi_to(hTT12),
                dphi20=j.delta_phi_to(hTT20),
                ppid=j_type[0],
                pquark=j_type[1],
                pglue=j_type[2],  # this is redundancy
                pycode=pythia.info.code(),
                pysigmagen=pythia.info.sigmaGen(),
                pysigmaerr=pythia.info.sigmaErr(),
                pyid1=pythia.info.id1pdf(),
                # BUGFIX: was id1pdf() (copy-paste) - beam-2 parton id
                pyid2=pythia.info.id2pdf(),
                pyx1=pythia.info.x1pdf(),
                pyx2=pythia.info.x2pdf(),
                pypdf1=pythia.info.pdf1(),
                # companion branch to pypdf1 (was missing)
                pypdf2=pythia.info.pdf2(),
                pyQfac=pythia.info.QFac(),
                pyalphaS=pythia.info.alphaS(),
                pypthat=pythia.info.pTHat(),
                pymhat=pythia.info.mHat())
            tw.fill_tree()

    t.close()
    pythia.stat()

    tw.write_and_close()
Example #18
0
    def run(self):
        """Main event loop: detector-level jet finding, det<->part matching,
        and embedding of the pp detector-level event into a PbPb data event.

        For every detector-level event: find jets, reject events with a
        likely-fake (>100 GeV/c) constituent, match jets to the corresponding
        particle-level event (same run_number/ev_id), then embed the
        detector-level particles into a PbPb event (optionally constituent-
        subtracted) and match again. Results go to self.output.
        """
        # need to change this for data to drive...
        delta_t = 0
        start_t = time.time()
        iev = 1
        while self.det_sim.load_event():
            iev = iev + 1
            if self.nev > 0:
                if iev > self.nev:
                    iev = iev - 1
                    break
            if iev % 1000 == 0:
                delta_t = time.time() - start_t
                pinfo('processing event', iev, ' - ev/sec =', iev / delta_t,
                      'elapsed =', delta_t)

            # find jets on detector level
            if len(self.det_sim.particles) < 1:
                pwarning(iev, 'event skipped N detector parts',
                         len(self.det_sim.particles))
                continue
            self.ja_det.analyze_event(self.det_sim.particles)
            _jets_det = self.ja_det.jets
            if len(_jets_det) < 1:
                continue
            # reject events containing a suspiciously hard constituent
            _too_high_pt = [
                p.pt() for j in _jets_det for p in j.constituents()
                if p.pt() > 100.
            ]
            if len(_too_high_pt) > 0:
                pwarning(iev, 'a likely fake high pT particle(s)',
                         _too_high_pt, '- skipping whole event')
                continue

            # output file named after the input file (slashes flattened)
            _output_fname = os.path.expanduser(
                os.path.expandvars(self.det_sim.file_io.file_input))
            _output_fname = _output_fname.replace("/", "_")
            self.output.initialize_output(_output_fname)

            self.output.fill_det_level(iev, _jets_det)

            # load the corresponding event on particle level and verify the
            # (run_number, ev_id) pair actually matches
            self.part_sim.open_afile(afile=self.det_sim.file_io.file_input)
            if not self.part_sim.load_event_with_loc(
                    self.det_sim.event.run_number, self.det_sim.event.ev_id,
                    0):
                perror('unable to load partL event run#:',
                       self.det_sim.event.run_number, 'ev_id:',
                       self.det_sim.event.ev_id)
                continue
            if self.det_sim.event.run_number != self.part_sim.event.run_number:
                perror('run# missmatch detL:', self.det_sim.event.run_number,
                       'partL:', self.part_sim.event.run_number)
                continue
            if self.det_sim.event.ev_id != self.part_sim.event.ev_id:
                perror('ev_id# missmatch detL:', self.det_sim.event.ev_id,
                       'partL:', self.part_sim.event.ev_id)
                continue

            # find jets on particle level
            if len(self.part_sim.particles) < 1:
                pwarning(iev, 'event skipped N particle parts',
                         len(self.part_sim.particles))
                continue
            self.ja_part.analyze_event(self.part_sim.particles)
            _jets_part = self.ja_part.jets
            if len(_jets_part) < 1:
                continue

            # match det- to part-level jets (geometrical match in R-eta)
            _det_part_matches = []
            _n_matches = 0
            _part_psjv = self.ja_part.jets_as_psj_vector()
            for j_det in _jets_det:
                _mactches_pp = fjtools.matched_Reta(j_det, _part_psjv,
                                                    0.6 * self.jetR)
                _n_matches = _n_matches + len(_mactches_pp)
                if len(_mactches_pp) > 1:
                    pwarning('event:', iev, 'jet pt=', j_det.pt(),
                             'more than one match in pp jets',
                             [i for i in _mactches_pp])
                # accept only unambiguous (exactly one) matches
                if len(_mactches_pp) == 1:
                    j_part = _part_psjv[_mactches_pp[0]]
                    _det_part_matches.append([j_det, j_part])
                    self.output.fill_pp_pairs(iev, [j_det, j_part])

            # (original had this condition nested inside itself - collapsed)
            if _n_matches < 1:
                pwarning('event:', iev,
                         '- no matched jets in simulation!?',
                         len(_det_part_matches))

            # here embedding to PbPb data - background particle user indices
            # start beyond the number of detector-level particles
            _offset = 10000
            while _offset < len(self.det_sim.particles):
                _offset = _offset + 1000
                pwarning('increasing bg index offset to', _offset)

            # draw a PbPb event with at least one particle; bail out of the
            # whole run when the data source is exhausted
            _PbPb_loaded = 0
            while _PbPb_loaded == 0:
                if not self.dataPbPb.load_event(offset=_offset):
                    perror('unable to load next PbPb event')
                    _PbPb_loaded = -1
                else:
                    _hybrid_event = self.dataPbPb.particles
                    _nparts_hybrid_no_emb = len(_hybrid_event)
                    if _nparts_hybrid_no_emb < 1:
                        pwarning(
                            'hybrid event with no particles! trying another one'
                        )
                        _PbPb_loaded = 0
                    else:
                        _PbPb_loaded = 1
            if _PbPb_loaded < 0:
                perror(
                    'unable to load PbPb event - permanent - bailing out here.'
                )
                break

            # embed: append detector-level particles to the PbPb event
            _tmp = [_hybrid_event.push_back(p) for p in self.det_sim.particles]

            if self.cs:
                # constituent subtraction before hybrid jet finding
                cs_parts = self.cs.process_event(_hybrid_event)
                # NOTE(review): rho is not used further down in this method -
                # presumably kept for future output; confirm
                rho = self.cs.bge_rho.rho()
                self.ja_hybrid.analyze_event(cs_parts)
            else:
                self.ja_hybrid.analyze_event(_hybrid_event)

            # match the det/part pairs to hybrid jets -> triplets
            _hybrid_matches = []
            _hybrid_psjv = self.ja_hybrid.jets_as_psj_vector()
            for m in _det_part_matches:
                j_det = m[0]
                j_part = m[1]
                _mactches_hybrid = fjtools.matched_Reta(
                    j_det, _hybrid_psjv, 0.6 * self.jetR)
                if len(_mactches_hybrid) > 1:
                    pwarning('event:', iev, 'jet pt=', j_det.pt(),
                             'more than one match in hybrid jets',
                             [i for i in _mactches_hybrid])
                if len(_mactches_hybrid) == 1:
                    j_hybr = _hybrid_psjv[_mactches_hybrid[0]]
                    _hybrid_matches.append([j_det, j_part, j_hybr])
                    self.output.fill_emb_3(iev, [j_det, j_part, j_hybr])

            # (original had this condition nested inside itself - collapsed)
            _n_matches_hybrid = len(_hybrid_matches)
            if _n_matches_hybrid < 1:
                pwarning('event:', iev, '- no matched jets in embedding!?',
                         _n_matches_hybrid)

        delta_t = time.time() - start_t
        pinfo('processed events', iev, ' - ev/sec =', iev / delta_t,
              'elapsed =', delta_t)
        self.output.close()
Example #19
0
def main():
    """Command-line driver: stream per-event multiplicity (dN/deta) and particle
    spectra from a simulated event list into a ROOT TTree.

    For every event read via DataIO it records dN/deta (optionally the particles
    themselves), centrality flags from npart/nch window cuts, and — when enabled —
    the same observables for thermal-generator events.  Output goes to a ROOT file
    with one TTree ('t') and pT histograms.

    Returns:
        1 on malformed --npart-cut / --nch-cut options; otherwise falls off the
        end (None) after writing and closing the output file.
    """
    parser = argparse.ArgumentParser(description='test groomers',
                                     prog=os.path.basename(__file__))
    parser.add_argument('-o',
                        '--output-filename',
                        default="centrality_output.root",
                        type=str)
    parser.add_argument('datalist',
                        help='run through a file list',
                        default='',
                        type=str)
    parser.add_argument('--overwrite',
                        help="overwrite output",
                        default=False,
                        action='store_true')
    parser.add_argument('--nev',
                        help='number of events to run',
                        default=0,
                        type=int)
    # FIX: explicit type=float — without it a user-supplied value stays a str
    # and `args.max_eta * 2.` below raises TypeError (the float default hid this).
    parser.add_argument('--max-eta',
                        help='max eta for particles',
                        default=0.9,
                        type=float)
    parser.add_argument('--thermal',
                        help='enable thermal generator',
                        action='store_true',
                        default=False)
    parser.add_argument('--thermal-default',
                        help='enable thermal generator',
                        action='store_true',
                        default=False)
    parser.add_argument('--particles',
                        help='stream particles',
                        action='store_true',
                        default=False)
    parser.add_argument('--npart-cut',
                        help='npart cut on centrality low,high hint:' +
                        npart_cents,
                        default='325,450',
                        type=str)
    parser.add_argument('--nch-cut',
                        help='nch cut on centrality low,high hint:' +
                        nch_cents,
                        default='18467,50000',
                        type=str)

    args = parser.parse_args()

    # Parse the 'low,high' centrality windows.  int() raises ValueError on
    # non-numeric text, [1] raises IndexError when the comma is missing —
    # catch exactly those instead of a bare except that would also swallow
    # KeyboardInterrupt/SystemExit.
    try:
        npart_min = int(args.npart_cut.split(',')[0])
        npart_max = int(args.npart_cut.split(',')[1])
    except (ValueError, IndexError):
        perror(
            'unable to parse npart centrality selection - two integer numbers with a comma in-between needed - specified:',
            args.npart_cut)
        return 1

    try:
        nch_min = int(args.nch_cut.split(',')[0])
        nch_max = int(args.nch_cut.split(',')[1])
    except (ValueError, IndexError):
        perror(
            'unable to parse nch centrality selection - two integer numbers with a comma in-between needed - specified:',
            args.nch_cut)
        return 1

    outf = ROOT.TFile(args.output_filename, 'recreate')
    outf.cd()
    t = ROOT.TTree('t', 't')
    tw = RTreeWriter(tree=t)
    # pT spectra: all events vs. the central-selected subset.
    # NOTE(review): hpt_therm is created but never filled below — confirm intent.
    hpt_antyr = ROOT.TH1F('hpt_antyr', 'hpt_antyr', 100, 0, 100)
    hpt_antyr_c = ROOT.TH1F('hpt_antyr_c', 'hpt_antyr_c', 100, 0, 100)
    hpt_therm = ROOT.TH1F('hpt_therm', 'hpt_therm', 100, 0, 100)
    hpt_therm_c = ROOT.TH1F('hpt_therm_c', 'hpt_therm_c', 100, 0, 100)

    data = DataIO(name='Sim Pythia Detector level',
                  file_list=args.datalist,
                  random_file_order=False,
                  tree_name='tree_Particle_gen')
    dndeta_selector = fj.SelectorAbsEtaMax(abs(
        args.max_eta)) & fj.SelectorPtMin(0.15)

    tg_default = None
    if args.thermal_default:
        tg_default = thg.ThermalGenerator()
        print(tg_default)

    tg_central = None
    if args.thermal:
        tg_central = thg.ThermalGenerator(beta=0.5, N_avg=3000, sigma_N=500)
        print(tg_central)

    # Eta acceptance width used to normalize dN/deta; loop-invariant, so
    # compute it once (same value as abs(args.max_eta * 2.) in the original).
    eta_width = abs(args.max_eta) * 2.

    start_t = time.time()
    iev = 1
    while data.load_event(offset=0):
        iev = iev + 1
        if args.nev > 0:
            if iev > args.nev:
                iev = iev - 1
                break
        if iev % 1000 == 0:
            delta_t = time.time() - start_t
            pinfo('processing event', iev, ' - ev/sec =', iev / delta_t,
                  'elapsed =', delta_t)

        # skip empty events
        if len(data.particles) < 1:
            pwarning(iev, 'pp event skipped N parts', len(data.particles))
            continue

        dndeta0_parts = dndeta_selector(data.particles)
        dndeta0 = len(dndeta0_parts) / eta_width
        [hpt_antyr.Fill(p.perp()) for p in dndeta0_parts]
        if args.particles:
            tw.fill_branches(dndeta=dndeta0, p=data.particles)
        else:
            tw.fill_branches(dndeta=dndeta0)
        tw.fill_branches_attribs(
            data.event, ['sigma', 'npart', 'nch', 'nchfwd', 'nchselect'],
            prefix='antyr_')

        # centrality flag from the npart window: [npart_min, npart_max)
        if data.event.npart < npart_min or data.event.npart >= npart_max:
            tw.fill_branches(cent10npart=0)
        else:
            tw.fill_branches(cent10npart=1)
            [hpt_antyr_c.Fill(p.perp()) for p in dndeta0_parts]

        # centrality flag from the nch window: [nch_min, nch_max)
        if data.event.nch < nch_min or data.event.nch >= nch_max:
            tw.fill_branches(cent10nch=0)
        else:
            tw.fill_branches(cent10nch=1)

        if tg_default:
            thg_particles = tg_default.load_event()
            dndetathg_default = dndeta_selector(thg_particles)
            if args.particles:
                tw.fill_branches(dndeta_thg_0=len(dndetathg_default) /
                                 eta_width,
                                 p_thg_0=thg_particles)
            else:
                tw.fill_branches(dndeta_thg_0=len(dndetathg_default) /
                                 eta_width)

        if tg_central:
            thg_parts_central = tg_central.load_event()
            dndetathg_central = dndeta_selector(thg_parts_central)
            [hpt_therm_c.Fill(p.perp()) for p in dndetathg_central]
            if args.particles:
                tw.fill_branches(dndeta_thg_c=len(dndetathg_central) /
                                 eta_width,
                                 p_thg_c=thg_parts_central)
            else:
                tw.fill_branches(dndeta_thg_c=len(dndetathg_central) /
                                 eta_width)

        tw.fill_tree()

    delta_t = time.time() - start_t
    pinfo('processed events', iev, ' - ev/sec =', iev / delta_t, 'elapsed =',
          delta_t)

    outf.Write()
    outf.Close()