def process_data(self):

    self.start_time = time.time()

    # Use IO helper class to convert ROOT TTree into
    # a SeriesGroupBy object of fastjet particles per event
    print('--- {} seconds ---'.format(time.time() - self.start_time))
    io = process_io.ProcessIO(input_file=self.input_file,
                              track_tree_name='tree_Particle',
                              is_pp=self.is_pp,
                              use_ev_id_ext=True)
    self.df_fjparticles = io.load_data(m=self.m)
    self.nEvents = len(self.df_fjparticles.index)
    self.nTracks = len(io.track_df.index)
    print('--- {} seconds ---'.format(time.time() - self.start_time))

    # Initialize histograms
    self.initialize_output_objects()

    # Create constituent subtractor, if configured
    if not self.is_pp:
        self.constituent_subtractor = [
            CEventSubtractor(max_distance=R_max,
                             alpha=self.alpha,
                             max_eta=self.max_eta,
                             bge_rho_grid_size=self.bge_rho_grid_size,
                             max_pt_correct=self.max_pt_correct,
                             ghost_area=self.ghost_area,
                             distance_type=fjcontrib.ConstituentSubtractor.deltaR)
            for R_max in self.max_distance
        ]

    print(self)

    # Find jets and fill histograms
    print('Analyze events...')
    self.analyze_events()

    # Plot histograms
    print('Save histograms...')
    process_base.ProcessBase.save_output_objects(self)

    print('--- {} seconds ---'.format(time.time() - self.start_time))
class Embedding(MPBase):

    def add_arguments_to_parser(parser):
        parser.add_argument('-o', '--output-filename', default="output.root", type=str)
        parser.add_argument('datalistAA', help='run through a file list', default='', type=str)
        parser.add_argument('simulationpp', help='run through a file list', default='', type=str)
        parser.add_argument('--jetR', default=0.4, type=float)
        parser.add_argument('--alpha', default=0, type=float)
        parser.add_argument('--dRmax', default=0.25, type=float)
        parser.add_argument('--sd-zcut', default=0.1, type=float)
        parser.add_argument('--overwrite', help="overwrite output", default=False, action='store_true')
        parser.add_argument('--benchmark', help='benchmark pthat setting - 80 GeV', default=False, action='store_true')
        parser.add_argument('--jetptcut', help='remove jets below the cut', default=1.e-3, type=float)
        parser.add_argument('--nev', help='number of events to run', default=0, type=int)
        parser.add_argument('--max-eta', help='max eta for particles', default=0.9, type=float)

    def __init__(self, **kwargs):
        self.configure_from_args(tree_name='tree_Particle',
                                 tree_name_gen='tree_Particle_gen',
                                 args=None)
        super(Embedding, self).__init__(**kwargs)
        self.copy_attributes(self.args)
        self.jet_def = fj.JetDefinition(fj.antikt_algorithm, self.jetR)
        if self.benchmark:
            self.jet_selector = fj.SelectorPtMin(80.0) & fj.SelectorPtMax(100.0) & fj.SelectorAbsEtaMax(self.max_eta - 1.05 * self.jetR)
            # jet_selector_cs = fj.SelectorPtMin(50.0) & fj.SelectorAbsEtaMax(max_eta - 1.05 * self.jetR)
        else:
            self.jet_selector = fj.SelectorAbsEtaMax(self.max_eta - 1.05 * self.jetR)
        self.parts_selector = fj.SelectorAbsEtaMax(self.max_eta)
        self.output = EmbeddingOutput(args=self.args)
        # self.output.copy_attributes(self)
        self.sd = fjcontrib.SoftDrop(0, self.sd_zcut, self.jetR)
        self.ja_part = JetAnalysis(jet_R=self.jetR, jet_algorithm=fj.antikt_algorithm,
                                   jet_pt_min=5., particle_eta_max=self.max_eta)
        self.ja_det = JetAnalysis(jet_R=self.jetR, jet_algorithm=fj.antikt_algorithm,
                                  jet_pt_min=self.jetptcut, particle_eta_max=self.max_eta)
        self.ja_hybrid = JetAnalysis(jet_R=self.jetR, jet_algorithm=fj.antikt_algorithm,
                                     jet_pt_min=5., particle_eta_max=self.max_eta)
        self.dataPbPb = DataBackgroundIO(name='Data PbPb', file_list=self.datalistAA)
        self.det_sim = DataIO(name='Sim Pythia Detector level', file_list=self.simulationpp,
                              random_file_order=False)
        self.part_sim = DataIO(name='Sim Pythia Particle level', file_list=self.simulationpp,
                               random_file_order=False, tree_name='tree_Particle_gen')
        self.cs = None
        if self.dRmax > 0:
            self.cs = CEventSubtractor(alpha=self.alpha, max_distance=self.dRmax,
                                       max_eta=self.max_eta, bge_rho_grid_size=0.25,
                                       max_pt_correct=100)

    def run(self):
        # need to change this for data to drive...
        delta_t = 0
        start_t = time.time()
        iev = 1
        # while self.det_sim.load_event() and self.part_sim.load_event():
        while self.det_sim.load_event():
            iev = iev + 1
            if self.nev > 0:
                if iev > self.nev:
                    iev = iev - 1
                    break
            if iev % 1000 == 0:
                delta_t = time.time() - start_t
                pinfo('processing event', iev, ' - ev/sec =', iev / delta_t, 'elapsed =', delta_t)

            # find jets on detector level
            if len(self.det_sim.particles) < 1:
                pwarning(iev, 'event skipped N detector parts', len(self.det_sim.particles))
                continue
            self.ja_det.analyze_event(self.det_sim.particles)
            _jets_det = self.ja_det.jets
            # _x = [pdebug(' -d ', j) for j in _jets_det]
            if len(_jets_det) < 1:
                continue
            _too_high_pt = [p.pt() for j in _jets_det for p in j.constituents() if p.pt() > 100.]
            if len(_too_high_pt) > 0:
                pwarning(iev, 'a likely fake high pT particle(s)', _too_high_pt, '- skipping whole event')
                continue

            _output_fname = os.path.expanduser(os.path.expandvars(self.det_sim.file_io.file_input))
            _output_fname = _output_fname.replace("/", "_")
            self.output.initialize_output(_output_fname)
            self.output.fill_det_level(iev, _jets_det)

            # load the corresponding event on particle level
            self.part_sim.open_afile(afile=self.det_sim.file_io.file_input)
            if not self.part_sim.load_event_with_loc(self.det_sim.event.run_number, self.det_sim.event.ev_id, 0):
                perror('unable to load partL event run#:', self.det_sim.event.run_number,
                       'ev_id:', self.det_sim.event.ev_id)
                continue
            if self.det_sim.event.run_number != self.part_sim.event.run_number:
                perror('run# mismatch detL:', self.det_sim.event.run_number,
                       'partL:', self.part_sim.event.run_number)
                continue
            if self.det_sim.event.ev_id != self.part_sim.event.ev_id:
                perror('ev_id# mismatch detL:', self.det_sim.event.ev_id,
                       'partL:', self.part_sim.event.ev_id)
                continue

            # find jets on particle level
            if len(self.part_sim.particles) < 1:
                pwarning(iev, 'event skipped N particle parts', len(self.part_sim.particles))
                continue
            self.ja_part.analyze_event(self.part_sim.particles)
            _jets_part = self.ja_part.jets
            # _x = [pdebug(' -p ', j) for j in _jets_part]
            if len(_jets_part) < 1:
                continue

            # match in pp simulations
            _det_part_matches = []
            _n_matches = 0
            _part_psjv = self.ja_part.jets_as_psj_vector()
            for j_det in _jets_det:
                _matches_pp = fjtools.matched_Reta(j_det, _part_psjv, 0.6 * self.jetR)
                # _matches_pp = fjtools.matched_Ry(j_det, _part_psjv, 0.6 * self.jetR)
                _n_matches = _n_matches + len(_matches_pp)
                if len(_matches_pp) > 1:
                    pwarning('event:', iev, 'jet pt=', j_det.pt(),
                             'more than one match in pp jets', [i for i in _matches_pp])
                if len(_matches_pp) == 1:
                    j_part = _part_psjv[_matches_pp[0]]
                    # pinfo('j_det', j_det, 'j_part', j_part)
                    _det_part_matches.append([j_det, j_part])
                    self.output.fill_pp_pairs(iev, [j_det, j_part])
            if _n_matches < 1:
                pwarning('event:', iev, '- no matched jets in simulation!?', len(_det_part_matches))

            # here embedding to PbPb data
            _offset = 10000
            while _offset < len(self.det_sim.particles):
                _offset = _offset + 1000
                pwarning('increasing bg index offset to', _offset)
            _PbPb_loaded = 0
            while _PbPb_loaded == 0:
                if not self.dataPbPb.load_event(offset=_offset):
                    perror('unable to load next PbPb event')
                    _PbPb_loaded = -1
                else:
                    _hybrid_event = self.dataPbPb.particles
                    _nparts_hybrid_no_emb = len(_hybrid_event)
                    if _nparts_hybrid_no_emb < 1:
                        pwarning('hybrid event with no particles! trying another one')
                        _PbPb_loaded = 0
                    else:
                        _PbPb_loaded = 1
            if _PbPb_loaded < 0:
                perror('unable to load PbPb event - permanent - bailing out here.')
                break

            _tmp = [_hybrid_event.push_back(p) for p in self.det_sim.particles]
            if self.cs:
                cs_parts = self.cs.process_event(_hybrid_event)
                rho = self.cs.bge_rho.rho()
                self.ja_hybrid.analyze_event(cs_parts)
            else:
                self.ja_hybrid.analyze_event(_hybrid_event)

            _hybrid_matches = []
            _hybrid_psjv = self.ja_hybrid.jets_as_psj_vector()
            for m in _det_part_matches:
                j_det = m[0]
                j_part = m[1]
                _matches_hybrid = fjtools.matched_Reta(j_det, _hybrid_psjv, 0.6 * self.jetR)
                if len(_matches_hybrid) > 1:
                    pwarning('event:', iev, 'jet pt=', j_det.pt(),
                             'more than one match in hybrid jets', [i for i in _matches_hybrid])
                if len(_matches_hybrid) == 1:
                    # m.append(_hybrid_psjv[_matches_hybrid[0]])
                    j_hybr = _hybrid_psjv[_matches_hybrid[0]]
                    # pdebug('L302', 'j_det', j_det, 'j_part', j_part, 'j_hybr', j_hybr)
                    _hybrid_matches.append([j_det, j_part, j_hybr])
                    self.output.fill_emb_3(iev, [j_det, j_part, j_hybr])

            _n_matches_hybrid = len(_hybrid_matches)
            if _n_matches_hybrid < 1:
                pwarning('event:', iev, '- no matched jets in embedding!?', _n_matches_hybrid)

        delta_t = time.time() - start_t
        pinfo('processed events', iev, ' - ev/sec =', iev / delta_t, 'elapsed =', delta_t)
        self.output.close()
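# ---------------------------------------------------------------------------
# Illustrative driver (not part of the original snippets): a minimal sketch of
# how the Embedding class above could be wired up from the command line,
# assuming MPBase accepts keyword attributes such as args=... (as suggested by
# configure_from_args/copy_attributes) and that add_arguments_to_parser defines
# all required options. The function name embedding_main is hypothetical.
def embedding_main():
    import argparse
    parser = argparse.ArgumentParser(description='embed pp detector-level events into PbPb data',
                                     prog=os.path.basename(__file__))
    Embedding.add_arguments_to_parser(parser)
    args = parser.parse_args()
    if os.path.isfile(args.output_filename) and not args.overwrite:
        print('[i] output', args.output_filename, 'exists - use --overwrite to do just that...')
        return
    emb = Embedding(args=args)
    emb.run()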
def __init__(self, input_file='', config_file='', output_dir='', **kwargs):

    self.config_file = config_file
    self.input_file = input_file
    self.output_dir = output_dir
    self.track_tree = 'tree_Particle_gen'
    self.dirname = 'PWGHF_TreeCreator'
    self.histutils = ROOT.RUtil.HistUtils()

    # Initialize utils class
    self.utils = process_utils.ProcessUtils()

    # Read config file
    with open(self.config_file, 'r') as stream:
        config = yaml.safe_load(stream)

    self.track_df = None
    self.jetR_list = config['jetR']
    self.reclustering_algorithm = fj.cambridge_algorithm

    self.recoils_off = False
    if 'recoils_off' in config:
        self.recoils_off = config['recoils_off']

    # ----------------------------------------------
    # Loading recoil subtraction parameters
    self.thermal_subtraction_method = None
    self.gridsizes = None
    if 'thermal_subtraction_method' in config:
        self.thermal_subtraction_method = config['thermal_subtraction_method']
    if not self.thermal_subtraction_method:
        print('Will not do recoil subtraction')
    elif 'gridsub' in self.thermal_subtraction_method.lower():
        if 'gridsizes' in config:
            self.gridsizes = config['gridsizes']
        else:
            print('User requested gridsub subtraction method, but no gridsize was provided. Bailing out!')
            exit()
    self.grid_dict = {}
    self.cell_phi = {}
    self.cell_eta = {}
    self.populated_cells = []
    self.populated_cells_w_constit = []
    self.total_thermal_momentum = 0
    self.unsubtracted_thermal_momentum = 0

    self.run_diagnostics = None
    if 'run_diagnostics' in config:
        self.run_diagnostics = config['run_diagnostics']

    # ----------------------------------------------
    # If specified in the config file, randomly reject this fraction of thermals
    self.thermal_rejection_fraction = 0.0
    if 'thermal_rejection_fraction' in config:
        self.thermal_rejection_fraction = config['thermal_rejection_fraction']

    # ----------------------------------------------
    # If constituent subtractor is present, initialize it
    self.constituent_subtractor = None
    if 'constituent_subtractor' in config:
        print('Constituent subtractor is enabled.')
        constituent_subtractor = config['constituent_subtractor']
        max_distance = constituent_subtractor['max_distance']
        alpha = constituent_subtractor['alpha']
        max_eta = constituent_subtractor['max_eta']
        bge_rho_grid_size = constituent_subtractor['bge_rho_grid_size']
        max_pt_correct = constituent_subtractor['max_pt_correct']
        ghost_area = constituent_subtractor['ghost_area']
        self.constituent_subtractor = CEventSubtractor(
            max_distance=max_distance, alpha=alpha, max_eta=max_eta,
            bge_rho_grid_size=bge_rho_grid_size, max_pt_correct=max_pt_correct,
            ghost_area=ghost_area,
            distance_type=fjcontrib.ConstituentSubtractor.deltaR)
    else:
        print('Constituent subtractor is disabled.')

    # ----------------------------------------------
    # Create dictionaries to store grooming settings and observable settings for each observable
    # Each dictionary entry stores a list of subconfiguration parameters
    # The observable list stores the observable setting, e.g. subjetR
    # The grooming list stores a list of grooming settings {'sd': [zcut, beta]} or {'dg': [a]}
    self.observable_list = config['process_observables']
    self.obs_settings = {}
    self.obs_grooming_settings = {}
    for observable in self.observable_list:
        obs_config_dict = config[observable]
        obs_subconfig_list = [name for name in list(obs_config_dict.keys()) if 'config' in name]
        self.obs_settings[observable] = self.utils.obs_settings(observable, obs_config_dict, obs_subconfig_list)
        self.obs_grooming_settings[observable] = self.utils.grooming_settings(obs_config_dict)

    self.initialize_histos()
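# ---------------------------------------------------------------------------
# Hedged illustration (not taken from the original code) of the configuration
# structure the __init__ above expects after yaml.safe_load(): 'jetR' and
# 'process_observables' are required, the other blocks are optional. The
# observable name 'theta_g' and the contents of its 'config1' subconfiguration
# are made-up placeholders; only the key names actually looked up by the code
# above are guaranteed.
example_config = {
    'jetR': [0.2, 0.4],
    'process_observables': ['theta_g'],
    # one block per observable; keys containing 'config' are treated as subconfigurations
    'theta_g': {
        'config1': {'SoftDrop': {'zcut': 0.1, 'beta': 0}},
    },
    'thermal_subtraction_method': None,   # a 'gridsub' variant would also need 'gridsizes'
    'thermal_rejection_fraction': 0.0,
    'constituent_subtractor': {
        'max_distance': 0.25,
        'alpha': 0,
        'max_eta': 0.9,
        'bge_rho_grid_size': 1.0,
        'max_pt_correct': 100,
        'ghost_area': 0.01,
    },
}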
def process_mc(self):

    self.start_time = time.time()

    # ------------------------------------------------------------------------
    # Use IO helper class to convert detector-level ROOT TTree into
    # a SeriesGroupBy object of fastjet particles per event
    print('--- {} seconds ---'.format(time.time() - self.start_time))
    if self.fast_simulation:
        tree_dir = ''
    else:
        tree_dir = 'PWGHF_TreeCreator'
    io_det = process_io.ProcessIO(input_file=self.input_file, tree_dir=tree_dir,
                                  track_tree_name='tree_Particle', use_ev_id_ext=False,
                                  is_jetscape=self.jetscape,
                                  event_plane_range=self.event_plane_range)
    df_fjparticles_det = io_det.load_data(m=self.m, reject_tracks_fraction=self.reject_tracks_fraction)
    self.nEvents_det = len(df_fjparticles_det.index)
    self.nTracks_det = len(io_det.track_df.index)
    print('--- {} seconds ---'.format(time.time() - self.start_time))

    # If jetscape, store also the negative status particles (holes)
    if self.jetscape:
        io_det_holes = process_io.ProcessIO(input_file=self.input_file, tree_dir=tree_dir,
                                            track_tree_name='tree_Particle', use_ev_id_ext=False,
                                            is_jetscape=self.jetscape, holes=True,
                                            event_plane_range=self.event_plane_range)
        df_fjparticles_det_holes = io_det_holes.load_data(m=self.m, reject_tracks_fraction=self.reject_tracks_fraction)
        self.nEvents_det_holes = len(df_fjparticles_det_holes.index)
        self.nTracks_det_holes = len(io_det_holes.track_df.index)
        print('--- {} seconds ---'.format(time.time() - self.start_time))

    # ------------------------------------------------------------------------
    # Use IO helper class to convert truth-level ROOT TTree into
    # a SeriesGroupBy object of fastjet particles per event
    io_truth = process_io.ProcessIO(input_file=self.input_file, tree_dir=tree_dir,
                                    track_tree_name='tree_Particle_gen', use_ev_id_ext=False,
                                    is_jetscape=self.jetscape,
                                    event_plane_range=self.event_plane_range)
    df_fjparticles_truth = io_truth.load_data(m=self.m)
    self.nEvents_truth = len(df_fjparticles_truth.index)
    self.nTracks_truth = len(io_truth.track_df.index)
    print('--- {} seconds ---'.format(time.time() - self.start_time))

    # If jetscape, store also the negative status particles (holes)
    if self.jetscape:
        io_truth_holes = process_io.ProcessIO(input_file=self.input_file, tree_dir=tree_dir,
                                              track_tree_name='tree_Particle_gen', use_ev_id_ext=False,
                                              is_jetscape=self.jetscape, holes=True,
                                              event_plane_range=self.event_plane_range)
        df_fjparticles_truth_holes = io_truth_holes.load_data(m=self.m, reject_tracks_fraction=self.reject_tracks_fraction)
        self.nEvents_truth_holes = len(df_fjparticles_truth_holes.index)
        self.nTracks_truth_holes = len(io_truth_holes.track_df.index)
        print('--- {} seconds ---'.format(time.time() - self.start_time))

    # ------------------------------------------------------------------------
    # Now merge the two SeriesGroupBy to create a groupby df with [ev_id, run_number, fj_1, fj_2]
    # (Need a structure such that we can iterate event-by-event through both fj_1, fj_2 simultaneously)
    # In the case of jetscape, we merge also the hole collections fj_3, fj_4
    print('Merge det-level and truth-level into a single dataframe grouped by event...')
    if self.jetscape:
        self.df_fjparticles = pandas.concat(
            [df_fjparticles_det, df_fjparticles_truth,
             df_fjparticles_det_holes, df_fjparticles_truth_holes], axis=1)
        self.df_fjparticles.columns = ['fj_particles_det', 'fj_particles_truth',
                                       'fj_particles_det_holes', 'fj_particles_truth_holes']
    else:
        self.df_fjparticles = pandas.concat([df_fjparticles_det, df_fjparticles_truth], axis=1)
        self.df_fjparticles.columns = ['fj_particles_det', 'fj_particles_truth']
    print('--- {} seconds ---'.format(time.time() - self.start_time))

    # ------------------------------------------------------------------------
    # Set up the Pb-Pb embedding object
    if not self.is_pp and not self.thermal_model:
        self.process_io_emb = process_io_emb.ProcessIO_Emb(self.emb_file_list,
                                                           track_tree_name='tree_Particle',
                                                           m=self.m)

    # ------------------------------------------------------------------------
    # Initialize histograms
    if not self.dry_run:
        self.initialize_output_objects()

    # Create constituent subtractor, if configured
    if self.do_constituent_subtraction:
        self.constituent_subtractor = [
            CEventSubtractor(max_distance=R_max, alpha=self.alpha, max_eta=self.max_eta,
                             bge_rho_grid_size=self.bge_rho_grid_size,
                             max_pt_correct=self.max_pt_correct,
                             ghost_area=self.ghost_area,
                             distance_type=fjcontrib.ConstituentSubtractor.deltaR)
            for R_max in self.max_distance
        ]

    print(self)

    # Find jets and fill histograms
    print('Find jets...')
    self.analyze_events()

    # Plot histograms
    print('Save histograms...')
    process_base.ProcessBase.save_output_objects(self)

    print('--- {} seconds ---'.format(time.time() - self.start_time))
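# ---------------------------------------------------------------------------
# Self-contained sketch (illustration only) of the merge step used in
# process_mc() above: two per-event Series are concatenated column-wise into a
# single dataframe so that detector-level and truth-level particle collections
# can be iterated together, one event per row. The toy values stand in for the
# per-event fastjet particle lists.
def _merge_det_truth_example():
    import pandas as pd
    det = pd.Series([['d1', 'd2'], ['d3']], index=[0, 1])
    truth = pd.Series([['t1'], ['t2', 't3']], index=[0, 1])
    df = pd.concat([det, truth], axis=1)
    df.columns = ['fj_particles_det', 'fj_particles_truth']
    for _, row in df.iterrows():
        print(row['fj_particles_det'], row['fj_particles_truth'])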
def main():
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    parser.add_argument('--nw', help="no warn", default=False, action='store_true')
    parser.add_argument('--ignore-mycfg', help="ignore some settings hardcoded here",
                        default=False, action='store_true')
    parser.add_argument('--enable-background', help="enable background calc",
                        default=False, action='store_true')
    parser.add_argument('--output', help="output file name",
                        default='leadsj_vs_x_output.root', type=str)
    # for background
    parser.add_argument('--cent-bin', help="centrality bin 0 is the 0-5 percent most central bin",
                        type=int, default=0)
    parser.add_argument('--seed', help="pr gen seed", type=int, default=1111)
    parser.add_argument('--harmonics',
                        help="set harmonics flag (0 : v1 - v5) , (1 : v2 - v5) , (2: v3 - v5) , "
                             "(3: v1 - v4) , (4: v1 - v3) , (5: uniform dN/dphi no harmonics) , "
                             "(6 : v1 - v2 , v4 - v5) , (7 : v1 - v3 , v5) , (8 : v1 , v3 - v5) , "
                             "(9 : v1 only) , (10 : v2 only) , (11 : v3 only) , (12 : v4 only) , "
                             "(13 : v5 only)",
                        type=int, default=5)
    parser.add_argument('--eta',
                        help="set eta range must be uniform (e.g. abs(eta) < 0.9, which is ALICE TPC fiducial acceptance)",
                        type=float, default=0.9)
    parser.add_argument('--qa', help="PrintOutQAHistos", default=False, action='store_true')
    parser.add_argument('--dRmax', default=0.25, type=float)
    parser.add_argument('--alpha', default=0, type=float)
    args = parser.parse_args()

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    jet_R0 = 0.4
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    jet_selector = fj.SelectorPtMin(args.py_pthatmin) & fj.SelectorPtMax(1000.0) & fj.SelectorAbsEtaMax(args.eta - jet_R0)
    # jet_selector = fj.SelectorPtMin(40.0) & fj.SelectorPtMax(200.0) & fj.SelectorAbsEtaMax(1)
    print(jet_def)

    all_jets = []

    # mycfg = ['PhaseSpace:pThatMin = 80']
    # mycfg = ['PhaseSpace:pThatMin = 40']
    mycfg = ['']
    if args.ignore_mycfg:
        mycfg = []
    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if not pythia:
        perror("pythia initialization failed.")
        return

    jet_def_lund = fj.JetDefinition(fj.cambridge_algorithm, 1.0)
    lund_gen = fjcontrib.LundGenerator(jet_def_lund)
    print(lund_gen.description())
    dy_groomer = fjcontrib.DynamicalGroomer(jet_def_lund)
    print(dy_groomer.description())

    # sd = fjcontrib.SoftDrop(0, 0.1, 1.0)
    sd01 = fjcontrib.SoftDrop(0, 0.1, 1.0)
    print(sd01)
    sd02 = fjcontrib.SoftDrop(0, 0.2, 1.0)
    print(sd02)

    # jet_def_rc01 = fj.JetDefinition(fj.cambridge_algorithm, 0.1)
    # jet_def_rc02 = fj.JetDefinition(fj.cambridge_algorithm, 0.2)
    # print(jet_def_rc01)
    # print(jet_def_rc02)
    # rc = fjcontrib.Recluster(jet_def_rc, True)
    jet_def_rc01 = fj.JetDefinition(fj.antikt_algorithm, 0.1)
    jet_def_rc02 = fj.JetDefinition(fj.antikt_algorithm, 0.2)
    print(jet_def_rc01)
    print(jet_def_rc02)
    # rc = fjcontrib.Recluster(jet_def_rc, True)

    # tw = treewriter.RTreeWriter(name='lsjvsx', file_name='leadsj_vs_x.root')
    tw = treewriter.RTreeWriter(name='lsjvsx', file_name=args.output)

    tgbkg = None
    be = None
    if args.enable_background:
        # ROOT.gSystem.Load("libpyjetty_TennGen.dylib")
        # tgbkg = ROOT.TennGen()  # constructor
        # tgbkg.SetCentralityBin(args.cent_bin)  # centrality bin 0 is the 0-5 % most central bin
        # tgbkg.SetRandomSeed(args.seed)  # setting the seed
        # tgbkg.SetHarmonics(args.harmonics)  # set harmonics flag (0 : v1 - v5) , (1 : v2 - v5) ,
        #   (2: v3 - v5) , (3: v1 - v4) , (4: v1 - v3) , (5: uniform dN/dphi no harmonics) ,
        #   (6 : v1 - v2 , v4 - v5) , (7 : v1 - v3 , v5) , (8 : v1 , v3 - v5) , (9 : v1 only) ,
        #   (10 : v2 only) , (11 : v3 only) , (12 : v4 only) , (13 : v5 only)
        # tgbkg.SetEtaRange(args.eta)  # set eta range must be uniform (e.g. |eta| < 0.9, which is ALICE TPC fiducial acceptance)
        # tgbkg.PrintOutQAHistos(args.qa)
        # tgbkg.InitializeBackground()
        from pyjetty.mputils import BoltzmannEvent
        be = BoltzmannEvent(mean_pt=0.7, multiplicity=2000 * args.eta * 2,
                            max_eta=args.eta, max_pt=100)
        print(be)

    from pyjetty.mputils import CEventSubtractor, CSubtractorJetByJet
    cs = CEventSubtractor(alpha=args.alpha, max_distance=args.dRmax, max_eta=args.eta,
                          bge_rho_grid_size=0.25, max_pt_correct=100)
    print(cs)

    if args.nev < 100:
        args.nev = 100

    for i in tqdm.tqdm(range(args.nev)):
        if not pythia.next():
            continue
        # parts = pythiafjext.vectorize(pythia, True, -1, 1, False)
        partons = pythiafjext.vectorize_select(pythia, [pythiafjext.kParton], 0, True)
        parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged], 0, False)
        # parts = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal], 0, False)
        jets = jet_selector(jet_def(parts))

        # for j in tqdm.tqdm(jets):
        for j in jets:
            j_type = match_dR(j, partons, jet_R0 / 2.)
            if j_type[0] is None:
                if args.nw:
                    continue
                pwarning('Jet with no parton label')
                continue

            j_sd02 = sd02.result(j)
            sd02_info = fjcontrib.get_SD_jet_info(j_sd02)
            j_sd01 = sd01.result(j)
            sd01_info = fjcontrib.get_SD_jet_info(j_sd01)

            rc_sjets01 = fj.sorted_by_pt(jet_def_rc01(j.constituents()))
            rc_sjets02 = fj.sorted_by_pt(jet_def_rc02(j.constituents()))

            tw.fill_branches(
                j=j,
                lund=[ls for ls in lund_gen.result(j)],
                dyg1=dy_groomer.result(j, 1),
                sd01=j_sd01, sd01_z=sd01_info.z, sd01_mu=sd01_info.mu, sd01_Delta=sd01_info.dR,
                sd02=j_sd02, sd02_z=sd02_info.z, sd02_mu=sd02_info.mu, sd02_Delta=sd02_info.dR,
                # breaking compatibility
                # sd=j_sd, sd_z=sd_info.z, sd_mu=sd_info.mu, sd_Delta=sd_info.dR,
                lsjet01=rc_sjets01[0], nsjet01=len(rc_sjets01), sjet01=rc_sjets01,
                lsjet02=rc_sjets02[0], nsjet02=len(rc_sjets02), sjet02=rc_sjets02,
                ppid=j_type[0], pquark=j_type[1], pglue=j_type[2],  # this is redundant
                pycode=pythia.info.code(),
                pysigmagen=pythia.info.sigmaGen(),
                pysigmaerr=pythia.info.sigmaErr(),
                pyid1=pythia.info.id1pdf(),
                pyid2=pythia.info.id2pdf(),
                pyx1=pythia.info.x1pdf(),
                pyx2=pythia.info.x2pdf(),
                pypdf1=pythia.info.pdf1(),
                pyQfac=pythia.info.QFac(),
                pyalphaS=pythia.info.alphaS(),
                pypthat=pythia.info.pTHat(),
                pymhat=pythia.info.mHat())

            if be:
                bg_parts = be.generate(offset=10000)
                full_event = bg_parts
                tmp = [full_event.push_back(psj) for psj in j.constituents()]
                if cs:
                    cs_parts = cs.process_event(full_event)
                    rho = cs.bge_rho.rho()
                    bg_jets = fj.sorted_by_pt(jet_def(cs_parts))
                    for bj in bg_jets:
                        if fjtools.matched_pt(bj, j) > 0.5:
                            pass

            tw.fill_tree()

    pythia.stat()
    tw.write_and_close()
def main():
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    parser.add_argument('--ignore-mycfg', help="ignore some settings hardcoded here",
                        default=False, action='store_true')
    parser.add_argument('--output', default="output.root", type=str)
    parser.add_argument('--alpha', default=0, type=float)
    parser.add_argument('--dRmax', default=0.0, type=float)
    parser.add_argument('--zcut', default=0.1, type=float)
    parser.add_argument('--overwrite', help="overwrite output", default=False, action='store_true')
    parser.add_argument('--embed', help='run embedding from a file list', default='', type=str)
    parser.add_argument('--SDsignal', help='embed only SD signal prongs', default=False, action='store_true')
    parser.add_argument('--SDsignal-single', help='embed only SD signal - only leading prong!',
                        default=False, action='store_true')
    parser.add_argument('--efficiency', help='apply charged particle efficiency',
                        default=False, action='store_true')
    parser.add_argument('--benchmark', help='benchmark pthat setting - 80 GeV',
                        default=False, action='store_true')
    parser.add_argument('--csjet', help='constituent subtraction jet-by-jet',
                        default=False, action='store_true')
    args = parser.parse_args()

    if args.output == 'output.root':
        args.output = 'output_alpha_{}_dRmax_{}_SDzcut_{}.root'.format(args.alpha, args.dRmax, args.zcut)
        if args.py_seed >= 0:
            args.output = 'output_alpha_{}_dRmax_{}_SDzcut_{}_seed_{}.root'.format(
                args.alpha, args.dRmax, args.zcut, args.py_seed)
        if args.embed:
            args.output = args.output.replace('.root', '_emb.root')
        if args.efficiency:
            args.output = args.output.replace('.root', '_effi.root')
        if args.SDsignal:
            args.output = args.output.replace('.root', '_SDsignal.root')
        if args.SDsignal_single:
            args.output = args.output.replace('.root', '_SDsignal_single.root')
        if args.csjet:
            args.output = args.output.replace('.root', '_csjet.root')

    if os.path.isfile(args.output):
        if not args.overwrite:
            print('[i] output', args.output, 'exists - use --overwrite to do just that...')
            return

    print(args)

    # alice specific
    max_eta = 0.9

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    jet_R0 = 0.4
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    print(jet_def)

    mycfg = []
    if args.benchmark:
        mycfg = ['PhaseSpace:pThatMin = 80', 'PhaseSpace:pThatMax = -1']
        jet_selector = fj.SelectorPtMin(80.0) & fj.SelectorPtMax(100.0) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
        # jet_selector_cs = fj.SelectorPtMin(50.0) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
    else:
        args.py_biaspow = 4
        args.py_biasref = 10
        jet_selector = fj.SelectorPtMin(20) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
        # jet_selector_cs = fj.SelectorPtMin(50.0) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)

    if args.ignore_mycfg:
        mycfg = []

    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if not pythia:
        print("[e] pythia initialization failed.")
        return

    sd_zcut = args.zcut
    sd = fjcontrib.SoftDrop(0, sd_zcut, jet_R0)

    jarho = JetAnalysisWithRho(jet_R=jet_R0, jet_algorithm=fj.antikt_algorithm, particle_eta_max=max_eta)
    ja = JetAnalysis(jet_R=jet_R0, jet_algorithm=fj.antikt_algorithm, particle_eta_max=max_eta)

    be = None
    embd = None
    if len(args.embed) > 0:
        embd = DataBackgroundIO(file_list=args.embed)
        print(embd)
    else:
        be = BoltzmannEvent(mean_pt=0.6, multiplicity=2000 * max_eta * 2, max_eta=max_eta, max_pt=100)
        print(be)

    cs = None
    if args.dRmax > 0:
        cs = CEventSubtractor(alpha=args.alpha, max_distance=args.dRmax, max_eta=max_eta,
                              bge_rho_grid_size=0.25, max_pt_correct=100)
        print(cs)

    csjet = None
    if args.csjet:
        csjet = CSubtractorJetByJet(max_eta=max_eta, bge_rho_grid_size=0.25)
        print(csjet)

    parts_selector = fj.SelectorAbsEtaMax(max_eta)

    if args.nev < 1:
        args.nev = 1

    outf = ROOT.TFile(args.output, 'recreate')
    outf.cd()
    t = ROOT.TTree('t', 't')
    tw = RTreeWriter(tree=t)
    te = ROOT.TTree('te', 'te')
    twe = RTreeWriter(tree=te)

    # effi_pp = AliceChargedParticleEfficiency(csystem='pp')
    effi_PbPb = None
    if args.efficiency:
        effi_PbPb = AliceChargedParticleEfficiency(csystem='PbPb')
        print(effi_PbPb)

    ### EVENT LOOP STARTS HERE
    for iev in tqdm.tqdm(range(args.nev)):
        if not pythia.next():
            continue
        parts_pythia = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal, pythiafjext.kCharged])
        parts_gen = parts_selector(parts_pythia)
        if effi_PbPb:
            parts = effi_PbPb.apply_efficiency(parts_gen)
        else:
            parts = parts_gen

        signal_jets = fj.sorted_by_pt(jet_selector(jet_def(parts)))
        if len(signal_jets) < 1:
            continue

        for sjet in signal_jets:
            if args.SDsignal or args.SDsignal_single:
                sd_sjet = sd.result(sjet)
                pe1 = fj.PseudoJet()
                pe2 = fj.PseudoJet()
                has_parents = sd_sjet.has_parents(pe1, pe2)
                if has_parents:
                    jparts = fj.vectorPJ()
                    pe1.set_user_index(0)
                    pe2.set_user_index(1)
                    if args.SDsignal_single:
                        if pe1.pt() > pe2.pt():
                            jparts.push_back(pe1)
                        else:
                            jparts.push_back(pe2)
                    else:
                        jparts.push_back(pe1)
                        jparts.push_back(pe2)
                    sjets = fj.sorted_by_pt(jet_selector(jet_def(jparts)))
                    if len(sjets) == 1:
                        sjet = sjets[0]
                    else:
                        continue
                else:
                    continue

            if embd:
                bg_parts = embd.load_event(offset=10000)
                # for p in bg_parts:
                #     print(p.user_index())
            else:
                bg_parts = be.generate(offset=10000)
                # for p in bg_parts:
                #     print(p.user_index())

            full_event = bg_parts
            tmp = [full_event.push_back(psj) for psj in sjet.constituents()]

            if cs:
                cs_parts = cs.process_event(full_event)
                rho = cs.bge_rho.rho()
                jarho.analyze_event(cs_parts)
                tmp = [fill_tree_data(ej, twe, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen())
                       for ej in jarho.jets]
                tmp = [fill_tree_matched(sjet, ej, tw, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen())
                       for ej in jarho.jets]
            else:
                jarho.analyze_event(full_event)
                rho = jarho.rho
                if csjet:
                    # _csjet = fjcontrib.ConstituentSubtractor(jarho.bg_estimator)
                    # subtr_jets = [_csjet.result(ej) for ej in jarho.jets]
                    csjet.set_event_particles(full_event)
                    # subtr_jets = [csjet.process_jet(ej) for ej in jarho.jets]
                    # print('jbyj cs', len(subtr_jets), 'from', len(jarho.jets))
                    # subtr_jets_wconstits = [_j for _j in subtr_jets if _j.has_constituents()]
                    # for _j in subtr_jets_wconstits:
                    #     print(len(_j.constituents()))
                    subtr_jets_wconstits = csjet.process_jets(jarho.jets)
                    japerjet = JetAnalysisPerJet(jet_R=jet_R0, jet_algorithm=fj.antikt_algorithm,
                                                 particle_eta_max=max_eta,
                                                 input_jets=subtr_jets_wconstits)
                    # for _j in japerjet.jets:
                    #     for _c in _j.constituents():
                    #         if _c.user_index() >= 0:
                    #             print('user index kept?', _c.user_index())
                    #         # else:
                    #         #     print('user index kept?', _c.user_index(), _c.pt())
                    #     _sd_j = sd.result(_j)
                    # https://phab.hepforge.org/source/fastjetsvn/browse/contrib/contribs/RecursiveTools/trunk/Recluster.cc L 270
                    # tmp = [fill_tree_matched(sjet, ej, tw, sd, rho, iev, pythia.info.sigmaGen()) for ej in subtr_jets_wcs]
                    tmp = [fill_tree_data(ej, twe, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen())
                           for ej in japerjet.jets]
                    tmp = [fill_tree_matched(sjet, ej, tw, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen())
                           for ej in japerjet.jets]
                else:
                    tmp = [fill_tree_data(ej, twe, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen())
                           for ej in jarho.jets]
                    tmp = [fill_tree_matched(sjet, ej, tw, sd, rho, iev, pythia.info.weight(), pythia.info.sigmaGen())
                           for ej in jarho.jets]

    pythia.stat()

    outf.Write()
    outf.Close()
    print('[i] written', outf.GetName())
def __init__(self, config_file='', input_file='', output_dir='', **kwargs):
    super(common_base.CommonBase, self).__init__(**kwargs)

    self.config_file = config_file
    self.input_file = input_file
    self.output_dir = output_dir
    if not os.path.exists(self.output_dir):
        os.makedirs(self.output_dir)

    # Initialize config file
    self.initialize_config()

    # Load dataframe of particle four-vectors for all particles in the event
    # (separate dataframes for hard process and background)
    print()
    print('Loading particle dataframes')
    with open(self.input_file, 'rb') as f:
        df_particles_hard = pickle.load(f)
        df_particles_background = pickle.load(f)
    print('Done.')
    print()

    # Construct a dataframe of the combined hard+background particles
    df_particles_combined = pd.concat([df_particles_hard, df_particles_background])

    # Next, we will transform these into fastjet::PseudoJet objects.
    # This allows us to do jet finding and use the fastjet contrib to compute Nsubjettiness

    # (i) Group the particle dataframe by event id
    #     df_particles_grouped is a DataFrameGroupBy object with one particle dataframe per event
    df_particles_hard_grouped = df_particles_hard.groupby('event_id')
    df_particles_combined_grouped = df_particles_combined.groupby('event_id')

    # (ii) Transform the DataFrameGroupBy object to a SeriesGroupBy of fastjet::PseudoJets
    print('Converting particle dataframes to fastjet::PseudoJets...')
    df_fjparticles_hard = df_particles_hard_grouped.apply(self.get_fjparticles)
    df_fjparticles_combined = df_particles_combined_grouped.apply(self.get_fjparticles)
    self.df_fjparticles = pd.concat([df_fjparticles_hard, df_fjparticles_combined], axis=1)
    self.df_fjparticles.columns = ['fj_particles_hard', 'fj_particles_combined']
    print('Done.')
    print()

    # Create list of N-subjettiness observables: number of axes and beta values
    self.N_list = []
    self.beta_list = []
    for i in range(self.K - 2):
        self.N_list += [i + 1] * 3
        self.beta_list += [0.5, 1, 2]
    self.N_list += [self.K - 1] * 2
    self.beta_list += [1, 2]

    # Construct dictionary to store all jet quantities of interest
    self.jet_variables = {'hard': {}, 'combined': {}}
    for label in self.jet_variables.keys():
        for jetR in self.jetR_list:
            self.jet_variables[label][f'R{jetR}'] = {}
            for R_max in self.max_distance:
                self.jet_variables[label][f'R{jetR}'][f'Rmax{R_max}'] = {}
                for i, N in enumerate(self.N_list):
                    beta = self.beta_list[i]
                    self.jet_variables[label][f'R{jetR}'][f'Rmax{R_max}'][f'n_subjettiness_N{N}_beta{beta}'] = []

    # Create constituent subtractors
    self.constituent_subtractor = [
        CEventSubtractor(max_distance=R_max, alpha=self.alpha, max_eta=self.eta_max,
                         bge_rho_grid_size=self.bge_rho_grid_size,
                         max_pt_correct=self.max_pt_correct,
                         ghost_area=self.ghost_area,
                         distance_type=fjcontrib.ConstituentSubtractor.deltaR)
        for R_max in self.max_distance
    ]

    print(self)
    print()
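# ---------------------------------------------------------------------------
# Worked example (illustration only) of the N-subjettiness axis list built in
# the __init__ above: three beta values for each N = 1..K-2 and two beta values
# for N = K-1. For K = 4 this gives N_list = [1, 1, 1, 2, 2, 2, 3, 3] and
# beta_list = [0.5, 1, 2, 0.5, 1, 2, 1, 2]. The helper name is hypothetical.
def _nsubjettiness_axes(K):
    N_list, beta_list = [], []
    for i in range(K - 2):
        N_list += [i + 1] * 3
        beta_list += [0.5, 1, 2]
    N_list += [K - 1] * 2
    beta_list += [1, 2]
    return N_list, beta_list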
def main():
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    parser.add_argument('--output', default="output.root", type=str)
    parser.add_argument('datalist', help='run through a file list', default='', type=str)
    parser.add_argument('--alpha', default=0, type=float)
    parser.add_argument('--dRmax', default=0.0, type=float)
    parser.add_argument('--zcut', default=0.1, type=float)
    parser.add_argument('--overwrite', help="overwrite output", default=False, action='store_true')
    parser.add_argument('--benchmark', help='benchmark pthat setting - 80 GeV', default=False, action='store_true')
    parser.add_argument('--jetptcut', help='remove jets below the cut', default=-100, type=float)
    parser.add_argument('--nev', help='number of events to run', default=0, type=int)
    args = parser.parse_args()

    if args.output == 'output.root':
        args.output = 'output_data_alpha_{}_dRmax_{}_SDzcut_{}.root'.format(args.alpha, args.dRmax, args.zcut)
        if args.jetptcut > -100:
            args.output = 'output_data_alpha_{}_dRmax_{}_SDzcut_{}_jpt_{}.root'.format(
                args.alpha, args.dRmax, args.zcut, args.jetptcut)

    if os.path.isfile(args.output):
        if not args.overwrite:
            print('[i] output', args.output, 'exists - use --overwrite to do just that...')
            return

    print(args)

    # alice specific
    max_eta = 0.9

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    jet_R0 = 0.4
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    print(jet_def)

    if args.benchmark:
        jet_selector = fj.SelectorPtMin(80.0) & fj.SelectorPtMax(100.0) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
        # jet_selector_cs = fj.SelectorPtMin(50.0) & fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
    else:
        jet_selector = fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)

    sd_zcut = args.zcut
    sd = fjcontrib.SoftDrop(0, sd_zcut, jet_R0)

    ja = JetAnalysisWithRho(jet_R=jet_R0, jet_algorithm=fj.antikt_algorithm, particle_eta_max=max_eta)

    data = DataIO(file_list=args.datalist)
    print(data)

    cs = None
    if args.dRmax > 0:
        cs = CEventSubtractor(alpha=args.alpha, max_distance=args.dRmax, max_eta=max_eta,
                              bge_rho_grid_size=0.25, max_pt_correct=100)
        print(cs)

    parts_selector = fj.SelectorAbsEtaMax(max_eta)

    outf = ROOT.TFile(args.output, 'recreate')
    outf.cd()
    t = ROOT.TTree('t', 't')
    tw = RTreeWriter(tree=t)

    # need to change this for data to drive...
    delta_t = 1e-6
    start_t = time.time()
    iev = 0
    while data.load_event():
        iev = iev + 1
        _data_parts = data.particles
        if cs:
            cs_parts = cs.process_event(_data_parts)
            rho = cs.bge_rho.rho()
            ja.analyze_event(cs_parts)
        else:
            ja.analyze_event(_data_parts)
            rho = ja.rho
        tmp = [fill_tree_data(j, tw, sd, rho, iev, 1.) for j in ja.jets if j.pt() > args.jetptcut]
        if iev % 1000 == 0:
            delta_t = time.time() - start_t
            print('[i] processing event', iev, ' - ev/sec = ', iev / delta_t, 'elapsed = ', delta_t)
        if args.nev > 0:
            if iev > args.nev:
                break

    print('[i] processed events', iev, ' - ev/sec = ', iev / delta_t, 'elapsed = ', delta_t)

    outf.Write()
    outf.Close()
    print('[i] written', outf.GetName())
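# ---------------------------------------------------------------------------
# Side note on the acceptance cut used repeatedly above (a sketch, assuming the
# standard fastjet python bindings seen elsewhere in these snippets): jets are
# accepted only if their axis is at least 1.05*R away from the edge of the
# particle acceptance, so the full jet cone fits inside the tracked region.
# The helper name make_jet_selector is hypothetical.
def make_jet_selector(max_eta=0.9, jet_R0=0.4, pt_min=None, pt_max=None):
    sel = fj.SelectorAbsEtaMax(max_eta - 1.05 * jet_R0)
    if pt_min is not None:
        sel = sel & fj.SelectorPtMin(pt_min)
    if pt_max is not None:
        sel = sel & fj.SelectorPtMax(pt_max)
    return sel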
def main():
    parser = argparse.ArgumentParser(description='test groomers', prog=os.path.basename(__file__))
    parser.add_argument('-o', '--output-filename', default="output.root", type=str)
    parser.add_argument('datalistpp', help='run through a file list', default='', type=str)
    parser.add_argument('--datalistAA', help='run through a file list - embedding mode', default='', type=str)
    parser.add_argument('--jetR', default=0.4, type=float)
    parser.add_argument('--alpha', default=0, type=float)
    parser.add_argument('--dRmax', default=0.25, type=float)
    parser.add_argument('--overwrite', help="overwrite output", default=False, action='store_true')
    parser.add_argument('--jetptcut', help='remove jets below the cut', default=50., type=float)
    parser.add_argument('--nev', help='number of events to run', default=0, type=int)
    parser.add_argument('--max-eta', help='max eta for particles', default=0.9, type=float)
    parser.add_argument('--npart-cut', help='npart cut on centrality low,high hint:' + npart_cents,
                        default='325,450', type=str)
    args = parser.parse_args()

    try:
        npart_min = int(args.npart_cut.split(',')[0])
        npart_max = int(args.npart_cut.split(',')[1])
    except:
        perror('unable to parse npart centrality selection - two integer numbers with a comma in-between needed - specified:',
               args.npart_cut)
        return 1

    # initialize constituent subtractor
    cs = None
    if args.dRmax > 0:
        cs = CEventSubtractor(alpha=args.alpha, max_distance=args.dRmax, max_eta=args.max_eta,
                              bge_rho_grid_size=0.25, max_pt_correct=100)

    pp_data = DataIO(name='Sim Pythia Detector level', file_list=args.datalistpp,
                     random_file_order=False, tree_name='tree_Particle_gen')
    ja_pp = JetAnalysis(jet_R=args.jetR, jet_algorithm=fj.antikt_algorithm,
                        jet_pt_min=50., particle_eta_max=args.max_eta)

    if args.datalistAA:
        aa_data = DataBackgroundIO(name='PbPb', file_list=args.datalistAA, tree_name='tree_Particle_gen')
        ja_emb = JetAnalysis(jet_R=args.jetR, jet_algorithm=fj.antikt_algorithm,
                             jet_pt_min=50., particle_eta_max=args.max_eta)
        ja_aa = JetAnalysis(jet_R=args.jetR, jet_algorithm=fj.antikt_algorithm,
                            jet_pt_min=50., particle_eta_max=args.max_eta)

    dndeta_selector = fj.SelectorAbsEtaMax(1.)
    # tg = thg.ThermalGenerator()
    print(cs)

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()

    gout = GroomerOutput(args.output_filename, enable_aa_trees=bool(args.datalistAA))

    delta_t = 0
    start_t = time.time()
    iev = 1
    while pp_data.load_event(offset=0):
        iev = iev + 1
        if args.nev > 0:
            if iev > args.nev:
                iev = iev - 1
                break
        if iev % 1000 == 0:
            delta_t = time.time() - start_t
            pinfo('processing event', iev, ' - ev/sec =', iev / delta_t, 'elapsed =', delta_t)

        # find jets on detector level
        if len(pp_data.particles) < 1:
            pwarning(iev, 'pp event skipped N parts', len(pp_data.particles))
            continue
        ja_pp.analyze_event(pp_data.particles)
        if len(ja_pp.jets) < 1:
            continue

        # pinfo('n particles', len(pp_data.particles))
        dndeta0 = dndeta_selector(pp_data.particles)
        [gout.fill_branches(j, syst=0, dndeta=len(dndeta0) / 2.) for j in ja_pp.jets]
        # pinfo('n jets', len(ja_pp.jets))

        if args.datalistAA:
            while True:
                aa_loaded = aa_data.load_event(offset=10000)
                if aa_data.event.npart < npart_min or aa_data.event.npart >= npart_max:
                    continue
                else:
                    if len(aa_data.particles) < 1:
                        pwarning(iev, 'AA event skipped N parts', len(aa_data.particles))
                        continue
                    else:
                        break
            if aa_loaded:
                ja_aa.analyze_event(aa_data.particles)
                dndeta1 = dndeta_selector(aa_data.particles)
                if len(ja_aa.jets) > 0:
                    [gout.fill_branches(j, syst=1, dndeta=len(dndeta1) / 2.) for j in ja_aa.jets]
                else:
                    # pwarning('no jets in AA event?', len(ja_aa.jets), 'while dndeta=', len(dndeta1)/2.)
                    pass
                emb_event = fj.vectorPJ()
                [emb_event.push_back(p) for p in pp_data.particles]
                [emb_event.push_back(p) for p in aa_data.particles]
                rho = 0
                if cs:
                    cs_parts = cs.process_event(emb_event)
                    rho = cs.bge_rho.rho()
                    ja_emb.analyze_event(cs_parts)
                else:
                    ja_emb.analyze_event(emb_event)
                # matches = [[jpp, jemb] for jpp in ja_pp.jets for jemb in ja_emb.jets
                #            if fjtools.matched_pt(jemb, jpp) > 0.5]
                # for mj in matches:
                #     gout.fill_branches(mj[0], syst=2, dndeta=len(dndeta1)/2., rho=rho)
                #     gout.fill_branches(mj[1], syst=3)
                [gout.fill_branches_prong_matching(j_pp, j_emb, dndeta=len(dndeta1) / 2., rho=rho)
                 for j_pp in ja_pp.jets for j_emb in ja_emb.jets]

    delta_t = time.time() - start_t
    pinfo('processed events', iev, ' - ev/sec =', iev / delta_t, 'elapsed =', delta_t)

    gout.write()
def main():
    parser = argparse.ArgumentParser(description='pythia8 fastjet on the fly',
                                     prog=os.path.basename(__file__))
    pyconf.add_standard_pythia_args(parser)
    parser.add_argument('--ignore-mycfg', help="ignore some settings hardcoded here",
                        default=False, action='store_true')
    parser.add_argument('--output', default="output.root", type=str)
    parser.add_argument('--alpha', default=0, type=float)
    parser.add_argument('--dRmax', default=0.25, type=float)
    parser.add_argument('--zcut', default=0.1, type=float)
    parser.add_argument('--overwrite', help="overwrite output", default=False, action='store_true')
    args = parser.parse_args()

    if args.output == 'output.root':
        args.output = 'output_alpha_{}_dRmax_{}_SDzcut_{}.root'.format(args.alpha, args.dRmax, args.zcut)

    if os.path.isfile(args.output):
        if not args.overwrite:
            print('[i] output', args.output, 'exists - use --overwrite to do just that...')
            return

    # print the banner first
    fj.ClusterSequence.print_banner()
    print()
    # set up our jet definition and a jet selector
    jet_R0 = 0.4
    jet_def = fj.JetDefinition(fj.antikt_algorithm, jet_R0)
    jet_selector = fj.SelectorPtMin(80.0) & fj.SelectorPtMax(100.0) & fj.SelectorAbsEtaMax(1 - 1.05 * jet_R0)
    jet_selector_cs = fj.SelectorPtMin(50.0) & fj.SelectorAbsEtaMax(1 - 1.05 * jet_R0)
    # jet_selector = fj.SelectorPtMin(10.0) & fj.SelectorPtMax(20.0) & fj.SelectorAbsEtaMax(1 - 1.05 * jet_R0)
    # jet_selector_cs = fj.SelectorPtMin(0.0) & fj.SelectorAbsEtaMax(1 - 1.05 * jet_R0)
    print(jet_def)

    mycfg = ['PhaseSpace:pThatMin = 80', 'PhaseSpace:pThatMax = -1']
    # mycfg = []
    if args.ignore_mycfg:
        mycfg = []
    pythia = pyconf.create_and_init_pythia_from_args(args, mycfg)
    if not pythia:
        print("[e] pythia initialization failed.")
        return

    sd_zcut = args.zcut
    sd = fjcontrib.SoftDrop(0, sd_zcut, jet_R0)

    max_eta = 1
    cs = CEventSubtractor(alpha=args.alpha, max_distance=args.dRmax, max_eta=max_eta,
                          bge_rho_grid_size=0.25, max_pt_correct=100)
    be = BoltzmannEvent(mean_pt=0.6, multiplicity=1000 * max_eta * 2, max_eta=max_eta, max_pt=100)
    parts_selector = fj.SelectorAbsEtaMax(max_eta)

    if args.nev < 100:
        args.nev = 100

    outf = ROOT.TFile(args.output, 'recreate')
    outf.cd()
    tn = ROOT.TNtuple('tn', 'tn', 'n:pt:phi:eta:cspt:csphi:cseta:dR:dpt:rg:csrg:z:csz:dRg:dzg')
    hpt = ROOT.TH1F('hpt', 'hpt', 40, 0, 160)
    hptcs = ROOT.TH1F('hptcs', 'hptcs', 40, 0, 160)
    hdpt = ROOT.TH1F('hdpt', 'hdpt', 40, -50, 50)
    hrg = ROOT.TH1F('hrg', 'hrg', 40, -1.1, 0.9)
    hrgcs = ROOT.TH1F('hrgcs', 'hrgcs', 40, -1.1, 0.9)
    hdrg = ROOT.TH1F('hdrg', 'hdrg', 40, -1.1, 0.9)
    hdz = ROOT.TH1F('hdz', 'hdz', 40, -1.1, 0.9)
    hdzz = ROOT.TH2F('hdzz', 'hdzz', 40, -1.1, 0.9, 40, -1.1, 0.9)
    hdphi = ROOT.TH1F('hdphi', 'hdphi', 90, -ROOT.TMath.Pi(), ROOT.TMath.Pi())
    t = ROOT.TTree('t', 't')
    tw = RTreeWriter(tree=t)

    for i in tqdm.tqdm(range(args.nev)):
        if not pythia.next():
            continue
        parts_pythia = pythiafjext.vectorize_select(pythia, [pythiafjext.kFinal])
        parts = parts_selector(parts_pythia)
        signal_jets = fj.sorted_by_pt(jet_selector(jet_def(parts)))
        if len(signal_jets) < 1:
            continue

        bg_parts = be.generate(offset=10000)
        full_event = bg_parts
        sjet = signal_jets[0]
        lc = [full_event.push_back(psj) for psj in sjet.constituents()]
        # idxs = [psj.user_index() for psj in sjet.constituents()]
        # print('pythia jet:', idxs)
        cs_parts = cs.process_event(full_event)
        cs_signal_jets = fj.sorted_by_pt(jet_selector_cs(jet_def(cs_parts)))
        emb_jets = fj.sorted_by_pt(jet_selector_cs(jet_def(full_event)))
        # max_eta_part = max([j.eta() for j in full_event])
        # print('number of particles', len(full_event), max_eta_part)
        # mean_pt = sum([j.pt() for j in bg_parts]) / len(bg_parts)
        # print('mean pt in bg', mean_pt)
        # print('number of CS particles', len(cs_parts))

        sd_signal_jet = sd.result(sjet)
        sd_info_signal_jet = fjcontrib.get_SD_jet_info(sd_signal_jet)

        # for j in cs_signal_jets:
        for j in emb_jets:
            if matched_pt(sjet, j) <= 0.5:
                continue
            sd_j = sd.result(j)
            sd_info_j = fjcontrib.get_SD_jet_info(sd_j)
            rho = cs.bge_rho.rho()
            # fill in the order of the ntuple fields:
            # n, pt, phi, eta, cspt, csphi, cseta, dR, dpt, rg, csrg, z, csz, dRg, dzg
            tn.Fill(i,
                    sjet.pt(), sjet.phi(), sjet.eta(),
                    j.pt(), j.phi(), j.eta(),
                    j.delta_R(sjet), j.pt() - sjet.pt(),
                    sd_info_signal_jet.dR, sd_info_j.dR,
                    sd_info_signal_jet.z, sd_info_j.z,
                    sd_info_j.dR - sd_info_signal_jet.dR,
                    sd_info_j.z - sd_info_signal_jet.z)
            hpt.Fill(sjet.pt())
            hptcs.Fill(j.pt())
            hdpt.Fill(j.pt() - sjet.pt())
            hrg.Fill(sd_info_signal_jet.dR)
            hrgcs.Fill(sd_info_j.dR)
            hdphi.Fill(sjet.delta_phi_to(j))
            if sd_info_j.dR > 0 and sd_info_signal_jet.dR > 0:
                hdrg.Fill(sd_info_j.dR - sd_info_signal_jet.dR)
            if sd_info_j.z > 0 and sd_info_signal_jet.z > 0:
                hdz.Fill(sd_info_j.z - sd_info_signal_jet.z)
                hdzz.Fill(sd_info_j.z, sd_info_j.z - sd_info_signal_jet.z)

        # for i, j in enumerate(signal_jets):
        #     j_sd = sd.result(j)
        #     sd_info = fjcontrib.get_SD_jet_info(j_sd)
        #     # print(" |-> SD jet params z={0:10.3f} dR={1:10.3f} mu={2:10.3f}".format(sd_info.z, sd_info.dR, sd_info.mu))

    pythia.stat()

    outf.Write()
    outf.Close()