Example no. 1
 def initialize_config(self):
   
   # Call base class initialization
   process_base.ProcessBase.initialize_config(self)
   
   # Read config file
   with open(self.config_file, 'r') as stream:
     config = yaml.safe_load(stream)
     
   self.fast_simulation = config['fast_simulation']
   self.dry_run = config['dry_run']
   self.skip_deltapt_RC_histograms = True
   self.fill_RM_histograms = True
   
   self.jet_matching_distance = config['jet_matching_distance']
   self.reject_tracks_fraction = config['reject_tracks_fraction']
   if 'mc_fraction_threshold' in config:
     self.mc_fraction_threshold = config['mc_fraction_threshold']
   
    if self.do_constituent_subtraction:
      self.is_pp = False
      self.emb_file_list = config['emb_file_list']
      self.main_R_max = config['constituent_subtractor']['main_R_max']
    else:
      self.is_pp = True
       
   if 'thermal_model' in config:
     self.thermal_model = True
     beta = config['thermal_model']['beta']
     N_avg = config['thermal_model']['N_avg']
     sigma_N = config['thermal_model']['sigma_N']
     self.thermal_generator = thermal_generator.ThermalGenerator(N_avg, sigma_N, beta)
   else:
     self.thermal_model = False
        
   # Create dictionaries to store grooming settings and observable settings for each observable
   # Each dictionary entry stores a list of subconfiguration parameters
   #   The observable list stores the observable setting, e.g. subjetR
   #   The grooming list stores a list of grooming settings {'sd': [zcut, beta]} or {'dg': [a]}
   self.observable_list = config['process_observables']
   self.obs_settings = {}
   self.obs_grooming_settings = {}
   for observable in self.observable_list:
   
     obs_config_dict = config[observable]
      obs_subconfig_list = [name for name in obs_config_dict if 'config' in name]
     self.obs_settings[observable] = self.utils.obs_settings(observable, obs_config_dict, obs_subconfig_list)
     self.obs_grooming_settings[observable] = self.utils.grooming_settings(obs_config_dict)
     
   # Construct set of unique grooming settings
   self.grooming_settings = []
   lists_grooming = [self.obs_grooming_settings[obs] for obs in self.observable_list]
   for observable in lists_grooming:
     for setting in observable:
        if setting not in self.grooming_settings and setting is not None:
         self.grooming_settings.append(setting)
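
For reference, a minimal sketch of the configuration this method expects, written as the dictionary that yaml.safe_load would return. The key names are taken from the accesses above; the concrete values, the observable name 'theta_g', and the content of its subconfigurations are purely illustrative, since the format consumed by self.utils.obs_settings() and self.utils.grooming_settings() is not shown here.

# Illustrative config structure for initialize_config(); all values are placeholders.
example_config = {
    'fast_simulation': False,
    'dry_run': False,
    'jet_matching_distance': 0.6,
    'reject_tracks_fraction': 0.0,
    'mc_fraction_threshold': 0.5,                     # optional
    'emb_file_list': '/path/to/emb_file_list.txt',    # used only with constituent subtraction
    'constituent_subtractor': {'main_R_max': 0.25},
    'thermal_model': {                                # optional; presence enables the thermal generator
        'beta': 0.5,
        'N_avg': 2500,
        'sigma_N': 500,
    },
    'process_observables': ['theta_g'],               # hypothetical observable name
    'theta_g': {
        # Sub-keys containing the substring 'config' are treated as subconfigurations.
        'config1': {'SoftDrop': {'zcut': 0.1, 'beta': 0}},
        'config2': {'SoftDrop': {'zcut': 0.2, 'beta': 0}},
    },
}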
Example no. 2
    def plot_delta_pt(self, N_avg, beta):

        self.thermal_generator = thermal_generator.ThermalGenerator(
            N_avg=N_avg, sigma_N=500, beta=beta, eta_max=self.eta_max)

        # Loop through events
        self.delta_pt_random_cone = []
        self.mean_pt = []
        for _ in range(self.n_events):
            fj_particles_background = self.thermal_generator.load_event()

            # Compute delta-pt by the random-cone method
            # (delta_pt_RC is expected to append to self.delta_pt_random_cone and self.mean_pt)
            self.delta_pt_RC(fj_particles_background, N_avg)

        # Plot and save
        mean = np.round(np.mean(self.delta_pt_random_cone), 2)
        sigma = np.round(np.std(self.delta_pt_random_cone), 2)
        plt.hist(self.delta_pt_random_cone,
                 np.linspace(-50, 50, 100),
                 histtype='stepfilled',
                 label=rf'$\mathrm{{mean}} = {mean},\;\sigma = {sigma}$',
                 linewidth=2,
                 linestyle='-',
                 alpha=0.5)
        plt.xlabel(r'$\delta p_{T}$', fontsize=14)
        plt.yscale('log')
        plt.legend(loc='best', fontsize=14, frameon=False)
        plt.tight_layout()
        plt.savefig(
            os.path.join(self.output_dir,
                         f'delta_pt_random_cone_N{N_avg}_beta{beta}.pdf'))
        plt.close()

        print(
            f'mean pt (N={N_avg},beta={beta}): {np.round(np.mean(self.mean_pt),2)}'
        )
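
plot_delta_pt() relies on self.delta_pt_RC(), which is not shown here. Below is a minimal sketch of one common random-cone definition, not necessarily the method used above: it drops the N_avg argument, draws a single cone of radius R fully inside the acceptance, and subtracts the event-wide pt density times the cone area; the cone radius and acceptance are assumed parameters.

import math
import random

def delta_pt_RC_sketch(self, fj_particles, R=0.4, eta_max=0.9):
    # Place one random cone of radius R fully inside |eta| < eta_max.
    eta_cone = random.uniform(-(eta_max - R), eta_max - R)
    phi_cone = random.uniform(0., 2. * math.pi)

    pt_in_cone = 0.
    pt_total = 0.
    for p in fj_particles:
        pt_total += p.perp()
        deta = p.eta() - eta_cone
        dphi = abs(p.phi() - phi_cone)
        if dphi > math.pi:
            dphi = 2. * math.pi - dphi
        if deta * deta + dphi * dphi < R * R:
            pt_in_cone += p.perp()

    # Expected background in the cone: event-wide pt density times cone area.
    rho = pt_total / (2. * eta_max * 2. * math.pi)
    self.delta_pt_random_cone.append(pt_in_cone - rho * math.pi * R * R)
    self.mean_pt.append(pt_total / len(fj_particles))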
Example no. 3
    def initialize_config(self):

        # Call base class initialization
        process_base.ProcessBase.initialize_config(self)

        # Read config file
        with open(self.config_file, 'r') as stream:
            config = yaml.safe_load(stream)

        self.fast_simulation = config['fast_simulation']
        if 'jetscape' in config:
            self.jetscape = config['jetscape']
        else:
            self.jetscape = False
        if 'event_plane_angle' in config:
            self.event_plane_range = config['event_plane_angle']
        else:
            self.event_plane_range = None
        if 'matching_systematic' in config:
            self.matching_systematic = config['matching_systematic']
        else:
            self.matching_systematic = False
        self.dry_run = config['dry_run']
        self.skip_deltapt_RC_histograms = True
        self.fill_RM_histograms = True

        self.jet_matching_distance = config['jet_matching_distance']
        self.reject_tracks_fraction = config['reject_tracks_fraction']
        if 'mc_fraction_threshold' in config:
            self.mc_fraction_threshold = config['mc_fraction_threshold']

        if self.do_constituent_subtraction:
            self.is_pp = False
            self.emb_file_list = config['emb_file_list']
            self.main_R_max = config['constituent_subtractor']['main_R_max']
        else:
            self.is_pp = True

        if 'thermal_model' in config:
            self.thermal_model = True
            beta = config['thermal_model']['beta']
            N_avg = config['thermal_model']['N_avg']
            sigma_N = config['thermal_model']['sigma_N']
            self.thermal_generator = thermal_generator.ThermalGenerator(
                N_avg, sigma_N, beta)
        else:
            self.thermal_model = False

        # Create dictionaries to store grooming settings and observable settings for each observable
        # Each dictionary entry stores a list of subconfiguration parameters
        #   The observable list stores the observable setting, e.g. subjetR
        #   The grooming list stores a list of grooming settings {'sd': [zcut, beta]} or {'dg': [a]}
        self.observable_list = config['process_observables']
        self.obs_settings = {}
        self.obs_grooming_settings = {}
        self.obs_names = {}
        for observable in self.observable_list:

            obs_config_dict = config[observable]
            obs_subconfig_list = [
                name for name in obs_config_dict if 'config' in name
            ]
            self.obs_settings[observable] = self.utils.obs_settings(
                observable, obs_config_dict, obs_subconfig_list)
            self.obs_grooming_settings[
                observable] = self.utils.grooming_settings(obs_config_dict)

            self.obs_names[observable] = obs_config_dict["common_settings"][
                "xtitle"]

        # Construct set of unique grooming settings
        self.grooming_settings = []
        lists_grooming = [
            self.obs_grooming_settings[obs] for obs in self.observable_list
        ]
        for observable in lists_grooming:
            for setting in observable:
                if setting not in self.grooming_settings and setting is not None:
                    self.grooming_settings.append(setting)

        # Flag for creating delta-observable histograms in Pb-Pb case.
        # You can override this by setting the flag to True in your user class.
        # NOTE: requires implementation of self.calculate_observable() and creation
        #     of user histograms with this form:
        #     'hDeltaObs_%s_emb_R%s_%s%s' % (observable, jetR, obs_label, suffix)
        self.fill_delta_obs = False
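
The histogram naming convention quoted in the comment above can be made concrete. A minimal sketch follows, with illustrative values for observable, jetR, obs_label and suffix and a placeholder binning; in practice these come from the user class and its config.

import ROOT

observable = 'theta_g'      # hypothetical observable
jetR = 0.4
obs_label = 'zcut01_B0'     # hypothetical grooming label
suffix = '_Rmax0.25'

name = 'hDeltaObs_%s_emb_R%s_%s%s' % (observable, jetR, obs_label, suffix)
h_delta_obs = ROOT.TH1F(name, name, 200, -1., 1.)   # placeholder binning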
Example no. 4
def main():
    parser = argparse.ArgumentParser(description='test groomers',
                                     prog=os.path.basename(__file__))
    parser.add_argument('-o',
                        '--output-filename',
                        default="centrality_output.root",
                        type=str)
    parser.add_argument('datalist',
                        help='run through a file list',
                        default='',
                        type=str)
    parser.add_argument('--overwrite',
                        help="overwrite output",
                        default=False,
                        action='store_true')
    parser.add_argument('--nev',
                        help='number of events to run',
                        default=0,
                        type=int)
    parser.add_argument('--max-eta',
                        help='max eta for particles',
                        default=0.9,
                        type=float)
    parser.add_argument('--thermal',
                        help='enable thermal generator',
                        action='store_true',
                        default=False)
    parser.add_argument('--thermal-default',
                        help='enable thermal generator',
                        action='store_true',
                        default=False)
    parser.add_argument('--particles',
                        help='stream particles',
                        action='store_true',
                        default=False)
    parser.add_argument('--npart-cut',
                        help='npart cut on centrality low,high hint:' +
                        npart_cents,
                        default='325,450',
                        type=str)
    parser.add_argument('--nch-cut',
                        help='nch cut on centrality low,high hint:' +
                        nch_cents,
                        default='18467,50000',
                        type=str)

    args = parser.parse_args()

    try:
        npart_min = int(args.npart_cut.split(',')[0])
        npart_max = int(args.npart_cut.split(',')[1])
    except (ValueError, IndexError):
        perror(
            'unable to parse npart centrality selection - two integers separated by a comma needed - specified:',
            args.npart_cut)
        return 1

    try:
        nch_min = int(args.nch_cut.split(',')[0])
        nch_max = int(args.nch_cut.split(',')[1])
    except (ValueError, IndexError):
        perror(
            'unable to parse nch centrality selection - two integers separated by a comma needed - specified:',
            args.nch_cut)
        return 1

    outf = ROOT.TFile(args.output_filename, 'recreate')
    outf.cd()
    t = ROOT.TTree('t', 't')
    tw = RTreeWriter(tree=t)
    hpt_antyr = ROOT.TH1F('hpt_antyr', 'hpt_antyr', 100, 0, 100)
    hpt_antyr_c = ROOT.TH1F('hpt_antyr_c', 'hpt_antyr_c', 100, 0, 100)
    hpt_therm = ROOT.TH1F('hpt_therm', 'hpt_therm', 100, 0, 100)
    hpt_therm_c = ROOT.TH1F('hpt_therm_c', 'hpt_therm_c', 100, 0, 100)

    data = DataIO(name='Sim Pythia Detector level',
                  file_list=args.datalist,
                  random_file_order=False,
                  tree_name='tree_Particle_gen')
    dndeta_selector = fj.SelectorAbsEtaMax(abs(
        args.max_eta)) & fj.SelectorPtMin(0.15)

    tg_default = None
    if args.thermal_default:
        tg_default = thg.ThermalGenerator()
        print(tg_default)

    tg_central = None
    if args.thermal:
        tg_central = thg.ThermalGenerator(beta=0.5, N_avg=3000, sigma_N=500)
        print(tg_central)

    delta_t = 0
    start_t = time.time()
    iev = 0  # incremented before each event, so it ends up equal to the number processed
    while data.load_event(offset=0):
        iev = iev + 1
        if args.nev > 0:
            if iev > args.nev:
                iev = iev - 1
                break
        if iev % 1000 == 0:
            delta_t = time.time() - start_t
            pinfo('processing event', iev, ' - ev/sec =', iev / delta_t,
                  'elapsed =', delta_t)

        # find jets on detector level
        if len(data.particles) < 1:
            pwarning(iev, 'pp event skipped N parts', len(data.particles))
            continue

        # print(data.event)

        dndeta0_parts = dndeta_selector(data.particles)
        dndeta0 = len(dndeta0_parts) / (abs(args.max_eta * 2.))
        for p in dndeta0_parts:
            hpt_antyr.Fill(p.perp())
        if args.particles:
            tw.fill_branches(dndeta=dndeta0, p=data.particles)
        else:
            tw.fill_branches(dndeta=dndeta0)
        tw.fill_branches_attribs(
            data.event, ['sigma', 'npart', 'nch', 'nchfwd', 'nchselect'],
            prefix='antyr_')

        if data.event.npart < npart_min or data.event.npart >= npart_max:
            tw.fill_branches(cent10npart=0)
        else:
            tw.fill_branches(cent10npart=1)
            for p in dndeta0_parts:
                hpt_antyr_c.Fill(p.perp())

        if data.event.nch < nch_min or data.event.nch >= nch_max:
            tw.fill_branches(cent10nch=0)
        else:
            tw.fill_branches(cent10nch=1)

        if tg_default:
            thg_particles = tg_default.load_event()
            dndetathg_default = dndeta_selector(thg_particles)
            if args.particles:
                tw.fill_branches(dndeta_thg_0=len(dndetathg_default) /
                                 (abs(args.max_eta * 2.)),
                                 p_thg_0=thg_particles)
            else:
                tw.fill_branches(dndeta_thg_0=len(dndetathg_default) /
                                 (abs(args.max_eta * 2.)))

        if tg_central:
            thg_parts_central = tg_central.load_event()
            dndetathg_central = dndeta_selector(thg_parts_central)
            for p in dndetathg_central:
                hpt_therm_c.Fill(p.perp())
            if args.particles:
                tw.fill_branches(dndeta_thg_c=len(dndetathg_central) /
                                 (abs(args.max_eta * 2.)),
                                 p_thg_c=thg_parts_central)
            else:
                tw.fill_branches(dndeta_thg_c=len(dndetathg_central) /
                                 (abs(args.max_eta * 2.)))

        tw.fill_tree()

    delta_t = time.time() - start_t
    pinfo('processed events', iev, ' - ev/sec =', iev / delta_t, 'elapsed =',
          delta_t)

    outf.Write()
    outf.Close()
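
A short read-back sketch for the output file, assuming that RTreeWriter names the branches after the keyword arguments passed to fill_branches() (dndeta, cent10npart, ...); the file name matches the script's default and the histogram binning is a placeholder.

import ROOT

f = ROOT.TFile.Open('centrality_output.root')
t = f.Get('t')

# Multiplicity density for events passing the npart-based centrality selection.
t.Draw('dndeta>>h_dndeta_central(100, 0, 3000)', 'cent10npart == 1')
h = ROOT.gDirectory.Get('h_dndeta_central')
print('selected events:', int(h.GetEntries()))
f.Close()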