# Build the signal-injection configuration: inject with the same time PDF as
# the likelihood, and an energy PDF matching the likelihood's but pinned to
# this trial's spectral index and energy bounds.
injection_time = llh_time
injection_energy = dict(llh_energy)
injection_energy["gamma"] = gamma_index
injection_energy["e_min_gev"] = e_min
injection_energy["e_max_gev"] = e_max

inj_kwargs = {
    "injection_energy_pdf": injection_energy,
    "injection_sig_time_pdf": injection_time,
}

# Minimisation-handler configuration for this source selection / gamma.
# NOTE(review): only a single trial is run here — this section is used to
# estimate the flux scale, not to produce final sensitivity curves.
mh_dict = {
    "name": full_name_en,
    "mh_name": "large_catalogue",
    "dataset": diffuse_8_year.get_seasons(),  # subselection_fraction=1),
    "catalogue": cat_path,
    "llh_dict": llh_dict,
    "inj_dict": inj_kwargs,
    "n_trials": 1,  # 10,
    # "n_steps": 15,
}

mh = MinimisationHandler.create(mh_dict)

# Derive a per-decade flux scale guess. The leading "3 *" and "/ 3" cancel
# numerically; they are kept as written to preserve the original expression.
# presumably the "/ 7" and scale_factor_per_decade[i] are empirical
# normalisations — TODO confirm against the analysis notes.
scale_factor = 3 * mh.guess_scale() / 3 / 7 / scale_factor_per_decade[i]
print("Scale Factor: ", scale_factor_per_decade[i], scale_factor)

# How to run on the cluster for sources < 3162
mh_dict["n_steps"] = 15
# For every spectral index under test, mock-unblind the corresponding
# brightest-source subselection of the AGN catalogue.
for gamma_index in gammas:
    res = dict()

    # Select the N brightest sources and load the matching catalogue files.
    nr_srcs = int(nr_brightest_sources[0])
    cat_path = agn_subset_catalogue(cat_type, method, nr_srcs)
    catalogue = load_catalogue(cat_path)
    cat = np.load(cat_path)

    # Unique identifiers for this (selection, gamma) combination and for the
    # background-TS distribution it should be compared against.
    name = generate_name(unique_key, nr_srcs, gamma_index)
    bkg_ts = bkg_ts_base_name(unique_key, nr_srcs)

    # Injection PDFs mirror the likelihood PDFs, with gamma fixed per trial.
    injection_time = llh_time
    injection_energy = dict(llh_energy)
    injection_energy["gamma"] = gamma_index

    inj_kwargs = {
        "injection_energy_pdf": injection_energy,
        "injection_sig_time_pdf": injection_time,
    }

    unblind_dict = {
        "name": name,
        "mh_name": "large_catalogue",
        "dataset": diffuse_8_year.get_seasons(),
        "catalogue": cat_path,
        "llh_dict": llh_dict,
        "background_ts": bkg_ts,
    }

    # mock_unblind=True keeps the data blinded (scrambled); no plots are made.
    ub = create_unblinder(unblind_dict, mock_unblind=True, full_plots=False)