def test_scale_estimation(self):
    this_mh_dict = dict(mh_dict)
    this_mh_dict['name'] += 'test_scale_estimation/'
    this_mh_dict['scale'] *= 5.1
    this_mh_dict['n_steps'] = 6

    sb = Submitter.get_submitter(
        this_mh_dict,
        use_cluster=False,
        n_cpu=5,
        do_sensitivity_scale_estimation='quick_injections',
    )
    sb.run_quick_injections_to_estimate_sensitivity_scale()

    true_value = flux_to_k(public_sens_3yr)

    self.assertAlmostEqual(
        sb.sens_guess, true_value / 0.9, delta=true_value / 0.9 * 0.6
    )
    self.assertGreater(sb.sens_guess / 0.5, true_value)
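# Hedged sketch (not part of the test above): flux_to_k and k_to_flux are
# assumed to be the inverse conversion pair from flarestack.shared, mapping a
# physical flux normalisation to the dimensionless internal scale "k" that the
# sensitivity guess above is compared against.
from flarestack.shared import flux_to_k, k_to_flux

flux = 1e-9  # hypothetical flux normalisation, GeV^-1 cm^-2 s^-1
k = flux_to_k(flux)
assert abs(k_to_flux(k) - flux) / flux < 1e-10  # round trip recovers the flux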
source_res = dict()

cat_path = ps_catalogue_name(sindec)

sindec_key = "sindec=" + '{0:.2f}'.format(sindec)

name = name_root + sindec_key + "/"

src_res = dict()

for length in lengths:
    full_name = name + str(length) + "/"

    scale = flux_to_k(reference_sensitivity(sindec) * 20) * max(
        1, window / abs(length)
    )

    # Standard Time Integration

    llh_time = {
        "Name": "FixedRefBox",
        "Fixed Ref Time (MJD)": ref_time,
        "Pre-Window": 0,
        "Post-Window": length,
    }

    llh_kwargs = {
        "LLH Energy PDF": energy_pdf,
        "LLH Time PDF": llh_time,
"Fixed Ref Time (MJD)": t_start, "Pre-Window": 0, "Post-Window": flare_length, "Time Smear?": True, "Min Offset": 0.0, "Max Offset": max_window - flare_length, } inj_kwargs = { "Injection Energy PDF": injection_energy, "Injection Time PDF": injection_time, "Poisson Smear?": True, } scale = flux_to_k( reference_sensitivity(np.sin(dec)) * ((70 * max_window) / flare_length)) mh_dict = { "name": full_name, "mh_name": mh_name, "datasets": gfu_v002_p01, "catalogue": cat_path, "inj kwargs": inj_kwargs, "llh_dict": llh_kwargs, "scale": scale, "n_trials": 5, "n_steps": 15, } pkl_file = make_analysis_pickle(mh_dict)
cat_path = ps_catalogue_name(sindec)

subname = name + "sindec=" + "{0:.2f}".format(sindec) + "/"

mh_dict = {
    "name": subname,
    "mh_name": "fixed_weights",
    "dataset": dataset,
    "catalogue": cat_path,
    "llh_dict": llh_dict,
    "inj_dict": inj_dict,
    "n_steps": 15,
    "n_trials": 1000,
}

mh_dict["scale"] = flux_to_k(ae.guess_discovery_potential(cat_path) * 1.5)

# analyse(mh_dict, n_cpu=24)

dataset_res[sindec] = mh_dict

all_res[label] = dataset_res

wait_cluster()

# Plot results
plt.figure()
ax1 = plt.subplot2grid((4, 1), (0, 0), colspan=3, rowspan=3)

all_sens = []
}

# Set up an injection dictionary which will be equal to the time pdf dictionary
injection_time = llh_time

# Loop over spectral indices
for gamma in gammas:
    full_name = time_name + str(gamma) + "/"

    length = float(time_key)

    # Try to estimate a good scale based on the 7-year point-source
    # sensitivity at the declination of the closest source
    scale = (
        0.00025
        * (
            flux_to_k(
                reference_sensitivity(np.sin(closest_src["dec_rad"]), gamma=gamma)
                * 40
                * math.sqrt(float(len(catalogue)))
            )
            * 200.0
        )
        / length
    )

    # In some cases the sensitivity lies outside the tested range;
    # adjust the scale in those cases to recover a good sensitivity
    if (gamma == 2.5) and (cat == "IIn"):
        scale *= 1.8
    if (gamma == 2.5) and (cat == "IIP"):
        scale *= 0.15
    if (gamma == 2.0) and (cat == "IIP"):
        scale *= 0.5
    if cat == "IIn":
        scale *= 5
    if length > 700:
        scale *= 2
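# Illustrative refactor sketch only, not the analysis code: the chained
# if-statements above could be tabulated in a lookup keyed on (gamma, cat).
# The factors are copied from the code above; "corrected_scale" is a
# hypothetical helper name.
CORRECTIONS = {
    (2.5, "IIn"): 1.8,
    (2.5, "IIP"): 0.15,
    (2.0, "IIP"): 0.5,
}

def corrected_scale(scale, gamma, cat, length):
    # Per-(gamma, catalogue) correction, then the blanket factors
    scale *= CORRECTIONS.get((gamma, cat), 1.0)
    if cat == "IIn":
        scale *= 5
    if length > 700:
        scale *= 2
    return scale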
} name = "analyses/benchmarks/ps_sens_7yr" # sindecs = np.linspace(0.90, -0.90, 3) sindecs = np.linspace(0.90, -0.90, 9) # sindecs = np.linspace(0.5, -0.5, 3) analyses = [] for sindec in sindecs: cat_path = ps_catalogue_name(sindec) subname = name + "/sindec=" + "{0:.2f}".format(sindec) + "/" scale = flux_to_k(reference_sensitivity(sindec)) * 5 mh_dict = { "name": subname, "mh_name": "fixed_weights", "dataset": ps_v002_p01, "catalogue": cat_path, "inj_dict": inj_kwargs, "llh_dict": llh_kwargs, "scale": scale, "n_trials": 50, "n_steps": 10, } analyse(mh_dict, cluster=True, n_jobs=100)
full_res = dict()

for i, n in enumerate(np.array(nsources)):
    logging.info(f"stacking {n} sources")
    logging.info(f"cat path is {fs_sources(n, sindec)}")

    name = (
        gamma_name + "{:.4f}/".format(sindec) + str(n) + "sources"
        if sindec is not None
        else gamma_name + "None/" + str(n) + "sources"
    )

    catalogue = np.load(fs_sources(n, sindec))
    closest_src = np.sort(catalogue, order="distance_mpc")[0]

    scale = (
        flux_to_k(
            reference_sensitivity(np.sin(closest_src["dec_rad"]), gamma=gamma)
            * 40
            * (math.log(float(len(catalogue)), 4) + 1)
        )
        * 200.0
    ) / length

    mh_dict = {
        "name": name,
        "mh_name": mh_name,
        "dataset": custom_dataset(
            ps_v002_p01, catalogue, llh_dict["llh_sig_time_pdf"]
        ),
        "catalogue": fs_sources(n, sindec),
        "inj_dict": inj_dict,
        "llh_dict":
"time_pdf_name": "FixedRefBox", "fixed_ref_time_mjd": t_start, "pre_window": 0, "post_window": flare_length, "time_smear_bool": True, "min_offset": 0.0, "max_offset": search_window - flare_length, } inj_kwargs = { "injection_energy_pdf": injection_energy, "injection_time_pdf": injection_time, } scale = flux_to_k( reference_sensitivity(np.sin(dec)) * (50 * search_window / flare_length)) mh_dict = { "name": full_name, "mh_name": mh_name, "datasets": custom_dataset(txs_sample_v1, catalogue, llh_kwargs["llh_time_pdf"]), "catalogue": cat_path, "inj_dict": inj_kwargs, "llh_dict":
"Pre-Window": 0.0, "Post-Window": injection_length, } injection_energy = dict(llh_energy) injection_energy["E Min"] = e_min injection_energy["Gamma"] = gamma inj_kwargs = { "Injection Energy PDF": injection_energy, "Injection Time PDF": injection_time, "Poisson Smear?": True, } scale = (flux_to_k( reference_sensitivity(np.sin(closest_src["dec"]), gamma=gamma) * 40 * math.sqrt(float(len(catalogue)))) * (e_min / 100.0)**0.2) mh_dict = { "name": full_name, "datasets": custom_dataset(txs_sample_v1, catalogue, llh_kwargs["LLH Time PDF"]), "catalogue": cat_path, "inj kwargs": inj_kwargs, "llh kwargs": llh_kwargs, "scale":
    if mh_name != "large_catalogue"
    else "standard_matrix",
    "llh_energy_pdf": llh_energy,
    "llh_sig_time_pdf": llh_time,
    "llh_bkg_time_pdf": {"time_pdf_name": "steady"},
    "gamma_precision": args.smoothing,
    "smoothing_order": args.smoothing,
}

full_name = f"{hsphere_name}/{gamma:.2f}"

length = 365 * 7

scale = (
    flux_to_k(
        reference_sensitivity(np.sin(0.5), gamma=gamma)
        * 40
        * math.sqrt(float(len(catalogue)))
    )
    * 200.0
) / length

if hsphere == "southern":
    scale *= 5

if seasons == ["IC40"]:
    scale *= 4

data = ps_v002_p01
if seasons:
    data = data.get_seasons(*seasons)

dataset = custom_dataset(data, catalogue, llh_dict["llh_sig_time_pdf"])
logger.debug(f"{dataset.keys()}")
catalogue["Relative Injection Weight"] = np.exp( np.random.normal(0., 2., int(n))) catalogue["Distance (Mpc)"] = np.ones(int(n)) catalogue["Ref Time (MJD)"] = np.random.uniform(55710., 56010, int(n)) cat_path = catalogue_dir + "random/" + str(n) + "_cat.npy" try: os.makedirs(os.path.dirname(cat_path)) except OSError: pass np.save(cat_path, catalogue) # cat_path = catalogue_dir + "TDEs/TDE_silver_catalogue.npy" # catalogue = np.load(cat_path) scale = flux_to_k(reference_sensitivity(np.sin(0.0))) * 40 * math.sqrt( float(n)) mh_dict = { "name": name, "datasets": ps_v002_p01, "catalogue": cat_path, "inj kwargs": inj_kwargs, "llh kwargs": llh_kwargs, "scale": scale, "n_trials": 5, "n_steps": 15 } analysis_path = analysis_dir + name
"Time-Integrated", "10 Day Flare", "2 Day Flare", ][i] f_name = ["negative_n_s", "positive_n_s", "flare_winter", "flare_murase"][i] flare_name = name + f_name + "/" res = dict() for gamma in gammas: full_name = flare_name + str(gamma) + "/" scale = flux_to_k( reference_sensitivity(np.sin(catalogue["dec"]), gamma=gamma) * 50 ) if i > 1: scale *= 10 ** (i - 1) inj = dict(inj_kwargs) inj["Injection Energy PDF"] = dict(inj["Injection Energy PDF"]) inj["Injection Energy PDF"]["Gamma"] = gamma if "E Min" in list(inj["Injection Energy PDF"].keys()): scale *= 10 mh_dict = {
def ts_distribution_evolution(self):
    logger.debug("plotting evolution of TS distribution")

    all_scales = np.array(list(self.results.keys()))
    # Use an array (not a list) so the boolean comparisons below broadcast
    all_scales_floats = np.array([float(sc) for sc in all_scales])

    logger.debug("all scales: " + str(all_scales_floats))
    logger.debug("sensitivity scale: " + str(flux_to_k(self.sensitivity)))

    # First tested scale at or above the sensitivity and discovery fluxes
    sens_scale = all_scales[all_scales_floats >= flux_to_k(self.sensitivity)][0]
    disc_scale = all_scales[all_scales_floats >= flux_to_k(self.disc_potential)][0]

    scales = [all_scales[0], sens_scale, disc_scale]

    ts_arrays = [np.array(self.results[scale]["TS"]) for scale in scales]

    ns_arrays = np.array(
        [
            np.array(
                [
                    np.median(self.results[scale]["Parameters"][key])
                    for key in self.results[scale]["Parameters"]
                    if "n_s" in key
                ]
            )
            for scale in scales
        ]
    )

    n_s = [sum(a) for a in ns_arrays]
    logger.debug("numbers of injected neutrinos: " + str(n_s))

    fig, ax = plt.subplots()

    ax.hist(
        ts_arrays[0],
        histtype="stepfilled",
        label="background",
        density=True,
        alpha=0.6,
        color="blue",
    )

    ax.hist(
        ts_arrays[1],
        histtype="step",
        density=True,
        color="orange",
        label="signal: {:.2} signal neutrinos".format(n_s[1]),
    )
    ax.axvline(self.bkg_median, ls="--", label="sensitivity threshold", color="orange")

    ax.hist(
        ts_arrays[2],
        histtype="step",
        density=True,
        color="red",
        label="signal: {:.2} signal neutrinos".format(n_s[2]),
    )
    ax.axvline(
        self.disc_ts_threshold,
        ls="--",
        label="discovery potential threshold",
        color="red",
    )

    ax.set_xlabel("Test Statistic")
    ax.set_ylabel("a.u.")
    ax.legend()
    ax.set_yscale("log")
    plt.tight_layout()

    sn = os.path.join(self.plot_dir, "ts_distributions/ts_evolution_.pdf")
    logger.debug("saving plot to " + sn)

    fig.savefig(sn)
    plt.close()
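# Standalone sketch of the scale-selection step above: pick the first tested
# scale (stored as a string key) whose float value reaches a target k-value.
# The keys and target below are made up for illustration.
import numpy as np

all_scales = np.array(["0.0", "1.5", "3.0", "4.5"])
target_k = 2.0
first_scale = all_scales[all_scales.astype(float) >= target_k][0]
print(first_scale)  # -> "3.0"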
source_res = dict()

cat_path = ps_catalogue_name(sindec)

sindec_key = "sindec=" + '{0:.2f}'.format(sindec)

name = name_root + sindec_key + "/"

src_res = dict()

for offset in offsets:
    full_name = name + str(offset) + "/"

    scale = flux_to_k(reference_sensitivity(sindec) * 20) * (
        window / (window - abs(offset))
    )

    # Standard Time Integration

    llh_time = {
        "Name": "FixedRefBox",
        "Fixed Ref Time (MJD)": 55800 + offset,
        "Pre-Window": 0,
        "Post-Window": window,
    }

    llh_kwargs = {
        "LLH Energy PDF": energy_pdf,
        "LLH Time PDF": llh_time,
        "Fit Gamma?": True,
        "Fit Negative n_s?": True,
# Loop over spectral indices
for gamma in gammas:
    full_name = time_name + str(gamma) + "/"

    length = float(time_key)

    scale = (
        0.1
        * (
            flux_to_k(
                reference_sensitivity(np.sin(closest_src["dec_rad"]), gamma=gamma)
                * 40
                * math.sqrt(float(len(catalogue)))
            )
            * 200.0
        )
        / length
    )

    if cat == "IIn":
        scale *= 1.5

    injection_energy = dict(llh_energy)
    injection_energy["gamma"] = gamma

    inj_dict = {
"Name": "Box", "Pre-Window": 0, "Post-Window": flare_length, "Time Smear?": True, "Min Offset": 0., "Max Offset": max_window - flare_length } inj_kwargs = { "Injection Energy PDF": injection_energy, "Injection Time PDF": injection_time, "Poisson Smear?": True, } scale = old_div(100 * math.sqrt(float(len(catalogue))) * flux_to_k( reference_sensitivity(np.sin(closest_src["dec"]), gamma=2) ) * max_window, flare_length) # print scale mh_dict = { "name": full_name, "datasets": custom_dataset(txs_sample_v2, catalogue, llh_kwargs["LLH Time PDF"]), "catalogue": cat_path, "inj kwargs": inj_kwargs, "llh kwargs": llh_kwargs, "scale": scale, "n_trials": 1, "n_steps": 15 }
# agn_catalogue_name("radioloud", "2rxs_100random_srcs"), # agn_catalogue_name("radioloud", "2rxs_test"), "llh_dict": llh_dict, "inj kwargs": inj_dict, "n_trials": 50, "n_steps": 10 } # cat_name = agn_catalogue_name("random", "2rxs_100brightest_srcs") # cat_name = agn_catalogue_name("radioloud", "2rxs_100brightest_srcs_dec0_weight1") # cat = np.load(cat_name) # print "Cat is ", cat_name, " Its lenght is: ", len(cat) scale = flux_to_k( reference_sensitivity(0.5, gamma) * 20 ) #*20*10**-3 #0.5 is the usally the sin_dec of the closest source -> [this produced 60000 neutrinos!!! mh_dict["scale"] = scale pkl_file = make_analysis_pickle(mh_dict) # rd.submit_to_cluster(pkl_file, n_jobs=20) rd.wait_for_cluster() # mh = MinimisationHandler.create(mh_dict) # mh.iterate_run(scale=scale, n_steps=10, n_trials=50) # mh.iterate_run(scale=scale, n_steps=3, n_trials=5) rh = ResultsHandler(mh_dict)
def __init__(self, flux_norm, bkg_time_pdf_dict):
    self.flux_norm = flux_to_k(flux_norm)
    self.bkg_time_pdf_dict = bkg_time_pdf_dict
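# Hypothetical usage of the constructor above. The enclosing class is not
# shown in this snippet, so the stand-in "FluxModel" below only reproduces
# the __init__ logic: the physical flux normalisation is converted to the
# internal k scale once, at construction time.
from flarestack.shared import flux_to_k

class FluxModel:
    def __init__(self, flux_norm, bkg_time_pdf_dict):
        self.flux_norm = flux_to_k(flux_norm)
        self.bkg_time_pdf_dict = bkg_time_pdf_dict

model = FluxModel(
    flux_norm=1e-9,  # assumed units: GeV^-1 cm^-2 s^-1
    bkg_time_pdf_dict={"time_pdf_name": "steady"},  # assumed steady background
)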
"fixed_ref_time_mjd": t_start, "pre_window": 0., "post_window": flare_length, "time_smear_bool": True, "min_offset": 0., "max_offset": max_window - flare_length } inj_dict = { "injection_energy_pdf": injection_energy, "injection_sig_time_pdf": injection_time, } # Sets a default flux scale for signal injection scale = flux_to_k(reference_sensitivity(np.sin(catalogue["dec_rad"])) * (50 * max_window / flare_length)) if cat != "AT2018cow": dataset = custom_dataset(txs_sample_v2, catalogue, llh_dict["llh_sig_time_pdf"]) else: dataset = gfu_v002_p04 mh_dict = { "name": full_name, "mh_name": mh_name, "dataset": dataset, "catalogue": cat_path, "inj_dict": inj_dict, "llh_dict": llh_dict, "scale": scale,
config_mh = []

for sin_dec in sin_decs:
    name = seed_name + "sindec=" + "{0:.2f}".format(sin_dec) + "/"

    llh_dict = {
        "name": "standard_overlapping",
        "LLH Energy PDF": injection_energy,
        "LLH Time PDF": injection_time,
        "pull_name": pull_corrector,
        "floor_name": floor,
    }

    scale = flux_to_k(reference_sensitivity(sin_dec, gamma)) * 5

    mh_dict = {
        "name": name,
        "mh_name": "fixed_weights",
        "datasets": [IC86_1_dict],
        "catalogue": ps_catalogue_name(sin_dec),
        "llh_dict": llh_dict,
        "inj kwargs": inj_dict,
        "n_trials": 20,
        "n_steps": 15,
        "scale": scale,
    }

    pkl_file = make_analysis_pickle(mh_dict)
if llh_name == "fixed_energy": llh_dict["LLH Energy PDF"]["Gamma"] = gamma inj_dict = { "Injection Time PDF": { "Name": "Steady" }, "Injection Energy PDF": { "Name": "Power Law", "Gamma": gamma, }, "fixed_n": 30 } mh_dict = { "name": name, "mh_name": "fixed_weights", "datasets": [IC86_1_dict], # "catalogue": ps_catalogue_name(sin_dec), "catalogue": tde_catalogue_name("jetted"), "llh_dict": llh_dict, "inj kwargs": inj_dict } scale = flux_to_k(reference_sensitivity( sin_dec, gamma)) * 125 * ([4.0, 1.0, 0.3, 10.0][j]) mh = MinimisationHandler.create(mh_dict) mh.iterate_run(scale=scale, n_steps=2, n_trials=100) rh = ResultsHandler(mh_dict)
# Try to fit a power law to the data
llh_energy = {"Name": "Power Law"}

# Set up a likelihood that fits the number of signal events (n_s), and also
# the spectral index (gamma) of the source
llh_kwargs = {
    "name": "standard",
    "LLH Energy PDF": llh_energy,
    "LLH Time PDF": llh_time,
}

# Takes a guess at the correct flux scale, based on previous IceCube results
scale = flux_to_k(2.0) * 10 ** ((160.0 - i) / 160.0)

# Assign a unique name for each different minimisation handler dictionary
name = base_dir + str(i) + "/"

# Creates the Minimisation Handler dictionary, which contains all relevant
# information to run an analysis
mh_dict = {
    "name": name,
    "mh_name": "fixed_weights",
    "datasets": [IC86_234_dict],
    "catalogue": txs_cat_path,
    "inj kwargs": inj_kwargs,
    "llh_dict": llh_kwargs,
full_name = name + str(gamma) + "/"

injection_time = llh_time = {"Name": "FixedEndBox"}

injection_energy = dict(llh_energy)
injection_energy["E Min"] = e_min
injection_energy["Gamma"] = gamma

inj_kwargs = {
    "Injection Energy PDF": injection_energy,
    "Injection Time PDF": injection_time,
    "Poisson Smear?": True,
}

scale = (
    flux_to_k(reference_sensitivity(np.sin(catalogue["dec"]), gamma=gamma))
    * 60
    * (e_min / 100.0) ** 0.2
)

mh_dict = {
    "name": full_name,
    "datasets": custom_dataset(txs_sample_v1, catalogue, llh_kwargs["LLH Time PDF"]),
    "catalogue": txs_cat_path,
    "inj kwargs": inj_kwargs,
    "llh kwargs": llh_kwargs,
    "scale":