# Injector configuration: signal energy and time PDFs.
inj_kwargs = {
    "injection_energy_pdf": injection_energy,
    "injection_time_pdf": injection_time,
}

# Injection-scale guess: reference point-source sensitivity at this
# declination, boosted by search_window/flare_length (a shorter flare needs
# a larger flux for comparable fluence).
# NOTE(review): assumes search_window and flare_length share units — confirm.
scale = flux_to_k(
    reference_sensitivity(np.sin(dec)) * (50 * search_window / flare_length))

# Configuration dictionary consumed by the minimisation handler.
mh_dict = {
    "name": full_name,
    "mh_name": mh_name,
    "datasets": custom_dataset(txs_sample_v1, catalogue,
                               llh_kwargs["llh_time_pdf"]),
    "catalogue": cat_path,
    "inj_dict": inj_kwargs,
    "llh_dict": llh_kwargs,
    "scale": scale,
    "n_trials": 100,
    "n_steps": 15,  # number of flux values
}

# if mh_name == "flare":
"None/" + str(n) + "sources") catalogue = np.load(fs_sources(n, sindec)) closest_src = np.sort(catalogue, order="distance_mpc")[0] scale = (flux_to_k( reference_sensitivity(np.sin(closest_src["dec_rad"]), gamma=gamma) * 40 * (math.log(float(len(catalogue)), 4) + 1)) * 200.0) / length mh_dict = { "name": name, "mh_name": mh_name, "dataset": custom_dataset(ps_v002_p01, catalogue, llh_dict["llh_sig_time_pdf"]), "catalogue": fs_sources(n, sindec), "inj_dict": inj_dict, "llh_dict": llh_dict, "scale": scale, "n_trials": ntrials / cluster if cluster else ntrials, "n_steps": 10, } job_id = None
    # (fragment) tail of a time-PDF dict started before this chunk.
    "Fixed Ref Time (MJD)": ref_time,
    "Pre-Window": 0,
    "Post-Window": length
}

# Likelihood configuration (legacy capitalised-key style).
llh_kwargs = {
    "LLH Energy PDF": energy_pdf,
    "LLH Time PDF": llh_time,
    "Fit Gamma?": True,
    "Fit Negative n_s?": True,
    "Fit Weights?": False
}

# Configuration dictionary consumed by the minimisation handler.
mh_dict = {
    "name": full_name,
    "datasets": custom_dataset(ps_v002_p01, np.load(cat_path),
                               llh_kwargs["LLH Time PDF"]),
    "catalogue": cat_path,
    "inj kwargs": inj,
    "llh kwargs": llh_kwargs,
    "scale": scale,
    "n_trials": 100,
    "n_steps": 10
}

# Ensure the per-analysis output directory exists.
# NOTE(review): the bare OSError also swallows permission errors, not just
# "already exists" — consider os.makedirs(..., exist_ok=True).
analysis_path = analysis_dir + full_name

try:
    os.makedirs(analysis_path)
except OSError:
    pass
# Injector configuration (legacy capitalised-key style).
inj_kwargs = {
    "Injection Energy PDF": injection_energy,
    "Injection Time PDF": injection_time,
    "Poisson Smear?": True,
}

# Injection-scale guess: reference sensitivity at the closest source's
# declination, scaled with sqrt(catalogue size) and by
# max_window/flare_length (a shorter flare needs a larger flux).
# Fix: dropped the Python-2 compat shim `old_div` — the numerator is a
# float (math.sqrt returns float), so old_div already performed true
# division; plain `/` is behaviorally identical.
scale = (
    100
    * math.sqrt(float(len(catalogue)))
    * flux_to_k(reference_sensitivity(np.sin(closest_src["dec"]), gamma=2))
    * max_window
) / flare_length

# Configuration dictionary consumed by the minimisation handler.
mh_dict = {
    "name": full_name,
    "datasets": custom_dataset(txs_sample_v2, catalogue,
                               llh_kwargs["LLH Time PDF"]),
    "catalogue": cat_path,
    "inj kwargs": inj_kwargs,
    "llh kwargs": llh_kwargs,
    "scale": scale,
    "n_trials": 1,
    "n_steps": 15
}

# Persist the configuration so it can be submitted/resumed.
pkl_file = make_analysis_pickle(mh_dict)

# rd.submit_to_cluster(pkl_file, n_jobs=500)

res[flare_length] = mh_dict

src_res[label] = res
# Loop over every time-PDF shape defined for this SN catalogue, building and
# mock-unblinding one analysis per concrete time PDF.
for pdf_type in sn_times[cat]:
    llh_times = sn_time_pdfs(cat, pdf_type=pdf_type)
    name = f"{name_root}/{pdf_type}/{cat}"
    bkg_ts = f"{bkg_ts_root}/{pdf_type}/{cat}"

    for llh_time in llh_times:
        # Characteristic time of the PDF: the decay constant for decay-shaped
        # PDFs, otherwise the total box length (pre + post window).
        time = (llh_time["decay_time"]
                if "decay" in pdf_type
                else llh_time["pre_window"] + llh_time["post_window"])

        # Likelihood configuration for the unblinder.
        unblind_llh = {
            "llh_name": "standard",
            "llh_energy_pdf": llh_energy,
            "llh_sig_time_pdf": llh_time,
        }

        unblind_dict = {
            "name": name,
            "ts_type": "Fit Weights",
            "mh_name": "fit_weights",
            "dataset": custom_dataset(ps_v002_p01, catalogue, llh_time),
            "catalogue": cat_path,
            "llh_dict": unblind_llh,
            # Background TS distributions are stored per characteristic time.
            "background_ts": f"{bkg_ts}/{time}/",
        }

        # mock_unblind=True: run on a background scramble, not real data.
        ub = create_unblinder(unblind_dict, mock_unblind=True)
# Build per-catalogue paths; spaces are stripped from the catalogue name to
# keep directory names clean.
name = name_root + cat.replace(" ", "") + "/"
logging.info(f"{name}")
bkg_ts = bkg_ts_root + cat.replace(" ", "") + "/Fit Weights/"

cat_path = tde_catalogue_name(cat)
catalogue = load_catalogue(cat_path)

# Unblinder configuration; the dataset is restricted to seasons overlapping
# the signal time PDF.
unblind_dict = {
    "name": name,
    "mh_name": "fit_weights",
    "dataset": custom_dataset(txs_sample_v1, catalogue,
                              llh_dict["llh_sig_time_pdf"]),
    "catalogue": cat_path,
    "llh_dict": llh_dict,
    "background_ts": bkg_ts
}

# ub = create_unblinder(unblind_dict, mock_unblind=False)
# mock_unblind=False: this unblinds REAL data; disable_warning suppresses
# the interactive confirmation prompt.
ub = create_unblinder(unblind_dict, mock_unblind=False, disable_warning=True)
r = ub.res_dict
print(r)
# Convert the cosmic-ray flux to a neutrino flux via the assumed ratio.
nu_flux = f_nu_to_cr * cr_flux

# Power-law injection spectrum normalised to the derived energy flux.
energy_pdf = {
    "Name": "Power Law",
    "Gamma": 2.0,
    "Energy Flux": nu_flux,
    "E Min": 10**2,
}

inj_kwargs = {
    "Injection Energy PDF": energy_pdf,
    "Injection Time PDF": time_pdf,
    "Poisson Smear?": True,
}

# Sum the expected number of injected neutrinos over all seasons that
# overlap the time PDF for this source.
# NOTE(review): the original nesting of the append/assignment below relative
# to enclosing loops was lost in formatting — confirm against upstream.
seasons = custom_dataset(txs_sample_v1, [source], time_pdf)
n_inj = 0
for season in seasons:
    n_inj += calculate_neutrinos(source, season, inj_kwargs)

n_injs.append("{0:.2f}".format(n_inj))
res_dict[source["Name"]] = n_injs

# Summary table of expected counts per source.
print("\n")
print("N_base \t N_scale \t Source")
for (source, n_inj) in res_dict.items():
# Injection-scale guess at a fixed mid-declination (sin(dec)=0.5), growing
# with sqrt(catalogue size) and shrinking with the time-window length.
scale = (
    flux_to_k(
        reference_sensitivity(np.sin(0.5), gamma=gamma)
        * 40
        * math.sqrt(float(len(catalogue)))
    )
    * 200.0
) / length

# Southern-sky and single-season (IC40) configurations are less sensitive,
# so they need a larger injection scale.
if hsphere == "southern":
    scale *= 5

if seasons == ["IC40"]:
    scale *= 4

data = ps_v002_p01
if seasons:
    data = data.get_seasons(*seasons)

dataset = custom_dataset(data, catalogue, llh_dict["llh_sig_time_pdf"])
logger.debug(f"{dataset.keys()}")

# Configuration dictionary consumed by the minimisation handler.
mh_dict = {
    "name": full_name,
    "mh_name": mh_name,
    "dataset": dataset,
    "catalogue": cat_path,
    "inj_dict": inj_dict,
    "llh_dict": llh_dict,
    "fixed_scale": 0,
    # Fix: true division yields a float on Python 3; trial counts must be
    # integers, so divide the per-job trials explicitly with int().
    "n_trials": int(ntrials / cluster) if cluster else ntrials,
    "n_steps": 10,
    "allow_extrapolated_sensitivity": False,
}
# "time_pdf_name": "fixed_ref_box", # "fixed_ref_time_mjd": 56927.86, # "pre_window": 0., # "post_window": 57116.76 - 56927.86, # } llh_energy = { "energy_pdf_name": "power_law", } unblind_llh = { "llh_name": "standard", "llh_energy_pdf": llh_energy, "llh_sig_time_pdf": llh_time, } name = "analyses/benchmarks/TXS_0506+056/" unblind_dict = { "name": name, "mh_name": "fixed_weights", "dataset": custom_dataset( ps_v003_p02, txs_catalogue, unblind_llh["llh_sig_time_pdf"] ), # "dataset": txs_sample_v1.get_seasons(""), "catalogue": txs_cat_path, "llh_dict": unblind_llh, } ub = create_unblinder(unblind_dict, mock_unblind=False)