# Example no. 1
        # NOTE(review): fragment — the enclosing loop and the start of the
        # mh_dict literal lie before this excerpt; "n_steps" is its last key.
        "n_steps": 11,
    }

    # Serialise the analysis configuration so it can be shipped to a cluster job.
    pkl_file = make_analysis_pickle(mh_dict)

    # Creates a Minimisation Handler using the dictionary, and runs the trials

    # mh_power_law = MinimisationHandler.create(mh_dict_power_law)
    # mh_power_law.iterate_run(mh_dict_power_law["scale"], n_steps=mh_dict_power_law["n_steps"],
    #                n_trials=mh_dict_power_law["n_trials"])

    # Submit the pickled configuration to the cluster, split over 5 jobs.
    rd.submit_to_cluster(pkl_file, n_jobs=5)

    # Keep the configuration keyed by the loop variable i (defined above this
    # excerpt) so results can be retrieved after the cluster finishes.
    res_dict[i] = mh_dict

# Block until all previously submitted cluster jobs have finished.
rd.wait_for_cluster()

# Per-configuration sensitivity and discovery-potential values, in n_range order.
sens = []
disc = []

for i in n_range:
    # Retrieve the configuration stored for this index before submission.
    mh_dict = res_dict[i]

    # Creates a Results Handler to analyse the results, and calculate the
    # sensitivity. This is the flux that needs to arrive at Earth, in order for
    # IceCube to see an overfluctuation 90% of the time. Prints this information.

    rh = ResultsHandler(mh_dict)
    sens.append(rh.sensitivity)
    disc.append(rh.disc_potential)
                #     n_cpu=1 if cluster else 32,
                #     n_jobs=cluster,
                #     h_cpu='02:59:59'
                # )
                # NOTE(review): fragment — the call whose trailing arguments are
                # commented out above begins before this excerpt; job_id is
                # presumably its return value. TODO confirm against full file.
                job_ids.append(job_id)

                # Store this configuration under its spectral index gamma.
                time_res[gamma] = mh_dict

            # Close of the gamma loop: store per-time-window results.
            cat_res[time_key] = time_res

        # Close of the time-window loop: store per-catalogue results.
        full_res[cat] = cat_res

    # Wait for cluster. If there are no cluster jobs, this just runs
    if cluster and np.any(job_ids):
        logging.info(f"waiting for jobs {job_ids}")
        wait_for_cluster(job_ids)

    # Aggregated results, keyed by catalogue name and then time window.
    stacked_sens = {}
    stacked_sens_flux = {}

    # b is the catalogue's enumeration index (apparently used for plotting or
    # ordering further down — not visible in this excerpt).
    for b, (cat_name, cat_res) in enumerate(full_res.items()):

        stacked_sens_flux[cat_name] = {}

        for (time_key, time_res) in cat_res.items():

            stacked_sens_flux[cat_name][time_key] = {}

            # NOTE(review): fragment — the loop body continues past this
            # excerpt; these lists are presumably filled by the code below.
            sens_livetime = []
            fracs = []
            disc_pots_livetime = []