Example #1
    # Excerpt of a multiprocessing wrapper; requires:
    #     from multiprocessing import JoinableQueue, Queue, Value, Process
    #     from logging.handlers import QueueListener
    #     import logging
    def __init__(self, n_cpu, **kwargs):
        self.queue = JoinableQueue()
        self.log_queue = Queue()
        self.n_tasks = Value('i', 0)
        kwargs["n_tasks"] = self.n_tasks

        self.processes = [
            Process(target=self.run_trial, kwargs=kwargs)
            for _ in range(int(n_cpu))
        ]

        self.mh = MinimisationHandler.create(kwargs["mh_dict"])
        for season in self.mh.seasons.keys():
            inj = self.mh.get_injector(season)
            inj.calculate_n_exp()
        self.mh_dict = kwargs["mh_dict"]
        self.scales = []

        handler = logging.StreamHandler()
        handler.setFormatter(
            logging.Formatter(
                "%(levelname)s: %(asctime)s - %(process)s - %(message)s"))
        # The QueueListener drains records that workers put on log_queue
        # and forwards them to the handler above.
        ql = QueueListener(self.log_queue, handler)
        ql.start()

        for p in self.processes:
            p.start()
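
The QueueListener above is the parent-process half of multiprocess-safe logging: each worker pushes records onto log_queue, and the listener forwards them to the StreamHandler. The run_trial body is not shown here, so the following is a minimal sketch of the matching worker-side setup; the QueueHandler attachment, the task loop, and the None sentinel are assumptions, not code from the source.

import logging
from logging.handlers import QueueHandler

def run_trial(self, **kwargs):
    # Worker side: route this process's log records to the parent's listener.
    root = logging.getLogger()
    root.addHandler(QueueHandler(self.log_queue))
    root.setLevel(logging.INFO)

    while True:
        task = self.queue.get()    # blocks until the parent enqueues work
        if task is None:           # sentinel: parent signals shutdown
            self.queue.task_done()
            break
        # ... run one trial with `task` here ...
        self.queue.task_done()     # lets JoinableQueue.join() return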
Example #2
    # Excerpt; assumes a module-level logger = logging.getLogger(__name__).
    def do_asimov_scale_estimation(self):
        """Estimate the injection scale using Asimov estimation."""
        logger.info("doing asimov estimation")
        mh = MinimisationHandler.create(self.mh_dict)
        scale_estimate = mh.guess_scale()
        logger.debug(f"estimated scale: {scale_estimate}")
        self.disc_guess = scale_estimate
        # The sensitivity scale is guessed at 30% of the discovery-potential scale.
        self.sens_guess = 0.3 * self.disc_guess
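
A hedged usage sketch of how these guesses might seed a scale scan; the submitter name is hypothetical, and the iterate_run call simply mirrors the commented-out one in Example #3:

# Hypothetical driver code, not from the source.
submitter.do_asimov_scale_estimation()

# Scan injection scales up to the discovery-potential guess; the
# sensitivity guess marks the low end of the interesting range.
mh = MinimisationHandler.create(submitter.mh_dict)
mh.iterate_run(n_steps=10, n_trials=100, scale=submitter.disc_guess)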
Example #3
                    "name": name,
                    "mh_name": "fixed_weights",
                    "datasets": [IC86_1_dict],
                    "catalogue": ps_catalogue_name(sin_dec),
                    "llh_dict": llh_dict,
                    "inj kwargs": inj_dict,
                    "n_trials": 20,
                    "n_steps": 15,
                    "scale": scale,
                }

                pkl_file = make_analysis_pickle(mh_dict)

                # Alternatively, submit the pickled analysis to the cluster:
                # rd.submit_to_cluster(pkl_file, n_jobs=150)
                mh = MinimisationHandler.create(mh_dict)
                # mh.iterate_run(n_steps=2, n_trials=20, scale=scale)
                mh.run(10, scale=float(scale))  # 10 trials at this scale

                config_mh.append(mh_dict)

            res_dict[key] = config_mh

        all_res[gamma] = res_dict

rd.wait_for_cluster()

for (gamma, res_dict) in all_res.items():

    gamma_name = basename + str(gamma) + "/"
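
The truncated loop above presumably post-processes all_res per gamma. As a hedged sketch, each stored mh_dict could be fed back into a results reader; ResultsHandler and its sensitivity attribute are assumptions about the flarestack API, not shown in this snippet:

# Hypothetical post-processing, not in the source.
from flarestack.core.results import ResultsHandler

for (gamma, res_dict) in all_res.items():
    for (key, config_mh) in res_dict.items():
        for mh_dict in config_mh:
            rh = ResultsHandler(mh_dict)       # read back the merged trials
            print(gamma, key, rh.sensitivity)  # flux normalisation at sensitivity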
Example #4
# Excerpt; requires: import os, pickle, random
try:
    os.makedirs(os.path.dirname(ts_path))
except OSError:
    pass

if os.path.isfile(ts_path):
    print("Loading ts_array")
    with open(ts_path, "rb") as f:
        ts_array = pickle.load(f)

else:
    print("Empty TS array")
    ts_array = []

# Create a MinimisationHandler for each dictionary, and run paired trials

mh_pl = MinimisationHandler.create(mh_dict_pl)
mh_tm = MinimisationHandler.create(mh_dict_tm)

n_trials = 100

for i in range(n_trials):

    # Reuse the same seed for both handlers, so the two fits see identical
    # pseudo-data and the TS difference isolates the model choice.
    seed = random.randint(0, 999999)
    mh_pl.set_random_seed(seed)
    res_pl = mh_pl.run_trial(scale=1.0)
    mh_tm.set_random_seed(seed)
    res_tm = mh_tm.run_trial(scale=1.0)
    ts = res_tm["TS"] - res_pl["TS"]
    print(i, seed, res_tm, res_pl, ts)
    ts_array.append(ts)
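
The snippet ends before the updated array is written back to disk; a minimal completion sketch, assuming the same ts_path and the standard-library pickle used for loading:

import pickle

# Hypothetical save step, mirroring the load above.
with open(ts_path, "wb") as f:
    pickle.dump(ts_array, f)
print(f"Saved {len(ts_array)} TS values to {ts_path}")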