# NOTE(review): this chunk begins mid-file — the opening of this dict (and any
# keys listed before "prior") is outside the visible region. `param = {` is
# inferred from the later `"param": param` reference; confirm against the
# full file.
param = {
    "prior": offcd.const_prior,
    "p": 0.8,
    "k": 20,
    "thresh": 0.6,
    "min_peak_dist": 10,
}
metric = "loss"

# The parallel and sequential entry points share the same keyword arguments.
parallel_args = {"cmp_class_args": cmp_class_args,
                 "preprocess_args": preprocess_args,
                 "param": param,
                 "metric": metric}
sequential_args = parallel_args

# Single-run mode additionally needs an explicit datetime range.
single_args = {"dt_start": dt_start, "dt_end": dt_end}
single_args.update(parallel_args)
specific_client_args = single_args

# Runner pre-bound to one (mac, server) pair using the Bayesian offline
# change-point model, writing results under this script's directory.
fp_specific_client = partial(change_point_alg.run_specific_client,
                             mac=mac,
                             server=server,
                             model_class=BayesianOffline,
                             out_path=script_dir)

# Dispatch on the command-line arguments to the single / parallel /
# sequential / specific-client execution modes.
cp_utils.parse_args(partial(change_point_alg.run_single, run=run),
                    single_args,
                    partial(change_point_alg.run_parallel, run=run),
                    parallel_args,
                    partial(change_point_alg.run_sequential, run=run),
                    sequential_args,
                    fp_specific_client,
                    specific_client_args)
metric=metric, eps_hours=eps_hours, min_fraction_of_clients=min_fraction_of_clients) utils.parallel_exec(f_localize_events, dt_ranges) def run_single(dt_start, dt_end, metric, eps_hours, min_fraction_of_clients): localize_events(dt_start, dt_end, metric, eps_hours, min_fraction_of_clients) if __name__ == "__main__": min_fraction_of_clients = 0.75 eps_hours = 12 metric = "throughput_up" dt_start = datetime.datetime(2016, 5, 1) dt_end = datetime.datetime(2016, 5, 11) min_fraction_of_clients -= np.finfo(float).eps parallel_args = {"eps_hours": eps_hours, "metric": metric, "min_fraction_of_clients": min_fraction_of_clients} sequential_args = parallel_args single_args = {"dt_start": dt_start, "dt_end": dt_end} single_args.update(parallel_args) cp_utils.parse_args(run_single, single_args, run_parallel, parallel_args, run_sequential, sequential_args, None, None)