def create_sim_map(burnin_id):
    # Map every simulation in the burn-in experiment to its output directory
    # and return the mapping as one DataFrame. Assumes pandas (pd),
    # AnalyzeManager and SimulationDirectoryMapAnalyzer are imported at module level.
    am = AnalyzeManager(burnin_id, analyzers=SimulationDirectoryMapAnalyzer())
    am.analyze()

    sim_map_dict = am.analyzers[0].results
    df = pd.concat([pd.DataFrame(exp) for exp_id, exp in sim_map_dict.items()])
    return df
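
A minimal usage sketch for create_sim_map; the burn-in experiment id below is a placeholder, not taken from the original snippet:

sim_map = create_sim_map("00000000-0000-0000-0000-000000000000")
sim_map.to_csv("sim_directory_map.csv", index=False)
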
def run_single_analyzer(exp_id, analyzer, savefile_prefix=""):
    # Run one analyzer class against a single experiment, save its results
    # DataFrame to CSV, and return it.
    SetupParser.default_block = 'HPC'
    SetupParser.init()

    am = AnalyzeManager()
    am.add_analyzer(analyzer())
    exp = retrieve_experiment(exp_id)
    am.add_experiment(exp)
    am.analyze()

    df_return = am.analyzers[0].results
    df_return.to_csv("{}_{}.csv".format(savefile_prefix, exp_id), index=False)
    return df_return
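
A usage sketch: pass the analyzer class itself (the function instantiates it). The experiment id is a placeholder, and SimulationDirectoryMapAnalyzer is the analyzer class referenced in the first snippet:

sim_map_df = run_single_analyzer("00000000-0000-0000-0000-000000000000",
                                 SimulationDirectoryMapAnalyzer,
                                 savefile_prefix="sim_map")
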
def plot_inset_diagnostics(experiment_list,
                           channels=default_channels,
                           working_dir=".",
                           filename="output/InsetChart.json",
                           **kwargs):
    # Plot the requested InsetChart channels for every experiment in the list
    # using a single AnalyzeManager run.
    from simtools.Analysis.AnalyzeManager import AnalyzeManager

    am = AnalyzeManager()
    am.add_analyzer(
        inset_channel_plotter(channels,
                              working_dir=working_dir,
                              filename=filename,
                              **kwargs))

    for expt in experiment_list:
        am.add_experiment(expt)
    am.analyze()
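
A usage sketch with placeholder experiment ids; any extra keyword arguments are forwarded to inset_channel_plotter:

plot_inset_diagnostics(["00000000-0000-0000-0000-000000000000",
                        "11111111-1111-1111-1111-111111111111"],
                       channels=["True Prevalence"],
                       working_dir=".")
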
def run_experiment(configbuilder, experiment_name, experiment_builder,
                   analyzers):
    # Run the experiment on HPC, wait for it to finish, then apply the
    # supplied analyzer instances to the finished experiment.
    run_sim_args = {
        'config_builder': configbuilder,
        'exp_name': experiment_name,
        'exp_builder': experiment_builder
    }

    if not SetupParser.initialized:
        SetupParser.init('HPC')

    exp_manager = ExperimentManagerFactory.init()
    exp_manager.run_simulations(**run_sim_args)
    exp_manager.wait_for_finished(verbose=True)
    assert (exp_manager.succeeded())
    am = AnalyzeManager(exp_manager.experiment)
    for a in analyzers:
        am.add_analyzer(a)
    am.analyze()
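
A usage sketch; cb, builder and my_analyzers stand in for a config builder, an experiment builder and a list of analyzer instances constructed elsewhere (none of them are defined in the original snippet):

run_experiment(configbuilder=cb,
               experiment_name="example_experiment",
               experiment_builder=builder,
               analyzers=my_analyzers)
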
def run_analyzers(exp_id, analyzers, savefile_prefix=""):
    # Run one or more analyzer classes against an experiment, merge their
    # results on sim_id, and save the combined DataFrame to CSV.
    def _remove_duplicate_columns(df):
        columns_to_keep = []
        for c in df.columns:
            if "_duplicated" not in c:
                columns_to_keep.append(c)
        return df[columns_to_keep]

    SetupParser.default_block = 'HPC'
    SetupParser.init()

    am = AnalyzeManager()
    for a in analyzers:
        am.add_analyzer(a())
    exp = retrieve_experiment(exp_id)
    am.add_experiment(exp)
    am.analyze()

    if len(analyzers) == 1:
        df_return = am.analyzers[0].results

    elif len(analyzers) > 1:
        # Merge every analyzer's results on sim_id, then drop the duplicate
        # columns introduced by the merge suffix.
        df_list = [x.results for x in am.analyzers]
        df_return = df_list[0]
        for df_next in df_list[1:]:
            df_return = pd.merge(df_return,
                                 df_next,
                                 on="sim_id",
                                 suffixes=["", "_duplicated"])
        df_return = _remove_duplicate_columns(df_return)

    else:
        raise ValueError("run_analyzers() requires at least one analyzer")

    df_return.to_csv("{}_{}.csv".format(savefile_prefix, exp_id), index=False)
    return df_return
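
A usage sketch; the analyzer class names and the experiment id are placeholders for real analyzer classes and a real experiment:

combined_df = run_analyzers("00000000-0000-0000-0000-000000000000",
                            [PrevalenceAnalyzer, EventCounterAnalyzer],
                            savefile_prefix="summary")
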
                else:
                    plt.plot(data, label=label)

                plt.xlabel("Simulation Time")

        plt.ylabel(self.channel)
        plt.legend()
        plt.show()


if __name__ == "__main__":
    from simtools.Analysis.AnalyzeManager import AnalyzeManager

    am = AnalyzeManager()
    # am.add_analyzer(basic_inset_channel_plotter("True Prevalence"))

    # Overlay the two experiments on one plot, coloring and labeling curves by
    # experiment; label_dict maps experiment ids to legend labels.
    am.add_analyzer(
        inset_channel_plotter("True Prevalence",
                              color_by_expt=True,
                              label_by_expt=True,
                              label_dict={
                                  "7e3073b4-d9f1-e811-a2bd-c4346bcb1555":
                                  "full campaign",
                                  "a2e981fe-d9f1-e811-a2bd-c4346bcb1555":
                                  "no 2011 bednets"
                              },
                              ref_date="2001-01-01"))
    am.add_experiment("7e3073b4-d9f1-e811-a2bd-c4346bcb1555")
    am.add_experiment("a2e981fe-d9f1-e811-a2bd-c4346bcb1555")
    am.analyze()