Example #1
def produce_data(parameters_file, data_file):
    """
    Produce data for 'pooled' condition using multiprocessing
    :param parameters_file: Path to parameters file (string)
    :param data_file: Paths to the future data files (a dictionary with two entries)
    :return: a 'pool backup' (a 'backup.PoolBackup' instance)
    """

    json_parameters = parameters.load(parameters_file)

    pool_parameters = parameters.extract_parameters(json_parameters)

    pool = mlt.Pool()

    backups = []

    for bkp in tqdm.tqdm(pool.imap_unordered(run, pool_parameters),
                         total=len(pool_parameters)):
        backups.append(bkp)

    pool_backup = backup.PoolBackup(parameters=json_parameters,
                                    backups=backups)
    pool_backup.save(parameters_file, data_file)

    return pool_backup
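
This example assumes module-level imports (tqdm, a multiprocessing alias 'mlt', and the project's local 'parameters' and 'backup' modules) plus a picklable, top-level 'run' worker. Below is a minimal, self-contained sketch of just the Pool.imap_unordered plus tqdm pattern it relies on; 'simulate' and its integer inputs are hypothetical stand-ins for 'run' and 'pool_parameters'.

# Minimal sketch of the Pool.imap_unordered + tqdm pattern used above.
# 'simulate' is a hypothetical stand-in for the repository's 'run' worker.
import multiprocessing as mlt

import tqdm


def simulate(seed):
    # Must be defined at module level so the pool can pickle it.
    return seed ** 2


if __name__ == "__main__":
    pool_parameters = list(range(100))

    with mlt.Pool() as pool:
        # Results arrive in completion order; tqdm shows overall progress.
        backups = [
            bkp for bkp in tqdm.tqdm(pool.imap_unordered(simulate, pool_parameters),
                                     total=len(pool_parameters))
        ]

    print(len(backups), "results collected")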
Example #2
def individual_data(args):
    """
    Produce figures for 'individual' data
    :param args: Parsed args from command line ('Namespace' object)
    :return: None
    """

    for move in (str(i).replace("Move.", "")
                 for i in (model.Move.max_profit, model.Move.strategic,
                           model.Move.max_diff, model.Move.equal_sharing)):

        run_backups = []

        for r in ("25", "50"):  # , "75"):

            parameters_file = "data/json/{}_{}.json".format(r, move)
            data_file = "data/pickle/{}_{}.p".format(r, move)

            if not data_already_produced(parameters_file,
                                         data_file) or args.force:

                json_parameters = parameters.load(parameters_file)
                param = parameters.extract_parameters(json_parameters)
                run_backup = run(param)
                run_backup.save(parameters_file, data_file)

            else:
                run_backup = backup.RunBackup.load(data_file)

            run_backups.append(run_backup)

        analysis.separate.separate(backups=run_backups,
                                   fig_name='fig/separate_{}.pdf'.format(move))
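
The helper 'data_already_produced' is not shown in these examples. Judging only from its call sites (it takes one or two file paths and gates whether the simulation is re-run), a plausible minimal implementation would look like the sketch below; this is an assumption, not the repository's actual code.

import os


def data_already_produced(*file_names):
    # Plausible sketch: True only if every given file already exists on disk.
    return all(os.path.exists(f) for f in file_names)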
Example #3
def produce_data(parameters_file, data_file):

    json_parameters = parameters.load(parameters_file)

    pool_parameters = parameters.extract_parameters(json_parameters)

    pool = mlt.Pool()

    backups = []

    for bkp in tqdm.tqdm(pool.imap_unordered(run, pool_parameters),
                         total=len(pool_parameters)):
        backups.append(bkp)

    pool_backup = backup.PoolBackup(parameters=json_parameters,
                                    backups=backups)
    pool_backup.save(data_file)

    return pool_backup
Example #4
def individual_data(args):

    for condition in ("75", "50", "25"):

        parameters_file, data_file, fig_files = get_files_names(condition)

        if not data_already_produced(data_file) or args.force:

            json_parameters = parameters.load(parameters_file)
            param = parameters.extract_parameters(json_parameters)
            run_backup = run(param)
            run_backup.save(data_file)

        else:
            run_backup = backup.RunBackup.load(data_file["pickle"])

        analysis.separate.eeg_like(backup=run_backup,
                                   fig_name=fig_files["eeg_like"])
        analysis.separate.pos_firmA_over_pos_firmB(
            backup=run_backup, fig_name=fig_files["positions"])

        terminal_msg(condition, parameters_file, data_file, fig_files)
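
'get_files_names' is also not shown on this page. From the way its return values are used above ('data_file' indexed with "pickle", 'fig_files' with "eeg_like" and "positions"), it plausibly has the shape sketched below; the exact paths are guesses, not the repository's code.

def get_files_names(condition):
    # Hypothetical reconstruction; the real path templates may differ.
    parameters_file = "data/json/{}.json".format(condition)
    data_file = {"json": parameters_file,
                 "pickle": "data/pickle/{}.p".format(condition)}
    fig_files = {"eeg_like": "fig/eeg_like_{}.pdf".format(condition),
                 "positions": "fig/positions_{}.pdf".format(condition)}
    return parameters_file, data_file, fig_files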
Example #5
def clustered_data(args):

    for move in (str(i).replace("Move.", "")
                 for i in (model.Move.max_profit, model.Move.strategic,
                           model.Move.max_diff, model.Move.equal_sharing)):

        parameters_file = "data/json/pool_{}.json".format(move)
        data_file = "data/pickle/pool_{}.p".format(move)

        if not data_already_produced(data_file) or args.force:
            pool_backup = produce_data(parameters_file, data_file)

        else:
            pool_backup = backup.PoolBackup.load(data_file)

        run_backups = []

        for r in ("25", "50"):  # , "75"):

            parameters_file = "data/json/{}_{}.json".format(r, move)
            data_file = "data/pickle/{}_{}.p".format(r, move)

            if not data_already_produced(parameters_file,
                                         data_file) or args.force:

                json_parameters = parameters.load(parameters_file)
                param = parameters.extract_parameters(json_parameters)
                run_backup = run(param)
                run_backup.save(parameters_file, data_file)

            else:
                run_backup = backup.RunBackup.load(data_file)

            run_backups.append(run_backup)

        parameters_file = "data/json/batch_{}.json".format(move)
        data_file = "data/pickle/batch_{}.p".format(move)

        if not data_already_produced(data_file) or args.force:
            batch_backup = produce_data(parameters_file, data_file)

        else:
            batch_backup = backup.PoolBackup.load(data_file)

        fig = plt.figure(figsize=(13.5, 7))
        gs = matplotlib.gridspec.GridSpec(nrows=2,
                                          ncols=2,
                                          width_ratios=[1, 0.7])

        analysis.pool.distance_price_and_profit(pool_backup=pool_backup,
                                                subplot_spec=gs[0, 0])
        analysis.separate.separate(backups=run_backups, subplot_spec=gs[:, 1])
        analysis.batch.plot(batch_backup=batch_backup, subplot_spec=gs[1, 0])

        plt.tight_layout()

        ax = fig.add_subplot(gs[:, :], zorder=-10)

        plt.axis("off")
        ax.text(s="B",
                x=-0.05,
                y=0,
                horizontalalignment='center',
                verticalalignment='center',
                transform=ax.transAxes,
                fontsize=20)
        ax.text(s="A",
                x=-0.05,
                y=0.55,
                horizontalalignment='center',
                verticalalignment='center',
                transform=ax.transAxes,
                fontsize=20)
        ax.text(s="C",
                x=0.58,
                y=0,
                horizontalalignment='center',
                verticalalignment='center',
                transform=ax.transAxes,
                fontsize=20)

        fig_name = "fig/clustered_{}.pdf".format(move)
        os.makedirs(os.path.dirname(fig_name), exist_ok=True)
        plt.savefig(fig_name)
        plt.show()
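
The figure assembly in this example is the part most easily reused on its own. Below is a self-contained sketch of just that layout, assuming only matplotlib: a 2x2 GridSpec with unequal column widths, and an invisible axis spanning the whole grid, drawn behind the panels, that carries the corner labels 'A', 'B' and 'C'. The three analysis plots are replaced by placeholder titles.

import matplotlib.gridspec
import matplotlib.pyplot as plt


fig = plt.figure(figsize=(13.5, 7))
gs = matplotlib.gridspec.GridSpec(nrows=2, ncols=2, width_ratios=[1, 0.7])

# Placeholder panels standing in for the analysis.pool, analysis.batch
# and analysis.separate plots.
fig.add_subplot(gs[0, 0]).set_title("panel A placeholder")
fig.add_subplot(gs[1, 0]).set_title("panel B placeholder")
fig.add_subplot(gs[:, 1]).set_title("panel C placeholder")

plt.tight_layout()

# Invisible axis covering the whole grid, placed behind the panels and used
# only to position the corner labels in figure-relative coordinates.
ax = fig.add_subplot(gs[:, :], zorder=-10)
plt.axis("off")
for label, (x, y) in (("A", (-0.05, 0.55)), ("B", (-0.05, 0)), ("C", (0.58, 0))):
    ax.text(s=label, x=x, y=y,
            horizontalalignment='center', verticalalignment='center',
            transform=ax.transAxes, fontsize=20)

plt.show()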