Example #1
def run_experiments(experiments, mpi=False, **kwargs):
    """Run a list of experiments. Optionally using MPI.

    :Arguments:
        experiments : list of dicts
            One entry for each experiment to run. Each experiment should be a dict with the following keys:
            * 'data' -> np.ndarray
            * 'model_type' -> Hierarchical class or str name of class
            * 'name' -> str of name of experiment (optional)
            * 'kwargs' -> Keyword arguments to be supplied at model creation
        mpi : bool (default=False)
            Whether to run the experiments in parallel using MPI. If True, the script
            that calls this function must be launched with mpirun.
            This requires mpi4py and mpi4py_map to be installed,
            e.g. pip install mpi4py mpi4py_map
        kwargs : dict
            Additional keyword arguments to be passed to run_experiment (see run_experiment),
            such as samples, burn, etc.
    """
    if mpi:
        import mpi4py_map
        results = mpi4py_map.map(run_experiment, experiments, **kwargs)
    else:
        results = [run_experiment(experiment, **kwargs) for experiment in experiments]

    return results
def test_power_except():
    result_parallel = mpi4py_map.map(power_except_on_3,
                                     range(50),
                                     y=2,
                                     debug=True)
    result_serial = [power(i, y=2) for i in range(50)]
    result_serial[3] = None  # the task for input 3 raised, so its parallel entry is expected to be None
    assert result_serial == result_parallel, "Parallel result does not match direct one."
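
For reference, a minimal usage sketch for run_experiments based on the docstring above; the model name, data, and sampler settings below are illustrative placeholders, not part of the original code:

import numpy as np

experiments = [
    {'data': np.random.rand(100, 3), 'model_type': 'MyModel', 'name': 'exp_a', 'kwargs': {}},
    {'data': np.random.rand(100, 3), 'model_type': 'MyModel', 'name': 'exp_b', 'kwargs': {}},
]

# Serial run; pass mpi=True (and launch the script via mpirun) to distribute the experiments.
results = run_experiments(experiments, mpi=False, samples=2000, burn=500)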
Example #3
def analyze_experiments(experiments, mpi=False, plot_dic=True, **kwargs):
    """Analyze multiple experiments. Outputs will be saved to
    subdirectories. Can optionally analyze in parallel.

    :Arguments:
        experiments : list of dicts
            One entry for each experiment to analyze. Each experiment should be a dict with the following keys:
            * 'data' -> np.ndarray
            * 'model_type' -> Hierarchical class or str name of class
            * 'name' -> str of name of experiment (optional)
            * 'kwargs' -> Keyword arguments to be supplied at model creation
        mpi : bool (default=False)
            Whether to run the experiments in parallel using MPI. If True, the script
            that calls this function must be launched with mpirun.
            This requires mpi4py and mpi4py_map to be installed,
            e.g. pip install mpi4py mpi4py_map
        plot_dic : bool (default=True)
            Whether to create a bar plot of DIC values for each model.
        kwargs : dict
            Keyword arguments to be passed to analyze_experiment. See analyze_experiment.

    """

    # Load models if necessary
    for experiment in experiments:
        if 'model' not in experiment:
            experiment['model'] = load_model(experiment)

    if mpi:
        import mpi4py_map
        results = mpi4py_map.map(analyze_experiment, experiments, **kwargs)
    else:
        results = [
            analyze_experiment(experiment, **kwargs)
            for experiment in experiments
        ]

    if plot_dic:
        dics = [experiment['model'].mc.dic for experiment in experiments]
        names = [
            _parse_experiment(experiment)[-1] for experiment in experiments
        ]

        fig = plt.figure()
        x = range(len(names))
        ax = plt.bar(x, dics, align='center')
        plt.xticks(x, names)
        plt.ylabel('DIC')
        fig.autofmt_xdate()
        fig.savefig('dic.png')
        fig.savefig('dic.pdf')

    return results
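
A minimal follow-on sketch, assuming an experiments list built as in the run_experiments example above; the call itself is hypothetical usage, not part of the original code:

# Analyze serially; with plot_dic=True the DIC bar plot is written to dic.png and dic.pdf.
results = analyze_experiments(experiments, mpi=False, plot_dic=True)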
Example #4
def setup_parallel_process(jobs_list, mpi):
    # Note: multiprocessing, pool_size and start_process are defined at module level.
    print('in setup_parallel_process')
    # Pack each argument into a 1-tuple so the workers receive their arguments uniformly.
    jobs_tuple = []
    for i in jobs_list:
        jobs_tuple.append((i, ))

    if mpi:  # run using MPI
        import mpi4py_map
        # Provide parameter values for the wrapped function and store any results.
        mpi4py_map.map(Run_1_simulation_wrapped, jobs_tuple)
    else:  # run using Python's multiprocessing pool
        print('Sending out:', len(jobs_tuple), 'jobs to', pool_size,
              'CPUs using multiprocessing')
        pool = multiprocessing.Pool(processes=pool_size,
                                    initializer=start_process)
        pool.map(Run_1_simulation_wrapped, jobs_tuple)  # map jobs to CPUs
        pool.close()  # no more tasks
        pool.join()   # wrap up current tasks
        # Take note of NULL values from failed simulations.
    return


def setup_multiprocess(cases1, mpi):
    print('in setup_multiprocess')
    cases = []  # pack each argument into a 1-tuple for the wrapped worker
    for i in cases1:
        cases.append((i, ))

    if mpi:  # run using MPI
        import mpi4py_map
        # Provide parameter values for the wrapped function and store any results.
        mpi4py_map.map(do_calculation_wrapped, cases)
    else:  # run using Python's multiprocessing pool
        print('Sending out:', len(cases), 'jobs to', pool_size,
              'CPUs using multiprocessing')
        pool = multiprocessing.Pool(processes=pool_size,
                                    initializer=start_process)
        pool.map(do_calculation_wrapped, cases)
        pool.close()  # no more tasks
        pool.join()   # wrap up current tasks
        # Take note of NULL values from failed simulations.

    return
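
The *_wrapped workers are defined elsewhere in the original module; the sketch below is only an assumption about their shape, showing why the arguments are packed into 1-tuples above (Run_1_simulation and do_calculation are hypothetical names):

def Run_1_simulation_wrapped(args):
    # pool.map and mpi4py_map.map pass each 1-tuple as a single argument,
    # so the wrapper unpacks it before calling the underlying function.
    return Run_1_simulation(*args)

def do_calculation_wrapped(args):
    return do_calculation(*args)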
Example #6
def analyze_experiments(experiments, mpi=False, plot_dic=True, **kwargs):
    """Analyze multiple experiments. Outputs will be saved to
    subdirectories. Can optionally analyze in parallel.

    :Arguments:
        experiments : list of dicts
            One entry for each experiment to analyze. Each experiment should be a dict with the following keys:
            * 'data' -> np.ndarray
            * 'model_type' -> Hierarchical class or str name of class
            * 'name' -> str of name of experiment (optional)
            * 'kwargs' -> Keyword arguments to be supplied at model creation
        mpi : bool (default=False)
            Whether to run the experiments in parallel using MPI. If True, the script
            that calls this function must be launched with mpirun.
            This requires mpi4py and mpi4py_map to be installed,
            e.g. pip install mpi4py mpi4py_map
        plot_dic : bool (default=True)
            Whether to create a bar plot of DIC values for each model.
        kwargs : dict
            Keyword arguments to be passed to analyze_experiment. See analyze_experiment.

    """

    # Load models if necessary
    for experiment in experiments:
        if 'model' not in experiment:
            experiment['model'] = load_model(experiment)

    if mpi:
        import mpi4py_map
        results = mpi4py_map.map(analyze_experiment, experiments, **kwargs)
    else:
        results = [analyze_experiment(experiment, **kwargs) for experiment in experiments]

    if plot_dic:
        dics = [experiment['model'].mc.dic for experiment in experiments]
        names = [_parse_experiment(experiment)[-1] for experiment in experiments]

        fig = plt.figure()
        x = list(range(len(names)))
        ax = plt.bar(x, dics, align='center')
        plt.xticks(x, names)
        plt.ylabel('DIC')
        fig.autofmt_xdate()
        fig.savefig('dic.png')
        fig.savefig('dic.pdf')

    return results
def test_power_except():
    result_parallel = mpi4py_map.map(power_except_on_3, range(50), y=2, debug=True)
    result_serial = [power(i, y=2) for i in range(50)]
    result_serial[3] = None
    assert result_serial == result_parallel, "Parallel result does not match direct one."
Example #8
def test_power():
    result_parallel = mpi4py_map.map(power, list(range(50)), y=2, debug=True)
    result_serial = [power(i, y=2) for i in range(50)]
    assert result_serial == result_parallel, "Parallel result does not match direct one."
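
The power helpers used by these tests are defined elsewhere in the test module; a minimal sketch of what they presumably look like (the exact bodies are an assumption):

def power(x, y=2):
    return x ** y

def power_except_on_3(x, y=2):
    # Raises on input 3 so the tests can check error handling in the parallel map.
    if x == 3:
        raise ValueError("failing on purpose for input 3")
    return x ** y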
Example #9
# Note: str.strip(".nc") would remove characters, not the suffix, so trim the extension explicitly.
basename = os.path.basename(infile)
if basename.endswith(".nc"):
    basename = basename[:-3]
outfile = outpath + "/diffused/" + basename + "_diffused" + str(diffuse_timesteps) + ".nc"

# only copy data necessary for pism if lite
lite = True

os.system("mkdir -p " + outpath + "/diffused/")
print "initialize"
dd = DiffuseOcean.DiffuseOcean(infile, outfile, destination_grid_file, diffuse_timesteps, diffuse_variables, diffuse_missvals)
dd.getTimeIndependentInputData()
dd.prepareProjection()
dd.findAboveAndBelowSea()
dd.findAboveAndBelowSea()


# Try to get the communicator object to see whether MPI is available:
try:
    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    available = comm.size >= 2
    print("mpi available, with", comm.size, "processors.")
except Exception:  # fall back to serial processing if MPI is unavailable
    available = False

if available:
    # Parallel branch: requires the script to be launched via mpirun.
    result_parallel = mpi4py_map.map(dd.projAndDiffu, range(dd.nctimesteps), debug=True)
    dd.writeNetcdf(result_parallel, lite)
else:
    # Serial fallback: process every timestep in this process.
    result_serial = [dd.projAndDiffu(t) for t in range(dd.nctimesteps)]
    dd.writeNetcdf(result_serial, lite)