Example #1
import sys

import numpy as np
from mpipool import MPIPool


def main():

    # Initialize the MPI pool
    pool = MPIPool()

    # Make sure we only run map() on the master process;
    # worker processes wait for tasks and exit when the pool is closed
    if not pool.is_master():
        pool.wait()
        sys.exit(0)

    # Create some random input data
    x = np.random.uniform(size=10000)
    y = np.random.uniform(size=10000)
    tasks = np.vstack((x, y)).T

    # Distribute the tasks; `worker` is defined elsewhere in the source module
    vals = pool.map(worker, tasks)

    pool.close()
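The worker function passed to pool.map above is not shown in this excerpt; any module-level function that takes a single task (here a length-2 array) will do. A minimal, purely hypothetical stand-in, together with the usual MPI launch, would be:

# Hypothetical worker: unpack one (x, y) task and return a scalar result
def worker(task):
    x, y = task
    return x ** 2 + 2 * y


if __name__ == "__main__":
    main()

# Start the script under MPI so that several ranks are available, e.g.:
#   mpiexec -n 4 python example1.py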
Example #2
import sys
import logging
import multiprocessing

# SerialPool is assumed to be provided by the surrounding project
# (schwimmbad, for example, ships an equivalent serial pool).
logger = logging.getLogger(__name__)


def get_pool(mpi=False, threads=None, **kwargs):
    """
    Get a pool object to pass to emcee for parallel processing.
    If mpi is False and threads is None, a serial pool is returned.

    Parameters
    ----------
    mpi : bool
        Use MPI or not. If True, the threads kwarg is ignored.
    threads : int (optional)
        If mpi is False and threads is specified, use a Python
        multiprocessing pool with the specified number of threads.
    **kwargs
        Any other keyword arguments are passed through to the pool
        initializers.

    """

    if mpi:
        from mpipool import MPIPool

        # Initialize the MPI pool
        pool = MPIPool(**kwargs)

        # Make sure only the master process runs the sampler;
        # worker processes wait for tasks and then exit
        if not pool.is_master():
            pool.wait()
            sys.exit(0)
        logger.debug("Running with MPI...")

    elif threads is not None and threads > 1:
        logger.debug("Running with multiprocessing on {} cores..."
                     .format(threads))
        pool = multiprocessing.Pool(threads, **kwargs)

    else:
        logger.debug("Running serial...")
        pool = SerialPool(**kwargs)

    return pool
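As a usage note, the pool returned by get_pool is typically handed to emcee's EnsembleSampler. The following is a minimal sketch under assumed names: ln_prob, the walker and dimension counts, and the step count are all illustrative.

import numpy as np
import emcee


def ln_prob(p):
    # Toy Gaussian log-probability, purely for illustration
    return -0.5 * np.sum(p ** 2)


pool = get_pool(mpi=False, threads=4)

nwalkers, ndim = 32, 5
sampler = emcee.EnsembleSampler(nwalkers, ndim, ln_prob, pool=pool)
p0 = np.random.randn(nwalkers, ndim)
sampler.run_mcmc(p0, 100)

pool.close()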
Example #3
import sys
import logging
import multiprocessing

# SerialPool is assumed to be provided by the surrounding project;
# logger is a standard module-level logger.
logger = logging.getLogger(__name__)


def get_pool(mpi=False, threads=None, **kwargs):
    """ Get a pool object to pass to emcee for parallel processing.
        If mpi is False and threads is None, a serial pool is returned.

        Parameters
        ----------
        mpi : bool
            Use MPI or not. If True, the threads kwarg is ignored.
        threads : int (optional)
            If mpi is False and threads is specified, use a Python
            multiprocessing pool with the specified number of threads.
        **kwargs
            Any other keyword arguments are passed through to the pool
            initializers.
    """

    if mpi:
        from mpipool import MPIPool

        # Initialize the MPI pool
        pool = MPIPool(**kwargs)

        # Make sure only the master process continues past this point;
        # worker processes wait for tasks and then exit
        if not pool.is_master():
            pool.wait()
            sys.exit(0)
        logger.debug("Running with MPI...")

    elif threads is not None and threads > 1:
        logger.debug(
            "Running with multiprocessing on {} cores...".format(threads))
        pool = multiprocessing.Pool(threads, **kwargs)

    else:
        logger.debug("Running serial...")
        pool = SerialPool(**kwargs)

    return pool
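When the MPI branch of get_pool is used, the same script has to be started on every MPI rank; only the master rank returns from get_pool, while the other ranks block in pool.wait() and exit. A sketch of that calling pattern (the script name and rank count are illustrative):

# Launch on several ranks, e.g.:
#   mpiexec -n 8 python run_sampler.py

pool = get_pool(mpi=True)   # worker ranks never get past this call

# ... master-only work goes here (e.g. building and running the sampler) ...

pool.close()                # typically signals the waiting worker ranks to shut down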
Example #4
import sys

import numpy as np
from mpipool import MPIPool


# define the function that will be applied to tasks
def worker(task):
    x, y = task
    return x**2 + 2 * y


# create the pool
pool = MPIPool()

# only run map() on the master process, all other processes wait for their work
if not pool.is_master():
    pool.wait()
    # worker processes exit after they have done their work
    sys.exit(0)

# the following code is executed by the master process only
# create some random input data
x = np.random.uniform(size=10)
y = np.random.uniform(size=10)
tasks = list(zip(x, y))


# create a callback function that is called with each result
def cb(x):
    print(x)
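The excerpt stops at the callback; the usual continuation maps worker over the tasks and then closes the pool. Whether map() accepts a per-result callback depends on the MPIPool implementation in use, so that argument is an assumption here:

# Map the worker over the tasks; the callback argument is an assumption
# about this MPIPool's map() signature, drop it if unsupported.
results = pool.map(worker, tasks, callback=cb)

# Shut down the worker processes once all results have been collected.
pool.close()

print(results)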

Example #5
        stats_dict[stat].append([output[stat], os.path.basename(name1),
                                 os.path.basename(name2)])


print("Starting pool at {}".format(datetime.now()))

if multiprocess:

    use_mpi = False
    if use_mpi:
        from mpipool import MPIPool
        pool = MPIPool(loadbalance=False)

        if not pool.is_master():
            # Wait for instructions from the master process.
            pool.wait()
            sys.exit(0)
    else:
        from multiprocessing import cpu_count, Pool
        psize = cpu_count()
        print("Found {} CPUs to run on.".format(psize))
        pool = Pool(processes=psize)

print("Pool created at {}".format(datetime.now()))

# Run the distance metrics between the comparisons

stats_dict = dict.fromkeys(stats)
for stat in stats_dict:
    if multiprocess:
        # Shared list proxy so results can be appended across processes
        stats_dict[stat] = manager.list([])
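The manager object used in the loop above is not defined in this excerpt; with the standard library it would normally be a multiprocessing.Manager created once before the loop. A sketch, with illustrative values:

import multiprocessing

# A Manager hands out list/dict proxies that are safe to share between
# processes; the loop above stores one such shared list per statistic.
manager = multiprocessing.Manager()

shared_results = manager.list([])   # e.g. what stats_dict[stat] becomes
shared_results.append([0.5, "cube1.fits", "cube2.fits"])   # illustrative entry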