#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2016 California Institute of Technology.
# Copyright (c) 2016-2018 The Uncertainty Quantification Foundation.
# License: 3-clause BSD.  The full license text is available at:
#  - https://github.com/uqfoundation/pathos/blob/master/LICENSE

from pathos.parallel import stats
from pathos.parallel import ParallelPool as Pool
pool = Pool()

def host(id):
    import socket
    import time
    time.sleep(1.0)
    return "Rank: %d -- %s" % (id, socket.gethostname())


print("Evaluate 10 items on 2 cpus")
#FIXME: reset lport below
pool.ncpus = 2
pool.servers = ('localhost:5653', )
res5 = pool.map(host, range(10))
print(pool)
print('\n'.join(res5))
print(stats())
print('')

# end of file
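# The example above expects a parallel-python worker server to already be
# listening on localhost:5653. A minimal sketch of starting one from Python
# follows; it assumes the `ppserver` script shipped with ppft (it may be
# installed as `ppserver.py`, depending on version) is on the PATH and
# accepts -p (port) and -w (worker count).
import subprocess
import time

server = subprocess.Popen(['ppserver', '-p', '5653', '-w', '2'])
time.sleep(1.0)              # give the server a moment to start listening
try:
    pass                     # ... run the pool.map example above ...
finally:
    server.terminate()       # shut the worker server down afterwards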
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2016 California Institute of Technology.
# License: 3-clause BSD.  The full license text is available at:
#  - http://trac.mystic.cacr.caltech.edu/project/pathos/browser/pathos/LICENSE

from pathos.parallel import stats
from pathos.parallel import ParallelPool as Pool
pool = Pool()

def host(id):
    import socket
    return "Rank: %d -- %s" % (id, socket.gethostname())


print "Evaluate 10 items on 1 cpu"
pool.ncpus = 1
res3 = pool.map(host, range(10))
print pool
print '\n'.join(res3)
print stats()

print "Evaluate 10 items on 2 cpus"
pool.ncpus = 2
res5 = pool.map(host, range(10))
print pool
print '\n'.join(res5)
print stats()

# end of file
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2016 California Institute of Technology.
# Copyright (c) 2016-2019 The Uncertainty Quantification Foundation.
# License: 3-clause BSD.  The full license text is available at:
#  - https://github.com/uqfoundation/pathos/blob/master/LICENSE

from pathos.parallel import stats
from pathos.parallel import ParallelPool as Pool
pool = Pool()

def host(id):
    import socket
    return "Rank: %d -- %s" % (id, socket.gethostname())


print("Evaluate 10 items on 1 cpu")
pool.ncpus = 1
res3 = pool.map(host, range(10))
print(pool)
print('\n'.join(res3))
print(stats())

print("Evaluate 10 items on 2 cpus")
pool.ncpus = 2
res5 = pool.map(host, range(10))
print(pool)
print('\n'.join(res5))
print(stats())

# end of file
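# The runs above use the blocking `map`. For reference, a minimal sketch of
# the asynchronous variant follows; it assumes the `amap`/`get` interface
# shared by pathos pools, where `amap` returns immediately and `get()`
# blocks until all results are available.
from pathos.parallel import ParallelPool as Pool

def host(id):
    import socket
    return "Rank: %d -- %s" % (id, socket.gethostname())

pool = Pool(ncpus=2)
future = pool.amap(host, range(10))   # dispatch the jobs, return immediately
print('\n'.join(future.get()))        # block until all 10 results are ready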
# Assumes the enclosing module imports: numpy as np, warnings, and a pathos
# parallel pool, e.g. `from pathos.pools import ParallelPool as pp_Pool`.
def evaluate_distributed(self, func, *args, nprocs=1, servers=None,
                         verbose=False, **kwargs):
    """Distribute model evaluation across a cluster.

    Usage Conditions:

    * The provided function must accept a numpy array of inputs as
      its first parameter
    * The provided function must return a numpy array of results

    Parameters
    ----------
    func : function
        Model, or function that wraps a model, to be run in parallel
    nprocs : int
        Number of processors to use on each node. Defaults to 1.
    servers : list[str] or None
        IP addresses or aliases of the servers/nodes to use.
    verbose : bool
        Display job execution statistics. Defaults to False.
    *args : list
        Additional positional arguments to be passed to `func`
    **kwargs : dict
        Additional keyword arguments passed to `func`

    Returns
    -------
    self : ProblemSpec object
    """
    if verbose:
        from pathos.parallel import stats

    warnings.warn(
        "This is an untested experimental feature and may not work.")

    workers = pp_Pool(nprocs, servers=servers)

    # Split the samples into one evenly sized chunk per worker
    # (nprocs workers per server; a single local node if no servers given)
    n_chunks = int(nprocs) * (len(servers) if servers else 1)
    chunks = np.array_split(self._samples, n_chunks, axis=0)

    tmp_f = self._wrap_func(func)
    res = list(workers.map(tmp_f, chunks))

    self._results = self._collect_results(res)

    if verbose:
        print(stats(), '\n')

    workers.clear()

    return self
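# For context, a hypothetical end-to-end use of `evaluate_distributed` is
# sketched below. The problem definition and the chained sample_*/analyze_*
# method names are illustrative assumptions about SALib's ProblemSpec
# interface, not taken from the source above.
import numpy as np
from SALib import ProblemSpec

def model(X):
    # Meets the stated usage conditions: accepts a 2D numpy array of inputs
    # and returns a numpy array of results.
    return X[:, 0] ** 2 + np.sin(X[:, 1]) + X[:, 2]

sp = ProblemSpec({
    'names': ['x1', 'x2', 'x3'],
    'bounds': [[-1.0, 1.0]] * 3,
})

(sp.sample_sobol(1024)
   .evaluate_distributed(model, nprocs=2, servers=['localhost'], verbose=True)
   .analyze_sobol())

print(sp.analysis)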