def __init__(self, func, low, high, particleCount=25, threads=1):
    self.threads = threads
    # MPI pool that distributes the fitness evaluations across the ranks
    pool = MpiPool(self._getMapFunction())
    super(MpiParticleSwarmOptimizer, self).__init__(
        func, low, high, particleCount=particleCount, pool=pool)
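A minimal driver sketch for the class above, meant to be launched under MPI (e.g. mpirun -n 4 python run_pso.py). The import path, the toy objective, and the sample()/gbest interface carried over from the serial ParticleSwarmOptimizer are assumptions for illustration, not part of the original example.

import numpy as np
from mpi4py import MPI
from cosmoHammer.pso.MpiParticleSwarmOptimizer import MpiParticleSwarmOptimizer  # assumed module path

def fitness(params):
    # toy objective: quadratic peak at the origin (the PSO maximises the fitness)
    return -np.sum(np.asarray(params) ** 2)

low = [-5.0, -5.0]
high = [5.0, 5.0]

pso = MpiParticleSwarmOptimizer(fitness, low, high, particleCount=25)

# sample() is assumed to behave as in the serial optimizer: a generator that
# advances the whole swarm by one iteration per step
for _ in pso.sample(100):
    pass

if MPI.COMM_WORLD.Get_rank() == 0:
    # gbest (assumed attribute) holds the best particle found across all ranks
    print(pso.gbest.position, pso.gbest.fitness)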
Example 2
    def __init__(self, **kwargs):
        """
        MPI CosmoHammer sampler implementation
        """
        # create the MPI pool and remember this process' rank
        self.pool = MpiPool(self._getMapFunction())
        self.rank = self.pool.rank

        super(MpiCosmoHammerSampler, self).__init__(pool=self.pool, **kwargs)
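For context, a self-contained toy sketch of how this MPI sampler would typically be driven, following the serial CosmoHammerSampler interface from the CosmoHammer documentation; the top-level imports, the keyword names and the setup()/computeLikelihood() module protocol are assumptions for illustration, not part of the snippet above. The script has to be started under MPI, e.g. mpirun -n 8 python demo.py.

import numpy as np
from cosmoHammer import LikelihoodComputationChain, MpiCosmoHammerSampler  # assumed top-level imports

class GaussianLikelihoodModule(object):
    """Toy likelihood module: an isotropic Gaussian centred on zero."""
    def setup(self):
        pass

    def computeLikelihood(self, ctx):
        params = ctx.getParams()
        return -0.5 * np.sum(np.asarray(params) ** 2)

# one row per parameter: [start, min, max, width]
params = np.array([[0.0, -5.0, 5.0, 1.0],
                   [0.0, -5.0, 5.0, 1.0]])

chain = LikelihoodComputationChain(min=params[:, 1], max=params[:, 2])
chain.addLikelihoodModule(GaussianLikelihoodModule())
chain.setup()

sampler = MpiCosmoHammerSampler(
    params=params,
    likelihoodComputationChain=chain,
    filePrefix="mpi_demo",
    walkersRatio=50,
    burninIterations=100,
    sampleIterations=100)

sampler.startSampling()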
Example 3
__author__ = 'sibirrer'

# this file is meant to be run as a script on the Monch cluster

# set up the scene
from cosmoHammer.util.MpiUtil import MpiPool
import time
import sys
import pickle
import os

from lenstronomy.Workflow.fitting_sequence import FittingSequence

# MPI pool; used below to restrict printing to the master rank
pool = MpiPool(None)

start_time = time.time()

job_name = str(sys.argv[1])
if pool.isMaster():
    print("job %s loaded" % job_name)
# hoffman2 specifics
dir_path_cluster = '/u/flashscratch/s/sibirrer/'
path2load = os.path.join(dir_path_cluster, job_name) + ".txt"
path2dump = os.path.join(dir_path_cluster, job_name) + "_out.txt"

# load the pickled fitting inputs for this job
with open(path2load, 'rb') as f:
    input = pickle.load(f)
[
    fitting_kwargs_list, multi_band_list, kwargs_model, kwargs_constraints,
    kwargs_likelihood, kwargs_params, init_samples