def __init__(self, model_class=None, parameters=None, client=None, num_engines=None):
    """Initialize the distributed ensemble.

    Pickles the model class for shipping to remote engines, records the
    parameter set, and attaches an IPython.parallel client.
    """
    self.my_class_name = 'DistributedEnsemble'
    # Serialize the model class so it can be sent to remote engines.
    self.model_class = cloudpickle.dumps(model_class)
    self.parameters = [parameters]
    self.number_of_trajectories = 0
    self.seed_base = self.generate_seed_base()
    self.storage_mode = None
    # Results arrive in chunks; keep them keyed in a dict.
    self.result_list = {}
    # num_engines must be set before the client is attached, since
    # _update_client presumably reads it — TODO confirm against its body.
    self.num_engines = num_engines
    self._update_client(client)
def __init__(self, model_class=None, parameters=None, qsub=False, client=None, num_engines=None,
             storage_mode=None, pickled_cluster_input_file=None, log_filename=None):
    """Initialize the distributed ensemble.

    Exactly one of ``model_class`` or ``pickled_cluster_input_file`` must be
    supplied: a live model class is pickled for shipping to engines, while a
    pickled input file switches the ensemble into cluster execution mode.

    Raises:
        MolnsUtilException: if neither or both of ``model_class`` and
            ``pickled_cluster_input_file`` are provided.
    """
    self.my_class_name = 'DistributedEnsemble'
    self.log = Log(log_filename=log_filename)

    # Validate the mutually-exclusive model inputs. Each message is built
    # once so the logged text and the raised exception cannot drift apart.
    if model_class is None and pickled_cluster_input_file is None:
        msg = "Invalid configuration. Either provide a model class object or its pickled file."
        self.log.write_log(msg, logging.ERROR)
        raise MolnsUtilException(msg)
    if model_class is not None and pickled_cluster_input_file is not None:
        msg = "Invalid configuration. Both a model class and a pickled file are provided."
        self.log.write_log(msg, logging.ERROR)
        raise MolnsUtilException(msg)

    if model_class is not None:
        # Local execution: serialize the model class for shipping to engines.
        self.cluster_execution = False
        self.model_class = cloudpickle.dumps(model_class)
    else:
        # Cluster execution: the model arrives via a pre-pickled input file.
        self.cluster_execution = True
        self.pickled_cluster_input_file = pickled_cluster_input_file

    # Not checking here for parameters = None, as they could be present in the model class.
    self.parameters = [parameters]
    self.number_of_trajectories = 0
    self.seed_base = generate_seed_base()
    self.storage_mode = storage_mode
    # Results arrive in chunks; keep them keyed in a dict.
    self.result_list = {}
    self.qsub = qsub
    self.num_engines = num_engines
    # Deliberately an identity check on False, preserving the original
    # behavior for any non-bool qsub value.
    if self.qsub is False:
        # Set the IPython.parallel client (not used under qsub execution).
        self._update_client(client)
def put(self, name, data):
    """Pickle ``data`` and store it under ``name`` via the storage provider."""
    self.setup_provider()
    payload = cloudpickle.dumps(data)
    self.provider.put(name, payload)