def open(self):
    """Initiate and scale the cluster.

    Creates the Dask cluster object matching ``self.cluster_type``
    ('local' or one of the job-queue schedulers), stores it on
    ``self.cluster``, then scales it to ``self.num_worker`` workers.

    Dask configuration defaults can be changed in the
    ~/.config/dask/mintpy.yaml file.

    Raises:
        ValueError: if ``self.cluster_type`` is not a supported cluster.
    """
    print('initiate Dask cluster')
    if self.cluster_type == 'local':
        from dask.distributed import LocalCluster

        # initiate cluster object
        self.cluster = LocalCluster()

    else:
        # for non-local cluster, import related dask module only when it's needed
        # because job_queue is not available on macports, which make sense
        import dask_jobqueue

        # map each supported scheduler name to its dask_jobqueue class,
        # and validate the input before touching the scheduler
        cluster_classes = {
            'lsf': dask_jobqueue.LSFCluster,
            'pbs': dask_jobqueue.PBSCluster,
            'slurm': dask_jobqueue.SLURMCluster,
        }
        if self.cluster_type not in cluster_classes:
            msg = 'un-recognized input cluster: {}'.format(self.cluster_type)
            msg += '\nsupported clusters: {}'.format(CLUSTER_LIST)
            raise ValueError(msg)

        # initiate cluster object
        self.cluster = cluster_classes[self.cluster_type](**self.cluster_kwargs)

        # show dask cluster job script for reference
        # (generate it once instead of calling job_script() twice)
        job_script = self.cluster.job_script()
        print("\n", job_script)

        # for debug
        debug_mode = False
        if debug_mode:
            with open('dask_command_run_from_python.txt', 'w') as f:
                f.write(job_script + '\n')

    # This line submits num_worker jobs to the cluster to start a bunch of workers
    # In tests on Pegasus `general` queue in Jan 2019, no more than 40 workers could RUN
    # at once (other user's jobs gained higher priority in the general at that point)
    print('scale the cluster to {} workers'.format(self.num_worker))
    self.cluster.scale(self.num_worker)
def open(self):
    """Initiate the cluster."""
    # Dask configuration defaults can be customized via the
    # ~/.config/dask/mintpy.yaml file.
    print('initiate Dask cluster')

    if self.cluster_type == 'local':
        # a local cluster needs no job scheduler — handle it and bail out early
        from dask.distributed import LocalCluster
        self.cluster = LocalCluster()
        return

    # non-local clusters: defer the dask_jobqueue import until it is actually
    # needed, because job_queue is not available on macports, which make sense
    import dask_jobqueue

    # pick and instantiate the scheduler-specific cluster class
    if self.cluster_type == 'lsf':
        cluster_obj = dask_jobqueue.LSFCluster(**self.cluster_kwargs)
    elif self.cluster_type == 'pbs':
        cluster_obj = dask_jobqueue.PBSCluster(**self.cluster_kwargs)
    elif self.cluster_type == 'slurm':
        cluster_obj = dask_jobqueue.SLURMCluster(**self.cluster_kwargs)
    else:
        raise ValueError(
            'un-recognized input cluster: {}'.format(self.cluster_type)
            + '\nsupported clusters: {}'.format(CLUSTER_LIST)
        )
    self.cluster = cluster_obj

    # show dask cluster job script for reference
    print("\n", self.cluster.job_script())

    # for debug
    debug_mode = False
    if debug_mode:
        with open('dask_command_run_from_python.txt', 'w') as f:
            f.write(self.cluster.job_script() + '\n')