from libsubmit.channels import SSHChannel
from libsubmit.launchers import SrunLauncher
from libsubmit.providers import SlurmProvider

from parsl.config import Config
from parsl.executors.ipp import IPyParallelExecutor
from parsl.executors.ipp_controller import Controller
from parsl.tests.utils import get_rundir

# If you are a developer running tests, make sure to update parsl/tests/configs/user_opts.py
# If you are a user copying-and-pasting this as an example, make sure to either
# 1) create a local `user_opts.py`, or
# 2) delete the user_opts import below and replace all appearances of `user_opts` with the literal value
#    (i.e., user_opts['swan']['username'] -> 'your_username')
from .user_opts import user_opts

config = Config(
    executors=[
        IPyParallelExecutor(
            label='midway_ipp_multinode',
            provider=SlurmProvider(
                'westmere',
                channel=SSHChannel(
                    hostname='swift.rcc.uchicago.edu',
                    username=user_opts['midway']['username'],
                    script_dir=user_opts['midway']['script_dir'],
                ),
                launcher=SrunLauncher(),
                overrides=user_opts['midway']['overrides'],
                walltime="00:05:00",
                init_blocks=1,
                max_blocks=1,
                nodes_per_block=2,
                tasks_per_node=1,
            ),
            controller=Controller(public_ip=user_opts['public_ip']),
        )
    ],
    run_dir=get_rundir(),
)
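# A hypothetical local `user_opts.py` sketch matching the keys referenced above; every value
# is a placeholder to replace with your own site details, not a real account, host, or path.
# Other configs in this collection expect analogous entries (e.g. user_opts['cori'], user_opts['comet']).
user_opts = {
    'public_ip': '10.0.0.1',  # placeholder: address the IPyParallel controller should listen on
    'midway': {
        'username': 'your_username',  # placeholder Midway (RCC) username
        'script_dir': '/scratch/midway2/your_username/parsl_scripts',  # placeholder staging directory
        # placeholder environment setup prepended to each Slurm submit script
        'overrides': 'module load Anaconda3/5.1.0; source activate parsl_py36',
    },
}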
from libsubmit.channels import SSHChannel
from libsubmit.providers import SlurmProvider

from parsl.config import Config
from parsl.executors.ipp import IPyParallelExecutor
from parsl.executors.threads import ThreadPoolExecutor

config = Config(
    executors=[
        IPyParallelExecutor(
            label='midway',
            provider=SlurmProvider(
                'westmere',
                channel=SSHChannel(
                    hostname='swift.rcc.uchicago.edu',
                    username='******',
                    script_dir='/scratch/midway2/annawoodard/parsl_scripts',
                ),
                init_blocks=1,
                min_blocks=1,
                max_blocks=1000,
                nodes_per_block=1,
                tasks_per_node=2,
                overrides='module load singularity; module load Anaconda3/5.1.0; source activate parsl_py36',
            ),
        ),
        ThreadPoolExecutor(label='local', max_threads=2),
    ],
)
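# A minimal usage sketch (not part of the original config file): load the config above and run
# an app on one of its labeled executors. This assumes the `parsl.load` and `App` APIs of the
# same libsubmit-era Parsl release; the decorator name and the `executors` keyword are assumptions
# and may differ between versions.
import parsl
from parsl.app.app import App

parsl.load(config)

@App('python', executors=['midway'])  # assumption: apps can be pinned to an executor label
def hostname():
    import platform
    return platform.node()

print(hostname().result())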
from parsl.tests.user_opts import user_opts
from libsubmit.channels import SSHChannel
from libsubmit.providers import SlurmProvider

from parsl.config import Config
from parsl.executors.ipp import IPyParallelExecutor
from parsl.executors.ipp_controller import Controller

config = Config(
    executors=[
        IPyParallelExecutor(
            provider=SlurmProvider(
                'westmere',
                channel=SSHChannel(
                    hostname='swift.rcc.uchicago.edu',
                    username=user_opts['midway']['username'],
                    script_dir=user_opts['midway']['script_dir'],
                ),
                init_blocks=1,
                min_blocks=1,
                max_blocks=2,
                nodes_per_block=1,
                tasks_per_node=4,
                parallelism=0.5,
                overrides=user_opts['midway']['overrides'],
            ),
            label='midway_ipp',
            controller=Controller(public_ip=user_opts['public_ip']),
        )
    ]
)
from libsubmit.channels import SSHChannel
from libsubmit.launchers import SrunLauncher
from libsubmit.providers import SlurmProvider

from parsl.config import Config
from parsl.executors.ipp import IPyParallelExecutor
from parsl.executors.ipp_controller import Controller
from parsl.tests.user_opts import user_opts
from parsl.tests.utils import get_rundir

config = Config(
    executors=[
        IPyParallelExecutor(
            label='cori_ipp_multinode',
            provider=SlurmProvider(
                'debug',
                channel=SSHChannel(
                    hostname='cori.nersc.gov',
                    username=user_opts['cori']['username'],
                    script_dir=user_opts['cori']['script_dir'],
                ),
                nodes_per_block=2,
                tasks_per_node=2,
                init_blocks=1,
                max_blocks=1,
                overrides=user_opts['cori']['overrides'],
                launcher=SrunLauncher(),
            ),
            controller=Controller(public_ip=user_opts['public_ip']),
        )
    ],
    run_dir=get_rundir(),
)
        ),
    ],
    app_cache=False,
)

rccNodeExclusive = Config(
    executors=[
        IPyParallelExecutor(
            label='midway_ipp',
            provider=SlurmProvider(
                'broadwl',
                channel=LocalChannel(),
                # launcher=SrunLauncher(),
                init_blocks=1,  # 1
                min_blocks=0,  # 0
                max_blocks=3,  # 10
                nodes_per_block=1,  # 4
                tasks_per_node=28,  # 12
                walltime="00:30:00",
                overrides="#SBATCH --mem-per-cpu=2000\n#SBATCH --account=pi-jevans\n#SBATCH --exclusive\nsource /project2/jevans/virt_sbatch/bin/activate\nmodule load java/1.8\nulimit -u 10000\nrm -rf `ls -la /tmp | grep 'reidmcy' | awk ' { print $9 } '`"  # WARNING: Watch out for the rm /tmp, only use on exclusive
            ),
        ),
    ],
    app_cache=False,
)

rccNodeSmall = Config(
    executors=[
        IPyParallelExecutor(
            label='midway_ipp',
from libsubmit.channels import SSHChannel
from libsubmit.providers import SlurmProvider
from libsubmit.launchers import SrunLauncher

from parsl.config import Config
from parsl.executors.ipp import IPyParallelExecutor
from parsl.executors.ipp_controller import Controller
from parsl.tests.user_opts import user_opts
from parsl.tests.utils import get_rundir

config = Config(
    executors=[
        IPyParallelExecutor(
            label='comet_ipp_multinode',
            provider=SlurmProvider(
                'debug',
                channel=SSHChannel(
                    hostname='comet.sdsc.xsede.org',
                    username=user_opts['comet']['username'],
                    script_dir=user_opts['comet']['script_dir'],
                ),
                launcher=SrunLauncher(),
                overrides=user_opts['comet']['overrides'],
                walltime="00:10:00",
                init_blocks=1,
                max_blocks=1,
                nodes_per_block=2,
                tasks_per_node=1,
            ),
            controller=Controller(public_ip=user_opts['public_ip']),
        )
    ],
    run_dir=get_rundir(),
)
from libsubmit.launchers import SrunLauncher
from libsubmit.providers import SlurmProvider

from parsl.config import Config
from parsl.executors.threads import ThreadPoolExecutor
from parsl.executors.ipp import IPyParallelExecutor
from parsl.executors.ipp_controller import Controller

cori_debug_config = Config(
    executors=[
        IPyParallelExecutor(
            label='ipp_slurm',
            provider=SlurmProvider(
                'debug',
                nodes_per_block=1,
                tasks_per_node=64,
                init_blocks=1,
                max_blocks=1,
                walltime="00:25:00",
                overrides="#SBATCH --constraint=haswell",
            )
        )
    ]
)

cori_regular_config = Config(
    executors=[
        IPyParallelExecutor(
            label='ipp_slurm',
            provider=SlurmProvider(
                'regular',
                nodes_per_block=1,