def run_md(script, coordinates, topology):
    """
    Run an MD job.

    Args:
        script (str): The script of control parameters.
        coordinates (Amber .crd format): starting coordinates.
        topology (Amber .prmtop format): topology and forcefield information.

    Returns:
        final_coordinates (Amber .ncrst format)
        trajectory (Amber .nc format)
        log_file (text file)
    """
    from xbowflow import xflowlib
    import os

    # Round-trip the script text through a scratch file so it becomes an
    # xflowlib file object the kernel can consume, then tidy up.
    with open('script_file', 'w') as handle:
        handle.write(script)
    script_obj = xflowlib.load('script_file')
    os.remove('script_file')

    md_kernel = xflowlib.SubprocessKernel(
        'pmemd -i x.in -c x.crd -p x.prmtop -x x.nc -r x.ncrst -o x.log')
    md_kernel.set_inputs(['x.in', 'x.crd', 'x.prmtop'])
    md_kernel.set_outputs(['x.ncrst', 'x.nc', 'x.log'])
    return md_kernel.run(script_obj, coordinates, topology)
def run_leap(parameters, script, structure):
    """
    Run the Amber leap command on the given structure, using the given
    script.

    Args:
        parameters (str or list of strs): Names of parameter files.
        script (str): The leap input script.
        structure (object): Something with a .save() method that can
            produce a pdb format file.

    Returns:
        topology: Amber topology
        coordinates: Amber coordinates
    """
    from xbowflow import xflowlib
    import os

    # Accept either a single parameter-file name or a list of them.
    param_files = [parameters] if isinstance(parameters, str) else parameters

    # Assemble the full leap input script in memory, then round-trip it
    # through a scratch file to obtain an xflowlib file object.
    text = ''.join('source {} \n'.format(p) for p in param_files)
    text += 'x = loadpdb x.pdb\n'
    text += script
    text += 'saveamberparm x x.prmtop x.rst7\nquit\n'
    with open('leap.in', 'w') as handle:
        handle.write(text)
    leap_input = xflowlib.load('leap.in')
    os.remove('leap.in')

    leap = xflowlib.SubprocessKernel('tleap -f leap.in')
    leap.set_inputs(['leap.in', 'x.pdb'])
    leap.set_outputs(['x.prmtop', 'x.rst7'])
    topology, coordinates = leap.run(leap_input, structure)
    print(leap.STDOUT)
    return topology, coordinates
def reduction(structure):
    """
    Run the Amber ``reduce`` command on a structure (typically used to
    add hydrogens to a PDB file).

    Args:
        structure (object): PDB-format file data for reduce.

    Returns:
        The processed structure, in PDB format.
    """
    from xbowflow import xflowlib
    kernel = xflowlib.SubprocessKernel(
        'amber-shell reduce start.pdb > reduced.pdb')
    kernel.set_inputs(['start.pdb'])
    kernel.set_outputs(['reduced.pdb'])
    return kernel.run(structure)
def parmcheck(prepc):
    """
    Run ``parmchk2`` on a .prepc file to generate the matching .frcmod
    (missing forcefield parameters) file.

    Args:
        prepc (object): .prepc format file data.

    Returns:
        The generated .frcmod file data.
    """
    from xbowflow import xflowlib
    kernel = xflowlib.SubprocessKernel(
        'amber-shell parmchk2 -i ligandHchimera.prepc -a Y -f prepc'
        ' -o ligandHchimera.frcmod')
    kernel.set_inputs(['ligandHchimera.prepc'])
    kernel.set_outputs(['ligandHchimera.frcmod'])
    return kernel.run(prepc)
def params(mol2file):
    """
    Run ``antechamber`` on a mol2 file to produce a GAFF-typed .prepc
    file (gas-phase charges).

    Args:
        mol2file (str): name of the input mol2 file on disk.

    Returns:
        The generated .prepc file data.
    """
    from xbowflow import xflowlib
    mol2_data = xflowlib.load(mol2file)
    kernel = xflowlib.SubprocessKernel(
        'amber-shell antechamber -i chimeraOut.mol2 -fi mol2 -at gaff'
        ' -an y -du y -o antechOut.prepc -fo prepc -c gas')
    kernel.set_inputs(['chimeraOut.mol2'])
    kernel.set_outputs(['antechOut.prepc'])
    return kernel.run(mol2_data)
def multirun(client, args):
    """
    Run an ensemble of Gromacs jobs (grompp + mdrun per replicate) and
    save each replicate's output files into its own directory.

    Args:
        client: a Crossflow/Xflow client providing upload() and map().
        args (dict): must contain 'startcrds', 'mdpfile', 'topfile',
            'repdirs' (list of per-replicate output directories) and
            'deffnm' (basename for the saved output files).

    Returns:
        None. Output files are written under each directory in
        args['repdirs']; outputs a failed job did not produce are
        silently skipped.
    """
    # Create and configure kernels:
    cmd1 = 'gmx grompp -f x.mdp -c x.gro -p x.top -o x.tpr'
    grompp = xflowlib.SubprocessKernel(cmd1)
    grompp.set_inputs(['x.mdp', 'x.gro', 'x.top'])
    grompp.set_outputs(['x.tpr'])

    cmd2 = 'gmx mdrun -s x.tpr -o x.trr -x x.xtc -c x.gro -e x.edr -g x.log'
    mdrun = xflowlib.SubprocessKernel(cmd2)
    mdrun.set_inputs(['x.tpr'])
    mdrun.set_outputs(['x.trr', 'x.xtc', 'x.gro', 'x.edr', 'x.log'])

    # Upload data once; the client broadcasts shared inputs across the map.
    startcrds = client.upload(xflowlib.load(args['startcrds']))
    mdpfile = client.upload(xflowlib.load(args['mdpfile']))
    topfile = client.upload(xflowlib.load(args['topfile']))

    # Run kernels: one grompp + mdrun per replicate directory.
    mdpfiles = [mdpfile] * len(args['repdirs'])
    tprfiles = client.map(grompp, mdpfiles, startcrds, topfile)
    trrfiles, xtcfiles, grofiles, edrfiles, logfiles = client.map(
        mdrun, tprfiles)

    deffnm = args['deffnm']
    # Map each output extension to its list of result futures so the save
    # logic is written once instead of five near-identical stanzas.
    outputs = {
        'trr': trrfiles,
        'xtc': xtcfiles,
        'gro': grofiles,
        'edr': edrfiles,
        'log': logfiles,
    }
    for i, d in enumerate(args['repdirs']):
        # exist_ok avoids the check-then-create race of the original
        # os.path.exists()/os.mkdir() pair.
        os.makedirs(d, exist_ok=True)
        for ext, futures in outputs.items():
            data = futures[i].result()
            # A failed job may leave some outputs unset - skip those.
            if data is not None:
                data.save('{}/{}.{}'.format(d, deffnm, ext))
def clean_up(structure):
    """
    Clean up a PDB file to make it ready for use by Amber tools.

    Args:
        structure (object): Something with a .save() method - e.g.
            mdtraj.Trajectory.

    Returns:
        xflow.CompressedFileObject: cleaned-up data in PDB format.
    """
    from xbowflow import xflowlib
    kernel = xflowlib.SubprocessKernel(
        'amber-shell pdb4amber -y -i bad.pdb -o fixed.pdb')
    kernel.set_inputs(['bad.pdb'])
    kernel.set_outputs(['fixed.pdb'])
    return kernel.run(structure)
def equilibration(client, args):
    """
    Run a three-stage restrained MD equilibration, then save the final
    trajectory and restart files.

    Args:
        client: a Crossflow/Xflow client providing upload() and submit().
        args (dict): must contain 'mdexe', 'prmtop', 'mdin1', 'mdin2',
            'mdin3', 'startcrds', 'outtraj' and 'outcrds'.

    Returns:
        None. The last stage's trajectory is saved to args['outtraj']
        and its restart file to args['outcrds'].
    """
    # One template kernel; the three stages differ only in their md.in file.
    cmd = ('{mdexe} -O -i md.in -o md.out -c md.crd -p md.prmtop'
           ' -r md.rst -ref ref.rst -x md.nc').format(**args)
    template = xflowlib.SubprocessKernel(cmd)
    template.set_inputs(['md.crd', 'ref.rst'])
    template.set_outputs(['md.rst', 'md.nc'])
    template.set_constant('md.prmtop', args['prmtop'])
    template.set_constant('md.in', args['mdin1'])

    stages = [template]
    for key in ('mdin2', 'mdin3'):
        kernel = template.copy()
        kernel.set_constant('md.in', args[key])
        stages.append(kernel)

    startcrds = client.upload(xflowlib.load(args['startcrds']))

    # Chain the stages: each restarts from the previous stage's restart
    # file, always restrained against the starting coordinates.
    restart = startcrds
    trajfile = None
    for kernel in stages:
        restart, trajfile = client.submit(kernel, restart, startcrds)

    trajfile.result().save(args['outtraj'])
    restart.result().save(args['outcrds'])
result = mdt.load(crdfile.as_file(), top=topfile.as_file()) return result if __name__ == '__main__': mc = XflowClient() inpcrd = xflowlib.load('csaw.rst7') mdin1 = xflowlib.load('tmd_1.in') mdin2 = xflowlib.load('tmd_2.in') mdin3 = xflowlib.load('production_md.in') prmtop = xflowlib.load('csaw.prmtop') md1 = xflowlib.SubprocessKernel( 'pmemd.cuda -O -i x.mdin -c x.rst7 -p x.prmtop -r out.rst7 -ref ref.rst7 -o x.mdout' ) md1.set_inputs(['x.mdin', 'x.rst7', 'ref.rst7']) md1.set_outputs(['out.rst7', 'x.mdout']) md1.set_constant('x.prmtop', prmtop) md2 = xflowlib.SubprocessKernel( 'pmemd.cuda -O -i x.mdin -c x.rst7 -p x.prmtop -x out.nc -o x.mdout') md2.set_inputs(['x.mdin', 'x.rst7']) md2.set_outputs(['out.nc', 'x.mdout']) md2.set_constant('x.prmtop', prmtop) npoints = 10 start_time = time.time() # Convert the initial coordinates to a single-frame trajectory - this
import mdtraj as mdt
from xbowflow import xflowlib
from xbowflow.clients import XflowClient

# Create and configure the kernels. A kernel is a python function that
# runs a program that would normally be run from the command line, and
# returns the results the command produces.
#
# First, a kernel that turns "grompp" into a python function. It takes
# three inputs (mdp file data, starting coordinates data, and topology
# data) and returns one output (the tpr file data).
grompp_command = 'gmx grompp -f x.mdp -c x.gro -p x.top -o x.tpr'
grompp = xflowlib.SubprocessKernel(grompp_command)
grompp.set_inputs(['x.mdp', 'x.gro', 'x.top'])
grompp.set_outputs(['x.tpr'])

# Now the same for "mdrun". It takes one input (the tpr file data from
# grompp) and returns three outputs: the compressed trajectory data, the
# final coordinates data, and the job log file.
print('Creating the grompp and mdrun kernels...')
mdrun_command = 'gmx mdrun -s x.tpr -x x.xtc -c x.gro -g x.log'
mdrun = xflowlib.SubprocessKernel(mdrun_command)
mdrun.set_inputs(['x.tpr'])
mdrun.set_outputs(['x.xtc', 'x.gro', 'x.log'])

# Start a Crossflow client - this is how individual jobs are sent out to
# the worker nodes.
print('Starting a Crossflow client...')
client = XflowClient()
from xbowflow import xflowlib
from xbowflow.clients import XflowClient
import subprocess  # NOTE(review): unused here - confirm before removing

# A minimal kernel wrapping "ls"; stdout is redirected into 'output'.
ls = xflowlib.SubprocessKernel('ls {x} > output')
ls.set_inputs(['x'])
ls.set_outputs(['output'])

if __name__ == '__main__':
    client = XflowClient(local=True)
    # Exercise the kernel twice - once with a valid flag, once with a
    # bogus argument - saving whatever output each run produced.
    for testdata, logname in (('-a', 'ls-result1.log'),
                              ('crap', 'ls-result2.log')):
        output = client.submit(ls, testdata)
        output.result().save(logname)
    client.close()