def main():
    """Set up and submit GROMACS pulling simulations for the 6-DNA systems.

    For every (force constant, trial) combination this creates a run
    directory under ``base_dir/6dna``, copies the shared input files into
    it, writes the pull .mdp file and a PBS submission script, and submits
    the job.

    NOTE(review): relies on module-level names defined elsewhere in the
    file: ``os``, ``shutil``, ``randint``, ``base_dir``, ``input_dir``,
    ``jobid``, ``n_nodes``, ``script_utils``, ``operations``, ``_write_mdp``.
    """
    trials = ['a', 'b', 'c']
    constants = [125, 250]  # pull force constants passed as k= to _write_mdp

    # Shared input files copied verbatim into each run directory.
    input_files = [
        'em.mdp',
        'bilayer-graphene-dna-water-ions.gro',
        'index.ndx',
        'topol.top',
    ]

    i = 0  # running job counter; submit_job uses it to spread jobs over nodes
    for constant in constants:
        for trial in trials:
            change_dir = os.path.join(
                base_dir, '6dna', 'k{}_{}'.format(constant, trial))
            if not os.path.isdir(change_dir):
                os.makedirs(change_dir)
            os.chdir(change_dir)

            for filename in input_files:
                shutil.copyfile(os.path.join(input_dir, filename),
                                './{}'.format(filename))

            # seed: single-argument randint(100) is the numpy-style call
            # (stdlib random.randint requires two arguments) -- TODO confirm
            # which randint is imported at the top of this file.
            with open('angled_insertion.mdp', 'w') as f:
                _write_mdp(f, k=constant, seed=randint(100))

            # PBS script: energy-minimize, then run the pull simulation.
            with open('submit.pbs', 'w') as f:
                lines = 'cd {} \n'.format(change_dir)
                lines += 'gmx grompp -f em.mdp -c bilayer-graphene-dna-water-ions.gro -p topol.top -o em -maxwarn 1\n'
                lines += 'gmx mdrun -deffnm em \n'
                lines += 'gmx grompp -f angled_insertion.mdp -c em.gro -p topol.top -maxwarn 1 -n index.ndx -o pull \n'
                lines += 'gmx mdrun -ntomp 8 -ntmpi 2 -gpu_id 01 -deffnm pull \n'
                script_utils.write_rahman_script(
                    f, jobname='6dna_k{}_{}'.format(constant, trial),
                    body=lines)
            operations.submit_job('submit.pbs', jobid, n_nodes, i)
            i += 1
# NOTE(review): this chunk begins mid-statement -- the opening of the
# first system.save(...) call lies outside the visible region, so the
# first two lines below are the tail of that call.  It also references
# loop variables (`i`, `name`) and accumulators (`jobid`, `index`) whose
# definitions are not visible here; indentation reconstructed -- confirm
# nesting against the full file before relying on it.
        box=system.boundingbox, overwrite=True,
        residues=set([p.parent.name for p in system.particles()]))
# Write the coordinate file and GROMACS topology for this system.
system.save('compound.gro', box=system.boundingbox, overwrite=True,
            residues=set([p.parent.name for p in system.particles()]))
bilayer.write_gmx_topology(system, 'compound.top', header=path_to_ff)
# EQ sims
# Copy the shared CHARMM .mdp files into the working directory.
p = subprocess.Popen('cp ~/Programs/setup/Bilayer/mdp_charmm/*.mdp .',
                     shell=True, stdout=subprocess.PIPE,
                     stderr=subprocess.PIPE)
p.wait()
# Write and submit the equilibration PBS script for this system.
with open('eq.pbs', 'w') as f:
    body = 'cd {}\n'.format(os.getcwd())
    body += 'module load gromacs/2018.1\n'
    body += operations.write_eq_lines(gro='compound.gro', top='compound.top')
    script_utils.write_rahman_script(f, jobname="{}_setup".format(name),
                                     body=body)
jobid = operations.submit_job('eq.pbs', jobid, n_nodes, i)
os.chdir(curr_dir)
# Persist the name -> metadata index for the downstream rwmd/production
# scripts (presumably written once after the loop -- TODO confirm).
with open('index.txt', 'w') as f:
    json.dump(index, f, indent=2)
import os
import itertools as it
import script_utils
from operations import submit_job, compute_work, compute_penetration
from multiprocessing import Pool

################
## Restart unfinished pulling runs: for every simulation directory that
## has a pull.tpr but no pull.gro, write a PBS script that resumes the
## run from its checkpoint (pull_prev.cpt) and submit it.
################

n_nodes = 2
jobid = [None] * n_nodes  # per-node job-id slots consumed by submit_job
i = 0  # counts submitted jobs so submit_job can alternate between nodes
curr_dir = os.getcwd()

#sds_folders = ['10sds', '20sds', '30sds', '40sds', '50sds', '60sds']
sds_folders = ['100sds']
k_folders = ['k50']
angle_folders = ['0']
trials = ['a', 'b', 'c']

for combo in it.product(sds_folders, k_folders, angle_folders, trials):
    sim_dir = os.path.join(curr_dir, '{0}/{1}_{2}_{3}'.format(*combo))
    print(sim_dir)
    if os.path.isdir(sim_dir):
        os.chdir(sim_dir)
        # Only resubmit runs that started (pull.tpr) but never finished
        # (no pull.gro yet).
        if os.path.isfile('pull.tpr') and not os.path.isfile('pull.gro'):
            cmd = 'cd {}\n'.format(sim_dir)
            cmd += 'gmx mdrun -deffnm pull -ntomp 8 -ntmpi 2 -gpu_id 01 -append -cpi pull_prev.cpt -pf pull_pullf.xvg -px pull_pullx.xvg \n'
            with open('pull.pbs', 'w') as f:
                script_utils.write_rahman_script(
                    f, jobname='{0}_{3}'.format(*combo), body=cmd)
            submit_job('pull.pbs', jobid, n_nodes, i)
            i += 1
import os
import json
import numpy as np
import operations
import script_utils

################
## Script to iterate through folders, write rwmd files, and submit
################

# Load the folder index written by the setup script (name -> metadata).
# Use a context manager so the file handle is closed deterministically
# (the original json.load(open(...)) leaked it).
with open('index.txt', 'r') as f:
    index = json.load(f)

n_nodes = 3
jobid = [None] * n_nodes  # per-node job-id slots consumed by submit_job
curr_dir = os.getcwd()

for i, name in enumerate(index.keys()):
    os.chdir(os.path.join(curr_dir, name))
    # RWMD chain: both coupling groups ramp between 305 K and 455 K;
    # cooling_rate units are defined by operations.write_rwmd_lines --
    # TODO confirm.
    rwmd_submission = operations.write_rwmd_lines(
        tc_groups=['non-water', 'water'],
        t_pairs=[[305, 455], [305, 455]],
        cooling_rate=1000,
        gro='npt_500ps.gro',
        top='compound.top')
    with open('rwmd_chain.pbs', 'w') as f:
        body = 'cd {}\n'.format(os.getcwd())
        body += rwmd_submission
        script_utils.write_rahman_script(
            f, jobname='{}_rwmd'.format(name), body=body)
    jobid = operations.submit_job('rwmd_chain.pbs', jobid, n_nodes, i)
import os
import json
import numpy as np
import operations
import script_utils

################
## Script to iterate through folders, write production files, and submit
################

# Load the folder index written by the setup script (name -> metadata).
# Use a context manager so the file handle is closed deterministically
# (the original json.load(open(...)) leaked it).
with open('index.txt', 'r') as f:
    index = json.load(f)

n_nodes = 3
jobid = [None] * n_nodes  # per-node job-id slots consumed by submit_job
curr_dir = os.getcwd()

for i, name in enumerate(index.keys()):
    os.chdir(os.path.join(curr_dir, name))
    # Build the npt production command block for this system.
    lines = operations.write_production_lines(filename='npt')
    with open('production.pbs', 'w') as f:
        body = 'cd {}\n'.format(os.getcwd())
        body += lines
        script_utils.write_rahman_script(
            f, jobname='{}_production'.format(name), body=body)
    jobid = operations.submit_job('production.pbs', jobid, n_nodes, i)
import os
import operations
import script_utils

################
## Resubmit npt runs for the graphene aggregation trials.
################

folders = ['trial4', 'trial5', 'trial6']
# Named constant so the job-id slot count and the submit_job argument
# stay in sync (the original hard-coded 2 in both places).
n_nodes = 2
main_dir = os.getcwd()
jobids = [None] * n_nodes  # per-node job-id slots consumed by submit_job

for i, folder in enumerate(folders):
    os.chdir(os.path.join(main_dir, folder))
    body = 'cd {}\n'.format(os.getcwd())
    body += "gmx mdrun -ntomp 8 -ntmpi 2 -gpu_id 01 -deffnm npt"
    with open('submit.pbs', 'w') as f:
        script_utils.write_rahman_script(
            f, jobname="graphene_agg_{}".format(folder), body=body)
    operations.submit_job('submit.pbs', jobids, n_nodes, i)