def setup(self):

    # prepare input for all replicas
    writeInputs.writeInputs(max_temp=self._max_temp,
                            min_temp=self._min_temp,
                            replicas=self._en_size,
                            timesteps=self._timesteps,
                            basename=self._basename)

    # and tar it up
    tar = tarfile.open("input_files.tar", "w")
    for name in [self._basename + ".prmtop",
                 self._basename + ".inpcrd",
                 self._basename + ".mdin"]:
        tar.add(name)

    # add the per-replica input files, removing the originals once archived
    for replica in self._replicas:
        tar.add('mdin-%s-0' % replica.rid)
        os.remove('mdin-%s-0' % replica.rid)

    tar.close()

    # create a single pipeline with one stage to transfer the tarball
    task = re.Task()
    task.name              = 'untarTsk'
    task.executable        = ['python']
    task.upload_input_data = ['untar_input_files.py', 'input_files.tar']
    task.arguments         = ['untar_input_files.py', 'input_files.tar']
    task.cpu_reqs          = {'processes'          : 1,
                              'thread_type'        : None,
                              'threads_per_process': 1,
                              'process_type'       : None}
    task.post_exec         = []

    stage = re.Stage()
    stage.name = 'untarStg'
    stage.add_tasks(task)

    setup_pipeline = re.Pipeline()
    setup_pipeline.name = 'untarPipe'
    setup_pipeline.add_stages(stage)

    return [setup_pipeline]
def setup_replicas(replicas, min_temp, max_temp, timesteps, basename):

    writeInputs.writeInputs(max_temp=max_temp,
                            min_temp=min_temp,
                            replicas=replicas,
                            timesteps=timesteps,
                            basename=basename)

    # tar up the shared and per-replica input files
    tar = tarfile.open("input_files.tar", "w")
    for name in [basename + ".prmtop",
                 basename + ".inpcrd",
                 basename + ".mdin"]:
        tar.add(name)

    for r in range(replicas):
        tar.add('mdin-{replica}-{cycle}'.format(replica=r, cycle=0))
        os.remove('mdin-{replica}-{cycle}'.format(replica=r, cycle=0))

    tar.close()

    setup_p = re.Pipeline()
    setup_p.name = 'untarPipe'

    untar_stg = re.Stage()
    untar_stg.name = 'untarStg'

    # untar task (cpu_reqs is a dict, matching the EnTK Task API used above)
    untar_tsk = re.Task()
    untar_tsk.name              = 'untarTsk'
    untar_tsk.executable        = ['python']
    untar_tsk.upload_input_data = ['untar_input_files.py', 'input_files.tar']
    untar_tsk.arguments         = ['untar_input_files.py', 'input_files.tar']
    untar_tsk.cpu_reqs          = {'processes'          : 1,
                                   'thread_type'        : None,
                                   'threads_per_process': 1,
                                   'process_type'       : None}
    untar_tsk.post_exec         = []

    untar_stg.add_tasks(untar_tsk)
    setup_p.add_stages(untar_stg)

    replica_sandbox = '$Pipeline_%s_Stage_%s_Task_%s' \
                    % (setup_p.name, untar_stg.name, untar_tsk.name)

    return setup_p, replica_sandbox
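# ---------------------------------------------------------------------------
# The untar task above runs `python untar_input_files.py input_files.tar`
# inside its sandbox.  The helper script itself is not shown in this file;
# a minimal sketch of what it presumably does (extract the tarball named in
# argv[1] into the current working directory) would be:
#
#     import sys
#     import tarfile
#
#     if __name__ == '__main__':
#         with tarfile.open(sys.argv[1], 'r') as tar:
#             tar.extractall()
# ---------------------------------------------------------------------------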
def setup(self):

    self._log.debug('=== data staging')

    # prepare input for all replicas
    writeInputs.writeInputs(max_temp=self._max_temp,
                            min_temp=self._min_temp,
                            replicas=self._size,
                            timesteps=self._timesteps,
                            basename=self._basename)

    # and tar it up
    tar = tarfile.open("input_files.tar", "w")
    for name in [self._basename + ".prmtop",
                 self._basename + ".inpcrd",
                 self._basename + ".mdin"]:
        tar.add(name)

    for replica in self._replicas:
        tar.add('mdin-%s-0' % replica.rid)
        os.remove('mdin-%s-0' % replica.rid)

    tar.close()

    # create a single pipeline with one stage to transfer the tarball
    task = re.Task()
    task.name              = 'untarTsk'
    task.executable        = 'python'
    task.upload_input_data = ['untar_input_files.py', 'input_files.tar']
    task.arguments         = ['untar_input_files.py', 'input_files.tar']
    task.cpu_reqs          = {'processes'          : 1,
                              'thread_type'        : None,
                              'threads_per_process': 1,
                              'process_type'       : None}
    task.post_exec         = []

    stage = re.Stage()
    stage.name = 'untarStg'
    stage.add_tasks(task)

    setup = re.Pipeline()
    setup.name = 'untarPipe'
    setup.add_stages(stage)

    # run the setup pipeline
    self.workflow = set([setup])
    self.run()
def InitCycle(self, Replicas, Replica_Cores, md_executable, ExchangeMethod,
              timesteps):
    # "Cycle" = 1 MD stage plus the subsequent exchange computation
    """
    Initial cycle consists of:
    1) Create tarball of MD input data
    2) Transfer the tarball to pilot sandbox
    3) Untar the tarball
    4) Run first Cycle
    """

    # Initialize Pipeline
    p = Pipeline()
    p.name = 'initpipeline'

    md_dict  = dict()  # bookkeeping
    tar_dict = dict()  # bookkeeping

    # Write the input files
    self._prof.prof('InitWriteInputs', uid=self._uid)

    writeInputs.writeInputs(max_temp=350, min_temp=250,
                            replicas=Replicas, timesteps=timesteps)

    self._prof.prof('EndWriteInputs', uid=self._uid)
    self._prof.prof('InitTar', uid=self._uid)

    # Create tarball of input data
    tar = tarfile.open("Input_Files.tar", "w")
    for name in ["prmtop", "inpcrd", "mdin"]:
        tar.add(name)
    for r in range(Replicas):
        tar.add('mdin_{0}'.format(r))
    tar.close()

    # Delete all input files outside the tarball
    for r in range(Replicas):
        os.remove('mdin_{0}'.format(r))

    self._prof.prof('EndTar', uid=self._uid)

    # Create untar stage
    untar_stg = Stage()
    untar_stg.name = 'untarStg'

    # Untar task
    untar_tsk = Task()
    untar_tsk.name              = 'untartsk'
    untar_tsk.executable        = ['python']
    untar_tsk.upload_input_data = ['untar_input_files.py', 'Input_Files.tar']
    untar_tsk.arguments         = ['untar_input_files.py', 'Input_Files.tar']
    untar_tsk.cores             = 1

    untar_stg.add_tasks(untar_tsk)
    p.add_stages(untar_stg)

    tar_dict[0] = '$Pipeline_%s_Stage_%s_Task_%s' \
                % (p.name, untar_stg.name, untar_tsk.name)

    # First MD stage: needs to be defined separately, since the workflow is
    # not built from a predetermined order
    md_stg = Stage()
    md_stg.name = 'mdstg0'
    self._prof.prof('InitMD_0', uid=self._uid)

    # MD tasks
    for r in range(Replicas):

        md_tsk = AMBERTask(cores=Replica_Cores, MD_Executable=md_executable)
        md_tsk.name = 'mdtsk-{replica}-{cycle}'.format(replica=r, cycle=0)
        md_tsk.link_input_data += [
            '%s/inpcrd' % tar_dict[0],
            '%s/prmtop' % tar_dict[0],
            '%s/mdin_{0}'.format(r) % tar_dict[0]  # use for full temperature exchange
          # '%s/mdin' % tar_dict[0]                # testing only
        ]
        md_tsk.arguments = ['-O',
                            '-p',   'prmtop',
                            '-i',   'mdin_{0}'.format(r),  # use for full temperature exchange
                            '-c',   'inpcrd',
                            '-o',   'out_{0}'.format(r),
                            '-inf', 'mdinfo_{0}'.format(r)]
        md_dict[r] = '$Pipeline_%s_Stage_%s_Task_%s' \
                   % (p.name, md_stg.name, md_tsk.name)

        md_stg.add_tasks(md_tsk)
        self.md_task_list.append(md_tsk)

    p.add_stages(md_stg)

    # First exchange stage
    ex_stg = Stage()
    ex_stg.name = 'exstg0'
    self._prof.prof('InitEx_0', uid=self._uid)

    # Create the exchange task.  The exchange task checks the Metropolis-
    # Hastings thermodynamic balance condition and writes the
    # exchangePairs.dat file, which contains a sorted list of ordered pairs.
    # Those pairs then exchange configurations by linking output
    # configuration files appropriately.
    ex_tsk = Task()
    ex_tsk.name              = 'extsk0'
    ex_tsk.executable        = ['python']
    ex_tsk.upload_input_data = [ExchangeMethod]
    for r in range(Replicas):
        ex_tsk.link_input_data += ['%s/mdinfo_%s' % (md_dict[r], r)]
    ex_tsk.arguments = ['TempEx.py', '{0}'.format(Replicas), '0']
    ex_tsk.cores     = 1
    ex_tsk.mpi       = False
    ex_tsk.download_output_data = ['exchangePairs_0.dat']

    ex_stg.add_tasks(ex_tsk)
    p.add_stages(ex_stg)
    self.ex_task_list.append(ex_tsk)

    self.Book.append(md_dict)

    return p
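# ---------------------------------------------------------------------------
# AMBERTask is a task factory defined elsewhere in the project.  A minimal
# sketch of the shape InitCycle relies on (an assumption, not the project's
# actual definition) would be:
#
#     def AMBERTask(cores, MD_Executable):
#         task            = Task()
#         task.executable = [MD_Executable]  # e.g. path to sander / pmemd
#         task.cores      = cores            # cores per MD replica
#         task.mpi        = (cores > 1)      # assumed: use MPI when multi-core
#         return task
# ---------------------------------------------------------------------------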
def init_cycle(self, replicas, replica_cores, python_path, md_executable,
               exchange_method, min_temp, max_temp, timesteps, basename,
               pre_exec):
    # "cycle" = 1 MD stage plus the subsequent exchange computation
    """
    Initial cycle consists of:
    1) Create tarball of MD input data
    2) Transfer the tarball to pilot sandbox
    3) Untar the tarball
    4) Run first cycle
    """

    # Initialize Pipeline
    self._prof.prof('InitTar', uid=self._uid)
    p = Pipeline()
    p.name = 'initpipeline'

    md_dict  = dict()  # bookkeeping
    tar_dict = dict()  # bookkeeping

    # Write the input files
    self._prof.prof('InitWriteInputs', uid=self._uid)

    writeInputs.writeInputs(max_temp=max_temp, min_temp=min_temp,
                            replicas=replicas, timesteps=timesteps,
                            basename=basename)

    self._prof.prof('EndWriteInputs', uid=self._uid)
    self._prof.prof('InitTar', uid=self._uid)

    # Create tarball of input data
    tar = tarfile.open("input_files.tar", "w")
    for name in [basename + ".prmtop",
                 basename + ".inpcrd",
                 basename + ".mdin"]:
        tar.add(name)
    for r in range(replicas):
        tar.add('mdin_{0}'.format(r))
    tar.close()

    # Delete all input files outside the tarball
    for r in range(replicas):
        os.remove('mdin_{0}'.format(r))

    self._prof.prof('EndTar', uid=self._uid)

    # Create untar stage.  The helper script is resolved relative to the
    # repository root (requires GitPython's `git` module).
    repo = git.Repo('.', search_parent_directories=True)
    aux_function_path = repo.working_tree_dir

    untar_stg = Stage()
    untar_stg.name = 'untarStg'

    # Untar task (cpu_reqs is a dict, matching the EnTK Task API)
    untar_tsk = Task()
    untar_tsk.name              = 'untartsk'
    untar_tsk.executable        = ['python']
    untar_tsk.upload_input_data = [str(aux_function_path) + '/repex/untar_input_files.py',
                                   'input_files.tar']
    untar_tsk.arguments         = ['untar_input_files.py', 'input_files.tar']
    untar_tsk.cpu_reqs          = {'processes'          : 1,
                                   'thread_type'        : None,
                                   'threads_per_process': 1,
                                   'process_type'       : None}

    untar_stg.add_tasks(untar_tsk)
    p.add_stages(untar_stg)

    tar_dict[0] = '$Pipeline_%s_Stage_%s_Task_%s' \
                % (p.name, untar_stg.name, untar_tsk.name)

    # First MD stage: needs to be defined separately, since the workflow is
    # not built in a predetermined order and equilibration needs to happen
    # first.
    md_stg = Stage()
    md_stg.name = 'mdstg0'
    self._prof.prof('InitMD_0', uid=self._uid)

    # MD tasks
    for r in range(replicas):

        md_tsk = AMBERTask(cores=replica_cores, md_executable=md_executable,
                           pre_exec=pre_exec)
        md_tsk.name = 'mdtsk-{replica}-{cycle}'.format(replica=r, cycle=0)
        md_tsk.link_input_data += [
            '%s/inpcrd' % tar_dict[0],
            '%s/prmtop' % tar_dict[0],
            '%s/mdin_{0}'.format(r) % tar_dict[0]  # use for full temperature exchange
        ]
        md_tsk.arguments = [
            '-O',
            '-p',   'prmtop',
            '-i',   'mdin_{0}'.format(r),
            '-c',   'inpcrd',
            '-o',   'out-{replica}-{cycle}'.format(replica=r, cycle=0),
            '-r',   'restrt',
          # '-r',   'rstrt-{replica}-{cycle}'.format(replica=r, cycle=0),
            '-x',   'mdcrd-{replica}-{cycle}'.format(replica=r, cycle=0),
          # '-o',   '$NODE_LFS_PATH/out-{replica}-{cycle}'.format(replica=r, cycle=0),
          # '-r',   '$NODE_LFS_PATH/rstrt-{replica}-{cycle}'.format(replica=r, cycle=0),
          # '-x',   '$NODE_LFS_PATH/mdcrd-{replica}-{cycle}'.format(replica=r, cycle=0),
            '-inf', 'mdinfo_{0}'.format(r)
        ]
        md_dict[r] = '$Pipeline_%s_Stage_%s_Task_%s' \
                   % (p.name, md_stg.name, md_tsk.name)

        md_stg.add_tasks(md_tsk)
        self.md_task_list.append(md_tsk)

    p.add_stages(md_stg)

    # First exchange stage
    ex_stg = Stage()
    ex_stg.name = 'exstg0'
    self._prof.prof('InitEx_0', uid=self._uid)

    # Create the exchange task
    ex_tsk = Task()
    ex_tsk.name              = 'extsk0'
    ex_tsk.executable        = [python_path]
    ex_tsk.upload_input_data = [exchange_method]
    for r in range(replicas):
        ex_tsk.link_input_data += ['%s/mdinfo_%s' % (md_dict[r], r)]
    ex_tsk.pre_exec  = ['mv *.py exchange_method.py']
    ex_tsk.arguments = ['exchange_method.py', '{0}'.format(replicas), '0']
    ex_tsk.cores     = 1
    ex_tsk.mpi       = False
    ex_tsk.download_output_data = ['exchangePairs_0.dat']

    ex_stg.add_tasks(ex_tsk)
    p.add_stages(ex_stg)
    self.ex_task_list.append(ex_tsk)

    self.book.append(md_dict)

    return p
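# ---------------------------------------------------------------------------
# Hypothetical driver (not part of this module) showing how the pipeline
# returned by init_cycle might be submitted through an EnTK AppManager;
# `ex` and all resource values below are placeholders:
#
#     from radical.entk import AppManager
#
#     amgr = AppManager()
#     amgr.resource_desc = {'resource': 'local.localhost',
#                           'walltime': 60,
#                           'cpus'    : replicas * replica_cores}
#     p = ex.init_cycle(replicas, replica_cores, python_path, md_executable,
#                       exchange_method, min_temp, max_temp, timesteps,
#                       basename, pre_exec)
#     amgr.workflow = set([p])
#     amgr.run()
# ---------------------------------------------------------------------------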