def launch(self) -> int: """Execute the :class:`Template <template.template.Template>` object.""" # 4. Setup Biobb if self.check_restart(): return 0 self.stage_files() # Creating temporary folder self.tmp_folder = fu.create_unique_dir() fu.log('Creating %s temporary folder' % self.tmp_folder, self.out_log) # 5. Include here all mandatory input files # Copy input_file_path1 to temporary folder shutil.copy(self.io_dict['in']['input_file_path1'], self.tmp_folder) # 6. Prepare the command line parameters as instructions list instructions = ['-j'] if self.boolean_property: instructions.append('-v') fu.log('Appending optional boolean property', self.out_log, self.global_log) # 7. Build the actual command line as a list of items (elements order will be maintained) self.cmd = [ self.executable_binary_property, ' '.join(instructions), self.io_dict['out']['output_file_path'], str( PurePath(self.tmp_folder).joinpath( PurePath(self.io_dict['in']['input_file_path1']).name)) ] fu.log( 'Creating command line with instructions and required arguments', self.out_log, self.global_log) # 8. Repeat for optional input files if provided if self.io_dict['in']['input_file_path2']: # Copy input_file_path2 to temporary folder shutil.copy(self.io_dict['in']['input_file_path2'], self.tmp_folder) # Append optional input_file_path2 to cmd self.cmd.append( str( PurePath(self.tmp_folder).joinpath( PurePath( self.io_dict['in']['input_file_path2']).name))) fu.log('Appending optional argument to command line', self.out_log, self.global_log) # 9. Uncomment to check the command line # print(' '.join(cmd)) # Run Biobb block self.run_biobb() # Remove temporary file(s) if self.remove_tmp: self.tmp_files.append(self.tmp_folder) self.remove_tmp_files() return self.return_code
def create_instructions_file(self, container_io_dict, out_log, err_log):
    """Creates an input file using the properties file settings.

    Writes a cpptraj script (parm / trajin / optional strip / trajout) and
    returns its path.

    Args:
        container_io_dict (dict): staged input/output paths ("in"/"out" keys).
        out_log: logger for standard output messages.
        err_log: logger for error messages (unused here, kept for interface parity).

    Returns:
        str: path of the written instructions (.in) file.
    """
    instructions_list = []
    # different path if container execution or not
    if self.container_path:
        self.instructions_file = str(PurePath(self.container_volume_path).joinpath(self.instructions_file))
    else:
        self.instructions_file = str(PurePath(fu.create_unique_dir()).joinpath(self.instructions_file))
    fu.create_name(prefix=self.prefix, step=self.step, name=self.instructions_file)

    # parm: load topology
    instructions_list.append('parm ' + container_io_dict["in"]["input_top_path"])

    # trajin: load trajectory plus user-provided input parameters
    in_params = get_in_parameters(self.in_parameters, out_log)
    instructions_list.append('trajin ' + container_io_dict["in"]["input_traj_path"] + ' ' + in_params)

    # mask: optionally strip everything outside the selection
    mask = self.in_parameters.get('mask', '')
    if mask:
        strip_mask = get_negative_mask(mask, out_log)
        instructions_list.append('strip ' + strip_mask)

    # trajout: write converted trajectory with output parameters
    out_params = get_out_parameters(self.out_parameters, out_log)
    instructions_list.append('trajout ' + container_io_dict["out"]["output_cpptraj_path"] + ' ' + out_params)

    # create .in file
    with open(self.instructions_file, 'w') as mdp:
        for line in instructions_list:
            mdp.write(line.strip() + '\n')

    return self.instructions_file
def launch(self) -> int: """Execute the :class:`PdbClusterZip <api.pdb_cluster_zip.PdbClusterZip>` api.pdb_cluster_zip.PdbClusterZip object.""" # check input/output paths and parameters self.check_data_params(self.out_log, self.err_log) # Setup Biobb if self.check_restart(): return 0 self.stage_files() check_mandatory_property(self.pdb_code, 'pdb_code', self.out_log, self.__class__.__name__) self.pdb_code = self.pdb_code.strip().lower() file_list = [] #Downloading PDB_files pdb_code_list = get_cluster_pdb_codes(pdb_code=self.pdb_code, cluster=self.cluster, out_log=self.out_log, global_log=self.global_log) unique_dir = fu.create_unique_dir() for pdb_code in pdb_code_list: pdb_file = os.path.join(unique_dir, pdb_code+".pdb") pdb_string = download_pdb(pdb_code=pdb_code, api_id=self.api_id, out_log=self.out_log, global_log=self.global_log) write_pdb(pdb_string, pdb_file, self.filter, self.out_log, self.global_log) file_list.append(os.path.abspath(pdb_file)) #Zipping files fu.log("Zipping the pdb files to: %s" % self.output_pdb_zip_path) fu.zip_list(self.output_pdb_zip_path, file_list, out_log=self.out_log) if self.remove_tmp: # remove temporary folder fu.rm(unique_dir) fu.log('Removed temporary folder: %s' % unique_dir, self.out_log) return 0
def create_instructions_file(self):
    """Creates an input file using the properties file settings.

    Each configured term becomes one line of the instructions file.

    Returns:
        str: path of the written instructions file.
    """
    # Resolve the destination folder: container volume vs a fresh unique dir.
    if self.container_path:
        base_dir = PurePath(self.container_volume_path)
    else:
        base_dir = PurePath(fu.create_unique_dir())
    self.instructions_file = str(base_dir.joinpath(self.instructions_file))
    fu.create_name(prefix=self.prefix, step=self.step, name=self.instructions_file)

    # One instruction per configured term, stripped and newline-terminated.
    with open(self.instructions_file, 'w') as handle:
        handle.writelines(term.strip() + '\n' for term in self.terms)

    return self.instructions_file
def launch(self) -> int: """Execute the :class:`FPocket <fpocket.fpocket.FPocket>` fpocket.fpocket.FPocket object.""" # Get local loggers from launchlogger decorator out_log = getattr(self, 'out_log', None) err_log = getattr(self, 'err_log', None) # check input/output paths and parameters self.check_data_params(out_log, err_log) # Check the properties fu.check_properties(self, self.properties) if self.restart: output_file_list = [self.io_dict["out"]["output_pockets_zip"],self.io_dict["out"]["output_summary"]] if fu.check_complete_files(output_file_list): fu.log('Restart is enabled, this step: %s will the skipped' % self.step, out_log, self.global_log) return 0 # create tmp_folder self.tmp_folder = fu.create_unique_dir() fu.log('Creating %s temporary folder' % self.tmp_folder, out_log) tmp_input = str(PurePath(self.tmp_folder).joinpath('input.pdb')) # copy input_pdb_path to tmp_folder shutil.copy(self.io_dict["in"]["input_pdb_path"], tmp_input) # create cmd cmd = [self.fpocket_path, '-f', tmp_input] # adding extra properties if self.min_radius: cmd.extend(['-m', str(self.min_radius)]) if self.max_radius: cmd.extend(['-M', str(self.max_radius)]) if self.num_spheres: cmd.extend(['-i', str(self.num_spheres)]) fu.log('Executing fpocket', out_log, self.global_log) cmd = fu.create_cmd_line(cmd, out_log=out_log, global_log=self.global_log) returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log, self.global_log).launch() process_output_fpocket(self.tmp_folder, self.io_dict["out"]["output_pockets_zip"], self.io_dict["out"]["output_summary"], self.sort_by, self.remove_tmp, out_log, self.__class__.__name__) return returncode
def launch(self) -> int: """Execute the :class:`FPocketSelect <fpocket.fpocket_select.FPocketSelect>` fpocket.fpocket_select.FPocketSelect object.""" # Get local loggers from launchlogger decorator out_log = getattr(self, 'out_log', None) err_log = getattr(self, 'err_log', None) # check input/output paths and parameters self.check_data_params(out_log, err_log) # Check the properties fu.check_properties(self, self.properties) if self.restart: output_file_list = [ self.io_dict["out"]["output_pocket_pdb"], self.io_dict["out"]["output_pocket_pqr"] ] if fu.check_complete_files(output_file_list): fu.log( 'Restart is enabled, this step: %s will the skipped' % self.step, out_log, self.global_log) return 0 # create tmp_folder self.tmp_folder = fu.create_unique_dir() fu.log('Creating %s temporary folder' % self.tmp_folder, out_log) # decompress the input_pockets_zip file to tmp_folder all_pockets = fu.unzip_list( zip_file=self.io_dict["in"]["input_pockets_zip"], dest_dir=self.tmp_folder, out_log=out_log) pockets_list = [ i for i in all_pockets if ('pocket' + str(self.pocket)) in i ] for p in pockets_list: if PurePath(p).suffix == '.pdb': fu.log( 'Saving %s file' % self.io_dict["out"]["output_pocket_pdb"], out_log) shutil.copy(p, self.io_dict["out"]["output_pocket_pdb"]) else: fu.log( 'Saving %s file' % self.io_dict["out"]["output_pocket_pqr"], out_log) shutil.copy(p, self.io_dict["out"]["output_pocket_pqr"]) if self.remove_tmp: # remove temporary folder fu.rm(self.tmp_folder) fu.log('Removed temporary folder: %s' % self.tmp_folder, out_log) return 0
def __init__(self, input_gro_path: str, input_top_zip_path: str,
             output_trr_path: str, output_gro_path: str,
             output_edr_path: str, output_log_path: str,
             input_cpt_path: str = None, input_ndx_path: str = None,
             input_mdp_path: str = None, output_xtc_path: str = None,
             output_cpt_path: str = None, output_dhdl_path: str = None,
             properties: dict = None, **kwargs) -> None:
    """Constructor of a combined grompp + mdrun building block.

    Splits the incoming ``properties`` dict into a grompp-only subset and an
    mdrun-only subset, and wires grompp's internal TPR output as mdrun's
    input so both stages chain through a temporary file.
    """
    # Properties management
    properties = properties or {}

    # Call parent class constructor
    super().__init__(properties)

    # Keys exclusive to each stage; each stage's copy drops the other's keys.
    grompp_properties_keys = ['mdp', 'maxwarn', 'simulation_type']
    mdrun_properties_keys = ['mpi_bin', 'mpi_np', 'mpi_hostlist', 'checkpoint_time',
                             'num_threads', 'num_threads_mpi', 'num_threads_omp',
                             'num_threads_omp_pme', 'use_gpu', 'gpu_id', 'gpu_tasks',
                             'dev']
    self.properties_grompp = {}
    self.properties_mdrun = {}
    if properties:
        self.global_log = properties.get('global_log', None)
        self.properties_grompp = properties.copy()
        for key in mdrun_properties_keys:
            self.properties_grompp.pop(key, None)
        self.properties_mdrun = properties.copy()
        for key in grompp_properties_keys:
            self.properties_mdrun.pop(key, None)

    # Grompp arguments
    self.input_gro_path = input_gro_path
    self.input_top_zip_path = input_top_zip_path
    # Internal TPR written to a unique temporary folder; never exposed to callers.
    self.output_tpr_path = str(Path(fu.create_unique_dir()).joinpath('internal.tpr'))
    self.input_cpt_path = input_cpt_path
    self.input_ndx_path = input_ndx_path
    self.input_mdp_path = input_mdp_path

    # MDRun arguments (consumes the TPR produced by grompp above)
    self.input_tpr_path = self.output_tpr_path
    self.output_trr_path = output_trr_path
    self.output_gro_path = output_gro_path
    self.output_edr_path = output_edr_path
    self.output_log_path = output_log_path
    self.output_xtc_path = output_xtc_path
    self.output_cpt_path = output_cpt_path
    self.output_dhdl_path = output_dhdl_path
def create_instructions_file(self, container_io_dict, out_log, err_log):
    """Creates an input file using the properties file settings.

    Builds a cpptraj script for a per-residue atomic fluctuation (bfactor)
    analysis and returns its path.

    Args:
        container_io_dict (dict): staged input/output paths ("in"/"out" keys).
        out_log: logger for standard output messages.
        err_log: logger for error messages (unused here, kept for interface parity).

    Returns:
        str: path of the written instructions (.in) file.
    """
    instructions_list = []
    # different path if container execution or not
    if self.container_path:
        self.instructions_file = str(PurePath(self.container_volume_path).joinpath(self.instructions_file))
    else:
        self.instructions_file = str(PurePath(fu.create_unique_dir()).joinpath(self.instructions_file))
    fu.create_name(prefix=self.prefix, step=self.step, name=self.instructions_file)

    # parm: load topology
    instructions_list.append('parm ' + container_io_dict["in"]["input_top_path"])

    # trajin: load trajectory plus user-provided input parameters
    in_params = get_in_parameters(self.in_parameters, out_log)
    instructions_list.append('trajin ' + container_io_dict["in"]["input_traj_path"] + ' ' + in_params)

    # Set up
    instructions_list += setup_structure(self)

    # mask: strip the complement of the selection; keep the positive mask for the reference
    mask = self.in_parameters.get('mask', '')
    ref_mask = ''
    if mask:
        strip_mask = get_negative_mask(mask, out_log)
        ref_mask = get_mask(mask, out_log)
        instructions_list.append('strip ' + strip_mask)

    # reference: optional experimental structure
    reference = self.in_parameters.get('reference', '')
    inp_exp_pth = None
    if "input_exp_path" in container_io_dict["in"]:
        inp_exp_pth = container_io_dict["in"]["input_exp_path"]
    instructions_list += get_reference(reference, container_io_dict["out"]["output_cpptraj_path"], inp_exp_pth, ref_mask, False, self.__class__.__name__, out_log)

    # per-residue fluctuations written as bfactors
    instructions_list.append('atomicfluct out ' + container_io_dict["out"]["output_cpptraj_path"] + ' byres bfactor')

    # create .in file
    with open(self.instructions_file, 'w') as mdp:
        for line in instructions_list:
            mdp.write(line.strip() + '\n')

    return self.instructions_file
def launch(self): """Launches the execution of the CpptrajRandomizeIons module.""" # check input/output paths and parameters self.check_data_params(self.out_log, self.err_log) # Setup Biobb if self.check_restart(): return 0 self.stage_files() # Creating temporary folder self.tmp_folder = fu.create_unique_dir() fu.log('Creating %s temporary folder' % self.tmp_folder, self.out_log) # create cpptraj.in file # trajin randomizeIons.crd # randomizeions :K+,Cl-,Na+ around :DA,DC,DG,DT,D?3,D?5 by 5.0 overlap 3.5 # trajout solv_randion.crd restart # trajout solv_randion.pdb pdb # go instructions_file = str( PurePath(self.tmp_folder).joinpath("cpptraj.in")) with open(instructions_file, 'w') as cpptrajin: cpptrajin.write("trajin " + self.io_dict['in']['input_crd_path'] + " \n") cpptrajin.write("randomizeions " + self.ion_mask + " around " + self.solute_mask + " by " + str(self.distance) + " overlap " + str(self.overlap) + " \n") cpptrajin.write("trajout " + self.io_dict['out']['output_crd_path'] + " restart \n") cpptrajin.write("trajout " + self.io_dict['out']['output_pdb_path'] + " pdb \n") cpptrajin.write("go\n") # Command line self.cmd = [ 'cpptraj ', self.io_dict['in']['input_top_path'], '-i', instructions_file ] # Run Biobb block self.run_biobb() # Copy files to host self.copy_to_host() # remove temporary folder(s) if self.remove_tmp: self.tmp_files.append(self.tmp_folder) self.tmp_files.append("cpptraj.log") self.remove_tmp_files() return self.return_code
def launch(self): """Launches the execution of the ParmedCpinUtil module.""" # check input/output paths and parameters self.check_data_params(self.out_log, self.err_log) # Setup Biobb if self.check_restart(): return 0 self.stage_files() # Creating temporary folder self.tmp_folder = fu.create_unique_dir() fu.log('Creating %s temporary folder' % self.tmp_folder, self.out_log) # cpinutil.py -igb 2 -resname AS4 GL4 -p $1.prmtop -op $1.cpH.prmtop # cpinutil.py -p cln025.cpH.prmtop -igb 2 -system "CLN" -o cpin fu.log( 'Creating command line with instructions and required arguments', self.out_log, self.global_log) self.cmd = [ 'cpinutil.py', '-p', self.io_dict['in']['input_top_path'], '-o', self.io_dict['out']['output_cpin_path'] ] if self.igb: self.cmd.append('-igb') self.cmd.append(str(self.igb)) if self.system: self.cmd.append('-system') self.cmd.append(self.system) if self.resnames: self.cmd.append('-resnames') self.cmd.append(self.resnames) if self.io_dict["out"]["output_top_path"]: self.cmd.append('-op') self.cmd.append(self.io_dict["out"]["output_top_path"]) # Run Biobb block self.run_biobb() # Copy files to host self.copy_to_host() # remove temporary folder(s) if self.remove_tmp: self.tmp_files.append(self.tmp_folder) self.remove_tmp_files() return self.return_code
def launch(self) -> int: """Execute the :class:`GMXTrjConvStrEns <gromacs.gmx_trjconv_str_ens.GMXTrjConvStrEns>` gromacs.gmx_trjconv_str_ens.GMXTrjConvStrEns object.""" # check input/output paths and parameters self.check_data_params(self.out_log, self.err_log) # Setup Biobb if self.check_restart(): return 0 self.stage_files() # if container execution, output to container_volume_path, else create temporary folder to put zip output if self.container_path: output = self.container_volume_path + '/' + self.output_name + '.' + self.output_type else: # create temporary folder self.tmp_folder = fu.create_unique_dir() fu.log('Creating %s temporary folder' % self.tmp_folder, self.out_log) output = self.tmp_folder + '/' + self.output_name + '.' + self.output_type self.cmd = ['echo', '\"'+self.selection+'\"', '|', self.gmx_path, 'trjconv', '-f', self.stage_io_dict["in"]["input_traj_path"], '-s', self.stage_io_dict["in"]["input_top_path"], '-skip', self.skip, '-b', self.start, '-dt', self.dt, '-sep', '-o', output] # checking 'end' gromacs 'bug' if not str(self.end) =="0": self.cmd.append('-e') self.cmd.append(self.end) if self.stage_io_dict["in"]["input_index_path"]: self.cmd.extend(['-n', self.stage_io_dict["in"]["input_index_path"]]) # Run Biobb block self.run_biobb() # Copy files to host self.copy_to_host() if self.container_path: process_output_trjconv_str_ens(self.stage_io_dict['unique_dir'], self.remove_tmp, self.io_dict["out"]["output_str_ens_path"], self.output_name + '*', self.out_log) else: process_output_trjconv_str_ens(self.tmp_folder, self.remove_tmp, self.stage_io_dict["out"]["output_str_ens_path"], '*', self.out_log) return self.return_code
def launch(self): """Launches the execution of the NabBuildDNAStructure module.""" # check input/output paths and parameters self.check_data_params(self.out_log, self.err_log) # Setup Biobb if self.check_restart(): return 0 self.stage_files() # Creating temporary folder self.tmp_folder = fu.create_unique_dir() fu.log('Creating %s temporary folder' % self.tmp_folder, self.out_log) # create .nab file # molecule m; # m = fd_helix( "abdna", "aaaaaaaaaa", "dna" ); # putpdb( "nuc.pdb", m, "-wwpdb"); acid_type = 'dna' if ("rna" in self.helix_type): acid_type = 'rna' instructions_file = str(PurePath(self.tmp_folder).joinpath("nuc.nab")) with open(instructions_file, 'w') as nabin: nabin.write("molecule m; \n") nabin.write("m = fd_helix( \"" + self.helix_type + "\", \"" + self.sequence + "\", \"" + acid_type + "\" ); \n") nabin.write("putpdb( \"" + self.io_dict['out']['output_pdb_path'] + "\" , m, \"-wwpdb\");\n") # Command line self.cmd = ['nab ', '--compiler', self.compiler, '--linker', self.linker, instructions_file, ' ; ./' + self.tmp_folder +'/nuc' ] # Run Biobb block self.run_biobb() # Copy files to host self.copy_to_host() # remove temporary folder(s) if self.remove_tmp: self.tmp_files.append(self.tmp_folder) self.tmp_files.append("nab.log") self.tmp_files.append("tleap.out") self.remove_tmp_files() return self.return_code
def create_instructions_file(self, container_io_dict, out_log, err_log):
    """Creates an input file using the properties file settings.

    Builds a cpptraj strip script; a non-empty ``mask`` is mandatory and the
    step aborts (SystemExit) without one.

    Args:
        container_io_dict (dict): staged input/output paths ("in"/"out" keys).
        out_log: logger for standard output messages.
        err_log: logger for error messages (unused here, kept for interface parity).

    Returns:
        str: path of the written instructions (.in) file.

    Raises:
        SystemExit: if no mask is provided in ``in_parameters``.
    """
    instructions_list = []
    # different path if container execution or not
    if self.container_path:
        self.instructions_file = str(PurePath(self.container_volume_path).joinpath(self.instructions_file))
    else:
        self.instructions_file = str(PurePath(fu.create_unique_dir()).joinpath(self.instructions_file))
    fu.create_name(prefix=self.prefix, step=self.step, name=self.instructions_file)

    # parm: load topology
    instructions_list.append('parm ' + container_io_dict["in"]["input_top_path"])

    # trajin: load trajectory; input parameters are optional here
    in_parameters = self.in_parameters
    in_params = ''
    if in_parameters:
        in_params = get_in_parameters(self.in_parameters, out_log, 'strip')
    instructions_list.append('trajin ' + container_io_dict["in"]["input_traj_path"] + ' ' + in_params)

    # mask: mandatory for a strip operation
    mask = self.in_parameters.get('mask', '')
    if not mask or mask == 'None':
        fu.log('No mask provided, exiting', out_log, self.global_log)
        raise SystemExit('Mask parameter is mandatory')
    strip_mask = get_mask(mask, out_log)
    instructions_list.append('strip ' + strip_mask)

    # trajout: write stripped trajectory with output parameters
    out_params = get_out_parameters(self.out_parameters, out_log)
    instructions_list.append('trajout ' + container_io_dict["out"]["output_cpptraj_path"] + ' ' + out_params)

    # create .in file
    with open(self.instructions_file, 'w') as mdp:
        for line in instructions_list:
            mdp.write(line.strip() + '\n')

    return self.instructions_file
def launch(self):
    """Launches the execution of the Pdb4amberRun module.

    Builds a ``pdb4amber`` command with the configured optional flags and
    executes it through the Biobb machinery.

    Returns:
        int: return code of the execution (0 if skipped by restart).
    """
    # Validate paths/properties and honor restart mode.
    self.check_data_params(self.out_log, self.err_log)
    if self.check_restart():
        return 0
    self.stage_files()

    # Fresh working directory for this step.
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, self.out_log)

    # Base command line
    # sander -O -i mdin/min.mdin -p $1.cpH.prmtop -c ph$i/$1.inpcrd -r ph$i/$1.min.rst7 -o ph$i/$1.min.o
    self.cmd = ['pdb4amber ',
                '-i', self.io_dict['in']['input_pdb_path'],
                '-o', self.io_dict['out']['output_pdb_path']]

    # Optional flags, appended in a fixed order when enabled.
    for flag, enabled in (("-y ", self.remove_hydrogens),
                          ("-d ", self.remove_waters),
                          ("--constantph ", self.constant_pH)):
        if enabled:
            self.cmd.append(flag)

    self.run_biobb()
    self.copy_to_host()

    # Queue the temporary folder for removal when requested.
    if self.remove_tmp:
        self.tmp_files.append(self.tmp_folder)
        self.remove_tmp_files()

    return self.return_code
def launch(self):
    """Launches the execution of the ParmedHMassRepartition module.

    Writes a two-line parmed script (hmassrepartition + outparm) and runs
    ``parmed`` on the input topology.

    Returns:
        int: return code of the execution (0 if skipped by restart).
    """
    # Validate paths/properties and honor restart mode.
    self.check_data_params(self.out_log, self.err_log)
    if self.check_restart():
        return 0
    self.stage_files()

    # Fresh working directory for this step.
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, self.out_log)

    # Parmed configuration (instructions) file.
    instructions_file = str(PurePath(self.tmp_folder).joinpath("parmed.in"))
    script_lines = ("hmassrepartition\n",
                    "outparm " + self.io_dict['out']['output_top_path'] + "\n")
    with open(instructions_file, 'w') as parmedin:
        parmedin.writelines(script_lines)

    self.cmd = ['parmed',
                '-p', self.io_dict['in']['input_top_path'],
                '-i', instructions_file,
                '-O']  # Overwrite output files

    self.run_biobb()
    self.copy_to_host()

    # Queue the temporary folder for removal when requested.
    if self.remove_tmp:
        self.tmp_files.append(self.tmp_folder)
        self.remove_tmp_files()

    return self.return_code
def get_gromacs_version(gmx: str = "gmx") -> int:
    """
    Gets the GROMACS installed version and returns it as an int(3) for
    versions older than 5.1.5 and an int(5) for 20XX versions filling the
    gaps with '0' digits.

    Args:
        gmx (str): ('gmx') Path to the GROMACS binary.

    Returns:
        int: GROMACS version, or 0 if it could not be detected.
    """
    unique_dir = fu.create_unique_dir()
    out_log, err_log = fu.get_logs(path=unique_dir, can_write_console=False)
    cmd = [gmx, "-version"]
    try:
        cmd_wrapper.CmdWrapper(cmd, out_log, err_log).launch()
        pattern = re.compile(r"GROMACS version:\s+(.+)")
        version_str = None
        with open(Path(unique_dir).joinpath('log.out')) as log_file:
            for line in log_file:
                version_str = pattern.match(line.strip())
                if version_str:
                    break
        # Fix: the original fell through to version_str.group(...) with a
        # non-match and relied on the bare except to hide the AttributeError.
        if not version_str:
            return 0
        version = version_str.group(1).replace(".", "").replace("VERSION", "").strip()
        version = "".join([c for c in version if c.isdigit()])
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
        # no longer swallowed; detection stays best-effort.
        return 0
    # Fix: int('') raised an uncaught ValueError when no digits were found.
    if not version:
        return 0
    # "20XX" releases are padded to 5 digits, classic "X.Y.Z" to 3 digits.
    if version.startswith("2"):
        while len(version) < 5:
            version += '0'
    else:
        while len(version) < 3:
            version += '0'
    fu.rm(unique_dir)
    return int(version)
def launch(self) -> int:
    """Execute the :class:`Pmxgentop <pmx.pmxgentop.Pmxgentop>` pmx.pmxgentop.Pmxgentop object.

    Unzips the input topology, runs ``pmx gentop`` on it, strips absolute
    paths from the resulting top file, copies over untouched itp files and
    zips everything into ``output_top_zip_path``.

    Returns:
        int: return code of the pmx execution (0 if skipped by restart).
    """
    # Setup Biobb
    if self.check_restart():
        return 0
    self.stage_files()

    # Check if executable exists (only meaningful outside a container)
    if not self.container_path:
        if not Path(self.pmx_path).is_file():
            if not shutil.which(self.pmx_path):
                raise FileNotFoundError('Executable %s not found. Check if it is installed in your system and correctly defined in the properties' % self.pmx_path)

    # Unzip topology to topology_out
    top_file = fu.unzip_top(zip_file=self.input_top_zip_path, out_log=self.out_log)
    top_dir = str(Path(top_file).parent)

    # Copy extra files to container: topology folder
    if self.container_path:
        fu.log('Container execution enabled', self.out_log)
        fu.log(f"Unique dir: {self.stage_io_dict['unique_dir']}", self.out_log)
        fu.log(f"{self.stage_io_dict['unique_dir']} files: {os.listdir(self.stage_io_dict['unique_dir'])}", self.out_log)
        fu.log(f"Copy all files of the unzipped original topology to unique dir:", self.out_log)
        shutil.copytree(top_dir, str(Path(self.stage_io_dict.get("unique_dir")).joinpath(Path(top_dir).name)))
        top_file = str(Path(self.container_volume_path).joinpath(Path(top_dir).name, Path(top_file).name))

    output_file_name = fu.create_name(prefix=self.prefix, step=self.step, name=str(Path(top_file).name))
    unique_dir_output_file = str(Path(fu.create_unique_dir()).joinpath(output_file_name))
    fu.log(f"unique_dir_output_file: {unique_dir_output_file}", self.out_log)

    if self.container_path:
        fu.log("Change references for container:", self.out_log)
        unique_dir_output_file = str(Path(self.container_volume_path).joinpath(Path(output_file_name)))
        fu.log(f" unique_dir_output_file: {unique_dir_output_file}", self.out_log)

    self.cmd = [self.pmx_path, 'gentop',
                '-o', str(Path(unique_dir_output_file)),
                '-ff', self.force_field,
                '-p', top_file]

    if self.split:
        self.cmd.append('--split')
    if self.scale_mass:
        self.cmd.append('--scale_mass')

    if self.gmx_lib:
        self.environment = os.environ.copy()
        self.environment['GMXLIB'] = self.gmx_lib

    # Run Biobb block
    self.run_biobb()

    # Copy files to host
    self.copy_to_host()

    if self.container_path:
        # FIX: `container_io_dict` was referenced here but never defined in
        # this method (NameError in container mode); the staged I/O mapping
        # lives in self.stage_io_dict.
        unique_dir_output_file = str(Path(self.stage_io_dict.get("unique_dir")).joinpath(Path(unique_dir_output_file).name))

    # Remove paths from top file
    with open(Path(unique_dir_output_file)) as top_fh:
        top_lines = top_fh.readlines()
    with open(Path(unique_dir_output_file), 'w') as top_fh:
        for line in top_lines:
            top_fh.write(line.replace(str(Path(unique_dir_output_file).parent) + '/', ''))

    # Copy the not modified itp files
    for orig_itp_file in Path(top_dir).iterdir():
        fu.log(f'Check if {str(Path(unique_dir_output_file).parent.joinpath(Path(orig_itp_file).name))} exists', self.out_log, self.global_log)
        if not Path(unique_dir_output_file).parent.joinpath(Path(orig_itp_file).name).exists():
            shutil.copy(orig_itp_file, Path(unique_dir_output_file).parent)
            fu.log(f'Copying {str(orig_itp_file)} to: {str(Path(unique_dir_output_file).parent)}', self.out_log, self.global_log)

    # zip topology
    fu.log('Compressing topology to: %s' % self.io_dict["out"]["output_top_zip_path"], self.out_log, self.global_log)
    fu.zip_top(zip_file=self.io_dict["out"]["output_top_zip_path"], top_file=str(Path(unique_dir_output_file)), out_log=self.out_log)

    self.tmp_files.extend([self.stage_io_dict.get("unique_dir"), top_dir])
    self.remove_tmp_files()

    return self.return_code
def launch(self) -> int: """Execute the :class:`Pmxmutate <pmx.pmxmutate.Pmxmutate>` pmx.pmxmutate.Pmxmutate object.""" # Setup Biobb if self.check_restart(): return 0 self.stage_files() # Check if executable exists if not self.container_path: if not Path(self.pmx_path).is_file(): if not shutil.which(self.pmx_path): raise FileNotFoundError( 'Executable %s not found. Check if it is installed in your system and correctly defined in the properties' % self.pmx_path) # Generate mutations file mutations_dir = fu.create_unique_dir() self.input_mutations_file = create_mutations_file( input_mutations_path=str( Path(mutations_dir).joinpath('mutations.txt')), mutation_list=self.mutation_list, mutation_dict=MUTATION_DICT) # Copy extra files to container: mutations file if self.container_path: fu.log('Container execution enabled', self.out_log) shutil.copy2(self.input_mutations_file, self.stage_io_dict.get("unique_dir")) self.input_mutations_file = str( Path(self.container_volume_path).joinpath( Path(self.input_mutations_file).name)) self.cmd = [ self.pmx_path, 'mutate', '-f', self.stage_io_dict["in"]["input_structure_path"], '-o', self.stage_io_dict["out"]["output_structure_path"], '-ff', self.force_field, '--script', self.input_mutations_file ] if self.stage_io_dict["in"].get("input_b_structure_path"): self.cmd.append('-fB') self.cmd.append(self.stage_io_dict["in"]["input_b_structure_path"]) if self.resinfo: self.cmd.append('-resinfo') if self.gmx_lib: self.environment = os.environ.copy() self.environment['GMXLIB'] = self.gmx_lib # Run Biobb block self.run_biobb() # Copy files to host self.copy_to_host() self.tmp_files.append(self.stage_io_dict.get("unique_dir")) self.remove_tmp_files() return self.return_code
def launch(self):
    """Launches the execution of the LeapAddIons module.

    Builds a tleap input script (force fields, water model, ion parameters,
    ligand libraries, ion addition, box setup), runs tleap, and — for
    non-cubic boxes — patches the generated coordinate and topology files
    with the truncated-octahedron box taken from the input PDB.

    Returns:
        int: return code of the execution (0 if skipped by restart).
    """
    # check input/output paths and parameters
    self.check_data_params(self.out_log, self.err_log)

    # Setup Biobb
    if self.check_restart():
        return 0
    self.stage_files()

    # Creating temporary folder
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, self.out_log)

    # Water Type
    # leaprc.water.tip4pew, tip4pd, tip3p, spceb, spce, opc, fb4, fb3
    # Values: POL3BOX, QSPCFWBOX, SPCBOX, SPCFWBOX, TIP3PBOX, TIP3PFBOX, TIP4PBOX, TIP4PEWBOX, OPCBOX, OPC3BOX, TIP5PBOX.
    source_wat_command = "source leaprc.water.tip3p"
    if self.water_type == "TIP4PEWBOX":
        source_wat_command = "leaprc.water.tip4pew"
    if self.water_type == "TIP4PBOX":
        source_wat_command = "leaprc.water.tip4pd"
    if re.match(r"SPC", self.water_type):
        source_wat_command = "source leaprc.water.spce"
    if re.match(r"OPC", self.water_type):
        source_wat_command = "source leaprc.water.opc"

    # Counterions
    ions_command = ""
    if self.neutralise:
        #ions_command = ions_command + "addions mol " + self.negative_ions_type + " 0 \n"
        #ions_command = ions_command + "addions mol " + self.positive_ions_type + " 0 \n"
        ions_command = ions_command + "addionsRand mol " + self.negative_ions_type + " 0 \n"
        ions_command = ions_command + "addionsRand mol " + self.positive_ions_type + " 0 \n"

    if self.ionic_concentration and self.negative_ions_number == 0 and self.positive_ions_number == 0:
        # derive ion counts from the requested ionic concentration
        self.find_out_number_of_ions()
        nneg = self.nio  # Update with function
        npos = self.nio  # Update with function
        #ions_command = ions_command + "addions mol " + self.negative_ions_type + " " + str(nneg) + " \n"
        #ions_command = ions_command + "addions mol " + self.positive_ions_type + " " + str(npos) + " \n"
        ions_command = ions_command + "addionsRand mol " + self.negative_ions_type + " " + str(nneg) + " \n"
        ions_command = ions_command + "addionsRand mol " + self.positive_ions_type + " " + str(npos) + " \n"
    else:
        if self.negative_ions_number != 0:
            #ions_command = ions_command + "addions mol " + self.negative_ions_type + " " + str(self.negative_ions_number) + " \n"
            ions_command = ions_command + "addionsRand mol " + self.negative_ions_type + " " + str(self.negative_ions_number) + " \n"
        if self.positive_ions_number != 0:
            #ions_command = ions_command + "addions mol " + self.positive_ions_type + " " + str(self.positive_ions_number) + " \n"
            ions_command = ions_command + "addionsRand mol " + self.positive_ions_type + " " + str(self.positive_ions_number) + " \n"

    # Optional auxiliary inputs: each may be a single file or a zip archive
    ligands_lib_list = []
    if self.io_dict['in']['input_lib_path'] is not None:
        if self.io_dict['in']['input_lib_path'].endswith('.zip'):
            ligands_lib_list = fu.unzip_list(self.io_dict['in']['input_lib_path'], dest_dir=self.tmp_folder, out_log=self.out_log)
        else:
            ligands_lib_list.append(self.io_dict['in']['input_lib_path'])

    ligands_frcmod_list = []
    if self.io_dict['in']['input_frcmod_path'] is not None:
        if self.io_dict['in']['input_frcmod_path'].endswith('.zip'):
            ligands_frcmod_list = fu.unzip_list(self.io_dict['in']['input_frcmod_path'], dest_dir=self.tmp_folder, out_log=self.out_log)
        else:
            ligands_frcmod_list.append(self.io_dict['in']['input_frcmod_path'])

    amber_params_list = []
    if self.io_dict['in']['input_params_path'] is not None:
        if self.io_dict['in']['input_params_path'].endswith('.zip'):
            amber_params_list = fu.unzip_list(self.io_dict['in']['input_params_path'], dest_dir=self.tmp_folder, out_log=self.out_log)
        else:
            amber_params_list.append(self.io_dict['in']['input_params_path'])

    leap_source_list = []
    if self.io_dict['in']['input_source_path'] is not None:
        if self.io_dict['in']['input_source_path'].endswith('.zip'):
            leap_source_list = fu.unzip_list(self.io_dict['in']['input_source_path'], dest_dir=self.tmp_folder, out_log=self.out_log)
        else:
            leap_source_list.append(self.io_dict['in']['input_source_path'])

    instructions_file = str(PurePath(self.tmp_folder).joinpath("leap.in"))
    with open(instructions_file, 'w') as leapin:
        # Forcefields loaded by default:
        # Protein: ff14SB (PARM99 + frcmod.ff99SB + frcmod.parmbsc0 + OL3 for RNA)
        #leapin.write("source leaprc.protein.ff14SB \n")
        # DNA: parmBSC1 (ParmBSC1 (ff99 + bsc0 + bsc1) for DNA. Ivani et al. Nature Methods 13: 55, 2016)
        #leapin.write("source leaprc.DNA.bsc1 \n")
        # Ligands: GAFF (General Amber Force field, J. Comput. Chem. 2004 Jul 15;25(9):1157-74)
        #leapin.write("source leaprc.gaff \n")

        # Forcefields loaded from input forcefield property
        for t in self.forcefield:
            leapin.write("source leaprc.{}\n".format(t))

        # Additional Leap commands
        for leap_commands in leap_source_list:
            leapin.write("source " + leap_commands + "\n")

        # Water Model loaded from input water_model property
        leapin.write(source_wat_command + " \n")

        # Ions Type
        if self.ions_type != "None":
            leapin.write("loadamberparams frcmod." + self.ions_type + "\n")

        # Additional Amber parameters
        for amber_params in amber_params_list:
            leapin.write("loadamberparams " + amber_params + "\n")

        # Ligand(s) libraries (if any)
        for amber_lib in ligands_lib_list:
            leapin.write("loadOff " + amber_lib + "\n")
        for amber_frcmod in ligands_frcmod_list:
            leapin.write("loadamberparams " + amber_frcmod + "\n")

        # Loading PDB file
        leapin.write("mol = loadpdb " + self.io_dict['in']['input_pdb_path'] + " \n")

        # Adding ions
        leapin.write(ions_command)

        # Generating box
        leapin.write("setBox mol vdw \n")

        # Saving output PDB file, coordinates and topology
        leapin.write("savepdb mol " + self.io_dict['out']['output_pdb_path'] + " \n")
        leapin.write("saveAmberParm mol " + self.io_dict['out']['output_top_path'] + " " + self.io_dict['out']['output_crd_path'] + "\n")
        leapin.write("quit \n");

    # Command line
    self.cmd = ['tleap ', '-f', instructions_file]

    # Run Biobb block
    self.run_biobb()

    # Copy files to host
    self.copy_to_host()

    if self.box_type != "cubic":
        fu.log('Fixing truncated octahedron Box in the topology and coordinates files', self.out_log, self.global_log)

        # Taking box info from input PDB file, CRYST1 tag (first line)
        with open(self.io_dict['in']['input_pdb_path']) as file:
            lines = file.readlines()
            pdb_line = lines[0]
            if 'OCTBOX' not in pdb_line:
                fu.log('WARNING: box info not found in input PDB file (OCTBOX). Needed to correctly assign the octahedron box. Assuming cubic box.', self.out_log, self.global_log)
            else:
                # PDB info: CRYST1 86.316 86.316 86.316 109.47 109.47 109.47 P 1
                # PDB info: OCTBOX 86.1942924 86.1942924 86.1942924 109.4712190 109.4712190 109.4712190
                #regex_box = 'CRYST1\s*(\d+\.\d+)\s*(\d+\.\d+)\s*(\d+\.\d+)\s*(\d+\.\d+)\s*(\d+\.\d+)\s*(\d+\.\d+)\s*P 1'
                regex_box = 'OCTBOX\s*(\d+\.\d+)\s*(\d+\.\d+)\s*(\d+\.\d+)\s*(\d+\.\d+)\s*(\d+\.\d+)\s*(\d+\.\d+)'
                box = re.findall(regex_box, pdb_line)[0]
                box_line = ""
                for coord in box:
                    box_line += "{:12.7f}".format(float(coord))

                # PRMTOP info: 1.09471219E+02 8.63157502E+01 8.63157502E+01 8.63157502E+01
                # (angle first, then the three box lengths)
                top_box_line = ""
                top_box_line += ' %.8E' % Decimal(float(box[3]))
                top_box_line += ' %.8E' % Decimal(float(box[0]))
                top_box_line += ' %.8E' % Decimal(float(box[1]))
                top_box_line += ' %.8E' % Decimal(float(box[2]))

                # Removing box generated by tleap from the crd file (last line)
                with open(self.io_dict['out']['output_crd_path']) as file:
                    lines = file.readlines()
                    crd_lines = lines[:-1]

                # Adding old box coordinates (taken from the input pdb)
                crd_lines.append(box_line)

                with open(self.io_dict['out']['output_crd_path'], 'w') as file:
                    for line in crd_lines:
                        file.write(str(line))
                    file.write("\n")

                # Now fixing IFBOX param in prmtop.
                box_flag = False
                ifbox_flag = 0
                #%FLAG BOX_DIMENSIONS
                #%FORMAT(5E16.8)
                #1.09471219E+02 8.63157502E+01 8.63157502E+01 8.63157502E+01
                tmp_parmtop = str(PurePath(self.tmp_folder).joinpath("top_temp.parmtop"))
                shutil.copyfile(self.io_dict['out']['output_top_path'], tmp_parmtop)
                with open(self.io_dict['out']['output_top_path'], 'w') as new_top:
                    with open(tmp_parmtop, 'r') as old_top:
                        for line in old_top:
                            if 'BOX_DIMENSIONS' in line:
                                box_flag = True
                                new_top.write(line)
                            elif box_flag and 'FORMAT' not in line:
                                # replace the tleap box values with the PDB-derived ones
                                new_top.write(top_box_line + "\n")
                                box_flag = False
                            elif 'FLAG POINTERS' in line or ifbox_flag == 1 or ifbox_flag == 2 or ifbox_flag == 3:
                                # count the 4 lines from FLAG POINTERS to the IFBOX row
                                ifbox_flag += 1
                                new_top.write(line)
                            elif ifbox_flag == 4:
                                #new_top.write(top_box_line + "\n")
                                # Overwrite the IFBOX field (cols 56-64) with 2 = truncated octahedron.
                                # NOTE(review): the replacement literal ' 2' replaces an 8-char field
                                # with 2 chars — the padding may have been lost in transit; confirm
                                # the field stays 8 characters wide in the original file.
                                new_top.write(line[:56] + ' 2' + line[64:])
                                ifbox_flag += 1
                            else:
                                new_top.write(line)

    # remove temporary folder(s)
    if self.remove_tmp:
        self.tmp_files.append(self.tmp_folder)
        self.tmp_files.append("leap.log")
        self.remove_tmp_files()

    return self.return_code
def launch(self) -> int:
    """Launches the execution of the GROMACS grompp module.

    Pre-processes an MDP parameter file, a GRO structure and a zipped
    topology into a portable binary run file (TPR).

    Returns:
        int: Return code of the executed grompp command.

    Raises:
        GromacsVersionError: If the detected GROMACS version is older than 5.1.2.

    Examples:
        This is a use example of how to use the Grompp module from Python

        >>> from biobb_md.gromacs.grompp import Grompp
        >>> prop = { 'mdp':{ 'type': 'minimization', 'emtol':'500', 'nsteps':'5000'}}
        >>> Grompp(input_gro_path='/path/to/myStructure.gro', input_top_zip_path='/path/to/myTopology.zip', output_tpr_path='/path/to/NewCompiledBin.tpr', properties=prop).launch()
    """
    tmp_files = []
    # grompp's processed-parameters output file; treated as temporary
    mdout = 'mdout.mdp'
    tmp_files.append(mdout)

    # Get local loggers from launchlogger decorator (may be absent -> None)
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check GROMACS version; only meaningful for host executions, a
    # container image bundles its own GROMACS
    if not self.container_path:
        if self.gmx_version < 512:
            raise GromacsVersionError(
                "Gromacs version should be 5.1.2 or newer %d detected" %
                self.gmx_version)
        fu.log(
            "GROMACS %s %d version detected" %
            (self.__class__.__name__, self.gmx_version), out_log)

    # Restart support: skip this step when all declared outputs already exist
    if self.restart:
        if fu.check_complete_files(self.io_dict["out"].values()):
            # FIX: message previously read "will the skipped"
            fu.log(
                'Restart is enabled, this step: %s will be skipped' %
                self.step, out_log, self.global_log)
            return 0

    # Unzip topology; the extraction directory is removed at the end
    top_file = fu.unzip_top(zip_file=self.input_top_zip_path,
                            out_log=out_log)
    top_dir = str(Path(top_file).parent)
    tmp_files.append(top_dir)

    container_io_dict = fu.copy_to_container(self.container_path,
                                             self.container_volume_path,
                                             self.io_dict)

    # Use the user-provided MDP file as-is, or generate one from properties
    if self.input_mdp_path:
        self.output_mdp_path = self.input_mdp_path
    else:
        mdp_dir = fu.create_unique_dir()
        tmp_files.append(mdp_dir)
        self.output_mdp_path = self.create_mdp(
            path=str(Path(mdp_dir).joinpath(self.output_mdp_path)))

    # Informative logging about the kind of simulation being prepared
    md = self.mdp.get('type', 'minimization')
    if md not in ('index', 'free'):
        fu.log('Will run a %s md of %s steps' % (md, self.nsteps), out_log,
               self.global_log)
    elif md == 'index':
        # FIX: logger arguments were missing here, so this message was
        # dropped instead of written to the step/global logs
        fu.log('Will create a TPR to be used as structure file', out_log,
               self.global_log)
    else:
        fu.log(
            'Will run a %s md of %s' %
            (md, fu.human_readable_time(int(self.nsteps) * float(self.dt))),
            out_log, self.global_log)

    # Container execution: copy MDP + topology folder into the shared
    # volume and rewrite their paths to the in-container locations
    if self.container_path:
        fu.log('Container execution enabled', out_log)

        shutil.copy2(self.output_mdp_path,
                     container_io_dict.get("unique_dir"))
        self.output_mdp_path = str(
            Path(self.container_volume_path).joinpath(
                Path(self.output_mdp_path).name))

        shutil.copytree(
            top_dir,
            str(
                Path(container_io_dict.get("unique_dir")).joinpath(
                    Path(top_dir).name)))
        top_file = str(
            Path(self.container_volume_path).joinpath(
                Path(top_dir).name,
                Path(top_file).name))

    cmd = [
        self.gmx_path, 'grompp', '-f', self.output_mdp_path, '-c',
        container_io_dict["in"]["input_gro_path"], '-r',
        container_io_dict["in"]["input_gro_path"], '-p', top_file, '-o',
        container_io_dict["out"]["output_tpr_path"], '-po', mdout,
        '-maxwarn', self.maxwarn
    ]

    # Optional checkpoint file (-t)
    if container_io_dict["in"].get("input_cpt_path") and Path(
            container_io_dict["in"]["input_cpt_path"]).exists():
        cmd.append('-t')
        if self.container_path:
            shutil.copy2(container_io_dict["in"]["input_cpt_path"],
                         container_io_dict.get("unique_dir"))
            cmd.append(
                str(
                    Path(self.container_volume_path).joinpath(
                        Path(container_io_dict["in"]
                             ["input_cpt_path"]).name)))
        else:
            cmd.append(container_io_dict["in"]["input_cpt_path"])

    # Optional index file (-n)
    if container_io_dict["in"].get("input_ndx_path") and Path(
            container_io_dict["in"]["input_ndx_path"]).exists():
        cmd.append('-n')
        if self.container_path:
            shutil.copy2(container_io_dict["in"]["input_ndx_path"],
                         container_io_dict.get("unique_dir"))
            # FIX: wrap in str() for consistency with the -t branch
            # (previously a Path object was appended to the command list)
            cmd.append(
                str(
                    Path(self.container_volume_path).joinpath(
                        Path(container_io_dict["in"]
                             ["input_ndx_path"]).name)))
        else:
            cmd.append(container_io_dict["in"]["input_ndx_path"])

    # Optional custom GMXLIB environment for force-field lookups
    new_env = None
    if self.gmxlib:
        new_env = os.environ.copy()
        new_env['GMXLIB'] = self.gmxlib

    cmd = fu.create_cmd_line(
        cmd,
        container_path=self.container_path,
        host_volume=container_io_dict.get("unique_dir"),
        container_volume=self.container_volume_path,
        container_working_dir=self.container_working_dir,
        container_user_uid=self.container_user_id,
        container_shell_path=self.container_shell_path,
        container_image=self.container_image,
        out_log=out_log,
        global_log=self.global_log)
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log,
                                        self.global_log, new_env).launch()
    fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)

    # Clean up temporary artifacts (unique container dir, topology dir,
    # generated MDP dir, mdout.mdp)
    tmp_files.append(container_io_dict.get("unique_dir"))
    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)

    return returncode
def launch(self):
    """Launches the execution of the PmemdMDRun module.

    Builds the pmemd command line (topology, coordinates, restart and
    trajectory outputs, plus optional reference / constant-pH files),
    optionally wraps it with an MPI launcher, and executes it through
    the Biobb machinery.

    Returns:
        int: Return code of the executed pmemd command.
    """
    # check input/output paths and parameters
    self.check_data_params(self.out_log, self.err_log)

    # Setup Biobb
    if self.check_restart():
        return 0
    self.stage_files()

    # Creating temporary folder
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, self.out_log)

    # The MDIN control file is always generated from the block properties
    self.output_mdin_path = self.create_mdin(
        path=str(Path(self.tmp_folder).joinpath("pmemd.mdin")))

    # Command line
    # pmemd -O -i mdin/min.mdin -p $1.cpH.prmtop -c ph$i/$1.inpcrd -r ph$i/$1.min.rst7 -o ph$i/$1.min.o
    self.cmd = [self.pmemd_path, '-O',
                '-i', self.output_mdin_path,
                '-p', self.io_dict['in']['input_top_path'],
                '-c', self.io_dict['in']['input_crd_path'],
                '-r', self.io_dict['out']['output_rst_path'],
                '-o', self.io_dict['out']['output_log_path'],
                '-x', self.io_dict['out']['output_traj_path']
                ]

    # Optional inputs/outputs, appended only when provided
    if self.io_dict['in']['input_ref_path']:
        self.cmd.append('-ref')
        self.cmd.append(self.io_dict['in']['input_ref_path'])

    if self.io_dict['in']['input_cpin_path']:
        self.cmd.append('-cpin')
        self.cmd.append(self.io_dict['in']['input_cpin_path'])

    if self.io_dict['out']['output_mdinfo_path']:
        self.cmd.append('-inf')
        self.cmd.append(self.io_dict['out']['output_mdinfo_path'])

    if self.io_dict['out']['output_cpout_path']:
        self.cmd.append('-cpout')
        self.cmd.append(self.io_dict['out']['output_cpout_path'])

    if self.io_dict['out']['output_cprst_path']:
        self.cmd.append('-cprestrt')
        self.cmd.append(self.io_dict['out']['output_cprst_path'])

    # general mpi properties
    if self.mpi_bin:
        mpi_cmd = [self.mpi_bin]
        if self.mpi_np:
            mpi_cmd.append('-n')
            mpi_cmd.append(str(self.mpi_np))
        if self.mpi_flags:
            mpi_cmd.extend(self.mpi_flags)
        # FIX: was `mpi_cmd + cmd`, referencing an undefined local `cmd`
        # (NameError whenever mpi_bin was set); the pmemd command built
        # above lives in self.cmd
        self.cmd = mpi_cmd + self.cmd

    # Run Biobb block
    self.run_biobb()

    # Copy files to host
    self.copy_to_host()

    # remove temporary folder(s)
    if self.remove_tmp:
        self.tmp_files.append(self.tmp_folder)
        self.tmp_files.append("mdinfo")
        self.remove_tmp_files()

    return self.return_code
def launch(self) -> int:
    """Execute the :class:`Pmxanalyse <pmx.pmxanalyse.Pmxanalyse>` pmx.pmxanalyse.Pmxanalyse object."""

    # Setup Biobb: honour restart and stage input/output files
    if self.check_restart():
        return 0
    self.stage_files()

    # Check that the pmx executable exists (host executions only; a
    # container image bundles its own binary)
    if not self.container_path:
        if not Path(self.pmx_path).is_file():
            if not shutil.which(self.pmx_path):
                raise FileNotFoundError(
                    'Executable %s not found. Check if it is installed in your system and correctly defined in the properties'
                    % self.pmx_path)

    # Unzip the two sets of xvg files into unique dirs, keeping only
    # entries that exist and are larger than 10 bytes
    list_a_dir = fu.create_unique_dir()
    list_b_dir = fu.create_unique_dir()
    list_a = list(
        filter(
            lambda f: Path(f).exists() and Path(f).stat().st_size > 10,
            fu.unzip_list(self.input_a_xvg_zip_path, list_a_dir,
                          self.out_log)))
    list_b = list(
        filter(
            lambda f: Path(f).exists() and Path(f).stat().st_size > 10,
            fu.unzip_list(self.input_b_xvg_zip_path, list_b_dir,
                          self.out_log)))
    # Space-separated file lists, passed as single -fA / -fB arguments
    string_a = " ".join(list_a)
    string_b = " ".join(list_b)

    # Copy extra files to container: two directories containing the xvg files
    if self.container_path:
        shutil.copytree(
            list_a_dir,
            Path(self.stage_io_dict.get("unique_dir")).joinpath(
                Path(list_a_dir).name))
        shutil.copytree(
            list_b_dir,
            Path(self.stage_io_dict.get("unique_dir")).joinpath(
                Path(list_b_dir).name))
        # Prefix every entry with the container volume path; the join
        # separator carries the prefix for entries 2..n.
        # NOTE(review): list entries are the unzipped host paths — verify
        # the resulting in-container paths resolve inside the volume.
        container_volume = " " + self.container_volume_path + "/"
        string_a = self.container_volume_path + "/" + container_volume.join(
            list_a)
        string_b = self.container_volume_path + "/" + container_volume.join(
            list_b)

    # Mandatory pmx analyse arguments: both xvg sets and the two outputs
    self.cmd = [
        self.pmx_path, 'analyse', '-fA', string_a, '-fB', string_b, '-o',
        self.stage_io_dict["out"]["output_result_path"], '-w',
        self.stage_io_dict["out"]["output_work_plot_path"]
    ]

    # Optional pmx analyse flags, appended only when the property is set
    if self.method:
        self.cmd.append('-m')
        self.cmd.append(self.method)
    if self.temperature:
        self.cmd.append('-t')
        self.cmd.append(str(self.temperature))
    if self.nboots:
        self.cmd.append('-b')
        self.cmd.append(str(self.nboots))
    if self.nblocks:
        self.cmd.append('-n')
        self.cmd.append(str(self.nblocks))
    if self.integ_only:
        self.cmd.append('--integ_only')
    if self.reverseB:
        self.cmd.append('--reverseB')
    if self.skip:
        self.cmd.append('--skip')
        self.cmd.append(str(self.skip))
    if self.slice:
        self.cmd.append('--slice')
        self.cmd.append(self.slice)
    if self.rand:
        self.cmd.append('--rand')
    if self.index:
        self.cmd.append('--index')
        self.cmd.append(self.index)
    if self.prec:
        self.cmd.append('--prec')
        self.cmd.append(str(self.prec))
    if self.units:
        self.cmd.append('--units')
        self.cmd.append(self.units)
    if self.no_ks:
        self.cmd.append('--no_ks')
    if self.nbins:
        self.cmd.append('--nbins')
        self.cmd.append(str(self.nbins))
    if self.dpi:
        self.cmd.append('--dpi')
        self.cmd.append(str(self.dpi))

    # Run Biobb block
    self.run_biobb()

    # Copy files to host
    self.copy_to_host()

    # Register temporary dirs for cleanup and remove them
    self.tmp_files.extend(
        [self.stage_io_dict.get("unique_dir"), list_a_dir, list_b_dir])
    self.remove_tmp_files()

    return self.return_code
def launch(self):
    """Launches the execution of the LeapSolvate module.

    Writes a tleap instructions file (force fields, water model, optional
    ligand libraries/parameters, solvation box and counterions), runs
    tleap, and prepends the generated box line (OCTBOX tag) to the output
    PDB so downstream blocks can recover the full-precision box.

    Returns:
        int: Return code of the executed tleap command.
    """
    # check input/output paths and parameters
    self.check_data_params(self.out_log, self.err_log)

    # Setup Biobb
    if self.check_restart():
        return 0
    self.stage_files()

    # Creating temporary folder
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, self.out_log)

    # Leap configuration (instructions) file
    instructions_file = str(PurePath(self.tmp_folder).joinpath("leap.in"))

    # Box type: truncated octahedron by default, cube on request
    box_command = "solvateOct"
    if self.box_type == "cubic":
        box_command = "solvateBox"

    # Water Type
    # leaprc.water.tip4pew, tip4pd, tip3p, spceb, spce, opc, fb4, fb3
    # Values: POL3BOX, QSPCFWBOX, SPCBOX, SPCFWBOX, TIP3PBOX, TIP3PFBOX, TIP4PBOX, TIP4PEWBOX, OPCBOX, OPC3BOX, TIP5PBOX.
    source_wat_command = "source leaprc.water.tip3p"
    if self.water_type == "TIP4PEWBOX":
        # FIX: the "source " prefix was missing, producing an invalid
        # leap line ("leaprc.water.tip4pew" alone is not a command)
        source_wat_command = "source leaprc.water.tip4pew"
    if self.water_type == "TIP4PBOX":
        # FIX: same missing "source " prefix.
        # NOTE(review): TIP4PBOX mapping to tip4pd (not tip4p) looks
        # intentional but should be confirmed against AmberTools docs.
        source_wat_command = "source leaprc.water.tip4pd"
    if re.match(r"SPC", self.water_type):
        source_wat_command = "source leaprc.water.spce"
    if re.match(r"OPC", self.water_type):
        source_wat_command = "source leaprc.water.opc"

    # Counterions: neutralisation (count 0 lets leap compute the number)
    # and/or explicit ion counts
    ions_command = ""
    if self.neutralise:
        ions_command = ions_command + "addions mol " + self.negative_ions_type + " 0 \n"
        ions_command = ions_command + "addions mol " + self.positive_ions_type + " 0 \n"

    if self.negative_ions_number != 0:
        ions_command = ions_command + "addions mol " + self.negative_ions_type + " " + str(
            self.negative_ions_number) + " \n"
    if self.positive_ions_number != 0:
        ions_command = ions_command + "addions mol " + self.positive_ions_type + " " + str(
            self.positive_ions_number) + " \n"

    # Optional ligand library (.lib) file(s), possibly zipped
    ligands_lib_list = []
    if self.io_dict['in']['input_lib_path'] is not None:
        if self.io_dict['in']['input_lib_path'].endswith('.zip'):
            ligands_lib_list = fu.unzip_list(
                self.io_dict['in']['input_lib_path'],
                dest_dir=self.tmp_folder,
                out_log=self.out_log)
        else:
            ligands_lib_list.append(self.io_dict['in']['input_lib_path'])

    # Optional ligand frcmod parameter file(s), possibly zipped
    ligands_frcmod_list = []
    if self.io_dict['in']['input_frcmod_path'] is not None:
        if self.io_dict['in']['input_frcmod_path'].endswith('.zip'):
            ligands_frcmod_list = fu.unzip_list(
                self.io_dict['in']['input_frcmod_path'],
                dest_dir=self.tmp_folder,
                out_log=self.out_log)
        else:
            ligands_frcmod_list.append(
                self.io_dict['in']['input_frcmod_path'])

    # Optional additional Amber parameter file(s), possibly zipped
    amber_params_list = []
    if self.io_dict['in']['input_params_path'] is not None:
        if self.io_dict['in']['input_params_path'].endswith('.zip'):
            amber_params_list = fu.unzip_list(
                self.io_dict['in']['input_params_path'],
                dest_dir=self.tmp_folder,
                out_log=self.out_log)
        else:
            amber_params_list.append(
                self.io_dict['in']['input_params_path'])

    # Optional additional leap source script(s), possibly zipped
    leap_source_list = []
    if self.io_dict['in']['input_source_path'] is not None:
        if self.io_dict['in']['input_source_path'].endswith('.zip'):
            leap_source_list = fu.unzip_list(
                self.io_dict['in']['input_source_path'],
                dest_dir=self.tmp_folder,
                out_log=self.out_log)
        else:
            leap_source_list.append(
                self.io_dict['in']['input_source_path'])

    with open(instructions_file, 'w') as leapin:
        # Forcefields loaded from input forcefield property
        for t in self.forcefield:
            leapin.write("source leaprc.{}\n".format(t))

        # Additional Leap commands
        for leap_commands in leap_source_list:
            leapin.write("source " + leap_commands + "\n")

        # Ions Type
        if self.ions_type != "None":
            leapin.write("loadamberparams frcmod." + self.ions_type + "\n")

        # Additional Amber parameters
        for amber_params in amber_params_list:
            leapin.write("loadamberparams " + amber_params + "\n")

        # Water Model loaded from input water_model property
        leapin.write(source_wat_command + " \n")

        # Ligand(s) libraries (if any)
        for amber_lib in ligands_lib_list:
            leapin.write("loadOff " + amber_lib + "\n")
        for amber_frcmod in ligands_frcmod_list:
            leapin.write("loadamberparams " + amber_frcmod + "\n")

        # Loading PDB file
        leapin.write("mol = loadpdb " +
                     self.io_dict['in']['input_pdb_path'] + " \n")

        # Generating box + adding water molecules
        leapin.write(box_command + " mol " + self.water_type + " " +
                     str(self.distance_to_molecule) + " " +
                     str(self.closeness))
        leapin.write(" iso \n") if self.iso else leapin.write("\n")

        # Adding counterions
        leapin.write(ions_command)

        # Saving output PDB file, coordinates and topology
        leapin.write("savepdb mol " +
                     self.io_dict['out']['output_pdb_path'] + " \n")
        leapin.write("saveAmberParm mol " +
                     self.io_dict['out']['output_top_path'] + " " +
                     self.io_dict['out']['output_crd_path'] + "\n")
        leapin.write("quit \n")

    # Command line
    # FIX: executable name carried a trailing space ('tleap ')
    self.cmd = ['tleap', '-f', instructions_file]

    # Run Biobb block
    self.run_biobb()

    # Copy files to host
    self.copy_to_host()

    # Saving octahedron box with all decimals in PDB file. Needed for the add_ions BB.
    # Getting octahedron box from generated crd file (its last line)
    # FIX: guard against an empty crd file (previously `line` could be
    # unbound and raise NameError)
    line = ""
    with open(self.io_dict['out']['output_crd_path'], "r") as file:
        for line in file:
            pass

    # Adding box as a first line in the generated pdb file with OCTBOX tag
    octbox = "OCTBOX " + line
    with open(self.io_dict['out']['output_pdb_path'], 'r+') as f:
        content = f.read()
        f.seek(0, 0)
        f.write(octbox + content)

    # remove temporary folder(s)
    if self.remove_tmp:
        self.tmp_files.append(self.tmp_folder)
        self.tmp_files.append("leap.log")
        self.remove_tmp_files()

    return self.return_code
def launch(self) -> int:
    """Execute the :class:`Titration <cmip.titration.Titration>` object.

    Prepares the CMIP titration parameters (water/ion counts, optional
    neutralisation from the PDB total charge, energy cutoff), writes the
    combined parameters file and runs the titration executable.

    Returns:
        int: Return code of the executed titration command.

    Raises:
        ValueError: If output_pdb_path does not end in ``.pdb``.
    """
    # Setup Biobb
    if self.check_restart():
        return 0

    # Check if output_pdb_path ends with ".pdb"
    if not self.io_dict['out']['output_pdb_path'].endswith('.pdb'):
        fu.log('ERROR: output_pdb_path name must end in .pdb',
               self.out_log, self.global_log)
        raise ValueError("ERROR: output_pdb_path name must end in .pdb")

    # Adding neutral, num_negative_ions, num_positive_ions, num_wats, cutoff
    if self.num_wats:
        self.params['titwat'] = str(self.num_wats)
    if self.num_positive_ions:
        self.params['titip'] = str(self.num_positive_ions)
    if self.num_negative_ions:
        self.params['titim'] = str(self.num_negative_ions)
    if self.neutral:
        # Neutralisation overrides explicit ion counts: add only the ion
        # species that cancels the system's rounded net charge
        charge = get_pdb_total_charge(self.io_dict['in']['input_pdb_path'])
        self.params['titip'] = '0'
        self.params['titim'] = '0'
        if int(round(charge)) > 0:
            self.params['titim'] = str(int(round(charge)))
        elif int(round(charge)) < 0:
            # FIX: value was stored as int; every other params entry is a
            # str, and the params-file writer expects consistent types
            self.params['titip'] = str(abs(int(round(charge))))
        else:
            fu.log(f'Neutral flag activated however no positive or negative ions will be added because the system '
                   f'is already neutralized. System charge: {round(charge, 3)}', self.out_log, self.global_log)
        fu.log(f'Neutral flag activated. Current system charge: {round(charge, 3)}, '
               f'positive ions to be added: {self.params["titip"]}, '
               f'negative ions to be added: {self.params["titim"]}, '
               f'final residual charge: {round(charge + int(self.params["titip"]) - int(self.params["titim"]), 3)}',
               self.out_log, self.global_log)

    if self.energy_cutoff:
        self.params['titcut'] = str(self.energy_cutoff)

    # Merge preset + user params into a single CMIP parameters file
    combined_params_dir = fu.create_unique_dir()
    self.io_dict['in']['combined_params_path'] = create_params_file(
        output_params_path=str(
            Path(combined_params_dir).joinpath(self.output_params_path)),
        input_params_path=self.io_dict['in']['input_params_path'],
        params_preset_dict=params_preset(execution_type='titration'),
        params_properties_dict=self.params)

    self.stage_files()

    # -outpdb takes the output path without the ".pdb" extension
    self.cmd = [self.titration_path,
                '-i', self.stage_io_dict['in']['combined_params_path'],
                '-vdw', self.stage_io_dict['in']['input_vdw_params_path'],
                '-hs', self.stage_io_dict['in']['input_pdb_path'],
                '-outpdb', self.stage_io_dict['out']['output_pdb_path'][:-4]]

    # Run Biobb block
    self.run_biobb()

    # Copy files to host
    self.copy_to_host()

    # Remove temporal files
    self.tmp_files.extend([combined_params_dir])
    self.remove_tmp_files()

    return self.return_code
def main(config, system=None):
    """Run the pmx fast-growth free-energy PyCOMPSs workflow defined by *config*.

    Args:
        config: Path to the workflow configuration file.
        system: Optional system identifier forwarded to the configuration
            reader (used to select machine-specific settings).
    """
    start_time = time.time()
    conf = settings.ConfReader(config, system)
    global_log, _ = fu.get_logs(path=conf.get_working_dir_path(),
                                light_format=True)
    global_prop = conf.get_prop_dic()
    global_paths = conf.get_paths_dic()

    # dhdl outputs collected per thermodynamic state, zipped at the end
    dhdl_paths_listA = []
    dhdl_paths_listB = []

    # First pass over ensembles: image trajectories and extract snapshots
    for ensemble, mutation in conf.properties['mutations'].items():
        ensemble_prop = conf.get_prop_dic(prefix=ensemble)
        ensemble_paths = conf.get_paths_dic(prefix=ensemble)

        # step0_image
        global_log.info(
            ensemble +
            " Step 0: gmx image: Imaging trajectories to remove PBC issues")
        ensemble_paths['step0_image']['input_traj_path'] = conf.properties[
            'input_trajs'][ensemble]['input_traj_path']
        ensemble_paths['step0_image']['input_top_path'] = conf.properties[
            'input_trajs'][ensemble]['input_tpr_path']
        gmx_image_pc(**ensemble_paths["step0_image"],
                     properties=ensemble_prop["step0_image"])

        # step0.1_trjconv
        global_log.info(
            ensemble +
            " Step 0: gmx trjconv: Extract snapshots from equilibrium trajectories"
        )
        ensemble_paths['step0.1_trjconv']['input_top_path'] = conf.properties[
            'input_trajs'][ensemble]['input_tpr_path']
        gmx_trjconv_str_ens_pc(**ensemble_paths["step0.1_trjconv"],
                               properties=ensemble_prop["step0.1_trjconv"])

    # Second pass: unpack the snapshot ensemble and run the per-snapshot
    # mutation/topology/equilibration/TI chain
    for ensemble, mutation in conf.properties['mutations'].items():
        ensemble_prop = conf.get_prop_dic(prefix=ensemble)
        ensemble_paths = conf.get_paths_dic(prefix=ensemble)

        # Block until the async trjconv output file exists on disk
        compss_wait_on_file(
            ensemble_paths["step0.1_trjconv"]["output_str_ens_path"])
        with zipfile.ZipFile(ensemble_paths["step0.1_trjconv"]
                             ["output_str_ens_path"]) as zip_f:
            unique_dir = os.path.abspath(
                fu.create_unique_dir(prefix=ensemble_prop["step0.1_trjconv"]
                                     ['working_dir_path'] + '/' + ensemble +
                                     '/'))
            zip_f.extractall(unique_dir)
            state_pdb_list = [
                os.path.join(unique_dir, name) for name in zip_f.namelist()
            ]

        for pdb_path in state_pdb_list:
            pdb_name = os.path.splitext(os.path.basename(pdb_path))[0]
            prop = conf.get_prop_dic(prefix=os.path.join(ensemble, pdb_name))
            paths = conf.get_paths_dic(prefix=os.path.join(ensemble, pdb_name))

            # step1_pmx_mutate
            global_log.info(ensemble + " " + pdb_name +
                            " Step 1: pmx mutate: Generate Hybrid Structure")
            paths['step1_pmx_mutate']['input_structure_path'] = pdb_path
            prop['step1_pmx_mutate']['mutation_list'] = mutation
            mutate_pc(**paths["step1_pmx_mutate"],
                      properties=prop["step1_pmx_mutate"])

            # NOTE(review): `mut` is assigned here but never read afterwards
            # — looks like a leftover; confirm before removing.
            if ensemble == 'stateA':
                mut = "L2R"
            elif ensemble == 'stateB':
                mut = "R2L"

            # step1.1_check_dummies
            global_log.info(ensemble + " " + pdb_name +
                            " Step 1.1 Check for dummy atoms")
            extract_atoms_pc(**paths['step1.1_check_dummies'],
                             properties=prop['step1.1_check_dummies'])
            compss_wait_on_file(
                paths['step1.1_check_dummies']['output_structure_path'])
            # Non-empty extraction output means the hybrid has dummy atoms
            dummy = bool(
                os.path.getsize(
                    paths['step1.1_check_dummies']['output_structure_path']))

            # step1.2_remove_ligand
            global_log.info(ensemble + " " + pdb_name +
                            " Step 1.2 Remove ligand")
            remove_ligand_pc(**paths['step1.2_remove_ligand'],
                             properties=prop['step1.2_remove_ligand'])

            # step2_gmx_pdb2gmx
            global_log.info(ensemble + " " + pdb_name +
                            " Step 2: gmx pdb2gmx: Generate Topology")
            pdb2gmx_pc(**paths["step2_gmx_pdb2gmx"],
                       properties=prop["step2_gmx_pdb2gmx"])

            # step2.1_sort_gro
            global_log.info(ensemble + " " + pdb_name +
                            " Step 2.1 Sort gro residues")
            sort_gro_residues_pc(**paths['step2.1_sort_gro'],
                                 properties=prop['step2.1_sort_gro'])

            # step2.2_lig_gmx_appendLigand
            global_log.info(
                ensemble + " " + pdb_name +
                " Step 2.2_lig: gmx appendLigand: Append a ligand to a GROMACS topology"
            )
            append_ligand_pc(**paths["step2.2_lig_gmx_appendLigand"],
                             properties=prop["step2.2_lig_gmx_appendLigand"])

            # step3_pmx_gentop
            global_log.info(ensemble + " " + pdb_name +
                            " Step 3: pmx gentop: Generate Hybrid Topology")
            gentop_pc(**paths["step3_pmx_gentop"],
                      properties=prop["step3_pmx_gentop"])

            # Without dummy atoms, skip the freeze/minimisation detour and
            # feed the sorted structure straight into equilibration
            if not dummy:
                paths['step7_gmx_grompp']['input_gro_path'] = paths[
                    'step2.1_sort_gro']['output_gro_path']
            else:
                # step4_gmx_makendx
                global_log.info(
                    ensemble + " " + pdb_name +
                    " Step 4 (Dummies): gmx make_ndx: Generate Gromacs Index file to select atoms to freeze"
                )
                make_ndx_pc(**paths["step4_gmx_makendx"],
                            properties=prop["step4_gmx_makendx"])

                # step5_gmx_grompp
                global_log.info(
                    ensemble + " " + pdb_name +
                    " Step 5 (Dummies): gmx grompp: Creating portable binary run file for energy minimization"
                )
                grompp_ndx_pc(**paths["step5_gmx_grompp"],
                              properties=prop["step5_gmx_grompp"])

                # step6_gmx_mdrun
                global_log.info(
                    ensemble + " " + pdb_name +
                    " Step 6 (Dummies): gmx mdrun: Running energy minimization"
                )
                mdrun_pc(**paths["step6_gmx_mdrun"],
                         properties=prop["step6_gmx_mdrun"])

            # step7_gmx_grompp
            global_log.info(
                ensemble + " " + pdb_name +
                " Step 7: gmx grompp: Creating portable binary run file for system equilibration"
            )
            grompp_pc(**paths["step7_gmx_grompp"],
                      properties=prop["step7_gmx_grompp"])

            # step8_gmx_mdrun
            global_log.info(ensemble + " " + pdb_name +
                            " Step 8: gmx mdrun: Running system equilibration")
            mdrun_pc(**paths["step8_gmx_mdrun"],
                     properties=prop["step8_gmx_mdrun"])

            # step9_gmx_grompp
            global_log.info(
                ensemble + " " + pdb_name +
                " Step 9: Creating portable binary run file for thermodynamic integration (ti)"
            )
            grompp_pc(**paths["step9_gmx_grompp"],
                      properties=prop["step9_gmx_grompp"])

            # step10_gmx_mdrun
            global_log.info(
                ensemble + " " + pdb_name +
                " Step 10: gmx mdrun: Running thermodynamic integration")
            mdrun_dhdl_pc(**paths["step10_gmx_mdrun"],
                          properties=prop["step10_gmx_mdrun"])

            # Collect per-state dhdl outputs for the final analysis
            if ensemble == "stateA":
                dhdl_paths_listA.append(
                    paths["step10_gmx_mdrun"]["output_dhdl_path"])
            elif ensemble == "stateB":
                dhdl_paths_listB.append(
                    paths["step10_gmx_mdrun"]["output_dhdl_path"])

    # Creating zip file containing all the dhdl files
    dhdlA_path = 'dhdlA.zip'
    dhdlB_path = 'dhdlB.zip'
    # Zipping is delegated to a PyCOMPSs task instead of waiting on every
    # dhdl file here (see the commented-out synchronous alternative)
    # for dhdl_file in dhdl_paths_listA:
    #     compss_wait_on_file(dhdl_file)
    # for dhdl_file in dhdl_paths_listB:
    #     compss_wait_on_file(dhdl_file)
    # fu.zip_list(dhdlA_path, dhdl_paths_listA)
    # fu.zip_list(dhdlB_path, dhdl_paths_listB)
    zip_files_pc(dhdl_paths_listA, dhdlA_path)
    zip_files_pc(dhdl_paths_listB, dhdlB_path)

    # step11_pmx_analyse
    global_log.info(
        ensemble +
        " Step 11: pmx analyse: Calculate free energies from fast growth thermodynamic integration simulations"
    )
    global_paths["step11_pmx_analyse"]["input_a_xvg_zip_path"] = dhdlA_path
    global_paths["step11_pmx_analyse"]["input_b_xvg_zip_path"] = dhdlB_path
    analyse_pc(**global_paths["step11_pmx_analyse"],
               properties=global_prop["step11_pmx_analyse"])

    # Final summary
    elapsed_time = time.time() - start_time
    global_log.info('')
    global_log.info('')
    global_log.info('Execution successful: ')
    global_log.info('  Workflow_path: %s' % conf.get_working_dir_path())
    global_log.info('  Config File: %s' % config)
    if system:
        global_log.info('  System: %s' % system)
    global_log.info('')
    global_log.info('Elapsed time: %.1f minutes' % (elapsed_time / 60))
    global_log.info('')
def main(config, imaged_traj_available):
    """Run the pmx fast-growth free-energy PyCOMPSs workflow defined by *config*.

    Args:
        config: Path to the workflow configuration file.
        imaged_traj_available: When truthy, skip the imaging step and feed
            the already-imaged trajectory straight into snapshot extraction.
    """
    start_time = time.time()
    conf = settings.ConfReader(config)
    global_log, _ = fu.get_logs(path=conf.get_working_dir_path(),
                                light_format=True)
    global_prop = conf.get_prop_dic()
    global_paths = conf.get_paths_dic()

    # dhdl outputs collected per thermodynamic state, zipped at the end
    dhdl_paths_listA = []
    dhdl_paths_listB = []

    # First pass over ensembles: (optionally) image and extract snapshots
    for ensemble, mutation in conf.properties['mutations'].items():
        ensemble_prop = conf.get_prop_dic(prefix=ensemble)
        ensemble_paths = conf.get_paths_dic(prefix=ensemble)

        if not imaged_traj_available:
            # step0_image There isn't an imaged trajectory
            global_log.info(
                ensemble +
                " Step 0: gmx image: Imaging trajectories to remove PBC issues"
            )
            ensemble_paths['step0_image']['input_top_path'] = conf.properties[
                'input_trajs'][ensemble]['input_tpr_path']
            ensemble_paths['step0_image']['input_traj_path'] = conf.properties[
                'input_trajs'][ensemble]['input_traj_path']
            gmx_image_pc(**ensemble_paths["step0_image"],
                         properties=ensemble_prop["step0_image"])
        else:
            # An imaged trajectory is available
            ensemble_paths['step1_trjconv_' +
                           ensemble]['input_traj_path'] = conf.properties[
                               'input_trajs'][ensemble]['input_traj_path']

        # step1_trjconv
        global_log.info(
            ensemble +
            " Step 1: gmx trjconv: Extract snapshots from equilibrium trajectories"
        )
        ensemble_paths['step1_trjconv_' +
                       ensemble]['input_top_path'] = conf.properties[
                           'input_trajs'][ensemble]['input_tpr_path']
        gmx_trjconv_str_ens_pc(**ensemble_paths['step1_trjconv_' + ensemble],
                               properties=ensemble_prop['step1_trjconv_' +
                                                        ensemble])

    # Second pass: unpack the snapshot ensemble and run the per-snapshot
    # mutation/topology/equilibration/TI chain
    for ensemble, mutation in conf.properties['mutations'].items():
        ensemble_prop = conf.get_prop_dic(prefix=ensemble)
        ensemble_paths = conf.get_paths_dic(prefix=ensemble)

        # Block until the async trjconv output file exists on disk
        compss_wait_on_file(ensemble_paths['step1_trjconv_' +
                                           ensemble]["output_str_ens_path"])
        with zipfile.ZipFile(
                ensemble_paths['step1_trjconv_' +
                               ensemble]["output_str_ens_path"]) as zip_f:
            unique_dir = os.path.abspath(
                fu.create_unique_dir(
                    prefix=ensemble_prop['step1_trjconv_' +
                                         ensemble]['working_dir_path'] +
                    '/' + ensemble + '/'))
            zip_f.extractall(unique_dir)
            state_pdb_list = [
                os.path.join(unique_dir, name) for name in zip_f.namelist()
            ]

        for pdb_path in state_pdb_list:
            pdb_name = os.path.splitext(os.path.basename(pdb_path))[0]
            prop = conf.get_prop_dic(prefix=os.path.join(ensemble, pdb_name))
            paths = conf.get_paths_dic(prefix=os.path.join(ensemble, pdb_name))

            # step2_pmx_mutate
            global_log.info(ensemble + " " + pdb_name +
                            " Step 2: pmx mutate: Generate Hybrid Structure")
            paths['step2_pmx_mutate']['input_structure_path'] = pdb_path
            prop['step2_pmx_mutate']['mutation_list'] = mutation
            mutate_pc(**paths["step2_pmx_mutate"],
                      properties=prop["step2_pmx_mutate"])

            # step3_check_dummies
            global_log.info(ensemble + " " + pdb_name +
                            " Step 3 Check for dummy atoms")
            extract_atoms_pc(**paths['step3_check_dummies'],
                             properties=prop['step3_check_dummies'])
            compss_wait_on_file(
                paths['step3_check_dummies']['output_structure_path'])
            # FIX: `dummy` was only bound inside the try; if getsize raised,
            # the later `if not dummy:` hit a NameError. Default to False
            # (treat an unreadable check file as "no dummy atoms").
            dummy = False
            try:
                # Non-empty extraction output means the hybrid has dummies
                dummy = bool(
                    os.path.getsize(
                        paths['step3_check_dummies']['output_structure_path']))
            # FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt
            except Exception:
                global_log.info(
                    "Error ocurred while checking the file containing dummy atoms"
                )
                global_log.info(sys.exc_info()[0])

            # step4_gmx_pdb2gmx
            global_log.info(ensemble + " " + pdb_name +
                            " Step 4: gmx pdb2gmx: Generate Topology")
            pdb2gmx_pc(**paths["step4_gmx_pdb2gmx"],
                       properties=prop["step4_gmx_pdb2gmx"])

            # step5_pmx_gentop
            global_log.info(ensemble + " " + pdb_name +
                            " Step 5: pmx gentop: Generate Hybrid Topology")
            gentop_pc(**paths["step5_pmx_gentop"],
                      properties=prop["step5_pmx_gentop"])

            # Without dummy atoms, skip the freeze/minimisation detour and
            # feed the pdb2gmx structure straight into equilibration
            if not dummy:
                paths['step9_gmx_grompp']['input_gro_path'] = paths[
                    'step4_gmx_pdb2gmx']['output_gro_path']
            else:
                # step6_gmx_makendx
                global_log.info(
                    ensemble + " " + pdb_name +
                    " Step 6: (Dummies): gmx make_ndx: Generate Gromacs Index file to select atoms to freeze"
                )
                make_ndx_pc(**paths["step6_gmx_makendx"],
                            properties=prop["step6_gmx_makendx"])

                # step7_gmx_grompp
                global_log.info(
                    ensemble + " " + pdb_name +
                    " Step 7: (Dummies): gmx grompp: Creating portable binary run file for energy minimization"
                )
                grompp_ndx_pc(**paths["step7_gmx_grompp"],
                              properties=prop["step7_gmx_grompp"])

                # step8_gmx_mdrun
                global_log.info(
                    ensemble + " " + pdb_name +
                    " Step 8: (Dummies): gmx mdrun: Running energy minimization"
                )
                mdrun_pc(**paths["step8_gmx_mdrun"],
                         properties=prop["step8_gmx_mdrun"])

            # step9_gmx_grompp
            global_log.info(
                ensemble + " " + pdb_name +
                " Step 9: gmx grompp: Creating portable binary run file for system equilibration"
            )
            grompp_pc(**paths["step9_gmx_grompp"],
                      properties=prop["step9_gmx_grompp"])

            # step10_gmx_mdrun
            global_log.info(
                ensemble + " " + pdb_name +
                " Step 10: gmx mdrun: Running system equilibration")
            mdrun_pc(**paths["step10_gmx_mdrun"],
                     properties=prop["step10_gmx_mdrun"])

            # step11_gmx_grompp
            global_log.info(
                ensemble + " " + pdb_name +
                " Step 11: Creating portable binary run file for thermodynamic integration (ti)"
            )
            grompp_pc(**paths["step11_gmx_grompp"],
                      properties=prop["step11_gmx_grompp"])

            # step12_gmx_mdrun
            global_log.info(
                ensemble + " " + pdb_name +
                " Step 12: gmx mdrun: Running thermodynamic integration")
            mdrun_dhdl_pc(**paths["step12_gmx_mdrun"],
                          properties=prop["step12_gmx_mdrun"])

            # Collect per-state dhdl outputs for the final analysis
            if ensemble == "stateA":
                dhdl_paths_listA.append(
                    paths["step12_gmx_mdrun"]["output_dhdl_path"])
            elif ensemble == "stateB":
                dhdl_paths_listB.append(
                    paths["step12_gmx_mdrun"]["output_dhdl_path"])

    # Creating zip file containing all the dhdl files
    dhdlA_path = os.path.join(
        global_prop["step13_pmx_analyse"]['working_dir_path'], 'dhdlA.zip')
    dhdlB_path = os.path.join(
        global_prop["step13_pmx_analyse"]['working_dir_path'], 'dhdlB.zip')
    # Synchronise on every async dhdl file before zipping locally
    for dhdl_file in dhdl_paths_listA:
        compss_wait_on_file(dhdl_file)
    for dhdl_file in dhdl_paths_listB:
        compss_wait_on_file(dhdl_file)
    fu.zip_list(dhdlA_path, dhdl_paths_listA)
    fu.zip_list(dhdlB_path, dhdl_paths_listB)

    # step13_pmx_analyse
    global_log.info(
        ensemble +
        " Step 13: pmx analyse: Calculate free energies from fast growth thermodynamic integration simulations"
    )
    global_paths["step13_pmx_analyse"]["input_a_xvg_zip_path"] = dhdlA_path
    global_paths["step13_pmx_analyse"]["input_b_xvg_zip_path"] = dhdlB_path
    analyse_pc(**global_paths["step13_pmx_analyse"],
               properties=global_prop["step13_pmx_analyse"])

    # Final summary
    elapsed_time = time.time() - start_time
    global_log.info('')
    global_log.info('')
    global_log.info('Execution successful: ')
    global_log.info('  Workflow_path: %s' % conf.get_working_dir_path())
    global_log.info('  Config File: %s' % config)
    global_log.info('')
    global_log.info('Elapsed time: %.1f minutes' % (elapsed_time / 60))
    global_log.info('')
def launch(self) -> int:
    """Execute the :class:`Grompp <gromacs.grompp.Grompp>` object.

    Builds the MDP file, stages the topology (and extra files when running in a
    container), assembles the ``gmx grompp`` command line and runs it.

    Returns:
        int: Return code of the executed GROMACS process.
    """
    # Setup Biobb
    if self.check_restart():
        return 0
    self.stage_files()

    # Unzip topology to a scratch dir; top_file is the main .top inside it
    top_file = fu.unzip_top(zip_file=self.input_top_zip_path, out_log=self.out_log)
    top_dir = str(Path(top_file).parent)

    # Create MDP file in its own unique dir (merged preset + user properties)
    mdp_dir = fu.create_unique_dir()
    self.output_mdp_path = create_mdp(
        output_mdp_path=str(Path(mdp_dir).joinpath(self.output_mdp_path)),
        input_mdp_path=self.io_dict["in"]["input_mdp_path"],
        preset_dict=mdp_preset(self.simulation_type),
        mdp_properties_dict=self.mdp)

    # Copy extra files to container: MDP file and topology folder, and
    # rewrite their paths so they point inside the container volume
    if self.container_path:
        fu.log('Container execution enabled', self.out_log)
        shutil.copy2(self.output_mdp_path, self.stage_io_dict.get("unique_dir"))
        self.output_mdp_path = str(
            Path(self.container_volume_path).joinpath(
                Path(self.output_mdp_path).name))
        shutil.copytree(
            top_dir,
            str(
                Path(self.stage_io_dict.get("unique_dir")).joinpath(
                    Path(top_dir).name)))
        top_file = str(
            Path(self.container_volume_path).joinpath(
                Path(top_dir).name, Path(top_file).name))

    # Build the gmx grompp command line; every element must be a string
    self.cmd = [
        self.gmx_path, 'grompp', '-f', self.output_mdp_path, '-c',
        self.stage_io_dict["in"]["input_gro_path"], '-r',
        self.stage_io_dict["in"]["input_gro_path"], '-p', top_file, '-o',
        self.stage_io_dict["out"]["output_tpr_path"], '-po', 'mdout.mdp',
        '-maxwarn', str(self.maxwarn)
    ]

    # Optional checkpoint file (-t)
    if self.stage_io_dict["in"].get("input_cpt_path") and Path(
            self.stage_io_dict["in"]["input_cpt_path"]).exists():
        self.cmd.append('-t')
        if self.container_path:
            shutil.copy2(self.stage_io_dict["in"]["input_cpt_path"],
                         self.stage_io_dict.get("unique_dir"))
            self.cmd.append(
                str(
                    Path(self.container_volume_path).joinpath(
                        Path(self.stage_io_dict["in"]
                             ["input_cpt_path"]).name)))
        else:
            self.cmd.append(self.stage_io_dict["in"]["input_cpt_path"])

    # Optional index file (-n)
    if self.stage_io_dict["in"].get("input_ndx_path") and Path(
            self.stage_io_dict["in"]["input_ndx_path"]).exists():
        self.cmd.append('-n')
        if self.container_path:
            shutil.copy2(self.stage_io_dict["in"]["input_ndx_path"],
                         self.stage_io_dict.get("unique_dir"))
            # Fix: wrap in str() — a Path object in self.cmd breaks the
            # string join used to build the final command line (the parallel
            # input_cpt_path branch above already does this)
            self.cmd.append(
                str(
                    Path(self.container_volume_path).joinpath(
                        Path(self.stage_io_dict["in"]
                             ["input_ndx_path"]).name)))
        else:
            self.cmd.append(self.stage_io_dict["in"]["input_ndx_path"])

    # Propagate custom force-field library location to the subprocess
    if self.gmx_lib:
        self.environment = os.environ.copy()
        self.environment['GMXLIB'] = self.gmx_lib

    # Check GROMACS version (only meaningful for host executions)
    if not self.container_path:
        if self.gmx_version < 512:
            raise GromacsVersionError(
                "Gromacs version should be 5.1.2 or newer %d detected" %
                self.gmx_version)
        fu.log(
            "GROMACS %s %d version detected" %
            (self.__class__.__name__, self.gmx_version), self.out_log)

    # Run Biobb block
    self.run_biobb()

    # Copy files to host
    self.copy_to_host()

    # Remove temporal files
    self.tmp_files.extend([
        self.stage_io_dict.get("unique_dir"), top_dir, mdp_dir, 'mdout.mdp'
    ])
    self.remove_tmp_files()

    return self.return_code
def launch(self) -> int:
    """Execute the :class:`FPocketFilter <fpocket.fpocket_filter.FPocketFilter>` fpocket.fpocket_filter.FPocketFilter object.

    Filters the pockets found by fpocket by score, druggability score and/or
    volume ranges, and zips the matching pockets into the output file.

    Returns:
        int: 0 on success (also when the step is skipped on restart or when
        no pocket matches the requested ranges).
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    # Skip the step when restart is enabled and the output already exists
    if self.restart:
        output_file_list = [self.io_dict["out"]["output_filter_pockets_zip"]]
        if fu.check_complete_files(output_file_list):
            # Fix: message read "will the skipped"
            fu.log('Restart is enabled, this step: %s will be skipped' % self.step, out_log, self.global_log)
            return 0

    # load input_summary (fpocket JSON report) into a dictionary
    with open(self.io_dict["in"]["input_summary"]) as json_file:
        data = json.load(json_file)

    # Build the list of matcher predicates and record the active ranges.
    # Each tuple: (property name, requested range, allowed bounds, matcher factory).
    search_list = []
    ranges = {}
    criteria = [
        ('score', self.score, [0, 1], self.score_matcher),
        ('druggability_score', self.druggability_score, [0, 1], self.druggability_score_matcher),
        ('volume', self.volume, [0, 10000], self.volume_matcher),
    ]
    for name, value, bounds, matcher in criteria:
        if value:
            check_range(name, value, bounds, out_log, self.__class__.__name__)
            search_list.append(matcher(value))
            ranges[name] = value

    fu.log('Performing a search under the next parameters: %s' % (', '.join(['{0}: {1}'.format(k, v) for k, v in ranges.items()])), out_log)

    # perform search: keep pockets satisfying every active predicate
    search = [x for x in data if all(f(data[x]) for f in search_list)]

    if not search:
        fu.log('No matches found', out_log)
        return 0

    # Build the report with a single join instead of quadratic += concatenation
    str_out = ''.join(
        '\n**********\n%s\n**********\nscore: %s\ndruggability_score: %s\nvolume: %s\n'
        % (s, data[s]["score"], data[s]["druggability_score"], data[s]["volume"])
        for s in search)
    fu.log('Found %d matches:%s' % (len(search), str_out), out_log)

    # create tmp_folder where the matching pockets are extracted and re-zipped
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, out_log)

    process_output_fpocket_filter(search,
                                  self.tmp_folder,
                                  self.io_dict["in"]["input_pockets_zip"],
                                  self.io_dict["out"]["output_filter_pockets_zip"],
                                  self.remove_tmp,
                                  out_log)

    return 0
def main(config, system=None):
    """Run the pmx fast-growth free-energy COMPSs workflow.

    For every mutation/ensemble in the configuration: images and slices the
    equilibrium trajectories, builds hybrid structures and topologies with pmx,
    minimizes/equilibrates with GROMACS, runs thermodynamic integration, and
    finally analyses the collected dH/dl curves with pmx analyse.

    Args:
        config: Path to the YAML configuration file read by ``settings.ConfReader``.
        system: Optional system selector forwarded to ``settings.ConfReader``.
    """
    start_time = time.time()
    conf = settings.ConfReader(config, system)
    global_log, _ = fu.get_logs(path=conf.get_working_dir_path(),
                                light_format=True)
    global_prop = conf.get_prop_dic()
    global_paths = conf.get_paths_dic()

    # dH/dl output files produced by step10, grouped per thermodynamic state
    dhdl_paths_listA = []
    dhdl_paths_listB = []

    # First pass: per-ensemble trajectory pre-processing (steps 0-1)
    for ensemble, mutation in conf.properties['mutations'].items():
        ensemble_prop = conf.get_prop_dic(prefix=ensemble)
        ensemble_paths = conf.get_paths_dic(prefix=ensemble)

        # step0_image
        global_log.info(
            ensemble +
            " Step 0: gmx image: Imaging trajectories to remove PBC issues")
        # Inputs come from the per-ensemble 'input_trajs' section of the config
        ensemble_paths['step0_image']['input_traj_path'] = conf.properties[
            'input_trajs'][ensemble]['input_traj_path']
        ensemble_paths['step0_image']['input_top_path'] = conf.properties[
            'input_trajs'][ensemble]['input_tpr_path']
        gmx_image_pc(**ensemble_paths["step0_image"],
                     properties=ensemble_prop["step0_image"])

        # step1_trjconv
        global_log.info(
            ensemble +
            " Step 1: gmx trjconv: Extract snapshots from equilibrium trajectories"
        )
        ensemble_paths['step1_trjconv_' +
                       ensemble]['input_top_path'] = conf.properties[
                           'input_trajs'][ensemble]['input_tpr_path']
        gmx_trjconv_str_ens_pc(**ensemble_paths['step1_trjconv_' + ensemble],
                               properties=ensemble_prop['step1_trjconv_' +
                                                        ensemble])

    # Second pass: per-snapshot pmx/GROMACS pipeline (steps 1-10)
    for ensemble, mutation in conf.properties['mutations'].items():
        ensemble_prop = conf.get_prop_dic(prefix=ensemble)
        ensemble_paths = conf.get_paths_dic(prefix=ensemble)

        # Block until the COMPSs task producing the snapshot zip has finished
        compss_wait_on_file(ensemble_paths['step1_trjconv_' +
                                           ensemble]["output_str_ens_path"])
        with zipfile.ZipFile(
                ensemble_paths['step1_trjconv_' +
                               ensemble]["output_str_ens_path"]) as zip_f:
            unique_dir = os.path.abspath(
                fu.create_unique_dir(
                    prefix=ensemble_prop['step1_trjconv_' +
                                         ensemble]['working_dir_path'] + '/' +
                    ensemble + '/'))
            zip_f.extractall(unique_dir)
            state_pdb_list = [
                os.path.join(unique_dir, name) for name in zip_f.namelist()
            ]

        for pdb_path in state_pdb_list:
            pdb_name = os.path.splitext(os.path.basename(pdb_path))[0]
            # Per-snapshot properties/paths live under <ensemble>/<pdb_name>
            prop = conf.get_prop_dic(prefix=os.path.join(ensemble, pdb_name))
            paths = conf.get_paths_dic(prefix=os.path.join(ensemble, pdb_name))

            # Fixing terminal residues
            newpdb = pdb_path + ".term.gro"
            #fix_gro("/gpfs/scratch/bsc19/bsc19611/RATG13-RBD/fixGro.sh", pdb_path, newpdb)
            fix_term(pdb_path, newpdb)

            # Fixing terminal residues
            #newpdb2 = pdb_path + ".term.gro"
            #cmd2 = "sed 's/600ASN /600NASN/g' " + pdb_path + "| sed 's/ 1SER  /  1NSER /g' | sed 's/597ASP /597CASP/g' | sed 's/793PRO /793CPRO/g' > " + newpdb2
            #subprocess.call(cmd2, shell=True)

            paths['step1_pmx_mutate']['input_structure_path'] = newpdb

            # step1_pmx_mutate
            global_log.info(ensemble + " " + pdb_name +
                            " Step 1: pmx mutate: Generate Hybrid Structure")
            prop['step1_pmx_mutate']['mutation_list'] = mutation
            mutate_pc(**paths["step1_pmx_mutate"],
                      properties=prop["step1_pmx_mutate"])

            # step1.1_check_dummies
            global_log.info(ensemble + " " + pdb_name +
                            " Step 1.1 Check for dummy atoms")
            extract_atoms_pc(**paths['step1.1_check_dummies'],
                             properties=prop['step1.1_check_dummies'])
            #compss_wait_on_file(paths['step1.1_check_dummies']['output_structure_path'])
            #try:
            #    dummy = bool(os.path.getsize(paths['step1.1_check_dummies']['output_structure_path']))
            #except:
            #    global_log.info("Error ocurred while checking the file containing dummy atoms")
            #    global_log.info(sys.exc_info()[0])

            # step1.2_remove_ligand
            #global_log.info(ensemble + " " + pdb_name + " Step 1.2 Remove ligand")
            #remove_ligand_pc(**paths['step1.2_remove_ligand'], properties=prop['step1.2_remove_ligand'])

            # step2_gmx_pdb2gmx
            global_log.info(ensemble + " " + pdb_name +
                            " Step 2: gmx pdb2gmx: Generate Topology")
            pdb2gmx_pc(**paths["step2_gmx_pdb2gmx"],
                       properties=prop["step2_gmx_pdb2gmx"])

            # step2.1_sort_gro
            #global_log.info(ensemble + " " + pdb_name + " Step 2.1 Sort gro residues")
            #sort_gro_residues_pc(**paths['step2.1_sort_gro'], properties=prop['step2.1_sort_gro'])

            # step2.2_lig_gmx_appendLigand
            #global_log.info(ensemble + " " + pdb_name +" Step 2.2_lig: gmx appendLigand: Append a ligand to a GROMACS topology")
            #append_ligand_pc(**paths["step2.2_lig_gmx_appendLigand"], properties=prop["step2.2_lig_gmx_appendLigand"])

            # step3_pmx_gentop
            global_log.info(ensemble + " " + pdb_name +
                            " Step 3: pmx gentop: Generate Hybrid Topology")
            gentop_pc(**paths["step3_pmx_gentop"],
                      properties=prop["step3_pmx_gentop"])

            # step4_gmx_makendx, step5_gmx_grompp, step6_gmx_mdrun
            #global_log.info(ensemble + " " + pdb_name +" Check structure and Step 4-6 (Dummies): gmx make_ndx: Generate Gromacs Index file to select atoms to freeze, gmx grompp: Creating portable binary run file for energy minimization, gmx mdrun: Running energy minimization")
            #check_structure_and_run_ndx_pc(ensemble, **paths["step4_gmx_makendx"], properties_makendx=prop["step4_gmx_makendx"], **paths["step5_gmx_grompp"], properties_grompp=prop["step5_gmx_grompp"], **paths["step6_gmx_mdrun"], properties_mdrun=prop["step6_gmx_mdrun"])

            # step4_gmx_makendx, step5_gmx_grompp, step6_gmx_mdrun
            global_log.info(
                ensemble + " " + pdb_name +
                " Step 4-6 (Check structure and ndx): gmx make_ndx: Generate Gromacs Index file to select atoms to freeze, gmx grompp: Creating portable binary run file for energy minimization, gmx mdrun: Running energy minimization"
            )
            # Pre-create the step working dirs: the merged task writes into
            # them directly instead of letting each building block do it
            fu.create_dir(prop["step4_gmx_makendx"]['working_dir_path'] + "/" +
                          ensemble + "/" + pdb_name + "/step4_gmx_makendx")
            fu.create_dir(prop["step5_gmx_grompp"]['working_dir_path'] + "/" +
                          ensemble + "/" + pdb_name + "/step5_gmx_grompp")
            fu.create_dir(prop["step6_gmx_mdrun"]['working_dir_path'] + "/" +
                          ensemble + "/" + pdb_name + "/step6_gmx_mdrun")
            check_structure_and_run_ndx_pc(
                ensemble,
                output_structure_path=paths['step1.1_check_dummies']
                ['output_structure_path'],
                **paths["step4_gmx_makendx"],
                properties_makendx=prop["step4_gmx_makendx"],
                **paths["step5_gmx_grompp"],
                properties_grompp=prop["step5_gmx_grompp"],
                **paths["step6_gmx_mdrun"],
                properties_mdrun=prop["step6_gmx_mdrun"])

            # step7_gmx_grompp
            global_log.info(
                ensemble + " " + pdb_name +
                " Step 7: gmx grompp: Creating portable binary run file for system equilibration"
            )
            grompp_pc(**paths["step7_gmx_grompp"],
                      properties=prop["step7_gmx_grompp"])

            # step8_gmx_mdrun
            global_log.info(ensemble + " " + pdb_name +
                            " Step 8: gmx mdrun: Running system equilibration")
            mdrun_pc(**paths["step8_gmx_mdrun"],
                     properties=prop["step8_gmx_mdrun"])

            # step9_gmx_grompp
            global_log.info(
                ensemble + " " + pdb_name +
                " Step 9: Creating portable binary run file for thermodynamic integration (ti)"
            )
            grompp_pc(**paths["step9_gmx_grompp"],
                      properties=prop["step9_gmx_grompp"])

            # step10_gmx_mdrun
            global_log.info(
                ensemble + " " + pdb_name +
                " Step 10: gmx mdrun: Running thermodynamic integration")
            mdrun_dhdl_pc(**paths["step10_gmx_mdrun"],
                          properties=prop["step10_gmx_mdrun"])

            # Collect the dH/dl file per thermodynamic state for step11
            if ensemble == "stateA":
                dhdl_paths_listA.append(
                    paths["step10_gmx_mdrun"]["output_dhdl_path"])
            elif ensemble == "stateB":
                dhdl_paths_listB.append(
                    paths["step10_gmx_mdrun"]["output_dhdl_path"])

    # Creating zip file containing all the dhdl files
    dhdlA_path = os.path.join(
        global_prop["step11_pmx_analyse"]['working_dir_path'], 'dhdlA.zip')
    dhdlB_path = os.path.join(
        global_prop["step11_pmx_analyse"]['working_dir_path'], 'dhdlB.zip')
    # Synchronize on every dH/dl COMPSs future before zipping
    for dhdl_file in dhdl_paths_listA:
        compss_wait_on_file(dhdl_file)
    for dhdl_file in dhdl_paths_listB:
        compss_wait_on_file(dhdl_file)
    fu.zip_list(dhdlA_path, dhdl_paths_listA)
    fu.zip_list(dhdlB_path, dhdl_paths_listB)

    # step11_pmx_analyse
    # NOTE(review): 'ensemble' here is the last value left over from the loop
    # above — the message is prefixed with whichever ensemble iterated last
    global_log.info(
        ensemble +
        " Step 11: pmx analyse: Calculate free energies from fast growth thermodynamic integration simulations"
    )
    global_paths["step11_pmx_analyse"]["input_a_xvg_zip_path"] = dhdlA_path
    global_paths["step11_pmx_analyse"]["input_b_xvg_zip_path"] = dhdlB_path
    analyse_pc(**global_paths["step11_pmx_analyse"],
               properties=global_prop["step11_pmx_analyse"])

    # Final summary
    elapsed_time = time.time() - start_time
    global_log.info('')
    global_log.info('')
    global_log.info('Execution successful: ')
    global_log.info('  Workflow_path: %s' % conf.get_working_dir_path())
    global_log.info('  Config File: %s' % config)
    if system:
        global_log.info('  System: %s' % system)
    global_log.info('')
    global_log.info('Elapsed time: %.1f minutes' % (elapsed_time / 60))
    global_log.info('')
def launch(self) -> int:
    """Execute the :class:`Cmip <cmip.cmip.Cmip>` object.

    Validates the output PDB name, builds the combined CMIP parameters file,
    assembles and runs the CMIP command line, and post-processes the output
    PDB (extension fix-up and ATOMTM tag replacement).

    Returns:
        int: Return code of the executed CMIP process.

    Raises:
        ValueError: If ``output_pdb_path`` does not end in ``.pdb`` or its
            file name contains underscores (CMIP mangles such names).
    """
    # Setup Biobb
    if self.check_restart():
        return 0

    # Check if output_pdb_path ends with ".pdb" and does not contain underscores
    if self.io_dict['out']['output_pdb_path']:
        if (not self.io_dict['out']['output_pdb_path'].endswith('.pdb')
            ) or ("_" in str(
                Path(self.io_dict['out']['output_pdb_path']).name)):
            # Build the message once; log it and raise with the same text.
            # Fix: the original referenced an undefined local 'out_log'
            # (NameError) — the decorator-provided logger is self.out_log.
            error_msg = (
                f"ERROR: output_pdb_path ({self.io_dict['out']['output_pdb_path']}) name must end in .pdb and not contain underscores"
            )
            fu.log(error_msg, self.out_log, self.global_log)
            raise ValueError(error_msg)

    # Merge preset, optional user params file and properties into one
    # combined CMIP parameters file placed in its own unique dir
    combined_params_dir = fu.create_unique_dir()
    self.io_dict['in']['combined_params_path'] = create_params_file(
        output_params_path=str(
            Path(combined_params_dir).joinpath(
                self.io_dict['in']['combined_params_path'])),
        input_params_path=self.io_dict['in'].get('input_params_path'),
        params_preset_dict=params_preset(
            execution_type=self.execution_type),
        params_properties_dict=self.params)

    self.stage_files()

    # Mandatory arguments
    self.cmd = [
        self.cmip_path, '-i',
        self.stage_io_dict['in']['combined_params_path'], '-vdw',
        self.stage_io_dict['in']['input_vdw_params_path'], '-hs',
        self.stage_io_dict['in']['input_pdb_path']
    ]

    # Optional probe PDB (-pr); existence is checked on the host path
    if self.stage_io_dict["in"].get("input_probe_pdb_path") and Path(
            self.io_dict["in"].get("input_probe_pdb_path")).exists():
        self.cmd.append('-pr')
        self.cmd.append(
            self.stage_io_dict["in"].get("input_probe_pdb_path"))

    # Optional outputs: append each (flag, path) pair only when requested
    if self.stage_io_dict["out"].get("output_pdb_path"):
        self.cmd.append('-outpdb')
        self.cmd.append(self.stage_io_dict['out']['output_pdb_path'])

    if self.stage_io_dict["out"].get("output_grd_path"):
        self.cmd.append('-grdout')
        self.cmd.append(self.stage_io_dict["out"]["output_grd_path"])

    if self.stage_io_dict["out"].get("output_cube_path"):
        self.cmd.append('-cube')
        self.cmd.append(self.stage_io_dict["out"]["output_cube_path"])

    if self.stage_io_dict["out"].get("output_rst_path"):
        self.cmd.append('-rst')
        self.cmd.append(self.stage_io_dict["out"]["output_rst_path"])

    if self.stage_io_dict["out"].get("output_byat_path"):
        self.cmd.append('-byat')
        self.cmd.append(self.stage_io_dict["out"]["output_byat_path"])

    if self.stage_io_dict["out"].get("output_log_path"):
        self.cmd.append('-o')
        self.cmd.append(self.stage_io_dict["out"]["output_log_path"])

    # Run Biobb block
    self.run_biobb()

    # Copy files to host
    self.copy_to_host()

    # CMIP removes or adds a .pdb extension from pdb output name:
    # rename whichever variant it produced back to the requested path
    if self.io_dict['out'].get('output_pdb_path'):
        output_pdb_path = self.io_dict['out'].get('output_pdb_path')
        if self.container_path:
            output_pdb_path = str(
                Path(self.stage_io_dict["unique_dir"]).joinpath(
                    Path(self.io_dict['out'].get('output_pdb_path')).name))
        if Path(output_pdb_path[:-4]).exists():
            shutil.move(output_pdb_path[:-4],
                        self.io_dict['out'].get('output_pdb_path'))
        elif Path(output_pdb_path + ".pdb").exists():
            shutil.move(output_pdb_path + ".pdb",
                        self.io_dict['out'].get('output_pdb_path'))

    # Replace "ATOMTM" tag for "ATOM  " (6-char record name keeps the
    # fixed-width PDB columns aligned)
    output_pdb_path = self.io_dict['out'].get('output_pdb_path')
    if output_pdb_path:
        with open(output_pdb_path) as pdb_file:
            list_pdb_lines = pdb_file.readlines()
        with open(output_pdb_path, 'w') as pdb_file:
            for line in list_pdb_lines:
                pdb_file.write(line.replace('ATOMTM', 'ATOM  '))

    # Remove temporal files
    self.tmp_files.extend([combined_params_dir])
    self.remove_tmp_files()

    return self.return_code