def launch(self):
    """Launch the execution of the acpype module.

    Validates inputs and properties, optionally skips the step on restart,
    builds the acpype command line, executes it and moves the generated
    files to the configured output paths.

    Returns:
        int: return code of the acpype execution (0 on restart skip).
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [self.output_path_par, self.output_path_inp, self.output_path_top]
        if fu.check_complete_files(output_file_list):
            # fixed log message typo: "will the skipped" -> "will be skipped"
            fu.log('Restart is enabled, this step: %s will be skipped' % self.step, out_log, self.global_log)
            return 0

    # create unique name for temporary folder (created by acpype)
    self.unique_name = create_unique_name(6)

    # create command line instruction
    cmd = self.create_cmd(out_log, err_log)

    # execute cmd
    fu.log('Running %s, this execution can take a while' % self.acpype_path, out_log)
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log, self.global_log).launch()

    # move files to output_path and removes temporary folder
    process_output(self.unique_name,
                   self.basename + "." + self.unique_name + ".acpype",
                   self.remove_tmp,
                   self.basename,
                   get_default_value(self.__class__.__name__),
                   self.output_files,
                   out_log)
    return returncode
def launch(self) -> int:
    """Execute the :class:`FPocketSelect <fpocket.fpocket_select.FPocketSelect>` fpocket.fpocket_select.FPocketSelect object.

    Decompresses the input pockets zip, selects the files belonging to the
    requested pocket number and copies its PDB/PQR files to the output paths.

    Returns:
        int: 0 on success (or on restart skip).
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [
            self.io_dict["out"]["output_pocket_pdb"],
            self.io_dict["out"]["output_pocket_pqr"]
        ]
        if fu.check_complete_files(output_file_list):
            # fixed log message typo: "will the skipped" -> "will be skipped"
            fu.log(
                'Restart is enabled, this step: %s will be skipped' %
                self.step, out_log, self.global_log)
            return 0

    # create tmp_folder
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, out_log)

    # decompress the input_pockets_zip file to tmp_folder
    all_pockets = fu.unzip_list(
        zip_file=self.io_dict["in"]["input_pockets_zip"],
        dest_dir=self.tmp_folder,
        out_log=out_log)

    # Select only the requested pocket's files. Compare against the file
    # name with a digit-boundary check so that pocket 1 does not also
    # match pocket10, pocket11, ... (the original substring test did).
    pocket_id = 'pocket' + str(self.pocket)
    pockets_list = []
    for pocket_path in all_pockets:
        name = PurePath(pocket_path).name
        if name.startswith(pocket_id) and not name[len(pocket_id):len(pocket_id) + 1].isdigit():
            pockets_list.append(pocket_path)

    for p in pockets_list:
        if PurePath(p).suffix == '.pdb':
            fu.log(
                'Saving %s file' % self.io_dict["out"]["output_pocket_pdb"],
                out_log)
            shutil.copy(p, self.io_dict["out"]["output_pocket_pdb"])
        else:
            fu.log(
                'Saving %s file' % self.io_dict["out"]["output_pocket_pqr"],
                out_log)
            shutil.copy(p, self.io_dict["out"]["output_pocket_pqr"])

    if self.remove_tmp:
        # remove temporary folder
        fu.rm(self.tmp_folder)
        fu.log('Removed temporary folder: %s' % self.tmp_folder, out_log)

    return 0
def launch(self) -> int:
    """Execute the :class:`FPocket <fpocket.fpocket.FPocket>` fpocket.fpocket.FPocket object."""
    # Local loggers injected by the launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Validate paths/parameters and the declared properties
    self.check_data_params(out_log, err_log)
    fu.check_properties(self, self.properties)

    # Skip the step entirely when restarting over complete outputs
    if self.restart:
        output_file_list = [
            self.io_dict["out"]["output_pockets_zip"],
            self.io_dict["out"]["output_summary"]
        ]
        if fu.check_complete_files(output_file_list):
            fu.log('Restart is enabled, this step: %s will the skipped' % self.step, out_log, self.global_log)
            return 0

    # Work inside a fresh temporary folder
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, out_log)
    tmp_input = str(PurePath(self.tmp_folder).joinpath('input.pdb'))

    # Stage the input structure into the temporary folder
    shutil.copy(self.io_dict["in"]["input_pdb_path"], tmp_input)

    # Assemble the fpocket command line, appending each optional flag
    # only when the corresponding property is set
    cmd = [self.fpocket_path, '-f', tmp_input]
    optional_flags = (('-m', self.min_radius),
                      ('-M', self.max_radius),
                      ('-i', self.num_spheres))
    for flag, value in optional_flags:
        if value:
            cmd.extend([flag, str(value)])

    fu.log('Executing fpocket', out_log, self.global_log)
    cmd = fu.create_cmd_line(cmd, out_log=out_log, global_log=self.global_log)
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log, self.global_log).launch()

    # Collect the pockets and the summary from the temporary folder
    process_output_fpocket(self.tmp_folder,
                           self.io_dict["out"]["output_pockets_zip"],
                           self.io_dict["out"]["output_summary"],
                           self.sort_by,
                           self.remove_tmp,
                           out_log,
                           self.__class__.__name__)
    return returncode
def launch(self) -> int:
    """Execute the :class:`ExtractModelPDBQT <utils.extract_model_pdbqt.ExtractModelPDBQT>` utils.extract_model_pdbqt.ExtractModelPDBQT object.

    Parses the input PDBQT, verifies the requested model number exists and
    writes the lines of that model (without the MODEL/ENDMDL records) to
    the output path.

    Returns:
        int: 0 on success (or on restart skip).

    Raises:
        SystemExit: if the selected model is not present in the input structure.
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [self.io_dict["out"]["output_pdbqt_path"]]
        if fu.check_complete_files(output_file_list):
            # fixed log message typo: "will the skipped" -> "will be skipped"
            fu.log('Restart is enabled, this step: %s will be skipped' % self.step, out_log, self.global_log)
            return 0

    structure_name = PurePath(self.io_dict["in"]["input_pdbqt_path"]).name
    parser = Bio.PDB.PDBParser(QUIET=True)
    structPDB = parser.get_structure(structure_name, self.io_dict["in"]["input_pdbqt_path"])

    # map the parser's model ids to the 1-based numbering used by self.model
    models = [model.id + 1 for model in structPDB.get_models()]

    if self.model not in models:
        # build the message once; log it, then abort the step
        err_msg = self.__class__.__name__ + ': Selected model %d not found in %s structure.' % (self.model, self.io_dict["in"]["input_pdbqt_path"])
        fu.log(err_msg, out_log)
        raise SystemExit(err_msg)

    # copy the body of the selected model, dropping MODEL/ENDMDL records
    save = False
    lines = 0
    with open(self.io_dict["in"]["input_pdbqt_path"], "r") as input_pdb, open(self.io_dict["out"]["output_pdbqt_path"], "w") as output_pdb:
        for line in input_pdb:
            if line.startswith('MODEL') and line.split()[1] == str(self.model):
                save = True
            if line.startswith('ENDMDL'):
                save = False
            if save and not line.startswith('MODEL'):
                lines = lines + 1
                output_pdb.write(line)

    fu.log('Saving model %d to %s' % (self.model, self.io_dict["out"]["output_pdbqt_path"]), out_log)

    return 0
def launch(self):
    """Launches the execution of the Open Babel module."""
    # Loggers provided by the launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Validate the declared properties
    fu.check_properties(self, self.properties)

    # Skip the whole step when restarting over a complete output
    if self.restart and fu.check_complete_files([self.output_path]):
        fu.log('Restart is enabled, this step: %s will the skipped' % self.step, out_log, self.global_log)
        return 0

    # Build and run the command line, returning its exit code
    cmd = self.create_cmd(out_log, err_log)
    return cmd_wrapper.CmdWrapper(cmd, out_log, err_log, self.global_log).launch()
def launch(self) -> int:
    """Launches the execution of the GROMACS solvate module.

    Unzips the input topology, runs ``gmx solvate`` (optionally inside a
    container), re-zips the updated topology to the output path and cleans
    up the temporary files.

    Returns:
        int: return code of the solvate execution (0 on restart skip).
    """
    tmp_files = []

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check GROMACS version — only for a local (non-container) binary
    if not self.container_path:
        if self.gmx_version < 512:
            raise GromacsVersionError(
                "Gromacs version should be 5.1.2 or newer %d detected" %
                self.gmx_version)
        fu.log(
            "GROMACS %s %d version detected" %
            (self.__class__.__name__, self.gmx_version), out_log)

    # Restart if needed: skip when every declared output already exists
    if self.restart:
        if fu.check_complete_files(self.io_dict["out"].values()):
            fu.log(
                'Restart is enabled, this step: %s will the skipped' %
                self.step, out_log, self.global_log)
            return 0

    # Unzip topology to topology_out; the unzip folder is removed at the end
    top_file = fu.unzip_top(zip_file=self.input_top_zip_path, out_log=out_log)
    top_dir = str(Path(top_file).parent)
    tmp_files.append(top_dir)

    # Stage inputs into the container volume (no-op when container_path is falsy)
    container_io_dict = fu.copy_to_container(self.container_path,
                                             self.container_volume_path,
                                             self.io_dict)

    if self.container_path:
        # Copy the unzipped topology folder into the shared unique_dir and
        # re-point top_file at its path as seen from inside the container
        shutil.copytree(
            top_dir,
            str(
                Path(container_io_dict.get("unique_dir")).joinpath(
                    Path(top_dir).name)))
        top_file = str(
            Path(self.container_volume_path).joinpath(
                Path(top_dir).name, Path(top_file).name))

    # gmx solvate: -cp solute, -cs solvent, -o solvated gro, -p topology
    cmd = [
        self.gmx_path, 'solvate', '-cp',
        container_io_dict["in"]["input_solute_gro_path"], '-cs',
        self.input_solvent_gro_path, '-o',
        container_io_dict["out"]["output_gro_path"], '-p', top_file
    ]

    # Propagate a custom GMXLIB location through the environment if requested
    new_env = None
    if self.gmxlib:
        new_env = os.environ.copy()
        new_env['GMXLIB'] = self.gmxlib

    cmd = fu.create_cmd_line(
        cmd,
        container_path=self.container_path,
        host_volume=container_io_dict.get("unique_dir"),
        container_volume=self.container_volume_path,
        container_working_dir=self.container_working_dir,
        container_user_uid=self.container_user_id,
        container_shell_path=self.container_shell_path,
        container_image=self.container_image,
        out_log=out_log,
        global_log=self.global_log)
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log,
                                        self.global_log, new_env).launch()

    # Copy outputs back to the host (no-op without a container)
    fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)

    if self.container_path:
        # The container wrote the updated topology inside unique_dir on the host
        top_file = str(
            Path(container_io_dict.get("unique_dir")).joinpath(
                Path(top_dir).name, Path(top_file).name))

    # zip topology
    fu.log(
        'Compressing topology to: %s' %
        container_io_dict["out"]["output_top_zip_path"], out_log,
        self.global_log)
    fu.zip_top(zip_file=self.io_dict["out"]["output_top_zip_path"],
               top_file=top_file,
               out_log=out_log)

    tmp_files.append(container_io_dict.get("unique_dir"))
    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)

    return returncode
def launch(self) -> int:
    """Launches the insertion of a ligand ITP topology into a zipped GROMACS topology.

    Unzips the input topology, includes the ligand ITP (and the optional
    position-restraints ITP) right after the force-field include line,
    registers the ligand molecule in the ``[ molecules ]`` section and
    re-zips the result to the output path.

    Returns:
        int: 0 on success (or on restart skip).
    """
    tmp_files = []

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Restart if needed
    if self.restart:
        output_file_list = [self.io_dict['out'].get("output_top_zip_path")]
        if fu.check_complete_files(output_file_list):
            fu.log(
                'Restart is enabled, this step: %s will the skipped' %
                self.step, out_log, self.global_log)
            return 0

    # Unzip topology
    top_file = fu.unzip_top(
        zip_file=self.io_dict['in'].get("input_top_zip_path"),
        out_log=out_log)
    top_dir = str(Path(top_file).parent)
    tmp_files.append(top_dir)
    itp_name = str(Path(self.io_dict['in'].get("input_itp_path")).name)

    # Read the whole .top into memory, then delete the file: a new one is
    # written below under a step-prefixed name
    with open(top_file) as top_f:
        top_lines = top_f.readlines()
        top_f.close()  # NOTE(review): redundant — the with-block already closes it
    fu.rm(top_file)

    # Locate the force-field include line; the ligand include goes right after it
    forcefield_pattern = r'#include.*forcefield.itp\"'
    for index, line in enumerate(top_lines):
        if re.search(forcefield_pattern, line):
            break
    top_lines.insert(index + 1, '\n')
    top_lines.insert(index + 2, '; Including ligand ITP\n')
    top_lines.insert(index + 3, '#include "' + itp_name + '"\n')
    top_lines.insert(index + 4, '\n')
    if self.io_dict['in'].get("input_posres_itp_path"):
        # Optional position-restraint include, guarded by an #ifdef on posres_name
        top_lines.insert(index + 5, '; Ligand position restraints' + '\n')
        top_lines.insert(index + 6, '#ifdef ' + self.posres_name + '\n')
        top_lines.insert(
            index + 7, '#include "' + str(
                Path(self.io_dict['in'].get("input_posres_itp_path")).name) +
            '"\n')
        top_lines.insert(index + 8, '#endif' + '\n')
        top_lines.insert(index + 9, '\n')

    # Read the ligand molecule name: first non-comment line after [ moleculetype ]
    inside_moleculetype_section = False
    with open(self.io_dict['in'].get("input_itp_path")) as itp_file:
        moleculetype_pattern = r'\[ moleculetype \]'
        for line in itp_file:
            if re.search(moleculetype_pattern, line):
                inside_moleculetype_section = True
                continue
            if inside_moleculetype_section and not line.startswith(';'):
                moleculetype = line.strip().split()[0].strip()
                break

    # Find where to register the ligand inside [ molecules ]: after the
    # LAST non-comment line starting with 'Protein' (index_molecule keeps
    # the last match seen)
    molecules_pattern = r'\[ molecules \]'
    inside_molecules_section = False
    index_molecule = None
    molecule_string = moleculetype + (20 - len(moleculetype)) * ' ' + '1' + '\n'
    for index, line in enumerate(top_lines):
        if re.search(molecules_pattern, line):
            inside_molecules_section = True
            continue
        if inside_molecules_section and not line.startswith(
                ';') and line.upper().startswith('PROTEIN'):
            index_molecule = index
    # NOTE(review): truthiness test — an index of 0 would be treated as
    # "not found"; cannot occur here since [ molecules ] precedes any match
    if index_molecule:
        top_lines.insert(index_molecule + 1, molecule_string)
    else:
        top_lines.append(molecule_string)

    # Write the modified topology under a new step-prefixed name
    new_top = fu.create_name(path=top_dir,
                             prefix=self.prefix,
                             step=self.step,
                             name='ligand.top')
    with open(new_top, 'w') as new_top_f:
        new_top_f.write("".join(top_lines))

    # Ship the ligand ITP (and optional posres ITP) next to the topology
    shutil.copy2(self.io_dict['in'].get("input_itp_path"), top_dir)
    if self.io_dict['in'].get("input_posres_itp_path"):
        shutil.copy2(self.io_dict['in'].get("input_posres_itp_path"), top_dir)

    # zip topology
    fu.log(
        'Compressing topology to: %s' %
        self.io_dict['out'].get("output_top_zip_path"), out_log,
        self.global_log)
    fu.zip_top(zip_file=self.io_dict['out'].get("output_top_zip_path"),
               top_file=new_top,
               out_log=out_log)

    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)

    return 0
def launch(self) -> int:
    """Execute the :class:`Box <utils.box.Box>` utils.box.Box object.

    Reads the atom coordinates of the input selection (cavity PDB or
    pocket PQR), computes the bounding-box center and half-size and writes
    them as REMARK annotations (plus optional dummy box atoms) to the
    output PDB.

    Returns:
        int: 0 on success (or on restart skip).
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [self.io_dict["out"]["output_pdb_path"]]
        if fu.check_complete_files(output_file_list):
            # fixed log message typo: "will the skipped" -> "will be skipped"
            fu.log('Restart is enabled, this step: %s will be skipped' % self.step, out_log, self.global_log)
            return 0

    # check if cavity (pdb) or pocket (pqr)
    input_type = PurePath(self.io_dict["in"]["input_pdb_path"]).suffix.lstrip('.')
    if input_type == 'pdb':
        fu.log('Loading residue PDB selection from %s' % (self.io_dict["in"]["input_pdb_path"]), out_log, self.global_log)
    else:
        fu.log('Loading pocket PQR selection from %s' % (self.io_dict["in"]["input_pdb_path"]), out_log, self.global_log)

    # get input_pdb_path atoms coordinates
    selection_atoms_num = 0
    x_coordslist = []
    y_coordslist = []
    z_coordslist = []
    with open(self.io_dict["in"]["input_pdb_path"]) as infile:
        for line in infile:
            if line.startswith("HETATM") or line.startswith("ATOM"):
                # PDB format fixed columns (1-based): x = 31-38, y = 39-46,
                # z = 47-54, i.e. slices [30:38], [38:46], [46:54]. The old
                # slices started one column late, truncating the first
                # character of 8-character coordinates such as '-100.000'.
                # NOTE(review): PQR is whitespace-separated, not fixed-column;
                # this relies on PQR files keeping PDB-like columns — confirm
                # against the producing tool's output.
                x_coordslist.append(float(line[30:38].strip()))
                y_coordslist.append(float(line[38:46].strip()))
                z_coordslist.append(float(line[46:54].strip()))
                selection_atoms_num = selection_atoms_num + 1

    ## Compute binding site box size

    # compute box center as the mean of the selection coordinates
    selection_box_center = [
        np.average(x_coordslist),
        np.average(y_coordslist),
        np.average(z_coordslist)
    ]
    fu.log('Binding site center (Angstroms): %10.3f%10.3f%10.3f' % (selection_box_center[0], selection_box_center[1], selection_box_center[2]), out_log, self.global_log)

    # compute box size (half-lengths from the center to the max corner)
    selection_coords_max = np.amax([x_coordslist, y_coordslist, z_coordslist], axis=1)
    selection_box_size = selection_coords_max - selection_box_center
    if self.offset:
        fu.log('Adding %.1f Angstroms offset' % (self.offset), out_log, self.global_log)
        selection_box_size = [c + self.offset for c in selection_box_size]
    fu.log('Binding site size (Angstroms): %10.3f%10.3f%10.3f' % (selection_box_size[0], selection_box_size[1], selection_box_size[2]), out_log, self.global_log)

    # compute volume: sizes are half-lengths, hence the 2**3 factor
    vol = np.prod(selection_box_size) * 2**3
    fu.log('Volume (cubic Angstroms): %.0f' % (vol), out_log, self.global_log)

    # add box details as PDB remarks
    remarks = "REMARK BOX CENTER:%10.3f%10.3f%10.3f" % (selection_box_center[0], selection_box_center[1], selection_box_center[2])
    remarks += " SIZE:%10.3f%10.3f%10.3f" % (selection_box_size[0], selection_box_size[1], selection_box_size[2])

    selection_box_coords_txt = ""
    # add (optional) box coordinates as 8 ATOM records
    if self.box_coordinates:
        fu.log('Adding box coordinates', out_log, self.global_log)
        selection_box_coords_txt = get_box_coordinates(selection_box_center, selection_box_size)

    # 'w' mode starts at offset 0 — the previous explicit seek(0, 0) was a no-op
    with open(self.io_dict["out"]["output_pdb_path"], 'w') as f:
        f.write(remarks.rstrip('\r\n') + '\n' + selection_box_coords_txt)

    fu.log('Saving output PDB file (with box setting annotations): %s' % (self.io_dict["out"]["output_pdb_path"]), out_log, self.global_log)

    return 0
def launch(self) -> int:
    """Launches the execution of the GROMACS make_ndx module."""
    tmp_files = []

    # Loggers injected by the launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Verify the GROMACS version when running outside a container
    if not self.container_path:
        if self.gmx_version < 512:
            raise GromacsVersionError("Gromacs version should be 5.1.2 or newer %d detected" % self.gmx_version)
        fu.log("GROMACS %s %d version detected" % (self.__class__.__name__, self.gmx_version), out_log)

    # Skip the step entirely when restarting over complete outputs
    if self.restart and fu.check_complete_files(self.io_dict["out"].values()):
        fu.log('Restart is enabled, this step: %s will the skipped' % self.step, out_log, self.global_log)
        return 0

    # Stage inputs into the container volume (no-op without a container)
    container_io_dict = fu.copy_to_container(self.container_path, self.container_volume_path, self.io_dict)

    # Pipe the interactive selection into make_ndx through echo
    cmd = ['echo', '-e', '\'' + self.selection + '\\nq' + '\'', '|',
           self.gmx_path, 'make_ndx',
           '-f', container_io_dict["in"]["input_structure_path"],
           '-o', container_io_dict["out"]["output_ndx_path"]]

    # Reuse an existing index file when it is provided and present
    input_ndx = container_io_dict["in"].get("input_ndx_path")
    if input_ndx and Path(input_ndx).exists():
        cmd.extend(['-n', input_ndx])

    # Propagate a custom GMXLIB location through the environment if requested
    new_env = None
    if self.gmxlib:
        new_env = os.environ.copy()
        new_env['GMXLIB'] = self.gmxlib

    cmd = fu.create_cmd_line(cmd,
                             container_path=self.container_path,
                             host_volume=container_io_dict.get("unique_dir"),
                             container_volume=self.container_volume_path,
                             container_working_dir=self.container_working_dir,
                             container_user_uid=self.container_user_id,
                             container_shell_path=self.container_shell_path,
                             container_image=self.container_image,
                             out_log=out_log,
                             global_log=self.global_log)
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log, self.global_log, new_env).launch()

    # Bring outputs back to the host and clean up
    fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)
    tmp_files.append(container_io_dict.get("unique_dir"))
    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)

    return returncode
def launch(self) -> int:
    """Launches the execution of the GROMACS pdb2gmx module.

    Builds the pdb2gmx command line (optionally wrapped for a container and
    fed interactive histidine answers through echo), executes it, zips the
    generated topology to the output path and cleans up temporary files.

    Returns:
        int: return code of the pdb2gmx execution (0 on restart skip).
    """
    tmp_files = []

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check GROMACS version (only meaningful for a local binary)
    if not self.container_path:
        if self.gmx_version < 512:
            raise GromacsVersionError("Gromacs version should be 5.1.2 or newer %d detected" % self.gmx_version)
        fu.log("GROMACS %s %d version detected" % (self.__class__.__name__, self.gmx_version), out_log)

    # Restart if needed
    if self.restart:
        if fu.check_complete_files(self.io_dict["out"].values()):
            # fixed log message typo: "will the skipped" -> "will be skipped"
            fu.log('Restart is enabled, this step: %s will be skipped' % self.step, out_log, self.global_log)
            return 0

    container_io_dict = fu.copy_to_container(self.container_path, self.container_volume_path, self.io_dict)

    # pdb2gmx writes the topology/ITP under these step-prefixed names
    output_top_path = fu.create_name(prefix=self.prefix, step=self.step, name=self.output_top_path)
    output_itp_path = fu.create_name(prefix=self.prefix, step=self.step, name=self.output_itp_path)
    # register the files that will actually be created, so the cleanup below
    # removes them (the raw self.output_*_path names are never written)
    tmp_files.append(output_top_path)
    tmp_files.append(output_itp_path)

    cmd = [self.gmx_path, "pdb2gmx",
           "-f", container_io_dict["in"]["input_pdb_path"],
           "-o", container_io_dict["out"]["output_gro_path"],
           "-p", output_top_path,
           "-water", self.water_type,
           "-ff", self.force_field,
           "-i", output_itp_path]

    if self.his:
        # interactive histidine protonation: feed the answers through echo
        cmd.append("-his")
        cmd = ['echo', self.his, '|'] + cmd
    if self.ignh:
        cmd.append("-ignh")

    # Propagate a custom GMXLIB location through the environment if requested
    new_env = None
    if self.gmxlib:
        new_env = os.environ.copy()
        new_env['GMXLIB'] = self.gmxlib

    cmd = fu.create_cmd_line(cmd,
                             container_path=self.container_path,
                             host_volume=container_io_dict.get("unique_dir"),
                             container_volume=self.container_volume_path,
                             container_working_dir=self.container_working_dir,
                             container_user_uid=self.container_user_id,
                             container_shell_path=self.container_shell_path,
                             container_image=self.container_image,
                             out_log=out_log,
                             global_log=self.global_log)
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log, self.global_log, new_env).launch()
    fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)

    if self.container_path:
        # the container wrote the topology inside unique_dir on the host
        output_top_path = os.path.join(container_io_dict.get("unique_dir"), output_top_path)

    # zip topology
    fu.log('Compressing topology to: %s' % container_io_dict["out"]["output_top_zip_path"], out_log, self.global_log)
    fu.zip_top(zip_file=self.io_dict["out"]["output_top_zip_path"], top_file=output_top_path, out_log=out_log)

    tmp_files.append(container_io_dict.get("unique_dir"))
    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)

    return returncode
def launch(self) -> int:
    """Launch the topology generation.

    Builds a position-restraints ITP file for each (reference group,
    restrain group, chain) triplet from the index (.ndx) file, includes it
    in the corresponding chain ITP file and re-zips the topology.

    Returns:
        int: 0 on success (or on restart skip).
    """
    tmp_files = []

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Restart if needed
    if self.restart:
        output_file_list = [self.io_dict['out'].get("output_top_zip_path")]
        if fu.check_complete_files(output_file_list):
            # fixed log message typo: "will the skipped" -> "will be skipped"
            fu.log('Restart is enabled, this step: %s will be skipped' % self.step, out_log, self.global_log)
            return 0

    top_file = fu.unzip_top(zip_file=self.io_dict['in'].get("input_top_zip_path"), out_log=out_log)

    # Map every '[ group ]' header of the ndx file to the (start, stop)
    # line range of its atom list: start is the header line itself and
    # stop is the next header line (exclusive when slicing).
    index_dic = {}
    with open(self.io_dict['in'].get("input_ndx_path")) as ndx_file:  # was an unclosed open()
        lines = ndx_file.read().splitlines()
    for index, line in enumerate(lines):
        if line.startswith('['):
            index_dic[line] = index,
            # close the PREVIOUS group at the current header line; the old
            # code reassigned `label` first, which closed each group at its
            # own header and produced empty atom ranges downstream
            if index > 0:
                index_dic[label] = index_dic[label][0], index
            label = line
    index_dic[label] = index_dic[label][0], index
    fu.log('Index_dic: '+str(index_dic), out_log, self.global_log)

    # Parse "(ref, rest, chain), (ref, rest, chain), ..." into tuples
    self.ref_rest_chain_triplet_list = [tuple(elem.strip(' ()').replace(' ', '').split(',')) for elem in self.ref_rest_chain_triplet_list.split('),')]
    fu.log('ref_rest_chain_triplet_list: ' + str(self.ref_rest_chain_triplet_list), out_log, self.global_log)

    for reference_group, restrain_group, chain in self.ref_rest_chain_triplet_list:
        fu.log('Reference group: '+reference_group, out_log, self.global_log)
        fu.log('Restrain group: '+restrain_group, out_log, self.global_log)
        fu.log('Chain: '+chain, out_log, self.global_log)
        self.io_dict['out']["output_itp_path"] = fu.create_name(path=str(Path(top_file).parent), prefix=self.prefix, step=self.step, name=restrain_group+'.itp')

        # Mapping atoms from absolute enumeration to Chain relative enumeration
        fu.log('reference_group_index: start_closed:'+str(index_dic['[ '+reference_group+' ]'][0]+1)+' stop_open: '+str(index_dic['[ '+reference_group+' ]'][1]), out_log, self.global_log)
        reference_group_list = [int(elem) for line in lines[index_dic['[ '+reference_group+' ]'][0]+1: index_dic['[ '+reference_group+' ]'][1]] for elem in line.split()]
        fu.log('restrain_group_index: start_closed:'+str(index_dic['[ '+restrain_group+' ]'][0]+1)+' stop_open: '+str(index_dic['[ '+restrain_group+' ]'][1]), out_log, self.global_log)
        restrain_group_list = [int(elem) for line in lines[index_dic['[ '+restrain_group+' ]'][0]+1: index_dic['[ '+restrain_group+' ]'][1]] for elem in line.split()]
        # 1-based position of each restrained atom within the reference group
        selected_list = [reference_group_list.index(atom)+1 for atom in restrain_group_list]

        # Creating new ITP with restrictions
        with open(self.io_dict['out'].get("output_itp_path"), 'w') as f:
            fu.log('Creating: '+str(f)+' and adding the selected atoms force constants', out_log, self.global_log)
            f.write('[ position_restraints ]\n')
            f.write('; atom type fx fy fz\n')
            for atom in selected_list:
                f.write(str(atom)+' 1 '+self.force_constants+'\n')

        # Including new ITP in the corresponding ITP-chain file
        for file_dir in Path(top_file).parent.iterdir():
            if not file_dir.name.startswith("posre") and not file_dir.name.endswith("_pr.itp"):
                if fnmatch.fnmatch(str(file_dir), "*_chain_"+chain+".itp"):
                    with open(str(file_dir), 'a') as f:
                        fu.log('Opening: '+str(f)+' and adding the ifdef include statement', out_log, self.global_log)
                        f.write('\n')
                        f.write('; Include Position restraint file\n')
                        f.write('#ifdef CUSTOM_POSRES\n')
                        f.write('#include "'+str(Path(self.io_dict['out'].get("output_itp_path")).name)+'"\n')
                        f.write('#endif\n')

    # zip topology
    fu.zip_top(zip_file=self.io_dict['out'].get("output_top_zip_path"), top_file=top_file, out_log=out_log)

    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)

    return 0
def launch(self):
    """Launches the execution of the template_container module."""
    # Loggers provided by the launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Validate the declared properties
    fu.check_properties(self, self.properties)

    # Restart: skip the step when every output is already complete
    if self.restart and fu.check_complete_files([self.io_dict['out']['output_file_path']]):
        fu.log('Restart is enabled, this step: %s will the skipped' % self.step, out_log, self.global_log)
        return 0

    # Stage inputs inside the container volume when needed
    container_io_dict = fu.copy_to_container(self.container_path, self.container_volume_path, self.io_dict)
    unique_dir = container_io_dict.get('unique_dir')

    # Gather the executable flags
    instructions = ['-j']
    if self.boolean_property:
        instructions.append('-v')
        fu.log('Appending optional boolean property', out_log, self.global_log)

    # Assemble the command line: binary, flags, output path, required input
    cmd = [self.executable_binary_property,
           ' '.join(instructions),
           container_io_dict['out']['output_file_path'],
           container_io_dict['in']['input_file_path1']]
    fu.log('Creating command line with instructions and required arguments', out_log, self.global_log)

    # Optional second input file, appended only when provided
    if container_io_dict['in']['input_file_path2']:
        cmd.append(container_io_dict['in']['input_file_path2'])
        fu.log('Appending optional argument to command line', out_log, self.global_log)

    # Wrap the command with the container-specific invocation syntax
    cmd = fu.create_cmd_line(cmd,
                             container_path=self.container_path,
                             host_volume=unique_dir,
                             container_volume=self.container_volume_path,
                             container_working_dir=self.container_working_dir,
                             container_user_uid=self.container_user_id,
                             container_image=self.container_image,
                             container_shell_path=self.container_shell_path,
                             out_log=out_log,
                             global_log=self.global_log)

    # Launch execution
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log, self.global_log).launch()

    # Bring outputs back to the host when running containerized
    fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)

    # Remove the unique working directory
    if self.remove_tmp and unique_dir:
        fu.rm(unique_dir)
        fu.log('Removed: %s' % str(unique_dir), out_log)

    return returncode
def launch(self) -> int:
    """Execute the :class:`BindingSite <utils.bindingsite.BindingSite>` utils.bindingsite.BindingSite object.

    Superimposes the ligands of each cluster member onto the input
    structure by sequence alignment + CA superposition, then writes the
    input-structure residues within ``self.radius`` of any superimposed
    ligand as the binding site PDB.

    Returns:
        int: 0 on success (or on restart skip).

    Raises:
        SystemExit: when no AA sequence can be extracted from the input, or
            when no CA atoms can be matched for a cluster member.
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [self.io_dict["out"]["output_pdb_path"]]
        if fu.check_complete_files(output_file_list):
            fu.log(
                'Restart is enabled, this step: %s will the skipped' %
                self.step, out_log, self.global_log)
            return 0

    # Parse structure
    fu.log(
        'Loading input PDB structure %s' %
        (self.io_dict["in"]["input_pdb_path"]), out_log, self.global_log)
    structure_name = PurePath(self.io_dict["in"]["input_pdb_path"]).name
    parser = Bio.PDB.PDBParser(QUIET=True)
    structPDB = parser.get_structure(structure_name,
                                     self.io_dict["in"]["input_pdb_path"])

    # keep only the first model, if any
    if len(structPDB):
        structPDB = structPDB[0]

    # Use only one chain
    n_chains = structPDB.get_list()
    if len(n_chains) != 1:
        fu.log(
            'More than one chain found in the input PDB structure. Using only the first chain to find the binding site',
            out_log, self.global_log)

    # reduce structPDB to a single chain
    # NOTE(review): this loop ends with the LAST chain, not the first as the
    # log message above claims — confirm intended behavior
    for struct_chain in structPDB.get_chains():
        structPDB = struct_chain

    # Get AA sequence
    structPDB_seq = get_pdb_sequence(structPDB)
    if len(structPDB_seq) == 0:
        fu.log(
            self.__class__.__name__ +
            ': Cannot extract AA sequence from the input PDB structure %s. Wrong format?'
            % self.io_dict["in"]["input_pdb_path"], out_log)
        raise SystemExit(
            self.__class__.__name__ +
            ': Cannot extract AA sequence from the input PDB structure %s. Wrong format?'
            % self.io_dict["in"]["input_pdb_path"])
    else:
        fu.log(
            'Found %s residues in %s' %
            (len(structPDB_seq), self.io_dict["in"]["input_pdb_path"]),
            out_log)

    # create temporary folder for decompressing the input_clusters_zip file
    unique_dir = PurePath(fu.create_unique_dir())
    fu.log('Creating %s temporary folder' % unique_dir, out_log,
           self.global_log)

    # decompress the input_clusters_zip file
    cluster_list = fu.unzip_list(
        zip_file=self.io_dict["in"]["input_clusters_zip"],
        dest_dir=unique_dir,
        out_log=out_log)

    clusterPDB_ligands_aligned = []
    clusterPDB_ligands_num = 0

    fu.log('Iterating on all clusters:', out_log)

    for idx, cluster_path in enumerate(cluster_list):
        cluster_name = PurePath(cluster_path).stem
        fu.log(' ', out_log)
        fu.log('------------ Iteration #%s --------------' % (idx + 1),
               out_log)
        fu.log('Cluster member: %s' % cluster_name, out_log)

        # Load and Parse PDB (first model only)
        clusterPDB = {}
        clusterPDB = parser.get_structure(cluster_name, cluster_path)[0]

        # Use only the first chain
        # NOTE(review): as above, this keeps the LAST chain of the member
        for cluster_chain in clusterPDB.get_chains():
            clusterPDB = cluster_chain

        # Looking for ligands
        clusterPDB_ligands = get_ligand_residues(clusterPDB)
        if (len(clusterPDB_ligands)) == 0:
            fu.log(
                'No ligands found that could guide the binding site search. Ignoring this member: %s'
                % cluster_name, out_log)
            continue

        # Selecting the largest ligand, if more than one
        lig_atoms_num = 0
        clusterPDB_ligand = {}
        if self.ligand:
            # a specific ligand residue name was requested
            if self.ligand in [
                    x.get_resname() for x in clusterPDB_ligands
            ]:
                for lig in clusterPDB_ligands:
                    if lig.get_resname() == self.ligand:
                        clusterPDB_ligand = lig
                        lig_atoms_num = len(lig.get_list())
                        fu.log(
                            'Ligand found: %s  (%s atoms)' %
                            (lig.get_resname(), lig_atoms_num), out_log)
            else:
                fu.log(
                    'Ligand %s not found in %s cluster member, skipping this cluster'
                    % (self.ligand, cluster_name), out_log)
                continue
        else:
            # no ligand requested: pick the one with the most atoms
            if len(clusterPDB_ligands) > 1:
                for lig_res in clusterPDB_ligands:
                    lig_res_atoms_num = len(lig_res.get_list())
                    fu.log(
                        'Ligand found: %s  (%s atoms)' %
                        (lig_res.get_resname(), lig_res_atoms_num), out_log)
                    if lig_res_atoms_num > lig_atoms_num:
                        clusterPDB_ligand = lig_res
                        lig_atoms_num = lig_res_atoms_num
            else:
                clusterPDB_ligand = clusterPDB_ligands[0]
                lig_atoms_num = len(clusterPDB_ligands[0].get_list())

        fu.log(
            'Member accepted. Valid ligand found: %s (%s atoms)' %
            (clusterPDB_ligand.get_resname(), lig_atoms_num), out_log)

        ## Mapping residues by sequence alignment to match structPDB-clusterPDB paired residues

        # Get AA sequence
        clusterPDB_seq = get_pdb_sequence(clusterPDB)

        # Pairwise align
        aln, residue_map = align_sequences(structPDB_seq, clusterPDB_seq,
                                           self.matrix_name, self.gap_open,
                                           self.gap_extend)
        fu.log(
            'Matching residues to input PDB structure. Alignment is:\n%s' %
            (aln[1]), out_log)

        # Calculate (gapless) sequence identity
        seq_identity, gap_seq_identity = calculate_alignment_identity(
            aln[0], aln[1])
        fu.log('Sequence identity (%%): %s' % (seq_identity), out_log)
        fu.log('Gap less identity (%%): %s' % (gap_seq_identity), out_log)

        ## Selecting aligned CA atoms from first model, first chain
        struct_atoms = []
        cluster_atoms = []
        for struct_res in residue_map:
            try:
                cluster_atoms.append(
                    clusterPDB[residue_map[struct_res]]['CA'])
                struct_atoms.append(
                    get_residue_by_id(structPDB, struct_res)['CA'])
            except KeyError:
                # residue without a CA atom (e.g. incomplete): skip the pair
                fu.log(
                    'Cannot find CA atom for residue %s  (input PDB  %s)' %
                    (get_residue_by_id(
                        structPDB, struct_res).get_resname(), struct_res),
                    out_log)
                pass

        if len(cluster_atoms) == 0:
            fu.log(
                self.__class__.__name__ +
                ': Cannot find CA atoms (1st model, 1st chain) in cluster member %s when aligning against %s. Ignoring this member.'
                % (cluster_name, structure_name), out_log)
            raise SystemExit(
                self.__class__.__name__ +
                ': Cannot find CA atoms (1st model, 1st chain) in cluster member %s when aligning against %s. Ignoring this member.'
                % (cluster_name, structure_name))
        else:
            fu.log(
                'Superimposing %s aligned protein residues' %
                (len(cluster_atoms)), out_log)

        ## Align against input structure
        si = Bio.PDB.Superimposer()
        si.set_atoms(struct_atoms, cluster_atoms)
        si.apply(clusterPDB.get_atoms())
        fu.log('RMSD: %s' % (si.rms), out_log)

        # Save transformed structure (and ligand)
        clusterPDB_ligand_aligned = clusterPDB[clusterPDB_ligand.get_id()]
        fu.log('Saving transformed ligand coordinates', out_log)

        clusterPDB_ligands_aligned.append(clusterPDB_ligand_aligned)

        ## Stop after n accepted cluster members
        # NOTE(review): the '>' test lets max_num_ligands + 1 members be
        # accepted before breaking — confirm intended limit semantics
        clusterPDB_ligands_num += 1
        if clusterPDB_ligands_num > self.max_num_ligands:
            break

    fu.log(' ', out_log)
    fu.log('----------------------------------------', out_log)
    fu.log(
        'All transformed ligand coordinates saved, getting binding site residues',
        out_log)

    ## Select binding site atoms as those around cluster superimposed ligands
    fu.log(
        'Defining binding site residues as those %sÅ around the %s cluster superimposed ligands'
        % (self.radius, clusterPDB_ligands_num), out_log)

    # select Atoms from aligned ligands
    clusterPDB_ligands_aligned2 = [
        res for res in clusterPDB_ligands_aligned
    ]
    clusterPDB_ligands_aligned_atoms = Bio.PDB.Selection.unfold_entities(
        clusterPDB_ligands_aligned2, 'A')

    # select Atoms from input PDB structure
    structPDB_atoms = [atom for atom in structPDB.get_atoms()]

    # compute neighbors for aligned ligands in the input PDB structure
    structPDB_bs_residues_raw = {}
    structPDB_neighbors = Bio.PDB.NeighborSearch(structPDB_atoms)
    for ligand_atom in clusterPDB_ligands_aligned_atoms:
        # look for PDB residues within self.radius Å of each ligand atom
        k_l = structPDB_neighbors.search(ligand_atom.coord, self.radius,
                                         'R')
        for k in k_l:
            structPDB_bs_residues_raw[k.get_id()] = k.get_full_id()

    ## Save binding site to PDB
    io = Bio.PDB.PDBIO()
    fu.log(
        'Writing binding site residues into %s' %
        (self.io_dict["out"]["output_pdb_path"]), out_log)

    # unselect input PDB atoms not in binding site
    # structPDB_bs_atoms counts preserved atoms (currently informational only)
    structPDB_bs_atoms = 0
    # hetero/water residue name pattern used to drop non-protein residues
    p = re.compile('H_|W_|W')
    residue_ids_to_remove = []
    for res in structPDB.get_residues():
        if res.id not in structPDB_bs_residues_raw.keys():
            # add residue to residue_ids_to_remove list
            residue_ids_to_remove.append(res.id)
        elif p.match(res.resname):
            # add residue to residue_ids_to_remove list
            residue_ids_to_remove.append(res.id)
        else:
            # this residue will be preserved
            structPDB_bs_atoms += len(res.get_list())

    # unselect input PDB atoms not in binding site
    for chain in structPDB:
        for idr in residue_ids_to_remove:
            chain.detach_child(idr)

    # write PDB file
    io.set_structure(structPDB)
    io.save(self.io_dict["out"]["output_pdb_path"])

    if self.remove_tmp:
        # remove temporary folder
        fu.rm(unique_dir)
        fu.log(' ', out_log)
        fu.log('----------------------------------------', out_log)
        fu.log('Removed temporary folder: %s' % unique_dir, out_log)

    return 0
def launch(self) -> int:
    """Launches the execution of the GROMACS mdrun module.

    Builds the ``gmx mdrun`` command line (optionally prefixed by an MPI
    launcher and/or wrapped for container execution), runs it, copies the
    results back to the host and removes temporary files.

    Returns:
        int: Return code of the executed command.
    """
    tmp_files = []

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check GROMACS version; only meaningful for a local, non-MPI binary
    # (a containerized or MPI-wrapped gmx cannot be probed here)
    if (not self.mpi_bin) and (not self.container_path):
        if self.gmx_version < 512:
            raise GromacsVersionError(
                "Gromacs version should be 5.1.2 or newer %d detected"
                % self.gmx_version)
        fu.log(
            "GROMACS %s %d version detected"
            % (self.__class__.__name__, self.gmx_version), out_log)

    # Restart if needed: skip the step when all outputs already exist
    if self.restart:
        if fu.check_complete_files(self.io_dict["out"].values()):
            fu.log(
                'Restart is enabled, this step: %s will be skipped'
                % self.step, out_log, self.global_log)
            return 0

    container_io_dict = fu.copy_to_container(self.container_path,
                                             self.container_volume_path,
                                             self.io_dict)

    cmd = [self.gmx_path, 'mdrun',
           '-s', container_io_dict["in"]["input_tpr_path"],
           '-o', container_io_dict["out"]["output_trr_path"],
           '-c', container_io_dict["out"]["output_gro_path"],
           '-e', container_io_dict["out"]["output_edr_path"],
           '-g', container_io_dict["out"]["output_log_path"],
           # str() guards against num_threads being provided as an int
           '-nt', str(self.num_threads)]

    if self.use_gpu:
        # Offload non-bonded and PME work to the GPU
        cmd += ["-nb", "gpu", "-pme", "gpu"]

    # Prepend the MPI launcher (e.g. mpirun) and its options, if any
    if self.mpi_bin:
        mpi_cmd = [self.mpi_bin]
        if self.mpi_np:
            mpi_cmd.append('-np')
            mpi_cmd.append(str(self.mpi_np))
        if self.mpi_hostlist:
            mpi_cmd.append('-hostfile')
            mpi_cmd.append(self.mpi_hostlist)
        cmd = mpi_cmd + cmd

    # Optional outputs are only requested when a path was provided
    if container_io_dict["out"].get("output_xtc_path"):
        cmd.append('-x')
        cmd.append(container_io_dict["out"]["output_xtc_path"])
    if container_io_dict["out"].get("output_cpt_path"):
        cmd.append('-cpo')
        cmd.append(container_io_dict["out"]["output_cpt_path"])
    if container_io_dict["out"].get("output_dhdl_path"):
        cmd.append('-dhdl')
        cmd.append(container_io_dict["out"]["output_dhdl_path"])

    # Propagate a custom GMXLIB location through the environment if set
    new_env = None
    if self.gmxlib:
        new_env = os.environ.copy()
        new_env['GMXLIB'] = self.gmxlib

    cmd = fu.create_cmd_line(cmd, container_path=self.container_path,
                             host_volume=container_io_dict.get("unique_dir"),
                             container_volume=self.container_volume_path,
                             container_working_dir=self.container_working_dir,
                             container_user_uid=self.container_user_id,
                             container_shell_path=self.container_shell_path,
                             container_image=self.container_image,
                             out_log=out_log, global_log=self.global_log)
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log,
                                        self.global_log, new_env).launch()
    fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)

    tmp_files.append(container_io_dict.get("unique_dir"))
    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)

    return returncode
def launch(self) -> int:
    """Execute the :class:`FPocketFilter <fpocket.fpocket_filter.FPocketFilter>` fpocket.fpocket_filter.FPocketFilter object.

    Filters the pockets in the input summary by score, druggability score
    and/or volume ranges, then zips the matching pocket files.

    Returns:
        int: 0 (also when no pocket matches the requested ranges).
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    # Restart: skip the step if the output zip is already complete
    if self.restart:
        output_file_list = [self.io_dict["out"]["output_filter_pockets_zip"]]
        if fu.check_complete_files(output_file_list):
            fu.log('Restart is enabled, this step: %s will be skipped'
                   % self.step, out_log, self.global_log)
            return 0

    # load input_summary into a dictionary
    with open(self.io_dict["in"]["input_summary"]) as json_file:
        data = json.load(json_file)

    # build the list of matcher predicates plus a log-friendly ranges dict
    search_list = []
    ranges = {}
    if self.score:
        check_range('score', self.score, [0, 1], out_log,
                    self.__class__.__name__)
        search_list.append(self.score_matcher(self.score))
        ranges['score'] = self.score
    if self.druggability_score:
        check_range('druggability_score', self.druggability_score, [0, 1],
                    out_log, self.__class__.__name__)
        search_list.append(
            self.druggability_score_matcher(self.druggability_score))
        ranges['druggability_score'] = self.druggability_score
    if self.volume:
        check_range('volume', self.volume, [0, 10000], out_log,
                    self.__class__.__name__)
        search_list.append(self.volume_matcher(self.volume))
        ranges['volume'] = self.volume

    fu.log('Performing a search under the next parameters: %s'
           % (', '.join('{0}: {1}'.format(k, v) for k, v in ranges.items())),
           out_log)

    # keep only the pockets that satisfy every active matcher
    search = [x for x in data if all(f(data[x]) for f in search_list)]

    if not search:
        fu.log('No matches found', out_log)
        return 0

    # str.join instead of quadratic string concatenation in a loop
    str_out = ''.join(
        '\n**********\n%s\n**********\nscore: %s\ndruggability_score: %s\nvolume: %s\n'
        % (s, data[s]["score"], data[s]["druggability_score"],
           data[s]["volume"])
        for s in search)

    fu.log('Found %d matches:%s' % (len(search), str_out), out_log)

    # create tmp_folder
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, out_log)

    process_output_fpocket_filter(
        search, self.tmp_folder,
        self.io_dict["in"]["input_pockets_zip"],
        self.io_dict["out"]["output_filter_pockets_zip"],
        self.remove_tmp, out_log)

    return 0
def launch(self) -> int:
    """Launches the execution of the GROMACS select module.

    Runs ``gmx select`` on the input structure with the configured selection
    expression, optionally inside a Singularity or Docker container.

    Returns:
        int: Return code of the executed command.
    """
    tmp_files = []

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check GROMACS version (only possible for a local binary)
    if not self.container_path:
        if self.gmx_version < 512:
            raise GromacsVersionError(
                "Gromacs version should be 5.1.2 or newer %d detected"
                % self.gmx_version)
        fu.log("GROMACS %s %d version detected"
               % (self.__class__.__name__, self.gmx_version), out_log)

    # Restart if needed
    if self.restart:
        if fu.check_complete_files(self.io_dict["out"].values()):
            fu.log('Restart is enabled, this step: %s will be skipped'
                   % self.step, out_log, self.global_log)
            return 0

    container_io_dict = fu.copy_to_container(self.container_path,
                                             self.container_volume_path,
                                             self.io_dict)

    cmd = [self.gmx_path, 'select',
           '-s', container_io_dict["in"]["input_structure_path"],
           '-on', container_io_dict["out"]["output_ndx_path"]]

    # Optional index file: only pass it when the path actually exists
    if container_io_dict["in"].get("input_ndx_path") and pl.Path(
            container_io_dict["in"].get("input_ndx_path")).exists():
        cmd.append('-n')
        cmd.append(container_io_dict["in"].get("input_ndx_path"))

    # Quote the selection so multi-word expressions reach gmx as one argument
    cmd.append('-select')
    cmd.append("\'" + self.selection + "\'")

    # Propagate a custom GMXLIB location through the environment if set
    new_env = None
    if self.gmxlib:
        new_env = os.environ.copy()
        new_env['GMXLIB'] = self.gmxlib

    # Container execution: wrap the command for Singularity or Docker
    if self.container_path:
        if self.container_path.endswith('singularity'):
            fu.log('Using Singularity image %s' % self.container_image,
                   out_log, self.global_log)
            cmd = [self.container_path, 'exec', '--bind',
                   container_io_dict.get("unique_dir") + ':'
                   + self.container_volume_path,
                   self.container_image,
                   " ".join(cmd)]
        elif self.container_path.endswith('docker'):
            fu.log('Using Docker image %s' % self.container_image,
                   out_log, self.global_log)
            docker_cmd = [self.container_path, 'run', ]
            if self.container_working_dir:
                docker_cmd.append('-w')
                docker_cmd.append(self.container_working_dir)
            if self.container_volume_path:
                docker_cmd.append('-v')
                docker_cmd.append(container_io_dict.get("unique_dir") + ':'
                                  + self.container_volume_path)
            if self.container_user_id:
                docker_cmd.append('--user')
                docker_cmd.append(self.container_user_id)
            docker_cmd.append(self.container_image)
            docker_cmd.append(" ".join(cmd))
            cmd = docker_cmd

    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log,
                                        self.global_log, new_env).launch()
    fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)

    tmp_files.append(container_io_dict.get("unique_dir"))
    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)

    return returncode
def launch(self) -> int:
    """Launches the execution of the GROMACS grompp module.

    Examples:
        This is a use example of how to use the Grompp module from Python

        >>> from biobb_md.gromacs.grompp import Grompp
        >>> prop = { 'mdp':{ 'type': 'minimization', 'emtol':'500', 'nsteps':'5000'}}
        >>> Grompp(input_gro_path='/path/to/myStructure.gro', input_top_zip_path='/path/to/myTopology.zip', output_tpr_path='/path/to/NewCompiledBin.tpr', properties=prop).launch()

    Returns:
        int: Return code of the executed command.
    """
    tmp_files = []
    mdout = 'mdout.mdp'
    tmp_files.append(mdout)

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check GROMACS version (only possible for a local binary)
    if not self.container_path:
        if self.gmx_version < 512:
            raise GromacsVersionError(
                "Gromacs version should be 5.1.2 or newer %d detected"
                % self.gmx_version)
        fu.log("GROMACS %s %d version detected"
               % (self.__class__.__name__, self.gmx_version), out_log)

    # Restart if needed
    if self.restart:
        if fu.check_complete_files(self.io_dict["out"].values()):
            fu.log('Restart is enabled, this step: %s will be skipped'
                   % self.step, out_log, self.global_log)
            return 0

    # Unzip topology to topology_out
    top_file = fu.unzip_top(zip_file=self.input_top_zip_path, out_log=out_log)
    top_dir = str(Path(top_file).parent)
    tmp_files.append(top_dir)

    container_io_dict = fu.copy_to_container(self.container_path,
                                             self.container_volume_path,
                                             self.io_dict)

    # Use the user-supplied MDP file or generate one from the properties
    if self.input_mdp_path:
        self.output_mdp_path = self.input_mdp_path
    else:
        mdp_dir = fu.create_unique_dir()
        tmp_files.append(mdp_dir)
        self.output_mdp_path = self.create_mdp(
            path=str(Path(mdp_dir).joinpath(self.output_mdp_path)))

    md = self.mdp.get('type', 'minimization')
    if md not in ('index', 'free'):
        fu.log('Will run a %s md of %s steps' % (md, self.nsteps), out_log,
               self.global_log)
    elif md == 'index':
        # Pass the loggers, consistently with the sibling branches
        fu.log('Will create a TPR to be used as structure file', out_log,
               self.global_log)
    else:
        fu.log('Will run a %s md of %s'
               % (md,
                  fu.human_readable_time(int(self.nsteps) * float(self.dt))),
               out_log, self.global_log)

    # Container execution: copy MDP and topology inside the mapped volume
    if self.container_path:
        fu.log('Container execution enabled', out_log)
        shutil.copy2(self.output_mdp_path,
                     container_io_dict.get("unique_dir"))
        self.output_mdp_path = str(
            Path(self.container_volume_path).joinpath(
                Path(self.output_mdp_path).name))
        shutil.copytree(
            top_dir,
            str(Path(container_io_dict.get("unique_dir")).joinpath(
                Path(top_dir).name)))
        top_file = str(Path(self.container_volume_path).joinpath(
            Path(top_dir).name, Path(top_file).name))

    cmd = [self.gmx_path, 'grompp',
           '-f', self.output_mdp_path,
           '-c', container_io_dict["in"]["input_gro_path"],
           '-r', container_io_dict["in"]["input_gro_path"],
           '-p', top_file,
           '-o', container_io_dict["out"]["output_tpr_path"],
           '-po', mdout,
           # str() guards against maxwarn being provided as an int
           '-maxwarn', str(self.maxwarn)]

    # Optional checkpoint file
    if container_io_dict["in"].get("input_cpt_path") and Path(
            container_io_dict["in"]["input_cpt_path"]).exists():
        cmd.append('-t')
        if self.container_path:
            shutil.copy2(container_io_dict["in"]["input_cpt_path"],
                         container_io_dict.get("unique_dir"))
            cmd.append(str(Path(self.container_volume_path).joinpath(
                Path(container_io_dict["in"]["input_cpt_path"]).name)))
        else:
            cmd.append(container_io_dict["in"]["input_cpt_path"])
    # Optional index file
    if container_io_dict["in"].get("input_ndx_path") and Path(
            container_io_dict["in"]["input_ndx_path"]).exists():
        cmd.append('-n')
        if self.container_path:
            shutil.copy2(container_io_dict["in"]["input_ndx_path"],
                         container_io_dict.get("unique_dir"))
            # str() was missing here: a PosixPath in cmd breaks the later join
            cmd.append(str(Path(self.container_volume_path).joinpath(
                Path(container_io_dict["in"]["input_ndx_path"]).name)))
        else:
            cmd.append(container_io_dict["in"]["input_ndx_path"])

    # Propagate a custom GMXLIB location through the environment if set
    new_env = None
    if self.gmxlib:
        new_env = os.environ.copy()
        new_env['GMXLIB'] = self.gmxlib

    cmd = fu.create_cmd_line(cmd, container_path=self.container_path,
                             host_volume=container_io_dict.get("unique_dir"),
                             container_volume=self.container_volume_path,
                             container_working_dir=self.container_working_dir,
                             container_user_uid=self.container_user_id,
                             container_shell_path=self.container_shell_path,
                             container_image=self.container_image,
                             out_log=out_log, global_log=self.global_log)
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log,
                                        self.global_log, new_env).launch()
    fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)

    tmp_files.append(container_io_dict.get("unique_dir"))
    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)

    return returncode
def launch(self) -> int:
    """Launches the execution of the GROMACS editconf module.

    Builds a simulation box of the configured type and distance around the
    input structure using ``gmx editconf``.

    Returns:
        int: Return code of the executed command.
    """
    tmp_files = []

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check GROMACS version (only possible for a local binary)
    if not self.container_path:
        if self.gmx_version < 512:
            raise GromacsVersionError(
                "Gromacs version should be 5.1.2 or newer %d detected"
                % self.gmx_version)
        fu.log("GROMACS %s %d version detected"
               % (self.__class__.__name__, self.gmx_version), out_log)

    # Restart if needed
    if self.restart:
        if fu.check_complete_files(self.io_dict["out"].values()):
            fu.log('Restart is enabled, this step: %s will be skipped'
                   % self.step, out_log, self.global_log)
            return 0

    container_io_dict = fu.copy_to_container(self.container_path,
                                             self.container_volume_path,
                                             self.io_dict)

    cmd = [self.gmx_path, 'editconf',
           '-f', container_io_dict["in"]["input_gro_path"],
           '-o', container_io_dict["out"]["output_gro_path"],
           '-d', str(self.distance_to_molecule),
           '-bt', self.box_type]

    if self.center_molecule:
        cmd.append('-c')
        fu.log('Centering molecule in the box.', out_log, self.global_log)

    fu.log("Distance of the box to molecule: %6.2f"
           % self.distance_to_molecule, out_log, self.global_log)
    fu.log("Box type: %s" % self.box_type, out_log, self.global_log)

    # Propagate a custom GMXLIB location through the environment if set
    new_env = None
    if self.gmxlib:
        new_env = os.environ.copy()
        new_env['GMXLIB'] = self.gmxlib

    cmd = fu.create_cmd_line(cmd, container_path=self.container_path,
                             host_volume=container_io_dict.get("unique_dir"),
                             container_volume=self.container_volume_path,
                             container_working_dir=self.container_working_dir,
                             container_user_uid=self.container_user_id,
                             container_shell_path=self.container_shell_path,
                             container_image=self.container_image,
                             out_log=out_log, global_log=self.global_log)
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log,
                                        self.global_log, new_env).launch()
    fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)

    tmp_files.append(container_io_dict.get("unique_dir"))
    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)

    return returncode
def launch(self) -> int:
    """Execute the :class:`BoxResidues <utils.box_residues.BoxResidues>` utils.box_residues.BoxResidues object.

    Computes the center and half-size of a box enclosing the residues
    listed in ``resid_list`` and writes them as REMARK records (plus,
    optionally, 8 corner ATOM records) into the output PDB file.

    Returns:
        int: 0 on success.

    Raises:
        SystemExit: If none of the requested residues can be matched.
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [self.io_dict["out"]["output_pdb_path"]]
        if fu.check_complete_files(output_file_list):
            fu.log('Restart is enabled, this step: %s will be skipped'
                   % self.step, out_log, self.global_log)
            return 0

    # Parse structure
    fu.log('Loading input PDB structure %s'
           % (self.io_dict["in"]["input_pdb_path"]), out_log,
           self.global_log)
    structure_name = PurePath(self.io_dict["in"]["input_pdb_path"]).name
    parser = Bio.PDB.PDBParser(QUIET=True)
    structPDB = parser.get_structure(structure_name,
                                     self.io_dict["in"]["input_pdb_path"])

    # Work on the first model only
    if len(structPDB):
        structPDB = structPDB[0]

    ## Mapping residue structure into input structure
    fu.log('Mapping residue structure into input structure', out_log,
           self.global_log)

    # Listing residues to be selected from the residue structure
    # (Bio.PDB residue ids are (hetfield, resseq, icode) tuples)
    residPDB_res_list = []
    for residPDB_res in self.resid_list:
        if self.residue_offset:
            residPDB_res_list.append(
                (' ', residPDB_res + self.residue_offset, ' '))
        else:
            residPDB_res_list.append((' ', residPDB_res, ' '))

    selection_res_list = []
    selection_atoms_num = 0
    for struct_chain in structPDB:
        for struct_res in struct_chain:
            if struct_res.get_id() in residPDB_res_list:
                selection_res_list.append(struct_res)
                selection_atoms_num += len(struct_res.get_list())

    if len(selection_res_list) == 0:
        # Build the message once; it is both logged and raised
        err_msg = self.__class__.__name__ + (
            ': Cannot match any of the residues listed in [%s] into %s'
            % (', '.join(str(v) for v in self.resid_list),
               self.io_dict["in"]["input_pdb_path"]))
        fu.log(err_msg, out_log)
        raise SystemExit(err_msg)
    elif len(selection_res_list) != len(residPDB_res_list):
        fu.log('Cannot match all the residues listed in %s into %s. Found %s out of %s'
               % (', '.join(str(v) for v in self.resid_list),
                  self.io_dict["in"]["input_pdb_path"],
                  len(selection_res_list), len(residPDB_res_list)), out_log)
    else:
        fu.log('Selection of residues successfully matched', out_log,
               self.global_log)

    ## Compute binding site box size

    # compute box center as the mean coordinate of the selected atoms
    selection_box_center = sum(
        atom.coord for res in selection_res_list
        for atom in res.get_atoms()) / selection_atoms_num
    fu.log('Binding site center (Angstroms): %10.3f%10.3f%10.3f'
           % (selection_box_center[0], selection_box_center[1],
              selection_box_center[2]), out_log, self.global_log)

    # compute box size: half-extent from the center to the max coordinate
    selection_coords_max = np.amax(
        [atom.coord for res in selection_res_list
         for atom in res.get_atoms()], axis=0)
    selection_box_size = selection_coords_max - selection_box_center
    if self.offset:
        selection_box_size = [c + self.offset for c in selection_box_size]
    fu.log('Binding site size (Angstroms): %10.3f%10.3f%10.3f'
           % (selection_box_size[0], selection_box_size[1],
              selection_box_size[2]), out_log, self.global_log)

    # compute volume: sizes are half-extents, hence the 2**3 factor
    vol = np.prod(selection_box_size) * 2**3
    fu.log('Volume (cubic Angstroms): %.0f' % (vol), out_log,
           self.global_log)

    # add box details as PDB remarks
    remarks = "REMARK BOX CENTER:%10.3f%10.3f%10.3f" % (
        selection_box_center[0], selection_box_center[1],
        selection_box_center[2])
    remarks += " SIZE:%10.3f%10.3f%10.3f" % (
        selection_box_size[0], selection_box_size[1],
        selection_box_size[2])

    selection_box_coords_txt = ""
    # add (optional) box coordinates as 8 ATOM records
    if self.box_coordinates:
        fu.log('Adding box coordinates', out_log, self.global_log)
        selection_box_coords_txt = get_box_coordinates(
            selection_box_center, selection_box_size)

    with open(self.io_dict["out"]["output_pdb_path"], 'w') as f:
        f.write(remarks.rstrip('\r\n') + '\n' + selection_box_coords_txt)

    fu.log('Saving output PDB file (with box setting annotations): %s'
           % (self.io_dict["out"]["output_pdb_path"]), out_log,
           self.global_log)

    return 0
def launch(self):
    """Launches the execution of the template module.

    Copies the input file(s) into a temporary folder, builds the command
    line, executes it and optionally removes the temporary folder.

    Returns:
        int: Return code of the executed command.
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check the properties
    fu.check_properties(self, self.properties)

    # Restart
    if self.restart:
        # 4. Include here all output file paths
        output_file_list = [self.io_dict['out']['output_file_path']]
        if fu.check_complete_files(output_file_list):
            fu.log('Restart is enabled, this step: %s will be skipped'
                   % self.step, out_log, self.global_log)
            return 0

    # Creating temporary folder
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, out_log)

    # 5. Include here all mandatory input files
    # Copy input_file_path1 to temporary folder
    shutil.copy(self.io_dict['in']['input_file_path1'], self.tmp_folder)

    # 6. Prepare the command line parameters as instructions list
    instructions = ['-j']
    if self.boolean_property:
        instructions.append('-v')
        fu.log('Appending optional boolean property', out_log,
               self.global_log)

    # 7. Build the actual command line as a list of items
    # (elements order will be maintained)
    cmd = [self.executable_binary_property,
           ' '.join(instructions),
           self.io_dict['out']['output_file_path'],
           str(PurePath(self.tmp_folder).joinpath(
               PurePath(self.io_dict['in']['input_file_path1']).name))]
    fu.log('Creating command line with instructions and required arguments',
           out_log, self.global_log)

    # 8. Repeat for optional input files if provided
    if self.io_dict['in']['input_file_path2']:
        # Copy input_file_path2 to temporary folder
        shutil.copy(self.io_dict['in']['input_file_path2'], self.tmp_folder)
        # Append optional input_file_path2 to cmd
        cmd.append(str(PurePath(self.tmp_folder).joinpath(
            PurePath(self.io_dict['in']['input_file_path2']).name)))
        fu.log('Appending optional argument to command line', out_log,
               self.global_log)

    # 9. Uncomment to check the command line
    # print(' '.join(cmd))

    # Launch execution
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log,
                                        self.global_log).launch()

    # Remove temporary file(s)
    if self.remove_tmp:
        fu.rm(self.tmp_folder)
        fu.log('Removed: %s' % str(self.tmp_folder), out_log)

    return returncode