def launch(self):
    """Launch the execution of the acpype module.

    Validates inputs/outputs and properties, honours the ``restart`` flag,
    builds the command line, executes it through ``cmd_wrapper`` and moves
    the generated files to their final destinations.

    Returns:
        int: Exit code of the wrapped command (0 when skipped by restart).
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [self.output_path_par, self.output_path_inp, self.output_path_top]
        if fu.check_complete_files(output_file_list):
            # FIX: message previously read "will the skipped"
            fu.log('Restart is enabled, this step: %s will be skipped' % self.step, out_log, self.global_log)
            return 0

    # Create unique name for the temporary folder (created by acpype)
    self.unique_name = create_unique_name(6)

    # Create command line instruction
    cmd = self.create_cmd(out_log, err_log)

    # Execute cmd
    fu.log('Running %s, this execution can take a while' % self.acpype_path, out_log)
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log, self.global_log).launch()

    # Move files to output_path and remove the temporary folder
    process_output(self.unique_name,
                   self.basename + "." + self.unique_name + ".acpype",
                   self.remove_tmp,
                   self.basename,
                   get_default_value(self.__class__.__name__),
                   self.output_files,
                   out_log)
    return returncode
def __init__(self, input_ndx_path: str, input_top_zip_path: str, output_top_zip_path: str,
             properties: dict = None, **kwargs) -> None:
    """Store input/output file paths and read configuration properties."""
    if not properties:
        properties = {}
    get = properties.get

    # Input/Output files
    self.io_dict = {
        "in": {
            "input_ndx_path": input_ndx_path,
            "input_top_zip_path": input_top_zip_path
        },
        "out": {
            "output_top_zip_path": output_top_zip_path
        }
    }

    # Properties specific for BB
    self.force_constants = get('force_constants', '500 500 500')
    self.ref_rest_chain_triplet_list = get('ref_rest_chain_triplet_list')

    # Properties common in all BB
    self.can_write_console_log = get('can_write_console_log', True)
    self.global_log = get('global_log')
    self.prefix = get('prefix')
    self.step = get('step')
    self.path = get('path', '')
    self.remove_tmp = get('remove_tmp', True)
    self.restart = get('restart', False)

    # Check the properties
    fu.check_properties(self, properties)
def __init__(self, input_structure_path: str, input_ndx_path: str, output_itp_path: str,
             properties: dict = None, **kwargs) -> None:
    """Store input/output file paths and read configuration properties."""
    if not properties:
        properties = {}
    get = properties.get

    # Input/Output files
    self.io_dict = {
        "in": {
            "input_structure_path": input_structure_path,
            "input_ndx_path": input_ndx_path
        },
        "out": {
            "output_itp_path": output_itp_path
        }
    }

    # Properties specific for BB
    self.force_constants = str(get('force_constants', '500 500 500'))
    self.restrained_group = get('restrained_group', 'system')

    # Container specific
    self.container_path = get('container_path')
    self.container_image = get('container_image', 'gromacs/gromacs:latest')
    self.container_volume_path = get('container_volume_path', '/data')
    self.container_working_dir = get('container_working_dir')
    self.container_user_id = get('container_user_id')
    self.container_shell_path = get('container_shell_path', '/bin/bash')

    # Properties common in all GROMACS BB
    self.gmxlib = get('gmxlib')
    self.gmx_path = get('gmx_path', 'gmx')
    self.gmx_nobackup = get('gmx_nobackup', True)
    self.gmx_nocopyright = get('gmx_nocopyright', True)
    for flag, enabled in ((' -nobackup', self.gmx_nobackup),
                          (' -nocopyright', self.gmx_nocopyright)):
        if enabled:
            self.gmx_path += flag
    if not self.container_path:
        self.gmx_version = get_gromacs_version(self.gmx_path)

    # Properties common in all BB
    self.can_write_console_log = get('can_write_console_log', True)
    self.global_log = get('global_log')
    self.prefix = get('prefix')
    self.step = get('step')
    self.path = get('path', '')
    self.remove_tmp = get('remove_tmp', True)
    self.restart = get('restart', False)

    # Check the properties
    fu.check_properties(self, properties)
def launch(self) -> int:
    """Execute the :class:`FPocketSelect <fpocket.fpocket_select.FPocketSelect>` fpocket.fpocket_select.FPocketSelect object.

    Extracts the PDB and PQR files of the selected pocket from the input
    pockets zip and copies them to the configured output paths.

    Returns:
        int: 0 on success (also when skipped by restart).
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [
            self.io_dict["out"]["output_pocket_pdb"],
            self.io_dict["out"]["output_pocket_pqr"]
        ]
        if fu.check_complete_files(output_file_list):
            # FIX: message previously read "will the skipped"
            fu.log(
                'Restart is enabled, this step: %s will be skipped' %
                self.step, out_log, self.global_log)
            return 0

    # Create tmp_folder
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, out_log)

    # Decompress the input_pockets_zip file to tmp_folder
    all_pockets = fu.unzip_list(
        zip_file=self.io_dict["in"]["input_pockets_zip"],
        dest_dir=self.tmp_folder,
        out_log=out_log)

    # FIX: plain substring matching ("pocket1" in path) also matched
    # pocket10, pocket11, ... Match the exact pocket number: the filename
    # must start with "pocket<N>" and the next character (if any) must not
    # be another digit.
    prefix = 'pocket' + str(self.pocket)
    pockets_list = []
    for p in all_pockets:
        name = PurePath(p).name
        rest = name[len(prefix):]
        if name.startswith(prefix) and (not rest or not rest[0].isdigit()):
            pockets_list.append(p)

    for p in pockets_list:
        if PurePath(p).suffix == '.pdb':
            fu.log(
                'Saving %s file' % self.io_dict["out"]["output_pocket_pdb"],
                out_log)
            shutil.copy(p, self.io_dict["out"]["output_pocket_pdb"])
        else:
            fu.log(
                'Saving %s file' % self.io_dict["out"]["output_pocket_pqr"],
                out_log)
            shutil.copy(p, self.io_dict["out"]["output_pocket_pqr"])

    if self.remove_tmp:
        # Remove temporary folder
        fu.rm(self.tmp_folder)
        fu.log('Removed temporary folder: %s' % self.tmp_folder, out_log)

    return 0
def launch(self) -> int:
    """Execute the :class:`FPocket <fpocket.fpocket.FPocket>` fpocket.fpocket.FPocket object.

    Copies the input PDB to a temporary folder, runs fpocket on it and
    post-processes the results into the output zip and summary files.

    Returns:
        int: Exit code of the fpocket command (0 when skipped by restart).
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [self.io_dict["out"]["output_pockets_zip"],
                            self.io_dict["out"]["output_summary"]]
        if fu.check_complete_files(output_file_list):
            # FIX: message previously read "will the skipped"
            fu.log('Restart is enabled, this step: %s will be skipped' % self.step,
                   out_log, self.global_log)
            return 0

    # Create tmp_folder
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, out_log)

    tmp_input = str(PurePath(self.tmp_folder).joinpath('input.pdb'))

    # Copy input_pdb_path to tmp_folder
    shutil.copy(self.io_dict["in"]["input_pdb_path"], tmp_input)

    # Create cmd
    cmd = [self.fpocket_path, '-f', tmp_input]

    # Adding extra properties
    if self.min_radius:
        cmd.extend(['-m', str(self.min_radius)])
    if self.max_radius:
        cmd.extend(['-M', str(self.max_radius)])
    if self.num_spheres:
        cmd.extend(['-i', str(self.num_spheres)])

    fu.log('Executing fpocket', out_log, self.global_log)

    cmd = fu.create_cmd_line(cmd, out_log=out_log, global_log=self.global_log)
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log, self.global_log).launch()

    process_output_fpocket(self.tmp_folder,
                           self.io_dict["out"]["output_pockets_zip"],
                           self.io_dict["out"]["output_summary"],
                           self.sort_by,
                           self.remove_tmp,
                           out_log,
                           self.__class__.__name__)

    return returncode
def launch(self) -> int:
    """Execute the :class:`ExtractModelPDBQT <utils.extract_model_pdbqt.ExtractModelPDBQT>` utils.extract_model_pdbqt.ExtractModelPDBQT object.

    Writes the atom records of the selected MODEL of the input PDBQT file
    (without the MODEL/ENDMDL wrapper lines) to the output path.

    Returns:
        int: 0 on success (also when skipped by restart).

    Raises:
        SystemExit: If the selected model is not present in the structure.
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [self.io_dict["out"]["output_pdbqt_path"]]
        if fu.check_complete_files(output_file_list):
            # FIX: message previously read "will the skipped"
            fu.log('Restart is enabled, this step: %s will be skipped' % self.step,
                   out_log, self.global_log)
            return 0

    structure_name = PurePath(self.io_dict["in"]["input_pdbqt_path"]).name
    parser = Bio.PDB.PDBParser(QUIET=True)
    structPDB = parser.get_structure(structure_name,
                                     self.io_dict["in"]["input_pdbqt_path"])

    # Bio.PDB model ids are 0-based; the user-facing model number is 1-based
    models = [model.id + 1 for model in structPDB.get_models()]

    # FIX: was "if not self.model in models" (PEP 8: use "not in");
    # the duplicated message string is now built once
    if self.model not in models:
        msg = (self.__class__.__name__ +
               ': Selected model %d not found in %s structure.' %
               (self.model, self.io_dict["in"]["input_pdbqt_path"]))
        fu.log(msg, out_log)
        raise SystemExit(msg)

    save = False
    lines = 0
    with open(self.io_dict["in"]["input_pdbqt_path"], "r") as input_pdb, \
            open(self.io_dict["out"]["output_pdbqt_path"], "w") as output_pdb:
        for line in input_pdb:
            # start saving at the matching MODEL record ...
            if line.startswith('MODEL') and line.split()[1] == str(self.model):
                save = True
            # ... and stop at the closing ENDMDL
            if line.startswith('ENDMDL'):
                save = False
            if save and not line.startswith('MODEL'):
                lines = lines + 1
                output_pdb.write(line)

    fu.log('Saving model %d to %s' %
           (self.model, self.io_dict["out"]["output_pdbqt_path"]), out_log)

    return 0
def launch(self):
    """Launches the execution of the Open Babel module.

    Checks the properties, honours the ``restart`` flag and runs the
    command built by ``self.create_cmd`` through ``cmd_wrapper``.

    Returns:
        int: Exit code of the wrapped command (0 when skipped by restart).
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [self.output_path]
        if fu.check_complete_files(output_file_list):
            # FIX: message previously read "will the skipped"
            fu.log('Restart is enabled, this step: %s will be skipped' % self.step,
                   out_log, self.global_log)
            return 0

    # Create command line instruction
    cmd = self.create_cmd(out_log, err_log)

    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log, self.global_log).launch()
    return returncode
def __init__(self, input_structure_path: str, output_ndx_path: str,
             input_ndx_path: str = None, properties: dict = None, **kwargs) -> None:
    """Store input/output file paths and read configuration properties."""
    if not properties:
        properties = {}

    # Call parent class constructor
    super().__init__(properties)

    # Input/Output files
    self.io_dict = {
        "in": {
            "input_structure_path": input_structure_path,
            "input_ndx_path": input_ndx_path
        },
        "out": {
            "output_ndx_path": output_ndx_path
        }
    }

    # Properties specific for BB
    self.selection = properties.get('selection', "a CA C N O")

    # Properties common in all GROMACS BB
    self.gmx_lib = properties.get('gmx_lib')
    self.gmx_path = properties.get('gmx_path', 'gmx')
    self.gmx_nobackup = properties.get('gmx_nobackup', True)
    self.gmx_nocopyright = properties.get('gmx_nocopyright', True)
    for flag, enabled in ((' -nobackup', self.gmx_nobackup),
                          (' -nocopyright', self.gmx_nocopyright)):
        if enabled:
            self.gmx_path += flag
    # NOTE(review): self.container_path is not assigned in this method —
    # presumably set by the parent constructor; confirm.
    if not self.container_path:
        self.gmx_version = get_gromacs_version(self.gmx_path)

    # Check the properties
    fu.check_properties(self, properties)
def __init__(self, input_top_zip_path: str, input_itp_path: str, output_top_zip_path: str,
             input_posres_itp_path: str = None, properties: dict = None, **kwargs) -> None:
    """Store input/output file paths and read configuration properties."""
    if not properties:
        properties = {}
    get = properties.get

    # Input/Output files
    self.io_dict = {
        "in": {
            "input_top_zip_path": input_top_zip_path,
            "input_itp_path": input_itp_path,
            "input_posres_itp_path": input_posres_itp_path
        },
        "out": {
            "output_top_zip_path": output_top_zip_path
        }
    }

    # Properties specific for BB
    self.posres_name = get('posres_name', 'POSRES_LIGAND')

    # Properties common in all BB
    self.can_write_console_log = get('can_write_console_log', True)
    self.global_log = get('global_log')
    self.prefix = get('prefix')
    self.step = get('step')
    self.path = get('path', '')
    self.remove_tmp = get('remove_tmp', True)
    self.restart = get('restart', False)

    # Check the properties
    fu.check_properties(self, properties)
def launch(self):
    """Launches the execution of the template module.

    Copies the mandatory (and optional) input files to a temporary folder,
    builds the command line, executes it and cleans up.

    Returns:
        int: Exit code of the wrapped command (0 when skipped by restart).
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check the properties
    fu.check_properties(self, self.properties)

    # Restart
    if self.restart:
        # 4. Include here all output file paths
        output_file_list = [self.io_dict['out']['output_file_path']]
        if fu.check_complete_files(output_file_list):
            # FIX: message previously read "will the skipped"
            fu.log(
                'Restart is enabled, this step: %s will be skipped' %
                self.step, out_log, self.global_log)
            return 0

    # Creating temporary folder
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, out_log)

    # 5. Include here all mandatory input files
    # Copy input_file_path1 to temporary folder
    shutil.copy(self.io_dict['in']['input_file_path1'], self.tmp_folder)

    # 6. Prepare the command line parameters as instructions list
    instructions = ['-j']
    if self.boolean_property:
        instructions.append('-v')
        fu.log('Appending optional boolean property', out_log,
               self.global_log)

    # 7. Build the actual command line as a list of items (elements order will be maintained)
    cmd = [
        self.executable_binary_property,
        ' '.join(instructions),
        self.io_dict['out']['output_file_path'],
        str(
            PurePath(self.tmp_folder).joinpath(
                PurePath(self.io_dict['in']['input_file_path1']).name))
    ]
    fu.log(
        'Creating command line with instructions and required arguments',
        out_log, self.global_log)

    # 8. Repeat for optional input files if provided
    if self.io_dict['in']['input_file_path2']:
        # Copy input_file_path2 to temporary folder
        shutil.copy(self.io_dict['in']['input_file_path2'], self.tmp_folder)
        # Append optional input_file_path2 to cmd
        cmd.append(
            str(
                PurePath(self.tmp_folder).joinpath(
                    PurePath(self.io_dict['in']['input_file_path2']).name)))
        fu.log('Appending optional argument to command line', out_log,
               self.global_log)

    # 9. Uncomment to check the command line
    # print(' '.join(cmd))

    # Launch execution
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log, self.global_log).launch()

    # Remove temporary file(s)
    if self.remove_tmp:
        fu.rm(self.tmp_folder)
        fu.log('Removed: %s' % str(self.tmp_folder), out_log)

    return returncode
def __init__(self, input_tpr_path: str, output_trr_path: str, output_gro_path: str,
             output_edr_path: str, output_log_path: str, output_xtc_path: str = None,
             output_cpt_path: str = None, output_dhdl_path: str = None,
             properties: dict = None, **kwargs) -> None:
    """Store input/output file paths and read configuration properties."""
    if not properties:
        properties = {}
    get = properties.get

    # Input/Output files
    self.io_dict = {
        "in": {
            "input_tpr_path": input_tpr_path
        },
        "out": {
            "output_trr_path": output_trr_path,
            "output_gro_path": output_gro_path,
            "output_edr_path": output_edr_path,
            "output_log_path": output_log_path,
            "output_xtc_path": output_xtc_path,
            "output_cpt_path": output_cpt_path,
            "output_dhdl_path": output_dhdl_path
        }
    }

    # Properties specific for BB
    self.num_threads = str(get('num_threads', 0))
    self.mpi_bin = get('mpi_bin')
    self.mpi_np = get('mpi_np')
    self.mpi_hostlist = get('mpi_hostlist')
    self.use_gpu = get('use_gpu', False)

    # Container specific
    self.container_path = get('container_path')
    self.container_image = get('container_image', 'gromacs/gromacs:latest')
    self.container_volume_path = get('container_volume_path', '/tmp')
    self.container_working_dir = get('container_working_dir')
    self.container_user_id = get('container_user_id')
    self.container_shell_path = get('container_shell_path', '/bin/bash')

    # Properties common in all GROMACS BB
    self.gmxlib = get('gmxlib')
    self.gmx_path = get('gmx_path', 'gmx')
    self.gmx_nobackup = get('gmx_nobackup', True)
    self.gmx_nocopyright = get('gmx_nocopyright', True)
    for flag, enabled in ((' -nobackup', self.gmx_nobackup),
                          (' -nocopyright', self.gmx_nocopyright)):
        if enabled:
            self.gmx_path += flag
    # Version probing is skipped when running through MPI or a container
    if (not self.mpi_bin) and (not self.container_path):
        self.gmx_version = get_gromacs_version(self.gmx_path)

    # Properties common in all BB
    self.can_write_console_log = get('can_write_console_log', True)
    self.global_log = get('global_log')
    self.prefix = get('prefix')
    self.step = get('step')
    self.path = get('path', '')
    self.remove_tmp = get('remove_tmp', True)
    self.restart = get('restart', False)

    # Check the properties
    fu.check_properties(self, properties)
def __init__(self, input_tpr_path: str, output_gro_path: str, input_top_zip_path: str,
             output_top_zip_path: str, properties: dict = None, **kwargs) -> None:
    """Store input/output file paths and read configuration properties."""
    if not properties:
        properties = {}
    get = properties.get

    # Input/Output files
    self.io_dict = {
        "in": {
            "input_tpr_path": input_tpr_path
        },
        "out": {
            "output_gro_path": output_gro_path,
            "output_top_zip_path": output_top_zip_path
        }
    }

    # Should not be copied inside container
    self.input_top_zip_path = input_top_zip_path

    # Properties specific for BB
    self.output_top_path = get('output_top_path', 'gio.top')
    self.replaced_group = get('replaced_group', 'SOL')
    self.neutral = get('neutral', False)
    self.concentration = get('concentration', 0.05)
    self.seed = get('seed', 1993)

    # Container specific
    self.container_path = get('container_path')
    self.container_image = get('container_image', 'gromacs/gromacs:latest')
    self.container_volume_path = get('container_volume_path', '/data')
    self.container_working_dir = get('container_working_dir')
    self.container_user_id = get('container_user_id')
    self.container_shell_path = get('container_shell_path', '/bin/bash')

    # Properties common in all GROMACS BB
    self.gmxlib = get('gmxlib')
    self.gmx_path = get('gmx_path', 'gmx')
    self.gmx_nobackup = get('gmx_nobackup', True)
    self.gmx_nocopyright = get('gmx_nocopyright', True)
    for flag, enabled in ((' -nobackup', self.gmx_nobackup),
                          (' -nocopyright', self.gmx_nocopyright)):
        if enabled:
            self.gmx_path += flag
    if not self.container_path:
        self.gmx_version = get_gromacs_version(self.gmx_path)

    # Properties common in all BB
    self.can_write_console_log = get('can_write_console_log', True)
    self.global_log = get('global_log')
    self.prefix = get('prefix')
    self.step = get('step')
    self.path = get('path', '')
    self.remove_tmp = get('remove_tmp', True)
    self.restart = get('restart', False)

    # Check the properties
    fu.check_properties(self, properties)
def __init__(self, input_pdb_path: str, output_gro_path: str, output_top_zip_path: str,
             properties: dict = None, **kwargs) -> None:
    """Store input/output file paths and read configuration properties."""
    if not properties:
        properties = {}
    get = properties.get

    # Input/Output files
    self.io_dict = {
        "in": {
            "input_pdb_path": input_pdb_path
        },
        "out": {
            "output_gro_path": output_gro_path,
            "output_top_zip_path": output_top_zip_path
        }
    }

    # Properties specific for BB
    self.output_top_path = get('output_top_path', 'p2g.top')
    self.output_itp_path = get('output_itp_path', 'posre.itp')
    self.water_type = get('water_type', 'spce')
    self.force_field = get('force_field', 'amber99sb-ildn')
    self.ignh = get('ignh', False)
    self.his = get('his')

    # Container specific
    self.container_path = get('container_path')
    self.container_image = get('container_image', 'gromacs/gromacs:latest')
    self.container_volume_path = get('container_volume_path', '/data')
    self.container_working_dir = get('container_working_dir')
    self.container_user_id = get('container_user_id')
    self.container_shell_path = get('container_shell_path', '/bin/bash')

    # Properties common in all GROMACS BB
    self.gmxlib = get('gmxlib')
    self.gmx_path = get('gmx_path', 'gmx')
    self.gmx_nobackup = get('gmx_nobackup', True)
    self.gmx_nocopyright = get('gmx_nocopyright', True)
    for flag, enabled in ((' -nobackup', self.gmx_nobackup),
                          (' -nocopyright', self.gmx_nocopyright)):
        if enabled:
            self.gmx_path += flag
    if not self.container_path:
        self.gmx_version = get_gromacs_version(self.gmx_path)

    # Properties common in all BB
    self.can_write_console_log = get('can_write_console_log', True)
    self.global_log = get('global_log')
    self.prefix = get('prefix')
    self.step = get('step')
    self.path = get('path', '')
    self.remove_tmp = get('remove_tmp', True)
    self.restart = get('restart', False)

    # Check the properties
    fu.check_properties(self, properties)
def launch(self) -> int:
    """Execute the :class:`Box <utils.box.Box>` utils.box.Box object.

    Reads the atom coordinates of the input selection (PDB cavity or PQR
    pocket), computes the bounding-box center and half-size (optionally
    padded by ``self.offset``) and writes them as REMARK annotations
    (plus optional box-corner ATOM records) to the output PDB file.

    Returns:
        int: 0 on success (also when skipped by restart).
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [self.io_dict["out"]["output_pdb_path"]]
        if fu.check_complete_files(output_file_list):
            # FIX: message previously read "will the skipped"
            fu.log(
                'Restart is enabled, this step: %s will be skipped' %
                self.step, out_log, self.global_log)
            return 0

    # Check if cavity (pdb) or pocket (pqr)
    input_type = PurePath(
        self.io_dict["in"]["input_pdb_path"]).suffix.lstrip('.')
    if input_type == 'pdb':
        fu.log(
            'Loading residue PDB selection from %s' %
            (self.io_dict["in"]["input_pdb_path"]), out_log, self.global_log)
    else:
        fu.log(
            'Loading pocket PQR selection from %s' %
            (self.io_dict["in"]["input_pdb_path"]), out_log, self.global_log)

    # Get input_pdb_path atoms coordinates
    selection_atoms_num = 0
    x_coordslist = []
    y_coordslist = []
    z_coordslist = []
    with open(self.io_dict["in"]["input_pdb_path"]) as infile:
        for line in infile:
            if line.startswith("HETATM") or line.startswith("ATOM"):
                # FIX: slices were [31:38]/[39:46]/[47:54], which drop the
                # first column of each coordinate field; PDB columns 31-38,
                # 39-46 and 47-54 (1-based) are [30:38], [38:46], [46:54]
                # 0-based, so a leading minus sign or digit could be lost.
                # NOTE(review): PQR files are whitespace-separated — assumes
                # the PQR writer aligns to PDB columns; confirm upstream.
                x_coordslist.append(float(line[30:38].strip()))
                y_coordslist.append(float(line[38:46].strip()))
                z_coordslist.append(float(line[46:54].strip()))
                selection_atoms_num = selection_atoms_num + 1

    ## Compute binding site box size

    # compute box center
    selection_box_center = [
        np.average(x_coordslist),
        np.average(y_coordslist),
        np.average(z_coordslist)
    ]
    fu.log(
        'Binding site center (Angstroms): %10.3f%10.3f%10.3f' %
        (selection_box_center[0], selection_box_center[1],
         selection_box_center[2]), out_log, self.global_log)

    # compute box size (half-extent from center to max corner)
    selection_coords_max = np.amax(
        [x_coordslist, y_coordslist, z_coordslist], axis=1)
    selection_box_size = selection_coords_max - selection_box_center
    if self.offset:
        fu.log('Adding %.1f Angstroms offset' % (self.offset), out_log,
               self.global_log)
        selection_box_size = [c + self.offset for c in selection_box_size]
    fu.log(
        'Binding site size (Angstroms): %10.3f%10.3f%10.3f' %
        (selection_box_size[0], selection_box_size[1],
         selection_box_size[2]), out_log, self.global_log)

    # compute volume: sizes are half-extents, hence the 2**3 factor
    vol = np.prod(selection_box_size) * 2**3
    fu.log('Volume (cubic Angstroms): %.0f' % (vol), out_log,
           self.global_log)

    # add box details as PDB remarks
    remarks = "REMARK BOX CENTER:%10.3f%10.3f%10.3f" % (
        selection_box_center[0], selection_box_center[1],
        selection_box_center[2])
    remarks += " SIZE:%10.3f%10.3f%10.3f" % (selection_box_size[0],
                                             selection_box_size[1],
                                             selection_box_size[2])

    selection_box_coords_txt = ""
    # add (optional) box coordinates as 8 ATOM records
    if self.box_coordinates:
        fu.log('Adding box coordinates', out_log, self.global_log)
        selection_box_coords_txt = get_box_coordinates(
            selection_box_center, selection_box_size)

    with open(self.io_dict["out"]["output_pdb_path"], 'w') as f:
        f.seek(0, 0)
        f.write(remarks.rstrip('\r\n') + '\n' + selection_box_coords_txt)

    fu.log(
        'Saving output PDB file (with box setting annotations): %s' %
        (self.io_dict["out"]["output_pdb_path"]), out_log, self.global_log)

    return 0
def __init__(self, input_gro_path: str, input_top_zip_path: str, output_tpr_path: str,
             input_cpt_path: str = None, input_ndx_path: str = None,
             properties: dict = None, **kwargs) -> None:
    """Store input/output file paths and read configuration properties."""
    if not properties:
        properties = {}
    get = properties.get

    # Input/Output files
    self.io_dict = {
        "in": {
            "input_gro_path": input_gro_path,
            "input_cpt_path": input_cpt_path,
            "input_ndx_path": input_ndx_path
        },
        "out": {
            "output_tpr_path": output_tpr_path
        }
    }

    # Should not be copied inside container
    self.input_top_zip_path = input_top_zip_path

    # Properties specific for BB
    self.input_mdp_path = get('input_mdp_path')
    self.output_mdp_path = get('output_mdp_path', 'grompp.mdp')
    self.output_top_path = get('output_top_path', 'grompp.top')
    #TODO REVIEW: When select is implemented.
    self.maxwarn = str(get('maxwarn', 10))
    self.mdp = {k: str(v) for k, v in get('mdp', dict()).items()}
    #TODO REVIEW: this two attributes
    self.nsteps = ''
    self.dt = ''

    # Container specific
    self.container_path = get('container_path')
    self.container_image = get('container_image', 'gromacs/gromacs:latest')
    self.container_volume_path = get('container_volume_path', '/data')
    self.container_working_dir = get('container_working_dir')
    self.container_user_id = get('container_user_id')
    self.container_shell_path = get('container_shell_path', '/bin/bash')

    # Properties common in all GROMACS BB
    self.gmxlib = get('gmxlib')
    self.gmx_path = get('gmx_path', 'gmx')
    self.gmx_nobackup = get('gmx_nobackup', True)
    self.gmx_nocopyright = get('gmx_nocopyright', True)
    for flag, enabled in ((' -nobackup', self.gmx_nobackup),
                          (' -nocopyright', self.gmx_nocopyright)):
        if enabled:
            self.gmx_path += flag
    if not self.container_path:
        self.gmx_version = get_gromacs_version(self.gmx_path)

    # Properties common in all BB
    self.can_write_console_log = get('can_write_console_log', True)
    self.global_log = get('global_log')
    self.prefix = get('prefix')
    self.step = get('step')
    self.path = get('path', '')
    self.remove_tmp = get('remove_tmp', True)
    self.restart = get('restart', False)

    # Check the properties
    fu.check_properties(self, properties)
def launch(self):
    """Launches the execution of the template_container module.

    Copies inputs into the container volume, builds and wraps the command
    line for container execution, runs it, copies outputs back to the host
    and cleans up.

    Returns:
        int: Exit code of the wrapped command (0 when skipped by restart).
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check the properties
    fu.check_properties(self, self.properties)

    # Restart
    if self.restart:
        # 4. Include here all output file paths
        output_file_list = [self.io_dict['out']['output_file_path']]
        if fu.check_complete_files(output_file_list):
            # FIX: message previously read "will the skipped"
            fu.log('Restart is enabled, this step: %s will be skipped' % self.step,
                   out_log, self.global_log)
            return 0

    # 5. Copy inputs to container
    container_io_dict = fu.copy_to_container(self.container_path,
                                             self.container_volume_path,
                                             self.io_dict)

    # 6. Prepare the command line parameters as instructions list
    instructions = ['-j']
    if self.boolean_property:
        instructions.append('-v')
        fu.log('Appending optional boolean property', out_log, self.global_log)

    # 7. Build the actual command line as a list of items (elements order will be maintained)
    cmd = [self.executable_binary_property,
           ' '.join(instructions),
           container_io_dict['out']['output_file_path'],
           container_io_dict['in']['input_file_path1']]
    fu.log('Creating command line with instructions and required arguments',
           out_log, self.global_log)

    # 8. Repeat for optional input files if provided
    if container_io_dict['in']['input_file_path2']:
        # Append optional input_file_path2 to cmd
        cmd.append(container_io_dict['in']['input_file_path2'])
        fu.log('Appending optional argument to command line', out_log,
               self.global_log)

    # 9. Uncomment to check the command line
    # print(' '.join(cmd))

    # 10. Create cmd with specific syntax according to the required container
    cmd = fu.create_cmd_line(cmd,
                             container_path=self.container_path,
                             host_volume=container_io_dict.get('unique_dir'),
                             container_volume=self.container_volume_path,
                             container_working_dir=self.container_working_dir,
                             container_user_uid=self.container_user_id,
                             container_image=self.container_image,
                             container_shell_path=self.container_shell_path,
                             out_log=out_log,
                             global_log=self.global_log)

    # Launch execution
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log, self.global_log).launch()

    # Copy output(s) to output(s) path(s) in case of container execution
    fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)

    # Remove temporary file(s)
    if self.remove_tmp and container_io_dict.get('unique_dir'):
        fu.rm(container_io_dict.get('unique_dir'))
        fu.log('Removed: %s' % str(container_io_dict.get('unique_dir')), out_log)

    return returncode
def launch(self) -> int:
    """Execute the :class:`BoxResidues <utils.box_residues.BoxResidues>` utils.box_residues.BoxResidues object.

    Maps ``self.resid_list`` onto the input PDB structure, computes the
    bounding-box center and half-size of the matched residues (optionally
    padded by ``self.offset``) and writes them as REMARK annotations
    (plus optional box-corner ATOM records) to the output PDB file.

    Returns:
        int: 0 on success (also when skipped by restart).

    Raises:
        SystemExit: If none of the listed residues can be matched.
    """
    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    if self.restart:
        output_file_list = [self.io_dict["out"]["output_pdb_path"]]
        if fu.check_complete_files(output_file_list):
            # FIX: message previously read "will the skipped"
            fu.log(
                'Restart is enabled, this step: %s will be skipped' %
                self.step, out_log, self.global_log)
            return 0

    # Parse structure
    fu.log(
        'Loading input PDB structure %s' %
        (self.io_dict["in"]["input_pdb_path"]), out_log, self.global_log)
    structure_name = PurePath(self.io_dict["in"]["input_pdb_path"]).name
    parser = Bio.PDB.PDBParser(QUIET=True)
    structPDB = parser.get_structure(structure_name,
                                     self.io_dict["in"]["input_pdb_path"])

    # Work on the first model only
    if len(structPDB):
        structPDB = structPDB[0]

    ## Mapping residue structure into input structure

    fu.log('Mapping residue structure into input structure', out_log,
           self.global_log)

    # Listing residues to be selected from the residue structure
    # (Bio.PDB residue ids are (hetfield, resseq, icode) tuples)
    residPDB_res_list = []
    for residPDB_res in self.resid_list:
        if self.residue_offset:
            residPDB_res_list.append(
                (' ', residPDB_res + self.residue_offset, ' '))
        else:
            residPDB_res_list.append((' ', residPDB_res, ' '))

    selection_res_list = []
    selection_atoms_num = 0
    for struct_chain in structPDB:
        for struct_res in struct_chain:
            if struct_res.get_id() in residPDB_res_list:
                selection_res_list.append(struct_res)
                selection_atoms_num += len(struct_res.get_list())

    if len(selection_res_list) == 0:
        # FIX: the doubled message string is now built once
        msg = (self.__class__.__name__ +
               ': Cannot match any of the residues listed in [%s] into %s' %
               (', '.join(str(v) for v in self.resid_list),
                self.io_dict["in"]["input_pdb_path"]))
        fu.log(msg, out_log)
        raise SystemExit(msg)
    elif len(selection_res_list) != len(residPDB_res_list):
        fu.log(
            'Cannot match all the residues listed in %s into %s. Found %s out of %s'
            % (', '.join(str(v) for v in self.resid_list),
               self.io_dict["in"]["input_pdb_path"],
               len(selection_res_list), len(residPDB_res_list)), out_log)
    else:
        fu.log('Selection of residues successfully matched', out_log,
               self.global_log)

    ## Compute binding site box size

    # compute box center
    selection_box_center = sum(
        atom.coord for res in selection_res_list
        for atom in res.get_atoms()) / selection_atoms_num
    fu.log(
        'Binding site center (Angstroms): %10.3f%10.3f%10.3f' %
        (selection_box_center[0], selection_box_center[1],
         selection_box_center[2]), out_log, self.global_log)

    # compute box size (half-extent from center to max corner)
    selection_coords_max = np.amax([
        atom.coord for res in selection_res_list
        for atom in res.get_atoms()
    ], axis=0)
    selection_box_size = selection_coords_max - selection_box_center
    if self.offset:
        selection_box_size = [c + self.offset for c in selection_box_size]
    fu.log(
        'Binding site size (Angstroms): %10.3f%10.3f%10.3f' %
        (selection_box_size[0], selection_box_size[1],
         selection_box_size[2]), out_log, self.global_log)

    # compute volume: sizes are half-extents, hence the 2**3 factor
    vol = np.prod(selection_box_size) * 2**3
    fu.log('Volume (cubic Angstroms): %.0f' % (vol), out_log,
           self.global_log)

    # add box details as PDB remarks
    remarks = "REMARK BOX CENTER:%10.3f%10.3f%10.3f" % (
        selection_box_center[0], selection_box_center[1],
        selection_box_center[2])
    remarks += " SIZE:%10.3f%10.3f%10.3f" % (selection_box_size[0],
                                             selection_box_size[1],
                                             selection_box_size[2])

    selection_box_coords_txt = ""
    # add (optional) box coordinates as 8 ATOM records
    if self.box_coordinates:
        fu.log('Adding box coordinates', out_log, self.global_log)
        selection_box_coords_txt = get_box_coordinates(
            selection_box_center, selection_box_size)

    with open(self.io_dict["out"]["output_pdb_path"], 'w') as f:
        f.seek(0, 0)
        f.write(remarks.rstrip('\r\n') + '\n' + selection_box_coords_txt)

    fu.log(
        'Saving output PDB file (with box setting annotations): %s' %
        (self.io_dict["out"]["output_pdb_path"]), out_log, self.global_log)

    return 0
def launch(self) -> int:
    """Execute the :class:`BindingSite <utils.bindingsite.BindingSite>` utils.bindingsite.BindingSite object.

    Finds the binding site of the input PDB structure by superimposing the
    ligands of a set of homologous structures (the clusters zip) onto it and
    selecting the input residues within ``self.radius`` Angstroms of any
    transformed ligand atom. Writes the selected residues to the output PDB.

    Returns:
        int: 0 on success (also when restart detects completed outputs).

    Raises:
        SystemExit: if no AA sequence can be extracted from the input PDB, or
            if no CA atoms can be matched for a cluster member.
    """

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    # Skip the whole step if restart is enabled and outputs already exist
    if self.restart:
        output_file_list = [self.io_dict["out"]["output_pdb_path"]]
        if fu.check_complete_files(output_file_list):
            fu.log(
                'Restart is enabled, this step: %s will the skipped' %
                self.step, out_log, self.global_log)
            return 0

    # Parse structure
    fu.log(
        'Loading input PDB structure %s' %
        (self.io_dict["in"]["input_pdb_path"]), out_log, self.global_log)
    structure_name = PurePath(self.io_dict["in"]["input_pdb_path"]).name
    parser = Bio.PDB.PDBParser(QUIET=True)
    structPDB = parser.get_structure(structure_name,
                                     self.io_dict["in"]["input_pdb_path"])

    # Keep only the first model, if any
    if len(structPDB):
        structPDB = structPDB[0]

    # Use only one chain
    n_chains = structPDB.get_list()
    if len(n_chains) != 1:
        fu.log(
            'More than one chain found in the input PDB structure. Using only the first chain to find the binding site',
            out_log, self.global_log)

    # get first chain in case there is more than one chain
    # NOTE(review): this loop leaves structPDB bound to the LAST chain
    # iterated, not necessarily the first — confirm intended behavior
    for struct_chain in structPDB.get_chains():
        structPDB = struct_chain

    # Get AA sequence
    structPDB_seq = get_pdb_sequence(structPDB)
    if len(structPDB_seq) == 0:
        fu.log(
            self.__class__.__name__ +
            ': Cannot extract AA sequence from the input PDB structure %s. Wrong format?'
            % self.io_dict["in"]["input_pdb_path"], out_log)
        raise SystemExit(
            self.__class__.__name__ +
            ': Cannot extract AA sequence from the input PDB structure %s. Wrong format?'
            % self.io_dict["in"]["input_pdb_path"])
    else:
        fu.log(
            'Found %s residues in %s' %
            (len(structPDB_seq), self.io_dict["in"]["input_pdb_path"]),
            out_log)

    # create temporary folder for decompressing the input_clusters_zip file
    unique_dir = PurePath(fu.create_unique_dir())
    fu.log('Creating %s temporary folder' % unique_dir, out_log,
           self.global_log)

    # decompress the input_clusters_zip file
    cluster_list = fu.unzip_list(
        zip_file=self.io_dict["in"]["input_clusters_zip"],
        dest_dir=unique_dir,
        out_log=out_log)

    clusterPDB_ligands_aligned = []  # transformed ligand residues collected per accepted member
    clusterPDB_ligands_num = 0       # number of accepted cluster members

    fu.log('Iterating on all clusters:', out_log)

    for idx, cluster_path in enumerate(cluster_list):
        cluster_name = PurePath(cluster_path).stem
        fu.log(' ', out_log)
        fu.log('------------ Iteration #%s --------------' % (idx + 1),
               out_log)
        fu.log('Cluster member: %s' % cluster_name, out_log)

        # Load and Parse PDB (first model only)
        clusterPDB = {}
        clusterPDB = parser.get_structure(cluster_name, cluster_path)[0]

        # Use only the first chain
        # NOTE(review): as above, this actually keeps the LAST chain iterated
        for cluster_chain in clusterPDB.get_chains():
            clusterPDB = cluster_chain

        # Looking for ligands
        clusterPDB_ligands = get_ligand_residues(clusterPDB)
        if (len(clusterPDB_ligands)) == 0:
            fu.log(
                'No ligands found that could guide the binding site search. Ignoring this member: %s'
                % cluster_name, out_log)
            continue

        # Selecting the largest ligand, if more than one
        lig_atoms_num = 0
        clusterPDB_ligand = {}
        if self.ligand:
            # A specific ligand residue name was requested
            if self.ligand in [
                    x.get_resname() for x in clusterPDB_ligands
            ]:
                for lig in clusterPDB_ligands:
                    if lig.get_resname() == self.ligand:
                        clusterPDB_ligand = lig
                        lig_atoms_num = len(lig.get_list())
                        fu.log(
                            'Ligand found: %s (%s atoms)' %
                            (lig.get_resname(), lig_atoms_num), out_log)
            else:
                fu.log(
                    'Ligand %s not found in %s cluster member, skipping this cluster'
                    % (self.ligand, cluster_name), out_log)
                continue
        else:
            # No specific ligand requested: pick the one with most atoms
            if len(clusterPDB_ligands) > 1:
                for lig_res in clusterPDB_ligands:
                    lig_res_atoms_num = len(lig_res.get_list())
                    fu.log(
                        'Ligand found: %s (%s atoms)' %
                        (lig_res.get_resname(), lig_res_atoms_num), out_log)
                    if lig_res_atoms_num > lig_atoms_num:
                        clusterPDB_ligand = lig_res
                        lig_atoms_num = lig_res_atoms_num
            else:
                clusterPDB_ligand = clusterPDB_ligands[0]
                lig_atoms_num = len(clusterPDB_ligands[0].get_list())

        fu.log(
            'Member accepted. Valid ligand found: %s (%s atoms)' %
            (clusterPDB_ligand.get_resname(), lig_atoms_num), out_log)

        ## Mapping residues by sequence alignment to match structPDB-clusterPDB paired residues

        # Get AA sequence
        clusterPDB_seq = get_pdb_sequence(clusterPDB)

        # Pairwise align
        aln, residue_map = align_sequences(structPDB_seq, clusterPDB_seq,
                                           self.matrix_name, self.gap_open,
                                           self.gap_extend)
        fu.log(
            'Matching residues to input PDB structure. Alignment is:\n%s' %
            (aln[1]), out_log)

        # Calculate (gapless) sequence identity
        seq_identity, gap_seq_identity = calculate_alignment_identity(
            aln[0], aln[1])
        fu.log('Sequence identity (%%): %s' % (seq_identity), out_log)
        fu.log('Gap less identity (%%): %s' % (gap_seq_identity), out_log)

        ## Selecting aligned CA atoms from first model, first chain

        struct_atoms = []
        cluster_atoms = []

        for struct_res in residue_map:
            try:
                cluster_atoms.append(
                    clusterPDB[residue_map[struct_res]]['CA'])
                struct_atoms.append(
                    get_residue_by_id(structPDB, struct_res)['CA'])
            except KeyError:
                # Residues without a CA atom (e.g. incomplete residues) are skipped
                fu.log(
                    'Cannot find CA atom for residue %s (input PDB %s)' %
                    (get_residue_by_id(
                        structPDB, struct_res).get_resname(), struct_res),
                    out_log)
                pass

        if len(cluster_atoms) == 0:
            # NOTE(review): the message says "Ignoring this member" but the
            # code aborts the whole step via SystemExit — confirm intent
            fu.log(
                self.__class__.__name__ +
                ': Cannot find CA atoms (1st model, 1st chain) in cluster member %s when aligning against %s. Ignoring this member.'
                % (cluster_name, structure_name), out_log)
            raise SystemExit(
                self.__class__.__name__ +
                ': Cannot find CA atoms (1st model, 1st chain) in cluster member %s when aligning against %s. Ignoring this member.'
                % (cluster_name, structure_name))
        else:
            fu.log(
                'Superimposing %s aligned protein residues' %
                (len(cluster_atoms)), out_log)

        ## Align against input structure

        si = Bio.PDB.Superimposer()
        si.set_atoms(struct_atoms, cluster_atoms)
        # apply the rotation/translation to ALL atoms of the cluster chain
        si.apply(clusterPDB.get_atoms())
        fu.log('RMSD: %s' % (si.rms), out_log)

        # Save transformed structure (and ligand)
        clusterPDB_ligand_aligned = clusterPDB[clusterPDB_ligand.get_id()]
        fu.log('Saving transformed ligand coordinates', out_log)

        clusterPDB_ligands_aligned.append(clusterPDB_ligand_aligned)

        ## Stop after n accepted cluster members

        clusterPDB_ligands_num += 1

        if clusterPDB_ligands_num > self.max_num_ligands:
            break

    fu.log(' ', out_log)
    fu.log('----------------------------------------', out_log)
    fu.log(
        'All transformed ligand coordinates saved, getting binding site residues',
        out_log)

    ## Select binding site atoms as those around cluster superimposed ligands

    fu.log(
        'Defining binding site residues as those %sÅ around the %s cluster superimposed ligands'
        % (self.radius, clusterPDB_ligands_num), out_log)

    # select Atoms from aligned ligands
    clusterPDB_ligands_aligned2 = [
        res for res in clusterPDB_ligands_aligned
    ]
    clusterPDB_ligands_aligned_atoms = Bio.PDB.Selection.unfold_entities(
        clusterPDB_ligands_aligned2, 'A')

    # select Atoms from input PDB structure
    structPDB_atoms = [atom for atom in structPDB.get_atoms()]

    # compute neighbors for aligned ligands in the input PDB structure
    structPDB_bs_residues_raw = {}
    structPDB_neighbors = Bio.PDB.NeighborSearch(structPDB_atoms)
    for ligand_atom in clusterPDB_ligands_aligned_atoms:
        # look for PDB atoms 5A around each ligand atom
        # (radius actually comes from self.radius, 'R' returns residues)
        k_l = structPDB_neighbors.search(ligand_atom.coord, self.radius,
                                         'R')
        for k in k_l:
            structPDB_bs_residues_raw[k.get_id()] = k.get_full_id()

    ## Save binding site to PDB

    io = Bio.PDB.PDBIO()
    fu.log(
        'Writing binding site residues into %s' %
        (self.io_dict["out"]["output_pdb_path"]), out_log)

    # unselect input PDB atoms not in binding site;
    # the regex also drops heteroatoms/waters by residue name prefix
    structPDB_bs_atoms = 0
    p = re.compile('H_|W_|W')
    residue_ids_to_remove = []
    for res in structPDB.get_residues():
        if res.id not in structPDB_bs_residues_raw.keys():
            # add residue to residue_ids_to_remove list
            residue_ids_to_remove.append(res.id)
        elif p.match(res.resname):
            # add residue to residue_ids_to_remove list
            residue_ids_to_remove.append(res.id)
        else:
            # this residue will be preserved
            structPDB_bs_atoms += len(res.get_list())

    # unselect input PDB atoms not in binding site
    for chain in structPDB:
        for idr in residue_ids_to_remove:
            chain.detach_child(idr)

    # write PDB file
    io.set_structure(structPDB)
    io.save(self.io_dict["out"]["output_pdb_path"])

    if self.remove_tmp:
        # remove temporary folder
        fu.rm(unique_dir)
        fu.log(' ', out_log)
        fu.log('----------------------------------------', out_log)
        fu.log('Removed temporary folder: %s' % unique_dir, out_log)

    return 0
def launch(self) -> int:
    """Execute the :class:`FPocketFilter <fpocket.fpocket_filter.FPocketFilter>` fpocket.fpocket_filter.FPocketFilter object.

    Filters the pockets described in the input summary JSON by the configured
    score, druggability_score and/or volume ranges, logs the matches, and
    extracts the matching pockets from the input zip into the output zip.

    Returns:
        int: 0 on success (also when restart detects completed outputs or no
        pocket matches the filters).
    """

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # check input/output paths and parameters
    self.check_data_params(out_log, err_log)

    # Check the properties
    fu.check_properties(self, self.properties)

    # Skip the whole step if restart is enabled and outputs already exist
    if self.restart:
        output_file_list = [self.io_dict["out"]["output_filter_pockets_zip"]]
        if fu.check_complete_files(output_file_list):
            # fixed message: "will the skipped" -> "will be skipped"
            fu.log('Restart is enabled, this step: %s will be skipped' % self.step, out_log, self.global_log)
            return 0

    # load input_summary into a dictionary
    with open(self.io_dict["in"]["input_summary"]) as json_file:
        data = json.load(json_file)

    # build search_list: one matcher predicate per active criterion,
    # validating each configured range first
    search_list = []
    ranges = {}
    if self.score:
        check_range('score', self.score, [0, 1], out_log, self.__class__.__name__)
        search_list.append(self.score_matcher(self.score))
        ranges['score'] = self.score
    if self.druggability_score:
        check_range('druggability_score', self.druggability_score, [0, 1], out_log, self.__class__.__name__)
        search_list.append(self.druggability_score_matcher(self.druggability_score))
        ranges['druggability_score'] = self.druggability_score
    if self.volume:
        check_range('volume', self.volume, [0, 10000], out_log, self.__class__.__name__)
        search_list.append(self.volume_matcher(self.volume))
        ranges['volume'] = self.volume

    fu.log('Performing a search under the next parameters: %s' % (', '.join('{0}: {1}'.format(k, v) for k, v in ranges.items())), out_log)

    # perform search: keep only pockets satisfying every active matcher
    search = [x for x in data if all(f(data[x]) for f in search_list)]

    if not search:
        fu.log('No matches found', out_log)
        return 0

    # build the report with ''.join instead of repeated += (avoids
    # quadratic string concatenation; also drops the stray semicolon)
    str_out = ''.join(
        '\n**********\n%s\n**********\nscore: %s\ndruggability_score: %s\nvolume: %s\n'
        % (s, data[s]["score"], data[s]["druggability_score"], data[s]["volume"])
        for s in search)
    fu.log('Found %d matches:%s' % (len(search), str_out), out_log)

    # create tmp_folder
    self.tmp_folder = fu.create_unique_dir()
    fu.log('Creating %s temporary folder' % self.tmp_folder, out_log)

    # extract the matched pockets into the output zip and clean up
    process_output_fpocket_filter(search,
                                  self.tmp_folder,
                                  self.io_dict["in"]["input_pockets_zip"],
                                  self.io_dict["out"]["output_filter_pockets_zip"],
                                  self.remove_tmp,
                                  out_log)

    return 0