def create_instructions_file(self, container_io_dict, out_log, err_log):
    """Write the cpptraj instructions (.in) file from the properties settings.

    Emits ``parm``, ``trajin``, optional ``strip`` and ``trajout`` directives,
    one per line, and returns the path of the generated file.
    """
    # Resolve the instructions-file location: inside the mounted volume when
    # running in a container, inside a fresh unique directory otherwise.
    if self.container_path:
        base_dir = self.container_volume_path
    else:
        base_dir = fu.create_unique_dir()
    self.instructions_file = str(PurePath(base_dir).joinpath(self.instructions_file))
    fu.create_name(prefix=self.prefix, step=self.step, name=self.instructions_file)

    # parm + trajin (with the rendered input parameters)
    directives = ['parm ' + container_io_dict["in"]["input_top_path"]]
    in_params = get_in_parameters(self.in_parameters, out_log)
    directives.append('trajin ' + container_io_dict["in"]["input_traj_path"] + ' ' + in_params)

    # Optional atom mask: strip everything outside it (negated mask).
    mask = self.in_parameters.get('mask', '')
    if mask:
        directives.append('strip ' + get_negative_mask(mask, out_log))

    # trajout (with the rendered output parameters)
    out_params = get_out_parameters(self.out_parameters, out_log)
    directives.append('trajout ' + container_io_dict["out"]["output_cpptraj_path"] + ' ' + out_params)

    # Dump one directive per line.
    with open(self.instructions_file, 'w') as handle:
        for directive in directives:
            handle.write(directive.strip() + '\n')

    return self.instructions_file
def __init__(self, input_structure_path, input_traj_path, output_pdb_path, input_index_path=None, properties=None, **kwargs) -> None: properties = properties or {} # Call parent class constructor super().__init__(properties) # Input/Output files self.io_dict = { "in": { "input_structure_path": input_structure_path, "input_traj_path": input_traj_path, "input_index_path": input_index_path }, "out": { "output_pdb_path": output_pdb_path } } # Properties specific for BB self.fit_selection = properties.get('fit_selection', "System") self.output_selection = properties.get('output_selection', "System") self.method = properties.get('method', "linkage") self.dista = properties.get('dista', False) self.cutoff = properties.get('cutoff', 0.1) self.properties = properties # Properties common in all GROMACS BB self.gmx_path = get_binary_path(properties, 'gmx_path') # Check the properties self.check_properties(properties) # Internal parameters self.xvg_path = fu.create_name(prefix=self.prefix, step=self.step, name=properties.get('xvg_path', 'rmsd-dist.xvg')) self.xpm_path = fu.create_name(prefix=self.prefix, step=self.step, name=properties.get('xpm_path', 'rmsd-clust.xpm')) self.log_path = fu.create_name(prefix=self.prefix, step=self.step, name=properties.get('log_path', 'cluster.log'))
def create_instructions_file(self):
    """Write the instructions input file from the properties settings.

    Each configured term becomes one line of the file. Returns the path of
    the generated instructions file.
    """
    # Place the file inside the mounted volume (container run) or inside a
    # freshly created unique directory (host run).
    if self.container_path:
        base_dir = self.container_volume_path
    else:
        base_dir = fu.create_unique_dir()
    self.instructions_file = str(PurePath(base_dir).joinpath(self.instructions_file))
    fu.create_name(prefix=self.prefix, step=self.step, name=self.instructions_file)

    # One instruction per configured term.
    with open(self.instructions_file, 'w') as handle:
        for term in self.terms:
            handle.write(term.strip() + '\n')

    return self.instructions_file
def create_instructions_file(self, container_io_dict, out_log, err_log):
    """Write the cpptraj instructions (.in) file for a per-residue
    fluctuation (``atomicfluct ... byres bfactor``) analysis.

    Returns the path of the generated instructions file.
    """
    # Instructions file lives in the mounted volume (container run) or in a
    # freshly created unique directory (host run).
    if self.container_path:
        base_dir = self.container_volume_path
    else:
        base_dir = fu.create_unique_dir()
    self.instructions_file = str(PurePath(base_dir).joinpath(self.instructions_file))
    fu.create_name(prefix=self.prefix, step=self.step, name=self.instructions_file)

    # parm + trajin
    directives = ['parm ' + container_io_dict["in"]["input_top_path"]]
    in_params = get_in_parameters(self.in_parameters, out_log)
    directives.append('trajin ' + container_io_dict["in"]["input_traj_path"] + ' ' + in_params)

    # Structure setup directives provided by the helper.
    directives += setup_structure(self)

    # Optional atom mask: strip the complement and keep the positive mask
    # for the reference directive below.
    mask = self.in_parameters.get('mask', '')
    ref_mask = ''
    if mask:
        strip_mask = get_negative_mask(mask, out_log)
        ref_mask = get_mask(mask, out_log)
        directives.append('strip ' + strip_mask)

    # Optional reference structure (uses the experimental input when given).
    reference = self.in_parameters.get('reference', '')
    inp_exp_pth = container_io_dict["in"].get("input_exp_path")
    directives += get_reference(
        reference, container_io_dict["out"]["output_cpptraj_path"],
        inp_exp_pth, ref_mask, False, self.__class__.__name__, out_log)

    directives.append(
        'atomicfluct out ' + container_io_dict["out"]["output_cpptraj_path"] + ' byres bfactor')

    # Dump one directive per line.
    with open(self.instructions_file, 'w') as handle:
        for directive in directives:
            handle.write(directive.strip() + '\n')

    return self.instructions_file
def create_instructions_file(self, container_io_dict, out_log, err_log):
    """Write the cpptraj strip instructions (.in) file from the properties
    settings.

    The ``mask`` property is mandatory; execution aborts when it is missing.
    Returns the path of the generated instructions file.
    """
    # Instructions file lives in the mounted volume (container run) or in a
    # freshly created unique directory (host run).
    base_dir = self.container_volume_path if self.container_path else fu.create_unique_dir()
    self.instructions_file = str(PurePath(base_dir).joinpath(self.instructions_file))
    fu.create_name(prefix=self.prefix, step=self.step, name=self.instructions_file)

    # parm + trajin (input parameters rendered for the strip operation).
    directives = ['parm ' + container_io_dict["in"]["input_top_path"]]
    in_params = get_in_parameters(self.in_parameters, out_log, 'strip') if self.in_parameters else ''
    directives.append('trajin ' + container_io_dict["in"]["input_traj_path"] + ' ' + in_params)

    # The mask is mandatory for a strip operation.
    mask = self.in_parameters.get('mask', '')
    if not mask or mask == 'None':
        fu.log('No mask provided, exiting', out_log, self.global_log)
        raise SystemExit('Mask parameter is mandatory')
    directives.append('strip ' + get_mask(mask, out_log))

    # trajout (with the rendered output parameters).
    out_params = get_out_parameters(self.out_parameters, out_log)
    directives.append('trajout ' + container_io_dict["out"]["output_cpptraj_path"] + ' ' + out_params)

    # Dump one directive per line.
    with open(self.instructions_file, 'w') as handle:
        for directive in directives:
            handle.write(directive.strip() + '\n')

    return self.instructions_file
def create_instrucions_file(self):
    """Create an input file using the paths provided in the configuration
    file (only used for test purposes).

    NOTE: the method name keeps its historical misspelling ("instrucions")
    to stay API-compatible with existing callers.
    """
    output_instructions_path = fu.create_name(
        prefix=self.prefix, step=self.step,
        name=get_default_value("instructions_file"))

    directives = (
        'parm ' + self.input_top_path,
        'trajin ' + self.input_traj_path,
        'trajout ' + self.output_cpptraj_path + ' ' + get_default_value("format"),
    )

    # One directive per line.
    with open(output_instructions_path, 'w') as handle:
        for directive in directives:
            handle.write(directive.strip() + '\n')

    return output_instructions_path
def launch(self) -> int:
    """Execute the :class:`Ndx2resttop <gromacs_extra.ndx2resttop.Ndx2resttop>` object.

    Parses the GROMACS index (.ndx) file, builds a position-restraints ITP
    for each (reference_group, restrain_group, chain) triplet, appends the
    corresponding ``#include`` to the matching chain ITP and re-zips the
    topology.

    Returns:
        int: 0 on success.
    """
    # Setup Biobb
    if self.check_restart():
        return 0

    top_file = fu.unzip_top(
        zip_file=self.io_dict['in'].get("input_top_zip_path"),
        out_log=self.out_log)

    # Map every '[ group ]' header to the half-open line range
    # (header_index, next_header_index) holding its atom numbers.
    # BUGFIX: the previous scan closed each group at its own header index
    # (label reassigned before the previous group was closed), producing
    # empty/truncated atom ranges.
    with open(self.io_dict['in'].get("input_ndx_path")) as ndx_file:
        lines = ndx_file.read().splitlines()
    header_indices = [i for i, line in enumerate(lines) if line.startswith('[')]
    index_dic = {}
    for pos, start in enumerate(header_indices):
        stop = header_indices[pos + 1] if pos + 1 < len(header_indices) else len(lines)
        index_dic[lines[start]] = (start, stop)
    fu.log('Index_dic: ' + str(index_dic), self.out_log, self.global_log)

    # Parse "(ref, rest, chain), (ref, rest, chain), ..." into tuples.
    self.ref_rest_chain_triplet_list = [
        tuple(elem.strip(' ()').replace(' ', '').split(','))
        for elem in self.ref_rest_chain_triplet_list.split('),')
    ]
    fu.log(
        'ref_rest_chain_triplet_list: ' + str(self.ref_rest_chain_triplet_list),
        self.out_log, self.global_log)

    for reference_group, restrain_group, chain in self.ref_rest_chain_triplet_list:
        fu.log('Reference group: ' + reference_group, self.out_log, self.global_log)
        fu.log('Restrain group: ' + restrain_group, self.out_log, self.global_log)
        fu.log('Chain: ' + chain, self.out_log, self.global_log)
        self.io_dict['out']["output_itp_path"] = fu.create_name(
            path=str(Path(top_file).parent), prefix=self.prefix,
            step=self.step, name=restrain_group + '.itp')

        # Mapping atoms from absolute enumeration to Chain relative enumeration
        ref_start, ref_stop = index_dic['[ ' + reference_group + ' ]']
        fu.log(
            'reference_group_index: start_closed:' + str(ref_start + 1)
            + ' stop_open: ' + str(ref_stop), self.out_log, self.global_log)
        reference_group_list = [
            int(elem)
            for line in lines[ref_start + 1:ref_stop]
            for elem in line.split()
        ]

        rest_start, rest_stop = index_dic['[ ' + restrain_group + ' ]']
        fu.log(
            'restrain_group_index: start_closed:' + str(rest_start + 1)
            + ' stop_open: ' + str(rest_stop), self.out_log, self.global_log)
        restrain_group_list = [
            int(elem)
            for line in lines[rest_start + 1:rest_stop]
            for elem in line.split()
        ]

        # 1-based position of each restrained atom within the reference group.
        selected_list = [
            reference_group_list.index(atom) + 1 for atom in restrain_group_list
        ]

        # Creating new ITP with restrictions
        with open(self.io_dict['out'].get("output_itp_path"), 'w') as f:
            fu.log(
                'Creating: ' + str(f)
                + ' and adding the selected atoms force constants',
                self.out_log, self.global_log)
            f.write('[ position_restraints ]\n')
            f.write('; atom type fx fy fz\n')
            for atom in selected_list:
                f.write(str(atom) + ' 1 ' + self.force_constants + '\n')

        # Including new ITP in the corresponding ITP-chain file
        for file_dir in Path(top_file).parent.iterdir():
            if not file_dir.name.startswith(
                    "posre") and not file_dir.name.endswith("_pr.itp"):
                if fnmatch.fnmatch(str(file_dir), "*_chain_" + chain + ".itp"):
                    with open(str(file_dir), 'a') as f:
                        fu.log(
                            'Opening: ' + str(f)
                            + ' and adding the ifdef include statement',
                            self.out_log, self.global_log)
                        f.write('\n')
                        f.write('; Include Position restraint file\n')
                        f.write('#ifdef CUSTOM_POSRES\n')
                        f.write('#include "' + str(
                            Path(self.io_dict['out'].get(
                                "output_itp_path")).name) + '"\n')
                        f.write('#endif\n')

    # zip topology
    fu.zip_top(zip_file=self.io_dict['out'].get("output_top_zip_path"),
               top_file=top_file, out_log=self.out_log)

    # Remove temporal files
    self.remove_tmp_files()
    return 0
def launch(self) -> int:
    """Execute the :class:`Pmxgentop <pmx.pmxgentop.Pmxgentop>` pmx.pmxgentop.Pmxgentop object.

    Unzips the input topology, runs ``pmx gentop`` (optionally inside a
    container), strips absolute paths from the resulting topology, copies
    any untouched ITP files alongside it and re-zips everything.

    Returns:
        int: return code of the pmx execution.
    """
    # Setup Biobb
    if self.check_restart():
        return 0
    self.stage_files()

    # Check if executable exists
    if not self.container_path:
        if not Path(self.pmx_path).is_file():
            if not shutil.which(self.pmx_path):
                raise FileNotFoundError('Executable %s not found. Check if it is installed in your system and correctly defined in the properties' % self.pmx_path)

    # Unzip topology to topology_out
    top_file = fu.unzip_top(zip_file=self.input_top_zip_path, out_log=self.out_log)
    top_dir = str(Path(top_file).parent)

    # Copy extra files to container: topology folder
    if self.container_path:
        fu.log('Container execution enabled', self.out_log)
        fu.log(f"Unique dir: {self.stage_io_dict['unique_dir']}", self.out_log)
        fu.log(f"{self.stage_io_dict['unique_dir']} files: {os.listdir(self.stage_io_dict['unique_dir'])}", self.out_log)
        fu.log(f"Copy all files of the unzipped original topology to unique dir:", self.out_log)
        shutil.copytree(top_dir, str(Path(self.stage_io_dict.get("unique_dir")).joinpath(Path(top_dir).name)))
        top_file = str(Path(self.container_volume_path).joinpath(Path(top_dir).name, Path(top_file).name))

    output_file_name = fu.create_name(prefix=self.prefix, step=self.step, name=str(Path(top_file).name))
    unique_dir_output_file = str(Path(fu.create_unique_dir()).joinpath(output_file_name))
    fu.log(f"unique_dir_output_file: {unique_dir_output_file}", self.out_log)

    if self.container_path:
        fu.log("Change references for container:", self.out_log)
        unique_dir_output_file = str(Path(self.container_volume_path).joinpath(Path(output_file_name)))
        fu.log(f" unique_dir_output_file: {unique_dir_output_file}", self.out_log)

    # Build the pmx gentop command line.
    self.cmd = [self.pmx_path, 'gentop',
                '-o', str(Path(unique_dir_output_file)),
                '-ff', self.force_field,
                '-p', top_file]
    if self.split:
        self.cmd.append('--split')
    if self.scale_mass:
        self.cmd.append('--scale_mass')
    if self.gmx_lib:
        self.environment = os.environ.copy()
        self.environment['GMXLIB'] = self.gmx_lib

    # Run Biobb block
    self.run_biobb()

    # Copy files to host
    self.copy_to_host()

    if self.container_path:
        # BUGFIX: this used `container_io_dict`, a name never defined in this
        # method (new-style blocks stage via self.stage_io_dict), which raised
        # a NameError on every container run.
        unique_dir_output_file = str(Path(self.stage_io_dict.get("unique_dir")).joinpath(Path(unique_dir_output_file).name))

    # Remove paths from top file
    with open(Path(unique_dir_output_file)) as top_fh:
        top_lines = top_fh.readlines()
    with open(Path(unique_dir_output_file), 'w') as top_fh:
        for line in top_lines:
            top_fh.write(line.replace(str(Path(unique_dir_output_file).parent) + '/', ''))

    # Copy the not modified itp files
    for orig_itp_file in Path(top_dir).iterdir():
        fu.log(f'Check if {str(Path(unique_dir_output_file).parent.joinpath(Path(orig_itp_file).name))} exists', self.out_log, self.global_log)
        if not Path(unique_dir_output_file).parent.joinpath(Path(orig_itp_file).name).exists():
            shutil.copy(orig_itp_file, Path(unique_dir_output_file).parent)
            fu.log(f'Copying {str(orig_itp_file)} to: {str(Path(unique_dir_output_file).parent)}', self.out_log, self.global_log)

    # zip topology
    fu.log('Compressing topology to: %s' % self.io_dict["out"]["output_top_zip_path"], self.out_log, self.global_log)
    fu.zip_top(zip_file=self.io_dict["out"]["output_top_zip_path"], top_file=str(Path(unique_dir_output_file)), out_log=self.out_log)

    # Remove temporal files
    self.tmp_files.extend([self.stage_io_dict.get("unique_dir"), top_dir])
    self.remove_tmp_files()
    return self.return_code
def launch(self) -> int:
    """Launches the execution of the GROMACS editconf module.

    Unzips the topology, inserts the ligand ITP include (and its optional
    position-restraints include) after the forcefield include, registers the
    ligand molecule in the ``[ molecules ]`` section and re-zips the result.

    Returns:
        int: 0 on success.
    """
    tmp_files = []

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Restart if needed
    if self.restart:
        output_file_list = [self.io_dict['out'].get("output_top_zip_path")]
        if fu.check_complete_files(output_file_list):
            fu.log(
                'Restart is enabled, this step: %s will the skipped' % self.step,
                out_log, self.global_log)
            return 0

    # Unzip topology
    top_file = fu.unzip_top(
        zip_file=self.io_dict['in'].get("input_top_zip_path"), out_log=out_log)
    top_dir = str(Path(top_file).parent)
    tmp_files.append(top_dir)
    itp_name = str(Path(self.io_dict['in'].get("input_itp_path")).name)

    # BUGFIX: the handle was closed twice (explicit close() inside `with`);
    # the context manager alone is enough.
    with open(top_file) as top_f:
        top_lines = top_f.readlines()
    fu.rm(top_file)

    # Locate the forcefield include and insert the ligand includes after it.
    forcefield_pattern = r'#include.*forcefield.itp\"'
    for index, line in enumerate(top_lines):
        if re.search(forcefield_pattern, line):
            break
    top_lines.insert(index + 1, '\n')
    top_lines.insert(index + 2, '; Including ligand ITP\n')
    top_lines.insert(index + 3, '#include "' + itp_name + '"\n')
    top_lines.insert(index + 4, '\n')
    if self.io_dict['in'].get("input_posres_itp_path"):
        top_lines.insert(index + 5, '; Ligand position restraints' + '\n')
        top_lines.insert(index + 6, '#ifdef ' + self.posres_name + '\n')
        top_lines.insert(
            index + 7, '#include "' + str(
                Path(self.io_dict['in'].get("input_posres_itp_path")).name) + '"\n')
        top_lines.insert(index + 8, '#endif' + '\n')
        top_lines.insert(index + 9, '\n')

    # Read the molecule name from the ligand ITP [ moleculetype ] section.
    inside_moleculetype_section = False
    with open(self.io_dict['in'].get("input_itp_path")) as itp_file:
        moleculetype_pattern = r'\[ moleculetype \]'
        for line in itp_file:
            if re.search(moleculetype_pattern, line):
                inside_moleculetype_section = True
                continue
            if inside_moleculetype_section and not line.startswith(';'):
                moleculetype = line.strip().split()[0].strip()
                break

    # Register the ligand right after the last PROTEIN entry of the
    # [ molecules ] section, or at the end of the file if none is found.
    molecules_pattern = r'\[ molecules \]'
    inside_molecules_section = False
    index_molecule = None
    molecule_string = moleculetype + (20 - len(moleculetype)) * ' ' + '1' + '\n'
    for index, line in enumerate(top_lines):
        if re.search(molecules_pattern, line):
            inside_molecules_section = True
            continue
        if inside_molecules_section and not line.startswith(
                ';') and line.upper().startswith('PROTEIN'):
            index_molecule = index
    # BUGFIX: `if index_molecule:` would treat a match at line 0 as "not
    # found"; compare against None explicitly.
    if index_molecule is not None:
        top_lines.insert(index_molecule + 1, molecule_string)
    else:
        top_lines.append(molecule_string)

    new_top = fu.create_name(path=top_dir, prefix=self.prefix,
                             step=self.step, name='ligand.top')
    with open(new_top, 'w') as new_top_f:
        new_top_f.write("".join(top_lines))

    shutil.copy2(self.io_dict['in'].get("input_itp_path"), top_dir)
    if self.io_dict['in'].get("input_posres_itp_path"):
        shutil.copy2(self.io_dict['in'].get("input_posres_itp_path"), top_dir)

    # zip topology
    fu.log(
        'Compressing topology to: %s' %
        self.io_dict['out'].get("output_top_zip_path"), out_log, self.global_log)
    fu.zip_top(zip_file=self.io_dict['out'].get("output_top_zip_path"),
               top_file=new_top, out_log=out_log)

    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)
    return 0
def launch(self) -> int:
    """Launches the execution of the GROMACS pdb2gmx module.

    Builds the pdb2gmx command line (optionally wrapped for container
    execution), runs it, zips the produced topology and cleans up.

    Returns:
        int: return code of the wrapped command.
    """
    tmp_files = []

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Check GROMACS version (only meaningful on the host; inside a container
    # the bundled GROMACS is trusted).
    if not self.container_path:
        if self.gmx_version < 512:
            raise GromacsVersionError(
                "Gromacs version should be 5.1.2 or newer %d detected" %
                self.gmx_version)
        fu.log(
            "GROMACS %s %d version detected" %
            (self.__class__.__name__, self.gmx_version), out_log)

    # Restart if needed
    if self.restart:
        if fu.check_complete_files(self.io_dict["out"].values()):
            fu.log(
                'Restart is enabled, this step: %s will the skipped' %
                self.step, out_log, self.global_log)
            return 0

    # Stage the I/O files into the container volume (no-op on host runs).
    container_io_dict = fu.copy_to_container(self.container_path,
                                             self.container_volume_path,
                                             self.io_dict)

    # Internal (step-prefixed) names for the topology and itp outputs.
    output_top_path = fu.create_name(prefix=self.prefix,
                                     step=self.step,
                                     name=self.output_top_path)
    output_itp_path = fu.create_name(prefix=self.prefix,
                                     step=self.step,
                                     name=self.output_itp_path)

    cmd = [
        self.gmx_path, "pdb2gmx", "-f",
        container_io_dict["in"]["input_pdb_path"], "-o",
        container_io_dict["out"]["output_gro_path"], "-p", output_top_path,
        "-water", self.water_type, "-ff", self.force_field, "-i",
        output_itp_path
    ]

    # Interactive histidine protonation: the choices are piped via echo.
    if self.his:
        cmd.append("-his")
        cmd = ['echo', self.his, '|'] + cmd
    if self.ignh:
        cmd.append("-ignh")

    # Optional custom GMXLIB location passed through the environment.
    new_env = None
    if self.gmxlib:
        new_env = os.environ.copy()
        new_env['GMXLIB'] = self.gmxlib

    cmd = fu.create_cmd_line(
        cmd,
        container_path=self.container_path,
        host_volume=container_io_dict.get("unique_dir"),
        container_volume=self.container_volume_path,
        container_working_dir=self.container_working_dir,
        container_user_uid=self.container_user_id,
        container_shell_path=self.container_shell_path,
        container_image=self.container_image,
        out_log=out_log,
        global_log=self.global_log)
    returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log,
                                        self.global_log, new_env).launch()
    fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)

    # For container runs the topology ends up in the staged unique dir.
    if self.container_path:
        output_top_path = os.path.join(container_io_dict.get("unique_dir"),
                                       output_top_path)

    # zip topology
    # NOTE(review): the log mentions the container-side zip path while
    # zip_top writes to the host-side self.io_dict path — confirm intended.
    fu.log(
        'Compressing topology to: %s' %
        container_io_dict["out"]["output_top_zip_path"], out_log,
        self.global_log)
    fu.zip_top(zip_file=self.io_dict["out"]["output_top_zip_path"],
               top_file=output_top_path,
               out_log=out_log)

    tmp_files.append(self.output_top_path)
    tmp_files.append(self.output_itp_path)
    tmp_files.append(container_io_dict.get("unique_dir"))
    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)

    return returncode
def launch(self) -> int:
    """Launch the topology generation.

    Parses the GROMACS index (.ndx) file, builds a position-restraints ITP
    for each (reference_group, restrain_group, chain) triplet, appends the
    corresponding ``#include`` to the matching chain ITP and re-zips the
    topology.

    Returns:
        int: 0 on success.
    """
    tmp_files = []

    # Get local loggers from launchlogger decorator
    out_log = getattr(self, 'out_log', None)
    err_log = getattr(self, 'err_log', None)

    # Restart if needed
    if self.restart:
        output_file_list = [self.io_dict['out'].get("output_top_zip_path")]
        if fu.check_complete_files(output_file_list):
            fu.log('Restart is enabled, this step: %s will the skipped' % self.step, out_log, self.global_log)
            return 0

    top_file = fu.unzip_top(zip_file=self.io_dict['in'].get("input_top_zip_path"), out_log=out_log)

    # Map every '[ group ]' header to the half-open line range
    # (header_index, next_header_index) holding its atom numbers.
    # BUGFIX: the previous scan closed each group at its own header index
    # (label reassigned before the previous group was closed), producing
    # empty/truncated atom ranges.
    with open(self.io_dict['in'].get("input_ndx_path")) as ndx_file:
        lines = ndx_file.read().splitlines()
    header_indices = [i for i, line in enumerate(lines) if line.startswith('[')]
    index_dic = {}
    for pos, start in enumerate(header_indices):
        stop = header_indices[pos + 1] if pos + 1 < len(header_indices) else len(lines)
        index_dic[lines[start]] = (start, stop)
    fu.log('Index_dic: '+str(index_dic), out_log, self.global_log)

    # Parse "(ref, rest, chain), (ref, rest, chain), ..." into tuples.
    self.ref_rest_chain_triplet_list = [tuple(elem.strip(' ()').replace(' ', '').split(',')) for elem in self.ref_rest_chain_triplet_list.split('),')]
    fu.log('ref_rest_chain_triplet_list: ' + str(self.ref_rest_chain_triplet_list), out_log, self.global_log)

    for reference_group, restrain_group, chain in self.ref_rest_chain_triplet_list:
        fu.log('Reference group: '+reference_group, out_log, self.global_log)
        fu.log('Restrain group: '+restrain_group, out_log, self.global_log)
        fu.log('Chain: '+chain, out_log, self.global_log)
        self.io_dict['out']["output_itp_path"] = fu.create_name(path=str(Path(top_file).parent), prefix=self.prefix, step=self.step, name=restrain_group+'.itp')

        # Mapping atoms from absolute enumeration to Chain relative enumeration
        ref_start, ref_stop = index_dic['[ '+reference_group+' ]']
        fu.log('reference_group_index: start_closed:'+str(ref_start+1)+' stop_open: '+str(ref_stop), out_log, self.global_log)
        reference_group_list = [int(elem) for line in lines[ref_start+1: ref_stop] for elem in line.split()]

        rest_start, rest_stop = index_dic['[ '+restrain_group+' ]']
        fu.log('restrain_group_index: start_closed:'+str(rest_start+1)+' stop_open: '+str(rest_stop), out_log, self.global_log)
        restrain_group_list = [int(elem) for line in lines[rest_start+1: rest_stop] for elem in line.split()]

        # 1-based position of each restrained atom within the reference group.
        selected_list = [reference_group_list.index(atom)+1 for atom in restrain_group_list]

        # Creating new ITP with restrictions
        with open(self.io_dict['out'].get("output_itp_path"), 'w') as f:
            fu.log('Creating: '+str(f)+' and adding the selected atoms force constants', out_log, self.global_log)
            f.write('[ position_restraints ]\n')
            f.write('; atom type fx fy fz\n')
            for atom in selected_list:
                f.write(str(atom)+' 1 '+self.force_constants+'\n')

        # Including new ITP in the corresponding ITP-chain file
        for file_dir in Path(top_file).parent.iterdir():
            if not file_dir.name.startswith("posre") and not file_dir.name.endswith("_pr.itp"):
                if fnmatch.fnmatch(str(file_dir), "*_chain_"+chain+".itp"):
                    with open(str(file_dir), 'a') as f:
                        fu.log('Opening: '+str(f)+' and adding the ifdef include statement', out_log, self.global_log)
                        f.write('\n')
                        f.write('; Include Position restraint file\n')
                        f.write('#ifdef CUSTOM_POSRES\n')
                        f.write('#include "'+str(Path(self.io_dict['out'].get("output_itp_path")).name)+'"\n')
                        f.write('#endif\n')

    # zip topology
    fu.zip_top(zip_file=self.io_dict['out'].get("output_top_zip_path"), top_file=top_file, out_log=out_log)

    if self.remove_tmp:
        fu.rm_file_list(tmp_files, out_log=out_log)

    return 0
def launch(self) -> int:
    """Execute the :class:`AppendLigand <gromacs_extra.append_ligand.AppendLigand>` object.

    Unzips the topology, inserts the ligand ITP include (and its optional
    position-restraints include) after the forcefield include, registers the
    ligand molecule in the ``[ molecules ]`` section and re-zips the result.

    Returns:
        int: 0 on success, 1 when the input topology file is empty.
    """
    # Setup Biobb
    if self.check_restart():
        return 0

    # Unzip topology
    top_file = fu.unzip_top(
        zip_file=self.io_dict['in'].get("input_top_zip_path"),
        out_log=self.out_log)
    top_dir = str(Path(top_file).parent)
    itp_name = str(Path(self.io_dict['in'].get("input_itp_path")).name)

    # BUGFIX: the handle was closed twice (explicit close() inside `with`);
    # the context manager alone is enough.
    with open(top_file) as top_f:
        top_lines = top_f.readlines()
    fu.rm(top_file)

    # Locate the forcefield include; an empty topology is a fatal error.
    forcefield_pattern = r'#include.*forcefield.itp\"'
    if top_lines:
        for index, line in enumerate(top_lines):
            if re.search(forcefield_pattern, line):
                break
    else:
        fu.log(
            f'FATAL: Input topfile {top_file} from input_top_zip_path {self.io_dict["in"].get("input_top_zip_path")} is empty.',
            self.out_log, self.global_log)
        return 1

    top_lines.insert(index + 1, '\n')
    top_lines.insert(index + 2, '; Including ligand ITP\n')
    top_lines.insert(index + 3, '#include "' + itp_name + '"\n')
    top_lines.insert(index + 4, '\n')
    if self.io_dict['in'].get("input_posres_itp_path"):
        top_lines.insert(index + 5, '; Ligand position restraints' + '\n')
        top_lines.insert(index + 6, '#ifdef ' + self.posres_name + '\n')
        top_lines.insert(
            index + 7, '#include "' + str(
                Path(self.io_dict['in'].get("input_posres_itp_path")).name) + '"\n')
        top_lines.insert(index + 8, '#endif' + '\n')
        top_lines.insert(index + 9, '\n')

    # Read the molecule name from the ligand ITP [ moleculetype ] section.
    inside_moleculetype_section = False
    with open(self.io_dict['in'].get("input_itp_path")) as itp_file:
        moleculetype_pattern = r'\[ moleculetype \]'
        for line in itp_file:
            if re.search(moleculetype_pattern, line):
                inside_moleculetype_section = True
                continue
            if inside_moleculetype_section and not line.startswith(';'):
                moleculetype = line.strip().split()[0].strip()
                break

    # Register the ligand right after the last PROTEIN entry of the
    # [ molecules ] section, or at the end of the file if none is found.
    molecules_pattern = r'\[ molecules \]'
    inside_molecules_section = False
    index_molecule = None
    molecule_string = moleculetype + (20 - len(moleculetype)) * ' ' + '1' + '\n'
    for index, line in enumerate(top_lines):
        if re.search(molecules_pattern, line):
            inside_molecules_section = True
            continue
        if inside_molecules_section and not line.startswith(
                ';') and line.upper().startswith('PROTEIN'):
            index_molecule = index
    # BUGFIX: `if index_molecule:` would treat a match at line 0 as "not
    # found"; compare against None explicitly.
    if index_molecule is not None:
        top_lines.insert(index_molecule + 1, molecule_string)
    else:
        top_lines.append(molecule_string)

    new_top = fu.create_name(path=top_dir, prefix=self.prefix,
                             step=self.step, name='ligand.top')
    with open(new_top, 'w') as new_top_f:
        new_top_f.write("".join(top_lines))

    shutil.copy2(self.io_dict['in'].get("input_itp_path"), top_dir)
    if self.io_dict['in'].get("input_posres_itp_path"):
        shutil.copy2(self.io_dict['in'].get("input_posres_itp_path"), top_dir)

    # zip topology
    fu.log(
        'Compressing topology to: %s' %
        self.io_dict['out'].get("output_top_zip_path"), self.out_log,
        self.global_log)
    fu.zip_top(zip_file=self.io_dict['out'].get("output_top_zip_path"),
               top_file=new_top, out_log=self.out_log)

    # Remove temporal files
    self.tmp_files.append(top_dir)
    self.remove_tmp_files()
    return 0
def launch(self) -> int: """Execute the :class:`Pdb2gmx <gromacs.pdb2gmx.Pdb2gmx>` object.""" # Setup Biobb if self.check_restart(): return 0 self.stage_files() internal_top_name = fu.create_name(prefix=self.prefix, step=self.step, name=self.internal_top_name) internal_itp_name = fu.create_name(prefix=self.prefix, step=self.step, name=self.internal_itp_name) # Create command line self.cmd = [ self.gmx_path, "pdb2gmx", "-f", self.stage_io_dict["in"]["input_pdb_path"], "-o", self.stage_io_dict["out"]["output_gro_path"], "-p", internal_top_name, "-water", self.water_type, "-ff", self.force_field, "-i", internal_itp_name ] if self.his: self.cmd.append("-his") self.cmd = ['echo', self.his, '|'] + self.cmd if self.ignh: self.cmd.append("-ignh") if self.merge: self.cmd.append("-merge") self.cmd.append("all") if self.gmx_lib: self.environment = os.environ.copy() self.environment['GMXLIB'] = self.gmx_lib # Check GROMACS version if not self.container_path: if self.gmx_version < 512: raise GromacsVersionError( "Gromacs version should be 5.1.2 or newer %d detected" % self.gmx_version) fu.log( "GROMACS %s %d version detected" % (self.__class__.__name__, self.gmx_version), self.out_log) # Run Biobb block self.run_biobb() # Copy files to host self.copy_to_host() if self.container_path: internal_top_name = os.path.join( self.stage_io_dict.get("unique_dir"), internal_top_name) # zip topology fu.log( 'Compressing topology to: %s' % self.io_dict["out"]["output_top_zip_path"], self.out_log, self.global_log) fu.zip_top(zip_file=self.io_dict["out"]["output_top_zip_path"], top_file=internal_top_name, out_log=self.out_log) # Remove temporal files self.tmp_files.extend([ self.internal_top_name, self.internal_itp_name, self.stage_io_dict.get("unique_dir") ]) self.remove_tmp_files() return self.return_code