Example #1
    def launch(self) -> int:
        """Execute the :class:`Solvate <gromacs.solvate.Solvate>` object."""

        # Setup Biobb
        if self.check_restart(): return 0
        self.stage_files()

        # Unzip topology to topology_out
        top_file = fu.unzip_top(zip_file=self.input_top_zip_path, out_log=self.out_log)
        top_dir = str(Path(top_file).parent)

        if self.container_path:
            shutil.copytree(top_dir, str(Path(self.stage_io_dict.get("unique_dir")).joinpath(Path(top_dir).name)))
            top_file = str(Path(self.container_volume_path).joinpath(Path(top_dir).name, Path(top_file).name))

        self.cmd = [self.gmx_path, 'solvate',
                    '-cp', self.stage_io_dict["in"]["input_solute_gro_path"],
                    '-cs', self.stage_io_dict["in"]["input_solvent_gro_path"],
                    '-o', self.stage_io_dict["out"]["output_gro_path"],
                    '-p', top_file]

        if self.shell:
            self.cmd.append("-shell")
            self.cmd.append(str(self.shell))

        if self.gmx_lib:
            self.environment = os.environ.copy()
            self.environment['GMXLIB'] = self.gmx_lib

        # Check GROMACS version
        if not self.container_path:
            if self.gmx_version < 512:
                raise GromacsVersionError("Gromacs version should be 5.1.2 or newer %d detected" % self.gmx_version)
            fu.log("GROMACS %s %d version detected" % (self.__class__.__name__, self.gmx_version), self.out_log)

        # Run Biobb block
        self.run_biobb()

        # Copy files to host
        self.copy_to_host()

        if self.container_path:
            top_file = str(Path(self.stage_io_dict.get("unique_dir")).joinpath(Path(top_dir).name, Path(top_file).name))

        # zip topology
        fu.log('Compressing topology to: %s' % self.stage_io_dict["out"]["output_top_zip_path"], self.out_log,
               self.global_log)
        fu.zip_top(zip_file=self.io_dict["out"]["output_top_zip_path"], top_file=top_file, out_log=self.out_log)

        # Remove temporal files
        self.tmp_files.extend([self.stage_io_dict.get("unique_dir"), top_dir])
        self.remove_tmp_files()

        return self.return_code
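
The container branch above (the shutil.copytree call plus the Path(...).joinpath rewrite) recurs in several of the examples below: the unzipped topology folder is copied next to the other staged files on the host, and the path handed to the command line is rewritten relative to the container mount point. A minimal standalone sketch of that remapping, using a hypothetical helper that is not part of biobb and assuming the staging directory is bind-mounted inside the container at container_volume_path:

    # Hypothetical helper, not part of biobb: copy the unzipped topology folder into the
    # host staging directory and return the equivalent path as seen from inside the container.
    import shutil
    from pathlib import Path

    def remap_topology_for_container(top_file, unique_dir, container_volume_path):
        top_dir = Path(top_file).parent
        # Host side: put the whole topology folder next to the other staged files
        shutil.copytree(top_dir, Path(unique_dir) / top_dir.name)
        # Container side: same relative layout, rooted at the volume mount point
        return str(Path(container_volume_path) / top_dir.name / Path(top_file).name)
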
Example #2
    def launch(self) -> int:
        """Execute the :class:`Ndx2resttop <gromacs_extra.ndx2resttop.Ndx2resttop>` object."""
        # Setup Biobb
        if self.check_restart(): return 0

        top_file = fu.unzip_top(
            zip_file=self.io_dict['in'].get("input_top_zip_path"),
            out_log=self.out_log)

        # Map each index-file group header ('[ name ]') to the (start, stop) range of its
        # lines; the members of a group are later read as lines[start + 1:stop]
        index_dic = {}
        label = None
        lines = open(
            self.io_dict['in'].get("input_ndx_path")).read().splitlines()
        for index, line in enumerate(lines):
            if line.startswith('['):
                index_dic[line] = (index,)
                if label is not None:
                    # Close the range of the previous group at the current header
                    index_dic[label] = index_dic[label][0], index
                label = line
        # Close the range of the last group at the end of the file
        index_dic[label] = index_dic[label][0], len(lines)
        fu.log('Index_dic: ' + str(index_dic), self.out_log, self.global_log)

        self.ref_rest_chain_triplet_list = [
            tuple(elem.strip(' ()').replace(' ', '').split(','))
            for elem in self.ref_rest_chain_triplet_list.split('),')
        ]
        fu.log(
            'ref_rest_chain_triplet_list: ' +
            str(self.ref_rest_chain_triplet_list), self.out_log,
            self.global_log)
        for reference_group, restrain_group, chain in self.ref_rest_chain_triplet_list:
            fu.log('Reference group: ' + reference_group, self.out_log,
                   self.global_log)
            fu.log('Restrain group: ' + restrain_group, self.out_log,
                   self.global_log)
            fu.log('Chain: ' + chain, self.out_log, self.global_log)
            self.io_dict['out']["output_itp_path"] = fu.create_name(
                path=str(Path(top_file).parent),
                prefix=self.prefix,
                step=self.step,
                name=restrain_group + '.itp')

            # Mapping atoms from absolute enumeration to Chain relative enumeration
            fu.log(
                'reference_group_index: start_closed:' +
                str(index_dic['[ ' + reference_group + ' ]'][0] + 1) +
                ' stop_open: ' +
                str(index_dic['[ ' + reference_group + ' ]'][1]), self.out_log,
                self.global_log)
            reference_group_list = [
                int(elem)
                for line in lines[index_dic['[ ' + reference_group + ' ]'][0] +
                                  1:index_dic['[ ' + reference_group +
                                              ' ]'][1]]
                for elem in line.split()
            ]
            fu.log(
                'restrain_group_index: start_closed:' +
                str(index_dic['[ ' + restrain_group + ' ]'][0] + 1) +
                ' stop_open: ' +
                str(index_dic['[ ' + restrain_group + ' ]'][1]), self.out_log,
                self.global_log)
            restrain_group_list = [
                int(elem)
                for line in lines[index_dic['[ ' + restrain_group + ' ]'][0] +
                                  1:index_dic['[ ' + restrain_group + ' ]'][1]]
                for elem in line.split()
            ]
            selected_list = [
                reference_group_list.index(atom) + 1
                for atom in restrain_group_list
            ]
            # Creating new ITP with restrictions
            with open(self.io_dict['out'].get("output_itp_path"), 'w') as f:
                fu.log(
                    'Creating: ' + str(f) +
                    ' and adding the selected atoms force constants',
                    self.out_log, self.global_log)
                f.write('[ position_restraints ]\n')
                f.write('; atom  type      fx      fy      fz\n')
                for atom in selected_list:
                    f.write(
                        str(atom) + '     1  ' + self.force_constants + '\n')

            # Including new ITP in the corresponding ITP-chain file
            for file_dir in Path(top_file).parent.iterdir():
                if not file_dir.name.startswith(
                        "posre") and not file_dir.name.endswith("_pr.itp"):
                    if fnmatch.fnmatch(str(file_dir),
                                       "*_chain_" + chain + ".itp"):
                        with open(str(file_dir), 'a') as f:
                            fu.log(
                                'Opening: ' + str(f) +
                                ' and adding the ifdef include statement',
                                self.out_log, self.global_log)
                            f.write('\n')
                            f.write('; Include Position restraint file\n')
                            f.write('#ifdef CUSTOM_POSRES\n')
                            f.write('#include "' + str(
                                Path(self.io_dict['out'].get(
                                    "output_itp_path")).name) + '"\n')
                            f.write('#endif\n')

        # zip topology
        fu.zip_top(zip_file=self.io_dict['out'].get("output_top_zip_path"),
                   top_file=top_file,
                   out_log=self.out_log)

        # Remove temporal files
        self.remove_tmp_files()

        return 0
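
The index_dic built above maps each '[ group ]' header of the .ndx file to a half-open (start, stop) range, and the group members are then read with lines[start + 1:stop]. A self-contained sketch of that parsing on a tiny inline index; the helper name ndx_group_ranges and the example index content are made up for illustration:

    def ndx_group_ranges(lines):
        """Map each '[ name ]' header to the (header_index, stop) slice of its group."""
        ranges = {}
        label = None
        for i, line in enumerate(lines):
            if line.startswith('['):
                if label is not None:
                    ranges[label] = (ranges[label][0], i)  # close the previous group
                label = line
                ranges[label] = (i, len(lines))            # provisional stop: end of file
        return ranges

    ndx = ["[ Protein ]", "1 2 3", "4 5", "[ RestrainGroup ]", "2 4"]
    ranges = ndx_group_ranges(ndx)
    start, stop = ranges["[ RestrainGroup ]"]
    atoms = [int(elem) for line in ndx[start + 1:stop] for elem in line.split()]
    assert atoms == [2, 4]
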
Example #3
    def launch(self) -> int:
        """Execute the :class:`Pmxgentop <pmx.pmxgentop.Pmxgentop>` pmx.pmxgentop.Pmxgentop object."""

        # Setup Biobb
        if self.check_restart(): return 0
        self.stage_files()

        # Check if executable exists
        if not self.container_path:
            if not Path(self.pmx_path).is_file():
                if not shutil.which(self.pmx_path):
                    raise FileNotFoundError('Executable %s not found. Check if it is installed in your system and correctly defined in the properties' % self.pmx_path)

        # Unzip topology to topology_out
        top_file = fu.unzip_top(zip_file=self.input_top_zip_path, out_log=self.out_log)
        top_dir = str(Path(top_file).parent)

        # Copy extra files to container: topology folder
        if self.container_path:
            fu.log('Container execution enabled', self.out_log)
            fu.log(f"Unique dir: {self.stage_io_dict['unique_dir']}", self.out_log)
            fu.log(f"{self.stage_io_dict['unique_dir']} files: {os.listdir(self.stage_io_dict['unique_dir'])}", self.out_log)
            fu.log(f"Copy all files of the unzipped original topology to unique dir:", self.out_log)
            shutil.copytree(top_dir, str(Path(self.stage_io_dict.get("unique_dir")).joinpath(Path(top_dir).name)))
            top_file = str(Path(self.container_volume_path).joinpath(Path(top_dir).name, Path(top_file).name))

        output_file_name = fu.create_name(prefix=self.prefix, step=self.step, name=str(Path(top_file).name))
        unique_dir_output_file = str(Path(fu.create_unique_dir()).joinpath(output_file_name))
        fu.log(f"unique_dir_output_file: {unique_dir_output_file}", self.out_log)

        if self.container_path:
            fu.log("Change references for container:", self.out_log)
            unique_dir_output_file = str(Path(self.container_volume_path).joinpath(Path(output_file_name)))
            fu.log(f"    unique_dir_output_file: {unique_dir_output_file}", self.out_log)

        self.cmd = [self.pmx_path, 'gentop',
                    '-o', str(Path(unique_dir_output_file)),
                    '-ff', self.force_field,
                    '-p', top_file]

        if self.split:
            self.cmd.append('--split')
        if self.scale_mass:
            self.cmd.append('--scale_mass')

        if self.gmx_lib:
            self.environment = os.environ.copy()
            self.environment['GMXLIB'] = self.gmx_lib

        # Run Biobb block
        self.run_biobb()

        # Copy files to host
        self.copy_to_host()

        if self.container_path:
            unique_dir_output_file = str(Path(self.stage_io_dict.get("unique_dir")).joinpath(Path(unique_dir_output_file).name))

        # Remove paths from top file
        with open(Path(unique_dir_output_file)) as top_fh:
            top_lines = top_fh.readlines()
        with open(Path(unique_dir_output_file), 'w') as top_fh:
            for line in top_lines:
                top_fh.write(line.replace(str(Path(unique_dir_output_file).parent)+'/', ''))
        # Copy the not modified itp files
        for orig_itp_file in Path(top_dir).iterdir():
            fu.log(f'Check if {str(Path(unique_dir_output_file).parent.joinpath(Path(orig_itp_file).name))} exists', self.out_log, self.global_log)
            if not Path(unique_dir_output_file).parent.joinpath(Path(orig_itp_file).name).exists():
                shutil.copy(orig_itp_file, Path(unique_dir_output_file).parent)
                fu.log(f'Copying {str(orig_itp_file)} to: {str(Path(unique_dir_output_file).parent)}', self.out_log, self.global_log)

        # zip topology
        fu.log('Compressing topology to: %s' % self.io_dict["out"]["output_top_zip_path"], self.out_log, self.global_log)
        fu.zip_top(zip_file=self.io_dict["out"]["output_top_zip_path"], top_file=str(Path(unique_dir_output_file)), out_log=self.out_log)

        self.tmp_files.extend([self.stage_io_dict.get("unique_dir"), top_dir])
        self.remove_tmp_files()

        return self.return_code
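
The "Remove paths from top file" step above rewrites the generated topology so its include lines refer to bare file names instead of files under the output directory, presumably so the includes resolve next to the topology once the whole folder is zipped. A minimal sketch of the same in-place rewrite with pathlib; strip_parent_prefix is a hypothetical name, not a biobb function:

    from pathlib import Path

    def strip_parent_prefix(top_path):
        """Turn '#include "/abs/dir/lig.itp"' into '#include "lig.itp"' on every line."""
        top = Path(top_path)
        prefix = str(top.parent) + '/'
        top.write_text(top.read_text().replace(prefix, ''))
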
Example #4
    def launch(self) -> int:
        """Launches the execution of the GROMACS solvate module."""
        tmp_files = []

        # Get local loggers from launchlogger decorator
        out_log = getattr(self, 'out_log', None)
        err_log = getattr(self, 'err_log', None)

        # Check GROMACS version
        if not self.container_path:
            if self.gmx_version < 512:
                raise GromacsVersionError(
                    "Gromacs version should be 5.1.2 or newer %d detected" %
                    self.gmx_version)
            fu.log(
                "GROMACS %s %d version detected" %
                (self.__class__.__name__, self.gmx_version), out_log)

        # Restart if needed
        if self.restart:
            if fu.check_complete_files(self.io_dict["out"].values()):
                fu.log(
                    'Restart is enabled, this step: %s will be skipped' %
                    self.step, out_log, self.global_log)
                return 0

        # Unzip topology to topology_out
        top_file = fu.unzip_top(zip_file=self.input_top_zip_path,
                                out_log=out_log)
        top_dir = str(Path(top_file).parent)
        tmp_files.append(top_dir)

        container_io_dict = fu.copy_to_container(self.container_path,
                                                 self.container_volume_path,
                                                 self.io_dict)

        if self.container_path:
            shutil.copytree(
                top_dir,
                str(
                    Path(container_io_dict.get("unique_dir")).joinpath(
                        Path(top_dir).name)))
            top_file = str(
                Path(self.container_volume_path).joinpath(
                    Path(top_dir).name,
                    Path(top_file).name))

        cmd = [
            self.gmx_path, 'solvate', '-cp',
            container_io_dict["in"]["input_solute_gro_path"], '-cs',
            self.input_solvent_gro_path, '-o',
            container_io_dict["out"]["output_gro_path"], '-p', top_file
        ]

        new_env = None
        if self.gmxlib:
            new_env = os.environ.copy()
            new_env['GMXLIB'] = self.gmxlib

        cmd = fu.create_cmd_line(
            cmd,
            container_path=self.container_path,
            host_volume=container_io_dict.get("unique_dir"),
            container_volume=self.container_volume_path,
            container_working_dir=self.container_working_dir,
            container_user_uid=self.container_user_id,
            container_shell_path=self.container_shell_path,
            container_image=self.container_image,
            out_log=out_log,
            global_log=self.global_log)
        returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log,
                                            self.global_log, new_env).launch()
        fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)

        if self.container_path:
            top_file = str(
                Path(container_io_dict.get("unique_dir")).joinpath(
                    Path(top_dir).name,
                    Path(top_file).name))

        # zip topology
        fu.log(
            'Compressing topology to: %s' %
            container_io_dict["out"]["output_top_zip_path"], out_log,
            self.global_log)
        fu.zip_top(zip_file=self.io_dict["out"]["output_top_zip_path"],
                   top_file=top_file,
                   out_log=out_log)

        tmp_files.append(container_io_dict.get("unique_dir"))
        if self.remove_tmp:
            fu.rm_file_list(tmp_files, out_log=out_log)

        return returncode
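
The restart guard above skips the step when every declared output file already exists. A minimal sketch of such a check; outputs_already_complete is a hypothetical helper written for illustration, not the actual fu.check_complete_files:

    from pathlib import Path

    def outputs_already_complete(output_paths):
        """True when every declared output file exists and is non-empty."""
        paths = [Path(p) for p in output_paths if p]
        return bool(paths) and all(p.is_file() and p.stat().st_size > 0 for p in paths)
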
Example #5
    def launch(self) -> int:
        """Launches the execution of the GROMACS pdb2gmx module."""
        tmp_files = []

        # Get local loggers from launchlogger decorator
        out_log = getattr(self, 'out_log', None)
        err_log = getattr(self, 'err_log', None)

        # Check GROMACS version
        if not self.container_path:
            if self.gmx_version < 512:
                raise GromacsVersionError(
                    "Gromacs version should be 5.1.2 or newer %d detected" %
                    self.gmx_version)
            fu.log(
                "GROMACS %s %d version detected" %
                (self.__class__.__name__, self.gmx_version), out_log)

        # Restart if needed
        if self.restart:
            if fu.check_complete_files(self.io_dict["out"].values()):
                fu.log(
                    'Restart is enabled, this step: %s will be skipped' %
                    self.step, out_log, self.global_log)
                return 0

        container_io_dict = fu.copy_to_container(self.container_path,
                                                 self.container_volume_path,
                                                 self.io_dict)

        output_top_path = fu.create_name(prefix=self.prefix,
                                         step=self.step,
                                         name=self.output_top_path)
        output_itp_path = fu.create_name(prefix=self.prefix,
                                         step=self.step,
                                         name=self.output_itp_path)

        cmd = [
            self.gmx_path, "pdb2gmx", "-f",
            container_io_dict["in"]["input_pdb_path"], "-o",
            container_io_dict["out"]["output_gro_path"], "-p", output_top_path,
            "-water", self.water_type, "-ff", self.force_field, "-i",
            output_itp_path
        ]

        if self.his:
            cmd.append("-his")
            cmd = ['echo', self.his, '|'] + cmd
        if self.ignh:
            cmd.append("-ignh")

        new_env = None
        if self.gmxlib:
            new_env = os.environ.copy()
            new_env['GMXLIB'] = self.gmxlib

        cmd = fu.create_cmd_line(
            cmd,
            container_path=self.container_path,
            host_volume=container_io_dict.get("unique_dir"),
            container_volume=self.container_volume_path,
            container_working_dir=self.container_working_dir,
            container_user_uid=self.container_user_id,
            container_shell_path=self.container_shell_path,
            container_image=self.container_image,
            out_log=out_log,
            global_log=self.global_log)
        returncode = cmd_wrapper.CmdWrapper(cmd, out_log, err_log,
                                            self.global_log, new_env).launch()
        fu.copy_to_host(self.container_path, container_io_dict, self.io_dict)

        if self.container_path:
            output_top_path = os.path.join(container_io_dict.get("unique_dir"),
                                           output_top_path)

        # zip topology
        fu.log(
            'Compressing topology to: %s' %
            container_io_dict["out"]["output_top_zip_path"], out_log,
            self.global_log)
        fu.zip_top(zip_file=self.io_dict["out"]["output_top_zip_path"],
                   top_file=output_top_path,
                   out_log=out_log)

        tmp_files.append(self.output_top_path)
        tmp_files.append(self.output_itp_path)
        tmp_files.append(container_io_dict.get("unique_dir"))
        if self.remove_tmp:
            fu.rm_file_list(tmp_files, out_log=out_log)

        return returncode
Example #6
    def launch(self) -> int:
        """Launches the execution of the GROMACS editconf module."""
        tmp_files = []

        # Get local loggers from launchlogger decorator
        out_log = getattr(self, 'out_log', None)
        err_log = getattr(self, 'err_log', None)

        # Restart if needed
        if self.restart:
            output_file_list = [self.io_dict['out'].get("output_top_zip_path")]
            if fu.check_complete_files(output_file_list):
                fu.log(
                    'Restart is enabled, this step: %s will be skipped' %
                    self.step, out_log, self.global_log)
                return 0

        # Unzip topology
        top_file = fu.unzip_top(
            zip_file=self.io_dict['in'].get("input_top_zip_path"),
            out_log=out_log)
        top_dir = str(Path(top_file).parent)
        tmp_files.append(top_dir)
        itp_name = str(Path(self.io_dict['in'].get("input_itp_path")).name)

        with open(top_file) as top_f:
            top_lines = top_f.readlines()
        fu.rm(top_file)

        forcefield_pattern = r'#include.*forcefield.itp\"'
        for index, line in enumerate(top_lines):
            if re.search(forcefield_pattern, line):
                break
        top_lines.insert(index + 1, '\n')
        top_lines.insert(index + 2, '; Including ligand ITP\n')
        top_lines.insert(index + 3, '#include "' + itp_name + '"\n')
        top_lines.insert(index + 4, '\n')
        if self.io_dict['in'].get("input_posres_itp_path"):
            top_lines.insert(index + 5, '; Ligand position restraints' + '\n')
            top_lines.insert(index + 6, '#ifdef ' + self.posres_name + '\n')
            top_lines.insert(
                index + 7, '#include "' + str(
                    Path(self.io_dict['in'].get("input_posres_itp_path")).name)
                + '"\n')
            top_lines.insert(index + 8, '#endif' + '\n')
            top_lines.insert(index + 9, '\n')

        inside_moleculetype_section = False
        with open(self.io_dict['in'].get("input_itp_path")) as itp_file:
            moleculetype_pattern = r'\[ moleculetype \]'
            for line in itp_file:
                if re.search(moleculetype_pattern, line):
                    inside_moleculetype_section = True
                    continue
                if inside_moleculetype_section and not line.startswith(';'):
                    moleculetype = line.strip().split()[0].strip()
                    break

        molecules_pattern = r'\[ molecules \]'
        inside_molecules_section = False
        index_molecule = None
        molecule_string = moleculetype.ljust(20) + '1' + '\n'
        for index, line in enumerate(top_lines):
            if re.search(molecules_pattern, line):
                inside_molecules_section = True
                continue
            if inside_molecules_section and not line.startswith(
                    ';') and line.upper().startswith('PROTEIN'):
                index_molecule = index

        if index_molecule:
            top_lines.insert(index_molecule + 1, molecule_string)
        else:
            top_lines.append(molecule_string)

        new_top = fu.create_name(path=top_dir,
                                 prefix=self.prefix,
                                 step=self.step,
                                 name='ligand.top')

        with open(new_top, 'w') as new_top_f:
            new_top_f.write("".join(top_lines))

        shutil.copy2(self.io_dict['in'].get("input_itp_path"), top_dir)
        if self.io_dict['in'].get("input_posres_itp_path"):
            shutil.copy2(self.io_dict['in'].get("input_posres_itp_path"),
                         top_dir)

        # zip topology
        fu.log(
            'Compressing topology to: %s' %
            self.io_dict['out'].get("output_top_zip_path"), out_log,
            self.global_log)
        fu.zip_top(zip_file=self.io_dict['out'].get("output_top_zip_path"),
                   top_file=new_top,
                   out_log=out_log)

        if self.remove_tmp:
            fu.rm_file_list(tmp_files, out_log=out_log)

        return 0
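
The moleculetype name is pulled out of the ligand ITP by scanning for the '[ moleculetype ]' header and taking the first non-comment token after it. A self-contained sketch of that scan on an inline ITP snippet (the snippet content is made up for illustration):

    import re

    itp_lines = [
        '[ moleculetype ]\n',
        '; Name            nrexcl\n',
        'LIG                 3\n',
    ]
    moleculetype = None
    inside = False
    for line in itp_lines:
        if re.search(r'\[ moleculetype \]', line):
            inside = True
            continue
        if inside and not line.startswith(';'):
            moleculetype = line.strip().split()[0]
            break
    assert moleculetype == 'LIG'
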
Example #7
    def launch(self) -> int:
        """Launch the topology generation."""
        tmp_files = []

        # Get local loggers from launchlogger decorator
        out_log = getattr(self, 'out_log', None)
        err_log = getattr(self, 'err_log', None)

        # Restart if needed
        if self.restart:
            output_file_list = [self.io_dict['out'].get("output_top_zip_path")]
            if fu.check_complete_files(output_file_list):
                fu.log('Restart is enabled, this step: %s will be skipped' % self.step, out_log, self.global_log)
                return 0

        top_file = fu.unzip_top(zip_file=self.io_dict['in'].get("input_top_zip_path"), out_log=out_log)

        # Map each index-file group header ('[ name ]') to the (start, stop) range of its
        # lines; the members of a group are later read as lines[start + 1:stop]
        index_dic = {}
        label = None
        lines = open(self.io_dict['in'].get("input_ndx_path")).read().splitlines()
        for index, line in enumerate(lines):
            if line.startswith('['):
                index_dic[line] = (index,)
                if label is not None:
                    # Close the range of the previous group at the current header
                    index_dic[label] = index_dic[label][0], index
                label = line
        # Close the range of the last group at the end of the file
        index_dic[label] = index_dic[label][0], len(lines)
        fu.log('Index_dic: '+str(index_dic), out_log, self.global_log)

        self.ref_rest_chain_triplet_list = [tuple(elem.strip(' ()').replace(' ', '').split(',')) for elem in self.ref_rest_chain_triplet_list.split('),')]
        fu.log('ref_rest_chain_triplet_list: ' + str(self.ref_rest_chain_triplet_list), out_log, self.global_log)
        for reference_group, restrain_group, chain in self.ref_rest_chain_triplet_list:
            fu.log('Reference group: '+reference_group, out_log, self.global_log)
            fu.log('Restrain group: '+restrain_group, out_log, self.global_log)
            fu.log('Chain: '+chain, out_log, self.global_log)
            self.io_dict['out']["output_itp_path"] = fu.create_name(path=str(Path(top_file).parent), prefix=self.prefix, step=self.step, name=restrain_group+'.itp')

            # Mapping atoms from absolute enumeration to Chain relative enumeration
            fu.log('reference_group_index: start_closed:'+str(index_dic['[ '+reference_group+' ]'][0]+1)+' stop_open: '+str(index_dic['[ '+reference_group+' ]'][1]), out_log, self.global_log)
            reference_group_list = [int(elem) for line in lines[index_dic['[ '+reference_group+' ]'][0]+1: index_dic['[ '+reference_group+' ]'][1]] for elem in line.split()]
            fu.log('restrain_group_index: start_closed:'+str(index_dic['[ '+restrain_group+' ]'][0]+1)+' stop_open: '+str(index_dic['[ '+restrain_group+' ]'][1]), out_log, self.global_log)
            restrain_group_list = [int(elem) for line in lines[index_dic['[ '+restrain_group+' ]'][0]+1: index_dic['[ '+restrain_group+' ]'][1]] for elem in line.split()]
            selected_list = [reference_group_list.index(atom)+1 for atom in restrain_group_list]
            # Creating new ITP with restrictions
            with open(self.io_dict['out'].get("output_itp_path"), 'w') as f:
                fu.log('Creating: '+str(f)+' and adding the selected atoms force constants', out_log, self.global_log)
                f.write('[ position_restraints ]\n')
                f.write('; atom  type      fx      fy      fz\n')
                for atom in selected_list:
                    f.write(str(atom)+'     1  '+self.force_constants+'\n')

            # Including new ITP in the corresponding ITP-chain file
            for file_dir in Path(top_file).parent.iterdir():
                if not file_dir.name.startswith("posre") and not file_dir.name.endswith("_pr.itp"):
                    if fnmatch.fnmatch(str(file_dir), "*_chain_"+chain+".itp"):
                        with open(str(file_dir), 'a') as f:
                            fu.log('Opening: '+str(f)+' and adding the ifdef include statement', out_log, self.global_log)
                            f.write('\n')
                            f.write('; Include Position restraint file\n')
                            f.write('#ifdef CUSTOM_POSRES\n')
                            f.write('#include "'+str(Path(self.io_dict['out'].get("output_itp_path")).name)+'"\n')
                            f.write('#endif\n')

        # zip topology
        fu.zip_top(zip_file=self.io_dict['out'].get("output_top_zip_path"), top_file=top_file, out_log=out_log)

        if self.remove_tmp:
            fu.rm_file_list(tmp_files, out_log=out_log)

        return 0
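
The selected_list comprehension above renumbers the restrained atoms from their absolute indices to indices relative to the reference (chain) group, since the restraint ITP included from a per-chain ITP uses chain-local atom numbering. A tiny sketch of that renumbering and the ITP lines it produces, with hypothetical atom numbers and force constants:

    reference_group_list = [101, 102, 103, 104]   # absolute atom numbers of one chain
    restrain_group_list = [102, 104]              # absolute atom numbers to restrain
    force_constants = '500 500 500'

    selected_list = [reference_group_list.index(atom) + 1 for atom in restrain_group_list]
    itp_lines = ['[ position_restraints ]\n', '; atom  type      fx      fy      fz\n']
    itp_lines += ['%d     1  %s\n' % (atom, force_constants) for atom in selected_list]
    assert selected_list == [2, 4]
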
Example #8
    def via_gromacs(cls,
                    parmed_obj,
                    file_name,
                    file_path="./",
                    num_steps=5000 * 500,
                    write_out_freq=5000,
                    num_threads=1,
                    minimisation_mdp=None,
                    equilibration_mdp=None,
                    production_mdp=None,
                    tmp_dir=None,
                    report_equilibration=False,
                    report_production=False,
                    debug=False,
                    stages=["minimisation", "equilibration", "production"],
                    **kwargs):  #TODO infer threads based on system size
        """
        Simulation via GROMACS will be added in the future.

        based on cell size, make recomendation on number of threads to use 
    
        Parameters
        ------------
        parmed_obj : parmed.structure
            Parmed object of the fully parameterised simulated system.
        file_name : str
            No file type postfix is necessary
        file_path : str
            Default to current directory
        platform : str
            The computing architecture to do the calculation, default to CUDA, CPU, OpenCL is also possible.
        num_steps : int
            Number of production simulation to run, default 2,500,000 steps, i.e. 5 ns.
        write_out_freq : int
            Write out every nth frame of simulated trajectory, default to every 5000 frame write out one, i.e. 10 ps per frame.

        Returns
        --------
        path : str
            The absolute path where the trajectory is written to.
        """
        from biobb_md.gromacs.make_ndx import make_ndx, MakeNdx
        # from biobb_md.gromacs.grompp_mdrun import grompp_mdrun
        from biobb_md.gromacs.mdrun import mdrun
        from biobb_md.gromacs.grompp import grompp
        from biobb_common.tools.file_utils import zip_top

        assert tmp_dir is None or os.path.realpath(file_path) != os.path.realpath(tmp_dir), \
            "tmp_dir must differ from file_path so the results are not stored in the temporary directory"

        if debug:
            import shutil
            tmp_dir = "{}/.debug/".format(file_path)
            if os.path.isdir(tmp_dir):
                shutil.rmtree(tmp_dir)
            os.mkdir(tmp_dir)
        elif tmp_dir is None:
            tmp_dir = tempfile.mkdtemp(dir=file_path)  # FIXME: just change this to the debug dir in debug mode
        else:
            try:
                os.mkdir(tmp_dir)
            except OSError as err:
                raise ValueError(
                    "Cannot create an empty temporary directory for storing intermediate files"
                ) from err

        parmed_obj.residues[0].name = "LIG"
        parmed_obj = parmed.gromacs.gromacstop.GromacsTopologyFile(
        ).from_structure(parmed_obj)

        parmed_obj.defaults.fudgeLJ = 0.5
        parmed_obj.defaults.fudgeQQ = 0.83333
        parmed_obj.defaults.gen_pairs = "yes"

        # Create prop dict and inputs/outputs

        xtc_file = '{}/{}.xtc'.format(file_path, file_name)
        gro_file = "{}/{}.gro".format(file_path, file_name)
        top_file = "{}/{}.top".format(tmp_dir, "topol")
        topzip_file = "{}/{}.zip".format(tmp_dir, "topol")

        # parmed_obj.save("{}/{}.pdb".format(tmp_dir, stage), overwrite=True)
        parmed_obj.save(gro_file, overwrite=True)
        parmed_obj.save(top_file, overwrite=True)

        index_file = "{}/{}.ndx".format(tmp_dir, "index")
        prop = {
            # "can_write_console_log" : False,
            'selection': "! r LIG"
        }

        # Create and launch bb
        # tmp = MakeNdx(input_structure_path = gro_file,
        #         output_ndx_path = index_file,
        #         properties = prop)
        # import logging
        # tmp.out_log = logging.Logger("x")
        # tmp.err_log = logging.Logger("y")
        # tmp.launch()
        # print(getattr(tmp, "out_log"))
        # print(tmp.err_log)
        make_ndx(input_structure_path=gro_file,
                 output_ndx_path=index_file,
                 properties=prop)

        ####################################################

        zip_top(topzip_file, top_file)

        stage = "minimisation"
        if stage in stages:
            mdp_dict = mdp2dict(get_data_filename("{}.mdp".format(stage)))
            if eval("{}_mdp".format(stage)) is not None:
                mdp_dict.update(eval("{}_mdp".format(stage)))
            next_gro_file = "{}/{}.gro".format(tmp_dir, stage)

            # grompp_mdrun(input_gro_path = gro_file,
            #     input_ndx_path = index_file,
            #     input_top_zip_path= topzip_file,
            #     # input_mdp_path = "{}.mdp".format(stage),
            #     output_trr_path = "{}/{}.trr".format(tmp_dir, stage),
            #     output_gro_path = next_gro_file,
            #     output_edr_path = "{}/{}.edr".format(tmp_dir, stage),
            #     output_log_path = "{}/{}.log".format(tmp_dir, stage),
            #     output_xtc_path = "{}/{}.xtc".format(tmp_dir, stage),
            #     num_threads_omp = num_threads,
            #     properties = {
            #         "mdp" : mdp_dict
            #         }
            #     )
            grompp(
                input_gro_path=gro_file,
                input_ndx_path=index_file,
                input_top_zip_path=topzip_file,
                output_tpr_path="{}/{}.tpr".format(tmp_dir, stage),
                # input_mdp_path = "{}.mdp".format(stage),
                properties={"mdp": mdp_dict})
            mdrun(
                input_tpr_path="{}/{}.tpr".format(tmp_dir, stage),
                output_trr_path="{}/{}.trr".format(tmp_dir, stage),
                output_gro_path=next_gro_file,
                output_edr_path="{}/{}.edr".format(tmp_dir, stage),
                output_log_path="{}/{}.log".format(tmp_dir, stage),
                output_cpt_path="{}/{}.cpt".format(tmp_dir, stage),
                output_xtc_path="{}/{}.xtc".format(tmp_dir, stage),
                properties={
                    "num_threads": num_threads,
                    "num_threads_omp": num_threads,
                    "num_threads_omp_pme": num_threads,
                }
                # num_threads_omp = 1 #XXX seems for minimisation speed is very slow when multiple threads are used, especially on cluster. Maybe need better handle
            )
            gro_file = next_gro_file

        ####################################################

        stage = "equilibration"
        if stage in stages:
            mdp_dict = mdp2dict(get_data_filename("{}.mdp".format(stage)))
            if eval("{}_mdp".format(stage)) is not None:
                mdp_dict.update(eval("{}_mdp".format(stage)))
            next_gro_file = "{}/{}.gro".format(tmp_dir, stage)
            grompp(
                input_gro_path=gro_file,
                input_ndx_path=index_file,
                input_top_zip_path=topzip_file,
                output_tpr_path="{}/{}.tpr".format(tmp_dir, stage),
                # input_mdp_path = "{}.mdp".format(stage),
                properties={"mdp": mdp_dict})
            mdrun(input_tpr_path="{}/{}.tpr".format(tmp_dir, stage),
                  output_trr_path="{}/{}.trr".format(tmp_dir, stage),
                  output_gro_path=next_gro_file,
                  output_edr_path="{}/{}.edr".format(tmp_dir, stage),
                  output_log_path="{}/{}.log".format(tmp_dir, stage),
                  output_cpt_path="{}/{}.cpt".format(tmp_dir, stage),
                  output_xtc_path="{}/{}.xtc".format(tmp_dir, stage),
                  properties={
                      "num_threads": num_threads,
                      "num_threads_omp": num_threads,
                      "num_threads_omp_pme": num_threads,
                  })
            gro_file = next_gro_file

        ####################################################

        stage = "production"
        if stage in stages:
            mdp_dict = mdp2dict(get_data_filename("{}.mdp".format(stage)))
            if eval("{}_mdp".format(stage)) is not None:
                mdp_dict.update(eval("{}_mdp".format(stage)))
            next_gro_file = "{}/{}.gro".format(tmp_dir, stage)
            grompp(
                input_gro_path=gro_file,
                input_ndx_path=index_file,
                input_top_zip_path=topzip_file,
                output_tpr_path="{}/{}.tpr".format(tmp_dir, stage),
                # input_mdp_path = "{}.mdp".format(stage),
                properties={"mdp": mdp_dict})
            mdrun(input_tpr_path="{}/{}.tpr".format(tmp_dir, stage),
                  output_trr_path="{}/{}.trr".format(tmp_dir, stage),
                  output_gro_path=next_gro_file,
                  output_edr_path="{}/{}.edr".format(tmp_dir, stage),
                  output_log_path="{}/{}.log".format(tmp_dir, stage),
                  output_cpt_path="{}/{}.cpt".format(tmp_dir, stage),
                  output_xtc_path=xtc_file,
                  properties={
                      "num_threads": num_threads,
                      "num_threads_omp": num_threads,
                      "num_threads_omp_pme": num_threads,
                  })
            # gro_file = next_gro_file

        ####################################################

        if debug is not True and tmp_dir is not None:
            import shutil
            shutil.rmtree(tmp_dir)
        return os.path.abspath(xtc_file)
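
The three stages above share one pattern: load the default .mdp settings for the stage into a dict, then overlay the user-supplied <stage>_mdp dict if there is one (done here through eval). A short sketch of that pattern, assuming the usual "key = value" .mdp syntax with ';' comments; mdp_to_dict is a stand-in written for illustration, not the actual mdp2dict helper, and the eval lookup is replaced by an explicit mapping:

    def mdp_to_dict(mdp_text):
        """Parse 'key = value' lines of an .mdp file into a dict, ignoring ';' comments."""
        mdp = {}
        for raw in mdp_text.splitlines():
            line = raw.split(';', 1)[0].strip()
            if '=' in line:
                key, value = (part.strip() for part in line.split('=', 1))
                mdp[key] = value
        return mdp

    defaults = mdp_to_dict("integrator = md\nnsteps = 5000 ; short run\n")
    overrides_by_stage = {"minimisation": None, "equilibration": {"nsteps": "50000"}}
    stage = "equilibration"
    mdp_dict = dict(defaults)
    if overrides_by_stage.get(stage):
        mdp_dict.update(overrides_by_stage[stage])
    assert mdp_dict == {"integrator": "md", "nsteps": "50000"}
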
Example #9
    def launch(self) -> int:
        """Execute the :class:`AppendLigand <gromacs_extra.append_ligand.AppendLigand>` object."""
        # Setup Biobb
        if self.check_restart(): return 0

        # Unzip topology
        top_file = fu.unzip_top(
            zip_file=self.io_dict['in'].get("input_top_zip_path"),
            out_log=self.out_log)
        top_dir = str(Path(top_file).parent)
        itp_name = str(Path(self.io_dict['in'].get("input_itp_path")).name)

        with open(top_file) as top_f:
            top_lines = top_f.readlines()
        fu.rm(top_file)

        forcefield_pattern = r'#include.*forcefield.itp\"'
        if top_lines:
            for index, line in enumerate(top_lines):
                if re.search(forcefield_pattern, line):
                    break
        else:
            fu.log(
                f'FATAL: Input topfile {top_file} from input_top_zip_path {self.io_dict["in"].get("input_top_zip_path")} is empty.',
                self.out_log, self.global_log)
            return 1

        top_lines.insert(index + 1, '\n')
        top_lines.insert(index + 2, '; Including ligand ITP\n')
        top_lines.insert(index + 3, '#include "' + itp_name + '"\n')
        top_lines.insert(index + 4, '\n')
        if self.io_dict['in'].get("input_posres_itp_path"):
            top_lines.insert(index + 5, '; Ligand position restraints' + '\n')
            top_lines.insert(index + 6, '#ifdef ' + self.posres_name + '\n')
            top_lines.insert(
                index + 7, '#include "' + str(
                    Path(self.io_dict['in'].get("input_posres_itp_path")).name)
                + '"\n')
            top_lines.insert(index + 8, '#endif' + '\n')
            top_lines.insert(index + 9, '\n')

        inside_moleculetype_section = False
        with open(self.io_dict['in'].get("input_itp_path")) as itp_file:
            moleculetype_pattern = r'\[ moleculetype \]'
            for line in itp_file:
                if re.search(moleculetype_pattern, line):
                    inside_moleculetype_section = True
                    continue
                if inside_moleculetype_section and not line.startswith(';'):
                    moleculetype = line.strip().split()[0].strip()
                    break

        molecules_pattern = r'\[ molecules \]'
        inside_molecules_section = False
        index_molecule = None
        molecule_string = moleculetype.ljust(20) + '1' + '\n'
        for index, line in enumerate(top_lines):
            if re.search(molecules_pattern, line):
                inside_molecules_section = True
                continue
            if inside_molecules_section and not line.startswith(
                    ';') and line.upper().startswith('PROTEIN'):
                index_molecule = index

        if index_molecule:
            top_lines.insert(index_molecule + 1, molecule_string)
        else:
            top_lines.append(molecule_string)

        new_top = fu.create_name(path=top_dir,
                                 prefix=self.prefix,
                                 step=self.step,
                                 name='ligand.top')

        with open(new_top, 'w') as new_top_f:
            new_top_f.write("".join(top_lines))

        shutil.copy2(self.io_dict['in'].get("input_itp_path"), top_dir)
        if self.io_dict['in'].get("input_posres_itp_path"):
            shutil.copy2(self.io_dict['in'].get("input_posres_itp_path"),
                         top_dir)

        # zip topology
        fu.log(
            'Compressing topology to: %s' %
            self.io_dict['out'].get("output_top_zip_path"), self.out_log,
            self.global_log)
        fu.zip_top(zip_file=self.io_dict['out'].get("output_top_zip_path"),
                   top_file=new_top,
                   out_log=self.out_log)

        # Remove temporal files
        self.tmp_files.append(top_dir)
        self.remove_tmp_files()

        return 0
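
AppendLigand makes exactly two edits to the topology text: an '#include' block inserted right after the force-field include, and one count line added to the '[ molecules ]' section. A standalone sketch of those edits on a toy topology, with a hypothetical ligand name and ITP file:

    import re

    top_lines = [
        '#include "amber99sb.ff/forcefield.itp"\n',
        '[ molecules ]\n',
        'Protein_chain_A     1\n',
    ]
    itp_name, moleculetype = 'ligand.itp', 'LIG'

    ff_index = next(i for i, line in enumerate(top_lines)
                    if re.search(r'#include.*forcefield.itp\"', line))
    top_lines[ff_index + 1:ff_index + 1] = [
        '\n', '; Including ligand ITP\n', '#include "' + itp_name + '"\n', '\n']
    # Simplified: the block above inserts after an existing Protein entry when one is found
    top_lines.append(moleculetype.ljust(20) + '1\n')
    print(''.join(top_lines))
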
Example #10
    def launch(self) -> int:
        """Execute the :class:`Pdb2gmx <gromacs.pdb2gmx.Pdb2gmx>` object."""

        # Setup Biobb
        if self.check_restart(): return 0
        self.stage_files()

        internal_top_name = fu.create_name(prefix=self.prefix,
                                           step=self.step,
                                           name=self.internal_top_name)
        internal_itp_name = fu.create_name(prefix=self.prefix,
                                           step=self.step,
                                           name=self.internal_itp_name)

        # Create command line
        self.cmd = [
            self.gmx_path, "pdb2gmx", "-f",
            self.stage_io_dict["in"]["input_pdb_path"], "-o",
            self.stage_io_dict["out"]["output_gro_path"], "-p",
            internal_top_name, "-water", self.water_type, "-ff",
            self.force_field, "-i", internal_itp_name
        ]

        if self.his:
            self.cmd.append("-his")
            self.cmd = ['echo', self.his, '|'] + self.cmd
        if self.ignh:
            self.cmd.append("-ignh")
        if self.merge:
            self.cmd.append("-merge")
            self.cmd.append("all")

        if self.gmx_lib:
            self.environment = os.environ.copy()
            self.environment['GMXLIB'] = self.gmx_lib

        # Check GROMACS version
        if not self.container_path:
            if self.gmx_version < 512:
                raise GromacsVersionError(
                    "Gromacs version should be 5.1.2 or newer %d detected" %
                    self.gmx_version)
            fu.log(
                "GROMACS %s %d version detected" %
                (self.__class__.__name__, self.gmx_version), self.out_log)

        # Run Biobb block
        self.run_biobb()

        # Copy files to host
        self.copy_to_host()

        if self.container_path:
            internal_top_name = os.path.join(
                self.stage_io_dict.get("unique_dir"), internal_top_name)

        # zip topology
        fu.log(
            'Compressing topology to: %s' %
            self.io_dict["out"]["output_top_zip_path"], self.out_log,
            self.global_log)
        fu.zip_top(zip_file=self.io_dict["out"]["output_top_zip_path"],
                   top_file=internal_top_name,
                   out_log=self.out_log)

        # Remove temporal files
        self.tmp_files.extend([
            self.internal_top_name, self.internal_itp_name,
            self.stage_io_dict.get("unique_dir")
        ])
        self.remove_tmp_files()

        return self.return_code
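
The ['echo', self.his, '|'] prefix above relies on the joined command string being executed through a shell, so the echoed selection answers pdb2gmx's interactive histidine prompts. A standalone sketch of the same idea with subprocess, feeding the answers through stdin instead of a shell pipe; run_with_answers and the example command and answers are hypothetical, not biobb code:

    import subprocess

    def run_with_answers(cmd, answers):
        """Run a command that prompts on stdin and feed it the given answers."""
        result = subprocess.run(cmd, input=answers, text=True, capture_output=True)
        return result.returncode

    # e.g. run_with_answers(['gmx', 'pdb2gmx', '-f', 'in.pdb', '-o', 'out.gro', '-his'], '1 1 0\n')
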
Example #11
    def launch(self) -> int:
        """Execute the :class:`Genion <gromacs.genion.Genion>` object."""

        # Setup Biobb
        if self.check_restart(): return 0
        self.stage_files()

        # Unzip topology to topology_out
        top_file = fu.unzip_top(zip_file=self.input_top_zip_path,
                                out_log=self.out_log)
        top_dir = str(Path(top_file).parent)

        if self.container_path:
            shutil.copytree(
                top_dir,
                Path(self.stage_io_dict.get("unique_dir")).joinpath(
                    Path(top_dir).name))
            top_file = str(
                Path(self.container_volume_path).joinpath(
                    Path(top_dir).name,
                    Path(top_file).name))

        self.cmd = [
            'echo', '\"' + self.replaced_group + '\"', '|', self.gmx_path,
            'genion', '-s', self.stage_io_dict["in"]["input_tpr_path"], '-o',
            self.stage_io_dict["out"]["output_gro_path"], '-p', top_file
        ]

        if self.stage_io_dict["in"].get("input_ndx_path") and Path(
                self.stage_io_dict["in"].get("input_ndx_path")).exists():
            self.cmd.append('-n')
            self.cmd.append(self.stage_io_dict["in"].get("input_ndx_path"))

        if self.neutral:
            self.cmd.append('-neutral')

        if self.concentration:
            self.cmd.append('-conc')
            self.cmd.append(str(self.concentration))
            fu.log(
                'To reach a %g mol/litre concentration' % self.concentration,
                self.out_log, self.global_log)

        if self.seed is not None:
            self.cmd.append('-seed')
            self.cmd.append(str(self.seed))

        if self.gmx_lib:
            self.environment = os.environ.copy()
            self.environment['GMXLIB'] = self.gmx_lib

        # Check GROMACS version
        if not self.container_path:
            if self.gmx_version < 512:
                raise GromacsVersionError(
                    "Gromacs version should be 5.1.2 or newer %d detected" %
                    self.gmx_version)
            fu.log(
                "GROMACS %s %d version detected" %
                (self.__class__.__name__, self.gmx_version), self.out_log)

        # Run Biobb block
        self.run_biobb()

        # Copy files to host
        self.copy_to_host()

        if self.container_path:
            top_file = str(
                Path(self.stage_io_dict.get("unique_dir")).joinpath(
                    Path(top_dir).name,
                    Path(top_file).name))

        # zip topology
        fu.log(
            'Compressing topology to: %s' %
            self.stage_io_dict["out"]["output_top_zip_path"], self.out_log,
            self.global_log)
        fu.zip_top(zip_file=self.io_dict["out"]["output_top_zip_path"],
                   top_file=top_file,
                   out_log=self.out_log)

        # Remove temporal files
        self.tmp_files.extend([self.stage_io_dict.get("unique_dir"), top_dir])
        self.remove_tmp_files()

        return self.return_code