def build(cls, data_handler, matrix_creation_parameters):
        """
        Generates the pairwise RMSD matrix using the calculator configured in
        the matrix creation parameters.

        @param data_handler: Object whose get_data() returns the structure
        providing the fitting (and optionally calculation) coordinate sets.
        @param matrix_creation_parameters: Parameters holder with optional
        "calculator_type", "calculator_options" and "symmetries" entries.

        @return: The created matrix (a CondensedMatrix of pairwise RMSDs).
        """
        # Calculator backend; defaults to the OpenMP QTRFIT implementation.
        calculator_type = matrix_creation_parameters.get_value("calculator_type", 
                                                               default_value = "QTRFIT_OMP_CALCULATOR")

        # Tuning knobs for OpenMP / CUDA backends, with conservative defaults.
        calculator_options = matrix_creation_parameters.get_value("calculator_options", 
                                                               default_value = ProtocolParameters({"number_of_threads":8,
                                                                                "blocks_per_grid":8,
                                                                                "threads_per_block":32}))
        # Re-wrap so a plain dict coming from the script also exposes get_value().
        calculator_options = ProtocolParameters(calculator_options)
        
        structure = data_handler.get_data()
        fit_selection_coordsets = structure.getFittingCoordinates()
        calc_selection_coordsets = structure.getCalculationCoordinates()
        
        # Without a separate calculation selection, fitting coordinates are
        # also used for the RMSD computation itself.
        if calc_selection_coordsets is None:
            calculator = RMSDCalculator(calculatorType = calculator_type,
                                        fittingCoordsets  = fit_selection_coordsets)
        else:
            symm_groups = []
            if "symmetries" in matrix_creation_parameters:
                # Then prepare it to handle calculation symmetries
                # Description of equivalences must have the same number of atoms
                symm_groups = cls.process_symm_groups(matrix_creation_parameters,
                                                      structure,
                                                      calc_selection_coordsets)
                print "Using symmetries", symm_groups
            
            
            
            calculator = RMSDCalculator(calculatorType = calculator_type,
                                        fittingCoordsets  = fit_selection_coordsets,
                                        calculationCoordsets = calc_selection_coordsets,
                                        calcSymmetryGroups = symm_groups)
        
        # Best-effort tuning: presumably a KeyError is raised when the chosen
        # backend does not support the option — TODO confirm in RMSDCalculator.
        try:
            calculator.setNumberOfOpenMPThreads(calculator_options.get_value("number_of_threads",
                                            default_value = 8))
        except KeyError:
            pass
        
        try:
            calculator.setCUDAKernelThreadsPerBlock(calculator_options.get_value("threads_per_block",
                                                                             default_value = 32), 
                                                calculator_options.get_value("blocks_per_grid",
                                                                             default_value = 8))
        except KeyError:
            pass
        
        rmsds = calculator.pairwiseRMSDMatrix()
        return CondensedMatrix(rmsds)
Exemplo n.º 2
0
    def __init__(self, workspace_parameters, observer):
        """
        Stores the handler's behaviour parameters and resolves the path of
        every managed workspace folder under the mandatory "base" path.
        """
        super(WorkspaceHandler, self).__init__(observer)

        # Default behaviour: overwrite old folders, delete only "tmp" on exit.
        default_behaviour = ProtocolParameters({
            "overwrite": True,
            "clear_after_exec": ["tmp"]
        })
        self.parameters = ProtocolParameters(
            workspace_parameters.get_value("parameters", default_value=default_behaviour))

        # Each folder name can be overridden by a parameter with the same key.
        base_path = workspace_parameters["base"]
        self.data = {}
        for folder_key in ["results", "tmp", "clusters", "matrix"]:
            self.data[folder_key] = os.path.join(
                base_path,
                workspace_parameters.get_value(folder_key, default_value=folder_key))
Exemplo n.º 3
0
 def setUpClass(cls):
     """
     Prepares the shared fixtures: default protocol parameters loaded from
     disk and a condensed matrix built from an upper-triangle distance list.
     """
     cls.parameters = ProtocolParameters.get_default_params("data/params.json")
     # Upper triangle of the distance matrix, row by row.
     upper_triangle = [
         94, 6, 43, 14, 96,
         18, 59, 54, 69,
         56, 96, 69,
         54, 50,
         8
     ]
     cls.matrix_1 = CondensedMatrix(upper_triangle)
Exemplo n.º 4
0
class WorkspaceHandler(Observable):
    """
    Manages the working folders of a run ("results", "tmp", "clusters" and
    "matrix"). Usable as a context manager: folders are created on entry and
    the ones listed under "clear_after_exec" are deleted on exit.
    """

    def __init__(self, workspace_parameters, observer):
        """
        Stores the behaviour parameters and resolves every managed folder path.

        @param workspace_parameters: Holder with the mandatory "base" path and
        optional per-folder name overrides.
        @param observer: Receiver of this handler's notifications.
        """
        super(WorkspaceHandler,self).__init__(observer)

        # Default behaviour: overwrite old folders, delete only "tmp" on exit.
        self.parameters = ProtocolParameters(workspace_parameters.get_value("parameters", default_value = ProtocolParameters({
                                                                                                       "overwrite":True,
                                                                                                       "clear_after_exec":["tmp"]
                                                                                                  })))

        # Every folder lives under "base"; names default to their own keys.
        self.data = {
                      "results": os.path.join(workspace_parameters["base"], workspace_parameters.get_value("results", default_value="results")),
                      "tmp" : os.path.join(workspace_parameters["base"], workspace_parameters.get_value("tmp", default_value= "tmp")),
                      "clusters" : os.path.join(workspace_parameters["base"], workspace_parameters.get_value("clusters", default_value= "clusters")),
                      "matrix" : os.path.join(workspace_parameters["base"], workspace_parameters.get_value("matrix", default_value= "matrix"))
        }

    def __getitem__(self,key):
        """Returns the path registered under 'key'."""
        return self.data[key]

    def __str__(self):
        """Returns the folder map serialized as indented JSON."""
        return json.dumps(self.data, sort_keys=False, indent=4, separators=(',', ': '))

    def create_directories(self):
        """
        Recreates the workspace structure. Removes the old location if necessary.
        """
        self.notify("MSG","Creating workspace...")
        if self.parameters.get_value("overwrite", default_value = True)  :
            self.clear_directories(self.data.keys())

        for folder_key in self.data:
            scripts_common.create_directory(self.data[folder_key])

    def clear_directories(self, directory_keys):
        """
        Removes the directories given as parameters.

        @param directory_keys: The keys of the 'data' object which defined paths will be erased.
        """
        for folder_key in directory_keys:
            folder_path = self.data[folder_key]
            if os.path.exists(folder_path):
                shutil.rmtree(folder_path)
                self.notify("MSG","Removing %s ..."%folder_path)
#                 print "Removing %s ..."%folder_path

    def __enter__(self):
        """Creates the folder layout when entering a 'with' block."""
        self.create_directories()
        return self

    def __exit__(self, exception_type, exception_val, trace):
        """Deletes the folders listed under 'clear_after_exec' on exit."""
        self.clear_directories(self.parameters["clear_after_exec"])
 def setup_loader(self):
     """
     Builds a loader with 2 preloaded pdbs from the data folder.
     """
     data_folder = test_data.__path__[0]
     loader = ProteinEnsembleDataLoader(ProtocolParameters({"matrix":{}}))
     # First source: a plain pdb file.
     loader.load(DataSource(os.path.join(data_folder, "pdb1.pdb")))
     # Second source: a dcd trajectory plus the pdb describing its atoms.
     trajectory_source = DataSource({
         "source": os.path.join(data_folder, "pdb2.dcd"),
         "atoms_source": os.path.join(data_folder, "pdb2.pdb")
     })
     loader.load(trajectory_source)
     return loader
Exemplo n.º 6
0
 def test_to_dic(self):
     """
     Checks that ProtocolParameters.to_dict recursively converts nested
     ProtocolParameters objects into plain dictionaries.
     """
     nested = ProtocolParameters({
         "come": "ti",
         "chiami": 4
     })
     params = ProtocolParameters({
         "ciao": nested,
         "io_mi": {
             "chiamo": 87,
             "Victor": [1, 2, 3, 4]
         }
     })
     expected = {
         'ciao': {
             'come': 'ti',
             'chiami': 4
         },
         'io_mi': {
             'chiamo': 87,
             'Victor': [1, 2, 3, 4]
         }
     }
     self.assertDictEqual(expected, ProtocolParameters.to_dict(params))
Exemplo n.º 7
0
 def test_to_dic(self):
     """
     Checks that ProtocolParameters.to_dict recursively flattens nested
     ProtocolParameters instances into plain dictionaries.
     """
     # NOTE: 'input' shadows the builtin of the same name (local to this test).
     input = ProtocolParameters({
                                "ciao":ProtocolParameters({
                                                           "come": "ti",
                                                           "chiami": 4}),
                                "io_mi":{
                                         "chiamo":87,
                                         "Victor":[1,2,3,4]
                                         }
                                })
     expected = {
                 'ciao': {
                          'come': 'ti',
                          'chiami': 4
                          },
                 'io_mi': {
                           'chiamo': 87,
                           'Victor': [1, 2, 3, 4]
                           }
                 }
     self.assertDictEqual(expected, ProtocolParameters.to_dict(input))
Exemplo n.º 8
0
    def __init__(self, workspace_parameters, observer):
        """
        Stores the run behaviour parameters and resolves the path of every
        managed workspace folder.

        @param workspace_parameters: Holder with the mandatory "base" path and
        optional per-folder name overrides.
        @param observer: Receiver of this handler's notifications.
        """
        super(WorkspaceHandler, self).__init__(observer)

        # Default behaviour: overwrite old folders, delete only "tmp" on exit.
        self.parameters = ProtocolParameters(
            workspace_parameters.get_value(
                "parameters", default_value=ProtocolParameters({"overwrite": True, "clear_after_exec": ["tmp"]})
            )
        )

        # Every folder lives under "base"; names default to their own keys.
        self.data = {
            "results": os.path.join(
                workspace_parameters["base"], workspace_parameters.get_value("results", default_value="results")
            ),
            "tmp": os.path.join(
                workspace_parameters["base"], workspace_parameters.get_value("tmp", default_value="tmp")
            ),
            "clusters": os.path.join(
                workspace_parameters["base"], workspace_parameters.get_value("clusters", default_value="clusters")
            ),
            "matrix": os.path.join(
                workspace_parameters["base"], workspace_parameters.get_value("matrix", default_value="matrix")
            ),
        }
Exemplo n.º 9
0
    def __init__(self, parameters, observer):
        """
        Class creator. It parses the needed files and extracts info and coordinates.

        @param parameters: Global script parameters; "data.files" lists the
        trajectory/pdb sources and "data.matrix.parameters" the selections.
        @param observer: Receiver of this handler's notifications.
        """

        super(TrajectoryHandler, self).__init__(observer)

        print "Reading conformations..."
        # Silence prody's own logging while loading.
        prody.confProDy(verbosity="none")

        self.parameters = parameters
        matrix_parameters = parameters.get_value(
            "data.matrix.parameters", default_value=ProtocolParameters.empty())
        # File lists are expanded in place so later code can assume plain paths.
        parameters["data"]["files"] = self.expand_file_lists(
            parameters["data"]["files"])
        self.files = parameters["data"]["files"]
        self.pdbs = []

        # Abort the whole run if there is nothing to load.
        if len(self.files) == 0:
            common.print_and_flush("[ERROR] no pdbs. Exiting...\n")
            self.notify("SHUTDOWN", "No pdbs defined in script.")
            exit()

        self.notify("Loading", "Loading Trajectories")

        # Bookmarking structure
        self.bookmarking = {"pdb": None, "selections": {}}

        # All input conformations merged into a single coordinate-set array;
        # assumes shape (num_conformations, num_atoms, ...) — TODO confirm.
        merged_structure = self.getMergedStructure()
        self.coordsets = merged_structure.getCoordsets()
        self.number_of_conformations = self.coordsets.shape[0]
        self.number_of_atoms = self.coordsets.shape[1]

        self.handle_selection_parameters(matrix_parameters)
        print "%d conformations of %d atoms were read." % (
            merged_structure.numCoordsets(), merged_structure.numAtoms())
Exemplo n.º 10
0
    def build(cls, data_handler, matrix_creation_parameters):
        """
        Generates a matrix with the method used in the handler creation.

        @param trajectory_handler:
        @param matrix_creation_parameters:

        @return: The created matrix.
        """
        calculator_type = matrix_creation_parameters.get_value(
            "calculator_type", default_value="QTRFIT_OMP_CALCULATOR")

        calculator_options = matrix_creation_parameters.get_value(
            "calculator_options",
            default_value=ProtocolParameters({
                "number_of_threads": 8,
                "blocks_per_grid": 8,
                "threads_per_block": 32
            }))
        calculator_options = ProtocolParameters(calculator_options)

        structure = data_handler.get_data()
        fit_selection_coordsets = structure.getFittingCoordinates()
        calc_selection_coordsets = structure.getCalculationCoordinates()

        if calc_selection_coordsets is None:
            calculator = RMSDCalculator(
                calculatorType=calculator_type,
                fittingCoordsets=fit_selection_coordsets)
        else:
            symm_groups = []
            if "symmetries" in matrix_creation_parameters:
                # Then prepare it to handle calculation symmetries
                # Description of equivalences must have the same number of atoms
                symm_groups = cls.process_symm_groups(
                    matrix_creation_parameters, structure,
                    calc_selection_coordsets)
                print "Using symmetries", symm_groups

            calculator = RMSDCalculator(
                calculatorType=calculator_type,
                fittingCoordsets=fit_selection_coordsets,
                calculationCoordsets=calc_selection_coordsets,
                calcSymmetryGroups=symm_groups)

        try:
            calculator.setNumberOfOpenMPThreads(
                calculator_options.get_value("number_of_threads",
                                             default_value=8))
        except KeyError:
            pass

        try:
            calculator.setCUDAKernelThreadsPerBlock(
                calculator_options.get_value("threads_per_block",
                                             default_value=32),
                calculator_options.get_value("blocks_per_grid",
                                             default_value=8))
        except KeyError:
            pass

        rmsds = calculator.pairwiseRMSDMatrix()
        return CondensedMatrix(rmsds)
Exemplo n.º 11
0
class WorkspaceHandler(Observable):
    """
    Manages the working folders of a run ("results", "tmp", "clusters" and
    "matrix"). Usable as a context manager: folders are created on entry and
    the ones listed under "clear_after_exec" are deleted on exit.
    """

    def __init__(self, workspace_parameters, observer):
        """
        @param workspace_parameters: Holder with the mandatory "base" path and
        optional per-folder name overrides.
        @param observer: Receiver of this handler's notifications.
        """
        super(WorkspaceHandler, self).__init__(observer)

        # Default behaviour: overwrite old folders, delete only "tmp" on exit.
        default_behaviour = ProtocolParameters({"overwrite": True, "clear_after_exec": ["tmp"]})
        self.parameters = ProtocolParameters(
            workspace_parameters.get_value("parameters", default_value=default_behaviour)
        )

        # Every folder lives under "base"; names default to their own keys.
        base = workspace_parameters["base"]
        self.data = dict(
            (key, os.path.join(base, workspace_parameters.get_value(key, default_value=key)))
            for key in ("results", "tmp", "clusters", "matrix")
        )

    def __getitem__(self, key):
        """Returns the path registered under 'key'."""
        return self.data[key]

    def __str__(self):
        """Returns the folder map serialized as indented JSON."""
        return json.dumps(self.data, sort_keys=False, indent=4, separators=(",", ": "))

    def create_directories(self):
        """
        Recreates the workspace structure. Removes the old location if necessary.
        """
        self.notify("MSG", "Creating workspace...")
        if self.parameters.get_value("overwrite", default_value=True):
            self.clear_directories(self.data.keys())

        for folder_key in self.data:
            scripts_common.create_directory(self.data[folder_key])

    def clear_directories(self, directory_keys):
        """
        Removes the directories given as parameters.

        @param directory_keys: The keys of the 'data' object which defined paths will be erased.
        """
        for folder_key in directory_keys:
            path = self.data[folder_key]
            if not os.path.exists(path):
                continue
            shutil.rmtree(path)
            self.notify("MSG", "Removing %s ..." % path)

    def __enter__(self):
        """Creates the folder layout when entering a 'with' block."""
        self.create_directories()
        return self

    def __exit__(self, exception_type, exception_val, trace):
        """Deletes the folders listed under 'clear_after_exec' on exit."""
        self.clear_directories(self.parameters["clear_after_exec"])
Exemplo n.º 12
0
    # Saving matrices
    # Persist each dataset's condensed matrix so a later run can "load" it.
    for dataset_name in data.all_datasets:
        handler = MatrixHandler(condensed_matrices[dataset_name],
                                {"method": "load"})
        handler.save_matrix("./matrices/%s" % dataset_name)

    # Run pyProCT for each of them
    base_script = "".join(open("base_script.json", "r").readlines())
    for dataset_name in ['concentric_circles'
                         ]:  #data.all_datasets: #["spaeth_06"]:#
        print dataset_name
        # Change placeholders
        # (the base script carries two %s slots: workspace path, matrix path)
        script_str = base_script % (os.path.abspath(
            "./tmp/%s" % dataset_name), "./matrices/%s" % dataset_name)
        parameters = ProtocolParameters.get_params_from_json(script_str)
        # And change another hypothesis stuff
        # Per-dataset clustering hypothesis bounds come from the 'data' module.
        parameters["clustering"]["evaluation"]["maximum_noise"] = data.noise[
            dataset_name]
        parameters["clustering"]["evaluation"][
            "minimum_cluster_size"] = data.minsize[dataset_name]
        parameters["clustering"]["evaluation"][
            "minimum_clusters"] = data.num_cluster_ranges[dataset_name][0]
        parameters["clustering"]["evaluation"][
            "maximum_clusters"] = data.num_cluster_ranges[dataset_name][1]
        print parameters["clustering"]["evaluation"][
            "minimum_clusters"], parameters["clustering"]["evaluation"][
                "maximum_clusters"]
        # Dataset-specific criteria override the defaults when present.
        if dataset_name in data.criteria:
            parameters["clustering"]["evaluation"][
                "evaluation_criteria"] = data.criteria[dataset_name]
Exemplo n.º 13
0
@author: victor
"""
from pyproct.driver.parameters import ProtocolParameters
from pyproct.driver.driver import Driver
from pyproct.driver.observer.observer import Observer
import sys
from pyproct.driver.handlers.matrix.matrixHandler import MatrixHandler
from pyproct.tools.matrixTools import get_submatrix
from pyproct.tools.plotTools import matrixToImage
from itertools import product
import numpy
from pyproct.tools.pdbTools import get_number_of_frames

if __name__ == '__main__':
    base_script = "".join(open("base_script.json", "r").readlines())
    parameters = ProtocolParameters.get_params_from_json(base_script)
    parameters["global"]["workspace"]["base"] = sys.argv[3]
    parameters["data"]["files"] = [sys.argv[1], sys.argv[2]]

    frames_ini = get_number_of_frames(sys.argv[1])
    frames_proto = get_number_of_frames(sys.argv[2])
    print sys.argv[1], "->", frames_ini
    print sys.argv[2], "->", frames_proto

    try:
        Driver(Observer()).run(parameters)
    except SystemExit:
        # Expected improductive search
        # Load again the matrix
        handler = MatrixHandler({
            "method": "load",
Exemplo n.º 14
0
if __name__ == '__main__':
    # Command line: pyproct [--mpi] [--print] script.json
    parser = optparse.OptionParser(usage='%prog [--mpi] [--print] script', version=pyproct.__version__)

    parser.add_option('--mpi', action="store_true",  dest = "use_mpi", help="Add this flag if you want to use MPI-based scheduling.")
    parser.add_option('--print', action="store_true",  dest = "print_messages", help="Add this flag to print observed messages to stdout.")

    options, args = parser.parse_args()

    # The single positional argument is the json script to execute.
    if(len(args)==0):
        parser.error("You need to specify the script to be executed.")

    json_script = args[0]

    parameters = None
    try:
        # Scripts are "json with comments"; comments are stripped before parsing.
        parameters = ProtocolParameters.get_params_from_json(tools.remove_comments(open(json_script).read()))
        # Normalize the workspace base to an absolute path once, up front.
        parameters["global"]["workspace"]["base"] = os.path.abspath(parameters["global"]["workspace"]["base"])
    except ValueError, e:
        print "Malformed json script."
        print e.message
        exit()

    observer = None
    cmd_thread = None
    if options.use_mpi:
        # Lazy import: the MPI stack is only loaded when actually requested.
        from pyproct.driver.mpidriver import MPIDriver
        from pyproct.driver.observer.MPIObserver import MPIObserver
        observer = MPIObserver()
        if options.print_messages:
            # Background thread that echoes observed messages to stdout.
            cmd_thread = CmdLinePrinter(observer)
            cmd_thread.start()
Exemplo n.º 15
0
    # Work folders for the saved matrices and per-dataset workspaces.
    create_directory("./matrices")
    create_directory("./tmp")
    condensed_matrices, all_observations = vtools.create_matrices(data)
    
    # Saving matrices
    # Persist each dataset's condensed matrix so a later run can "load" it.
    for dataset_name in data.all_datasets:
        handler = MatrixHandler(condensed_matrices[dataset_name], {"method":"load"})
        handler.save_matrix("./matrices/%s"%dataset_name)

    # Run pyProCT for each of them
    base_script = "".join(open("base_script.json","r").readlines())
    for dataset_name in ['concentric_circles']: #data.all_datasets: #["spaeth_06"]:#
        print dataset_name
        # Change placeholders
        # (the base script carries two %s slots: workspace path, matrix path)
        script_str = base_script%(os.path.abspath("./tmp/%s"%dataset_name),"./matrices/%s"%dataset_name)
        parameters = ProtocolParameters.get_params_from_json(script_str)
        # And change another hypothesis stuff
        # Per-dataset clustering hypothesis bounds come from the 'data' module.
        parameters["clustering"]["evaluation"]["maximum_noise"] = data.noise[dataset_name]
        parameters["clustering"]["evaluation"]["minimum_cluster_size"] = data.minsize[dataset_name]
        parameters["clustering"]["evaluation"]["minimum_clusters"] = data.num_cluster_ranges[dataset_name][0]
        parameters["clustering"]["evaluation"]["maximum_clusters"] = data.num_cluster_ranges[dataset_name][1]
        print parameters["clustering"]["evaluation"]["minimum_clusters"], parameters["clustering"]["evaluation"]["maximum_clusters"]
        # Dataset-specific criteria override the defaults when present.
        if dataset_name in data.criteria:
            parameters["clustering"]["evaluation"]["evaluation_criteria"] = data.criteria[dataset_name]
        else:
            parameters["clustering"]["evaluation"]["evaluation_criteria"] = data.criteria["default"]
        Driver(Observer()).run(parameters)

    for dataset_name in ['concentric_circles']: #data.all_datasets:
        # Read back the results file pyProCT wrote into each workspace.
        results_file = os.path.join(os.path.abspath("./tmp/%s"%dataset_name),"results/results.json")
        results = convert_to_utf8(json.loads(open(results_file).read()))
Exemplo n.º 16
0
    # '--print' enables echoing of observed messages to stdout.
    parser.add_option(
        '--print',
        action="store_true",
        dest="print_messages",
        help="Add this flag to print observed messages to stdout.")

    options, args = parser.parse_args()

    # The single positional argument is the json script to execute.
    if (len(args) == 0):
        parser.error("You need to specify the script to be executed.")

    json_script = args[0]

    parameters = None
    try:
        # Scripts are "json with comments"; comments are stripped first.
        parameters = ProtocolParameters.get_params_from_json(
            tools.remove_comments(open(json_script).read()))
        # Normalize the workspace base to an absolute path once, up front.
        parameters["global"]["workspace"]["base"] = os.path.abspath(
            parameters["global"]["workspace"]["base"])
    except ValueError, e:
        print "Malformed json script."
        print e.message
        exit()

    observer = None
    cmd_thread = None
    if options.use_mpi:
        # Lazy import: the MPI stack is only loaded when actually requested.
        from pyproct.driver.mpidriver import MPIDriver
        from pyproct.driver.observer.MPIObserver import MPIObserver
        observer = MPIObserver()
        if options.print_messages:
            # Background thread that echoes observed messages to stdout.
            cmd_thread = CmdLinePrinter(observer)
Exemplo n.º 17
0
if __name__ == '__main__':
    # Command line: pyproct [--mpi] [--print] script.json
    parser = optparse.OptionParser(usage='%prog [--mpi] [--print] script', version=pyproct.__version__)

    parser.add_option('--mpi', action="store_true",  dest = "use_mpi", help="Add this flag if you want to use MPI-based scheduling.")
    parser.add_option('--print', action="store_true",  dest = "print_messages", help="Add this flag to print observed messages to stdout.")

    options, args = parser.parse_args()

    # The single positional argument is the json script to execute.
    if(len(args)==0):
        parser.error("You need to specify the script to be executed.")

    json_script = args[0]

    parameters = None
    try:
        # NOTE(review): this variant parses the raw file without stripping
        # comments first — confirm the scripts used here are strict json.
        parameters = ProtocolParameters.get_params_from_json(open(json_script).read())
        # Normalize the workspace base to an absolute path once, up front.
        parameters["global"]["workspace"]["base"] = os.path.abspath(parameters["global"]["workspace"]["base"])
    except ValueError, e:
        print "Malformed json script."
        print e.message
        exit()

    observer = None
    cmd_thread = None
    if options.use_mpi:
        # Lazy import: the MPI stack is only loaded when actually requested.
        from pyproct.driver.mpidriver import MPIDriver
        from pyproct.driver.observer.MPIObserver import MPIObserver
        observer = MPIObserver()
        if options.print_messages:
            # Background thread that echoes observed messages to stdout.
            cmd_thread = CmdLinePrinter(observer)
            cmd_thread.start()
Exemplo n.º 18
0
@author: victor
"""
from pyproct.driver.parameters import ProtocolParameters
from pyproct.driver.driver import Driver
from pyproct.driver.observer.observer import Observer
import sys
from pyproct.driver.handlers.matrix.matrixHandler import MatrixHandler
from pyproct.tools.matrixTools import get_submatrix
from pyproct.tools.plotTools import matrixToImage
from itertools import product
import numpy
from pyproct.tools.pdbTools import get_number_of_frames

if __name__ == '__main__':
    base_script = "".join(open("base_script.json","r").readlines())
    parameters = ProtocolParameters.get_params_from_json(base_script)
    parameters["global"]["workspace"]["base"] = sys.argv[3]
    parameters["data"]["files"] = [sys.argv[1], sys.argv[2]]

    frames_ini = get_number_of_frames(sys.argv[1])
    frames_proto = get_number_of_frames(sys.argv[2])
    print sys.argv[1],"->",frames_ini
    print sys.argv[2],"->",frames_proto

    try:
        Driver(Observer()).run(parameters)
    except SystemExit:
        # Expected improductive search
        # Load again the matrix
        handler = MatrixHandler({
            "method": "load",