Example #1
def create_directories(self, remove_existing=True):
    self.notify("MSG", "Creating workspace...")
    for path in self.data:
        if os.path.exists(self.data[path]) and remove_existing:
            self.notify("Removing Directory", self.data[path])
            shutil.rmtree(self.data[path])
        scripts_common.create_directory(self.data[path])
Example #2
def save_params_handler(self, data):
    data = convert_to_utf8(json.loads(data))
    create_directory("wizard/scripts")
    # Build a short, quasi-unique file name from a hash of the current time.
    my_hash = hashlib.sha1()
    my_hash.update(str(time.time()))
    path = os.path.join("wizard", "scripts", my_hash.hexdigest()[:10] + ".ppc")
    script_handler = open(path, "w")
    script_handler.write(json.dumps(data, sort_keys=False, indent=4,
                                    separators=(',', ': ')))
    script_handler.close()
    self.wfile.write('{"file_url":"' + path + '"}')
Example #3
    def create_directories(self):
        """
        Recreates the workspace structure. Removes the old location if necessary.
        """
        self.notify("MSG", "Creating workspace...")
        if self.parameters.get_value("overwrite", default_value=True):
            self.clear_directories(self.data.keys())

        for folder_key in self.data:
            scripts_common.create_directory(self.data[folder_key])
Example #4
    def create_directories(self):
        """
        Recreates the workspace structure. Removes the old location if necessary.
        """
        self.notify("MSG","Creating workspace...")
        if self.parameters.get_value("overwrite", default_value = True)  :
            self.clear_directories(self.data.keys())

        for folder_key in self.data:
            scripts_common.create_directory(self.data[folder_key])
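Examples #3 and #4 call a clear_directories method that is not shown in these snippets. A minimal sketch, assuming it mirrors the removal loop of Example #1 (the notify/rmtree behavior is taken from there; the body below is an illustration, not pyProCT's actual code):

import os
import shutil

def clear_directories(self, folder_keys):
    # Sketch only: remove each mapped directory that already exists,
    # reporting it the same way Example #1 does.
    for folder_key in folder_keys:
        path = self.data[folder_key]
        if os.path.exists(path):
            self.notify("Removing Directory", path)
            shutil.rmtree(path)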
Example #5
def show_results_handler(self, data):
    try:
        data = convert_to_utf8(json.loads(data))
        print "show_results_handler", data
        create_directory("results/tmp")
        # Fall back to the default "results" folder name if none was given.
        results = data["results"] if "results" in data else "results"
        results_path = os.path.join(data["base"], results, "results.json")
        shutil.copyfile(results_path, os.path.join("results", "tmp", "data.json"))
        webbrowser.open("http://" + IP + ":" + str(PORT) + "/results.html", new=0, autoraise=True)
        self.wfile.write("OK")
    except IOError:
        self.wfile.write("KO")
Example #6
def create_directory(self, data):
    data = convert_to_utf8(json.loads(data))
    print data
    try:
        success = create_directory(data['location'], ensure_writability=True)
        self.wfile.write(json.dumps({"done": success}))
    except Exception:
        self.wfile.write(json.dumps({"done": False}))
Example #7
def preprocess_pdb(args):
    pdb_file = args[1]
    output = "./" + args[2] + "/" + args[2]
    create_directory("./" + args[2])
    cluster_frames = get_frame_numbers(args)
    pdb = prody.parsePDB(pdb_file)
    # Get a copy of the pdb coords
    input_coordsets = numpy.array(pdb.getCoordsets()[cluster_frames])

    # Empty pdb
    pdb.delCoordset(range(pdb.numCoordsets()))

    # Build another pdb to store it
    input_pdb = prody.parsePDB(pdb_file)
    input_pdb.delCoordset(range(input_pdb.numCoordsets()))
    # And add the chosen coordsets
    for i in range(len(cluster_frames)):
        input_pdb.addCoordset(input_coordsets[i])
    prody.writePDB(output + "_ini.pdb", input_pdb)
    print_matrix(input_pdb.select("name CA").getCoordsets(), output)
    return pdb, input_coordsets, cluster_frames, output
Example #8
def preprocess_pdb(args):
    pdb_file = args[1]
    output = "./" + args[2] + "/" + args[2]
    create_directory("./" + args[2])
    cluster_frames = get_frame_numbers(args)
    pdb = prody.parsePDB(pdb_file)
    # Get a copy of the pdb coords
    input_coordsets = numpy.array(pdb.getCoordsets()[cluster_frames])

    # Empty pdb
    pdb.delCoordset(range(pdb.numCoordsets()))

    # Build another pdb to store it
    input_pdb = prody.parsePDB(pdb_file)
    input_pdb.delCoordset(range(input_pdb.numCoordsets()))
    # And add the chosen coordsets
    for i in range(len(cluster_frames)):
        input_pdb.addCoordset(input_coordsets[i])
    prody.writePDB(output + "_ini.pdb", input_pdb)
    print_matrix(input_pdb.select("name CA").getCoordsets(), output)
    return pdb, input_coordsets, cluster_frames, output
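preprocess_pdb expects a CLI-style argument list: args[1] is the input PDB trajectory and args[2] serves both as output directory and file prefix (get_frame_numbers may read further arguments). A hypothetical call, with illustrative names only:

# "traj.pdb" and "run1" are made-up names for illustration.
pdb, input_coordsets, cluster_frames, output = preprocess_pdb(["prog", "traj.pdb", "run1"])
# Writes ./run1/run1_ini.pdb and prints the CA coordinate matrix via print_matrix.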
Example #9
def test_create_directory(self):
    create_directory("tmp_test/test")
    self.assertTrue(os.path.exists("tmp_test/test"))
    os.system("rm -rf tmp_test")
    self.assertFalse(create_directory("/folder_at_root", True))
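Every example in this list leans on the create_directory helper from pyproct.tools.scriptTools, whose implementation is not reproduced here. A minimal sketch consistent with the calls above (creates the directory tree if missing, returns a boolean, and fails on unwritable locations such as the /folder_at_root case in this test) could look like:

import os

def create_directory(directory_path, ensure_writability=False):
    # Sketch of the expected behavior, not pyProCT's actual code.
    try:
        if ensure_writability:
            # Walk up to the nearest existing ancestor and check write access.
            parent = os.path.dirname(os.path.abspath(directory_path))
            while not os.path.exists(parent):
                parent = os.path.dirname(parent)
            if not os.access(parent, os.W_OK):
                return False
        if not os.path.exists(directory_path):
            os.makedirs(directory_path)
        return True
    except OSError:
        return False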
Example #10
"""
Created on 05/06/2014

@author: victor
"""

import sys
import os.path
import pyproct.tools.scriptTools as tools

level = sys.argv[1]

execution_range = int(sys.argv[2]), int(sys.argv[3]) + 1  # second bound is included

exec_path = os.getcwd()

base_folder = {"level2": "scripts", "level1": "scripts", "level0": ""}

base = os.path.join(base_folder[level], level, "run")
tools.create_directory(base)

for script_number in range(*execution_range):
    pyproct_script = os.path.join(base_folder[level], level,
                                  "script_%s_%d.json" % (level, script_number))
    os.system("python -m pyproct.main %s" % pyproct_script)
Example #11
    # load colors per cluster
    #colors = [(1.,0.,0.), (0.,1.,0.), (0.,0.,1.)]*10
    import seaborn as sns
    colors = sns.hls_palette(15, l=.3, s=.8)
    
    # VMD execution template
    template = open("/home/victor/git/PhD-GPCR/PhD-GPCR-2/data/load_script_representatives.tcl").read()
    
    for line in open(options.input):
        protein, drug, folder = line.strip().split()

        # Sorting clusters and regenerating the same palette keeps each cluster_id paired with the same color.
        representatives_file = os.path.join(folder, "representatives.pdb")
        
        output_folder = os.path.join(options.output_folder, drug, protein)
        create_directory(output_folder)
        
        pdb = parsePDB(representatives_file)
        writePDB(os.path.join(output_folder,"protein.pdb"), pdb.select("protein"), csets = [0])
        writePDB(os.path.join(output_folder,"ligands.pdb"), pdb.select("resname %s"%drug))
        
        num_clusters = pdb.numCoordsets()
        clusters_file = open(os.path.join(output_folder,"cluster_colors"), "w")
        for i in range(num_clusters):
            clusters_file.write("%.2f %.2f %.2f%s"%(   colors[i][0],
                                                       colors[i][1],
                                                       colors[i][2],
                                ("\n" if i <(num_clusters-1) else "")))
        clusters_file.close()
        
        camera_settings = ""; camera_settings_zoomed = ""; option_camera = "#"; pre_render_file = ""; rendered_file = ""; option_zoom = "#"
Example #12
"""

import json
import os.path
import validation.bidimensional.datasets as data
import validation.bidimensional.validationTools as vtools
from pyproct.tools.scriptTools import create_directory
from pyproct.driver.parameters import ProtocolParameters
from pyproct.driver.observer.observer import Observer
from pyproct.driver.driver import Driver
from pyproct.tools.commonTools import convert_to_utf8
from pyproct.clustering.clustering import Clustering
from pyproct.data.matrix.matrixHandler import MatrixHandler

if __name__ == '__main__':
    create_directory("./clustering_images")
    create_directory("./matrices")
    create_directory("./tmp")
    condensed_matrices, all_observations = vtools.create_matrices(data)

    # Saving matrices
    for dataset_name in data.all_datasets:
        handler = MatrixHandler(condensed_matrices[dataset_name],
                                {"method": "load"})
        handler.save_matrix("./matrices/%s" % dataset_name)

    # Run pyProCT for each of them
    base_script = "".join(open("base_script.json", "r").readlines())
    for dataset_name in ['concentric_circles']:  # data.all_datasets: #["spaeth_06"]:#
        print dataset_name
Example #13
"""

import json
import os.path
import validation.bidimensional.datasets as data
import validation.bidimensional.validationTools as vtools
from pyproct.tools.scriptTools import create_directory
from pyproct.driver.parameters import ProtocolParameters
from pyproct.driver.observer.observer import Observer
from pyproct.driver.driver import Driver
from pyproct.tools.commonTools import convert_to_utf8
from pyproct.clustering.clustering import Clustering
from pyproct.data.matrix.matrixHandler import MatrixHandler

if __name__ == '__main__':
    create_directory("./clustering_images")
    create_directory("./matrices")
    create_directory("./tmp")
    condensed_matrices, all_observations = vtools.create_matrices(data)
    
    # Saving matrices
    for dataset_name in data.all_datasets:
        handler = MatrixHandler(condensed_matrices[dataset_name], {"method": "load"})
        handler.save_matrix("./matrices/%s" % dataset_name)

    # Run pyProCT for each of them
    base_script = "".join(open("base_script.json","r").readlines())
    for dataset_name in ['concentric_circles']: #data.all_datasets: #["spaeth_06"]:#
        print dataset_name
        # Change placeholders
        script_str = base_script%(os.path.abspath("./tmp/%s"%dataset_name),"./matrices/%s"%dataset_name)

Example #14
    options, args = parser.parse_args()
    input_file = args[0]
    control_info = json.loads(open(input_file, "r").read())

    print "Selected action = %s" % options.action
    # Generate regression tests
    if options.action == "GENERATE":
        print "Generating Expected Results"
        for test_info in control_info:
            print "Generating: %s" % test_info["name"]
            stdout_file, stderr_file = execute_pyproct(test_info["script"])
            script = json.loads(tools.remove_comments(open(test_info["script"], "r").read()))
            workspace = script["global"]["workspace"]
            s_tools.create_directory(test_info["expected_results_dir"])

            # Move the generated files
            os.system("mv %s %s %s"%(stdout_file, stderr_file,test_info["expected_results_dir"]))
            for subpath in test_info["files_to_check"]:
                for file in test_info["files_to_check"][subpath]:
                    os.system("mv %s %s"%(os.path.join(workspace["base"],subpath,file),
                                                       test_info["expected_results_dir"]))

            # clean(workspace)

    # Execute regression tests
    elif options.action == "TEST":
        print "Testing results against Expected Results dir"
        log_handler = open(options.log_file, "w")
Example #15
"""
Created on 05/06/2014

@author: victor
"""

import sys
import os.path
import pyproct.tools.scriptTools as tools

level = sys.argv[1]

execution_range = int(sys.argv[2]), int(sys.argv[3]) + 1  # second bound is included

exec_path = os.getcwd()

base_folder = {
    "level2": "scripts",
    "level1": "scripts",
    "level0": ""
}

base = os.path.join(base_folder[level], level, "run")
tools.create_directory(base)

for script_number in range(*execution_range):
    pyproct_script = os.path.join(base_folder[level], level, "script_%s_%d.json"%(level,script_number))
    os.system("python -m pyproct.main %s"%pyproct_script)
Example #16
"""
Created on 05/06/2014

@author: victor
"""
import pyproct.tools.scriptTools as tools
import os.path

import sys

LEVEL = sys.argv[1]

tools.create_directory("scripts/level2")
tools.create_directory("scripts/level1")
tools.create_directory("scripts/level0")

all_initial_files = open(sys.argv[2])
print "Working with filelist: %s"%sys.argv[2]

BASE_TRAJ_FOLDER = "/gpfs/scratch/bsc72/bsc72476/Victor/2JOF"
BASE_SCRIPT_FOLDER = "scripts"
BASE_CLUSTERING_FOLDER = "compressions"

script_index = 0
all_scripts = []


LEVEL_TEMPLATE = "".join(open(os.path.join(BASE_SCRIPT_FOLDER, "%s_base.json"%LEVEL)).readlines())

for file_path in all_initial_files.readlines():
    traj_path = os.path.join(BASE_TRAJ_FOLDER, file_path.strip())
Example #17
def test_create_directory(self):
    create_directory("tmp_test/test")
    self.assertTrue(os.path.exists("tmp_test/test"))
    os.system("rm -rf tmp_test")
    self.assertFalse(create_directory("/folder_at_root", True))
Example #18
    options, args = parser.parse_args()
    input_file = args[0]
    control_info = json.loads(open(input_file, "r").read())

    print "Selected action = %s" % options.action
    # Generate regression tests
    if options.action == "GENERATE":
        print "Generating Expected Results"
        for test_info in control_info:
            print "Generating: %s" % test_info["name"]
            stdout_file, stderr_file = execute_pyproct(test_info["script"])
            script = json.loads(
                tools.remove_comments(open(test_info["script"], "r").read()))
            workspace = script["global"]["workspace"]
            s_tools.create_directory(test_info["expected_results_dir"])

            # Move the generated files
            os.system(
                "mv %s %s %s" %
                (stdout_file, stderr_file, test_info["expected_results_dir"]))
            for subpath in test_info["files_to_check"]:
                for file in test_info["files_to_check"][subpath]:
                    os.system("mv %s %s" %
                              (os.path.join(workspace["base"], subpath, file),
                               test_info["expected_results_dir"]))

            # clean(workspace)

    # Execute regression tests
    elif options.action == "TEST":
Example #19
"""
Created on 05/06/2014

@author: victor
"""
import pyproct.tools.scriptTools as tools
import os.path

import sys

LEVEL = sys.argv[1]

tools.create_directory("scripts/level2")
tools.create_directory("scripts/level1")
tools.create_directory("scripts/level0")

all_initial_files = open(sys.argv[2])
print "Working with filelist: %s" % sys.argv[2]

BASE_TRAJ_FOLDER = "/gpfs/scratch/bsc72/bsc72476/Victor/2JOF"
BASE_SCRIPT_FOLDER = "scripts"
BASE_CLUSTERING_FOLDER = "compressions"

script_index = 0
all_scripts = []

LEVEL_TEMPLATE = "".join(
    open(os.path.join(BASE_SCRIPT_FOLDER, "%s_base.json" % LEVEL)).readlines())

for file_path in all_initial_files.readlines():
    traj_path = os.path.join(BASE_TRAJ_FOLDER, file_path.strip())