Example #1
def plot_minimizations(args, rescale=False, plot_dist=False):
    """PLOT ALL THE MINIMZATION RESULTS FOR THIS SIMULATION"""
    runner = rs.StringIterationRunner(args)
    plt.title("Minimization results")
    plot_reference_structures(runner, rescale=rescale)
    utils.plot_path(colvars.rescale_evals(runner.stringpath, runner.cvs)
                    if rescale else runner.stringpath,
                    label="Initial String")
    stringdist = np.zeros((len(runner.stringpath), ))
    for i in range(len(runner.stringpath)):
        trajpath = runner.point_path(i) + runner.point_name(
            i) + "-minimization.gro"
        if os.path.exists(trajpath):
            traj = md.load(abspath(trajpath))
            logger.debug("point %s: %s", i, traj)
            val = colvars.eval_cvs(traj, runner.cvs, rescale=rescale)
            utils.plot_path(val, text=str(i))
            stringdist[i] = np.linalg.norm(val - runner.stringpath[i])
        else:
            logger.warn("Minimization result for iteration %s not found", i)
    plt.legend()
    plt.show()
    if plot_dist:
        plt.title("minimization distance to points on string")
        plt.plot(stringdist)
        plt.show()
Example #2
def show_all_freemd(args,
                    traj_stride=10,
                    startpart=2,
                    endpart=78,
                    file_stride=1,
                    save=False):
    runner = rs.StringIterationRunner(args)
    traj = None
    for i in range(startpart, endpart + 1, file_stride):
        if i < 3:
            filename = "/home/oliverfl/projects/gpcr/simulations/freemd/freemd-dec2017/3p0g_prod4"
        else:
            endstr = "0" + str(i) if i < 10 else str(i)
            filename = "/home/oliverfl/projects/gpcr/simulations/freemd/freemd-dec2017/3p0g_prod4.part00" + endstr
        logger.debug("Loading file %s", filename)
        t = plot_any_traj(
            runner,
            filename,
            rescale=False,
            # label="part %s" % i,
            trajformat="trr",
            stride=traj_stride,
            show_plot=False)
        if save:
            traj = t if traj is None else traj + t

    if save:
        traj.save(
            "/home/oliverfl/projects/gpcr/simulations/freemd/freemd-dec2017/merged-part%sto%sstride%s.xtc"
            % (startpart, endpart, traj_stride))
    plt.show()
Example #3
def plot_input_frames(args):
    """PLOT THE INITIAL COORDINATES; BEFORE MINIMIZATION FOR THIS SIMULATION"""
    runner = rs.StringIterationRunner(args)
    plt.title("INPUT FRAMES")
    plot_reference_structures(runner)
    utils.plot_path(runner.stringpath, label="Initial String")
    inittraj = None
    for i in range(len(runner.stringpath)):
        trajpath = runner.point_path(i) + runner.point_name(i)
        infile = trajpath + "-in.gro"
        if not os.path.exists(infile):
            logger.warn("%s files not found for point %s. Skipping", infile, i)
            continue
        intraj = md.load(abspath(infile))
        inittraj = intraj if inittraj is None else inittraj + intraj
    logger.debug("init-traj: %s", inittraj)
    inpath = colvars.eval_cvs(inittraj, runner.cvs)
    utils.plot_path(inpath, label="Init coordinates")
    # utils.plot_endpoints(np.matrix([colvars.eval_cvs(traj, runner.cvs) for traj in inittraj]), label="Init gros2")
    plt.legend()
    plt.show()
    if len(runner.stringpath) == len(inpath):
        diff = np.linalg.norm(runner.stringpath - inpath, axis=1)
    elif len(runner.stringpath) == (len(inpath) + 2):
        # Fixed endpoints
        diff = np.linalg.norm(runner.stringpath[0:-2] - inpath, axis=1)
    else:
        logger.error("Path lengths differ: %s vs %s", len(runner.stringpath))
        return
    plt.plot(diff, label="Distance between input coordinates and string")
    plt.scatter(range(len(diff)), diff)
    plt.legend()
    plt.show()
Example #4
def compare_5cvs_pbc_condition(args, point_filetype="gro", tol=1e-5):
    runner = rs.StringIterationRunner(args)
    cvs_pbc = create_cvs.create_5cvs(normalize=False, pbc=True)
    cv_nopbc = create_cvs.create_5cvs(normalize=False, pbc=False)

    # Load all restrained simus.
    fake_iter = 999
    point_dir = runner.point_path(fake_iter, iteration=fake_iter)
    point_path = point_dir + runner.point_name(
        fake_iter, iteration=fake_iter) + "-restrained." + point_filetype
    point_path = point_path.replace(str(fake_iter), "*")
    all_restrained_paths = sorted(glob.glob(point_path))
    ntraj = len(all_restrained_paths)
    logger.debug("Comparing %s trajectories ", ntraj)
    for traj_idx, traj_path in enumerate(all_restrained_paths):
        if traj_idx % np.ceil(ntraj / 100) == 0:
            logger.debug("Comparing traj %s/%s", traj_idx, ntraj)
        traj = md.load(traj_path, top=runner.topology)
        evals_pbc = colvars.eval_cvs(traj, cvs_pbc)
        evals_nopbc = colvars.eval_cvs(traj, cv_nopbc)
        # For every restrained simu, compare pbc to no pbc. See if they differ.
        for frame_idx, frame_pbc in enumerate(evals_pbc):
            frame_nopbc = evals_nopbc[frame_idx]
            for cv_idx, val_pbc in enumerate(frame_pbc):
                val_nopbc = frame_nopbc[cv_idx]
                if not abs(val_nopbc - val_pbc) < tol:
                    raise Exception(
                        "The two values are not equal: %s, %s. For cv %s and trajectory %s"
                        % (val_pbc, val_nopbc, cvs_pbc[cv_idx], traj_path))
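Note: the per-element tolerance loop in Example #4 can be collapsed into a single vectorized comparison. A minimal sketch with NumPy (assuming eval_cvs returns arrays of the same shape for both CV sets), not the script's actual implementation:

import numpy as np


def assert_pbc_consistent(evals_pbc, evals_nopbc, tol=1e-5):
    """Raise if the PBC and no-PBC CV evaluations differ by more than tol anywhere."""
    evals_pbc, evals_nopbc = np.asarray(evals_pbc), np.asarray(evals_nopbc)
    if not np.allclose(evals_pbc, evals_nopbc, atol=tol):
        # Locate the largest deviation to point at the offending frame and CV.
        worst = np.unravel_index(np.argmax(np.abs(evals_pbc - evals_nopbc)),
                                 evals_pbc.shape)
        raise ValueError("PBC/no-PBC CV values differ at (frame, cv) index %s" % (worst,))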
Example #5
def start():
    logger.info("----------------Starting removing files------------")
    simulations = [
        "apo-optimized", "holo-optimized", "endpoints-holo", "endpoints-apo",
        "to3sn6-holo", "to3sn6-apo", "straight-holo-optimized", "holo-curved",
        "apo-curved", "holo-straight", "beta1-apo", "pierre-ash79",
        "pierre-asp79_Na"
    ]
    delete_filetypes = [
        "cpt",
        "tpr",
        "log",
        "trr",
    ]
    total_nfiles_removed, total_filesize_mb = 0, 0
    for simu_id in simulations:
        args = get_args_for_simulation(simu_id)
        runner = rs.StringIterationRunner(args)
        for filetype in delete_filetypes:
            nfiles_removed, filesize_mb = delete_for_filetype(runner, filetype)
            total_nfiles_removed += nfiles_removed
            total_filesize_mb += filesize_mb
        nfiles_removed, filesize_mb = delete_input_coordinate_files(runner)
        total_nfiles_removed += nfiles_removed
        total_filesize_mb += filesize_mb
        nfiles_removed, filesize_mb = delete_backup_files(runner)
        total_nfiles_removed += nfiles_removed
        total_filesize_mb += filesize_mb
        # TODO submission files
        # TODO maybe edr files
        # TODO compress everything to xtc and only necessary frames
    logger.info(
        "##########SUMMARY##########\nDeleted %s files in total, with a total file size of %s GB",
        total_nfiles_removed, total_filesize_mb / 1024)
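Note: delete_for_filetype is not part of this listing. A hypothetical sketch of what such a helper might look like, assuming files are globbed by extension under runner.working_dir and that the returned size is in MB; the real project code may differ:

import glob
import os


def delete_for_filetype(runner, filetype):
    """Remove every *.<filetype> file under the runner's working directory (hypothetical helper)."""
    nfiles_removed, filesize_mb = 0, 0.0
    pattern = os.path.join(runner.working_dir, "**", "*." + filetype)
    for path in glob.glob(pattern, recursive=True):
        filesize_mb += os.path.getsize(path) / (1024.0 * 1024.0)
        os.remove(path)
        nfiles_removed += 1
    return nfiles_removed, filesize_mb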
Example #6
def plot_simu_trajs(args,
                    type,
                    show_reference_structures=True,
                    rescale=False,
                    show_label=True,
                    show_text=False):
    runner = rs.StringIterationRunner(args)
    plt.title(type + " SIMUS")
    if show_reference_structures:
        plot_reference_structures(runner, rescale=rescale)
    utils.plot_path(colvars.rescale_evals(runner.stringpath, runner.cvs)
                    if rescale else runner.stringpath,
                    label="Initial String")
    trajcount = 0
    for i in range(len(runner.stringpath)):
        trajpath = runner.point_path(i) + runner.point_name(i)
        filename = trajpath + type + ".trr"
        if not os.path.exists(filename):
            logger.warn("File %s not found for point %s. Skipping", filename,
                        i)
            continue
        traj = md.load(abspath(trajpath + type + ".trr"),
                       top=runner.topology)  # trajpath + type + ".gro"))
        description = type + str(i)
        utils.plot_path(colvars.eval_cvs(traj, runner.cvs, rescale=rescale),
                        label=description if show_label else None,
                        text=description if show_text else None)
        trajcount += 1
    if trajcount < 4:
        plt.legend()
    plt.show()
Example #7
def plot_custom_strings(args,
                        filepaths=[],
                        additional_strings=[],
                        rescale=False,
                        legend=True,
                        twoD=False,
                        cv_indices=None,
                        show_reference_structures=False):
    """Find all string-paths and plot them"""
    runner = rs.StringIterationRunner(args)
    if cv_indices is None:
        cv_indices = [i for i in range(runner.stringpath.shape[1])]
    plt.title("ALL STRINGS")
    if show_reference_structures:
        plot_reference_structures(runner, rescale=rescale, twoD=twoD)
    last = None
    for fp in filepaths:
        try:
            path = np.loadtxt(fp)[:, cv_indices]
            plotpath = colvars.rescale_evals(path,
                                             runner.cvs) if rescale else path
            utils.plot_path(plotpath,
                            label=fp.split("/")[-1],
                            text=None,
                            legend=legend,
                            twoD=twoD,
                            axis_labels=[
                                get_cv_description(cv, use_simpler_names=True)
                                for cv in np.array(runner.cvs)[cv_indices]
                            ])
            plt.grid()
            # utils.plot_path(plotpath, label="Stringpath %s" % i, text=None, legend=legend)

            last = path
        except IOError as err:
            tb = traceback.format_exc()
            logger.error(tb)
            logger.info(
                "Did not find string in filepath %s. Not looking for sequential strings",
                fp)
            break
    for i, path in enumerate(additional_strings):
        plotpath = colvars.rescale_evals(path, runner.cvs) if rescale else path
        utils.plot_path(plotpath,
                        label="a%s" % i,
                        text=None,
                        legend=legend,
                        twoD=twoD,
                        axis_labels=[
                            get_cv_description(cv, use_simpler_names=True)
                            for cv in np.array(runner.cvs)[cv_indices]
                        ])
        plt.grid()
        last = path
    if last is None:
        return
    if legend:
        plt.legend()
    plt.show()
Example #8
def plot_swarms_path(args):
    """PLOT THE SWARMS FOR THIS ITERATION"""
    runner = rs.StringIterationRunner(args)
    plt.title("SWARMS PATHS")
    processor = SingleIterationPostProcessor(runner,
                                             save=False,
                                             plot=True,
                                             ignore_missing_files=True)
    processor.run()
    logger.info("Convergence after iteration %s: %s", runner.iteration,
                processor.converge)
Example #9
def merge_restrained_to_file(args, filename, iterations=None):
    runner = rs.StringIterationRunner(args)
    if iterations is None:
        iterations = [runner.iteration]
    traj = None
    for i in iterations:
        runner.init_iteration(i)
        iterationtraj = merge_restrained(runner)
        if traj is None:
            traj = iterationtraj
        else:
            traj += iterationtraj
        logger.debug("Done with iteration %s, generated traj: %s", i,
                     iterationtraj)
    traj.save(filename)
    return traj
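Note: a hypothetical call, assuming args is the same namespace used by the other commands and that the output path is writable; the iterations argument accepts any iterable of iteration numbers:

# Merge the restrained trajectories of iterations 1-10 into one file (hypothetical output path).
merged = merge_restrained_to_file(args, "/tmp/restrained-merged-iter1to10.xtc",
                                  iterations=range(1, 11))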
Example #10
def compare_dror_path(args):
    """Find all string-paths and plot them"""
    runner = rs.StringIterationRunner(args)
    plt.title("Dror String vs. first/lats string")
    plot_reference_structures(runner)
    drorpath = plot_dror_path(runner)
    first = np.loadtxt(runner.working_dir + runner.string_filepath % 0)
    current = np.loadtxt(runner.working_dir +
                         runner.string_filepath % runner.iteration)
    utils.plot_path(first, label="First")
    utils.plot_path(current, label="Iteration %s" % runner.iteration)
    plt.legend()
    reldiff = np.abs(
        (utils.compute_path_length(drorpath) -
         utils.compute_path_length(current)) /
        utils.compute_path_length(drorpath))
    logger.info("Relative difference between Dror path and final: %s percent",
                100 * reldiff)
    plt.show()
Example #11
def compute_string_with_new_length(args,
                                   new_length,
                                   iteration=None,
                                   savepath=None,
                                   plot=True):
    runner = rs.StringIterationRunner(args)
    if iteration is not None:
        runner.init_iteration(iteration)
    new_stringpath = utils.change_string_length(runner.stringpath, new_length)
    if plot:
        utils.plot_path(runner.stringpath, label="Original path", twoD=True)
        utils.plot_path(new_stringpath, label="New path", twoD=True)
        plt.show()
    logger.info("New string with new length:%s\n%s", new_stringpath)
    if savepath is None:
        print(runner.string_filepath)
        savepath = runner.string_filepath % (str(runner.iteration - 1) +
                                             "_len" + str(new_length))
    np.savetxt(savepath, new_stringpath)
    logger.info("Saved to %s", savepath)
Example #12
def merge_swarms_to_file(args,
                         filename,
                         iterations=None,
                         ignore_missing_files=True):
    runner = rs.StringIterationRunner(args)
    if iterations is None:
        iterations = [runner.iteration]
    traj = None
    for i in iterations:
        runner.init_iteration(i)
        processor = SingleIterationPostProcessor(
            runner, ignore_missing_files=ignore_missing_files)
        iterationtraj = processor.merge_swarms()
        if traj is None:
            traj = iterationtraj
        else:
            traj += iterationtraj
        logger.debug("Done with iteration %s, generated traj: %s", i,
                     iterationtraj)
    traj.save(filename)
    return traj
Example #13
def compute_drifted_string(args, plot=True, iteration=None, twoD=False):
    """Return the unparametrized string after the swarms drift
    :param plot:
    """
    runner = rs.StringIterationRunner(args)
    if iteration is not None:
        runner.init_iteration(iteration)

    plt.title("SWARMS PATHS")
    processor = SingleIterationPostProcessor(runner, save=False, plot=False)
    processor.run()
    driftstring = processor.compute_drifted_string()
    reparametrized_weights = processor.compute_new_stringpath()
    reparametrized_noweights = utils.reparametrize_path_iter(
        driftstring, arclength_weight=None)
    if plot:
        utils.plot_path(runner.stringpath, label="Input", twoD=twoD)
        utils.plot_path(reparametrized_weights,
                        label="reparametrized weights",
                        twoD=twoD)
        utils.plot_path(reparametrized_noweights,
                        label="reparametrized no weights",
                        twoD=twoD)
        utils.plot_path(driftstring, label="Drifted", twoD=twoD)
        plt.show()
    logger.debug(
        "Convergence to input path between reparametrized with weights %s and without weights %s",
        np.linalg.norm(reparametrized_weights - runner.stringpath) /
        np.linalg.norm(runner.stringpath),
        np.linalg.norm(reparametrized_noweights - runner.stringpath) /
        np.linalg.norm(runner.stringpath))
    logger.debug(
        "Convergence to drifted path between reparametrized with weights %s and without weights %s",
        np.linalg.norm(reparametrized_weights - driftstring) /
        np.linalg.norm(driftstring),
        np.linalg.norm(reparametrized_noweights - driftstring) /
        np.linalg.norm(driftstring))
    return driftstring
Example #14
 def __init__(self, args, cvtype, start_iteration, last_iteration, transition_frame_loader=None,
              stationary_method="msm", load_transition_count=None):
     object.__init__(self)
     self.runner = rs.StringIterationRunner(args)
     self.cvtype = cvtype
     self.field_visualizer = None
     self.rescale = True
     self.smear_transitions = False
     self.normalize_grid = False
     self.field_cutoff = 10
     self.field_label = None
     self.gridmin, self.gridmax = None, None
     self.stringpath_type = "None"
     self.show_network, self.show_FE = False, True
     self.plot_probability = False
     self.stationary_method = stationary_method
     self.start_iteration, self.last_iteration = start_iteration, last_iteration
     self.transition_frame_loader = transition_frame_loader
     self.save_network_plot = True
     self.cvi, self.cvj = 0, None
     self.cv_indices = None
     self.dependent_cvs = None
     self.load_transition_count = load_transition_count
     """DIFFERENT CVS"""
     if cvtype.startswith("5cvs"):
         color_cvs = utils.load_binary("../gpcr/cvs/old/cvs-len5_good/cvs.pkl")
         self.cvs = np.array(color_cvs)
         indices_string = cvtype.split("_")[1].split("and")
         self.cvi = int(indices_string[0])
         if indices_string[1] == "None":
             self.cvj = None
         else:
             self.cvj = int(indices_string[1])
         self.gridmin, self.gridmax = None, None  # -0.2, 1.2
         self.ngrid = 39
         self.smear_transitions = False
         self.rescale = True
         self.stringpath_type = "avg"
     elif cvtype.startswith("beta1-5cvs"):
         color_cvs = utils.load_binary("../gpcr/cvs/beta1-cvs/cvs.pkl")
         self.cvs = np.array(color_cvs)
         indices_string = cvtype.split("_")[1].split("and")
         self.cvi = int(indices_string[0])
         if indices_string[1] == "None":
             self.cvj = None
         else:
             self.cvj = int(indices_string[1])
         self.ngrid = 30
         self.smear_transitions = False
         self.rescale = False
         self.stringpath_type = "avg"
     elif cvtype.startswith("beta1-npxxy"):
         self.cvs = create_cvs.create_beta1_npxxy_cvs()
         self.cvi, self.cvj = 0, 1
         self.ngrid = 30
         self.smear_transitions = False
         self.rescale = True
         self.normalize_grid = True
     elif cvtype == "ionic_lock-YY":
         self.cvs = [create_cvs.create_ionic_lock_cv(), create_cvs.create_YY_cv()]
         self.cvi, self.cvj = 0, 1
         self.ngrid = 41
         self.rescale = True
     elif cvtype == "ioniclock_old":
         self.stringpath_type = "1D"
         # A CV along a single reaction coordinate
         self.cvs = np.array(utils.load_binary("/home/oliverfl/projects/gpcr/cvs/ionic-lock-COM.pkl"))
         self.gridmin, self.gridmax = None, None  # 0.35, 2.1
         self.ngrid = 61
         # self.smear_transitions = False
         self.rescale = True
     elif cvtype == "YY_old":
         res1, res2 = 219, 326
         self.cvs = [colvars.CADistanceCv("|Y219-Y326|(CA)", res1, res2, periodic=False)]
         self.gridmin, self.gridmax = None, None
         self.ngrid = 101
         # self.smear_transitions = False
         self.rescale = True
         self.plot_probability = False
         self.field_cutoff = 50
     elif cvtype == 'rmsd':
         self.cvi, self.cvj = 0, 1
         self.cvs = utils.load_binary("../gpcr/cvs/rmsd-cvs/cvs.pkl")
         self.gridmin, self.gridmax = None, None  # -0.2, 2.0
         self.ngrid = 50
     elif cvtype.startswith("loose_coupling_"):
         self.cvi, self.cvj = 0, 1
         ligand_cv, connector_cv, gprotein_cv = create_cvs.create_loose_coupling_cvs(normalize=True)
         if cvtype.endswith("ligand_connector"):
             self.cvs = np.array([ligand_cv, connector_cv])
         elif cvtype.endswith("ligand_gprotein"):
             self.cvs = np.array([ligand_cv, gprotein_cv])
         elif cvtype.endswith("connector_gprotein"):
             self.cvs = np.array([connector_cv, gprotein_cv])
         self.rescale = True
         self.smear_transitions = False
         self.ngrid = 29  # 51 smearing, 29 regular
     elif cvtype == "dror":
         self.cvi, self.cvj = 0, 1  # 3, 4
         self.cvs = utils.load_binary("../gpcr/cvs/dror-cvs/cvs.pkl")
         logger.info("Norm scales for dror CVs, %s", ([(cv._norm_scale, cv._norm_offset) for cv in self.cvs]))
         self.gridmin, self.gridmax = -0.6, 1.8
         self.ngrid = 51 #41 for holo
         self.smear_transitions = False
         self.rescale = True
     elif cvtype == "probability_classifier":
         dt, nclusters = 4, 3
         probability_classifier_cvs = utils.load_binary(
             "/home/oliverfl/projects/gpcr/neural_networks/strajs_%s_clusters_dt%s/probability_classifier_cvs.pkl" % (
                 nclusters, dt))
         self.cvi, self.cvj = 0, 1
         self.cvs = np.array([probability_classifier_cvs[self.cvi], probability_classifier_cvs[self.cvj]])
         self.gridmin, self.gridmax = 0, nclusters
         self.ngrid = nclusters + 1
         self.show_network, self.show_FE = True, True
     elif cvtype == "nmrcvs":
         color_cvs = utils.load_binary("../gpcr/cvs/nmr-cvs/cvs.pkl")
         self.cvi, self.cvj = 0, 1  # , 2  # 3, 4
         self.cvs = np.array([color_cvs[self.cvi]])
         self.gridmin, self.gridmax = -0.2, 1.2
         self.ngrid = 40
         self.rescale = True
     elif cvtype.startswith("nature_2018"):
         # From https://www.nature.com/articles/nature22354
         # Distance 266-148
         res1, res2 = 266, 148
         self.cvs = [colvars.CADistanceCv("|TM4-TM6|(CA)", res1, res2, periodic=False)]
         self.gridmin, self.gridmax = 1.5, 6
         self.ngrid = 50
         # self.smear_transitions = False
         self.rescale = True
         self.field_cutoff = 1000
         if not cvtype.endswith("FE"):
             self.plot_probability = True
             self.field_visualizer = FRETConverter.convert_to_fret_efficiency
     elif cvtype == "cell_2015":
         # From https://www.sciencedirect.com/science/article/pii/S0092867415004997
         # Distance 265-148 (but 265 is missing in our system so we use 266 instead)
         res1, res2 = 266, 148
         # qCom = "protein and resSeq {} and element != H"
         # self.cvs = [colvars.CADistanceCv("|TM4-TM6|(CA)", res1, res2, periodic=False)]
         # self.cvs = [colvars.COMDistanceCv("|TM4-TM6|(COM)", qCom.format(res1), qCom.format(res2))]
         self.cvs = [colvars.MaxDistanceCv("Max|TM4-TM6|", res1, res2)]
         self.gridmin, self.gridmax = 1.5, 6
         self.ngrid = 100
         # self.smear_transitions = False
         self.rescale = True
         self.plot_probability = True
         self.field_cutoff = 1000
     elif cvtype.startswith("DRY-motif"):
         self.cvi, self.cvj = 0, None
         active_DRY_rmsd, inactive_DRY_rmsd = create_cvs.create_DRY_cvs(normalize=True)
         self.cvs = np.array([active_DRY_rmsd if cvtype == "DRY-motif-active" else inactive_DRY_rmsd])
         self.ngrid = 40
         # self.smear_transitions = False
         self.rescale = True
     elif cvtype.startswith("discrete_classifier"):
         dt, nclusters = 4, 3
         if cvtype == "discrete_classifier_old":
             discrete_classifier_cv = utils.load_binary(
                 "/home/oliverfl/projects/gpcr/neural_networks/strajs_%s_clusters_dt%s/discrete_classifier_cv.pkl" % (
                     nclusters, dt))
             self.cvs = np.array([discrete_classifier_cv])
             self.cvi, self.cvj = 0, None
         else:
             classifier = utils.load_binary("/home/oliverfl/projects/gpcr/neural_networks/drorAcvs/classifier.pkl")
             scaler = utils.load_binary("/home/oliverfl/projects/gpcr/neural_networks/drorAcvs/scaler.pkl")
             simulation_cvs = utils.load_binary("../gpcr/cvs/cvs-len5_good/cvs.pkl")
             discrete_classifier_cv = trajclassifier.DependentDiscreteClassifierCv(
                 "Cluster",
                 trajclassifier.CvsVectorizer(simulation_cvs),
                 scaler,
                 classifier
             )
             self.dependent_cvs = np.array([discrete_classifier_cv])
             self.cvs = simulation_cvs
             self.cv_indices = [i for i in range(len(simulation_cvs))]
         self.smear_transitions = False
         self.gridmin, self.gridmax = 0, nclusters
         self.ngrid = nclusters + 1
         self.show_network, self.show_FE = True, False
     elif cvtype == "avg_string_cv":
         # A CV along a specific string
         simulation_cvs = utils.load_binary("../gpcr/cvs/cvs-len5_good/cvs.pkl")
         logger.info("Using stringpath from simulations %s-%s", self.start_iteration, self.last_iteration)
         # stringpath = self.runner.stringpath
         avg_string, strings = extra_analysis.compute_average_string(self.runner,
                                                                     start_iteration=self.start_iteration,
                                                                     end_iteration=self.last_iteration,
                                                                     rescale=False,
                                                                     plot=False,
                                                                     save=False)
         stringCv = colvars.StringIndexCv("avg{}-{}".format(self.start_iteration, self.last_iteration), avg_string,
                                          simulation_cvs, interpolate=False)
         stringCv.normalize(scale=1. * len(avg_string), offset=0)
         # self.smear_transitions = False
         self.dependent_cvs = np.array([stringCv])
         self.cvs = simulation_cvs
         # self.gridmin, self.gridmax = -0.1, 1.1  # len(avg_string)
         self.ngrid = 30  # 1 + utils.rint(len(avg_string) / 3.2)
         self.cv_indices = [i for i in range(len(simulation_cvs))]
         # self.ngrid = int(np.rint((self.gridmax - self.gridmin) * len(avg_string)))
     elif cvtype == "water_solubility":
         self.cvs = [create_cvs.WaterSolubilityCV(266, 0.8)]
         self.gridmin = 0
         self.gridmax = 80
         self.ngrid = self.gridmax - self.gridmin + 1
     else:
         raise Exception("No valid cv chose " + cvtype)
     self.cvs = np.array(self.cvs)
     logger.info("Using CVtype %s and CVs %s", cvtype, [cv.id + "," + str(cv) for cv in self.cvs])
     if self.cv_indices is None:
         self.cv_indices = [self.cvi] if self.cvj is None else [self.cvi, self.cvj]
     self.calculator = FECalculator(self.runner, self.cvs,
                                    start_iteration=self.start_iteration,
                                    last_iteration=self.last_iteration,
                                    ngrid=self.ngrid,
                                    normalize_grid=self.normalize_grid,
                                    gridmin=self.gridmin,
                                    gridmax=self.gridmax,
                                    smear_transitions=self.smear_transitions,
                                    plot=False,
                                    ignore_missing_files=False,
                                    cv_indices=self.cv_indices,
                                    dependent_cvs=self.dependent_cvs)
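Note: the cvtype strings of the form "5cvs_<i>and<j>" are decoded with split("_")[1].split("and"), as in the constructor above; a small standalone illustration of that convention:

def parse_cv_indices(cvtype):
    """Parse e.g. '5cvs_0and3' -> (0, 3) and '5cvs_2andNone' -> (2, None)."""
    indices_string = cvtype.split("_")[1].split("and")
    cvi = int(indices_string[0])
    cvj = None if indices_string[1] == "None" else int(indices_string[1])
    return cvi, cvj


# parse_cv_indices("5cvs_0andNone") == (0, None)
# parse_cv_indices("beta1-5cvs_1and4") == (1, 4)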
Example #15
 #                     legend=True,
 #                     twoD=True, cv_indices=None)
 # plot_all_strings(args, plot_frequency=1, start_iteration=1, end_iteration=300, rescale=True, legend=True,
 #                  twoD=True,cv_indices=None, plot_convergence=False, plot_reference_structures=False)
 # plot_restrained=False, cv_indices=[0,1], plot_convergence=True)
 # plot_all_average_strings(args, strings_per_average=77, start_iteration=1, end_iteration=232,
 #                          rescale=True, legend=True, twoD=False, plot_strings=True,
 #                          do_plot_reference_structures=False, plot_boxes=True,
 #                          cv_indices=None, plot_convergence=False, accumulate=False)
 # compute_rolling_average_string_convergence(args,
 #                                            strings_per_average=21,  # comparison_step=81/51,
 #                                            plot=True)
 # logger.info("Drifted string after swarms (unparametrized):\n%s",
 #             compute_drifted_string(args, plot=True, iteration=10))
 # compute_string_with_new_length(args, iteration=None, new_length=20, savepath=None)
 compute_average_string(rs.StringIterationRunner(args),
                        start_iteration=200,
                        end_iteration=282,
                        rescale=False,
                        legend=True,
                        twoD=True,
                        cv_indices=None)
 # plot_input_frames(args)
 # plot_minimizations(args, rescale=True, plot_dist=False)
 # plot_swarms_path(args)
 # plot_thermalization_simus(args, rescale=True)
 # plot_restrained_simus(args, rescale=False, show_label=False, show_text=True)
 # plot_any_traj(rs.StringIterationRunner(args),
 #               "/home/oliver/slask/3SN6-holo-charmm-gui/gromacs/step7_1to3",
 #               trajformat="xtc",
 #               rescale=False,
Example #16
def plot_all_strings(args,
                     plot_frequency=1,
                     start_iteration=1,
                     end_iteration=None,
                     rescale=False,
                     legend=True,
                     twoD=False,
                     plot_restrained=False,
                     cv_indices=None,
                     plot_convergence=True,
                     do_plot_reference_structures=True):
    """Find all string-paths and plot them"""
    runner = rs.StringIterationRunner(args)
    runner.cvs = np.array(runner.cvs)
    if cv_indices is None:
        cv_indices = [i for i in range(runner.stringpath.shape[1])]
    plt.title("ALL STRINGS")
    if do_plot_reference_structures:
        plot_reference_structures(runner, rescale=rescale, twoD=twoD)
    last = None
    convergences = []
    for i in range(start_iteration,
                   2000 if end_iteration is None else end_iteration):
        try:
            runner.init_iteration(i)
            path = runner.stringpath[:, cv_indices]
            # path = np.loadtxt(runner.working_dir + runner.string_filepath % i)
            if last is not None:
                if len(last) == len(path):
                    dist = np.linalg.norm(last - path)
                    convergence = dist / np.linalg.norm(path)
                    logger.info(
                        "Converge between iterations %s and %s: %s. Absolute distance: %s",
                        i - 1, i, convergence, dist)
                    convergences.append(convergence)
                else:
                    logger.warn(
                        "Number of points differs between iterations %s and %s",
                        i, i - 1)
                    convergences.append(np.nan)
            if (i + start_iteration - 1) % plot_frequency == 0:
                plotpath = colvars.rescale_evals(
                    path, runner.cvs[cv_indices]) if rescale else path
                utils.plot_path(plotpath,
                                label="Stringpath %s" % i,
                                text=None,
                                legend=legend,
                                twoD=twoD,
                                axis_labels=[
                                    get_cv_description(cv.id,
                                                       use_simpler_names=True)
                                    for cv in np.array(runner.cvs)[cv_indices]
                                ])
                if plot_restrained:
                    restrainedpath = SingleIterationPostProcessor(
                        runner).compute_string_from_restrained()
                    restrainedpath = colvars.rescale_evals(
                        restrainedpath,
                        runner.cvs) if rescale else restrainedpath
                    utils.plot_path(restrainedpath,
                                    label="Restrained {}".format(i),
                                    twoD=twoD)
                plt.grid()
                # utils.plot_path(plotpath, label="Stringpath %s" % i, text=None, legend=legend)

            last = path
        except IOError as err:
            tb = traceback.format_exc()
            logger.error(tb)
            logger.info(
                "Did not find string %s in filepath %s. Not looking for sequential strings",
                i, runner.string_filepath)
            break
    if last is None:
        return
    if legend:
        plt.legend()
    plt.show()
    if plot_convergence:
        plt.plot(convergences)
        plt.ylabel(r'$|\bar{s_i}-\bar{s}_{i+1}|/|\bar{s}_{i+1}|$')
        plt.xlabel(r"i")
        plt.title("Convergence")
        plt.show()
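Note: the convergence measure plotted above is the relative displacement between consecutive strings, |s_i - s_{i+1}| / |s_{i+1}|. A minimal standalone sketch of the same quantity:

import numpy as np


def string_convergence(previous, current):
    """Relative Frobenius-norm displacement between two strings of identical shape."""
    previous, current = np.asarray(previous), np.asarray(current)
    if previous.shape != current.shape:
        return np.nan  # mirrors the script's behaviour when the point counts differ
    return np.linalg.norm(previous - current) / np.linalg.norm(current)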
Example #17
def plot_all_average_strings(args,
                             strings_per_average=10,
                             start_iteration=1,
                             end_iteration=999,
                             rescale=False,
                             legend=True,
                             twoD=False,
                             plot_strings=True,
                             do_plot_reference_structures=True,
                             plot_boxes=True,
                             cv_indices=None,
                             plot_convergence=True,
                             accumulate=False):
    """Find all string-paths and plot them"""
    runner = rs.StringIterationRunner(args)
    if cv_indices is None:
        cv_indices = [i for i in range(runner.stringpath.shape[1])]
    plt.title("ALL STRINGS")
    if do_plot_reference_structures:
        plot_reference_structures(runner, rescale=rescale, twoD=twoD)
    last, last_iteration = None, start_iteration
    convergences = []
    last_iteration = start_iteration
    for string_index in range(start_iteration, end_iteration,
                              strings_per_average):
        first_index = start_iteration if accumulate else string_index
        last_index = string_index + strings_per_average
        try:
            runner.init_iteration(string_index)
            path, strings = compute_average_string(runner,
                                                   start_iteration=first_index,
                                                   end_iteration=last_index,
                                                   rescale=rescale,
                                                   legend=legend,
                                                   plot=False,
                                                   save=False,
                                                   twoD=twoD)
            if path is None:
                break
            path = path[:, cv_indices]
            # path = np.loadtxt(runner.working_dir + runner.string_filepath % i)
            if last is not None:
                if len(last) == len(path):
                    dist = np.linalg.norm(last - path)
                else:
                    dist = np.linalg.norm(
                        utils.change_string_length(last, len(path)) - path)
                    logger.warn(
                        "Number of points differs between iterations %s and %s",
                        last_iteration, last_index)
                    # convergences.append(np.nan)
                convergence = dist / np.linalg.norm(path)
                logger.info(
                    "Converge between iterations %s and %s: %s. Absolute distance: %s",
                    last_iteration, last_index, convergence, dist)
                convergences.append(convergence)
            if plot_strings:
                boxplot = plot_boxes and (string_index +
                                          strings_per_average) >= end_iteration
                utils.plot_path(
                    path,
                    label="i%s-i%s" % (first_index, last_index),
                    text=None,
                    scatter=False,
                    boxplot_data=strings[:, :,
                                         cv_indices] if boxplot else None,
                    legend=legend,
                    twoD=twoD,
                    ncols=2,
                    axis_labels=([] if twoD else ["inactivation"]) + [
                        get_cv_description(cv.id, use_simpler_names=True)
                        for cv in np.array(runner.cvs)[cv_indices]
                    ])
                # utils.plot_path(plotpath, label="Stringpath %s" % i, text=None, legend=legend)

            last = path
            last_iteration = last_index
        except IOError as err:
            logger.error(err)
            logger.info(
                "Did not find string %s in filepath %s. Not looking for sequential strings",
                string_index, runner.string_filepath)
            break
    if last is None:
        return
    # plt.title(runner.simu_id)
    plt.show()
    if plot_convergence:
        plt.plot(convergences)
        plt.ylabel(r'$|\bar{s_i}-\bar{s}_{i+1}|/|\bar{s}_{i+1}|$')
        plt.xlabel(r"i")
        plt.title("Convergence")
        plt.show()
Example #18
def compute_rolling_average_string_convergence(args,
                                               strings_per_average=33,
                                               start_iteration=None,
                                               end_iteration=9999,
                                               comparison_step=None,
                                               plot=True):
    if start_iteration is None:
        start_iteration = 2  # strings_per_average
    if comparison_step is None:
        comparison_step = strings_per_average
    runner = rs.StringIterationRunner(args)
    all = []
    iterations = []
    convergences = []
    total_avg_string, strings = compute_average_string(
        runner,
        start_iteration=start_iteration,
        end_iteration=end_iteration,
        rescale=False,
        legend=False,
        plot=False,
        save=False,
        twoD=False)
    total_avg = np.linalg.norm(total_avg_string)
    last = None
    for i in range(start_iteration, end_iteration + 1):
        try:
            runner.init_iteration(start_iteration)
            path, strings = compute_average_string(
                runner,
                start_iteration=max(1, i - strings_per_average + 1),
                end_iteration=i,
                rescale=False,
                legend=False,
                plot=False,
                save=False,
                twoD=False)
            if path is None or (i >= strings_per_average
                                and len(strings) != strings_per_average):
                break
            all.append(path)
            compare_index = i - start_iteration - comparison_step
            if compare_index >= 0:
                last = all[compare_index]
                if len(last) != len(path):
                    logger.warn(
                        "Different length of previous path for iteration %s. %s vs %s",
                        i, len(last), len(path))
                    last = utils.change_string_length(last, len(path))
                c = np.linalg.norm(last - path) / total_avg
                convergences.append(c)
                iterations.append(i)
        except IOError as err:
            logger.error(err)
            logger.info(
                "Did not find string %s in filepath %s. Not looking for sequential strings",
                i, runner.string_filepath)
            break
    if len(iterations) == 0:
        logger.warn("Nothing done")
        return None
    convergences = np.array(convergences)
    iterations = np.array(iterations)
    result = np.empty((len(iterations), 2))
    result[:, 0] = iterations
    result[:, 1] = convergences
    # print(convergences)
    all = np.array(all)
    if plot:
        utils.plot_path(result,
                        axis_labels=["Iteration#", "Convergence"],
                        twoD=True,
                        label=utils.simuid_to_label.get(
                            runner.simu_id, runner.simu_id))
        plt.title(
            "rolling avg. for %s strings, comparison to average %s iterations before"
            % (strings_per_average, comparison_step))
        plt.show()

    return result