Example #1
def plot_convergence_from_clusters(
    ax,
    search_parameters: dict,
    yaml_path: str,
    data_path: str = "Experiments/Data.nosync/",
    logx=True,
):
    """Plot the l1 convergence error over time for every run matching
    search_parameters, coloured by the number of clusters in the initial
    spatial distribution."""
    history = get_master_yaml(yaml_path)

    file_names = match_parameters(search_parameters, history)
    cycle = plt.rcParams["axes.prop_cycle"].by_key()["color"]
    for file_name in file_names:
        print(file_name)
        simulation_parameters = history[file_name]
        t, x, v = load_traj_data(file_name, data_path)
        error = calculate_l1_convergence(t, x, v, plot_hist=False)
        cluster_count = _get_number_of_clusters(
            simulation_parameters["initial_dist_x"])
        print(cluster_count)
        cluster_label = f"{cluster_count} cluster{'' if cluster_count==1 else 's'}"
        if logx:
            ax.semilogx(
                t,
                error,
                label=cluster_label,
                color=cycle[cluster_count - 1],
                alpha=0.25,
            )

        else:
            ax.plot(
                t,
                error,
                label=cluster_label,
                color=cycle[cluster_count - 1],
                alpha=0.25,
            )

    ax.set(xlabel="Time", ylabel=r"$\ell^1$ Error")
    handles, labels = ax.get_legend_handles_labels()
    # sort both labels and handles by labels
    labels, handles = zip(*sorted(zip(labels, handles), key=lambda t: t[0]))
    ax.legend(handles, labels)
    plt.tight_layout()
    return ax
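
A minimal driver for this function, assuming the plotting helpers are importable and that the YAML history and trajectory data exist at the (hypothetical) paths and filter shown:

import matplotlib.pyplot as plt

# Hypothetical experiment record and search filter -- adjust to the real layout.
fig, ax = plt.subplots(figsize=(8, 4))
plot_convergence_from_clusters(
    ax,
    search_parameters={"particle_count": 480, "dt": 0.01},
    yaml_path="Experiments/positive_phi_no_of_clusters",
    data_path="Experiments/Data.nosync/",
    logx=True,
)
plt.show()
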
Example #2
def plot_ODE_solution(data_path: str,
                      yaml_path: str,
                      search_parameters: dict = {},
                      **kwargs):
    """Plot the ensemble average velocity of matching simulations together
    with the numerical solution of the first-moment ODE dM/dt = G(M) - M."""
    os.chdir("E:/")
    history = processing.get_master_yaml(yaml_path)

    fig, ax = plt.subplots()
    timestep_range = processing.get_parameter_range("dt", history, **kwargs)
    for timestep in timestep_range:
        search_parameters["dt"] = timestep
        file_names = processing.match_parameters(search_parameters, history,
                                                 **kwargs)
        for file_name in file_names:
            t, x, v = processing.load_traj_data(file_name, data_path)
            # print(len(t))
            if file_name == file_names[0]:
                sum_avg_vel = np.zeros(len(v[:, 0]))
            print(file_name)
            # Accumulate the per-run average velocity so the ensemble mean
            # plotted below is not identically zero.
            sum_avg_vel += v.mean(axis=1)
            ax.plot(t, v.mean(axis=1), "r--", alpha=0.1)

    def first_moment_ode(t, M):
        return G.step(M) - M

    sol = solve_ivp(
        first_moment_ode,
        (t.min(), t.max()),
        [v[0].mean()],
        t_eval=t.ravel(),
        rtol=10**-9,
        atol=10**-9,
    )
    t = sol.t
    M = sol.y
    ax.plot(t, M.ravel(), label="Numerical Sol")
    ax.plot(t, sum_avg_vel / len(file_names))
    ax.legend()

    plt.show()
    return
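
The heart of this function is the first-moment ODE dM/dt = G(M) - M solved with scipy's solve_ivp. A self-contained sketch of that step, using a hypothetical smooth stand-in for the interaction function (the real G.step object is defined elsewhere in the experiment code):

import numpy as np
import matplotlib.pyplot as plt
from scipy.integrate import solve_ivp

def G_smooth(M):
    # Hypothetical stand-in for G.step, purely for illustration.
    return np.arctan(M) / np.arctan(1.0)

sol = solve_ivp(
    lambda t, M: G_smooth(M) - M,   # dM/dt = G(M) - M
    (0.0, 20.0),                    # assumed time span
    [0.2],                          # assumed initial mean velocity
    t_eval=np.linspace(0.0, 20.0, 500),
    rtol=1e-9,
    atol=1e-9,
)
plt.plot(sol.t, sol.y.ravel(), label="First-moment ODE")
plt.legend()
plt.show()
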
def phi_one_convergence(time_ax="linear"):
    """Plot the average velocity M^N(t) for every run in the
    phi_one_convergence experiment, coloured by the sign and variance of
    the initial velocity distribution."""
    # sns.set(style="white", context="paper")
    sns.set_style("ticks")
    fig1, ax1 = plt.subplots(figsize=(12, 6))

    cm = plt.get_cmap("coolwarm")
    cNorm = colors.DivergingNorm(vmin=-25, vcenter=0.0, vmax=25)
    scalarMap = mplcm.ScalarMappable(norm=cNorm, cmap=cm)
    yaml_path = "./Experiments/phi_one_convergence"
    history = get_master_yaml(yaml_path)
    file_names = match_parameters({}, history)
    dt = 0.01
    print(yaml_path)

    for file_name in file_names:
        t, x, v = load_traj_data(file_name, data_path="./Experiments/Data/")
        if t is None:
            t = np.arange(0, len(v) * dt, dt)
        if time_ax == "linear":
            ax1.plot(
                t[:int(20 / dt)],
                v[:int(20 / dt)].mean(axis=1),
                color=scalarMap.to_rgba(np.sign(v[0].mean()) * v[0].var()),
            )
        else:
            ax1.semilogx(
                t[:int(20 / dt)],
                v[:int(20 / dt)].mean(axis=1),
                color=scalarMap.to_rgba(np.sign(v[0].mean()) * v[0].var()),
            )
    ax1.plot([0, 20], [1, 1], "k--", alpha=0.25)
    ax1.plot([0, 20], [-1, -1], "k--", alpha=0.25)
    ax1.set(xlabel="Time", ylabel=r"Average Velocity, $M^N(t)$")
    plt.subplots_adjust(top=0.905,
                        bottom=0.135,
                        left=0.115,
                        right=0.925,
                        hspace=0.2,
                        wspace=0.2)
    plt.show()
    return
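
The colour scale above maps the signed variance of the initial velocities through a diverging norm centred at zero. Note that matplotlib renamed DivergingNorm to TwoSlopeNorm in version 3.2, so on current releases the equivalent setup is:

import matplotlib.pyplot as plt
import matplotlib.cm as mplcm
from matplotlib.colors import TwoSlopeNorm  # DivergingNorm before matplotlib 3.2

cm = plt.get_cmap("coolwarm")
cNorm = TwoSlopeNorm(vmin=-25, vcenter=0.0, vmax=25)
scalarMap = mplcm.ScalarMappable(norm=cNorm, cmap=cm)

# Map a signed scalar (sign of the initial mean velocity times its variance)
# to an RGBA colour, as done for each trajectory above.
line_colour = scalarMap.to_rgba(-3.5)
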
def avg_vel(
    ax,
    scalarMap,
    file_path: str = "Experiments/Data/",
    yaml_path: str = "Experiments/vary_large_gamma_local",
    search_parameters: dict = {
        "particle_count": 450,
        "G": "Step",
        "scaling": "Local",
        "phi": "Gamma",
        "initial_dist_x": "two_clusters_2N_N",
        "initial_dist_v": "2N_N_cluster_const",
        "T_end": 100,
        "dt": 0.01,
    },
):
    """Plot the average velocity of each matching run on a log time axis,
    coloured by the interaction parameter gamma; runs with gamma < 0.05
    are skipped."""
    print(yaml_path)
    history = get_master_yaml(yaml_path)
    list_of_names = match_parameters(search_parameters, history)

    for file_name in list_of_names:
        simulation_parameters = history[file_name]
        t, x, v = load_traj_data(file_name, data_path=file_path)
        if t is None:
            t = np.arange(0,
                          len(x) * simulation_parameters["dt"],
                          simulation_parameters["dt"])
        if simulation_parameters["gamma"] >= 0.05:
            ax.semilogx(
                t,
                v.mean(axis=1),
                color=scalarMap.to_rgba(simulation_parameters["gamma"]),
                label="{:.2f}".format(simulation_parameters["gamma"]),
                # alpha=0.75,
            )
    return ax
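
A possible driver for avg_vel, using the function's default paths and search parameters; the gamma range [0, 0.5] chosen for the colour scale here is an assumption:

import matplotlib.pyplot as plt
import matplotlib.cm as mplcm
from matplotlib.colors import Normalize

fig, ax = plt.subplots()
cNorm = Normalize(vmin=0.0, vmax=0.5)  # assumed gamma range
scalarMap = mplcm.ScalarMappable(norm=cNorm, cmap=plt.get_cmap("coolwarm"))
avg_vel(ax, scalarMap)
fig.colorbar(scalarMap, ax=ax, label=r"Interaction $\gamma$")
plt.show()
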
Example #5
    r"Average Velocity $\bar{M}^N(t)$",
    ha="center",
    va="center",
    rotation=90,
)
short_time_ax.set(xlabel="Time", ylabel=r"Average Velocity $M^N(t)$")
l1_ax.set(xlabel="Time", ylabel=r"$\bar{\ell}^1$ Distance")
long_time_axes[0].set(xlabel="Time")

cluster_colour = ["#0571b0", "#92c5de", "#f4a582", "#ca0020"]

# Populate the plots
for idx, parameter_value in enumerate(parameter_range):
    search_parameters["initial_dist_x"] = parameter_value
    file_names = match_parameters(search_parameters,
                                  history,
                                  exclude={"dt": [1.0]})
    if not file_names:
        print("Skipping...")
        continue
    metric_store = []
    l1_store = []
    if len(file_names) > 15:
        file_names = file_names[:15]

    for file_name in file_names:
        simulation_parameters = history[file_name]
        cluster_count = _get_number_of_clusters(
            simulation_parameters["initial_dist_x"])
        colour = cluster_colour[cluster_count - 1]
        cluster_label = f"{cluster_count} cluster{'' if cluster_count==1 else 's'}"
history = get_master_yaml(yaml_path)

fig, [ax1, ax2] = plt.subplots(1, 2, figsize=(15, 5), sharex=True)

# Create colour bar and scale
cm = plt.get_cmap("coolwarm")
cNorm = colors.DivergingNorm(vmin=0, vcenter=0.25, vmax=0.5)
scalarMap = mplcm.ScalarMappable(norm=cNorm, cmap=cm)
# Set tick locations
cbar = fig.colorbar(scalarMap, ticks=np.arange(0, 0.5, 0.05))

# For each run matching the desired parameters, calculate the l1 error and plot it

for diffusion in np.arange(0.05, 0.5, 0.05).tolist():
    search_parameters["D"] = diffusion
    file_names = match_parameters(search_parameters, history)
    first_iter = True
    for idx, file_name in enumerate(file_names):
        simulation_parameters = history[file_name]
        t, x, v = load_traj_data(file_name)
        error = calculate_l1_convergence(t,
                                         x,
                                         v,
                                         final_plot_time=final_plot_time)
        avg_vel = calculate_avg_vel(t, x, v)
        if first_iter:
            avg_vel_store = np.zeros((len(file_names), len(avg_vel)))
            error_store = np.zeros((len(file_names), len(error)))
            first_iter = False

        avg_vel_store[idx, :] = avg_vel
Example #7
def multiple_timescale_plot(
    search_parameters,
    break_time_step,
    metric,
    parameter,
    parameter_range,
    history,
    include_traj=False,
    data_path="Experiments/Data.nosync/",
):
    """Create figure with plot for beginning dynamics and split into one axis
    for each parameter value

    Example usage:
        fig = multiple_timescale_plot(search_parameters,break_time_step=40,metric=calculate_avg_vel,parameter="D", parameter_range=get_parameter_range("D", history), include_traj=False)
        plt.show()
    """

    parameter_labels = {
        "gamma": r"Interaction $\gamma$",
        "D": r"Diffusion $\sigma$",
        "dt": "Timestep",
    }
    metric_labels = {
        "calculate_avg_vel": r"Average Velocity $\bar{M}^N(t)$",
        "calculate_variance": "Variance",
        "calculate_l1_convergence": r"$\ell^1$ Error",
    }

    expected_error = {
        "480": 7.52,
        "600": 6.69,
        "700": 6.26,
        "1000": 5.25,
    }

    # Create figure and arrange plots
    fig = plt.figure(figsize=(12, 4))
    grid = plt.GridSpec(len(parameter_range), 3, wspace=0.35, bottom=0.13)
    short_time_ax = fig.add_subplot(grid[:, 0])
    long_time_axes = []
    for idx, elem in enumerate(parameter_range):
        try:
            long_time_axes.append(
                fig.add_subplot(grid[idx, 1:],
                                sharey=long_time_axes[0],
                                sharex=long_time_axes[0]))
        except IndexError:
            long_time_axes.append(fig.add_subplot(grid[idx, 1:]))
        if idx != len(parameter_range) - 1:
            plt.setp(long_time_axes[idx].get_xticklabels(), visible=False)

    # Reverse so that plots line up with colorbar
    long_time_axes = long_time_axes[::-1]

    # Create colorbar and labels
    fig.text(
        0.355,
        0.48,
        metric_labels[metric.__name__],
        ha="center",
        va="center",
        rotation=90,
    )
    short_time_ax.set(xlabel="Time", ylabel=metric_labels[metric.__name__])
    long_time_axes[0].set(xlabel="Time")
    cm = plt.get_cmap("coolwarm")

    cNorm = colors.BoundaryNorm(parameter_range + [parameter_range[-1]], cm.N)
    scalarMap = mplcm.ScalarMappable(norm=cNorm, cmap=cm)
    cbar = fig.colorbar(
        scalarMap,
        use_gridspec=True,
        ax=long_time_axes,
        ticks=np.array(parameter_range),  # + 0.025,
    )
    cbar.ax.set_yticklabels([f"{x:.2}" for x in parameter_range])
    try:
        cbar.set_label(parameter_labels[parameter], rotation=270)
    except KeyError:
        cbar.set_label(parameter, rotation=270)

    cbar.ax.get_yaxis().labelpad = 15

    # Populate the plots
    for idx, parameter_value in enumerate(parameter_range):
        search_parameters[parameter] = parameter_value
        file_names = match_parameters(search_parameters,
                                      history,
                                      exclude={"dt": [1.0]})
        if not file_names:
            print("Skipping...")
            continue
        metric_store = []
        for file_name in file_names:
            simulation_parameters = history[file_name]
            t, x, v = load_traj_data(file_name, data_path=data_path)
            metric_result = metric(t, x, v)
            metric_store.append(metric_result)

            short_time_ax.plot(
                t[:break_time_step],
                metric_result[:break_time_step],
                color=scalarMap.to_rgba(parameter_value),
                label=f"{parameter_value}",
                alpha=0.1,
                zorder=1,
            )

            if include_traj:
                long_time_axes[idx].plot(
                    t[break_time_step:],
                    metric_result[break_time_step:],
                    color=scalarMap.to_rgba(parameter_value),
                    label=f"{parameter_value}",
                    alpha=0.05,
                    zorder=1,
                )

        metric_store = np.array(metric_store)
        long_time_axes[idx].plot(
            t[break_time_step:],
            metric_store.mean(axis=0)[break_time_step:],
            color=scalarMap.to_rgba(parameter_value),
            label=f"{parameter_value}",
            alpha=1,
            zorder=2,
        )

        if metric == calculate_avg_vel:
            long_time_axes[idx].plot(
                [t[break_time_step], t[-1]],
                [np.sign(metric_result[0]),
                 np.sign(metric_result[0])],
                "k--",
                alpha=0.25,
            )
        elif metric == calculate_l1_convergence:
            long_time_axes[idx].plot(
                [t[break_time_step], t[-1]],
                [
                    expected_error[str(
                        simulation_parameters["particle_count"])],
                    expected_error[str(
                        simulation_parameters["particle_count"])],
                ],
                "k--",
                alpha=0.25,
            )
        elif metric == calculate_variance:
            long_time_axes[idx].plot(
                [t[break_time_step], t[-1]],
                [simulation_parameters["D"], simulation_parameters["D"]],
                "k--",
                alpha=0.25,
            )

    return fig
Example #8
def plot_averaged_convergence_from_clusters(ax,
                                            search_parameters: dict,
                                            yaml_path: str,
                                            data_path: str,
                                            logx=True):
    """Plot the l1 convergence error, averaged over all runs matching
    search_parameters, for each initial cluster configuration (one to four
    clusters), restricted to times t <= 10."""
    history = get_master_yaml(yaml_path)
    for initial_dist_x in [
            "one_cluster",
            "two_clusters",
            "three_clusters",
            "four_clusters",
    ]:
        search_parameters["initial_dist_x"] = initial_dist_x
        file_names = match_parameters(search_parameters, history)
        cycle = plt.rcParams["axes.prop_cycle"].by_key()["color"]
        for idx, file_name in enumerate(file_names):
            print(file_name)
            simulation_parameters = history[file_name]
            cluster_count = _get_number_of_clusters(
                simulation_parameters["initial_dist_x"])
            cluster_label = f"{cluster_count} cluster{'' if cluster_count==1 else 's'}"

            t, x, v = load_traj_data(file_name, data_path)
            t = t.flatten()
            error = calculate_l1_convergence(t[t <= 10],
                                             x[t <= 10],
                                             v[t <= 10],
                                             plot_hist=False)
            if idx == 0:
                error_store = np.zeros((len(file_names), len(error)))
            error_store[idx, :] = error

        if logx:
            t = t[t <= 10]
            ax.semilogx(
                t,
                np.mean(error_store, axis=0),
                label=cluster_label,
                color=cycle[cluster_count - 1],
            )
        else:
            t = t[t <= 10]
            ax.plot(
                t,
                np.mean(error_store, axis=0),
                label=cluster_label,
                color=cycle[cluster_count - 1],
            )
            # ax.plot(
            #     t[9:], moving_average(np.mean(error_store, axis=0), n=10), "r",
            # )

    ax.set(xlabel="Time", ylabel=r"$\ell^1$ Error")
    handles, labels = ax.get_legend_handles_labels()
    # sort both labels and handles by labels
    labels, handles = zip(*sorted(zip(labels, handles), key=lambda t: t[0]))
    ax.legend(handles, labels)
    plt.tight_layout()
    return ax
Example #9
def plot_averaged_avg_vel(
    ax,
    search_parameters: dict,
    logx=True,
    include_traj=True,
    scalarMap=None,
    data_path: str = "Experiments/Data.nosync/",
    yaml_path: str = "Experiments/positive_phi_no_of_clusters",
    start_time_step: int = 0,
    end_time_step: int = -1,
):
    """Plots average velocity of particles on log scale, colours lines according to
    number of clusters in the inital condition
    """
    if scalarMap is None:
        cm = plt.get_cmap("coolwarm")
        cNorm = mpl.colors.DivergingNorm(vmin=1, vcenter=2, vmax=4)
        scalarMap = mpl.cm.ScalarMappable(norm=cNorm, cmap=cm)
    history = get_master_yaml(yaml_path)

    # for initial_dist_x in [
    #     "one_cluster",
    #     "two_clusters",
    #     "three_clusters",
    #     "four_clusters",
    # ]:
    #     search_parameters["initial_dist_x"] = initial_dist_x
    list_of_names = match_parameters(search_parameters, history)
    #     print(list_of_names)
    cycle = plt.rcParams["axes.prop_cycle"].by_key()["color"]
    for idx, file_name in enumerate(list_of_names):

        simulation_parameters = history[file_name]
        cluster_count = _get_number_of_clusters(
            simulation_parameters["initial_dist_x"])

        cluster_label = f"{cluster_count} cluster{'' if cluster_count==1 else 's'}"

        t, x, v = load_traj_data(file_name, data_path)
        t = t[start_time_step:end_time_step]
        v = v[start_time_step:end_time_step]
        avg_vel = v.mean(axis=1)

        if include_traj and logx:
            ax.semilogx(t,
                        avg_vel,
                        color=cycle[cluster_count - 1],
                        alpha=0.1,
                        zorder=1)
        if include_traj and not logx:
            ax.plot(t,
                    avg_vel,
                    color=cycle[cluster_count - 1],
                    alpha=0.1,
                    zorder=1)

        if idx == 0:
            avg_vel_store = np.zeros((len(list_of_names), len(avg_vel)))

        avg_vel_store[idx, :] = avg_vel
    if logx:
        ax.semilogx(
            t,
            np.mean(avg_vel_store, axis=0),
            color=cycle[cluster_count - 1],  # history[file_name]["gamma"]),
            label=cluster_label,
            # alpha=0.5,
            zorder=2,
        )
    else:
        ax.plot(
            t,
            np.mean(avg_vel_store, axis=0),
            color=cycle[cluster_count - 1],  # history[file_name]["gamma"]),
            label=cluster_label,
            # alpha=0.5,
            zorder=2,
        )
    # plt.tight_layout()
    return ax
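
A possible driver, assuming the default experiment paths; scalarMap is left as None so the function builds its own diverging norm over the cluster counts:

import matplotlib.pyplot as plt

fig, ax = plt.subplots()
plot_averaged_avg_vel(
    ax,
    search_parameters={"particle_count": 480},  # hypothetical filter
    logx=True,
    include_traj=True,
)
ax.legend()
plt.show()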