    return fig_heatmap, fig_snapshots


# %% Prepare the trajectories
# ===========================
nodes = pd.DataFrame({
    'node': node,
    'x': attrs['x'],
    'y': attrs['y']
} for node, attrs in graph.nodes(data=True))
all_nodes = list(graph.nodes)

# Load the trajectories
trajs_active = load_data(
    f'trajs/{GRAPH}_SwitchingNetwork_ExponentialWalker_tau{TAU:e}_N{NUM_TRAJS}_max_time{MAX_TIME}',
    suffix='.parquet')
trajs_undirected = load_data(
    f'trajs/{GRAPH}_UndirectedNetwork_ExponentialWalker_N{NUM_TRAJS}_max_time{MAX_TIME}',
    suffix='.parquet')

# %% Plot the time evolution of the distributions
# ===============================================
times = np.arange(MAX_TIME + 1)

# Undirected
fig_time_evol, fig_snapshots = plot_particle_distribution(
    trajs_undirected, times, all_nodes, snapshots=[0, 30, 60, 200])
fig_snapshots.savefig(
erplot.graph.node_label(ax, graph, 0, label='0')
for node in nodes:
    erplot.graph.node_label(ax, graph, node, label='')
plt.show()

# %% MFPT vs switching rate
# =========================
graph = load_graph(GRAPH)
mfpts = []
nodes_d = distances[distances.distance == DISTANCE].index.values
for tau in tqdm(TAUS):
    df = load_data(
        f'MFPT/{GRAPH}_SwitchingNetworkConstantRate_ExponentialWalker_tau{tau:e}_sim1000'
    )
    mfpts.append(df[df.node.isin(nodes_d)].FPT.mean())

# Undirected
df = load_data(f'MFPT/{GRAPH}_UndirectedNetwork_ExponentialWalker')
df = df.join(distances, on='node')
undirected_mfpt = df[df.distance == DISTANCE].FPT.mean()

# %%
fig, ax = plt.subplots()
ax.plot(TAUS, mfpts, label='Active Network')
ax.hlines(undirected_mfpt, 0, max(TAUS),
fig, ax = erplot.graph.structure(graph)
erplot.graph.node_label(ax, graph, 0, label='0')
for node in nodes:
    erplot.graph.node_label(ax, graph, node, label='')
plt.show()

# %% MFPT vs switching rate
# =========================
graph = load_graph(GRAPH)
mfpts = []
nodes_d = distances[distances.distance == DISTANCE].index.values
for tau in tqdm(TAUS):
    df = load_data(
        f'MFPT/{GRAPH}_SwitchingNetwork_ExponentialWalker_tau{tau:e}')
    mfpts.append(df[df.node.isin(nodes_d)].FPT.mean())

df = load_data(f'MFPT/{GRAPH}_SwitchingNetwork_ExponentialWalker_tau{0.03:e}')
active30ms_mfpt = df[df.node.isin(nodes_d)].FPT.mean()
df = load_data(f'MFPT/{GRAPH}_SwitchingNetwork_ExponentialWalker_tau{0.3:e}')
active300ms_mfpt = df[df.node.isin(nodes_d)].FPT.mean()
df = load_data(f'MFPT/{GRAPH}_SwitchingNetwork_ExponentialWalker_tau{3:e}')
active3s_mfpt = df[df.node.isin(nodes_d)].FPT.mean()

# Undirected
df = load_data(f'MFPT/{GRAPH}_UndirectedNetwork_ExponentialWalker')
df = df.join(distances, on='node')
undirected_mfpt = df[df.distance == DISTANCE].FPT.mean()
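
# --- Illustrative sketch, not part of the original analysis ---
# `distances` (used above) is assumed to be a DataFrame indexed by node with a
# 'distance' column holding the shortest-path distance from the source node 0.
# A hypothetical construction with networkx could look like this:
import networkx as nx
import pandas as pd

distances_sketch = pd.DataFrame.from_dict(
    nx.shortest_path_length(graph, source=0),
    orient='index', columns=['distance'])
distances_sketch.index.name = 'node'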
# ==========================
graph = load_graph(GRAPH)
out = data_path(
    f'trajs/{GRAPH}_SwitchingNetwork_ExponentialWalker_tau{TAU:e}_N{NUM_TRAJS}_max_time{MAX_TIME}',
    suffix='.parquet')
if not out.exists():
    print(f'Generating trajectories: {out}')
    network = SwitchingNetwork(graph, timescale=TAU)
    walker = ExponentialWalker(timescale=0.1)
    generator = TrajectoryGenerator(network, walker)
    trajs = generator.trajectories(NUM_TRAJS, MAX_TIME)
    trajs.to_parquet(str(out))
else:
    trajs = load_data(str(out.with_suffix('')), suffix='.parquet')

# %% Create the movie
# ===================
df = reduce_trajs_steps(trajs).copy()
df['step'] = df.groupby('id').cumcount()  # add step count
df['prev_node'] = df.groupby('id').node.shift().fillna(0).astype(int)

# Generate frames
MAX_TIME = 30
times = np.linspace(0, MAX_TIME, int(MAX_TIME * 20))
values = np.zeros((len(times), graph.number_of_nodes()))
for i, t in enumerate(tqdm(times)):
    for node, count in count_nodes(t, df).items():
        values[i, node] = count
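
# --- Illustrative sketch, not part of the original analysis ---
# `count_nodes(t, df)` above is assumed to return, for a given time t, how many
# walkers occupy each node, based on each trajectory's last recorded position
# at or before t. A hypothetical equivalent:
def count_nodes_sketch(t, df):
    """Return a Series mapping node -> number of walkers on that node at time t."""
    latest = (df[df.time <= t]          # keep events up to time t
              .sort_values('time')
              .groupby('id').last())    # last known position of every walker
    return latest.groupby('node').size()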
graph = load_graph(GRAPH)
out = data_path(f'MFPT/{GRAPH}_SwitchingNetwork_ExponentialWalker_tau{TAU:e}')
if not out.exists():
    network = SwitchingNetwork(graph, timescale=TAU, memory=False)
    walker = ExponentialWalker(timescale=0.1)
    sim = MFPTSimulation(network, walker, num_sims=5000)
    print(f'Running simulation: {out}')
    res = sim.run()
    res.to_csv(str(out))

# %% MFPT heatmap
# ===============
data = load_data(f'MFPT/{GRAPH}_SwitchingNetwork_ExponentialWalker_tau{TAU:e}')
# data = load_data(f'MFPT/{GRAPH}_UndirectedNetwork_ExponentialWalker')
mfpt = data.groupby("node").FPT.mean()

fig, ax, cb = erplot.graph.heatmap(
    graph, mfpt, cmap=cmocean.cm.delta, vmax=VMAX)
erplot.graph.node_label(ax, graph, 0, "S", size=5)
cb.remove()
fig.savefig(FIGURES_PATH.joinpath("MFPT_heatmap_{}.svg".format(GRAPH)))
plt.show()

fig, ax, cb = erplot.graph.heatmap(
    graph, mfpt, cmap=cmocean.cm.delta, vmax=VMAX)
ax.remove()
cb.ax.set_ylabel("$\\bar{\\tau}_{S \\to T}$ (s)")
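
# --- Illustrative sketch, not part of the original analysis ---
# `data_path` and `load_data` are project helpers used throughout this script;
# they are assumed to resolve a dataset name under a data directory and read
# the stored table back. Hypothetical minimal versions:
from pathlib import Path
import pandas as pd

def data_path_sketch(name, suffix='.csv', root=Path('data')):
    """Return the full path of a dataset file under `root`."""
    return root / (name + suffix)

def load_data_sketch(name, suffix='.csv', root=Path('data')):
    """Load a dataset as a DataFrame (CSV or Parquet, chosen by suffix)."""
    path = data_path_sketch(name, suffix, root)
    if suffix == '.parquet':
        return pd.read_parquet(path)
    return pd.read_csv(path)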
        linestyle='--', label='Exponential fit')
ax.legend()
fig.savefig(FIGURES_PATH.joinpath(f'{GRAPH}_attractor_size_distribution.svg'))

# %% Distribution of particles
# ============================
M = graph.number_of_nodes()
hist = np.zeros((NUM_RUNS, M + 1))
all_nodes = list(graph.nodes)

# Average the distribution over NUM_RUNS independent simulations
for n_run in range(1, 1 + NUM_RUNS):
    trajs = load_data(
        f'trajs/{GRAPH}_SwitchingNetwork_ExponentialWalker_tau{TAU:e}_uniform_10_max_time{MAX_TIME}_run{n_run}',
        suffix='.parquet')
    trajs = trajs.set_index('time', drop=False).sort_index()
    counts = trajs.groupby(
        'id', as_index=False).last().groupby('node').count().reindex(
            all_nodes, fill_value=0)
    hist[n_run - 1], bins = np.histogram(counts.id, bins=np.arange(M + 2))

hist_norm = np.sum(hist, axis=0) / np.sum(hist)
sim_vals = hist_norm
sim_xvals = (bins[:-1] + bins[1:]) / 2

# Theoretical functions
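
# --- Illustrative sketch, not part of the original analysis ---
# The original "Theoretical functions" are not shown in this excerpt. As a
# generic reference only: if the walkers were placed independently and
# uniformly over the M nodes, the occupancy of a single node would follow
# Binomial(N, 1/M), approximately Poisson(N / M) for large M.
from scipy import stats

def uniform_occupancy_pmf(k, num_walkers, num_nodes):
    """P(a given node holds k walkers) under independent uniform placement."""
    return stats.binom.pmf(k, num_walkers, 1.0 / num_nodes)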
    if out.exists():
        continue
    print(f'Generating trajectories: {out}')
    network = SwitchingNetwork(graph, timescale=tau)
    walker = ExponentialWalker(timescale=0.1)
    generator = TrajectoryGenerator(network, walker)
    trajs = generator.trajectories(N, MAX_TIME)
    trajs.to_parquet(str(out))

# %% Generate the figures
# =======================
for tau in TAUS:
    df = load_data(
        f'trajs/{GRAPH}_SwitchingNetwork_ExponentialWalker_tau{tau:e}_N{N}_max_time{MAX_TIME}',
        suffix='.parquet')
    df = df.sort_values(["id", "time"])
    df = collapse_traps(df)
    df["step"] = df.groupby("id").cumcount()  # add step count
    df["prev_node"] = df.node.shift()

    ts = np.linspace(0, 120, 1200)
    neighbors, count = count_by_source(TARGET, ts, df, graph)

    fig, ax = plt.subplots(figsize=(4, 2))
    ax.stackplot(ts, count, labels=neighbors,
                 colors=['#1b9e77', '#d95f02', '#7570b3'])
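
# --- Illustrative sketch, not part of the original analysis ---
# `count_by_source(target, ts, df, graph)` above is assumed to return the
# neighbors of `target` and, for every time in `ts`, how many walkers sit on
# `target` split by the neighbor they arrived from (their `prev_node`).
# A hypothetical equivalent:
import numpy as np

def count_by_source_sketch(target, ts, df, graph):
    neighbors = sorted(graph.neighbors(target))
    counts = np.zeros((len(neighbors), len(ts)))
    for i, t in enumerate(ts):
        latest = df[df.time <= t].groupby('id').last()  # walkers' latest state
        at_target = latest[latest.node == target]
        for j, nb in enumerate(neighbors):
            counts[j, i] = (at_target.prev_node == nb).sum()
    return neighbors, counts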
for sim_id in trange(NUM_SIMS):
    network.reset()
    walker.reset()
    generator = TrajectoryGenerator(network, walker)
    traj = generator.trajectories_to_target(num_trajs, TARGET, keep=1)
    traj['sim_id'] = sim_id
    trajs.append(traj)

df = pd.concat(trajs)
df.to_parquet(str(out))

# %% Length of fastest
# ====================
df = load_data(
    f'trajs/{GRAPH}_UndirectedNetwork_ExponentialWalker_N{NUM_TRAJS}_1st_trajectories_S0_T{TARGET}',
    suffix='.parquet')
dfagg = df.groupby(['sim_id', 'id']).agg({'node': 'count', 'time': 'max'})

fig, ax = plt.subplots()
bins = range(0, dfagg.node.max() + dfagg.node.max() % 2 + 1, 2)
ax.hist(dfagg.node, bins, density=True,
        label='Undirected network', color='gray')

for tau in TAUS:
    df = load_data(
        f'trajs/{GRAPH}_SwitchingNetwork_ExponentialWalker_tau{tau:e}_N{NUM_TRAJS}_1st_trajectories_S0_T{TARGET}',
        suffix='.parquet')