def plot_ygl_weight(ax,
                    xlabel,
                    ylabel,
                    A,
                    m,
                    weight,
                    xs,
                    color,
                    linewidth,
                    alpha,
                    color_bias,
                    title_letter,
                    xs_multi=None,
                    A_unit=None,
                    group_index=None):
    """TODO: Docstring for plot_xs_weight.

    :data: TODO
    :returns: TODO

    """
    ax.annotate(title_letter,
                xy=(-0.2, 1.03),
                xycoords="axes fraction",
                size=labelsize * 0.7)
    k = np.sum(A, 0)
    cmap = sns.color_palette(color, as_cmap=True)
    simpleaxis(ax)

    xs_group = np.zeros((len(weight), len(A_unit)))
    for j, group_i in enumerate(group_index):
        xs_group[:, group_i] = xs[:, j:j + 1]
    ygl_group = betaspace(A_unit, xs_group)[-1]
    ax.plot(weight,
            ygl_group,
            linewidth=linewidth,
            alpha=alpha,
            color=cmap(1.0))
    #ax.plot(weight, ygl_group, linewidth=linewidth, alpha=0.8, color=cmap(0))
    #ax.plot(weight, ygl_group, linewidth=linewidth, alpha=0.8, color=cmap(0.5))

    if xs_multi is not None:
        ygl = np.zeros((len(weight)))
        for i, xs_multi_i in enumerate(xs_multi):
            ygl_i = betaspace(A_unit, xs_multi_i)[-1]
            ygl[i] = ygl_i
        ax.plot(weight,
                ygl,
                linewidth=linewidth * 0.8,
                alpha=0.7,
                color='tab:grey')

    ax.set_xlabel(xlabel, fontsize=17)
    ax.set_ylabel(ylabel, fontsize=17)
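
# Illustrative sketch (not from the original source): the group-to-node expansion
# used in plot_ygl_weight, where column j of the reduced states is broadcast to
# every node listed in group_index[j]. Shapes and groupings below are made up;
# only numpy is required.
import numpy as np

group_index_demo = [np.array([0, 2, 5]), np.array([1, 3, 4])]   # hypothetical grouping of 6 nodes
xs_reduced_demo = np.arange(8, dtype=float).reshape(4, 2)       # 4 weights x 2 groups
xs_full_demo = np.zeros((4, 6))
for j, group_j in enumerate(group_index_demo):
    xs_full_demo[:, group_j] = xs_reduced_demo[:, j:j + 1]      # (4, 1) column broadcast over the group
# every node now carries its group's state at each weight, ready for a call like betaspace(A_unit, xs_full_demo)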
Example #2
def tippingpoint_m(network_type, N, seed, d, weight_list, m_list,
                   attractor_value, space, tradeoff_para, method,
                   high_criteria):
    """TODO: Docstring for one_dimension_comparison.

    :network_type: TODO
    :N: TODO
    :seed: TODO
    :d: TODO
    :: TODO
    :returns: TODO

    """
    A, xs_multi, xs_reduction_multi = data_xs(network_type, N, seed, d,
                                              weight_list, m_list,
                                              attractor_value, space,
                                              tradeoff_para, method)

    y_reduction_weighted = np.vstack(([
        np.array([
            betaspace(A, xs_reduction_multi[m, k])[-1]
            for k in range(len(weight_list))
        ]) for m in range(len(m_list))
    ]))
    y_reduction_unweighted = np.vstack(([
        np.array([
            np.mean(xs_reduction_multi[m, k]) for k in range(len(weight_list))
        ]) for m in range(len(m_list))
    ]))
    y_multi_weighted = np.array(
        [betaspace(A, xs_multi[i])[-1] for i in range(len(weight_list))])
    y_multi_unweighted = np.array(
        [np.mean(xs_multi[i]) for i in range(len(weight_list))])

    wc_multi = weight_list[np.where(y_multi_weighted > high_criteria)[0][0]]
    wc_reduction = [
        weight_list[np.where(y_reduction_weighted[i] > high_criteria)[0][0]]
        for i in range(len(m_list))
    ]
    #plt.loglog(m_list, (wc_reduction-wc_multi))
    #plt.loglog(m_list, (wc_reduction-wc_multi)/(wc_reduction[0] - wc_multi))
    w_compare_list = np.arange(0.1, 0.41, 0.05)
    for w_compare in w_compare_list:
        index = np.where(np.abs(weight_list - w_compare) < 1e-8)[0][0]
        y_reduction_weighted_select = y_reduction_weighted[:, index]
        y_multi_weighted_select = y_multi_weighted[index]
        plt.loglog(m_list[1:],
                   y_reduction_weighted_select[1:] -
                   y_reduction_weighted_select[:-1],
                   label=f'w={w_compare}')
    plt.legend()
    plt.show()
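
# Illustrative sketch (synthetic data, not from the original): how the tipping
# point is read off in tippingpoint_m — the critical weight is the first weight
# at which the global state exceeds the chosen criterion.
import numpy as np

weight_list_demo = np.arange(0.0, 1.0, 0.05)
y_global_demo = 1.0 / (1.0 + np.exp(-30 * (weight_list_demo - 0.4)))   # toy bifurcation-like curve
high_criteria_demo = 0.5
above = np.where(y_global_demo > high_criteria_demo)[0]
wc_demo = weight_list_demo[above[0]] if len(above) else None            # None if no transition occurs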
Example #3
def save_ygl(network_type, N, d, seed, dynamics, m, space):
    """TODO: Docstring for error_ygl.

    :network_type: TODO
    :N: TODO
    :d: TODO
    :seed_list: TODO
    :dynamics: TODO
    :returns: TODO

    """
    A_unit, A_interaction, index_i, index_j, cum_index = network_generate(
        network_type, N, 1, 0, seed, d)
    """
    file_A = '../data/A_matrix/' + network_type + '/' + f'N={N}_d={d}_seed={seed}_A.npz'
    A_unit = scipy.sparse.load_npz(file_A).toarray()
    """
    des = '../data/' + dynamics + '/' + network_type + '/xs_bifurcation/'
    beta_cal = betaspace(A_unit, [0])[0]
    if m == N:
        des_multi = des + 'xs_multi_beta/'
        file_multi = des_multi + f'N={N}_d={d}_seed={seed}.csv'
        data_multi = np.array(pd.read_csv(file_multi, header=None))
        weight = data_multi[:, 0]
        xs = data_multi[:, 1:]
        save_file = des + 'y_multi_beta/' + f'N={N}_d={d}_seed={seed}.csv'
    else:
        des_group = des + f'degree_kmeans_space={space}_beta/'
        file_group = des_group + f'N={N}_d={d}_number_groups={m}_seed={seed}.csv'
        data_group = np.array(pd.read_csv(file_group, header=None))
        G = nx.from_numpy_array(A_unit)
        feature = feature_from_network_topology(A_unit,
                                                G,
                                                space,
                                                tradeoff_para=0.5,
                                                method='degree')
        group_index = group_index_from_feature_Kmeans(feature, m)
        y_group = data_group[:, 1:]
        xs_group = np.zeros((len(data_group), N))
        for i, group_i in enumerate(group_index):
            xs_group[:, group_i] = y_group[:, i:i + 1]
        xs = xs_group
        weight = data_group[:, 0]
        save_file = des + 'y_group_beta/' + f'N={N}_d={d}_number_groups={m}_seed={seed}.csv'
    y_gl = betaspace(A_unit, xs)[-1]
    beta_list = beta_cal * weight
    data_save = np.vstack((weight, beta_list, y_gl))
    df = pd.DataFrame(data_save.transpose())
    df.to_csv(save_file, index=None, header=None, mode='w')
    return None
def coeff_K(arguments, beta, low=0.1, high=10):
    """TODO: Docstring for coeff_interaction.

    :R: TODO
    :alpha: TODO
    :beta: TODO
    :returns: TODO

    """
    network_type = '2D'
    seed = 0
    d = 0
    N = 9
    A, A_interaction, index_i, index_j, cum_index = network_generate(
        network_type, N, beta, seed, d)
    beta_eff, _ = betaspace(A, [0])
    weight = beta / beta_eff
    A = A * weight

    B, C, D, E, H, K = arguments
    xs_low, xs_high = stable_state(A, A_interaction, index_i, index_j,
                                   cum_index, low, high, arguments)
    xs = xs_high[0]
    P = (beta * E * xs * xs) / (D + E * xs + H * xs)**2 - (beta * xs) / (
        D + E * xs + H * xs) + (beta * H * xs * xs) / (
            D + E * xs + H * xs)**2 - (beta * xs) / (D + E * xs + H * xs) - (
                1 - xs / K) * (2 * xs / C - 1)
    Q = xs / K * (xs / C - 1)
    tau1 = np.arccos(-P / Q) / Q / np.sin(np.arccos(-P / Q))
    f = lambda x: Q * np.exp(1 + P * x) * x - 1
    initial_condition = np.array([0.1])
    tau2 = fsolve(f, initial_condition)

    return P, Q, tau1, tau2
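
# Illustrative sketch with assumed coefficients (P, Q are not taken from the data):
# the second delay estimate in coeff_K is the root of f(x) = Q * exp(1 + P*x) * x - 1,
# found with scipy's fsolve from the same initial guess used above.
import numpy as np
from scipy.optimize import fsolve

P_demo, Q_demo = -0.5, 1.0                                  # chosen so that a positive root exists
f_demo = lambda x: Q_demo * np.exp(1 + P_demo * x) * x - 1
tau2_demo = fsolve(f_demo, np.array([0.1]))
# f_demo(tau2_demo) should be close to zero if the solver converged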
Example #5
def network_critical_point(dynamics, network_type, N, seed, d, critical_type, threshold_value, survival_threshold, wc_file, weight_list=None):
    """TODO: Docstring for critical_point.

    :network_type: TODO
    :N: TODO
    :: TODO
    :returns: TODO

    """
    file_A = '../data/A_matrix/' + network_type + '/' + f'N={N}_d={d}_seed={seed}_A.npz'
    A = scipy.sparse.load_npz(file_A).toarray()
    degrees = np.sum(A, 0)
    kmean = np.mean(degrees)
    h1 = np.mean(degrees ** 2) / kmean - kmean
    h2 = np.sum(np.abs(degrees.reshape(len(degrees), 1) - degrees)) / N**2 / kmean
    des_xs_multi = '../data/' + dynamics + '/' + network_type + '/xs_bifurcation/xs_multi/'
    des_file = des_xs_multi + f'N={N}_d={d}_seed={seed}.csv'
    data = np.array(pd.read_csv(des_file, header=None))
    if not weight_list:
        weight_list = np.sort(np.unique(data[:, 0]))
    index = [np.where(np.abs(w-data[:, 0]) < 1e-8)[0][0] for w in weight_list]
    xs_multi = data[index, 1:]
    y_multi = betaspace(A, xs_multi)[-1]
    if critical_type == 'survival_ratio':
        index = np.where(np.sum(xs_multi > survival_threshold, 1) / N > threshold_value) [0]
    else:
        index = np.where(y_multi > threshold_value)[0]
    if len(index):
        critical_weight = weight_list[index[0]]
    else:
        critical_weight = None
    df = pd.DataFrame(np.array([d, seed, kmean, h1, h2, critical_weight], dtype='object').reshape(1, 6))
    df.to_csv(wc_file, index=None, header=None, mode='a')
    return None
Example #6
def network_critical_point(dynamics, network_type, N, seed, d, critical_type, threshold_value, survival_threshold, weight_list=None):
    """TODO: Docstring for critical_point.

    :network_type: TODO
    :N: TODO
    :: TODO
    :returns: TODO

    """

    file_des = '../data/' + dynamics + '/' + network_type + '/xs_bifurcation/A_matrix/' +  f'N={N}_d={d}_seed={seed}_A.npz'
    A = scipy.sparse.load_npz(file_des).toarray()
    des_xs_multi = '../data/' + dynamics + '/' + network_type + '/xs_bifurcation/xs_multi/'
    des_file = des_xs_multi + f'N={N}_d=' + str(d) + f'_seed={seed}.csv'
    data = np.array(pd.read_csv(des_file, header=None))
    if weight_list:
        index = [np.where(np.abs(w-data[:, 0]) < 1e-8)[0][0] for w in weight_list]
    else:
        weight_list = np.sort(np.unique(data[:, 0]))
        index = [np.where(np.abs(w-data[:, 0]) < 1e-8)[0][0] for w in weight_list]
    xs_multi = data[index, 1:]
    y_multi = betaspace(A, xs_multi)[-1]
    #y_multi = np.array([np.mean(xs_multi[i]) for i in range(len(weight_list))])
    if critical_type == 'survival_ratio':
        transition_index = np.where(np.sum(xs_multi > survival_threshold, 1) / N > threshold_value) [0][0]
    else:
        transition_index = np.where(y_multi > threshold_value)[0][0]
    critical_weight = weight_list[transition_index]
    return y_multi, critical_weight
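
# Illustrative sketch (synthetic table): the weight lookup used in
# network_critical_point, where rows of the saved bifurcation data are matched to
# requested weights by an absolute tolerance instead of exact float equality.
import numpy as np

data_demo = np.column_stack((np.linspace(0.01, 0.10, 10), np.random.rand(10, 3)))
weight_list_demo = [0.03, 0.07]
index_demo = [np.where(np.abs(w - data_demo[:, 0]) < 1e-8)[0][0] for w in weight_list_demo]
xs_demo = data_demo[index_demo, 1:]                  # node states at the requested weights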
Example #7
def sensitivity_connection(network_type, dynamics, seed_list, d, weight_list):
    """TODO: Docstring for sensitivity_connection.

    :network_type: TODO
    :dynamics: TODO
    :seed: TODO
    :d: TODO
    :weight_list: TODO
    :returns: TODO

    """
    des = '../data/' + dynamics + '/' + network_type + '/xs_bifurcation/xs_multi/'
    for seed in seed_list:
        des_file = des + f'N={N}_d={d}_seed={seed}.csv'
        data = np.array(pd.read_csv(des_file, header=None))
        A_unit, A_interaction, index_i, index_j, cum_index = network_generate(
            network_type, N, 1, 0, seed, d)
        index = [
            np.where(np.abs(w - data[:, 0]) < 1e-8)[0][0] for w in weight_list
        ]
        xs_multi = data[index, 1:]
        y_multi = betaspace(A_unit, xs_multi)[-1]
        plt.plot(weight_list, y_multi, label=f'seed={seed[1]}')
    plt.legend()
    plt.show()
    return None
Example #8
def plot_xs_onenet(network_type, N, d, seed, dynamics, m_list, space, weight_list):
    """TODO: Docstring for plot_P_w.

    :weight_list: TODO
    :returns: TODO

    """    
    fig, axes = plt.subplots(len(seed_list), len(weight_list) + 1, sharex=False, sharey=True, figsize=(3*(len(weight_list) +1), 3*len(seed_list)) ) 
    markers = ['o', '^', 's', 'p', 'P', 'h']
    linestyles = [(i, j) for i in [3, 6, 9] for j in [1, 5, 9, 13]]
    colors=sns.color_palette('hls', 11)
    alphas = [np.log(min(m_list)+1) / np.log(m+1) for m in m_list]
    sizes = [np.log(min(m_list)+1) / np.log(m+1) for m in m_list]
    s = StateDistribution(network_type, N, d, seed, dynamics)
    A_unit, A_interaction, index_i, index_j, cum_index = network_generate(network_type, N, 1, 0, seed, d)
    data_m = dict()
    data_all_m = dict()
    groups_node_nums = dict()
    for m in m_list:
        s.data_load(m, space)
            data = np.abs(s.data )
            data_all_m[m] = data
            weights = data[:, :1]
            index = [np.where(np.abs(weights - weight) < 1e-02 )[0][0]  for weight in weight_list]
            xs = data[index, 1:]
            data_m[m] = xs
            groups_node_nums[m] = s.group_node_number
        for j, weight in enumerate(weight_list):
            ax = axes[i][j]
            simpleaxis(ax)
            sizes = np.ravel(np.tile( (s.group_node_number / np.sum(s.group_node_number) + 0.01) * 100, (1, len(weight_list)) ))
            for k, m in enumerate(m_list):
                y = data_m[m][j]
                ax.scatter(x=np.ones(len(y)) * m, y=y, s= (groups_node_nums[m] / np.sum(groups_node_nums[m]) + 0.05) * 100, alpha=np.log(min(m_list)+0.5) / np.log(m+0.5), color=colors[k]) 
            ax.set(xscale='log', yscale='log')
               
            if i == 0:
                title_name = 'group  ' + f'$w={weight}$'
                ax.set_title(title_name, size=labelsize*0.5)

        ax = axes[i][j+1]
        simpleaxis(ax)
        for i_m, m in enumerate(m_list):
            data = data_all_m[m]
            weights = data[:, 0]
            weight_unique = np.arange(0.01, 0.6, 0.01)
            index_plot = [np.where(abs(weights - w_i) < 1e-5)[0][0] for w_i in weight_unique]
            y = data[index_plot, 1:]
            xs = np.repeat(y, groups_node_nums[m], axis=1)
            
            y_gl = betaspace(A_unit, xs)[-1]
            ax.plot(weight_unique, y_gl, linewidth=1, color=colors[i_m], label=f'$m={m}$', linestyle=(0, linestyles[i_m]) ) 
        if i == 0:
            title_name = f'global'
            ax.set_title(title_name, size=labelsize*0.5)
            ax.legend(fontsize=legendsize*0.5, ncol=2, loc='lower right', frameon=False, ) 
Example #9
def evolution_multi(network_type, arguments, N, beta, betaeffect, d, seed, delay, initial_value):
    """TODO: Docstring for evolution_compare.

    :network_type: TODO
    :dynamics: TODO
    :arguments: TODO
    :N: TODO
    :beta: TODO
    :betaeffect: TODO
    :d: TODO
    :returns: TODO

    """

    A, A_interaction, index_i, index_j, cum_index = network_generate(network_type, N, beta, betaeffect, seed, d)
    N_actual = np.size(A, 0)
    net_arguments = (index_i, index_j, A_interaction, cum_index)
    dyn_multi = np.ones((N_actual)) * initial_value
    t = np.arange(0, 500, 0.01)
    xs = odeint(mutual_multi, dyn_multi, t, args=(arguments, net_arguments))[-1]
    iteration = 1
    deviation1 = np.abs(dyn_multi - xs)
    while 0 < iteration < 50:
        dyn_multi = ddeint_Cheng(mutual_multi_delay, dyn_multi, t, *(delay, arguments, net_arguments))[-1]
        deviation2 = np.abs(dyn_multi-xs)
        if np.max(deviation2)< 1e-2:
            iteration = 0
        elif np.sum(deviation2) < np.sum(deviation1):
            iteration += 1
            deviation1 = deviation2
        else:
            iteration = 0
    dyn_beta = betaspace(A, dyn_multi)[-1]
    if np.max(deviation2) < 1e-2:
        x = dyn_beta
    else:
        x = -1 
    data = np.hstack((seed, x))
    des = f'../data/mutual/' + network_type + '/xs/'
    if not os.path.exists(des):
        os.makedirs(des)
    if betaeffect == 0:
        des_file = des + f'N={N}_d={d}_wt={beta}_delay={delay}_x0={initial_value}.csv'
    else:
        des_file = des + f'N={N}_d={d}_beta={beta}_delay={delay}_x0={initial_value}.csv'
    df = pd.DataFrame(data.reshape(1, len(data)))
    df.to_csv(des_file, mode='a', index=None, header=None)
    #dyn_multi = ddeint_Cheng(mutual_multi_delay, xs-1e-3, t, *(delay, arguments, net_arguments))
    #dyn_decouple = ddeint_Cheng(mutual_decouple_two_delay, xs_decouple - 1e-3, t, *(delay, w, beta, arguments))
    #print(x, np.max(deviation2))
    return None
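
# Illustrative sketch (hypothetical path and values): the result-logging pattern
# used in evolution_multi and several other examples — each run appends a single
# header-less row to a CSV file, creating the destination folder if needed.
import os
import numpy as np
import pandas as pd

des_demo = '../data/demo/xs/'                        # hypothetical destination
if not os.path.exists(des_demo):
    os.makedirs(des_demo)
row_demo = np.hstack((0, 1.234))                     # e.g. (seed, converged global state)
pd.DataFrame(row_demo.reshape(1, len(row_demo))).to_csv(des_demo + 'demo.csv',
                                                        mode='a', index=None, header=None)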
Example #10
def evolution_analysis(network_type, N, beta, betaeffect, seed, d, delay):
    """TODO: Docstring for evolution_oscillation.

    :arg1: TODO
    :returns: TODO

    """

    A, A_interaction, index_i, index_j, cum_index = network_generate(
        network_type, N, beta, betaeffect, seed, d)
    xs_low, xs_high = stable_state(A, A_interaction, index_i, index_j,
                                   cum_index, arguments)
    beta_eff, _ = betaspace(A, [0])
    degree = np.sum(A > 0, 0)
    N = np.size(A, 0)
    initial_condition = np.ones(N) * 5
    initial_condition = xs_high - 0.0001
    t = np.arange(0, 50, 0.001)
    #dyn_all = ddeint_Cheng(mutual_multi_delay, initial_condition, t, *(delay, 0, 0, N, index_i, index_j, A_interaction, cum_index, arguments))
    dyn_all = ddeint_Cheng(
        mutual_single_delay, initial_condition, t,
        *(delay, 0, 0, N, [np.argmax(degree)], index_i, index_j, A_interaction,
          cum_index, arguments))
    w = np.sum(A[np.argmax(degree)])
    initial_condition = np.array([xs_high.max()])
    xs_eff = fsolve(mutual_1D,
                    initial_condition,
                    args=(0, beta_eff, arguments))
    xs_eff = np.mean(xs_high)
    print(xs_eff)
    #dyn_all = ddeint_Cheng(one_single_delay, initial_condition, t, *(delay, 0, 0, w, xs_eff, arguments))
    #xs_high = ddeint_Cheng(one_single_delay, initial_condition, t, *(0, 0, 0, w, xs_eff, arguments))[-1]

    diff = dyn_all - xs_high
    peaks = []
    peaks_index = []
    for i in diff.transpose():

        peak_index, _ = find_peaks(i)  # find_peaks returns (indices, properties)
        peak = i[peak_index]
        positive_index = np.where(peak > 0)[0]
        peak_positive = peak[positive_index]
        peak_index_positive = peak_index[positive_index]

        peaks.append(peak_positive)
        peaks_index.append(peak_index_positive)
        #plt.loglog(degree, peaks_last, 'o')
        plt.semilogy(peak_index_positive, peak_positive, '.', color='r')

    return degree, w, dyn_all, diff, peaks, peaks_index
Example #11
def group_critical_point(dynamics, network_type, N, seed, d, m, space, tradeoff_para, method, critical_type, threshold_value, survival_threshold, weight_list=None):
    """TODO: Docstring for critical_point.

    :network_type: TODO
    :N: TODO
    :: TODO
    :returns: TODO

    """

    file_des = '../data/' + dynamics + '/' + network_type + '/xs_bifurcation/A_matrix/' +  f'N={N}_d={d}_seed={seed}_A.npz'
    A = scipy.sparse.load_npz(file_des).toarray()
    G = nx.from_numpy_array(A)
    N_actual = len(A)
    feature = feature_from_network_topology(A, G, space, tradeoff_para, method)
    group_index = group_index_from_feature_Kmeans(feature, m)
    des_reduction = '../data/' + dynamics + '/' + network_type + '/xs_bifurcation/' + method + '_kmeans_space=' + space + '/'
    des_file = des_reduction + f'N={N}_d=' + str(d) + f'_number_groups={m}_seed={seed}.csv'
    if not os.path.exists(des_file):
        return None, None
    data = np.array(pd.read_csv(des_file, header=None).iloc[:, :])
    if weight_list:
        index = [np.where(np.abs(w-data[:, 0]) < 1e-8)[0][0] for w in weight_list]
    else:
        weight_list = np.sort(np.unique(data[:, 0]))
        index = [np.where(np.abs(w-data[:, 0]) < 1e-8)[0][0] for w in weight_list]

    xs_reduction_multi = np.zeros((len(weight_list), N_actual))
    xs_i = data[index, 1:]
    for i, group_i in enumerate(group_index):
        xs_reduction_multi[:, group_i] = np.tile(xs_i[:, i], (len(group_i), 1)).transpose()

    y_reduction = betaspace(A, xs_reduction_multi)[-1]
    #y_multi = np.array([np.mean(xs_multi[i]) for i in range(len(weight_list))])
    if critical_type == 'survival_ratio':
        index = np.where(np.sum(xs_reduction_multi > survival_threshold, 1) / N > threshold_value)[0]
        if len(index):
            transition_index = index[0]
        else:
            return None, None
    else:
        index = np.where(y_reduction > threshold_value)[0]
        if len(index):
            transition_index = index[0]
        else:
            return None, None
    critical_weight = weight_list[transition_index]
    return y_reduction, critical_weight
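
# Illustrative sketch (synthetic shapes): the reduced-to-full state expansion in
# group_critical_point, where the i-th reduced trajectory is tiled over every node
# belonging to group i before computing the global state.
import numpy as np

group_index_demo = [np.array([0, 3]), np.array([1, 2, 4]), np.array([5, 6])]   # hypothetical groups
xs_reduced_demo = np.random.rand(5, len(group_index_demo))                     # 5 weights x 3 groups
xs_full_demo = np.zeros((5, 7))
for i, group_i in enumerate(group_index_demo):
    xs_full_demo[:, group_i] = np.tile(xs_reduced_demo[:, i], (len(group_i), 1)).transpose()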
Example #12
def xs_beta(network_type, N, d, seed, dynamics, arguments, attractor_value,
            beta_list, m, space, des_file):
    """TODO: Docstring for wc_find.

    :dynamics: TODO
    :arguments: TODO
    :wl: TODO
    :wr: TODO
    :attractor_value: TODO
    :: TODO
    :returns: TODO

    """
    if 'high' in dynamics:
        dynamics_func = globals()[dynamics[:dynamics.find('_high')] + '_multi']
    else:
        dynamics_func = globals()[dynamics + '_multi']
    """
    file_A = '../data/A_matrix/' + network_type + '/' + f'N={N}_d={d}_seed={seed}_A.npz'
    A_unit = scipy.sparse.load_npz(file_A).toarray()
    A_index = np.where(A_unit>0)
    A_interaction = A_unit[A_index]
    index_i = A_index[0] 
    index_j = A_index[1] 
    degree = np.sum(A_unit>0, 1)
    cum_index = np.hstack((0, np.cumsum(degree)))
    """
    A_unit, A_interaction, index_i, index_j, cum_index = network_generate(
        network_type, N, 1, 0, seed, d)
    beta_cal = betaspace(A_unit, [0])[0]
    weight_list = beta_list / beta_cal
    net_arguments = (index_i, index_j, A_interaction, cum_index)
    if not m == N:
        G = nx.from_numpy_array(A_unit)
        feature = feature_from_network_topology(A_unit,
                                                G,
                                                space,
                                                tradeoff_para=0.5,
                                                method='degree')
        group_index = group_index_from_feature_Kmeans(feature, m)
        A_reduction_deg_part, net_arguments, _ = reducednet_effstate(
            A_unit, np.zeros(len(A_unit)), group_index)
    for weight in weight_list:
        xs_w(dynamics_func, net_arguments, weight, arguments, attractor_value,
             des_file)
    return None
Example #13
def xs_compare(N_group, p, weight, seed, dynamics, arguments, attractor_value, number_groups):
    """TODO: Docstring for evolution_compare.

    :arg1: TODO
    :returns: TODO

    """
    "the original network"
    A, A_interaction, index_i, index_j, cum_index = SBM_ER(N_group, p, weight, seed)
    N_actual = len(A)
    net_arguments = (index_i, index_j, A_interaction, cum_index)
    t = np.arange(0, 1000, 0.01)
    initial_condition = np.ones(N_actual) * attractor_value
    dynamics_multi = globals()[dynamics + '_multi']
    xs_multi = odeint(dynamics_multi, initial_condition, t, args=(arguments, net_arguments))[-1]


    "the reduced system by community"
    A_reduction, net_arguments_reduction, x_eff = reduced_network(N_group, A, xs_multi, number_groups)
    initial_condition_reduction = np.ones(len(A_reduction)) * attractor_value
    xs_reduction = odeint(dynamics_multi, initial_condition_reduction, t, args=(arguments, net_arguments_reduction))[-1]

    "the one-dimension system by degree weighted average"
    dynamics_spectral_1D = globals()[dynamics + '_1D_spectral']
    beta_1D, x_eff_1D = betaspace(A, xs_multi)
    initial_condition_1D = np.ones(1) * attractor_value
    xs_reduction_1D = odeint(dynamics_spectral_1D, initial_condition_1D, t, args=(beta_1D, 1, arguments))[-1]

    "the reduced system by spectral decomposition"
    dynamics_spectral_1D = globals()[dynamics + '_1D_spectral']
    alpha_list, beta_list, R_list = spectral(A, xs_multi, number_groups)
    initial_condition_spectral = np.ones(number_groups) * attractor_value
    xs_spectral = odeint(dynamics_spectral_1D, initial_condition_spectral, t, args=(alpha_list, beta_list, arguments))[-1]

    "save data"
    data=  np.hstack((weight, np.ravel(A_reduction), x_eff, xs_reduction, alpha_list, beta_list, R_list, xs_spectral, beta_1D, x_eff_1D, xs_reduction_1D))
    network_type = 'SBM_ER'
    des = '../data/' + dynamics + '/' + network_type + f'/xs_compare_multi_community_spectral/'
    if not os.path.exists(des):
        os.makedirs(des)
    des_file = des + f'N={N_group}_p=' + str(p.tolist()) + f'_group_num={number_groups}_seed={seed}.csv'

    df = pd.DataFrame(data.reshape(1, len(data)))
    df.to_csv(des_file, index=None, header=None, mode='a')
    return A, xs_multi, A_reduction, x_eff, xs_reduction, alpha_list, beta_list, R_list, xs_spectral, beta_1D, x_eff_1D, xs_reduction_1D
Example #14
def plot_error_w(network_type, N, d, seed, dynamics, weight_list, m_list, space):
    """TODO: Docstring for plot_error_w.

    :network_type: TODO
    :N: TODO
    :d: TODO
    :seed: TODO
    :weight_list: TODO
    :m: TODO
    :returns: TODO

    """
    s = StateDistribution(network_type, N, d, seed, dynamics)
    A_unit, A_interaction, index_i, index_j, cum_index = network_generate(network_type, N, 1, 0, seed, d)
    y_gl_list = []
    for i_m, m in enumerate(m_list):
        s.data_load(m, space)
        data = s.data
        group_node_number = s.group_node_number
        group_index = s.group_index
        
        weights = data[:, 0]
        weight_unique = np.arange(0.01, 0.6, 0.01)
        index_plot = [np.where(abs(weights - w_i) < 1e-5)[0][0] for w_i in weight_unique]
        y = data[index_plot, 1:]
        xs = np.zeros( (len(index_plot), N) )
        for i, group_i in enumerate(group_index):
            xs[:, group_i] = y[:, i:i+1]
        y_gl = betaspace(A_unit, xs)[-1]
        y_gl_list.append(y_gl)
    y_gl_list = np.vstack( (y_gl_list) ) 
    error_m = np.zeros(( len(m_list) -1))
    for i_m, m in enumerate(m_list[:-1]):
        error = np.abs(y_gl_list[-1] - y_gl_list[i_m] ) / (y_gl_list[-1] + y_gl_list[i_m])
        error_m[i_m] = error[-1]
        #plt.plot(weight_unique, error, label = f'm={m}')
    plt.plot(m_list[:-1], error_m ) 

    plt.yscale('log')
    plt.xscale('log')

    plt.legend()
    return error_m 
Example #15
def group_critical_point(dynamics, network_type, N, seed, d, m, space, tradeoff_para, method, critical_type, threshold_value, survival_threshold, wc_file, weight_list=None):
    """TODO: Docstring for critical_point.

    :network_type: TODO
    :N: TODO
    :: TODO
    :returns: TODO

    """
    file_A = '../data/A_matrix/' + network_type + '/' + f'N={N}_d={d}_seed={seed}_A.npz'
    A = scipy.sparse.load_npz(file_A).toarray()
    degrees = np.sum(A, 0)
    kmean = np.mean(degrees)
    h1 = np.mean(degrees ** 2) / kmean - kmean
    h2 = np.sum(np.abs(degrees.reshape(len(degrees), 1) - degrees)) / N**2 / kmean
    G = nx.from_numpy_array(A)
    N_actual = len(A)
    feature = feature_from_network_topology(A, G, space, tradeoff_para, method)
    group_index = group_index_from_feature_Kmeans(feature, m)
    des_reduction = '../data/' + dynamics + '/' + network_type + '/xs_bifurcation/' + method + '_kmeans_space=' + space + '/'
    des_file = des_reduction + f'N={N}_d=' + str(d) + f'_number_groups={m}_seed={seed}.csv'
    data = np.array(pd.read_csv(des_file, header=None).iloc[:, :])
    if not weight_list:
        weight_list = np.sort(np.unique(data[:, 0]))
    index = [np.where(np.abs(w-data[:, 0]) < 1e-8)[0][0] for w in weight_list]
    xs_reduction_multi = np.zeros((len(weight_list), N_actual))
    xs_i = data[index, 1:]
    for i, group_i in enumerate(group_index):
        xs_reduction_multi[:, group_i] = np.tile(xs_i[:, i], (len(group_i), 1)).transpose()
    y_reduction = betaspace(A, xs_reduction_multi)[-1]
    if critical_type == 'survival_ratio':
        index = np.where(np.sum(xs_reduction_multi > survival_threshold, 1) / N > threshold_value)[0]
    else:
        index = np.where(y_reduction > threshold_value)[0]
    if len(index):
        critical_weight = weight_list[index[0]]
    else:
        critical_weight = None
    df = pd.DataFrame(np.array([d, seed, kmean, h1, h2, critical_weight], dtype='object').reshape(1, 6))
    df.to_csv(wc_file, index=None, header=None, mode='a')
    return None
Example #16
def beta_calculation(network_type, N, wt, seed_list, d):
    """TODO: Docstring for diameter.

    :network_type: TODO
    :beta: TODO
    :betaeffect: TODO
    :d: TODO
    :returns: TODO

    """
    des = '../data/' + dynamics + '/' + network_type + '/beta_wt/'
    if not os.path.exists(des):
        os.makedirs(des)
    des_file = des + f'd={d}_wt={wt}.csv'
    for seed in seed_list:
        A, _, _, _, _ = network_generate(network_type, N, wt, 0, seed, d)
        beta, _ = betaspace(A, [0])
        data = np.hstack((seed, beta))
        df = pd.DataFrame(data.reshape(1, np.size(data)))
        df.to_csv(des_file, index=None, header=None, mode='a')

    return None
def evolution_analysis(network_type, N, beta, betaeffect, seed, d, delay):
    """TODO: Docstring for evolution_oscillation.

    :arg1: TODO
    :returns: TODO

    """

    A, A_interaction, index_i, index_j, cum_index = network_generate(
        network_type, N, beta, betaeffect, seed, d)
    xs_low, xs_high = stable_state(A, A_interaction, index_i, index_j,
                                   cum_index, arguments)
    beta_eff, _ = betaspace(A, [0])
    degree = np.sum(A > 0, 0)
    index = np.argmax(degree)
    N = np.size(A, 0)
    initial_condition = np.ones(N) * 5
    initial_condition = xs_high - 0.0001
    dt = 0.001
    t = np.arange(0, 50, dt)
    t1 = time.time()
    dyn_multi = ddeint_Cheng(
        mutual_multi_delay, initial_condition, t,
        *(delay, 0, 0, N, index_i, index_j, A_interaction, cum_index,
          arguments))
    t2 = time.time()
    plot_diff(dyn_multi, xs_high, dt, 'tab:red', 'multi-delay')
    dyn_single = ddeint_Cheng(
        mutual_single_delay, initial_condition, t,
        *(delay, 0, 0, N, [index], index_i, index_j, A_interaction, cum_index,
          arguments))
    t3 = time.time()
    plot_diff(dyn_single[:, index], xs_high[index], dt, 'tab:blue',
              'single-delay')
    w = np.sum(A[index])
    #xs_eff = fsolve(mutual_1D, initial_condition, args=(0, beta_eff, arguments))
    xs_eff = np.mean(xs_high)
    #xs_eff = np.mean(np.setdiff1d(xs_high, xs_high[index]))
    xs_high_max = ddeint_Cheng(one_single_delay, np.array([xs_high[index]]), t,
                               *(0, 0, 0, w, xs_eff, arguments))[-1]
    initial_condition = np.ones(1) * 5
    initial_condition = xs_high_max - 0.0001

    t4 = time.time()
    dyn_one = ddeint_Cheng(one_single_delay, initial_condition, t,
                           *(delay, 0, 0, w, xs_eff, arguments))[:, 0]
    t5 = time.time()
    print(t2 - t1, t3 - t2, t5 - t4)
    plot_diff(dyn_one, xs_high_max, dt, 'tab:green', 'one-component')
    plt.subplots_adjust(left=0.2,
                        right=0.98,
                        wspace=0.25,
                        hspace=0.25,
                        bottom=0.18,
                        top=0.98)
    plt.xticks(fontsize=ticksize)
    plt.yticks(fontsize=ticksize)
    plt.xlabel('$t$', fontsize=fs)
    plt.ylabel('$A_{x}$', fontsize=fs)
    plt.legend(frameon=False, fontsize=legendsize, loc='lower left')
    plt.show()

    return dyn_multi, xs_high
Example #18
def tau_two_single_delay(network_type, N, d, beta, betaeffect, arguments,
                         seed_list):
    """TODO: Docstring for tau_kmax.

    :network_type: TODO
    :N: TODO
    :beta: TODO
    :betaeffect: TODO
    :returns: TODO

    """
    B, C, D, E, H, K = arguments
    des = '../data/'
    if not os.path.exists(des):
        os.makedirs(des)
    if betaeffect:
        des_file = des + network_type + f'_N={N}_d=' + str(
            d) + '_decouple_two_single_beta=' + str(beta) + '_logistic.csv'
    else:
        des_file = des + network_type + f'_N={N}_d=' + str(
            d) + '_decouple_two_single_wt=' + str(beta) + '_logistic.csv'

    for seed in seed_list:
        A, A_interaction, index_i, index_j, cum_index = network_generate(
            network_type, N, beta, betaeffect, seed, d)
        xs_low, xs_high = stable_state(A, A_interaction, index_i, index_j,
                                       cum_index, arguments)
        beta_eff, _ = betaspace(A, [0])
        degree = np.sum(A > 0, 0)
        x_fix = np.mean(xs_high)
        index_list = np.argsort(degree)[-10:]
        tau_individual = []
        for index in index_list:
            w = np.sum(A[index])
            xs = ddeint_Cheng(decouple_two_delay,
                              np.ones(2) * 5, np.arange(0, 100, 0.01),
                              *(0, 0, 0, w, beta_eff, arguments))[-1]
            # self-dynamics derivatives at the stable state, required by eigen_two_decouple below
            fx = np.array([(1 - xs[0] / K) * (2 * xs[0] / C - 1),
                           (1 - xs[1] / K) * (2 * xs[1] / C - 1) - xs[1] / K * (xs[1] / C - 1)])
            fxt = np.array([-xs[0] / K * (xs[0] / C - 1), 0])
            g11 = w * (xs[1] /
                       (D + E * xs[0] + H * xs[1]) - E * xs[0] * xs[1] /
                       (D + E * xs[0] + H * xs[1])**2)
            g12 = w * (xs[0] /
                       (D + E * xs[0] + H * xs[1]) - H * xs[0] * xs[1] /
                       (D + E * xs[0] + H * xs[1])**2)
            g21 = 0
            g22 = beta_eff * (2 * xs[1] /
                              (D + E * xs[1] + H * xs[1]) - xs[1]**2 *
                              (E + H) / (D + E * xs[1] + H * xs[1])**2)
            g_matrix = np.array([[g11, g12], [g21, g22]])
            tau_sol = []
            for initial_condition in np.array(np.meshgrid(
                    tau_set, nu_set)).reshape(
                        2,
                        int(np.size(tau_set) * np.size(nu_set))).transpose():
                tau_solution, nu_solution = fsolve(eigen_two_decouple,
                                                   initial_condition,
                                                   args=(fx, fxt, g_matrix))
                eigen_real, eigen_imag = eigen_two_decouple(
                    np.array([tau_solution, nu_solution]), fx, fxt, g_matrix)
                if abs(eigen_real) < 1e-5 and abs(eigen_imag) < 1e-5:
                    tau_sol.append(tau_solution)
            tau_sol = np.array(tau_sol)
            if np.size(tau_sol[tau_sol > 0]):
                tau_individual.append(np.min(tau_sol[tau_sol > 0]))

        tau = np.min(tau_individual)
        data = np.hstack((seed, degree.max(), tau))

        column_name = [f'seed{i}' for i in range(np.size(seed))]
        column_name.extend(['kmax', str(beta)])

        if not os.path.exists(des_file):
            df = pd.DataFrame(data.reshape(1, np.size(data)),
                              columns=column_name)
            df.to_csv(des_file, index=None, mode='a')
        else:
            df = pd.DataFrame(data.reshape(1, np.size(data)))
            df.to_csv(des_file, index=None, header=None, mode='a')
        print(seed, tau)

    return None
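
# Illustrative sketch (placeholder ranges, not the module's tau_set / nu_set): the
# initial-condition scan used in tau_two_single_delay, which builds every (tau, nu)
# pair from two 1D candidate sets and tries each as a starting point for fsolve.
import numpy as np

tau_set_demo = np.arange(0.1, 1.0, 0.2)
nu_set_demo = np.arange(1.0, 5.0, 1.0)
initial_conditions_demo = np.array(np.meshgrid(tau_set_demo, nu_set_demo)).reshape(
    2, int(np.size(tau_set_demo) * np.size(nu_set_demo))).transpose()
# initial_conditions_demo has shape (len(tau_set_demo) * len(nu_set_demo), 2)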
Example #19
def tau_decouple_eff(network_type, N, d, beta, betaeffect, arguments,
                     seed_list):
    """TODO: 10 largest degree to decide critical point.

    :network_type: TODO
    :N: TODO
    :beta: TODO
    :betaeffect: TODO
    :returns: TODO

    """
    B, C, D, E, H, K = arguments
    des = '../data/'
    if not os.path.exists(des):
        os.makedirs(des)
    if betaeffect:
        des_file = des + network_type + f'_N={N}_d=' + str(
            d) + '_decouple_eff_beta=' + str(beta) + '_logistic.csv'
    else:
        des_file = des + network_type + f'_N={N}_d=' + str(
            d) + '_decouple_eff_wt=' + str(beta) + '_logistic.csv'

    for seed in seed_list:
        A, A_interaction, index_i, index_j, cum_index = network_generate(
            network_type, N, beta, betaeffect, seed, d)
        beta_eff, _ = betaspace(A, [0])
        xs_low, xs_high = stable_state(A, A_interaction, index_i, index_j,
                                       cum_index, arguments)
        wk = np.sum(A, 0)
        x_fix = odeint(mutual_1D,
                       np.ones(1) * 5,
                       np.arange(0, 200, 0.01),
                       args=(beta_eff, arguments))[-1]
        index_list = np.argsort(wk)[-10:]
        tau_list = np.ones(len(index_list)) * 100
        for index, i in zip(index_list, range(len(index_list))):
            w = np.sum(A[index])
            xs = ddeint_Cheng(one_single_delay,
                              np.ones(1) * 5, np.arange(0, 200, 0.01),
                              *(0, 0, 0, w, x_fix, arguments))[-1]
            P = -(w * x_fix) / (D + E * xs + H * x_fix) + (
                w * E * xs * x_fix) / (D + E * xs + H * x_fix)**2 - (
                    1 - xs / K) * (2 * xs / C - 1)
            Q = xs / K * (xs / C - 1)
            if abs(P / Q) <= 1:
                tau_list[i] = np.arccos(-P / Q) / Q / np.sin(np.arccos(-P / Q))
        tau = np.min(tau_list)
        tau_index = index_list[np.where(tau == tau_list)[0][0]]
        data = np.hstack((seed, wk.max(), tau, wk[tau_index],
                          np.where(np.sort(wk)[::-1] == wk[tau_index])[0][-1]))

        column_name = [f'seed{i}' for i in range(np.size(seed))]
        column_name.extend(['kmax', str(beta), 'wk', 'order'])

        if not os.path.exists(des_file):
            df = pd.DataFrame(data.reshape(1, np.size(data)),
                              columns=column_name)
            df.to_csv(des_file, index=None, mode='a')
        else:
            df = pd.DataFrame(data.reshape(1, np.size(data)))
            df.to_csv(des_file, index=None, header=None, mode='a')
        print(seed, tau)

    return None
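
# Illustrative sketch (assumed P, Q): the closed-form delay estimate used in
# tau_decouple_eff, valid only when |P/Q| <= 1 so that arccos(-P/Q) is defined.
import numpy as np

def tau_critical_demo(P, Q):
    if abs(P / Q) > 1:
        return np.inf                                # no critical delay from this formula
    theta = np.arccos(-P / Q)
    return theta / Q / np.sin(theta)

# e.g. tau_critical_demo(-0.3, 0.5) gives a finite positive delay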
Example #20
def evolution_compare(network_type, arguments, N, beta, betaeffect, d, seed,
                      delay, index):
    """TODO: Docstring for evolution_compare.

    :network_type: TODO
    :dynamics: TODO
    :arguments: TODO
    :N: TODO
    :beta: TODO
    :betaeffect: TODO
    :d: TODO
    :returns: TODO

    """

    A, A_interaction, index_i, index_j, cum_index = network_generate(
        network_type, N, beta, betaeffect, seed, d)
    beta, _ = betaspace(A, [0])
    N_actual = np.size(A, 0)
    w_list = np.sum(A, 0)
    w = np.sort(w_list)[index]
    A_index = np.where(w_list == w)[0][0]
    net_arguments = (index_i, index_j, A_interaction, cum_index)

    initial_condition = np.ones((N_actual)) * 5.0
    t = np.arange(0, 200, 0.01)
    dt = 0.01
    xs = odeint(mutual_multi,
                initial_condition,
                t,
                args=(arguments, net_arguments))[-1]
    dyn_multi = ddeint_Cheng(mutual_multi_delay, initial_condition, t,
                             *(delay, arguments, net_arguments))
    dyn_decouple = ddeint_Cheng(mutual_decouple_two_delay,
                                initial_condition[:2], t,
                                *(delay, w, beta, arguments))
    xs_decouple = ddeint_Cheng(mutual_decouple_two_delay,
                               initial_condition[:2], t,
                               *(0, w, beta, arguments))[-1]
    #plt.plot(t[:2000], dyn_multi[:2000, A_index], '-', color='tab:red', linewidth=lw, alpha=alpha, label='multi')
    #plt.plot(t[:2000], dyn_decouple[:2000, 0], '-', color='tab:blue', linewidth=lw, alpha=alpha, label='decouple')

    index_neighbor = np.where(A[A_index] > 0)[0]
    s = np.sum(A[index_neighbor], 1)
    #plt.plot(t[:2000], np.mean(s * dyn_multi[:2000, index_neighbor], 1)/np.mean(s), '-', color='tab:red', linewidth=lw, alpha=alpha, label='multi')
    #plt.plot(t[:2000], np.mean(np.sum(A, 0) * dyn_multi[:2000, :], 1)/np.mean(np.sum(A, 0)), '-', color='tab:red', linewidth=lw, alpha=alpha, label='multi')
    #plt.plot(t[:2000], np.mean(dyn_multi[:2000, index_neighbor], 1), '-', color='tab:red', linewidth=lw, alpha=alpha, label='multi')
    #plt.plot(t[:2000], dyn_decouple[:2000, 1], '-', color='tab:blue', linewidth=lw, alpha=alpha, label='decouple')
    plt.subplots_adjust(left=0.18,
                        right=0.98,
                        wspace=0.25,
                        hspace=0.25,
                        bottom=0.18,
                        top=0.98)
    plt.xticks(fontsize=ticksize)
    plt.yticks(fontsize=ticksize)
    plt.xlabel('$t$', fontsize=fs)
    plt.ylabel('$x$', fontsize=fs)
    plt.locator_params(axis='x', nbins=5)
    plt.legend(fontsize=legendsize, frameon=False)

    plt.show()
    #plt.close()
    x_eff = np.mean(np.sum(A, 0) * dyn_multi[:, :], 1) / np.mean(np.sum(A, 0))
    plot_diff(x_eff, x_eff[-1], dt, 'tab:red', 'multi')
    plot_diff(dyn_decouple[:, 1], xs_decouple[1], dt, 'tab:blue', 'decouple')
    plt.subplots_adjust(left=0.18,
                        right=0.98,
                        wspace=0.25,
                        hspace=0.25,
                        bottom=0.18,
                        top=0.98)
    plt.xticks(fontsize=ticksize)
    plt.yticks(fontsize=ticksize)
    plt.xlabel('$t$', fontsize=fs)
    plt.ylabel('$A$', fontsize=fs)
    plt.locator_params(axis='x', nbins=5)
    plt.legend(fontsize=legendsize, frameon=False)
    #plt.ylim(10**(-9),1)
    plt.show()
    return None
Example #21
def eigenvector_zeroeigenvalue(network_type, arguments, N, beta, betaeffect, d,
                               seed):
    """TODO: Docstring for eigenvector.

    :A: TODO
    :arguments: TODO
    :xs: TODO
    :returns: TODO

    """
    A, A_interaction, index_i, index_j, cum_index = network_generate(
        network_type, N, beta, betaeffect, seed, d)
    beta, _ = betaspace(A, [0])
    N_actual = np.size(A, 0)
    net_arguments = (index_i, index_j, A_interaction, cum_index)

    initial_condition = np.ones((N_actual)) * 5.0
    t = np.arange(0, 500, 0.01)
    dt = 0.01
    xs = odeint(mutual_multi,
                initial_condition,
                t,
                args=(arguments, net_arguments))[-1]

    B, C, D, E, H, K = arguments
    fx = (1 - xs / K) * (2 * xs / C - 1)
    fxt = -xs / K * (xs / C - 1)
    xs_T = xs.reshape(len(xs), 1)
    denominator = D + E * xs + H * xs_T
    "A should be transposed to A_ji"
    gx_i = np.sum(A * (xs_T / denominator - E * xs * xs_T / denominator**2), 0)
    gx_j = A * (xs / denominator - H * xs * xs_T / denominator**2)
    tau_sol = []
    nu_sol = []
    for initial_condition in np.array(np.meshgrid(tau_list, nu_list)).reshape(
            2, int(np.size(tau_list) * np.size(nu_list))).transpose():
        tau_solution, nu_solution = fsolve(eigenvalue_zero,
                                           initial_condition,
                                           args=(A, fx, fxt, gx_i, gx_j))
        "check the solution given by fsolve built-in function."
        eigen_real, eigen_imag = eigenvalue_zero(
            np.array([tau_solution, nu_solution]), A, fx, fxt, gx_i, gx_j)
        if abs(eigen_real) < 1e-5 and abs(eigen_imag) < 1e-5:
            #print(tau_solution, nu_solution)
            tau_sol.append(tau_solution)
            nu_sol.append(nu_solution)
    tau_sol = np.array(tau_sol)
    tau_positive = tau_sol[tau_sol > 0]
    nu_positive = np.array(nu_sol)[tau_sol > 0]
    min_index = np.argmin(tau_positive)
    tau = tau_positive[min_index]
    nu = nu_positive[min_index]
    imag = 1j
    M = np.diagflat(nu * imag - fx - fxt * np.exp(-nu * tau * imag) -
                    gx_i) - gx_j
    eigenvalue, eigenvector = np.linalg.eig(M)
    eigenvector_zero = eigenvector[:, np.argmin(np.abs(eigenvalue))]
    eigenvector_abs = np.abs(eigenvector_zero)
    plt.hist(eigenvector_abs, np.arange(0, 1, 0.01))
    plt.subplots_adjust(left=0.18,
                        right=0.98,
                        wspace=0.25,
                        hspace=0.25,
                        bottom=0.18,
                        top=0.98)
    plt.xticks(fontsize=ticksize)
    plt.yticks(fontsize=ticksize)
    plt.xlabel('$|v|$', fontsize=fs)
    plt.ylabel('Counts', fontsize=fs)
    plt.locator_params(axis='x', nbins=5)
    plt.legend(fontsize=legendsize, frameon=False)

    plt.show()

    return eigenvector_zero, tau
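
# Illustrative sketch (random matrix standing in for the delayed Jacobian): the
# final step of eigenvector_zeroeigenvalue keeps the eigenvector whose eigenvalue
# has the smallest magnitude; note the column (not row) indexing of eig's output.
import numpy as np

M_demo = np.random.randn(6, 6) + 1j * np.random.randn(6, 6)
eigenvalue_demo, eigenvector_demo = np.linalg.eig(M_demo)
v_zero_demo = eigenvector_demo[:, np.argmin(np.abs(eigenvalue_demo))]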
Example #22
def plot_x_m(network_type, N, d, seed_list, dynamics, m_list, space, weight_list):
    """TODO: Docstring for plot_P_w.

    :weight_list: TODO
    :returns: TODO

    """    
    fig, axes = plt.subplots(len(seed_list), len(weight_list) + 1, sharex=False, sharey=True, figsize=(3*(len(weight_list) +1), 3*len(seed_list)) ) 
    markers = ['o', '^', 's', 'p', 'P', 'h']
    linestyles = [(i, j) for i in [3, 6, 9] for j in [1, 5, 9, 13]]
    colors=sns.color_palette('hls', 11)
    alphas = [np.log(min(m_list)+1) / np.log(m+1) for m in m_list]
    sizes = [np.log(min(m_list)+1) / np.log(m+1) for m in m_list]
    for i, seed in enumerate(seed_list):
        s = StateDistribution(network_type, N, d, seed, dynamics)
        A_unit, A_interaction, index_i, index_j, cum_index = network_generate(network_type, N, 1, 0, seed, d)

        data_m = dict()
        data_all_m = dict()
        groups_node_nums = dict()
        for m in m_list:
            s.data_load(m, space)
            data = np.abs(s.data )
            data_all_m[m] = data
            weights = data[:, :1]
            index = [np.where(np.abs(weights - weight) < 1e-02 )[0][0]  for weight in weight_list]
            xs = data[index, 1:]
            data_m[m] = xs
            groups_node_nums[m] = s.group_node_number
        for j, weight in enumerate(weight_list):
            ax = axes[i][j]
            simpleaxis(ax)
            sizes = np.ravel(np.tile( (s.group_node_number / np.sum(s.group_node_number) + 0.01) * 100, (1, len(weight_list)) ))
            for k, m in enumerate(m_list):
                y = data_m[m][j]
                ax.scatter(x=np.ones(len(y)) * m, y=y, s= (groups_node_nums[m] / np.sum(groups_node_nums[m]) + 0.05) * 100, alpha=np.log(min(m_list)+0.5) / np.log(m+0.5), color=colors[k]) 
            ax.set(xscale='log', yscale='log')
               
            if i == 0:
                title_name = 'group  ' + f'$w={weight}$'
                ax.set_title(title_name, size=labelsize*0.5)

        ax = axes[i][j+1]
        simpleaxis(ax)
        for i_m, m in enumerate(m_list):
            data = data_all_m[m]
            weights = data[:, 0]
            weight_unique = np.arange(0.01, 0.6, 0.01)
            index_plot = [np.where(abs(weights - w_i) < 1e-5)[0][0] for w_i in weight_unique]
            y = data[index_plot, 1:]
            xs = np.repeat(y, groups_node_nums[m], axis=1)
            
            y_gl = betaspace(A_unit, xs)[-1]
            ax.plot(weight_unique, y_gl, linewidth=1, color=colors[i_m], label=f'$m={m}$', linestyle=(0, linestyles[i_m]) ) 
        if i == 0:
            title_name = f'global'
            ax.set_title(title_name, size=labelsize*0.5)
            ax.legend(fontsize=legendsize*0.5, ncol=2, loc='lower right', frameon=False, ) 


    xlabel = '$ m $'
    ylabel = 'stable state'
    fig.text(x=0.03, y=0.5, verticalalignment='center', s=ylabel, size=labelsize*0.7, rotation=90)
    fig.text(x=0.5, y=0.03, horizontalalignment='center', s=xlabel, size=labelsize*0.6)
    plt.subplots_adjust(left=0.12, right=0.95, wspace=0.25, hspace=0.25, bottom=0.12, top=0.95)
    save_des = '../manuscript/dimension_reduction_v2_020322/' + network_type + '_subplots_xs_m.png'
    #plt.savefig(save_des, format='png')
    #plt.close()
    return None
Example #23
def figure_plot(network_type, N, seed, d, weight_list, m_list, compare_weight,
                error_weight_list, compare_m_list, yi_m, space, tradeoff_para,
                method, method_weight, error_method, original_threshold_list,
                reduction_threshold_list, original_threshold_si,
                reduction_threshold_si):
    """TODO: Docstring for figure_plot.

    :network_type: TODO
    :N: TODO
    :seed: TODO
    :d: TODO
    :returns: TODO

    """
    des = '../manuscript/dimension_reduction_v1_111021/' + dynamics + '_' + network_type + '/'
    des = '../manuscript/dimension_reduction_v1_111021/' + dynamics + '_' + network_type + '_beta_pres=20/'
    if not os.path.exists(des):
        os.makedirs(des)
    A, xs_multi, xs_reduction_multi = data_xs(network_type, N, seed, d,
                                              weight_list, m_list, space,
                                              tradeoff_para, method)
    y_reduction_weighted = np.vstack(([
        np.array([
            betaspace(A, xs_reduction_multi[m, k])[-1]
            for k in range(len(weight_list))
        ]) for m in range(len(m_list))
    ]))
    y_reduction_unweighted = np.vstack(([
        np.array([
            np.mean(xs_reduction_multi[m, k]) for k in range(len(weight_list))
        ]) for m in range(len(m_list))
    ]))
    y_multi_weighted = np.array(
        [betaspace(A, xs_multi[i])[-1] for i in range(len(weight_list))])
    y_multi_unweighted = np.array(
        [np.mean(xs_multi[i]) for i in range(len(weight_list))])

    if method_weight == 'weighted':
        y_reduction = y_reduction_weighted
        y_multi = y_multi_weighted
    elif method_weight == 'unweighted':
        y_reduction = y_reduction_unweighted
        y_multi = y_multi_unweighted
    """
    "Fa"
    compare_weight_index = np.where(np.abs(compare_weight - weight_list) < 1e-8)[0][0]
    compare_m_index = [np.where(np.abs(m_list - m_i) < 1e-8)[0][0] for m_i in compare_m_list]
    compare_xs_multi(xs_multi[compare_weight_index], xs_reduction_multi[compare_m_index, compare_weight_index], compare_weight, compare_m_list, des)

    "Fb"
    plot_yglobal_m(y_multi, y_reduction[compare_m_index], weight_list, compare_m_list, des)

    "Fc"
    yi_m_index = [np.where(np.abs(m_list - yi_m[0]) < 1e-8)[0][0] ]
    plot_yi_group(xs_reduction_multi[yi_m_index], weight_list, yi_m, des)

    "Fd"
    error_weight_index = [np.where(np.abs(w_i - weight_list) < 1e-8)[0][0] for w_i in error_weight_list]
    plot_error_m(xs_multi[error_weight_index], xs_reduction_multi[:, error_weight_index], y_multi[error_weight_index], y_reduction[:, error_weight_index], error_weight_list, m_list, des)

    "Fe"
    heatmap_error_m(xs_multi, xs_reduction_multi, y_multi, y_reduction, weight_list, m_list, des)

    "Ff"
    heatmap_y_m(y_reduction, weight_list, m_list, des)
    "Fg"
    plot_y_m(y_reduction[:, error_weight_index], weight_list[error_weight_index], m_list, des)
    "Fh"
    critical_m_reduction(y_reduction, weight_list, m_list, des)

    "Fi"
    critical_m_multi_threshold(y_reduction, y_multi, weight_list, m_list, original_threshold_list, reduction_threshold_list, des)
    """

    "Fj"
    critical_m_threshold(y_reduction, y_multi, weight_list, m_list,
                         original_threshold_si, reduction_threshold_si, des)

    return None
Example #24
def tau_decouple_eff(network_type, N, d, beta, betaeffect, arguments,
                     seed_list):
    """TODO: Docstring for tau_kmax.

    :network_type: TODO
    :N: TODO
    :beta: TODO
    :betaeffect: TODO
    :returns: TODO

    """
    B, C, D, E, H, K = arguments
    des = '../data/'
    if not os.path.exists(des):
        os.makedirs(des)
    if betaeffect:
        des_file = des + network_type + f'_N={N}_d=' + str(
            d) + '_decouple_eff_beta=' + str(beta) + '_logistic.csv'
    else:
        des_file = des + network_type + f'_N={N}_d=' + str(
            d) + '_decouple_eff_wt=' + str(beta) + '_logistic.csv'

    for seed in seed_list:
        A, A_interaction, index_i, index_j, cum_index = network_generate(
            network_type, N, beta, betaeffect, seed, d)
        beta_eff, _ = betaspace(A, [0])
        xs_low, xs_high = stable_state(A, A_interaction, index_i, index_j,
                                       cum_index, arguments)
        degree = np.sum(A > 0, 0)
        #x_fix = np.mean(xs_high)
        x_fix = odeint(mutual_1D,
                       np.ones(1) * 5,
                       np.arange(0, 100, 0.01),
                       args=(beta_eff, arguments))[-1]
        index_list = np.argsort(degree)[-10:]
        tau = []
        for index in index_list:
            w = np.sum(A[index])
            xs = ddeint_Cheng(one_single_delay,
                              np.ones(1) * 5, np.arange(0, 100, 0.01),
                              *(0, 0, 0, w, x_fix, arguments))[-1]
            #xs = fsolve(one_kmax, np.ones(1) * 10, args=(w, x_fix, arguments))
            P = -(w * x_fix) / (D + E * xs + H * x_fix) + (
                w * E * xs * x_fix) / (D + E * xs + H * x_fix)**2 - (
                    1 - xs / K) * (2 * xs / C - 1)
            Q = xs / K * (xs / C - 1)
            if abs(P / Q) <= 1:
                tau.append(np.arccos(-P / Q) / Q / np.sin(np.arccos(-P / Q)))
        tau = np.min(tau)
        data = np.hstack((seed, degree.max(), tau))

        column_name = [f'seed{i}' for i in range(np.size(seed))]
        column_name.extend(['kmax', str(beta)])

        if not os.path.exists(des_file):
            df = pd.DataFrame(data.reshape(1, np.size(data)),
                              columns=column_name)
            df.to_csv(des_file, index=None, mode='a')
        else:
            df = pd.DataFrame(data.reshape(1, np.size(data)))
            df.to_csv(des_file, index=None, header=None, mode='a')
        print(seed, tau)

    return None