Exemplo n.º 1
0
def cluster_transi_UD():
    f = FaceData()
    G = f.create_graph()  # 获取原始数据
    count = 3
    for i in xrange(100, 12000, 1000):
        avg_cluster = []
        avg_trans = []
        avg_assort = []
        if count > 0:
            for j in xrange(5):
                mh = child_graphs.impore_03(G, None, i, "unique")
                avg_cluster.append(round(nx.average_clustering(mh), 4))
                avg_trans.append(round(nx.transitivity(mh), 4))
                avg_assort.append(
                    round(nx.degree_assortativity_coefficient(mh), 4))
            a = sum(avg_cluster) / len(avg_cluster)
            b = sum(avg_trans) / len(avg_trans)
            c = sum(avg_assort) / len(avg_assort)
            count -= 1
            print float(i) / 1000, a, b, c
        else:
            mh = child_graphs.impore_03(G, None, i, "unique")
            a = round(nx.average_clustering(mh), 4)
            b = round(nx.transitivity(mh), 4)
            c = round(nx.degree_assortativity_coefficient(mh), 4)
            print float(i) / 1000, a, b, c
def multi_sampling():
    f = FaceData()
    G = f.create_graph()  # 获取原始数据

    _degree = dict()  # 分析重组数据
    chushu = 0
    for i in xrange(38):
        BFS = common.BFS(G, 10000)
        for i in BFS:
            _degree[G.degree(i)] = _degree.get(G.degree(i), 0) + 1
        chushu += len(BFS)
        print "step ", i

    # 处理数据,取平均的度分布
    x = sorted(_degree.iterkeys())  #生成x轴序列,从1到最大度
    num = chushu
    y = []
    for i in x:
        y.append(float(_degree[i]) / num)

    # 保存数据
    f = open("../compare/degree_plot/Epin_BFS.txt", "w")
    try:
        for i in x:
            f.write(str(i) + " " + str(float(_degree[i]) / num) + "\n")
    finally:
        f.close()
Exemplo n.º 3
0
def cluster_a_RW():
    f = FaceData()
    G = f.create_graph()  # 获取原始数据
    count = 3
    for i in xrange(100, 12000, 1000):
        avg_cluster = []
        avg_train = []
        avg_assort = []
        if count > 0:
            for j in xrange(5):
                rw = child_graphs.random_walk(G, None, i, "unique")
                avg_cluster.append(round(nx.average_clustering(rw), 4))
                avg_train.append(round(nx.transitivity(rw), 4))
                avg_assort.append(
                    round(nx.degree_assortativity_coefficient(rw), 4))
            a = sum(avg_cluster) / len(avg_cluster)
            b = sum(avg_train) / len(avg_train)
            c = sum(avg_assort) / len(avg_assort)
            count -= 1
            print float(i) / 1000, a, b, c
        else:
            rw = child_graphs.random_walk(G, None, i, "unique")
            a = round(nx.average_clustering(rw), 4)
            b = round(nx.transitivity(rw), 4)
            c = round(nx.degree_assortativity_coefficient(rw), 4)
            print float(i) / 1000, a, b, c
def sub_degree():
    """Plot the degree distribution of the original graph against a
    10000-node BFS sample on a single figure."""
    data = FaceData()
    graph = data.create_graph()  # load the original network

    bfs_sample = common.BFS(graph, 10000)
    canvas = degree.degree(graph, plt, "Original", "k-")  # full network curve
    canvas = degree.ego_degree(graph, bfs_sample, canvas, "BFS", "rh-")  # BFS subnet curve
    canvas.legend(loc="upper right")  # add the legend
    canvas.show()
Exemplo n.º 5
0
def multi_sampling():
    f = FaceData()
    G = f.create_graph()  # 获取原始数据

    _degree = dict()  # 分析重组数据
    chushu = 0
    for i in xrange(38):
        mhrw = common.metropolis_hastings_random_walk(G, None, 10000, "unique")
        # impore = improve_MH.impore_02(G, None, 10000, "unique")
        for i in mhrw:
            _degree[G.degree(i)] = _degree.get(G.degree(i), 0) + 1
        chushu += len(mhrw)
        print "step "

    # 处理数据,取平均的度分布
    x = sorted(_degree.iterkeys())  #生成x轴序列,从1到最大度
    num = chushu
    y = []
    for i in x:
        y.append(float(_degree[i]) / num)

    # 保存数据
    f = open("../compare/degree_plot/dup_Twitter_Mh.txt", "w")
    try:
        for i in x:
            f.write(str(i) + " " + str(float(_degree[i]) / num) + "\n")
    finally:
        f.close()

    ##################
    _degree = dict()  # 分析重组数据
    chushu = 0
    for i in xrange(38):
        # mhrw = common.metropolis_hastings_random_walk(G,None,10000,"unique")
        impore = improve_MH.impore_02(G, None, 10000, "unique")
        for i in impore:
            _degree[G.degree(i)] = _degree.get(G.degree(i), 0) + 1
        chushu += len(impore)
        print "step "

    # 处理数据,取平均的度分布
    x = sorted(_degree.iterkeys())  #生成x轴序列,从1到最大度
    num = chushu
    y = []
    for i in x:
        y.append(float(_degree[i]) / num)

    # 保存数据
    f = open("../compare/degree_plot/dup_Twitter_Ud.txt", "w")
    try:
        for i in x:
            f.write(str(i) + " " + str(float(_degree[i]) / num) + "\n")
    finally:
        f.close()
Exemplo n.º 6
0
def ori_avg_degree():
    f = FaceData()
    G = f.create_graph()  # 获取原始数据

    rw = common.random_walk(G, None, 16000, "unique")
    mhrw = common.metropolis_hastings_random_walk(G, None, 16000, "unique")
    bfs = common.BFS(G, None, 16000, "unique")
    print "原始网络的平均度为: ", degree.avg_degree(G)
    print "RW抽样得到的平均度为: ", degree.avg_degree(G, rw)
    print "MHRW抽样得到的平均度为: ", degree.avg_degree(G, mhrw)
    print "BFS抽样得到的平均度为: ", degree.avg_degree(G, bfs)
Exemplo n.º 7
0
def create_graph():
    """Sample 8000 nodes by random walk and record the sample's average
    degree (rounded to 2 places) in rw.txt."""
    source = FaceData()
    graph = source.create_graph()  # load the original network
    out = open("../compare/avg_degree/rw.txt", "w")
    try:
        walk = common.random_walk(graph, None, 8000, "unique")
        degree_total = sum([graph.degree(node) for node in walk])
        mean_degree = float(degree_total) / len(walk)
        out.write("RW_avgDegree ")
        out.write(str(round(mean_degree, 2)))
    finally:
        out.close()
Exemplo n.º 8
0
def duplicate_epinions_Mhrw():
    """For sample sizes 100..9900 (step 100), record the MHRW sampler's
    result in dup_Epinion_Mhrw.txt as "<size> <result>" lines."""
    source = FaceData()
    graph = source.create_graph()  # load the original network

    out = open("../compare/avg_degree/dup_Epinion_Mhrw.txt", "w")
    try:
        for size in xrange(100, 10000, 100):
            result = common.metropolis_hastings_random_walk(graph, None, size, "unique")
            out.write(str(size) + " " + str(result) + "\n")
    finally:
        out.close()
Exemplo n.º 9
0
def duplicate_twitter_UD():
    """For sample sizes 100..9900 (step 100), record the improved-MH
    ("UD") sampler's result in dup_Twitter_UD.txt as "<size> <result>"."""
    source = FaceData()
    graph = source.create_graph()  # load the original network

    out = open("../compare/avg_degree/dup_Twitter_UD.txt", "w")
    try:
        for size in xrange(100, 10000, 100):
            result = improve_MH.impore_02(graph, None, size, "unique")
            out.write(str(size) + " " + str(result) + "\n")
    finally:
        out.close()
Exemplo n.º 10
0
def sub_graph_assort():
    f = FaceData()
    G = f.create_graph()  # 获取原始数据

    rw = child_graphs.random_walk(G, None, 1000, "unique")
    mhrw = child_graphs.metropolis_hastings_random_walk(
        G, None, 1000, "unique")

    print "原始网络的匹配系数: ", avg_cluster(G)
    print "RW 子网的匹配系数: ", avg_cluster(G, rw)
    print "MHRW 子网的匹配系数: ", avg_cluster(G, mhrw)
Exemplo n.º 11
0
def sub_degree():
    """Plot the degree distributions of the original graph and of
    10000-node RW and MHRW subgraphs on a single figure.
    NOTE(review): this shadows an earlier ``sub_degree`` definition in
    the same module -- scrape artifact, name kept for compatibility."""
    data = FaceData()
    graph = data.create_graph()  # load the original network

    rw_sub = child_graphs.random_walk(graph, None, 10000, "unique")
    mhrw_sub = child_graphs.metropolis_hastings_random_walk(
        graph, None, 10000, "unique")
    canvas = degree.degree(graph, plt, "UNI", "b--")  # original network curve
    canvas = degree.degree(rw_sub, canvas, "RW", "k")  # RW subnet curve
    canvas = degree.degree(mhrw_sub, canvas, "MHRW", "r-.")
    canvas.legend(loc="upper right")  # add the legend
    canvas.show()
Exemplo n.º 12
0
def compare_press():
    """Run RW and MHRW samplers for 10000 nodes and write their results
    side by side to com_dup.txt for comparison."""
    data = FaceData()
    graph = data.create_graph()  # load the original network

    rw_result = common.random_walk(graph, None, 10000, "unique")
    mhrw_result = common.metropolis_hastings_random_walk(graph, None, 10000, "unique")
    out = open("../compare/alpha_dup/com_dup.txt", "w")
    try:
        out.write("rw" + " " + str(rw_result) + "\n")
        out.write("mhrw" + " " + str(mhrw_result) + "\n")
    finally:
        out.close()
Exemplo n.º 13
0
def transi_RW():
    f = FaceData()
    G = f.create_graph()  # 获取原始数据
    count = 3
    for i in xrange(100, 12000, 1000):
        avg_cluster = []
        if count > 0:
            for j in xrange(5):
                rw = child_graphs.random_walk(G, None, i, "unique")
                avg_cluster.append(round(nx.transitivity(rw), 4))
            a = sum(avg_cluster) / len(avg_cluster)
            count -= 1
            print float(i) / 1000, a
        else:
            rw = child_graphs.random_walk(G, None, i, "unique")
            a = round(nx.transitivity(rw), 4)
            print float(i) / 1000, a
Exemplo n.º 14
0
def ego_degree():
    """Plot the original network's degree distribution together with the
    ego degree distributions of an MHRW sample and an improved-MH ("UD")
    sample, each of 10000 nodes."""
    data = FaceData()
    graph = data.create_graph()  # load the original network

    mhrw_sample = common.metropolis_hastings_random_walk(graph, None, 10000, "unique")
    ud_sample = improve_MH.impore_03(graph, None, 10000, "unique")
    canvas = degree.degree(graph, plt, "Original", "k-")  # full network curve
    canvas = degree.ego_degree(graph, mhrw_sample, canvas, "MHRW", "bv-")
    canvas = degree.ego_degree(graph, ud_sample, canvas, "UD", "rh-")
    canvas.ylabel(r'$P(k_\upsilon=k) $')
    canvas.xlabel(u'node degree k')
    canvas.legend(loc="upper right")  # add the legend
    canvas.show()
Exemplo n.º 15
0
def alpha_press():
    f = FaceData()
    G = f.create_graph()  # 获取原始数据

    impore = improve_MH.impore_03(G, None, 10000, "unique")
    # print round(1/impore - 1, 4)
    # 保存数据
    f = open("../compare/alpha_dup/a_dup.txt", "w")
    try:
        for i in xrange(5, 100, 5):
            i = float(i) / 100
            print i
            dup = 0
            for j in xrange(0, 10):
                dup += improve_MH.impore_03(G, None, 10000, "unique", alpa=i)
            dup /= 10
            f.write(str(i) + " " + str(dup) + "\n")
            print dup
    finally:
        f.close()
Exemplo n.º 16
0
    f = FaceData()
    G = f.create_graph()  # 获取原始数据
    count = 3
    for i in xrange(100, 12000, 1000):
        avg_cluster = []
        avg_trans = []
        avg_assort = []
        if count > 0:
            for j in xrange(5):
                mh = child_graphs.BFS(G, i)
                avg_cluster.append(round(nx.average_clustering(mh), 4))
                avg_trans.append(round(nx.transitivity(mh), 4))
                avg_assort.append(
                    round(nx.degree_assortativity_coefficient(mh), 4))
            a = sum(avg_cluster) / len(avg_cluster)
            b = sum(avg_trans) / len(avg_trans)
            c = sum(avg_assort) / len(avg_assort)
            count -= 1
            print float(i) / 1000, a, b, c
        else:
            mh = child_graphs.BFS(G, i)
            a = round(nx.average_clustering(mh), 4)
            b = round(nx.transitivity(mh), 4)
            c = round(nx.degree_assortativity_coefficient(mh), 4)
            print float(i) / 1000, a, b, c


# cluster_transi_BFS()
f = FaceData()
G = f.create_graph()  # load the original network
print round(nx.degree_assortativity_coefficient(G), 4)  # whole-graph assortativity, 4 decimals
Exemplo n.º 17
0
 def __init__(self, x, limit):
     """Build the source graph and remember plotting/sampling settings."""
     f = FaceData()
     self.G = f.create_graph()                     # load the original data
     self.plt = plt  # module-level pyplot kept on the instance
     self.x = x  # presumably a sample size/series -- confirm with callers
     self.limit = limit  # presumably a sampling cap -- confirm with callers
Exemplo n.º 18
0
def bfs_text():
    f = FaceData()
    G = f.create_graph()                            # 获取原始数据

    bfs = common.BFS(G, None, 5000, "total")
    print len(bfs)