Example #1
def number_of_lp_lattice():
 
    for D in [2]:   
        myfile = open(str(D)+'number_lp_lattice', 'w')
        myfile.write('N_side' + '\t' + 'N'+ '\t'+ 'lp length' +  '\t'+ 'no. lp'+ '\n')
        start_time = time.time()
        for N_side in range(10,16,1):
            N = N_side**2

    #        model = models.COd(2, N)
            model = models.square_lattice_model(D, N_side)
            DAG = model[0]
            extremes = model[1]
            tr_DAG = tr.trans_red(DAG)
            lp = dl.lpd(tr_DAG, extremes[1], extremes[0])
            length_lp = lp[2]
            
            j=0
            paths_list = list(nx.all_simple_paths(tr_DAG, extremes[1], extremes[0], cutoff=length_lp+1))
            for i in range(len(paths_list)):
                if len(paths_list[i])==length_lp+1:
                    j+=1
    
            myfile.write(str(N_side) + '\t' + str(N) +'\t' + str(length_lp)+ '\t' + str(j) + '\n')
            print 'done', N_side
        elapsed = time.time() - start_time
        
        print 'finished.',D,'Dimension. Time elapsed = ',elapsed
        
    return
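The counting idiom in the loop above (enumerate all simple paths between the extremes, then count those whose node count equals the longest-path length plus one) can be isolated into a small helper. A minimal sketch, assuming only a networkx DiGraph that is a DAG; count_longest_paths is illustrative and not part of the project's dl module:

import networkx as nx

def count_longest_paths(G, source, target):
    # lengths (in edges) of every simple source-to-target path
    lengths = [len(p) - 1 for p in nx.all_simple_paths(G, source, target)]
    if not lengths:
        return 0, 0
    lp = max(lengths)
    # number of simple paths that attain the longest-path length
    return lp, lengths.count(lp)

As in number_of_lp_lattice, this enumerates every simple path, so it is only practical for small graphs.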
Example #2
    def test(N, D):
        box = dl.box_generator(N, D)
        DAG = box[0]
        ext = box[1]

        start = ext[1]
        end = ext[0]

        print DAG.node[start]["birthday"]
        print "!!!!!"
        start_time = time.clock()
        the_greedy_path = greedy_path(DAG, start, max_depth=50)
        greedy_time = time.clock() - start_time
        start_time = time.clock()
        lp = dl.lpd(DAG, start, end)
        lp_time = time.clock() - start_time
        print "Greedy"
        greedy_path_length = len(the_greedy_path)
        # print the_greedy_path

        print "Longest"
        lp_length = lp[2]
        # print lp[1]

        print "Greedy Time = %s" % str(greedy_time)
        print "LP Time = %s" % str(lp_time)

        return (greedy_path_length, greedy_time, lp_length, lp_time)
def BA_lp(m=50, N_range = range(51,555,50)):

    number_of_trials = 100
    myfile = open('BA_lp', 'w')
    myfile.write('N' + '\t' + 'average lp length' + '\t' + 'std err' + '\t' + 'comparison lp length' + '\t' + 'comparison std err' + '\n')
    
    for N in N_range:
        lp_list = []
        comparison_lp_list = []
        for _ in range(number_of_trials):
            model = models.barabasi_albert_graph(N,m)
    #        model = models.COd(D,N)
            G = model[0]
            extremes = model[1]
            
#            average_in_degree = float(m)*float((N-1))/(float(N))
#            print average_in_degree
            average_in_degree = float(nx.number_of_edges(G)) /float(nx.number_of_nodes(G))
            comparison_model = oneD_random_comparison(N,average_in_degree)
            comparison_G = comparison_model[0]
            comparison_extremes = comparison_model[1]
            
            if nx.has_path(comparison_G, comparison_extremes[1], comparison_extremes[0]): 
                comparison_lp = dl.lpd(comparison_G, comparison_extremes[1], comparison_extremes[0])
                comparison_lp_length = comparison_lp[2]
            else: comparison_lp_length=None
            
            #tr_DAG = tr.trans_red(G)
            lp = dl.lpd(G,extremes[1],extremes[0])
            lp_length = lp[2]
     
            lp_list.append(lp_length)
            comparison_lp_list.append(comparison_lp_length)
        statistics = stats.list_stats(lp_list)
        lp_av = statistics[3]
        lp_std_err = statistics[0]
        
        comparison_stats = stats.list_stats(comparison_lp_list)
        comparison_lp_av = comparison_stats[3]
        comparison_lp_std_err = comparison_stats[0]
        print "done ", N
        myfile.write(str(N) + '\t' + str(lp_av) + '\t' + str(lp_std_err) +'\t'+ str(comparison_lp_av) + '\t' + str(comparison_lp_std_err) + '\n')
    #nx.draw_random(G, with_labels=False, node_colour ='k', node_size=50, node_color = 'k', width=0.5) 
    #nx.draw_networkx(G, pos=random, with_labels=False, node_colour ='k', node_size=50, node_color = 'k', width=0.5)  
    #plt.show() # display figure
    return
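A hypothetical call (parameters are illustrative): the default m=50 needs N > 50, which is why N_range starts at 51; smaller values keep the 100 trials per N fast. The five tab-separated columns land in the 'BA_lp' file as written above:

BA_lp(m=5, N_range=range(6, 107, 20))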
Example #4
def pythag_distance(DAG, node1, node2):

    bday1 = DAG.node[node1]["birthday"]
    bday2 = DAG.node[node2]["birthday"]
    rank1 = DAG.node[node1]["rank"]
    rank2 = DAG.node[node2]["rank"]

    # pass the later-born node first so lpd always runs in the same direction
    if bday1 > bday2:
        lp = dl.lpd(DAG, node1, node2)[2]
    else:
        lp = dl.lpd(DAG, node2, node1)[2]

    dbday = bday1 - bday2
    drank = rank1 - rank2

    distance_bday = math.sqrt(lp ** 2 - dbday ** 2)  # unit mismatch
    distance_rank = math.sqrt(lp ** 2 - drank ** 2)

    print "The distance between %s and %s is %f for bday and %f for rank" % (node1, node2, distance_bday, distance_rank)
Example #5
def test(N, D):
    
    box = dl.box_generator(N, D, 'minkowski')
    #box = mod2.COd(D,N)
    dag = box[0]
    ext = box[1]
    
    tr_dag = tr.trans_red(dag)
    good_dag = tr_dag
    lp = dl.lpd(tr_dag, ext[1], ext[0])
    length = lp[2]
    print length
    return length
Example #6
def lpd_test(N, D):
    box = dl.box_generator(N, D, 'minkowski')
    dag = box[0]
    ext = box[1]
    
    tr_dag = tr.trans_red(dag)
    
    print ext[0]
    print ext[1]
    
    
    lp = dl.lpd(tr_dag, ext[1], ext[0])
    print lp[1]
    return lp
Example #7
def number_of_lp_BA(m_range=[1,5,10,50], n_max=50, n_step=5):
 
    for m in m_range:  
        n_range=range(m+1,n_max,n_step) 
#        n_range=range(101,201,10)
        myfile = open(str(m)+'C_for_BA', 'w')
        myfile.write('n' + '\t' + 'lp_av'+'\t' + 'lp_err'+ '\t' + 'no._av' +'\t' +  'no._err'+ '\n')
        start_time = time.time()
        trials = 1000
        for n in n_range:
            lp_length_list = []
            j_list = []
            for _ in range(trials):
        #        model = models.COd(2, N)
                model = models.barabasi_albert_graph(n, m)
                DAG = model[0]
                extremes = model[1]
                tr_DAG = tr.trans_red(DAG)
                lp = dl.lpd(tr_DAG, extremes[1], extremes[0])
                length_lp = lp[2]
                lp_length_list.append(length_lp)
                
                j=0
                paths_list = list(nx.all_simple_paths(tr_DAG, extremes[1], extremes[0], cutoff=length_lp+1))
                for i in range(len(paths_list)):
                    if len(paths_list[i])==length_lp+1:
                        j+=1
                        
                j_list.append(j)
            
            print j_list
            
            lp_stats = stats.list_stats(lp_length_list)
            lp_av = lp_stats[3]
            lp_err = lp_stats[1]
        
            j_stats = stats.list_stats(j_list)
            j_av = j_stats[3]
            j_err = j_stats[1]
            print j_av
            print j_err
        
            myfile.write(str(n) + '\t' + str(lp_av)+'\t' + str(lp_err)+ '\t' + str(j_av) +'\t' +  str(j_err)+ '\n')
            print 'done', n
        elapsed = time.time() - start_time
        
        print 'finished.',m,'m. Time elapsed = ',elapsed
        
    return
def test(N, D):
    
    box = dl.box_generator(N, D)
    dag = box[0]
    ext = box[1]
    tr_dag = tr.trans_red(dag)
    good_dag = tr_dag
    lp = dl.lpd(tr_dag, ext[1], ext[0])
    path = lp[1]
    tr_dag = good_dag
    dim = mps.mpsd(tr_dag, path)
    mm_dim = mmd.MM_dimension(dag)
    print dim
    print mm_dim
    return [dim[0], mm_dim]
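A hypothetical call, assuming dl.box_generator, mps.mpsd and mmd.MM_dimension behave as in the other examples; the returned pair holds the midpoint-scaling and MM dimension estimates for a 500-node 2D box:

ms_estimate, mm_estimate = test(500, 2)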
Example #9
def axis_end(DAG, apex, type, surface):

    out = open("./axis_%s.txt" % apex, "w")

    cone_list = lightcone_list(DAG, apex, type)
    print "There are %d nodes in the cone of %s" % (len(cone_list), apex)
    print "There are %d nodes along the top of the DAG" % len(surface)

    cone_top = []
    for thing in surface:
        if thing in cone_list:
            cone_top.append(thing)

    print "There are %d nodes on the top of the cone" % len(cone_top)

    max_size = 0  # largest interval size seen so far (avoids shadowing the builtin max)
    axis_node = []

    T = float(len(cone_top))
    i = 0.0  # use to print out progress bar
    for point in cone_top:

        DAG2 = DAG.copy()
        if type == "forward":
            start = point
            end = apex

        else:  # type = 'backward'
            start = apex
            end = point

        size = len(finish_interval(DAG2, end, start, cone_list, type))
        lp = dl.lpd(DAG2, start, end)[2] + 1  # to get lp in nodes, not edges
        out.write(str(point) + "\t" + str(size) + "\t" + str(lp) + "\n")

        # find which point(s) in cone_top give the largest interval (in terms of number of nodes)
        if size > 0:
            if size > max_size:
                max_size = size
                axis_node = [point]
            elif size == max_size:
                axis_node.append(point)
        i += 1
        print (i / T) * 100.0

    return [axis_node, max_size]
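A hypothetical usage, assuming DAG is a citation DAG whose nodes are arXiv-style IDs (as in the later examples), no_in is the list of nodes with no incoming edges (the 'top' surface), and lightcone_list / finish_interval are the project helpers axis_end relies on:

axis_nodes, largest_interval = axis_end(DAG, '9205221', 'backward', no_in)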
Example #10
def number_of_lp():
 
    for D in [2,3,4]:   
        myfile = open(str(D)+'how many longest paths?', 'w')
        myfile.write('N' + '\t' + 'L_scale=N^1/D'+ '\t'+ 'average lp length'+ '\t'+'std err'+  '\t'+ 'average no. lp'+ '\t'+'std err' +'\n')
        start_time = time.time()
        trials = 100
        for N in range(2,40,1):
            lp_length_list = []
            j_list = []
            for _ in range(trials):
                model = models.COd(2, N)
#                model = models.barabasi_albert_graph(n, m, seed)
                DAG = model[0]
                extremes = model[1]
                tr_DAG = tr.trans_red(DAG)
                lp = dl.lpd(tr_DAG, extremes[1], extremes[0])
                length_lp = lp[2]
                lp_length_list.append(length_lp)
                
                j=0
                paths_list = list(nx.all_simple_paths(tr_DAG, extremes[1], extremes[0], cutoff=length_lp+1))
                for i in range(len(paths_list)):
                    if len(paths_list[i])==length_lp+1:
                        j+=1
                        
                j_list.append(j)
            
            lp_stats = stats.list_stats(lp_length_list)
            lp_av = lp_stats[3]
            lp_err = lp_stats[1]
            
            j_stats = stats.list_stats(j_list)
            j_av = j_stats[3]
            j_err = j_stats[1]
            
            l_scale = (N**(1.0/float(D)))
        
            myfile.write(str(N) + '\t' + str(l_scale) +'\t' + str(lp_av)+'\t' + str(lp_err)+ '\t' + str(j_av) +'\t' +  str(j_err)+ '\n')
            print 'done', N
        elapsed = time.time() - start_time
        
        print 'finished.',D,'Dimension. Time elapsed = ',elapsed
        
    return
Example #11
def interval_test(DAG,start,end):
    
    lp = dl.lpd(DAG,start,end)
    length = lp[2]
    print 'The longest path between %s and %s is %d edges long' %(start,end,length)
    
    interval = lc.interval(DAG,start,end)
    N = interval.number_of_nodes()
    E = interval.number_of_edges()
    print 'The interval contains %d nodes and %d edges' %(N,E)
    
    c = clus.clustering(interval)
    print 'For the interval, c+ is %f, c0 is %f, c- is %f' %(c[0],c[1],c[2])
    
    #MMd = MM.MM_dimension(interval)
    MPSD = mp.mpsd(interval,lp[1])
    #print 'The MM dimension of the interval is %f and the MPSD is %f' %(MMd,MPSD)
    print 'The MPSD is %f' %MPSD[0]
m = 1
number_of_trials = 100
D = 1
myfile = open("BA_lp", "w")
myfile.write("N" + "\t" + "average lp length" + "\t" + "std err" + "\n")

for N in range(2, 100, 5):
    lp_list = []
    for trial in range(number_of_trials):
        model = models.barabasi_albert_graph(N, m)
        #        model = models.COd(D,N)
        G = model[0]
        extremes = model[1]
        # tr_DAG = tr.trans_red(G)
        lp = dl.lpd(G, extremes[1], extremes[0])
        lp_length = lp[2]

        lp_list.append(lp_length)
    statistics = stats.list_stats(lp_list)
    lp_av = statistics[3]
    lp_std_err = statistics[0]
    print "done ", N
    myfile.write(str(N) + "\t" + str(lp_av) + "\t" + str(lp_std_err) + "\n")
# nx.draw_random(G, with_labels=False, node_colour ='k', node_size=50, node_color = 'k', width=0.5)
# nx.draw_networkx(G, pos=random, with_labels=False, node_colour ='k', node_size=50, node_color = 'k', width=0.5)
# plt.show() # display figure


# D=3
# filename = './' + str(D) + 'lp_with_N.txt'
Example #13
import trans_red as tr
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np
import time
import models  # project module providing the COd model used below
# dl (the project's longest-path helpers, used as dl.lpd below) is assumed to be
# imported here as well; its module name is not shown in this excerpt.

start_time = time.time()

model = models.COd(2, 200)

DAG = model[0]
extremes = model[1]
print 'made model'
tr_DAG = tr.trans_red(DAG)
print 'done transitive reduction'
lp = dl.lpd(tr_DAG, extremes[1], extremes[0])
length_lp = lp[2]
path = lp[1]

print 'calculated lp'
t_list = []
x_list = []
path_t_list = []
path_x_list = []
#
for node in range(len(DAG.nodes())):
    t_list.append(DAG.nodes()[node][0])
    x_list.append(DAG.nodes()[node][1])
#    
#for node in range(len(path)):
#    path_t_list.append(path[node][0])
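The t_list/x_list coordinates collected above (and the commented-out path coordinates) appear to be intended for a scatter plot of the sprinkled points with the longest path highlighted. A minimal plotting sketch under that assumption, using only the matplotlib functions already imported; path_x_list/path_t_list stay empty unless the commented loop is restored:

plt.scatter(x_list, t_list, s=10, c='k')            # all sprinkled nodes
plt.scatter(path_x_list, path_t_list, s=25, c='r')  # nodes on the longest path
plt.xlabel('x')
plt.ylabel('t')
plt.show()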
Example #14
def lp_with_removal(number_of_trials, number_of_probabilities_considered, N, D, filename):

    """
    arguments: number_of_trials for each probability, output data is averaged over this.
    number_of_probabilities_considered = integer: number of data points
    N = integer: number of nodes in network
    D = integer: dimension of network
    filename = string: output file path e.g. './testing.txt'
    """
    start_time = time.time() #start clock
    myfile = open(filename, 'w')

    p_step = 10.0/number_of_probabilities_considered #step size in x, where p = log10(x) is the prob that an edge is removed given it is causally allowed
    myfile.write(str(D)+"D box w/ random removal prob p, and longest path l. Number trials averaged over=" + str(number_of_trials) + " N="+ str(N) + '\n')
    myfile.write('p' + '\t' + 'l(p)' +'\t' + 'std error'  +  '\n')  #send column headings to output file
    #initialize lists for each output variable
    p_list = []
    lp_average_list = []
    lp_std_error_list = []
    
    x_range = np.arange(1.0,10.0+p_step,p_step)  #defines list of x values to loop over   

    for x in x_range: #Loop over x, where x=10^p, so we get more points around prob's closer to 1 and can see phase transition-y behavior more clearly.
        #initialize lists for each quantity for which we require an individual value to be stored after every trial (e.g. for calculations stats)
        lp_list = []
        p = math.log10(x)

        for _ in range(int(number_of_trials)): 
            model = models.box_model(D, N,1-p)  #create model with prob of no edge given edge is causally allowed = p
            DAG_with_removal =  model[0]    #networkx DAG object from above model
            tr_DAG_with_removal = tr.trans_red(DAG_with_removal)    #transitively reduce DAG - doesn't affect lp or MM dimension, but DOES affect midpoint scaling
            extremes = model[1] #returns list of extremes from model

            if nx.has_path(DAG_with_removal, extremes[1], extremes[0]):  
                longest_path_between_extremes = dl.lpd(tr_DAG_with_removal, extremes[1],extremes[0]) #calculate longest path between extremes
                length_of_longest_path = longest_path_between_extremes[2]   #save length of longest path between extremes
      
            else: 
                length_of_longest_path=0   #if no path between extremes, longest path = 0 is physical.
        
            lp_list.append(length_of_longest_path)
        
        statistics = stats.list_stats(lp_list)
        lp_average = statistics[3] #calculates average lp over all trials for the current probability
        
        if p == 0.0:
            lp_0 = lp_average  #calculate average longest path for p==0 and use as normalization constant

        lp_average = lp_average / lp_0  #normalize average longest paths so can compare  results of different dimensions
       
        for i in range(len(lp_list)):
            lp_list[i] /= lp_0  #normalize longest paths (need to do this for std error calculation)
        
        statistics = stats.list_stats(lp_list)
        lp_std_error = statistics[0]
        
        p_list.append(p)
        lp_average_list.append(lp_average)
        lp_std_error_list.append(lp_std_error)

        myfile.write(str(p)+ '\t' + str(lp_average) + '\t' + str(lp_std_error) + '\n')
        
        print "finished ", p , "probability"
        
    elapsed = (time.time() - start_time)   #calculate time for method completion
    print "finished. Time elapsed = " , elapsed

    return [p_list, lp_average_list, lp_std_error_list] 
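A hypothetical invocation: a 2D box of 500 nodes, 20 probability points, 10 trials each, written to a local file; the returned lists can be passed straight to plt.errorbar:

p_vals, lp_avs, lp_errs = lp_with_removal(10, 20, 500, 2, './lp_removal_2D.txt')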
Example #15
def ms_dim_with_removal(number_of_trials, number_of_probabilities_considered, N, D, filename):

    """
    arguments: number_of_trials for each probability, output data is averaged over this.
    number_of_probabilities_considered = integer: number of data points
    N = integer: number of nodes in network
    D = integer: dimension of network
    filename = string: output file path e.g. './testing.txt'
    
    """

    start_time = time.time() #start clock
    myfile = open(filename, 'w')

    p_step = 1.0/number_of_probabilities_considered #define size of step in probability (prob = prob that edge is removed given it's causally allowed)
    myfile.write(str(D)+"D box w/ random removal prob p, and longest path l. Number trials averaged over=" + str(number_of_trials) + " N="+ str(N) + '\n')
    myfile.write('p' + '\t' + 'ms dimension (p)' +'\t' + 'std error' +  '\n')  #send column headings to output file
    #initialize lists for each output variable
    p_list_for_ms = []
    ms_average_list = []
    ms_std_error_list = []
    
    p_range = np.arange(0.0,1.0+p_step,p_step)   

    for p in p_range: #Loop over removal probabilities p from 0 to 1 in steps of p_step
        #initialize lists for each quantity for which we require an individual value to be stored after every trial (e.g. for calculations stats)
       
        ms_list = []
        ms_diff_sq_list = []
        ms_failed_trials = 0
        
        ms_sum = ms_average = 0.0

        for _ in range(int(number_of_trials)): 
            model = models.box_model(D, N,1-p)  #create model with prob of no edge given edge is causally allowed = p
            DAG_with_removal =  model[0]    #networkx DAG object from above model
            tr_DAG_with_removal = tr.trans_red(DAG_with_removal)    #transitively reduce DAG - doesn't affect lp or MM dimension, but DOES affect midpoint scaling
            extremes = model[1] #returns list of extremes from model

            if nx.has_path(DAG_with_removal, extremes[1], extremes[0]):  
                longest_path_between_extremes = dl.lpd(tr_DAG_with_removal, extremes[1],extremes[0]) #calculate longest path between extremes
                longest_path = longest_path_between_extremes[1]
                length_of_longest_path = longest_path_between_extremes[2]   #save length of longest path between extremes
      
            else: 
                length_of_longest_path=0   #if no path between extremes, longest path = 0 is physical.
        

            if length_of_longest_path > 2:
                ms_dimension = ms.mpsd(DAG_with_removal, longest_path)[0]  #need to trans red DAG first as otherwise lp method does not return the correct longest path. James said he'd sorted this?
            else: 
                ms_dimension = 0
                ms_failed_trials += 1

            ms_list.append(ms_dimension)
        
        if len(ms_list)<number_of_trials:
            ms_list.append(None)
        
        ms_sum = sum(ms_list)
        if (number_of_trials - ms_failed_trials - 1)>0:
            ms_average = ms_sum/float(number_of_trials-ms_failed_trials)
        else: ms_average=None
        
        ms_diff_sq_sum = 0.0
       
        if (number_of_trials - ms_failed_trials - 1)>0:
            for i in range(number_of_trials - ms_failed_trials):
                ms_diff_sq_list.append((ms_list[i] - ms_average)**2)
                ms_diff_sq_sum += ms_diff_sq_list[i]
            ms_std_dev = math.sqrt(ms_diff_sq_sum/float(number_of_trials - ms_failed_trials -1))
            ms_std_error = ms_std_dev/math.sqrt(float(number_of_trials - ms_failed_trials))
        else:
            ms_std_error = 0
            
        p_list_for_ms.append(p)
        ms_average_list.append(ms_average)
        ms_std_error_list.append(ms_std_error)

        myfile.write(str(p)+ '\t' + str(ms_average) + '\t' + str(ms_std_error) + '\n')
        
        clean = clean_for_plotting(ms_average_list,p_list_for_ms, ms_std_error_list)
        ms_average_list = clean[0]
        p_list_for_ms = clean[1]
        ms_std_error_list = clean[2]
        
        print "finished ", p , "probability"
        
    elapsed = (time.time() - start_time)   #calculate time for method completion
    print "finished. Time elapsed = " , elapsed

    return [p_list_for_ms, ms_average_list, ms_std_error_list]
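A hypothetical invocation mirroring the one above; note that ms_average can come back as None at high removal probabilities, which is what the clean_for_plotting call is there to strip out:

p_vals, ms_avs, ms_errs = ms_dim_with_removal(10, 20, 500, 2, './ms_dim_removal_2D.txt')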
Example #16
    no_in = []
    for line in no_in_file:
        no_in.append(line.strip())
    print 'Length no_in is %d' %len(no_in)

    #print lc.cone_axis(DAG,'9910429','forward',no_in)  

    start = '0302265'
    end = '9205221'
    
    '''lc.pythag_distance(DAG,start,end)
    
    start = '9706432'
    end = '9205221'''
    
    lp = dl.lpd(DAG,start,end)
    
    path = lp[1]
    
    start_2 = choice(DAG.nodes())
    #print start_2
    
    '''cone = lc.lightcone(DAG,start_2,'backward')
    print cone.number_of_nodes()
    print cone.number_of_edges()
    
    print 'Doing TC'
    tr.tc_recur(cone,start_2)
    
    print 'Done TC!'
    for node in cone:
Example #17
     end = node2
 else:
     start = node2
     end = node1
 if nx.has_path(DAG,start,end):
     if (DAG.node[start]['rank']-DAG.node[end]['rank']) < 3000:
         try:
             intvl = lc.interval(DAG,start,end)
             sample -= 1 #reduce sample number by 1 as we have found an eligible candidate pair
             
             N = len(intvl)
             E = intvl.number_of_edges()
             dbday = abs(bday1 - bday2)
             rankS = DAG.node[start]['rank']
             rankE = DAG.node[end]['rank']
             lp_result = dl.lpd(DAG,start,end)
             lp = lp_result[2]
             path = lp_result[1]
             print 'Finding MM'
             MM_result = MM.MM_dimension(intvl,node_list)
             intvl.clear() #for memory purposes
             MMd = MM_result[0]
             E_TC = MM_result[1]
             print 'Find MPSD'
             #MPS_result = mp.mpsd(DAG,path)
             #MPSd = MPS_result[0]
             #out.write(str(start) + '\t' + str(end) + '\t' + str(N) + '\t' + str(E) + '\t' + str(lp) + '\t' + str(E_TC) + '\t' + str(MMd) + '\t' + str(MPSd) + '\n')
             out.write(str(start) + '\t' + str(end) + '\t' + str(dbday) + '\t' + str(rankS) + '\t' + str(rankE) + '\t' + str(N) + '\t' + str(E) + '\t' + str(lp) + '\t' + str(E_TC) + '\t' + str(MMd) + '\n')
             print sample
         except:
             print 'Memory error with %s to %s' %(start,end)