def remove_nodes(g):
    """
    Modifies the input graph in place by contracting every node whose degree
    is 2: the two incident edges are replaced by a single edge whose 'length'
    and 'number' are the sums and whose 'thick' is the mean of the originals.
    Currently unused.
    """
    g1 = g.copy()
    nodes = [i for i in g1.nodes() if nx.degree(g1, i) == 2]
    for i in nodes:
        n = list(nx.neighbors(g, i))
        le = nx.get_edge_attributes(g, 'length')
        th = nx.get_edge_attributes(g, 'thick')
        num = nx.get_edge_attributes(g, 'number')
        if len(n) == 1:
            # both edges lead to the same neighbour; skip to avoid a self-loop
            continue
        n1, n2 = n[0], n[1]
        k1 = num[min(n1, i), max(i, n1), 0]
        k2 = num[min(n2, i), max(i, n2), 0]
        l = le[min(n1, i), max(i, n1), 0] + le[min(i, n2), max(i, n2), 0]
        t = (th[min(n1, i), max(i, n1), 0] + th[min(i, n2), max(i, n2), 0])/2.
        g.remove_node(i)
        g.add_edge(n1, n2, length=l, thick=t, number=k1 + k2)
    return g
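A minimal usage sketch, assuming a networkx MultiGraph with integer node labels and edges carrying the 'length', 'thick' and 'number' attributes the function expects:

import networkx as nx

g = nx.MultiGraph()
g.add_edge(0, 1, length=1.0, thick=0.5, number=1)
g.add_edge(1, 2, length=2.0, thick=0.7, number=1)  # nodes 1 and 2 have degree 2
g.add_edge(2, 3, length=1.5, thick=0.6, number=1)

g = remove_nodes(g)
print(list(g.edges(data=True)))
# [(0, 3, {'length': 4.5, 'thick': 0.6, 'number': 3})]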
Example #2
def weight_reshuffling(G, weight_tag='weight'):
	'''
	Input:
		G: an undirected weighted network
		weight_tag: name of the edge attribute holding the weights
	Output:
		E: an undirected weighted graph with the same connectivity as G, but
		   reshuffled weights.
	'''

	print('Begin creation of weight reshuffled graph...')
	weight_dictionary = nx.get_edge_attributes(G, weight_tag)
	weight_sequence = list(weight_dictionary.values())

	# preliminary scan of edge weights to define filtration steps
	print('Preliminary scan of edge weights to define filtration steps...')
	edge_weights = sorted(set(weight_sequence), reverse=True)
	print('Preliminary scan and sorting completed.')
	E = nx.Graph()
	E.add_nodes_from(G.nodes(data=True))
	E.add_edges_from(G.edges())
	E.remove_edges_from(nx.selfloop_edges(E))
	weight_sequence_temp = list(weight_sequence)  # copy, so G's weights stay untouched
	rn.shuffle(weight_sequence_temp)

	print('Setting new weights.')

	for e in E.edges():
		E[e[0]][e[1]]['weight'] = weight_sequence_temp.pop(0)

	print('Weights setup completed.')
	return E
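A short usage sketch (the karate-club graph and random weights are illustrative assumptions); the reshuffled graph keeps G's connectivity while permuting its multiset of weights:

import random as rn

import networkx as nx

G = nx.karate_club_graph()
for u, v in G.edges():
    G[u][v]['weight'] = rn.random()

E = weight_reshuffling(G)
assert sorted(nx.get_edge_attributes(G, 'weight').values()) == \
       sorted(nx.get_edge_attributes(E, 'weight').values())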
Example #3
    def _reproduce_sexually(self, partner):  # TODO: Broken.
        """Sexual reproduction between two networks"""
        inherited_state = -1  # -1 would be most recent
        network_props = ['num_nodes']
        node_props = ['threshold', 'energy_consumption', 'spontaneity']
        # node_props = ['threshold']
        edge_props = ['weight']
        child = copy.deepcopy(self)
        partner.children.append(child)
        # partner.reproductive_energy_cost = self.reproductive_energy_cost
        child.parents, child.children = [self, partner], []
        if np.random.randint(0, 2) == 1:
            internal_net = copy.deepcopy(self.internal)
            child._cloned_from, child._not_cloned_from = self, partner
        else:
            internal_net = copy.deepcopy(partner.internal)
            child._cloned_from, child._not_cloned_from = partner, self
        # print "Kin with %d neurons, copied from net with %d neurons" %(internal_net.simdata[-1].number_of_nodes(), self.internal.simdata[-1].number_of_nodes())
        child.set_internal_network(copy.deepcopy(internal_net), t0=self.t)
        child.internal.simdata[inherited_state] = copy.copy(internal_net.simdata[inherited_state])

        choices = np.random.randint(2, size=(2, len(node_props)))  # randomly choose attributes
        for j, n in enumerate(node_props):
            p1 = nx.get_node_attributes(self.internal.simdata[inherited_state], n)
            p2 = nx.get_node_attributes(partner.internal.simdata[inherited_state], n)
            # add/remove nodal information based on the inherited number of nodes
            # chosen = self if choices[0][j] else partner
            # print "Using %s(N%d) for %s" %(chosen.ind_id, chosen.internal.simdata[inherited_state].number_of_nodes(), n)
            utils.set_node_attributes(child.internal.simdata[inherited_state], n, p1 if choices[0][j] else p2)

        for j, e in enumerate(edge_props):
            p1 = nx.get_edge_attributes(self.internal.simdata[inherited_state], e)
            p2 = nx.get_edge_attributes(partner.internal.simdata[inherited_state], e)
            utils.set_edge_attributes(child.internal.simdata[inherited_state], e, p1 if choices[1][j] else p2)
        return child
from collections import OrderedDict

import networkx as nx
import numpy as np
from sklearn.preprocessing import normalize  # assumed source of `normalize`


def normDAGl2Test(G_test, power):
    kern = nx.get_edge_attributes(G_test, 'kern_unnorm')
    tran = nx.get_edge_attributes(G_test, 'tran')

    kern = OrderedDict(sorted(kern.items(), key=lambda t: t[0]))
    val = list(kern.values())
    key = list(kern.keys())

    tran = OrderedDict(sorted(tran.items(), key=lambda t: t[0]))
    tran = list(tran.values())

    val = np.asarray(val, dtype=float)
    tran = np.asarray(tran, dtype=float)
    tran = np.log(1/tran)  # logarithm weighting
    tran[tran == np.inf] = 0
    tran[np.isnan(tran)] = 0

    if power == 2:
        tran = np.square(tran)

    if len(val.shape) == 2:
        # kern = val/tran[:, None]
        kern = val*tran[:, None]  # avoid numeric problems when using logarithm weighting
        kern = normalize(kern, norm='l2', axis=0)
    else:
        kern = val*tran
        kern = kern/np.linalg.norm(kern)

    kern = dict(zip(key, kern))
    nx.set_edge_attributes(G_test, kern, 'kern')  # networkx >= 2 argument order

    return G_test
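A hedged usage sketch; the two-edge DAG and its attribute values are toy assumptions:

import networkx as nx

G_test = nx.DiGraph()
G_test.add_edge('root', 'a', kern_unnorm=1.0, tran=0.5)
G_test.add_edge('root', 'b', kern_unnorm=2.0, tran=0.5)

G_test = normDAGl2Test(G_test, power=1)
print(nx.get_edge_attributes(G_test, 'kern'))  # l2-normalized, log-weighted kernel values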
Example #5
	def build(self):
		'''
		Produce a ``circuit``.
		'''
		g = self._g
		voltages    = nx.get_edge_attributes(g, '_voltage')
		resistances = nx.get_edge_attributes(g, '_resistance')
		sources     = nx.get_edge_attributes(g, '_source')

		# class invariant of CircuitBuilder; no attribute ever appears without the other two
		assert set(voltages) == set(resistances) == set(sources)

		# this covers edges present in the initial graph (passed into the constructor)
		# which were not addressed via make_resistor and friends
		missing_edges = [e for e in g.edges() if (e not in voltages) and (e[::-1] not in voltages)]
		for e in missing_edges:
			voltages[e]    = self._DEFAULT_VOLTAGE
			resistances[e] = self._DEFAULT_RESISTANCE
			sources[e]     = e[0]

		copy = _copy_graph_without_attributes(g)
		nx.set_edge_attributes(copy, EATTR_VOLTAGE, voltages)
		nx.set_edge_attributes(copy, EATTR_RESISTANCE, resistances)
		nx.set_edge_attributes(copy, EATTR_SOURCE, sources)
		assert validate_circuit(copy)
		return copy
def short_branches():
    """
    Visualization of short branches of the skeleton.
    
    """
    data1_sk = glob.glob('/backup/yuliya/vsi05/skeletons_largdom/*.h5')
    data1_sk.sort()

    for i,j, k in zip(d[1][37:47], data1_sk[46:56], ell[1][37:47]):
        g = nx.read_gpickle(i)
        dat = tb.openFile(j)
        skel = np.copy(dat.root.skel)
        bra = np.copy(dat.root.branches)
        mask = np.zeros_like(skel)    
        dat.close()
    
        length = nx.get_edge_attributes(g, 'length')
        number = nx.get_edge_attributes(g, 'number')
        num_dict = {}
        for m in number:
            for v in number[m]:
                num_dict.setdefault(v, []).append(m)
        find_br = ndimage.find_objects(bra)
        for l in list(length.keys()):
            if length[l]<0.5*k: #Criteria
                for b in number[l]:
                    mask[find_br[b-1]] = bra[find_br[b-1]]==b
        mlab.figure(bgcolor=(1,1,1), size=(1200,1200))
        mlab.contour3d(skel, colormap='hot')
        mlab.contour3d(mask)
        mlab.savefig('/backup/yuliya/vsi05/skeletons/short_bran/'+ i[42:-10] + '.png')
        mlab.close()
def length_distr_total():
    """    
    Length distribution total (all images). For all the branches and broken ones.
    
    d1 : array of lists of pathes for the break-ups dictionaries.
    d : array of lists of pathes for the graphs
    ell : array of lists of length scales
    pa : list of experiment names (pathes)
    """
    for data, data1, path, el in zip(d, d1, pa, ell):
        l1 = list() #all branches
        l2 = list() #breaking branches
        u = 0
        for i,j,le in zip(data, data1, el):
            g = nx.read_gpickle(i)
            br = np.load(j).item()
            length = nx.get_edge_attributes(g, 'length')
            l_mean = np.mean(np.asarray(length.values()))
            number = nx.get_edge_attributes(g, 'number')
            num_dict = {}
            for k in number:
                for v in number[k]:
                    num_dict.setdefault(v, []).append(k)
                
            for k in length.values():
                l1.append(k/float(1))
            for k in br.keys():
                for l in num_dict[k]:
                    l2.append(length[l]/float(1))
            u+=1
        hist1, bins1 = np.histogram(l1, np.arange(0, max(l1)+1, 0.06))
        hist2, bins2 = np.histogram(l2, np.arange(0, max(l1)+1, 0.06))
        center1 = (bins1[:-1] + bins1[1:])/2
        center2 = (bins2[:-1] + bins2[1:])/2
        #save to file if necessary
        #np.save('/home/yuliya/codes/lengths/' + path + '/total_lno_all_len.npy', center1)    
        #np.save('/home/yuliya/codes/lengths/' + path + '/total_lno_break_len.npy', center2)
        #np.save('/home/yuliya/codes/lengths/' + path + '/total_lno_all.npy', hist1/float(len(l1)))
        #np.save('/home/yuliya/codes/lengths/' + path + '/total_lno_break.npy', hist2/float(len(l1)))

    #Plot it
    ##plt.figure(2)
    #hist1, bins1 = np.histogram(l1, np.arange(0, max(l1)+1, 0.06))
    #hist2, bins2 = np.histogram(l2, np.arange(0, max(l2)+1, 0.06))
    #center1 = (bins1[:-1] + bins1[1:])/2
    #center2 = (bins2[:-1] + bins2[1:])/2
    #plt.plot(center1, hist1/float(len(l1)), '.', color='red', label = 'all branches')
    #plt.plot(center2, hist2/float(len(l1)), '.', color='blue', label = 'breaking branches')
    #



    """
    Probability as a function of length.
    """

    hist1, bins1 = np.histogram(l1, np.arange(0, max(l1)+1, 0.1))
    hist2, bins2 = np.histogram(l2, np.arange(0, max(l2)+1, 0.1))
    center1 = (bins1[:-1] + bins1[1:])/2
    plt.plot(center1, hist2/hist1.astype('float32'), '.', color='green', label = 'v35')
Example #8
def init(G, uncon_comp_tups, contactor_tups):
    nodes_number = G.nodes()
    edges_number = G.edges()
    node_name_data = nx.get_node_attributes(G, 'name')
    edge_name_data = nx.get_edge_attributes(G, 'name')
    edge_type_data = nx.get_edge_attributes(G, 'type')
    node_type_data = nx.get_node_attributes(G, 'type')
    declaration = '(set-option :print-success false)\n'
    declaration += '(set-option :produce-models true)\n(set-logic QF_UF)\n'
    for i in range(0, len(nodes_number)):
        x = nodes_number[i]
        node_type = node_type_data[x]
        if node_type != 'dummy':
            clause = '(declare-fun ' + node_name_data[x] + ' () Bool)\n'
            if node_type == 'generator' or node_type=='APU' or node_type == 'rectifier_dc':
                uncon_comp_tups.append(node_name_data[x])
            declaration += clause
    for i in range(0, len(edges_number)):
        idx = edges_number[i]
        edge_type = edge_type_data[idx]
        if edge_type == 'contactor':
            edge_name = edge_name_data[idx]
            flag = 0
            for j in range(0, len(contactor_tups)):
                if edge_name == contactor_tups[j]:
                    flag = 1
                    break
            if flag == 0: contactor_tups.append(edge_name)
    for i in range(0, len(contactor_tups)):
        clause = '(declare-fun ' + contactor_tups[i] + ' () Bool)\n'
        declaration += clause
    return declaration
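A hedged usage sketch (node and edge attributes are illustrative; assumes networkx 1.x, where G.nodes() and G.edges() return indexable lists as the code above requires):

import networkx as nx

G = nx.Graph()
G.add_node(1, name='GEN1', type='generator')
G.add_node(2, name='BUS1', type='bus')
G.add_edge(1, 2, name='C12', type='contactor')

uncon_comp_tups, contactor_tups = [], []
print(init(G, uncon_comp_tups, contactor_tups))
# declares GEN1, BUS1 and the contactor C12 as SMT Bool constants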
Example #9
def no_paralleling_set(name_tups, G):
    nodes_number = G.nodes()
    edge_type_data = nx.get_edge_attributes(G,'type')
    node_name_data = nx.get_node_attributes(G,'name')
    edge_name_data = nx.get_edge_attributes(G,'name')
    num_tups = []
    for i in range(0, len(name_tups)):
        found = False
        for j in range(0, len(nodes_number)):
            x = nodes_number[j]
            if node_name_data[x] == name_tups[i]:
                num_tups.append(x)
                found = True
        #check if there's an invalid input component
        if not found:
            print 'Error: Component ' + name_tups[i] + ' Not Found'
            exit()
    specs_assert = ''
    for i in range(0, len(num_tups)-1):
        for j in range(i+1, len(num_tups)):
            tups = list(nx.all_simple_paths(G, num_tups[i], num_tups[j]))
            if tups == []:
                continue #no path between the two components, nothing to assert
            clause = '(assert (not'
            if len(tups)>1: clause += ' (or'
            for k in range(0,len(tups)):
                clause += ' (and'
                one_path = tups[k]
                for x in range(0,len(one_path)-1):
                    if edge_type_data[(one_path[x],one_path[x+1])]=='contactor':
                        clause += ' ' + edge_name_data[(one_path[x],one_path[x+1])]
                clause += ')'
            if len(tups)>1: clause += ')))\n'
            else: clause += '))\n'
            specs_assert += clause
    return specs_assert
Example #10
def no_paralleling(node1, node2, G):
    nodes_number = G.nodes()
    edge_type_data = nx.get_edge_attributes(G,'type')
    node_name_data = nx.get_node_attributes(G,'name')
    edge_name_data = nx.get_edge_attributes(G,'name')
    num1 = num2 = 0
    for i in range(0, len(nodes_number)):
        x = nodes_number[i]
        if node_name_data[x] == node1:
            num1 = x
        elif node_name_data[x] == node2:
            num2 = x
    #check if components are valid
    if num1 == 0: 
        print 'Error: ' + node1 + ' Not Found'
        exit()
    if num2 == 0: 
        print 'Error: ' + node2 + ' Not Found'
        exit()
    tups = list(nx.all_simple_paths(G, num1, num2))
    if tups == []:
        return '' #no path between the two nodes, nothing to assert
    clause = '(assert (not'
    if len(tups)>1: clause += ' (or'
    for k in range(0,len(tups)):
        clause += ' (and'
        one_path = tups[k]
        for x in range(0,len(one_path)-1):
            if edge_type_data[(one_path[x],one_path[x+1])]=='contactor':
                clause += ' ' + edge_name_data[(one_path[x],one_path[x+1])]
        clause += ')'
    if len(tups)>1: clause += ')))\n'
    else: clause += '))\n'
    return clause
Example #11
    def compare_list(self, graph_list, types, h, D):
        """
        Compute the all-pairs kernel values for a list of graph representations of verification tasks
        """
        all_graphs_number_of_nodes = 0
        node_labels = [0] * (h+1)
        node_depth = [0] * len(graph_list)
        edge_types = [0] * len(graph_list)
        edge_truth = [0] * len(graph_list)

        for it in range(h+1):
            node_labels[it] = [0] * len(graph_list)

        for i, g in enumerate(graph_list):
            node_labels[0][i] = {key: self._compress(value)
                                 for key, value in nx.get_node_attributes(g, 'label').items()}
            node_depth[i] = nx.get_node_attributes(g, 'depth')
            edge_types[i] = nx.get_edge_attributes(g, 'type')
            edge_truth[i] = nx.get_edge_attributes(g, 'truth')
            all_graphs_number_of_nodes += len([node for node in nx.nodes_iter(g) if node_depth[i][node] <= D])
            # if i == 0:
            #     self._graph_to_dot(g, node_labels[0][i], "graph{}.dot".format(i))

        # all_graphs_number_of_nodes is upper bound for number of possible edge labels
        phi = np.zeros((all_graphs_number_of_nodes, len(graph_list)), dtype=np.uint64)

        # h = 0
        for i, g in enumerate(graph_list):
            for node in g.nodes_iter():
                if node_depth[i][node] <= D:
                    label = node_labels[0][i][node]
                    phi[self._compress(label), i] += 1

        K = np.dot(phi.transpose(), phi)

        # h > 0
        for it in range(1, h+1):
            # Todo check if the shape fits in all cases
            phi = np.zeros((2*all_graphs_number_of_nodes, len(graph_list)), dtype=np.uint64)

            print('Updating node labels of graphs in iteration {}'.format(it), flush=True)

            # for each graph update edge labels
            for i, g in tqdm(list(enumerate(graph_list))):
                node_labels[it][i] = {}
                for node in g.nodes_iter():
                    if node_depth[i][node] <= D:
                        label_collection = self._collect_labels(node, i, g, it-1, node_labels, node_depth, types, D, edge_types, edge_truth)
                        long_label = "_".join(str(x) for x in [np.concatenate([np.array([node_labels[it-1][i][node]]),
                                                               np.sort(label_collection)])])
                        node_labels[it][i][node] = self._compress(long_label)
                        phi[self._compress(long_label), i] += 1
                        # node_labels[it][i][node] = long_label
                        # phi[self._compress(long_label), i] += 1
                # if i == 0:
                #     self._graph_to_dot(g, node_labels[it][i], "graph{}_it{}.dot".format(i, it))

            K += np.dot(phi.transpose(), phi)

        return K
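The counting core of the method can be shown standalone: compress node labels to integer indices, build a feature matrix phi of label counts per graph, and take phi^T * phi as the kernel matrix. A minimal sketch, with a toy stand-in for self._compress:

import numpy as np

_index = {}
def _compress(label):
    # toy stand-in for self._compress: map each distinct label to a fresh index
    return _index.setdefault(label, len(_index))

labels_per_graph = [['A', 'B', 'A'], ['B', 'B']]
phi = np.zeros((4, len(labels_per_graph)), dtype=np.uint64)  # rows >= number of distinct labels
for i, labels in enumerate(labels_per_graph):
    for label in labels:
        phi[_compress(label), i] += 1

K = np.dot(phi.transpose(), phi)  # K[i, j] = sum over labels of count_i * count_j
print(K)  # [[5 2]
          #  [2 4]]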
Example #12
 def test_clear_delays(self):
     topo = fnss.star_topology(12)
     fnss.set_delays_constant(topo, 1, 'ms', None)
     self.assertEqual(topo.number_of_edges(),
                      len(nx.get_edge_attributes(topo, 'delay')))
     fnss.clear_delays(topo)
     self.assertEqual(0, len(nx.get_edge_attributes(topo, 'delay')))
Example #13
 def _calc_attr_fuzzy_hist(self, G, attr, fuzzy_intervals):
     """
     Computes the fuzzy histogram for an attribute. Returns the
     number of elements in  each bin
     Args:
         G: nx.Graph
         attr: Attribute name
         fuzzy_intervals: list(list(a,b,c,d)) defining a traperzoidal fuzzy histogram
     Returns:
         list(elem_per_bin)
     """
     fi = fuzzy_intervals
     vals = nx.get_edge_attributes(G, attr) if nx.get_edge_attributes(G, attr) else nx.get_node_attributes(G, attr)
     vec = [0.0]*len(fi)
     for elem in vals:
         val = vals[elem]
         for i, interval in enumerate(fi):
             if (val >= interval[0]) and (val < interval[1]):
                 vec[i] += (val-interval[0]) / (interval[1]-interval[0])
             elif (val >= interval[1]) and (val <= interval[2]):
                 vec[i] += 1
             elif (val > interval[2]) and (val <= interval[3]):
                 vec[i] += (val-interval[3]) / (interval[2]-interval[3])
             else:
                 pass
     return vec
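A worked example of the trapezoidal membership used above (standalone re-computation, not the class method): with one fuzzy interval [a, b, c, d] = [0, 1, 2, 3], a value ramps up linearly on [0, 1), counts fully on [1, 2], and ramps down on (2, 3]:

def trapezoid_membership(val, interval):
    # same piecewise logic as in _calc_attr_fuzzy_hist
    a, b, c, d = interval
    if a <= val < b:
        return (val - a) / (b - a)   # rising edge
    elif b <= val <= c:
        return 1.0                   # plateau
    elif c < val <= d:
        return (val - d) / (c - d)   # falling edge
    return 0.0

for val in (0.5, 1.5, 2.5):
    print(val, trapezoid_membership(val, [0, 1, 2, 3]))
# 0.5 0.5 / 1.5 1.0 / 2.5 0.5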
def aspect_rat_distr_total():
    """
    Aspect ratio distribution.
    
    d1 : list of pathes for the break-ups dictionaries. Used here for the 
            estimation of the necessary number of pairs.
    d : list of pathes for the graphs
    ell : list of length scales
    pa : array of the pathes to the experiments folder (to save data)

    """

    for data, data1, path, el in zip(d, d1, pa, ell):
        l1 = list() #all branches
        l2 = list() #breaking branches
        u = 0
        for i,j in zip(data, data1):
            g = nx.read_gpickle(i)
            br = np.load(j).item()
            thick = nx.get_edge_attributes(g, 'thick')
            m_th = np.mean(np.asarray(thick.values()))
            length = nx.get_edge_attributes(g, 'length')
            m_le = np.mean(np.asarray(length.values()))
            number = nx.get_edge_attributes(g, 'number')
            num_dict = {}
            for k in number:
                for v in number[k]:
                    num_dict.setdefault(v, []).append(k)
            for k,l in zip(thick.values(), length.values()):
                l1.append(float(l)*m_th/(m_le * k))
            for k in br.keys():
                for l in num_dict[k]:
                    l2.append(float(length[l] * m_th)/(m_le * thick[l]))
            u+=1

        hist1, bins1 = np.histogram(l1, np.arange(0, max(l1)+1, 0.3))
        hist2, bins2 = np.histogram(l2, np.arange(0, max(l1)+1, 0.3))
        center1 = (bins1[:-1] + bins1[1:])/2
        center2 = (bins2[:-1] + bins2[1:])/2
    #    np.save('/home/yuliya/codes/lengths/' + path + '/total_arm_all_len.npy', center1)    
    #    np.save('/home/yuliya/codes/lengths/' + path + '/total_arm_break_len.npy', center2)
    #    np.save('/home/yuliya/codes/lengths/' + path + '/total_arm_all.npy', hist1/float(len(l1)))
    #    np.save('/home/yuliya/codes/lengths/' + path + '/total_arm_break.npy', hist2/float(len(l1)))
    plt.plot(center1, hist1/float(len(l1)), '.', color='red', label = 'all branches')
    plt.plot(center2, hist2/float(len(l1)), '.', color='blue', label = 'breaking branches')
    plt.legend()
    plt.xlabel('l / d', fontsize=18)
    plt.ylabel('P(l/d)', fontsize = 18)

    """
    Probability as a function of aspect ratio.
    """
    hist1, bins1 = np.histogram(l1, np.arange(0, max(l1)+1, 0.3))
    hist2, bins2 = np.histogram(l2, np.arange(0, max(l1)+1, 0.3))
    center1 = (bins1[:-1] + bins1[1:])/2
    plt.plot(center1, hist2/hist1.astype('float32'), '.', color='red', label = 'v34')
    plt.legend()
    plt.xlabel('d / l', fontsize = 18)
    plt.ylabel('Breakup Probability', fontsize = 18)
Example #15
 def populate_internal_edges(self):
     for block in self.blocks:
         weight = nx.get_edge_attributes(block, "weight")
         capacity = nx.get_edge_attributes(block, "capacity")
         for edge in block.edges():
             self.add_edge(edge[0], edge[1],
                           weight=weight[edge],
                           capacity=capacity[edge])
Example #16
 def test_clear_weights(self):
     # create new topology to avoid parameters pollution
     G = fnss.star_topology(12)
     fnss.set_weights_constant(G, 3, None)
     self.assertEqual(G.number_of_edges(),
                      len(nx.get_edge_attributes(G, 'weight')))
     fnss.clear_weights(G)
     self.assertEqual(0, len(nx.get_edge_attributes(G, 'weight')))
Example #17
def local_standard_weight_clique_rank_filtration(G,IR_weight_cutoff=None,verbose=False):
    
    if IR_weight_cutoff is None:
        IR_weight_cutoff=np.min(nx.get_edge_attributes(G,'weight').values());

    print('Preliminary scan of edge weights to define filtration steps...');
    edge_weights=nx.get_edge_attributes(G,'weight');
    weight_edge = {}
    for e,w in edge_weights.items():
        if w not in weight_edge:
            weight_edge[w] = []
        weight_edge[w].append(e);

    edge_weights=list(set(edge_weights.values()));
    edge_weights=sorted(edge_weights, reverse=True);
        
    # Define the clique dictionary
    Clique_dictionary={};
    print('Constructing filtration...');
    #Beginning of filtration construction
    G_supplementary=nx.Graph();

    #the max index will be used for the persistent homology computation 
    max_index=0; 
    current_nodes = []
    
    for index,thr in enumerate(edge_weights):
        new_nodes = [];
        if thr>=IR_weight_cutoff:
            G_supplementary.add_edges_from(weight_edge[thr]);
            [new_nodes.extend(edge) for edge in weight_edge[thr]];
            new_nodes = list(set(new_nodes));

            ## clique detection in partial graph, where the cliques are found only on the
            ## new nodes.
            relevant_nodes = []
            [relevant_nodes.extend(G_supplementary.neighbors(n)) for n in new_nodes];
            relevant_nodes = list(set(relevant_nodes));
            G_supp_supp = nx.subgraph(G_supplementary,relevant_nodes);
            cliques=nx.find_cliques_recursive(G_supp_supp);
            # adding cliques to the filtration
            for clique in cliques: #loop on new clique
                clique.sort();

                for k in range(1,len(clique)+1): #loop on clique dimension to find missed faces of simplex
                    for subclique in itertools.combinations(clique,k):
                        if str(list(subclique)) not in Clique_dictionary:
                            Clique_dictionary[str(list(subclique))]=[];
                            Clique_dictionary[str(list(subclique))].append(str(index));
                            Clique_dictionary[str(list(subclique))].append(str(thr))
                            max_index=index;

    print('Max filtration value: '+str(max_index));              
    print('Clique dictionary created.');
    return Clique_dictionary;
def IsUnassigned(G, node1, node2):
    """Return True if the edge (node1, node2) carries the value 'x' (unassigned)."""
    values = nx.get_edge_attributes(G, 'value')  # fetch once instead of per iteration
    for key, value in values.items():
        if key[0] == node1 and key[1] == node2 and value == 'x':
            return True
    return False
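A quick usage sketch (toy graph assumed):

import networkx as nx

G = nx.Graph()
G.add_edge('n1', 'n2', value='x')
G.add_edge('n2', 'n3', value=1)

print(IsUnassigned(G, 'n1', 'n2'))  # True
print(IsUnassigned(G, 'n2', 'n3'))  # False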
Example #19
 def populate_internal_edges(self):
     """
     Adds in each block in block list to the allocation graph. Blocks remain disjoint at this point.
     """
     for block in self.blocks:
         weight = nx.get_edge_attributes(block, "weight")  # {edge: edge attribute for edge in block.edges()}
         capacity = nx.get_edge_attributes(block, "capacity")
         for edge in block.edges():
             self.add_edge(edge[0], edge[1],
                           weight=weight[edge],
                           capacity=capacity[edge])
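The surrounding class is not shown in these examples; a minimal sketch of one plausible shape (class name and constructor are assumptions) that makes the method runnable:

import networkx as nx

class AllocationGraph(nx.Graph):
    def __init__(self, blocks):
        super(AllocationGraph, self).__init__()
        self.blocks = blocks

    def populate_internal_edges(self):
        for block in self.blocks:
            weight = nx.get_edge_attributes(block, "weight")
            capacity = nx.get_edge_attributes(block, "capacity")
            for edge in block.edges():
                self.add_edge(edge[0], edge[1],
                              weight=weight[edge],
                              capacity=capacity[edge])

block = nx.Graph()
block.add_edge('u', 'v', weight=1.0, capacity=5)
ag = AllocationGraph([block])
ag.populate_internal_edges()
print(ag['u']['v'])  # {'weight': 1.0, 'capacity': 5}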
Example #20
def read_network_from_file(edge_file, node_file):
    """"""

    '''
    Get edge list, thresholds and nodes from file

    Arguments:
        edge_file [csv delimited .dat file]
            format:
            source_node [str] , target_node [str] , weight [bool]

        node_file [csv delimited .dat file]
            format:
            node [str] , state [bool]

    Outputs:
        G [networkx Graph object]


    NOTES:
        weight = 1 if edge is inductive
        weight = 0 if edge is inhibiting

        state = 1 if expressed
        state = 0 if not expressed 

        weight and state are boolean, but MUST be 0 or 1.
        DO NOT USE 'True' and 'False'. 
        They will not be evaluated correctly.
    '''

    ## read edge list with its weight from edge_file
    G = nx.read_edgelist(edge_file, create_using=nx.DiGraph(), delimiter=',', nodetype=str, data=(('weight', int),))
    ## Raise error if weight is not a 0 or 1
    for edge in nx.get_edge_attributes(G,'weight'):
        weight = nx.get_edge_attributes(G,'weight')[edge]
        if weight != 0 and weight != 1:
            raise ValueError("All weights must be boolean, represented as either 0 or 1")

    ## read node list with its state from node_file
    for line in open(node_file, 'r').readlines():
        if line.startswith('#') or line.strip() == '':
            continue
        items = [x.strip() for x in line.rstrip().split(',')]
        # print items
        G.add_node(items[0], state=int(items[1]))
    ## Raise error if state is not a 0 or 1
    for node in nx.get_node_attributes(G,'state'):
        state = nx.get_node_attributes(G,'state')[node]
        if state != 0 and state != 1:
            raise ValueError("All states must be boolean, represented as either 0 or 1")

    return G
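A hedged usage sketch that writes the two documented input files first (file names and gene labels are illustrative):

import networkx as nx

with open('edges.dat', 'w') as f:
    f.write('geneA,geneB,1\ngeneB,geneC,0\n')
with open('nodes.dat', 'w') as f:
    f.write('geneA,1\ngeneB,0\ngeneC,1\n')

G = read_network_from_file('edges.dat', 'nodes.dat')
print(G.edges(data=True))                   # weight 1 = inductive, 0 = inhibiting
print(nx.get_node_attributes(G, 'state'))   # state 1 = expressed, 0 = not expressed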
def thick_distr_total():
    """
    Thickness distributions for all images. For all the branches (hist1) and the broken ones (hist2).
    """
    for data, data1, path, el in zip(d, d1, pa, ell):
        l1 = list() #all branches
        l2 = list() #breaking branches
        u = 0
        for i,j,le in zip(data, data1, el):
            g = nx.read_gpickle(i)
            br = np.load(j).item()
            thick = nx.get_edge_attributes(g, 'thick')
            th_mean = np.mean(np.asarray(thick.values()))
            number = nx.get_edge_attributes(g, 'number')
            num_dict = {}
            for k in number:
                for v in number[k]:
                    num_dict.setdefault(v, []).append(k)
            for k in thick.values():
                l1.append(k/float(th_mean))
            for k in br.keys():
                for l in num_dict[k]:
                    l2.append(thick[l]/float(th_mean))
            u+=1
        hist1, bins1 = np.histogram(l1, np.arange(0, max(l1)+1, 0.05))
        hist2, bins2 = np.histogram(l2, np.arange(0, max(l1)+1, 0.05))
        center1 = (bins1[:-1] + bins1[1:])/2
        center2 = (bins2[:-1] + bins2[1:])/2
        #write to file if necessary
#        np.save('/home/yuliya/codes/lengths/' + path + '/total_rm_all_len.npy', center1)    
#        np.save('/home/yuliya/codes/lengths/' + path + '/total_rm_break_len.npy', center2)
#        np.save('/home/yuliya/codes/lengths/' + path + '/total_rm_all.npy', hist1/float(len(l1)))
#        np.save('/home/yuliya/codes/lengths/' + path + '/total_rm_break.npy', hist2/float(len(l1)))
    plt.plot(center1, hist1/float(len(l1)), '.', color='red', label = 'all branches')
    plt.plot(center2, hist2/float(len(l1)), '.', color='blue', label = 'breaking branches')
    plt.legend()
    plt.xlabel('d/l_typ', fontsize=18)
    plt.ylabel('P(d/l_typ)', fontsize = 18)


    """
    Probability as a function of thickness.
    """
    hist1, bins1 = np.histogram(l1, np.arange(0, max(l1)+1, 0.02))
    hist2, bins2 = np.histogram(l2, np.arange(0, max(l1)+1, 0.02))
    center1 = (bins1[:-1] + bins1[1:])/2
    plt.plot(center1, hist2/hist1.astype('float32'), '.', color='blue', label = 'v35')
    plt.legend()
    plt.xlabel('d/l_typ', fontsize=18)
    plt.ylabel('P(d/l_typ)', fontsize = 18)
    
    f = curve_fit(fit_d, center1[10:20], (hist2/hist1.astype('float32'))[10:20], p0=(1,0.2))[0]
    plt.plot(center1, fit_d(center1, f[0], f[1]), '.', color='blue', label = 'fitting vsi05')
Example #22
def generate_sample_data(mC, numforecasts, tsteps, samplesize, fixfc0=True, useThisLPT=None, useThisIPT=None):

    gennet = copy.deepcopy(mC["netobj"].dinet)
    # for policy evaluation, there are tsteps dicts, one for every t#_capacity. each dict maps an edge to a
    #    singleton-sized list of capacities
    adpoutageattr = [[dict() for t in xrange(tsteps)] for nf in xrange(numforecasts)]
    adpftdmgattr = [[dict() for t in xrange(tsteps)] for nf in xrange(numforecasts)]
    # these attr dicts will have samplesize-sized capacity values
    mcoutageattr = [[dict() for t in xrange(tsteps)] for nf in xrange(numforecasts)]
    mcftdmgattr = [[dict() for t in xrange(tsteps)] for nf in xrange(numforecasts)]
##    # for benchmarking, there is only 1 dict, but it maps an edge to a samplesize list of capacities
##    lpoutageattr = [dict() for nf in xrange(numforecasts)]
##    ipoutageattr = [dict() for nf in xrange(numforecasts)]
##    # this gathers some stats on the samples generated, currently not in use
##    outagepcts = dict(zip(mC["temp_var_insts"],[[] for i in xrange(len(mC["temp_var_insts"] )) ] ))
##    # the SAA dicts may be at any time we want (typically either tsteps-1 or 0)
##    if useThisLPT == None:
##        useThisLPT = tsteps-1
##    if useThisIPT == None:
##        useThisIPT = tsteps-1

    # specifies if we want the same forecast at time 0 for every separate forecast timeline
    if fixfc0:
        fixedepictr = mC["mcsim"].generate_distributions(gennet, 1)[0]
        fixedattr = nx.get_edge_attributes(gennet, "t0_dmg_pct")

    for nf in xrange(numforecasts):
        mC["mcsim"].clear_scenarios(gennet)
        if fixfc0:
            mC["mcsim"].generate_distributions(gennet, tsteps, sfcastattr=fixedattr, sfcastepictr=fixedepictr)
        else:
            mC["mcsim"].generate_distributions(gennet, tsteps)
        # generate samplesize+1 to get policy eval set and the rest for SAA test
        mC["mcsim"].sample_scenarios(gennet, ksamples=1)
        for t in xrange(tsteps):
            adpftdmgattr[nf][t] = nx.get_edge_attributes(gennet, "t"+str(t)+"_dmg_pct")
            adpoutageattr[nf][t] = nx.get_edge_attributes(gennet, "t"+str(t)+"_capacity")
        mC["mcsim"].clear_scenarios(gennet)
        mC["mcsim"].sample_scenarios(gennet, ksamples=samplesize)
        for t in xrange(tsteps):
            mcftdmgattr[nf][t] = nx.get_edge_attributes(gennet, "t"+str(t)+"_dmg_pct")
            mcoutageattr[nf][t] = nx.get_edge_attributes(gennet, "t"+str(t)+"_capacity")
##        lpftdmgattr[nf] = nx.get_edge_attributes(gennet, "t"+str(useThisLPT)+"_dmg_pct")
##        lpoutageattr[nf] = nx.get_edge_attributes(gennet, "t"+str(useThisLPT)+"_capacity")
##        ipftdmgattr[nf] = nx.get_edge_attributes(gennet, "t"+str(useThisIPT)+"_dmg_pct")
##        ipoutageattr[nf] = nx.get_edge_attributes(gennet, "t"+str(useThisIPT)+"_capacity")

##        for k,v in lpoutageattr[nf].iteritems():
##            if k not in outagepcts:
##                outagepcts[k] = []
##            outagepcts[k].append(float(len([i for i in v if i == 0])) / len(v) )
# return remnants: lpftdmgattr, ipftdmgattr, adpoutageattr, lpoutageattr, ipoutageattr
    return adpftdmgattr, adpoutageattr, mcftdmgattr, mcoutageattr
Example #23
    def update_winner(self, curnode):
        """."""
        # find nearest unit and second nearest unit
        winner1, winner2 = self.determine_2closest_vertices(curnode)
        winnernode = winner1[0]
        winnernode2 = winner2[0]
        win_dist_from_node = winner1[1]

        errorvectors = nx.get_node_attributes(self.graph, 'error')

        error1 = errorvectors[winner1[0]]
        # update the new error
        newerror = error1 + win_dist_from_node**2
        self.graph.add_node(winnernode, error=newerror)

        # move the winner node towards current node
        self.pos = nx.get_node_attributes(self.graph, 'pos')
        newposition = self.get_new_position(self.pos[winnernode], curnode)
        self.graph.add_node(winnernode, pos=newposition)

        # now update all the neighbors distances and their ages
        neighbors = nx.all_neighbors(self.graph, winnernode)
        age_of_edges = nx.get_edge_attributes(self.graph, 'age')
        for n in neighbors:
            newposition = self.get_new_position_neighbors(self.pos[n], curnode)
            self.graph.add_node(n, pos=newposition)
            key = (int(winnernode), n)
            if key in age_of_edges:
                newage = age_of_edges[(int(winnernode), n)] + 1
            else:
                newage = age_of_edges[(n, int(winnernode))] + 1
            self.graph.add_edge(winnernode, n, age=newage)

        # if winner and 2nd winner are connected, reset their age to zero;
        # otherwise create the edge between them (add_edge covers both cases)
        self.graph.add_edge(winnernode, winnernode2, age=0)

        # if there are ages more than maximum allowed age, remove them
        age_of_edges = nx.get_edge_attributes(self.graph, 'age')
        for edge, age in iteritems(age_of_edges):

            if age > self.max_age:
                self.graph.remove_edge(edge[0], edge[1])

                # if it causes isolated vertix, remove that vertex as well

                for node in self.graph.nodes():
                    if not self.graph.neighbors(node):
                        self.graph.remove_node(node)
Example #24
    def overview(self):
        O = {}
        for name, subnet in self.SubNet.iteritems():
            for ldp in self.LDP:
                O.setdefault(name, {})[ldp] = nx.get_edge_attributes(subnet, ldp)
        return O
Example #25
def isolate(component, G):
	nodes_number = G.nodes()
	edges_number = G.edges()
	edge_type_data = nx.get_edge_attributes(G,'type')
	node_name_data = nx.get_node_attributes(G,'name')
	edge_name_data = nx.get_edge_attributes(G,'name')
	comp2 = component
	if component[0] == 'T':
		comp1 = component + '_ac'
		comp2 = component + '_dc'
	comp_num1 = comp_num2 = comp_num = 0
	found = False
	for i in range(0, len(nodes_number)):
		x = nodes_number[i]
		if component[0] != 'T' and node_name_data[x] == component:
			comp_num = x
			found = True
			break
		elif component[0] == 'T' and node_name_data[x] == comp2:
			comp_num2 = x
			comp_num1 = x + '_ac'
			found = True
			break
	if not found:
		print 'Error: ' + component + ' Not Found'
		exit()
	neighbor_idx = []
	if component[0] != 'T':
		for i in range(0, len(edges_number)):
			if edges_number[i][0] == comp_num:
				neighbor_idx.append(edges_number[i])
	else:
		for i in range(0, len(edges_number)):
			if edges_number[i][0] == comp_num1 or edges_number[i][0] == comp_num2:
				neighbor_idx.append(edges_number[i])
	contactor_tups = []
	for i in range(0, len(neighbor_idx)):
		idx = neighbor_idx[i]
		if edge_type_data[idx] == 'contactor':
			edge_name = edge_name_data[idx]
			contactor_tups.append(edge_name)
	if len(contactor_tups) == 0:
		clause = ''
		return clause
	clause = '(assert (=> (not ' + comp2 + ')'
	if len(contactor_tups) > 1:
		clause += ' (and '
	for i in range(0, len(contactor_tups)):
		clause += '(not ' + contactor_tups[i] + ')'
	if len(contactor_tups) > 1:
		clause += ')))\n'   
	elif len(contactor_tups) == 1:
		clause += '))\n'
	return clause
def length_distr_dyn():
    """
    Length distributions evolving in time.
    
    d1 : array of lists of pathes for the break-ups dictionaries.
    d : array of lists of pathes for the graphs
    ell : array of lists of length scales
    pa : list of experiment names (pathes)
    """
    av = [20, 12, 12, 13] #Number of images for avereging for each experiment. Here 
                            #we use 20 images for one plot for v34 experiment etc.

    for data, data1, path, el, a in zip(d, d1, pa, ell, av):
        l1 = list() #all branches
        l2 = list() #breaking branches
        u = 0
        count=0
        co=0
        for i,j,le in zip(data, data1, el):
                g = nx.read_gpickle(i)
                br = np.load(j).item()
                length = nx.get_edge_attributes(g, 'length')
                l_mean = np.mean(np.asarray(length.values()))
                number = nx.get_edge_attributes(g, 'number')
                num_dict = {}
                for k in number:
                    for v in number[k]:
                        num_dict.setdefault(v, []).append(k)
                
                for k in length.values():
                    l1.append(k/float(l_mean))
                for k in br.keys():
                    for l in num_dict[k]:
                        l2.append(length[l]/float(l_mean))
                u+=1
                if count>a:
                    count=-1
                    hist1, bins1 = np.histogram(l1, np.arange(0, max(l1)+1, 0.06))
                    hist2, bins2 = np.histogram(l2, np.arange(0, max(l1)+1, 0.06))
                    center1 = (bins1[:-1] + bins1[1:])/2
                    center2 = (bins2[:-1] + bins2[1:])/2
                    
                    #write to file if necessary
                    np.save('/home/yuliya/codes/lengths/' + path + '/dyn_lm_all_len' + str(co)+'.npy', center1)    
                    np.save('/home/yuliya/codes/lengths/' + path + '/dyn_lm_break_len' + str(co)+'.npy', center2)
                    np.save('/home/yuliya/codes/lengths/' + path + '/dyn_lm_all' + str(co)+'.npy', hist1/float(len(l1)))
                    np.save('/home/yuliya/codes/lengths/' + path + '/dyn_lm_break' + str(co)+'.npy', hist2/float(len(l1)))
                    l1 = list()
                    l2 = list()
                    co+=1
                count+=1
Example #27
    def test_decorating_edges(self):
        """
        Test that decorating actually sets the proper value.
        """
        new_graph = GRAPH.copy()
        properties = {
           "dummy": dict((e, "dummy") for e in new_graph.edges())
        }
        pydevDAG.Decorator.decorate_edges(new_graph, properties)
        values = networkx.get_edge_attributes(new_graph, "dummy").values()
        assert values and all(e == "dummy" for e in values)

        others = networkx.get_edge_attributes(GRAPH, "dummy").values()
        assert not others
Example #28
def strength_rich_club_coefficient(G,ranking,thr,weight_name='weight',normalized=False):
	'''
	Calculates the rich-club coefficient for the club defined by the given
	ranking and threshold, optionally normalized against a randomized
	version of the network.
	
	This does not work in general and needs to be improved:
	it currently works only for strength-preserving randomization and strength ranking.
	'''
	club_nodes=[];
	for n in G.nodes():
		if ranking[n]>=thr:
			club_nodes.append(n);
	
	Club=nx.Graph(G.subgraph(club_nodes));
	W_club=float(np.sum(nx.get_edge_attributes(Club,weight_name).values()));
#	print Club.number_of_nodes()
	Extended_club=nx.Graph();
	for n in Club.nodes():
		Extended_club.add_node(n);
		for nn in G.neighbors(n):
			Extended_club.add_edge(n,nn,weight=G[n][nn][weight_name]);

	print Extended_club.number_of_nodes()
	W_ext=float(np.sum(nx.get_edge_attributes(Extended_club,weight_name).values()));
	
	if normalized==True:
		RandomGraph=Phom.strength_preserving_reshuffling(G,weight_name).to_undirected();
		club_nodes=[];
		for n in RandomGraph.nodes():
			if ranking[n]>=thr:
				club_nodes.append(n);
		
		Club=nx.Graph(RandomGraph.subgraph(club_nodes));
		W_club_r=float(np.sum(nx.get_edge_attributes(Club,weight_name).values()));
#		print Club.number_of_nodes()
		Extended_club=nx.Graph();
		for n in Club.nodes():
			Extended_club.add_node(n);
			for nn in RandomGraph.neighbors(n):
				Extended_club.add_edge(n,nn,weight=RandomGraph[n][nn][weight_name]);
	
		print Extended_club.number_of_edges()
		W_ext_r=float(np.sum(nx.get_edge_attributes(Extended_club,weight_name).values()));
		return (W_club/W_ext)/(W_club_r/W_ext_r);
	else:
		return W_club/W_ext;
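A hedged, un-normalized usage sketch (assumes the Python 2 / networkx 1.x environment of the function above; the ranking is node strength, here equal to degree because all weights are 1):

import networkx as nx

G = nx.karate_club_graph()
for u, v in G.edges():
    G[u][v]['weight'] = 1.0

ranking = G.degree(weight='weight')  # networkx 1.x returns a node->strength dict
phi = strength_rich_club_coefficient(G, ranking, thr=10)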
Example #29
def form_big_spanning_tree(clouds):
  original_g = nx.Graph()

  if not clouds: return None
  for cloud in clouds:
    if not cloud.sites: return None
    for site in cloud.sites:
      if not site.switches: return None
      original_g.add_edge(cloud, site.of_domain,weight=site.switches[0].dpid, site=site)

  spt = nx.minimum_spanning_tree(original_g)
  spt_att = nx.get_edge_attributes(spt, 'site')
  original_g_att = nx.get_edge_attributes(original_g, 'site')

  return set(original_g_att.itervalues()) - set(spt_att.itervalues())
 def separate_conditional(klass, H, conditional_links=tuple()):
     labels = nx.get_edge_attributes(H, 'label')
     weights = nx.get_edge_attributes(H, 'weight')
     Hstat = nx.DiGraph()
     Hcond = nx.DiGraph()
     Hstat.add_nodes_from(H.nodes())
     Hcond.add_nodes_from(H.nodes())
     for edgeref in H.edges():
         edge = labels[edgeref]
         weight = weights[edgeref]
         if edge in conditional_links:
             Hcond.add_edge(*edgeref, label=edge, weight=weight)
         else:
             Hstat.add_edge(*edgeref, label=edge, weight=weight)
     return Hstat, Hcond
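The method takes the class as its first argument, so it is presumably wrapped as a classmethod in its original class; a hedged sketch calling it as a plain function with a placeholder for klass:

import networkx as nx

H = nx.DiGraph()
H.add_edge('a', 'b', label='activates', weight=1.0)
H.add_edge('b', 'c', label='inhibits', weight=0.5)

Hstat, Hcond = separate_conditional(None, H, conditional_links=('inhibits',))
print(Hstat.edges(data=True))  # keeps the 'activates' edge
print(Hcond.edges(data=True))  # keeps the 'inhibits' edge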
def cluster_subgraph_for_clustering(Graph,option='accumulate',
                             connected='yes',retain_clus='yes',
                             ):
    """
    option='accumulate' will accumulate the nodes and edges of the graph year on year
    option='separate' will only keep the nodes year on year, edges from previous years will not be retained
    connected='yes' will only use the largest connected component for each year
    connected='no' will use all available nodes for each year
    retain_clus='yes' will initialize the louvain calculation such that the previous year's cluster is used to initialize this year's cluster
    retain_clus='no' will use a random initialization for the louvain calculation
    res_louv is used to set the resolution parameter for the louvain clustering calculation
    wei is used for the edge weight in the louvain clustering calculation
    """
    
    # get node and edge year
    node_yr=nx.get_node_attributes(Graph,'Year')
    edge_yr=nx.get_edge_attributes(Graph,'Year')
    
    # dictionaries to filter nodes and edges by year
    n_year=int(max(node_yr.values())-min(node_yr.values()))+1
    min_year=min(node_yr.values())
    list_dict_node_year=[{} for i in range(n_year)]
    list_dict_edge_year=[{} for i in range(n_year)]
    for i in range(n_year):
        if option=='accumulate':
            list_dict_edge_year[i]={k:v for (k,v) in edge_yr.items() if  v<=min_year+i}
            list_dict_node_year[i]={k:v for (k,v) in node_yr.items() if  v<=min_year+i}
        elif option=='separate':
            list_dict_edge_year[i]={k:v for (k,v) in edge_yr.items() if  v==min_year+i}
            list_dict_node_year[i]={k:v for (k,v) in node_yr.items() if  v<=min_year+i}       
        
        else:
            raise Exception("wrong keyword for option. use accumulate or separate only")
     
    print('Input Graph has',Graph.number_of_nodes(),'nodes and',Graph.number_of_edges(),'edges')  
    # 'accumulate' and 'separate' only differ in the year filters built above,
    # so a single loop covers both options here
    H=[nx.Graph() for i in range(n_year)]
    for i in range(n_year):
        if connected=='no':
             H[i]=nx.subgraph(Graph,list(list_dict_node_year[i].keys()))
        elif connected=='yes':
             H[i]=nx.subgraph(Graph,list(list_dict_node_year[i].keys()))
             H[i]=max(nx.connected_component_subgraphs(H[i]), key=len)
        else:
            raise Exception("wrong keyword for connected. use yes or no only")
        print('Year:',str(i+min_year),'--',H[i].number_of_nodes(),'nodes --',H[i].number_of_edges(),'edges')
    del node_yr, edge_yr, n_year, min_year, list_dict_edge_year, list_dict_node_year
    gc.collect()
    return H
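A hedged usage sketch (assumes module-level import networkx as nx and import gc, which the function body requires; connected='no' avoids nx.connected_component_subgraphs, which newer networkx versions removed):

import gc
import networkx as nx

Graph = nx.Graph()
Graph.add_node('p1', Year=2000)
Graph.add_node('p2', Year=2001)
Graph.add_edge('p1', 'p2', Year=2001)

H = cluster_subgraph_for_clustering(Graph, option='accumulate', connected='no')
# H[0] is the year-2000 subgraph, H[1] the accumulated 2000-2001 subgraph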
Example #32
def nxp(gen='C2H4.gen', ffield='ffield.json', nn='T', threshold=0.1):
    atoms = read(gen)
    atom_name = atoms.get_chemical_symbols()
    nn_ = True if nn == 'T' else False

    ir = IRFF_NP(atoms=atoms, libfile=ffield, rcut=None, nn=nn_)
    # ir.get_pot_energy(atoms)
    # ir.logout()
    ir.calculate_Delta(atoms)
    natom = ir.natom

    g = nx.Graph()
    g.clear()
    color = {'C': 'grey', 'H': 'yellow', 'O': 'red', 'N': 'blue'}
    size = {'C': 400, 'H': 150, 'O': 300, 'N': 300}
    nodeColor = []
    nodeSize = []
    labels0, labels1 = {}, {}
    for i in range(natom):
        c = color[atom_name[i]]
        s = size[atom_name[i]]
        nodeColor.append(c)
        nodeSize.append(s)
        g.add_node(atom_name[i] + str(i))
        labels0[atom_name[i] +
                str(i)] = atom_name[i] + str(i) + ':%4.3f' % ir.Deltap[i]
        labels1[atom_name[i] +
                str(i)] = atom_name[i] + str(i) + ':%4.3f' % ir.Delta[i]

    edgew = []
    for i in range(natom - 1):
        for j in range(i + 1, natom):
            if ir.r[i][j] < ir.r_cut[i][j]:
                if ir.bop[i][j] >= threshold:
                    g.add_edge(atom_name[i] + str(i),
                               atom_name[j] + str(j),
                               BO0='%5.4f' % ir.bop[i][j])
                    edgew.append(2.0 * ir.bop[i][j])

    pos = {}  # pos = nx.spring_layout(g)
    for i, a in enumerate(atoms):
        pos[atom_name[i] + str(i)] = [a.x, a.y]

    nx.draw(g,
            pos,
            node_color=nodeColor,
            node_size=nodeSize,
            width=edgew,
            with_labels=False)
    edge_labels = nx.get_edge_attributes(g, 'BO0')
    nx.draw_networkx_edge_labels(g, pos, edge_labels=edge_labels, font_size=8)
    # nx.draw_networkx_labels(g,pos,labels=labels0,font_size=8)

    plt.savefig('%s_bo0.eps' % gen.split('.')[0])
    plt.close()

    g = nx.Graph()
    g.clear()
    for i in range(natom):
        g.add_node(atom_name[i] + str(i))

    edgew = []
    for i in range(natom - 1):
        for j in range(i + 1, natom):
            if ir.r[i][j] < ir.r_cut[i][j]:
                if ir.bo0[i][j] >= threshold:
                    g.add_edge(atom_name[i] + str(i),
                               atom_name[j] + str(j),
                               BO1='%5.4f' % ir.bo0[i][j])
                    edgew.append(2.0 * ir.bo0[i][j])

    nx.draw(g,
            pos,
            node_color=nodeColor,
            node_size=nodeSize,
            width=edgew,
            with_labels=False)
    edge_labels = nx.get_edge_attributes(g, 'BO1')
    nx.draw_networkx_edge_labels(g, pos, edge_labels=edge_labels, font_size=8)
    # nx.draw_networkx_labels(g,pos,labels=labels1,font_size=8)

    plt.savefig('%s_bo1.eps' % gen.split('.')[0])
    plt.close()
    ir.close()
 def getWeymouthConst(self, arc):
     weymouth = nx.get_edge_attributes(self.G, 'weymouth')
     return float(weymouth[arc])
 def getArcLength(self, arc):
     length = nx.get_edge_attributes(self.G, 'length')
     return float(length[arc])
 def getArcName(self, arc):
     name = nx.get_edge_attributes(self.G, 'name')
     return name[arc]
Example #36
def algo(G1, COST0,met=False,**kwds):
    G=nx.Graph()
    K,A,B,hmax,hmin=COST0[0],COST0[1],COST0[2],COST0[3],COST0[4]
    if met == True:
        G=nx.Graph()
        j=nx.erdos_renyi_graph(G1,1)
        for i in j.edges():
            r=list(i)
            e=np.random.randint(0,151)  # np.random.random_integers is deprecated; upper bound 150 inclusive
            G.add_edge(r[0],r[1],weight=e)
        print ('edges, nodes','=',nx.number_of_edges(G),nx.number_of_nodes(G))
    else:
        G=G1;
    print ('ENTRAN_GRA(e,n)=',len(G.edges()),len(G.nodes()))
    InG,G,no_in=Grafo_inicial(G,hmax)
    print ('no_in =',no_in)
    """nx.draw(InG)
    plt.show()

    no_in = [1,8]
    for i in no_in:
        try:
          print i,InG.neighbors(i)
        except nx.exception.NetworkXError:
            pass"""
    #print G.neighbors(31)

    COVERh=nx.Graph()
    COVERh.add_nodes_from(G)
    COMPh=nx.number_connected_components(COVERh)

    inf=m.factorial(10)  # large sentinel value used as "infinity"
    u=nx.get_edge_attributes(G,'weight')
    print ('edges, nodes','=',nx.number_of_edges(G),nx.number_of_nodes(G))
    def c(h):
         if h <= 5:
            costo=K*h*0.1
         elif h <= hmin and h > 5:
            costo=K
         elif h<(hmax) and h > hmin:
            costo=(A*h)+B
         else:
            costo=inf
         return costo
    def c_1(ci):
         if ci > (c(hmin)):
             l=np.ceil((ci-B)/A)
         elif ci>(c(5)) and ci <=(c(hmin)):
             l=hmin
         else:
             l=ci/(K*0.1)
         return l

    def beta(n1,n2,d):
         try:
             aux=(2.0*(u[(n1,n2)]))
         except (IndexError, KeyError):
             aux=(2.0*(u[(n2,n1)]))
         HI=h[n1]+h[n2]+d
         if HI<aux:
             B =aux-HI
         else:
             B=0
         return B

    def nbrfun(n,d):
         p=[]
         r=set()
         for i in range(0,nx.number_connected_components(COVERh)):
             subgraph=(list(nx.connected_component_subgraphs(COVERh)))
             p.append(subgraph[i])
             for o in p[i].nodes():
                 if o==n:
                     r=set(G.neighbors(n)) - set(p[i].neighbors(n))

         r=list(r)
         L=[]
         l=[]
         for i in range(0,len(r)):
             beTa=beta(n,r[i],d)
             G.add_node(r[i],hinc=beTa)
             G.add_node(r[i],cinc=(c(beTa + h[r[i]])- c(h[r[i]])))
             l.append((r[i],(c(beTa+ h[r[i]])-c(h[r[i]]))))
         l=dict(l)

         va=l.values()
         vk=list(l.keys())
         Li=[]
         va = list(va)
         while len(va)>0:
             mi=np.min(va)
             #retirar valor minimo
             d=va.index(mi)
             L.append(vk[d])
             vk.remove(vk[d])
             va.remove(va[d])
         va=[]
         for i in range(0,nx.number_connected_components(COVERh)):
             ri=list(set(L) & set(p[i].nodes()))
             u=inf
             for j in range(0,len(ri)):
                 if L.index(ri[j])< u:
                     try:
                         L.remove(L[u])
                         break
                     except (IndexError):
                         pass
                     u = L.index(ri[j])
         return r, L

    def START_TC_ALGO( h, n, dirac):
         G.add_node(n,hinc=(dirac))
         G.add_node(n,cinc=c(h[n]+dirac)-c(h[n]))
         nbr, L= nbrfun(n,dirac)
         rbest, kbest=inf,0
         for k in range(1,len(L)+1):
             sum=0
             for i in range(0,k):
                 sum= (G.node[L[i]]['cinc'])+sum
             rtemp= ((G.node[n]['cinc']) + sum)/(k)
             if rtemp <  rbest:
                 kbest,rbest=k,rtemp
         L=L[:kbest]
         ri=list(set(G.nodes()) - set(L))
         for i in ri:
             if i==n:
                pass
             else:
                G.add_node(i,hinc=0)
         incr= nx.get_node_attributes(G,'hinc')
         return rbest,incr, L

    for n in G.nodes():
         G.add_node(n,hi=0)
    for n in InG.nodes():
         InG.add_node(n,hi=0)

    while COMPh > 1:
         rbest=inf
         for n in G.nodes():
             h=nx.get_node_attributes(G,'hi')
             chn=c(h[n])
             i,e=1,(c_1(chn+ 1))
             H=[h[n],hmax]
             while e < hmax:
                 H.append(e)
                 e= c_1(chn + (i))
                 i=i+1
             for i in range(0, len(H)):
                 a=(H[i]-h[n])
                 if a<0:a=0
                 rtmp,incrtmp, mayl=START_TC_ALGO(h,n,a)
                 if (rtmp < rbest):
                     nodo,rbest,incrbest, mayL=n,rtmp,incrtmp, mayl
         #print incrbest,nodo,mayL,rbest
         for i in incrbest.keys():
             for j in range (0,len(mayL)):
                 if (incrbest[i] >= 0) and (i==mayL[j]):
                     COVERh.add_edge(nodo,i)
         for i in G.nodes():
             G.add_node(i,hi=h[i]+incrbest[i])
         COMPh=nx.number_connected_components(COVERh)
    costo =0
    for n in G.nodes():
        costo=costo+(c(G.node[n]['hi']))
    T=nx.minimum_spanning_tree(G)
    u1 =nx.get_edge_attributes(T,'weight')
    for n  in T.nodes():
        T.node[n]['hi']=0
    for n in T.edges():
        r=list(n)
        aux=T.node[r[0]]['hi']+T.node[r[1]]['hi']
        aux1=(2* u1[n])
        if aux< aux1:
            T.node[r[0]]['hi']=T.node[r[0]]['hi']+((aux1-aux)/2.0)
            T.node[r[1]]['hi']=T.node[r[1]]['hi']+((aux1-aux)/2.0)
    costo1 =0
    for n in T.nodes():
        costo1=costo1+(c(T.node[n]['hi']))
    InGr=nx.Graph()
    InGr.add_nodes_from(InG.nodes())
    InGr.add_edges_from(COVERh.edges())
    uw =nx.get_edge_attributes(COVERh,'weight')
    nw =nx.get_node_attributes(G,'hi')
    u1 =nx.get_edge_attributes(T,'weight')
    nx.set_node_attributes(InGr,nw,'hi')
    print ('COVERh','=',nx.number_of_edges(COVERh),nx.number_of_nodes(COVERh))
    print ('T','=',nx.number_of_edges(T),nx.number_of_nodes(T))
    print ('COVERh','=',(nx.get_node_attributes(G,'hi')))
    print ('T','=',(nx.get_node_attributes(T,'hi')))
    print ('COVERh ed=',COVERh.edges())
    print ('T ed=',T.edges())
    return costo,costo1, InG,InGr, T
Example #37
def spatial_points_merge(graph: GeoGraph,
                         points_gdf: gpd.GeoDataFrame,
                         inplace=False,
                         merge_direction="both",
                         node_filter=no_filter,
                         edge_filter=no_filter,
                         intersection_nodes_attr=None,
                         discretization_tol=None) -> GeoGraph:
    """Merge given points as node with a spatial merge. Points are projected on the closest edge of the
    graph and an intersection node is added if necessary. If two nodes a given point and a node have the same name, with
    equal coordinates, then the node is considered as already in the graph. A discretization tolerance is used for
    indexing edges lines. New nodes created from the geodataframe have attributes described by other columns (except if
    an attribute value is `nan`). When a point is projected on an edge, this edge is removed and replaced by two others
    that connect the extremities to the intersection node. A reference to the original edge is kept on theses new edges
    with the attribute ``settings.ORIGINAL_EDGE_KEY``. The original edge is the oldest parent of the new edge, to have
    the direct parent, the attribute has to be cleant first.

    Parameters
    ----------
    graph : GeoGraph, GeoDiGraph, GeoMultiGraph or GeoMultiDiGraph
        A GeoGraph or derived class describing a spatial graph.
    points_gdf : gpd.GeoDataFrame
        Points describing the new nodes to add.
    inplace : bool
        If True, do operation inplace and return None. (Default value = False)
    merge_direction : str
        For directed graphs only:
        
        * ``'both'``: 2 edges are added: graph -> new node and new node -> graph
        * ``'in'``: 1 edge is added: new_node -> graph
        * ``'out'``: 1 edge is added: graph -> new_node (Default value = "both")
    node_filter :
        A node filter (lambda) to exclude nodes (and thereby all incident edges) from the projection
        operation. (Default value = no_filter)
    edge_filter :
        An edge filter (lambda) to exclude edges on which the projection will not take place. (Default value = no_filter)
    intersection_nodes_attr : dict
        A dictionary of attributes (constant for all added intersection nodes). (Default value = None)
    discretization_tol : float
        A custom discretization tolerance for lines. If None, tolerance with the right order of magnitude is
        pre-defined for some CRS. For more details, see ``gnx.get_default_discretization_tolerance`` method.
        (Default value = None)

    Returns
    -------
    None or GeoGraph
        If not inplace, the created graph.


    See Also
    --------
    spatial_graph_merge, gnx.get_default_discretization_tolerance

    """
    if not inplace:
        graph = graph.copy()
    # 1. Find closest edge for each point
    graph_view = nx.graphviews.subgraph_view(graph,
                                             filter_node=node_filter,
                                             filter_edge=edge_filter)
    edges_as_lines = nx.get_edge_attributes(graph_view,
                                            graph.edges_geometry_key)
    if len(edges_as_lines) == 0:
        raise ValueError(
            "No edge geometry has been found in the given merging edges, at least one edge geometry is"
            " required for a merge operation")
    points = points_gdf.geometry
    points_coords = np.array([[p.x, p.y] for p in points])
    if discretization_tol is None:
        discretization_tol = get_default_discretization_tolerance(graph.crs)
    lines_indexes = get_closest_line_from_points(points_coords,
                                                 edges_as_lines.values(),
                                                 discretization_tol)
    edges_to_split = defaultdict(dict)
    # Add node, intersection node and edge (node, intersection node)
    for p, p_index, point in zip(range(len(points_gdf)), points_gdf.index,
                                 points):
        # 1.1 Add given node
        if p_index in graph.nodes:
            if coordinates_almost_equal([point.x, point.y],
                                        graph.get_node_coordinates(p_index)):
                continue
            else:
                node_name = get_new_node_unique_name(graph, p_index)
        else:
            node_name = p_index
        node_info = {
            c: points_gdf.at[p_index, c]
            for c in points_gdf.columns
            if not is_nan(points_gdf.at[p_index, c])
        }
        node_info[graph.nodes_geometry_key] = point
        graph.add_node(node_name, **node_info)
        # 1.2 Add projected node if necessary
        closest_edge_name = list(edges_as_lines.keys())[lines_indexes[p]]
        closest_line = edges_as_lines[closest_edge_name]
        closest_line_length = closest_line.length
        intersection_distance_on_line = closest_line.project(point)
        # if the intersection point is on the edge
        if 0 < intersection_distance_on_line < closest_line_length:
            projected_point = closest_line.interpolate(
                intersection_distance_on_line)
            intersection_node_name = get_new_node_unique_name(
                graph, settings.INTERSECTION_PREFIX + str(p_index))
            intersection_node_info = {
                graph.nodes_geometry_key: projected_point
            }
            if intersection_nodes_attr is not None:
                intersection_node_info.update(intersection_nodes_attr)
            graph.add_node(intersection_node_name, **intersection_node_info)
            # Store line to modify
            edges_to_split[closest_edge_name][
                intersection_node_name] = intersection_distance_on_line
        else:  # if the intersection point is one of the two edge extremities
            first_node = closest_edge_name[0]
            first_node_point = graph.nodes[first_node][
                graph.nodes_geometry_key]
            second_node = closest_edge_name[1]
            second_node_point = graph.nodes[second_node][
                graph.nodes_geometry_key]
            distance_to_first_extremity = euclidian_distance(
                point, first_node_point)
            distance_to_second_extremity = euclidian_distance(
                point, second_node_point)
            if distance_to_first_extremity < distance_to_second_extremity:
                intersection_node_name = first_node
            else:
                intersection_node_name = second_node
        # 1.3 Add edge : node <-> intersection_node
        in_edge_data = {
            graph.edges_geometry_key:
            LineString([
                graph.get_node_coordinates(node_name),
                graph.get_node_coordinates(intersection_node_name)
            ])
        }
        if graph.is_directed():
            out_edge_data = {
                graph.edges_geometry_key:
                LineString([
                    graph.get_node_coordinates(intersection_node_name),
                    graph.get_node_coordinates(node_name)
                ])
            }
            if merge_direction == "both":
                graph.add_edge(node_name, intersection_node_name,
                               **in_edge_data)
                graph.add_edge(intersection_node_name, node_name,
                               **out_edge_data)
            elif merge_direction == "in":
                graph.add_edge(node_name, intersection_node_name,
                               **in_edge_data)
            else:  # "out"
                graph.add_edge(intersection_node_name, node_name,
                               **out_edge_data)
        else:
            graph.add_edge(node_name, intersection_node_name, **in_edge_data)
    # 2. Split edges where a node has been projected
    for e in edges_to_split:
        intersection_nodes = edges_to_split[e]
        if len(intersection_nodes) > 0:
            initial_line = edges_as_lines[e]
            # 2.1 remove initial edge and keep in memory the original edge
            original_edge_data = {
                settings.ORIGINAL_EDGE_KEY:
                graph.edges[e].get(settings.ORIGINAL_EDGE_KEY, e)
            }
            if graph.has_edge(*e):
                graph.remove_edge(*e)
            # 2.2 cut the initial line
            sorted_intersection_nodes = sorted(
                intersection_nodes.keys(), key=lambda n: intersection_nodes[n])
            distances_on_initial_line = [
                intersection_nodes[n] for n in sorted_intersection_nodes
            ]
            split_lines = []
            cut_lines = split_line(initial_line, distances_on_initial_line[0])
            split_lines.append(cut_lines[0])
            for i in range(len(sorted_intersection_nodes) - 1):
                cut_lines = split_line(
                    cut_lines[1], distances_on_initial_line[i + 1] -
                    distances_on_initial_line[i])
                split_lines.append(cut_lines[0])
            split_lines.append(cut_lines[1])
            # 2.3 add intermediary edges
            oriented_edge = get_line_ordered_edge(graph, e, initial_line)
            first_edge_data = {
                graph.edges_geometry_key: split_lines[0],
                **original_edge_data
            }
            graph.add_edge(oriented_edge[0], sorted_intersection_nodes[0],
                           **first_edge_data)
            last_edge_data = {
                graph.edges_geometry_key: split_lines[-1],
                **original_edge_data
            }
            graph.add_edge(sorted_intersection_nodes[-1], oriented_edge[1],
                           **last_edge_data)
            for i in range(len(sorted_intersection_nodes) - 1):
                edge_data = {
                    graph.edges_geometry_key: split_lines[i + 1],
                    **original_edge_data
                }
                graph.add_edge(sorted_intersection_nodes[i],
                               sorted_intersection_nodes[i + 1], **edge_data)
    if not inplace:
        return graph
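
# A minimal, hypothetical usage sketch for spatial_points_merge. It assumes
# geonetworkx (gnx), geopandas and shapely are available, that GeoGraph uses
# the default node/edge geometry keys, and that graph and points share a CRS;
# all names below are illustrative.
import geopandas as gpd
import geonetworkx as gnx
from shapely.geometry import LineString, Point

g_demo = gnx.GeoGraph(crs="EPSG:4326")
g_demo.add_node(1, geometry=Point(0, 0))
g_demo.add_node(2, geometry=Point(1, 0))
g_demo.add_edge(1, 2, geometry=LineString([(0, 0), (1, 0)]))
pts = gpd.GeoDataFrame({"geometry": [Point(0.5, 0.2)]}, crs="EPSG:4326")
merged = spatial_points_merge(g_demo, pts)  # point 0 projects onto edge (1, 2)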
G.add_edge(7, 10, length = 40)

G.add_edge(9, 1, length = 50)
G.add_edge(9, 11, length = 30)
G.add_edge(9, 12, length = 40)

G.add_edge(10, 12, length = 30)
G.add_edge(10, 1, length = 40)

G.add_edge(11, 1, length = 30)
G.add_edge(11, 2, length = 40)

G.add_edge(12, 2, length = 30)

G.add_edge(1, 2, length = 30)

path = nx.shortest_path(G, 7, 2, weight='length')
path_length = nx.shortest_path_length(G, 7, 2, weight='length')
print(path)
print(path_length)

# draw the graph
edge_labels = nx.get_edge_attributes(G,'length')
pos = nx.spring_layout(G)
nx.draw_networkx_edge_labels(G, pos, edge_labels = edge_labels)
nx.draw_networkx(G, pos)
plt.show()



Beispiel #39
def plotMCWandGraph(X,
                    fs,
                    W,
                    remove_self_loops=True,
                    title=None,
                    save_path=None):
    """Plots the corresponding weighted undirected or directed graph topology
    of the given adjacency matrix together with the corresponding multi-channel window

    Parameters
    ----------
    X : numpy array
        Multi-channel window of shape (num_channels, num_samples)
    fs : float
        Sampling frequency
    W : numpy array
        Adjacency matrix with shape (L, L). Currently, this function only
        supports L=8
    remove_self_loops : bool, optional
        if True, remove self loops in the graph, by default True
    title : str, optional
        Title of the plotted graph topology, by default None
    save_path : str, optional
        If not None, the figure is saved to this path instead of being shown
        (undirected case only), by default None
    """
    fig = plt.figure(facecolor="w", figsize=(12, 4))
    ax1 = fig.add_subplot(121)
    ax = fig.add_subplot(122)

    ## plot the multi-channel signals
    num_channels = X.shape[0]
    num_samples = X.shape[1]
    max_val = np.max(X)
    node_colors = plt.cm.rainbow(np.linspace(0, 1, num_channels))
    t = np.arange(0, num_samples, 1) / fs
    ytick_pos = []
    for i, c in zip(range(num_channels), node_colors):
        y = X[i] + max_val * 1.5 * i
        ax1.plot(t, y, c=c)
        ytick_pos.append(np.min(y))
    ax1.set_yticks(ytick_pos)
    ax1.set_yticklabels(
        ["CH1", "CH2", "CH3", "CH4", "CH5", "CH6", "CH7", "CH8"])
    ax1.set_xlabel("time (s)")
    if title is not None:
        ax1.set_title("Multi-Channel Window - {}".format(title))
    else:
        ax1.set_title("Multi-Channel Window")
    ax1.grid()

    ## plot the graph
    if np.allclose(W, W.T):
        graphIsDirected = False
    else:
        graphIsDirected = True
    if remove_self_loops:
        np.fill_diagonal(W, 0)
    m = W.shape[0]
    pos = getEllipticalCoordinates()

    if graphIsDirected:
        ## plot directed weighted graph
        G = nx.DiGraph()
        for i in range(0, m):
            for j in range(0, m):
                G.add_edge(i + 1, j + 1, weight=W[i, j])

        weights = np.array(list(nx.get_edge_attributes(G, 'weight').values()))
        weightColors = (weights - min(weights)) / (max(weights) - min(weights))

        # plot the graph
        # Node labels are drawn separately below via draw_networkx_labels.
        nx.draw_networkx_nodes(G,
                               pos,
                               node_color=node_colors,
                               ax=ax)
        nx.draw_networkx_edges(G,
                               pos,
                               arrows=True,
                               arrowsize=20,
                               arrowstyle='-|>',
                               width=2,
                               edge_color=weightColors,
                               edge_cmap=plt.cm.Greys,
                               edge_vmin=min(weightColors),
                               edge_vmax=max(weightColors),
                               connectionstyle='arc3, rad = 0.1',
                               ax=ax)
        nx.draw_networkx_labels(G, pos)

        # add colorbar
        norm = mpl.colors.Normalize(vmin=min(weights), vmax=max(weights))
        weightMap = plt.cm.ScalarMappable(cmap=plt.cm.Greys, norm=norm)
        plt.axis('on')
        plt.colorbar(weightMap)
        if title is not None:
            plt.title(title)
        plt.show()

    else:
        ## plot undirected weighted graph
        G = nx.Graph()
        for i in range(0, m):
            for j in range(0, m):
                G.add_edge(i + 1, j + 1, weight=W[i, j])

        weights = np.array(list(nx.get_edge_attributes(G, 'weight').values()))
        weightWidths = (weights - min(weights)) / (max(weights) - min(weights)) * 5
        weightColors = (weights - min(weights)) / (max(weights) - min(weights))

        # plot the graph
        nx.draw(G,
                pos,
                width=weightWidths,
                with_labels=True,
                edge_color=weightColors,
                edge_cmap=plt.cm.Greys,
                edge_vmin=min(weightColors),
                edge_vmax=max(weightColors),
                node_color=node_colors,
                ax=ax)

        ## add a colorbar
        norm = mpl.colors.Normalize(vmin=min(weights), vmax=max(weights))
        weightMap = plt.cm.ScalarMappable(cmap=plt.cm.Greys, norm=norm)
        plt.axis('on')
        plt.colorbar(weightMap)
        if title is not None:
            plt.title("Graph - {}".format(title))
        else:
            plt.title("Graph")
        if save_path is not None:
            plt.savefig(save_path)
        else:
            plt.show()
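
# Hypothetical usage sketch for plotMCWandGraph: eight random channels and a
# symmetric adjacency matrix, which takes the undirected branch above. The
# module-level getEllipticalCoordinates helper is assumed to be defined.
import numpy as np
X_demo = np.random.randn(8, 1000)      # (num_channels, num_samples)
W_demo = np.random.rand(8, 8)
W_demo = (W_demo + W_demo.T) / 2.0     # symmetric -> undirected graph
plotMCWandGraph(X_demo, fs=250.0, W=W_demo, title="demo")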
Beispiel #40
def createWeightedGraph(W, remove_self_loops=True, title=None):
    """Plots the corresponding weighted undirecte or directed graph topology
    of the given adjacency matrix.

    Parameters
    ----------
    W : numpy array
        Adjacency matrix with shape (L, L). Currently, this function only
        supports L=8
    remove_self_loops : bool, optional
        if True, remove self loops in the graph, by default True
    title : str, optional
        Title of the plotted graph topology, by default None
    """
    if np.allclose(W, W.T):
        graphIsDirected = False
    else:
        graphIsDirected = True
    if remove_self_loops:
        np.fill_diagonal(W, 0)
    m = W.shape[0]
    pos = getEllipticalCoordinates()
    fig = plt.figure(facecolor="w")
    ax = fig.add_subplot(111)

    if graphIsDirected:
        ## plot directed weighted graph
        G = nx.DiGraph()
        for i in range(0, m):
            for j in range(0, m):
                G.add_edge(i + 1, j + 1, weight=W[i, j])

        weights = np.array(list(nx.get_edge_attributes(G, 'weight').values()))
        weightColors = (weights - min(weights)) / (max(weights) - min(weights))

        # plot the graph
        # Node labels are drawn separately below via draw_networkx_labels.
        nx.draw_networkx_nodes(G,
                               pos,
                               node_color='lightgreen',
                               ax=ax)
        nx.draw_networkx_edges(G,
                               pos,
                               arrows=True,
                               arrowsize=20,
                               arrowstyle='-|>',
                               width=2,
                               edge_color=weightColors,
                               edge_cmap=plt.cm.Blues,
                               edge_vmin=min(weightColors),
                               edge_vmax=max(weightColors),
                               connectionstyle='arc3, rad = 0.1',
                               ax=ax)
        nx.draw_networkx_labels(G, pos)

        # add colorbar
        norm = mpl.colors.Normalize(vmin=min(weights), vmax=max(weights))
        weightMap = plt.cm.ScalarMappable(cmap=plt.cm.Blues, norm=norm)
        plt.axis('on')
        plt.colorbar(weightMap)
        if title is not None:
            plt.title(title)
        plt.show()

    else:
        ## plot undirected weighted graph
        G = nx.Graph()
        for i in range(0, m):
            for j in range(0, m):
                G.add_edge(i + 1, j + 1, weight=W[i, j])

        weights = np.array(list(nx.get_edge_attributes(G, 'weight').values()))
        weightWidths = (weights - min(weights)) / (max(weights) - min(weights)) * 5
        weightColors = (weights - min(weights)) / (max(weights) - min(weights))

        # plot the graph
        nx.draw(G,
                pos,
                width=weightWidths,
                with_labels=True,
                edge_color=weightColors,
                edge_cmap=plt.cm.Blues,
                edge_vmin=min(weightColors),
                edge_vmax=max(weightColors),
                node_color='lightgreen',
                ax=ax)

        ## add a colorbar
        norm = mpl.colors.Normalize(vmin=min(weights), vmax=max(weights))
        weightMap = plt.cm.ScalarMappable(cmap=plt.cm.Blues, norm=norm)
        plt.axis('on')
        plt.colorbar(weightMap)
        if title is not None:
            plt.title(title)
        plt.show()
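
# Hypothetical usage sketch for createWeightedGraph: an asymmetric matrix
# takes the directed branch above.
import numpy as np
W_demo = np.random.rand(8, 8)          # W != W.T -> plotted as a DiGraph
createWeightedGraph(W_demo, title="directed demo")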
 def generateAttackGraph(self, network, demo=False):
     """
     Build the attack graph of the network from its description dict.
     """
     plt.close()
     counter = 0
     sum = 0
     G = nx.DiGraph()
     attackerInitialNode = self.getAttackerInitialNode(network)
     matrix = self.reachability.generateReachabilityMatrix(network)
     attackerNodes = set()
     attackerNodes.add(attackerInitialNode)
     # currentAV = "network"
     visitedNodes = list()
     labeldict = {}
     labeldict[attackerInitialNode] = attackerInitialNode.label
     lastNode = None
     while len(attackerNodes) > 0:
         curNode = attackerNodes.pop()
         lastNode = curNode
         G.add_node(lastNode)
         visitedNodes.append(curNode)
         v1 = curNode.vulnerabilities
         v1 = self.sortVul(v1)
         for v in v1:
             if demo is True:
                 vul = self.getVul(network["vulnerabilities"], v["id"])
             else:
                 vul = v
             if self.comparePriv(curNode.priv, vul.requires):
                 if self.comparePriv(vul.provides, curNode.priv):
                     curNode.priv = vul.provides
                     currentAV = vul.vector
                     G.add_node(vul)
                     weight = self.calcWeight(vul)
                     counter += 1
                     sum += weight
                     G.add_edge(lastNode, vul, weight=weight)
                     lastNode = vul
                     if vul.shortDesc != '':
                         labeldict[vul] = vul.shortDesc
                     else:
                         labeldict[vul] = vul.cve
         destNodes = self.reachability.getReachableNodesFromNodes(
             matrix, curNode, network["nodes"])
         for n in destNodes:
             v2 = n.vulnerabilities
             for v in v2:
                 if demo is True:
                     vul = self.getVul(network["vulnerabilities"], v["id"])
                 else:
                     vul = v
                 # if Scanner.compareAV(currentAV,vul.vector):
                 if self.comparePriv("None",
                                     vul.requires) or self.comparePriv(
                                         curNode.priv, vul.requires):
                     # if self.comparePriv(vul.provides, n.priv):
                     n.priv = vul.provides
                     if vul.shortDesc != '':
                         labeldict[vul] = vul.shortDesc
                     else:
                         labeldict[vul] = vul.cve
                     G.add_node(vul)
                     weight = self.calcWeight(vul)
                     counter += 1
                     sum += weight
                     G.add_edge(lastNode, vul, weight=weight)
                     G.add_node(n)
                     G.add_edge(vul, n)
                     if n.label != '':
                         labeldict[n] = n.label
                     else:
                         labeldict[n] = n.name
                     if n not in visitedNodes:
                         attackerNodes.add(n)
     pos = nx.spring_layout(G)
     nx.draw(G, pos=pos, labels=labeldict, with_labels=True)
     labels = nx.get_edge_attributes(G, 'weight')
     nx.draw_networkx_edge_labels(G, pos, edge_labels=labels)
     plt.savefig('Images//graph.png', format='PNG')
     result = 0
     if counter != 0:
         result = sum / counter
     answer = [result, G]
     return answer
 def getArcFlowMax(self, arc):
     flowMax = nx.get_edge_attributes(self.G, 'flowMax')
     return float(flowMax[arc])
Beispiel #43
def plot_nx_succession_diagram(G, pos=None, fig_dimensions=(None,None), nx_node_kwargs=None, nx_edge_kwargs=None,
    draw_node_labels=True, labeling_convention='label', draw_edge_labels=False, nx_node_label_kwargs=None, nx_edge_label_kwargs=None):
    """Plot the input succession diagram. Requires matplotlib. For finer control
    over plot appearance, it is recommended to plot g directly.

    Parameters
    ----------
    G : networkx.DiGraph
        Labeled succession diagram, e.g., as is output from
        export.networkx_succession_diagram_reduced_network_based().
    pos : str or graphviz_layout
        Layout for the nodes; a dictionary with nodes as keys and positions as
        values. Positions should be sequences of length 2. If None, we attempt to
        use pydot/graphviz to construct a layout; otherwise we fall back to the
        networkx planar_layout function (succession diagrams are always planar).
    fig_dimensions : (int,int)
        Dimensions of the output figure. If (None,None), then the dimensions are
        calculated based on the number of nodes in G (the default is (None,None)).
    draw_node_labels : bool
        Whether node labels should be drawn (True) or left as metadata (False)
        (the default is True).
    draw_edge_labels : bool
        Whether edge labels should be drawn (True) or left as metadata (False);
        only affects reduced-network-based (default) succession diagrams, not
        motif-based succession diagrams. (The default value is False.)
    labeling_convention : str
        Whether node labels should be just the stable motifs ('label') or all stabilized states ('states')
        (the default is 'label').
    nx_node_kwargs : dictionary
        Keyword arguments passed to nx.draw_networkx_nodes (in addition to G and pos).
        If None, we pass {'node_size':50*G.number_of_nodes()} by default.
    nx_edge_kwargs : dictionary
        Keyword arguments passed to nx.draw_networkx_edges (in addition to G and pos).
        If None, we pass {'arrowstyle':'-|>','width':2,'arrowsize':30} by default.
    nx_node_label_kwargs : dictionary
        Keyword arguments passed to nx.draw_networkx_labels (in addition to G and pos).
        If None, we pass {'font_size':16} by default.
    nx_edge_label_kwargs : dictionary
        Keyword arguments passed to nx.draw_networkx_edge_labels (in addition to G and pos).
        If None, we pass {'font_size':16} by default.

    """
    import matplotlib.pyplot as plt

    if fig_dimensions == (None,None):
        fig_dimensions=(2*(G.number_of_nodes()+2),G.number_of_nodes()+2)

    if pos is None:
        try:
            from networkx.drawing.nx_agraph import graphviz_layout
            pos = graphviz_layout(G, prog='dot')
        except ImportError:
            pos = nx.planar_layout(G)

    plt.figure(figsize=fig_dimensions)

    if nx_node_kwargs is None:
        nx_node_kwargs = {'node_size':50*G.number_of_nodes()}
    if nx_edge_kwargs is None:
        nx_edge_kwargs = {'arrowstyle':'-|>','width':2,'arrowsize':30}
    if nx_node_label_kwargs is None:
        nx_node_label_kwargs = {'font_size':16}
    if nx_edge_label_kwargs is None:
        nx_edge_label_kwargs = {'font_size':16}

    nx.drawing.draw_networkx_nodes(G, pos,**nx_node_kwargs)
    nx.draw_networkx_edges(G, pos,**nx_edge_kwargs)
    if draw_node_labels:
        if labeling_convention=='label':
            nx.drawing.draw_networkx_labels(G,pos, labels=dict(G.nodes('label')),**nx_node_label_kwargs)
        else:
            nx.drawing.draw_networkx_labels(G,pos, labels=dict(G.nodes('states')),**nx_node_label_kwargs)
    if draw_edge_labels:
        nx.drawing.draw_networkx_edge_labels(G,pos,edge_labels=nx.get_edge_attributes(G,'motif'),**nx_edge_label_kwargs)
    plt.axis('off')
    plt.show()
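
# Hypothetical usage sketch: a tiny hand-built labeled succession diagram
# (normally G would come from export.networkx_succession_diagram_reduced_network_based()).
import networkx as nx
G_demo = nx.DiGraph()
G_demo.add_node(0, label="root", states="{}")
G_demo.add_node(1, label="motif A", states="{A: 1}")
G_demo.add_edge(0, 1, motif="A")
plot_nx_succession_diagram(G_demo, draw_edge_labels=True)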
                    ['Toronto', 'Calgary', 1500],
                    ['Toronto', 'LA', 1800],
                    ['Toronto', 'Chicago', 500],
                    ['Denver', 'Urbana', 1000],
                    ['Denver', 'Houston', 1500],
                    ['Houston', 'LA', 1500],
                    ['Denver', 'LA', 1000],],
                    columns = ['city1','city2','distance'])
# Construct a graph using MultiDiGraph from NetworkX library
G= nx.from_pandas_edgelist(df, 'city1', 'city2', edge_attr=['distance'],
                                    create_using=nx.MultiDiGraph())
#print(len(G))
print(G.nodes())
print(G.edges(data=True))
# Draw the graph showing the City Names (Nodes) and Distances (Attributes)
edge_labels = nx.get_edge_attributes(G,'distance')
pos = nx.spring_layout(G)
nx.draw(G,pos, with_labels=True)
nx.draw_networkx_edge_labels(G, pos, edge_labels=edge_labels)
#plt.show()
####################################################################################
# Start the algorithm by finding the origin city in the data frame (both city1 and city2)
# Create the lists to store the information
# frontiers List is the list we are going to explore


# For this problem, we will set Origin = 'Calgary', Destination = 'Chicago'
Origin = 'Calgary'
Destination = 'Chicago'

frontiers = []
def edge_width(graph):
    # Assign edge width based on the similarity score between articles
    edge_width = []
    for key, value in nx.get_edge_attributes(graph, 'score').items():
        edge_width.append(assign_thickness(value))
    return edge_width
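
# assign_thickness is not defined above; a plausible stand-in that maps a
# similarity score in [0, 1] to a visible line width might look like this
# (illustrative only):
def assign_thickness(score, scale=5.0):
    # Wider edges for more similar articles, with a minimum visible width.
    return max(0.5, score * scale)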
Beispiel #46
import matplotlib.pyplot as plt
import networkx as nx
import numpy as np

n = 50
G = nx.erdos_renyi_graph(n, 0.1, seed=None, directed=False)
pos = nx.spring_layout(G)

for u, v, d in G.edges(data=True):
    d['weight'] = 1 + 10 * np.random.rand()

edges, weights = zip(*nx.get_edge_attributes(G, 'weight').items())

path = nx.shortest_path(G, source=0, target=G.number_of_nodes() - 1)
path_edges = set(zip(path, path[1:]))
nx.draw_networkx_nodes(G, pos, nodelist=path, node_color='r')
nx.draw_networkx_edges(G, pos, edgelist=path_edges, edge_color='r', width=10)
nx.draw(G,
        pos,
        node_color='b',
        node_size=10,
        edgelist=edges,
        edge_color=weights,
        width=2,
        edge_cmap=plt.cm.Blues)
plt.show()
Beispiel #47
def entropy(G, sources=None, sinks=None):
    """ 
    Compute entropy, equations from [1].
    
    Entropy is a measure of uncertainty in a random variable.  
    In a water distribution network model, the random variable is 
    flow in the pipes and entropy can be used to measure alternate flow paths
    when a network component fails.  A network that carries maximum entropy 
    flow is considered reliable with multiple alternate paths.  

    Parameters
    ----------
    G : NetworkX or WNTR graph
        Entropy is computed using a directed graph based on pipe flow direction.  
        The 'weight' of each link is equal to the flow rate.
    
    sources : list of strings, optional (default = all reservoirs)
        List of node names to use as sources.
        
    sinks : list of strings, optional (default = all nodes)
        List of node names to use as sinks.
        
    Returns
    -------
    S : dict
        Node entropy, {node name: entropy value}
        
    Shat : float
        System entropy

    Examples
    --------
    The following example computes entropy using Net3 flow directions at time 3600 s.
    
    >>> inp_file = 'networks/Net3.inp'
    >>> wn = wntr.network.WaterNetworkModel(inp_file)
    >>> sim = wntr.sim.EpanetSimulator(wn)
    >>> results = sim.run_sim()
    >>> G = wn.get_graph_deep_copy()
    >>> attr = results.link.loc['flowrate', 3600, :]
    >>> G.weight_graph(link_attribute=attr) 
    >>> [S, Shat] = wntr.metrics.entropy(G)
    >>> wntr.network.draw_graph(wn, node_attribute=S, title='Node entropy')
    >>> Shat
    4.05
    
    References
    -----------
    [1] Awumah K, Goulter I, Bhatt SK. (1990). Assessment of reliability in 
    water distribution networks using entropy based measures. Stochastic 
    Hydrology and Hydraulics, 4(4), 309-320 
    """

    if not G.is_directed():
        return

    if sources is None:
        sources = [
            key for key, value in nx.get_node_attributes(G, 'type').items()
            if value == 'reservoir'
        ]

    if sinks is None:
        sinks = G.nodes()

    S = {}
    Q = {}
    for nodej in sinks:
        if nodej in sources:
            S[nodej] = 0  # nodej is the source
            continue

        sp = []  # simple path
        if G.node[nodej]['type'] == 'junction':
            for source in sources:
                if nx.has_path(G, source, nodej):
                    simple_paths = _all_simple_paths(G, source, target=nodej)
                    sp = sp + ([p for p in simple_paths])
                    # all_simple_paths was modified to check 'has_path' in the
                    # loop, but this is still slow for large networks
                    # what if the network was skeletonized based on series pipes
                    # that have the same flow direction?
                    # what about duplicating paths that have pipes in series?
                #print j, nodeid, len(sp)

        if len(sp) == 0:
            S[nodej] = np.nan  # nodej is not connected to any sources
            continue

        sp = np.array(sp)

        # Uj = set of nodes on the upstream ends of links incident on node j
        Uj = G.predecessors(nodej)
        # qij = flow in link from node i to node j
        qij = []
        # aij = number of equivalent independent paths through the link from node i to node j
        aij = []
        for nodei in Uj:
            mask = np.array([nodei in path for path in sp])
            # NDij = number of paths through the link from node i to node j
            NDij = sum(mask)
            if NDij == 0:
                continue
            temp = sp[mask]
            # MDij = links in the NDij path
            MDij = [(t[idx], t[idx + 1]) for t in temp
                    for idx in range(len(t) - 1)]

            flow = 0
            for link in G[nodei][nodej].keys():
                flow = flow + G[nodei][nodej][link]['weight']
            qij.append(flow)

            # dk = degree of link k in MDij
            dk = Counter()
            for elem in MDij:
                # divide by the number of links between two nodes
                dk[elem] += 1 / len(G[elem[0]][elem[1]].keys())

            aij.append(
                NDij *
                (1 - float(sum(np.array(list(dk.values())) - 1)) / sum(dk.values())))

        Q[nodej] = sum(qij)  # Total flow into node j

        # Equation 7
        S[nodej] = 0
        for idx in range(len(qij)):
            if qij[idx] / Q[nodej] > 0:
                S[nodej] = S[nodej] - \
                    qij[idx]/Q[nodej]*math.log(qij[idx]/Q[nodej]) + \
                    qij[idx]/Q[nodej]*math.log(aij[idx])

    Q0 = sum(nx.get_edge_attributes(G, 'weight').values())

    # Equation 3
    Shat = 0
    for nodej in sinks:
        if not np.isnan(S[nodej]):
            if nodej not in sources:
                if Q[nodej] / Q0 > 0:
                    Shat = Shat + \
                        (Q[nodej]*S[nodej])/Q0 - \
                        Q[nodej]/Q0*math.log(Q[nodej]/Q0)

    return [S, Shat]
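
# Equation 7 in isolation (a sketch): the entropy of one node from its inflow
# rates qij and equivalent-path counts aij, mirroring the loop above.
import math

def node_entropy(qij, aij):
    Q = sum(qij)
    return sum(-(q / Q) * math.log(q / Q) + (q / Q) * math.log(a)
               for q, a in zip(qij, aij) if q / Q > 0)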
def max_flow(id):

    G = read_dot(
        r"C:\Users\Charun\Desktop\CSC301-Assignment6\input_graphs\input_" +
        str(id) + r".dot")
    my_source = list(G.nodes)[0]
    my_sink = list(G.nodes)[-1]

    def my_build_residual_network(G, capacity):

        R = nx.DiGraph()
        R.add_nodes_from(G)

        inf = float('inf')
        # Extract edges with positive capacities. Self loops excluded.
        edge_list = [
            (u, v, attr) for u, v, attr in G.edges(data=True)
            if u != v and float(attr.get(capacity, inf).replace('"', '')) > 0
        ]

        inf = 3 * sum(
            float(attr[capacity].replace('"', ''))
            for u, v, attr in edge_list if capacity.replace('"', '') in attr
            and attr[capacity] != inf) or 1
        if G.is_directed():
            for u, v, attr in edge_list:
                r = min(float(attr.get(capacity, inf).replace('"', '')), inf)
                if not R.has_edge(u, v):
                    # Both (u, v) and (v, u) must be present in the residual
                    # network.
                    R.add_edge(u, v, capacity=r)
                    R.add_edge(v, u, capacity=0)
                else:
                    # The edge (u, v) was added when (v, u) was visited.
                    R[u][v]['capacity'] = r
        else:
            for u, v, attr in edge_list:
                # Add a pair of edges with equal residual capacities.
                r = min(attr.get(capacity, inf), inf)
                R.add_edge(u, v, capacity=r)
                R.add_edge(v, u, capacity=r)

        # Record the value simulating infinity.
        R.graph['inf'] = inf

        return R

    def my_edmonds_karp(G,
                        s,
                        t,
                        capacity='capacity',
                        residual=None,
                        value_only=False,
                        cutoff=None):
        R = my_edmonds_karp_impl(G, s, t, capacity, residual, cutoff)
        return R

    def my_edmonds_karp_impl(G, s, t, capacity, residual, cutoff):
        if s not in G:
            raise Exception('source not in graph')
        if t not in G:
            raise Exception('sink not in graph')
        if s == t:
            raise Exception('source and sink need to be different nodes')

        if residual is None:
            R = my_build_residual_network(G, capacity)
        else:
            R = residual

        for u in R:
            for edge in R[u].values():
                edge['flow'] = 0

        if cutoff is None:
            cutoff = float('inf')

        R.graph['flow_value'] = my_edmonds_karp_core(R, s, t, cutoff)

        return R

    def my_edmonds_karp_core(R, s, t, cutoff):
        residual_nodes = R.nodes
        residual_pred = R.pred
        residual_succ = R.succ

        inf = R.graph['inf']

        def augment(path):
            flow = inf
            my_iter = iter(path)
            u = next(my_iter)
            for v in my_iter:
                attribute = residual_succ[u][v]
                flow = min(flow, attribute['capacity'] - attribute['flow'])
                u = v

            my_iter = iter(path)
            u = next(my_iter)
            for v in my_iter:
                residual_succ[u][v]['flow'] += flow
                residual_succ[v][u]['flow'] -= flow
                u = v
            return flow

        def bfs():
            predecessor = {s: None}
            queue_s = [s]
            successor = {t: None}
            queue_t = [t]
            while True:
                queue = []
                if len(queue_s) <= len(queue_t):
                    for u in queue_s:
                        for v, attribute in residual_succ[u].items():
                            if v not in predecessor and attribute[
                                    'flow'] < attribute['capacity']:
                                predecessor[v] = u
                                if v in successor:
                                    return v, predecessor, successor
                                queue.append(v)
                    if not queue:
                        return None, None, None
                    queue_s = queue
                else:
                    for u in queue_t:
                        for v, attribute in residual_pred[u].items():
                            if v not in successor and attribute[
                                    'flow'] < attribute['capacity']:
                                successor[v] = u
                                if v in predecessor:
                                    return v, predecessor, successor
                                queue.append(v)
                    if not queue:
                        return None, None, None
                    queue_t = queue

        flow_value = 0
        while flow_value < cutoff:
            v, predecessor, successor = bfs()
            if predecessor is None:
                break
            path = [v]
            u = v
            while u != s:
                u = predecessor[u]
                path.append(u)

            path.reverse()
            u = v
            while u != t:
                u = successor[u]
                path.append(u)
            flow_value += augment(path)

        return flow_value

    R = my_edmonds_karp(G, my_source, my_sink)
    print(R.graph['flow_value'])

    flow_attr = nx.get_edge_attributes(R, 'flow')

    # remove non-participating edges
    for k, v in flow_attr.items():
        if float(v) <= 0:
            R.remove_edge(k[0], k[1])

    flow_attr = nx.get_edge_attributes(R, 'flow')

    cap_attr = nx.get_edge_attributes(R, 'capacity')

    labels = {}

    for (k1, v1), (k2, v2) in zip(flow_attr.items(), cap_attr.items()):
        labels[k1] = str(v1) + "/" + str(v2)

    # Remove non-participating nodes
    for final_node in list(R.nodes):
        if R.degree[final_node] == 0:
            R.remove_node(final_node)

    write_dot(
        R,
        r"C:\Users\Charun\Desktop\CSC301-Assignment6\output_graphs\output_" +
        str(id) + r".dot")

    pos = nx.spring_layout(R)

    nx.draw(R, pos, with_labels=True)
    nx.draw_networkx_edge_labels(R, pos, edge_labels=labels)
    plt.savefig(
        r"C:\Users\Charun\Desktop\CSC301-Assignment6\output_graphs\output_" +
        str(id) + r".png")

    plt.close()
    G.clear()
    R.clear()
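
# Cross-check sketch: NetworkX's built-in Edmonds-Karp on a small graph; the
# hand-rolled implementation above should report the same flow value.
import networkx as nx
from networkx.algorithms.flow import edmonds_karp

H = nx.DiGraph()
H.add_edge('s', 'a', capacity=3.0)
H.add_edge('a', 't', capacity=2.0)
H.add_edge('s', 't', capacity=1.0)
flow_value, _ = nx.maximum_flow(H, 's', 't', flow_func=edmonds_karp)
print(flow_value)  # 3.0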
 def getArcDiameter(self, arc):
     diameter = nx.get_edge_attributes(self.G, 'diameter')
     return float(diameter[arc])
Beispiel #50
          (n[0], n[3], 9), \
          (n[1], n[2], 3), \
          (n[1], n[4], 2), \
          (n[3], n[5], 7), \
          (n[2], n[6], 2), \
          (n[2], n[5], 4), \
          (n[5], n[6], 2), \
          (n[4], n[6], 1), \
          (n[6], n[7], 2),
          (n[5], n[7], 5), }

G.add_nodes_from(nodes_labels)
G.add_weighted_edges_from(edges)

k = 1 / sqrt(5)
pos = nx.spring_layout(G, k=k, iterations=20, scale=0.8)  #CHANGE FOR OVERLAPS
edges_labels = nx.get_edge_attributes(G, 'weight')

status = nx.info(G)
print(status)
print('Shortest path length = ',
      nx.shortest_path_length(G, source=n[0], target=n[7], weight='weight'))

nx.draw(G, pos, with_labels=True)
path = nx.shortest_path(G, source=n[0], target=n[7], weight='weight')
path_edges = list(zip(path, path[1:]))
nx.draw_networkx_nodes(G, pos, nodelist=path, node_color='y')
nx.draw_networkx_edges(G, pos, edgelist=path_edges, edge_color='r', width=2)
nx.draw_networkx_edge_labels(G, pos, edge_labels=edges_labels)
plt.axis('equal')
plt.show()
Beispiel #51
 def __get_edge_label_num(self, edge_label):
     el = set()
     for G in self.__graphs:
         el = el | set(nx.get_edge_attributes(G, edge_label).values())
     return len(el)
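
# Sketch of the idea behind __get_edge_label_num: count the distinct values of
# an edge attribute across several graphs (hypothetical attribute name 'bond').
import networkx as nx

g1, g2 = nx.Graph(), nx.Graph()
g1.add_edge(0, 1, bond='single')
g2.add_edge(0, 1, bond='double')
distinct = set(nx.get_edge_attributes(g1, 'bond').values()) \
    | set(nx.get_edge_attributes(g2, 'bond').values())
print(len(distinct))  # 2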
Beispiel #52
    def get(self):
        args = parser.parse_args()
        print(args)

        # todo remove this "hack"

        concepts = config['concepts']['list']
        target = config['concepts']['target']

        query = """
            copy (select positive, negative, """ + ", ".join(
            concepts
        ) + """ from stream_events where spam is null and creation_time between to_timestamp(%s) and to_timestamp(%s))
                to '/tmp/stream_events_classes.csv' with (format csv, header)
        """
        cur = pg_conn.cursor()
        # cur.execute(query, args['date_from'], args['date_to'])
        cur.close()

        df_no_spam = pd.read_csv("/tmp/stream_events_classes.csv")
        for column in df_no_spam:
            df_no_spam[column] = df_no_spam[column].map(
                lambda x: x if pd.isna(x) else column)

        tuples = []
        for x in df_no_spam.itertuples():
            t = []
            for y in x[1:]:
                if not pd.isna(y):
                    t.append(y)
            if len(t):
                tuples.append(tuple(sorted(t)))

        tuples_len = len(tuples)

        weights = dict()
        weights[target] = 0
        for c in concepts:
            weights[c] = 0

        itemsets, rules = apriori(tuples,
                                  min_support=0.0001,
                                  min_confidence=0.001)
        if 'weights' not in args or args['weights'] is None:
            args['weights'] = "{}"
        parsed_weights = json.loads(args['weights'])
        for c in weights:
            if c not in parsed_weights:
                continue
            weights[c] = float(parsed_weights[c])

        def activation(x, derivative=False):
            return np.tanh(x)  # x*(1-x) if derivative else 1/(1+np.exp(-x))

        DG = nx.DiGraph()
        DG.add_node(target)

        print(nx.get_node_attributes(DG, target))

        count_dict = itemsets[2]
        concept_values = dict({target: 0})
        for conc in concepts:
            concept_values[conc] = 0
            tneg = tuple(sorted([conc, 'negative']))
            tpos = tuple(sorted([conc, 'positive']))
            if tpos in count_dict:
                concept_values[conc] += count_dict[tpos] / tuples_len
            if tneg in count_dict:
                concept_values[conc] -= count_dict[tneg] / tuples_len

            DG.add_node(conc)
            DG.add_weighted_edges_from([(conc, target, weights[conc])])

        print("initial:", concept_values)
        nx.set_node_attributes(DG, concept_values, 'cv')

        EPOCHS = args['max_iters']

        DGtmp = DG.copy()
        DGresult = DG.copy()

        tmp_concepts = concept_values.copy()
        concepts_changes = dict()
        for c in concept_values:
            concepts_changes[c] = []

        effective_iters = 0
        for epoch in range(EPOCHS):

            old_cv = nx.get_node_attributes(DGresult, 'cv')
            for c, inners in DGresult.pred.items():
                tmp_concepts[c] = activation(old_cv[c] + np.sum([
                    old_cv[factor_conc] * fc_weight['weight']
                    for factor_conc, fc_weight in inners.items()
                ]))
                concepts_changes[c].append(tmp_concepts[c])

            nx.set_node_attributes(DGresult, tmp_concepts, 'cv')
            effective_iters += 1

        labels = nx.get_node_attributes(DGresult, 'cv')
        for l, v in labels.items():
            labels[l] = "{0} {1:.4f}".format(l, v)

        res_cv = nx.get_node_attributes(DGresult, 'cv')
        pos = nx.spring_layout(DGresult)

        plt.clf()
        cf = plt.gcf()
        cf.set_size_inches(18, 10)

        ax = cf.gca()

        for c, inners in DGresult.pred.items():
            # Node labels are drawn separately below via draw_networkx_labels.
            nx.draw_networkx_nodes(DGresult,
                                   pos,
                                   nodelist=[c],
                                   node_color="blue",
                                   node_size=500,
                                   alpha=0.5,
                                   ax=ax)
            for inner in inners:
                edge_color = 'red' if res_cv[inner] < 0 else 'green'
                nx.draw_networkx_edges(DGresult,
                                       pos,
                                       edgelist=[(inner, c)],
                                       width=3,
                                       alpha=0.3,
                                       edge_color=edge_color,
                                       ax=ax)

        nx.draw_networkx_labels(DGresult, pos, labels, font_size=10)

        weight_edge_labels = nx.get_edge_attributes(DGresult, 'weight')
        nx.draw_networkx_edge_labels(DGresult,
                                     pos,
                                     edge_labels=weight_edge_labels)

        graph_img = 'static/images/plot.png'
        # nx.draw(DGresult, with_labels=True, labels=labels, node_color='lightblue', weight=True, font_weight='normal')
        plt.savefig(graph_img, format="PNG", dpi=100)

        plt.clf()
        conv_img = 'static/images/conv.png'

        linspace = [i for i in range(effective_iters)]
        for c in concepts_changes:
            plt.plot(linspace, concepts_changes[c], label=c)
        plt.legend()

        plt.savefig(conv_img, format="PNG", dpi=100)
        return {
            'data': [
                'http://116.203.70.12:8000/' + graph_img.split("/")[2],
                'http://116.203.70.12:8000/' + conv_img.split("/")[2]
            ]
        }, 200, {
            'Access-Control-Allow-Origin': '*'
        }
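
# The epoch loop above applies a fuzzy-cognitive-map style update,
#   cv_new[c] = tanh(cv_old[c] + sum_j cv_old[j] * w[j -> c]).
# In isolation, as a sketch (cv a length-n array, W[j, c] the weight j -> c):
import numpy as np

def fcm_step(cv, W):
    # One synchronous update over all concepts.
    return np.tanh(cv + W.T @ cv)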
 def getArcCost(self, arc):
     cost = nx.get_edge_attributes(self.G, 'cost')
     return float(cost[arc])
Beispiel #54
def run_iteration(G, test=False):
    weight = nx.get_edge_attributes(G, "weight")

    S_n = [n for n, v in G.nodes(data=True) if v['status'] == 'S']
    E_n = [n for n, v in G.nodes(data=True) if v['status'] == 'E']
    I_n = [n for n, v in G.nodes(data=True) if v['status'] == 'I']
    H_n = [n for n, v in G.nodes(data=True) if v['status'] == 'H']
    for i in I_n:
        for neighbor in G.neighbors(i):
            if G.nodes[neighbor]["status"] == "S":
                # Multiplying by the weight of the edge is supposed to model decreased
                # contact after a person is in quarantine
                if random.random() < 0.004576659038901602 * G[i][neighbor]["weight"]:
                    G.nodes[neighbor]["status"] = "E"
                    G.nodes[neighbor]["days_since_E"] = 0

    for e in E_n:
        G.nodes[e]["days_since_E"] += 1
        days_since_E = G.nodes[e]["days_since_E"]
        if days_since_E == 14:  # if you've been asymptomatic for 14 days, you recover
            G.nodes[e]["status"] = "R"
        mu = 1.621
        std = 0.418
        prob = lognorm.pdf(days_since_E, s=std, scale=np.exp(mu)) * 0.8
        if random.random() < prob:
            G.nodes[e]["status"] = "I"
            G.nodes[e]["days_since_I"] = 0
            G.nodes[e]["onset of symptoms"] = random.normalvariate(5, 2.5)
    for i in I_n:
        G.nodes[i]["days_since_E"] += 1
        G.nodes[i]["days_since_I"] += 1
        days_since_I = G.nodes[i]["days_since_I"]
        # Added social distancing after 5 days of being infected
        if days_since_I > G.nodes[i]["onset of symptoms"]:
            cn_edges = G.edges(i)
            cn_edges = (e if e in weight else (e[1], e[0]) for e in cn_edges)
            updated_edges = {
                e: weight[e] * quarantine_infectivity
                for e in cn_edges
            }
            nx.set_edge_attributes(G, name="weight", values=updated_edges)
        dist = norm(10, 1)
        prob = dist.pdf(days_since_I) * 0.1755
        if random.random() < prob:
            G.nodes[i]["status"] = "H"
            G.nodes[i]["days_since_H"] = 0
        if G.nodes[i]["days_since_E"] >= 14:
            G.nodes[i]["status"] = "R"
    for h in H_n:
        G.nodes[h]["days_since_H"] += 1
        G.nodes[h]["days_since_E"] += 1
        if G.nodes[h]["days_since_E"] == 14:
            if random.random() < 0.01:
                G.nodes[h]["status"] = "D"
            else:
                G.nodes[h]["status"] = "R"

    # If we do testing, we now update the nodes depending on the test passed in
    if test == "random":
        src.test_strategies.test_strat_random_sample(G, 50)
    elif test == "high_connect":
        prev_t = prev_tested[0].union(prev_tested[1], prev_tested[2],
                                      prev_tested[3], prev_tested[4])
        (tested, num_tested,
         extra_tests) = src.test_strategies.test_strat_high_contact(
             G, 150, 50, prev_t)
        global ind_to_prev_tested
        prev_tested[ind_to_prev_tested] = set(tested)
        ind_to_prev_tested = (ind_to_prev_tested + 1) % 5
    elif test == "pool_family":
        src.test_strategies.test_strat_pool_family(G)
    elif test == "most_infected":
        src.test_strategies.test_strat_most_infected(G, 50)
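
# Hypothetical usage sketch for run_iteration: seed a small-world contact
# graph with statuses and unit edge weights, infect one node, and step once.
# It assumes the module-level imports (random, scipy.stats lognorm/norm) and
# the quarantine_infectivity global used above are defined.
import random
import networkx as nx

G_demo = nx.watts_strogatz_graph(100, 4, 0.1)
nx.set_node_attributes(G_demo, 'S', name='status')
nx.set_edge_attributes(G_demo, 1.0, name='weight')
seed_node = random.choice(list(G_demo.nodes))
G_demo.nodes[seed_node].update({'status': 'I', 'days_since_E': 0,
                                'days_since_I': 0, 'onset of symptoms': 5})
run_iteration(G_demo)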
 def getArcFlowMin(self, arc):
     flowMin = nx.get_edge_attributes(self.G, 'flowMin')
     return float(flowMin[arc])
    while True:
        ind = argmax(tsd_2)
        amax = unravel_index(ind, (NODES, NODES))
        s_star = amax[0]
        d_star = amax[1]
        if tsd_2[s_star, d_star] > 0:
            maxima.append([s_star, d_star])
            tsd_2[s_star, d_star] = 0
        else:
            break

    for couple in maxima:
        s = couple[0]
        d = couple[1]
        if G.node[s]['n_lasers'] > 0:
            if G.node[d]['n_photodiods'] > 0:
                G.add_edge(s, d, flow=0)
                G.node[s]['n_lasers'] -= 1
                G.node[d]['n_photodiods'] -= 1
                tsd_2[s][d] = 0

    flows = routeTraffic(G, tsd, NODES)
    fmax = max(flows)
    print fmax

    edge_labels = nx.get_edge_attributes(G, 'flow')

    nx.draw_circular(G, with_labels=True, node_color='y')

    plt.savefig("path.png")
Beispiel #57
    G = nx.Graph()
    nnodes = list(map(lambda v: v.id, graph.nodes))
    G.add_nodes_from(nnodes)

    paths = list(map(lambda e: e.graph(), D.tour))
    x = list(map(lambda edge: edge.graph(), graph.edges))
    y = list(map(lambda edge: edge.d, graph.edges))

    peso = 0
    for edge, d in zip(x, y):
        if edge in paths:
            G.add_edge(*edge, d=("%d" % d))
            peso += d

    plt.title("Peso del tour: {}\n".format(peso))

    pos = nx.spring_layout(G)
    edges = G.edges()
    labels = nx.get_edge_attributes(G, 'd')

    nx.draw_networkx(G, pos=pos,\
    with_labels=True, \
    width=1.0, \
    node_size=1000, \
    node_color='pink',\
    alpha=0.9)
    nx.draw_networkx_edge_labels(G, pos, labels)

    plt.show()
Beispiel #58
def case1_synthesis(formulas, ts_files, alpha, radius, time_wp, lab_testing):

    startFull = timeit.default_timer()
    startOff = timeit.default_timer()
    dfa_dict = {}
    for ind, f in enumerate(formulas):
        _, dfa_inf, bdd = twtl.translate(f, kind=DFAType.Infinity, norm=True)

        logging.debug('\nEnd of translate\n\n')
        logging.info('The bound of formula "%s" is (%d, %d)!', f, *bdd)
        logging.info(
            'Translated formula "%s" to infinity DFA of size (%d, %d)!', f,
            *dfa_inf.size())
        dfa_dict[ind + 1] = copy.deepcopy(
            dfa_inf)  # Note that the key is set to the agent number

    logging.debug('\n\nStart policy computation\n')

    ts_dict = {}
    ets_dict = {}
    for ind, ts_f in enumerate(ts_files):
        ts_dict[ind + 1] = Ts(directed=True, multi=False)
        ts_dict[ind + 1].read_from_file(ts_f)
        ets_dict[ind + 1] = expand_duration_ts(ts_dict[ind + 1])
    for ind in ts_dict:
        print 'Size of TS:', ets_dict[ind].size()
    # Get the nominal PA for each agent
    pa_nom_dict = {}
    norm_factor = {}
    startPA = timeit.default_timer()
    for key in dfa_dict:
        logging.info('Constructing product automaton with infinity DFA!')
        pa = ts_times_fsa(ets_dict[key], dfa_dict[key])
        # Give length and weight attributes to all edges in pa
        nom_weight_dict = {}
        edges_all = nx.get_edge_attributes(ts_dict[key].g, 'edge_weight')
        max_edge = max(edges_all, key=edges_all.get)
        norm_factor[key] = edges_all[max_edge]
        for pa_edge in pa.g.edges():
            edge = (pa_edge[0][0], pa_edge[1][0], 0)
            nom_weight_dict[pa_edge] = edges_all[edge] / norm_factor[key]
        nx.set_edge_attributes(pa.g, 'edge_weight', nom_weight_dict)
        nx.set_edge_attributes(pa.g, 'weight', 1)
        logging.info('Product automaton size is: (%d, %d)', *pa.size())
        # Make a copy of the nominal PA to change
        pa_nom_dict[key] = copy.deepcopy(pa)
    stopPA = timeit.default_timer()
    print 'Run Time (s) to get all three PAs is: ', stopPA - startPA

    for key in pa_nom_dict:
        print 'Size of PA:', pa_nom_dict[key].size()

    # Use alpha to perform weighted optimization of time and edge_weight and make this a
    # new edge attribute to find "shortest path" over
    for key in pa_nom_dict:
        weight_dict = {}
        time_weight = nx.get_edge_attributes(pa_nom_dict[key].g, 'weight')
        edge_weight = nx.get_edge_attributes(pa_nom_dict[key].g, 'edge_weight')
        for pa_edge in pa_nom_dict[key].g.edges():
            weight_dict[pa_edge] = alpha * time_weight[pa_edge] + (
                1 - alpha) * edge_weight[pa_edge]
        # Append the multi-objective cost to the edge attributes of the PA
        nx.set_edge_attributes(pa_nom_dict[key].g, 'new_weight', weight_dict)

    # Compute the energy (multi-objective cost function) for each agent's PA at every node
    startEnergy = timeit.default_timer()
    for key in pa_nom_dict:
        compute_energy(pa_nom_dict[key])
    stopEnergy = timeit.default_timer()
    print 'Run Time (s) to get the moc energy function for all three PA: ', stopEnergy - startEnergy

    # Compute optimal path in PA and project onto the TS
    ts_policy_dict_nom = {}
    pa_policy_dict_nom = {}
    tau_dict_nom = {}
    for key in pa_nom_dict:
        ts_policy_dict_nom[key], pa_policy_dict_nom[key], tau_dict_nom[key] = \
                    compute_control_policy(pa_nom_dict[key], dfa_dict[key], dfa_dict[key].kind)
    # Perform initial check on nominal control policies
    for key in ts_policy_dict_nom:
        if ts_policy_dict_nom[key] is None:
            logging.info('No control policy found!')

    # set empty control policies that will be iteratively updated
    ts_control_policy_dict = {}
    pa_control_policy_dict = {}

    # Initialize policy variables
    for key in ts_policy_dict_nom:
        ts_control_policy_dict[key] = []
        pa_control_policy_dict[key] = []

    # Concatenate nominal policies for searching
    policy_match, key_list, policy_match_index = update_policy_match(
        ts_policy_dict_nom)

    # Initialize vars, give nominal policies
    iter_step = 0
    running = True
    traj_length = 0
    ts_policy = copy.deepcopy(ts_policy_dict_nom)
    pa_policy = copy.deepcopy(pa_policy_dict_nom)
    tau_dict = tau_dict_nom
    # Choose parameter for n-horizon local trajectory and information sharing,
    # must be at least 2
    num_hops = 2
    # Get agent priority based on lowest energy
    prev_states = {}
    for key in ts_policy_dict_nom:
        prev_states[key] = pa_policy_dict_nom[key][0]
    priority = get_priority(pa_nom_dict, pa_policy_dict_nom, prev_states,
                            key_list)
    # Create Agent energy dictionary for post-processing
    agent_energy_dict = {}
    for key in ts_policy_dict_nom:
        agent_energy_dict[key] = []

    # Print time statistics
    stopOff = timeit.default_timer()
    print 'Offline run time for all initial setup: ', stopOff - startOff
    startOnline = timeit.default_timer()

    # Execute takeoff command for all crazyflies in lab testing
    if lab_testing:
        startTakeoff = timeit.default_timer()
        os.chdir("/home/ryan/crazyswarm/ros_ws/src/crazyswarm/scripts")
        os.system(
            "/home/ryan/crazyswarm/ros_ws/src/crazyswarm/scripts/twtl_takeoff.py"
        )  # make sure file is an executable
        os.chdir("/home/ryan/Desktop/pyTWTL/src")
        stopTakeoff = timeit.default_timer()
        print 'Takeoff time, should be ~2.7sec: ', stopTakeoff - startTakeoff

    # Iterate through all policies sequentially
    while running:
        while policy_match:
            for p_ind, p_val in enumerate(priority):
                if p_ind < 1:
                    weighted_nodes = {}
                    for i in range(num_hops):
                        weighted_nodes[i] = []
                else:
                    # Get local neighborhood (n-hop) of nodes to search for a conflict
                    for k, key in enumerate(key_list):
                        if p_val == key:
                            node = policy_match[0][k]
                            break
                    # Note that communication range needs to be 2*H, the receding horizon length
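                    # (intuitively: only agents within 2*H hops can reach a cell this
                    #  agent can also reach within the H-step horizon)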
                    local_set = get_neighborhood(node, ts_dict[p_val],
                                                 2 * num_hops)
                    one_hop_set = ts_dict[p_val].g.neighbors(node)
                    # Assign constraints for immediate transition
                    weighted_nodes = {}
                    weighted_nodes[0] = []
                    for pty in priority[0:p_ind]:
                        for k, key in enumerate(key_list):
                            if pty == key:
                                prev_node = policy_match[0][k]
                                if prev_node in one_hop_set:
                                    weighted_nodes[0].append(prev_node)
                                # Check if downwash constraint needs to be added, mostly for physical testing
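                                # (downwash: a quadrotor directly above another disturbs
                                #  it with its rotor wake, so those cells are also blocked)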
                                downwash_weight = downwash_check(k, ets_dict[key], policy_match[0], \
                                                                priority[0:k], key_list, radius)
                                if downwash_weight:
                                    for downwash_node in downwash_weight:
                                        if downwash_node not in weighted_nodes[0]:
                                            weighted_nodes[0].append(
                                                downwash_node)
                                break
                    # Get constraints for later transitions
                    for pty in priority[0:p_ind]:
                        for k, key in enumerate(key_list):
                            if pty == key:
                                ts_length = len(ts_policy[key])
                                # Flag this agent's upcoming states that fall inside
                                # the local neighborhood of the current agent
                                for i in range(min(ts_length, num_hops) - 1):
                                    if ts_policy[key][i + 1] in local_set:
                                        weighted_nodes.setdefault(i + 1, []).append(
                                            ts_policy[key][i + 1])
                                # Guarantee an entry (possibly empty) for every step
                                for i in range(num_hops - 1):
                                    weighted_nodes.setdefault(i + 1, [])
                    # Update constraint set with intersecting transitions
                    ts_prev_states = []
                    ts_index = []
                    if len(policy_match[0]) > 1 and traj_length >= 1:
                        for key in ts_control_policy_dict:
                            if len(ts_control_policy_dict[key]) == traj_length:
                                ts_prev_states.append(
                                    ts_control_policy_dict[key][-1])
                    if ts_prev_states:
                        for p_ind2, p_val2 in enumerate(priority[0:p_ind + 1]):
                            if p_ind2 > 0:
                                for k_c, key in enumerate(key_list):
                                    if p_val2 == key:
                                        node = policy_match[0][k_c]
                                        break
                                # Check if the trajectories will cross each other in transition
                                cross_weight = check_intersect(k_c, ets_dict[key], ts_prev_states, policy_match[0], \
                                                                    priority[0:p_ind2], key_list, radius, time_wp)
                                if cross_weight:
                                    for cross_node in cross_weight:
                                        if cross_node not in weighted_nodes[0]:
                                            weighted_nodes[0].append(
                                                cross_node)
                                    # Check if agents are using the same transition
                                    for p_ind3, p_val3 in enumerate(
                                            priority[0:p_ind2]):
                                        for k, key in enumerate(key_list):
                                            if p_val3 == key \
                                                    and ts_prev_states[k] == node \
                                                    and policy_match[0][k] == ts_prev_states[k_c]:
                                                temp_node = policy_match[0][k]
                                                if temp_node not in weighted_nodes[0]:
                                                    weighted_nodes[0].append(temp_node)
                                                if node not in weighted_nodes[0]:
                                                    weighted_nodes[0].append(node)
                                                break
                                        else:
                                            continue
                                        break
                                    else:
                                        continue
                                    break
                                else:
                                    # Check if agents are using the same transition
                                    for p_ind3, p_val3 in enumerate(
                                            priority[0:p_ind2]):
                                        for k, key in enumerate(key_list):
                                            if p_val3 == key \
                                                    and ts_prev_states[k] == node \
                                                    and policy_match[0][k] == ts_prev_states[k_c]:
                                                temp_node = policy_match[0][k]
                                                if temp_node not in weighted_nodes[0]:
                                                    weighted_nodes[0].append(temp_node)
                                                if node not in weighted_nodes[0]:
                                                    weighted_nodes[0].append(node)
                                                break
                                        else:
                                            continue
                                        break
                                    else:
                                        continue
                                    break
                # Compute local horizon function to account for receding horizon all the time
                # while checking for termination
                if traj_length >= 1:
                    init_loc = pa_control_policy_dict[p_val][-1]
                    # Compute receding horizon shortest path
                    ts_policy[p_val], pa_policy[p_val] = \
                        local_horizon(pa_nom_dict[p_val], weighted_nodes, num_hops, init_loc)
                    # Write updates to file
                    iter_step += 1
                    # write_to_iter_file(ts_policy[p_val], ts_dict[p_val], ets_dict[p_val], p_val, iter_step)

                # Update policy match
                policy_match, key_list, policy_match_index = update_policy_match(
                    ts_policy)

            # Append trajectories
            for key in ts_policy:
                agent_energy_dict[key].append(
                    pa_nom_dict[key].g.node[pa_policy[key][0]]['energy'])
                ts_control_policy_dict[key].append(ts_policy[key].pop(0))
                pa_policy_temp = list(pa_policy[key])
                pa_control_policy_dict[key].append(pa_policy_temp.pop(0))
                pa_policy[key] = tuple(pa_policy_temp)
            ts_write = policy_match.pop(0)
            traj_length += 1
            # publish this waypoint to a csv file
            write_to_csv_iter(ts_dict, ts_write, key_list, time_wp)
            # Execute waypoint in crazyswarm lab testing
            if lab_testing:
                startWaypoint = timeit.default_timer()
                os.chdir("/home/ryan/crazyswarm/ros_ws/src/crazyswarm/scripts")
                os.system(
                    "/home/ryan/crazyswarm/ros_ws/src/crazyswarm/scripts/twtl_waypoint.py"
                )  # make sure executable
                os.chdir("/home/ryan/Desktop/pyTWTL/src")
                stopWaypoint = timeit.default_timer()
                print 'Waypoint time, should be ~2.0sec: ', stopWaypoint - startWaypoint

            # Update policy_match now that a trajectory has finalized and policy_match is empty
            if ts_policy:
                # Remove keys from policies that have terminated
                land_keys = []
                for key, val in ts_policy.items():
                    if len(val) == 0:
                        land_keys.append(key)
                        del ts_policy[key]
                        del pa_policy[key]
                # publish to the land csv file for lab testing
                if land_keys:
                    if lab_testing:
                        write_to_land_file(land_keys)
                        os.chdir(
                            "/home/ryan/crazyswarm/ros_ws/src/crazyswarm/scripts"
                        )
                        os.system(
                            "/home/ryan/crazyswarm/ros_ws/src/crazyswarm/scripts/twtl_land.py"
                        )  # make sure executable
                        os.chdir("/home/ryan/Desktop/pyTWTL/src")
                if not ts_policy:
                    running = False
                    break
                # Update policy match
                policy_match, key_list, policy_match_index = update_policy_match(
                    ts_policy)
                # Get agent priority based on lowest energy
                for key in key_list:
                    prev_states[key] = pa_control_policy_dict[key][-1]
                priority = get_priority(pa_nom_dict, pa_policy, prev_states,
                                        key_list)
            else:
                running = False

    # Print run time statistics
    stopOnline = timeit.default_timer()
    print 'Online run time for safe algorithm: ', stopOnline - startOnline
    stopFull = timeit.default_timer()
    print 'Full run time for safe algorithm: ', stopFull - startFull
    # Print other statistics from simulation
    print 'Number of iterations for run: ', iter_step
    print 'Average time for iteration is: ', (stopOnline -
                                              startOnline) / iter_step
    print 'Number of full updates in run: ', traj_length
    print 'Average update time for single step: ', (stopOnline -
                                                    startOnline) / traj_length
    # PA adjacency matrix (source and target form), kept for offline inspection
    pa_adj_st = nx.adjacency_matrix(pa.g)

    # Plot energy statistics from the run (optional post-processing)
    # plot_energy(agent_energy_dict)

    # Possibly just set the relaxation to the nominal + additional nodes added *** Change (10/28)
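    # i.e. realized tau = nominal tau + (executed policy length - nominal policy length)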
    for key in pa_nom_dict:
        tau_dict[key] = tau_dict_nom[key] + len(
            ts_control_policy_dict[key]) - len(ts_policy_dict_nom[key])

    # Write the nominal and final control policies to a file
    for key in pa_nom_dict:
        write_to_control_policy_file(ts_policy_dict_nom[key], pa_policy_dict_nom[key], \
                tau_dict_nom[key], dfa_dict[key],ts_dict[key],ets_dict[key],\
                ts_control_policy_dict[key], pa_control_policy_dict[key], tau_dict[key], key)
    # Write the CSV files for experiments
    for key in pa_nom_dict:
        write_to_csv(ts_dict[key], ts_control_policy_dict[key], key, time_wp)
Beispiel #59
0
def prep_for_learning(ep_len, m, n, h, init_states, obstacles, pick_up_state,
                      delivery_state, rewards, rew_val, custom_flag,
                      custom_task):
    # Create the environment and get the TS #
    ts_start_time = timeit.default_timer()
    disc = 1
    TS, obs_mat, state_mat = create_ts(m, n, h)
    path = '../data/ts_' + str(m) + 'x' + str(n) + 'x' + str(h) + '_1Ag_1.txt'
    paths = [path]
    bases = {init_states[0]: 'Base1'}
    obs_mat = update_obs_mat(obs_mat, state_mat, m, obstacles, init_states[0])
    TS = update_adj_mat_3D(m, n, h, TS, obs_mat)
    create_input_file(TS, state_mat, obs_mat, paths[0], bases, disc, m, n, h,
                      0)
    ts_file = paths
    ts_dict = Ts(directed=True, multi=False)
    ts_dict.read_from_file(ts_file[0])
    ts = expand_duration_ts(ts_dict)
    ts_timecost = timeit.default_timer() - ts_start_time

    # Get the DFA #
    dfa_start_time = timeit.default_timer()
    pick_up = str(pick_up_state[0][0] * n + pick_up_state[0][1])
    delivery = str(delivery_state[0][0] * n + delivery_state[0][1])
    tf = str(ep_len)  # time bound
    if custom_flag == 1:
        phi = custom_task
    else:
        phi = '([H^1 r' + pick_up + ']^[0, ' + tf + '] * [H^1 r' + delivery + ']^[0,' + tf + '])^[0, ' + tf + ']'  # Construct the task according to pickup/delivery
    _, dfa_inf, bdd = twtl.translate(
        phi, kind=DFAType.Infinity, norm=True
    )  # states and sim. time ex. phi = '([H^1 r47]^[0, 30] * [H^1 r31]^[0, 30])^[0, 30]'
    dfa_timecost = timeit.default_timer() - dfa_start_time

    # Get the PA #
    pa_start_time = timeit.default_timer()
    alpha = 1
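    # With alpha = 1 the convex combination below reduces to the pure time
    # objective; lowering alpha trades time against the normalized edge cost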
    nom_weight_dict = {}
    weight_dict = {}
    pa_or = ts_times_fsa(ts, dfa_inf)  # Original pa
    edges_all = nx.get_edge_attributes(ts_dict.g, 'edge_weight')
    max_edge = max(edges_all, key=edges_all.get)
    norm_factor = edges_all[max_edge]
    for pa_edge in pa_or.g.edges():
        edge = (pa_edge[0][0], pa_edge[1][0], 0)
        nom_weight_dict[pa_edge] = edges_all[edge] / norm_factor
    nx.set_edge_attributes(pa_or.g, 'edge_weight', nom_weight_dict)
    nx.set_edge_attributes(pa_or.g, 'weight', 1)
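    # Edge costs are now normalized to [0, 1] by the largest TS edge weight and
    # every transition carries a unit time weight, so both objectives share one scale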
    pa = copy.deepcopy(pa_or)  # copy the pa
    time_weight = nx.get_edge_attributes(pa.g, 'weight')
    edge_weight = nx.get_edge_attributes(pa.g, 'edge_weight')
    for pa_edge in pa.g.edges():
        weight_dict[pa_edge] = alpha * time_weight[pa_edge] + (
            1 - alpha) * edge_weight[pa_edge]
    nx.set_edge_attributes(pa.g, 'new_weight', weight_dict)
    pa_timecost = timeit.default_timer() - pa_start_time

    # Compute the energy of the states #
    energy_time = timeit.default_timer()
    compute_energy(pa)
    energy_dict = nx.get_node_attributes(pa.g, 'energy')
    # Collect each PA node's energy in node-list order
    energy_pa = [energy_dict[node] for node in pa.g.nodes()]

    # projection of pa on ts #
    init_state = [init_states[0][0] * n + init_states[0][1]]
    pa2ts = []
    for i in range(len(pa.g.nodes())):
        if pa.g.nodes()[i][0] != 'Base1':
            pa2ts.append(int(pa.g.nodes()[i][0].replace("r", "")))
        else:
            pa2ts.append(init_state[0])
            i_s = i  # Agent's initial location in pa
    energy_timecost = timeit.default_timer() - energy_time

    # TS adjacency matrix and source-target
    TS_adj = TS
    TS_s = []
    TS_t = []
    for i in range(len(TS_adj)):
        for j in range(len(TS_adj)):
            if TS_adj[i, j] != 0:
                TS_s.append(i)
                TS_t.append(j)

    # pa adjacency matrix and source-target
    pa_adj_st = nx.adjacency_matrix(pa.g)
    pa_adj = pa_adj_st.todense()
    pa_s = []  # source node
    pa_t = []  # target node
    for i in range(len(pa_adj)):
        for j in range(len(pa_adj)):
            if pa_adj[i, j] == 1:
                pa_s.append(i)
                pa_t.append(j)
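    # pa_s[k] -> pa_t[k] lists every PA transition as parallel source/target
    # indices, a convenient tabular form for the learning stage this feeds into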

    # PA rewards matrix
    rewards_ts = np.zeros(m * n)
    rewards_pa = np.zeros(len(pa2ts))
    rewards_ts_indexes = []
    for i in range(len(rewards)):
        rewards_ts_indexes.append(
            rewards[i][0] * n + rewards[i][1]
        )  # rewards_ts_indexes[i] = rewards[i][0] * n + rewards[i][1]
        rewards_ts[rewards_ts_indexes[i]] = rew_val

    for i in range(len(rewards_pa)):
        rewards_pa[i] = rewards_ts[pa2ts[i]]
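    # pa2ts projects each PA node back to its TS region, so every PA node
    # inherits the reward of the region it corresponds to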

    # Display some important info
    print('##### PICK-UP and DELIVERY MISSION #####' + "\n")
    print('Initial Location  : ' + str(init_states[0]) + ' <---> Region ' +
          str(init_state[0]))
    print('Pick-up Location  : ' + str(pick_up_state[0]) + ' <---> Region ' +
          pick_up)
    print('Delivery Location : ' + str(delivery_state[0]) + ' <---> Region ' +
          delivery)
    print('Reward Locations  : ' + str(rewards) + ' <---> Regions ' +
          str(rewards_ts_indexes) + "\n")
    print('State Matrix : ')
    print(state_mat)
    print("\n")
    print('Mission Duration  : ' + tf + ' time steps')
    print('TWTL Task : ' + phi + "\n")
    print('Computational Costs : TS created in ' + str(ts_timecost) +
          ' seconds')
    print('		       DFA created in ' + str(dfa_timecost) + ' seconds')
    print('		       PA created in ' + str(pa_timecost) + ' seconds')
    print('		       Energy of PA states calculated in ' +
          str(energy_timecost) + ' seconds')

    return i_s, pa, pa_s, pa_t, pa2ts, energy_pa, rewards_pa, pick_up
Beispiel #60
0
def draw_graph(grph,
               filename,
               edge_labels=True,
               node_color='#AFAFAF',
               edge_color='#CFCFCF',
               plot=True,
               store=False,
               node_size=2000,
               node_shape='o',
               with_labels=True,
               arrows=True):
    """
    Draw a graph. This function will be removed in future versions.

    Parameters
    ----------
    grph : networkx.Graph
        A graph to draw.
    filename : string
        Path of the image file written when `store` is True.
    edge_labels : boolean
        Use nominal values of flow as edge labels.
    node_color : dict or string
        Hex color code or matplotlib color for each node. If string, all
        colors are the same.

    edge_color : string
        Hex color code or matplotlib color for edge color.

    plot : boolean
        Show matplotlib plot.

    store : boolean
        Save the plot to `filename`.

    node_size : integer
        Size of nodes.

    node_shape : string
        Matplotlib marker used for the nodes.

    with_labels : boolean
        Draw node labels.

    arrows : boolean
        Draw arrows on directed edges. Works only if an optimization_model has
        been passed.
    """
    if type(node_color) is dict:
        node_color = [node_color.get(g, '#AFAFAF') for g in grph.nodes()]

    # set drawing options
    options = {
        #'prog': 'dot',
        'with_labels': with_labels,
        'node_color': node_color,
        'edge_color': edge_color,
        'node_size': node_size,
        'node_shape': node_shape,
        'arrows': arrows
    }

    labeldict = {node: node.replace('_', '\n') for node in grph.nodes}

    # draw graph
    plt.figure(figsize=(12, 6))
    pos = nx.drawing.nx_agraph.graphviz_layout(grph,
                                               prog='dot',
                                               args="-Grankdir=LR")
    nx.draw(grph, pos=pos, labels=labeldict, **options)

    # add edge labels for all edges
    if edge_labels is True and plt:
        labels = nx.get_edge_attributes(grph, 'weight')
        nx.draw_networkx_edge_labels(grph, pos=pos, edge_labels=labels)

    if store is True:
        plt.savefig(filename, dpi=100, bbox_inches='tight')

    # show output
    if plot is True:
        plt.show()
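
# A minimal usage sketch for draw_graph; the graph, node names and filename are
# hypothetical, and it assumes networkx, matplotlib and a local pygraphviz/Graphviz
# install (required by graphviz_layout with prog='dot').
if __name__ == '__main__':
    import networkx as nx
    g = nx.DiGraph()
    g.add_edge('bus_gas', 'boiler', weight=42)   # hypothetical energy-system edges
    g.add_edge('boiler', 'bus_heat', weight=40)
    draw_graph(g, 'energy_system.png', edge_labels=True, store=True, plot=False)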