Example 1
def _get_random_test_setup(nstates):
    """

    Returns
    -------
    T : undirected networkx graph
        Edges are annotated with transition matrix P.
    root : integer
        Root node.
    root_distn : dict
        Probability distribution at the root.
    node_to_allowed_states : dict
        Map from node to set of allowed states.

    """
    # Sample a random tree.
    branching_distn = [0.7, 0.1, 0.1, 0.1]
    T = get_random_branching_tree(branching_distn, maxnodes=6)
    root = 0

    # For each edge on the tree,
    # sample a random sparse state transition matrix.
    for na, nb in nx.bfs_edges(T, root):
        T[na][nb]['P'] = _get_random_nx_transition_matrix(nstates)

    # Sample a root distribution.
    # It should be a little bit sparse, for testing.
    weights = np.random.exponential(size=nstates)
    imissing = np.random.randint(nstates)
    pairs = [(i, w) for i, w in enumerate(weights) if i != imissing]
    weights[imissing] = 0
    total_weight = np.sum(weights)
    root_distn = dict((i, w / total_weight) for i, w in pairs)

    # Sample allowed states at each node.
    # Disallow a random state at each node.
    states = range(nstates)
    node_to_allowed_states = dict((n, set(states)) for n in T)
    for n in T:
        imissing = np.random.randint(nstates)
        node_to_allowed_states[n].remove(imissing)

    # Final check on transition matrices on edges of T.
    for na, nb in nx.bfs_edges(T, root):
        edge_object = T[na][nb]
        P = edge_object.get('P', None)
        if P is None:
            raise Exception('internal error')

    # Return the random info for testing.
    return T, root, root_distn, node_to_allowed_states
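
A minimal, self-contained illustration of the edge-annotation pattern used above; the helpers get_random_branching_tree and _get_random_nx_transition_matrix are not shown here, so a placeholder stands in for the transition matrix:

import networkx as nx

T = nx.Graph([(0, 1), (0, 2), (1, 3)])
root = 0
for na, nb in nx.bfs_edges(T, root):
    T[na][nb]['P'] = 'transition matrix placeholder'
print(T.edges(data=True))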
Example 2
def purge(graph, seeds):
    top = top_nodes(graph)

    dead = set(seeds)
    alive = top.difference(dead)

    dead_tree = nx.DiGraph()
    for n in dead:
        dead_tree.add_edges_from(nx.bfs_edges(graph, n))
    alive_tree = nx.DiGraph()
    for n in alive:
        alive_tree.add_edges_from(nx.bfs_edges(graph, n))

    return set(dead_tree.nodes()).difference(alive_tree.nodes())
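
top_nodes is not shown; a plausible reading is that it returns the source nodes of a DAG. Under that assumption, a quick sketch of how purge behaves:

import networkx as nx

def top_nodes(graph):
    # assumed helper: the in-degree-zero sources of the DAG
    return {n for n, deg in graph.in_degree() if deg == 0}

g = nx.DiGraph([('a', 'b'), ('b', 'c'), ('x', 'c'), ('x', 'y')])
print(purge(g, seeds={'a'}))  # {'a', 'b'}; 'c' survives because 'x' also reaches it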
Example 3
def fix_face_winding(mesh):
    '''
    Traverse and change mesh faces in-place to make sure winding is coherent,
    i.e. that shared edges on adjacent faces run in opposite directions
    '''
    # we create the face adjacency graph: 
    # every node in g is an index of mesh.faces
    # every edge in g represents two faces which are connected
    graph_all = nx.from_edgelist(mesh.face_adjacency)
    flipped   = 0
    # we are going to traverse the graph using BFS, so we have to start
    # a traversal for every connected component
    for graph in nx.connected_component_subgraphs(graph_all):
        start = graph.nodes()[0]
        # we traverse every pair of faces in the graph
        # we modify mesh.faces and mesh.face_normals in place 
        for face_pair in nx.bfs_edges(graph, start):
            # for each pair of faces, we convert them into edges,
            # find the edge that both faces share, and then see if the edges
            # are reversed in order as you would expect in a well constructed mesh
            pair    = mesh.faces[[face_pair]]            
            edges   = faces_to_edges(pair)
            overlap = group_rows(np.sort(edges,axis=1), require_count=2)
            if len(overlap) == 0:
                # only happens on non-watertight meshes
                continue
            edge_pair = edges[[overlap[0]]]
            if edge_pair[0][0] == edge_pair[1][0]:
                # if the edges aren't reversed, invert the order of one of the faces
                flipped += 1
                mesh.faces[face_pair[1]] = mesh.faces[face_pair[1]][::-1]
    log.info('Flipped %d/%d edges', flipped, len(mesh.faces)*3)
Example 4
def get_expm_augmented_tree(T, root, Q_default=None):
    """
    Add transition probability matrices to edges.

    Construct the augmented tree by annotating each edge
    with the appropriate state transition probability matrix.

    Parameters
    ----------
    T : weighted undirected networkx graph
        This tree is possibly annotated with edge-specific
        rate matrices Q.
    root : integer
        Root node.
    Q_default : weighted directed networkx graph, optional
        Sparse rate matrix.

    Returns
    -------
    T_aug : weighted undirected networkx graph
        Tree annotated with transition probability matrices P.

    """
    T_aug = nx.Graph()
    for na, nb in nx.bfs_edges(T, root):
        edge = T[na][nb]
        weight = edge['weight']
        Q = edge.get('Q', Q_default)
        if Q is None:
            raise ValueError('no rate matrix is available for this edge')
        P = sparse_expm(Q, weight)
        T_aug.add_edge(na, nb, weight=weight, P=P)
    return T_aug
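
sparse_expm is not shown in this example; judging from the call, it exponentiates a rate matrix scaled by the branch weight. A dense scipy stand-in (an assumption, not the original helper) makes the call pattern concrete:

import networkx as nx
import numpy as np
from scipy.linalg import expm

def sparse_expm(Q, weight):
    # dense stand-in for the unshown helper
    return expm(Q * weight)

T = nx.Graph()
T.add_edge(0, 1, weight=0.5, Q=np.array([[-1.0, 1.0], [2.0, -2.0]]))
T_aug = get_expm_augmented_tree(T, root=0)
print(T_aug[0][1]['P'])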
Example 5
def get_edge_to_nxfset(T, edge_to_adjacency, root,
        root_prior_fset, node_to_data_fset):
    """
    For each edge, get the joint feasibility of states at edge endpoints.

    Parameters
    ----------
    {params}

    Returns
    -------
    edge_to_nxfset : map from directed edge to networkx DiGraph
        For each directed edge in the rooted tree report the networkx DiGraph
        among states, for which presence/absence of an edge defines the
        posterior feasibility of the corresponding state transition
        along the edge.

    """
    v_to_fset = get_node_to_fset(T, edge_to_adjacency, root,
            root_prior_fset, node_to_data_fset)
    edge_to_nxfset = {}
    for edge in nx.bfs_edges(T, root):
        A = edge_to_adjacency[edge]
        J = nx.DiGraph()
        va, vb = edge
        for sa in v_to_fset[va]:
            sbs = set(A[sa]) & v_to_fset[vb]
            J.add_edges_from((sa, sb) for sb in sbs)
        edge_to_nxfset[edge] = J
    return edge_to_nxfset
Example 6
def _forward(T, edge_to_P, root, v_to_subtree_partial_likelihoods):
    """
    Forward pass.

    Return a map from node to posterior state distribution.

    """
    root_partial_likelihoods = v_to_subtree_partial_likelihoods[root]
    v_to_posterior_distn = {}
    v_to_posterior_distn[root] = dict_distn(root_partial_likelihoods)
    for edge in nx.bfs_edges(T, root):
        va, vb = edge
        P = edge_to_P[edge]

        # For each parent state, compute the distribution over child states.
        distn = defaultdict(float)
        parent_distn = v_to_posterior_distn[va]
        for sa, pa in parent_distn.items():

            # Construct conditional transition probabilities.
            fset = set(P[sa]) & set(v_to_subtree_partial_likelihoods[vb])
            sb_weights = {}
            for sb in fset:
                a = P[sa][sb]['weight']
                b = v_to_subtree_partial_likelihoods[vb][sb]
                sb_weights[sb] = a * b
            sb_distn = dict_distn(sb_weights)

            # Add to the marginal distribution.
            for sb, pb in sb_distn.items():
                distn[sb] += pa * pb

        v_to_posterior_distn[vb] = dict(distn)

    return v_to_posterior_distn
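
dict_distn is an unshown helper; from its use here it plausibly normalizes a sparse dict of nonnegative weights into a distribution:

def dict_distn(weights):
    # assumed behavior: normalize dict values so they sum to 1
    total = sum(weights.values())
    return dict((k, v / total) for k, v in weights.items())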
Example 7
def _sample_states_preprocessed(T, edge_to_P, root,
        v_to_subtree_partial_likelihoods):
    """
    Jointly sample states on a tree.

    This variant requires subtree partial likelihoods.

    """
    root_partial_likelihoods = v_to_subtree_partial_likelihoods[root]
    n = root_partial_likelihoods.shape[0]
    if not root_partial_likelihoods.any():
        return None
    distn1d = normalized(root_partial_likelihoods)
    root_state = weighted_choice(n, p=distn1d)
    v_to_sampled_state = {root : root_state}
    for edge in nx.bfs_edges(T, root):
        va, vb = edge
        P = edge_to_P[edge]

        # For the relevant parent state,
        # compute an unnormalized distribution over child states.
        sa = v_to_sampled_state[va]

        # Construct conditional transition probabilities.
        sb_weights = P[sa] * v_to_subtree_partial_likelihoods[vb]

        # Sample the state.
        distn1d = normalized(sb_weights)
        v_to_sampled_state[vb] = weighted_choice(n, p=distn1d)

    return v_to_sampled_state
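
This array-based variant leans on two unshown helpers; plausible numpy stand-ins, stated as assumptions rather than the originals:

import numpy as np

def normalized(v):
    # assumed: scale a 1d array of nonnegative weights to a probability vector
    return v / v.sum()

def weighted_choice(n, p=None):
    # assumed: draw one of n states with probability vector p
    return np.random.choice(n, p=p)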
Example 8
def _sample_states_preprocessed(T, edge_to_P, root,
        v_to_subtree_partial_likelihoods):
    """
    Jointly sample states on a tree.

    This variant requires subtree partial likelihoods.

    """
    root_partial_likelihoods = v_to_subtree_partial_likelihoods[root]
    if not root_partial_likelihoods:
        return None
    v_to_sampled_state = {}
    v_to_sampled_state[root] = dict_random_choice(root_partial_likelihoods)
    for edge in nx.bfs_edges(T, root):
        va, vb = edge
        P = edge_to_P[edge]

        # For the relevant parent state,
        # compute an unnormalized distribution over child states.
        sa = v_to_sampled_state[va]

        # Construct conditional transition probabilities.
        fset = set(P[sa]) & set(v_to_subtree_partial_likelihoods[vb])
        sb_weights = {}
        for sb in fset:
            a = P[sa][sb]['weight']
            b = v_to_subtree_partial_likelihoods[vb][sb]
            sb_weights[sb] = a * b

        # Sample the state using the unnormalized dictionary of weights.
        v_to_sampled_state[vb] = dict_random_choice(sb_weights)

    return v_to_sampled_state
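
dict_random_choice is likewise unshown; a sketch consistent with "sample from an unnormalized dictionary of weights":

import random

def dict_random_choice(weights):
    # assumed: weighted draw from an unnormalized dict of weights
    keys = list(weights)
    return random.choices(keys, weights=[weights[k] for k in keys])[0]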
Example 9
 def compute_overlap(self):
     self.middle_modules = None
     self.mean_overlap = None
     if self.size() == 0:
         return
     self.middle_modules = list()
     for j in range(self.parameters.nodes_in):
         if j not in self:
             continue
         middle = set()
         for (u, v) in nx.bfs_edges(self, j):
             if self.parameters.is_middle(u):
                 middle.add(u)
             if self.parameters.is_middle(v):
                 middle.add(v)
         self.middle_modules.append(middle)
     num_in = len(self.middle_modules)
     if num_in == 0:
         return
     # compute overlap
     combinations = (num_in * (num_in - 1)) // 2
     middle_overlap = numpy.zeros(combinations)
     i = 0
     for j in range(num_in - 1):
         for k in range(j + 1, num_in):
             numerator = len(self.middle_modules[j].intersection(self.middle_modules[k]))
             denominator = float(len(self.middle_modules[j].union(self.middle_modules[k])))
             if denominator == 0.0:
                 middle_overlap[i] = numpy.nan
             else:
                 middle_overlap[i] = numerator / denominator
             i += 1
     middle_overlap = numpy.ma.masked_invalid(middle_overlap)
     self.mean_overlap = numpy.mean(middle_overlap[~middle_overlap.mask])
Example 10
def get_edge_to_fvec2d(*args):
    """
    For each edge, get the joint feasibility of states at edge endpoints.

    Parameters
    ----------
    {params}

    Returns
    -------
    edge_to_fvec2d : map from directed edge to networkx DiGraph
        For each directed edge in the rooted tree report the networkx DiGraph
        among states, for which presence/absence of an edge defines the
        posterior feasibility of the corresponding state transition
        along the edge.

    """
    args = validated_params(*args)
    T, edge_to_A, root, root_prior_fvec1d, node_to_data_fvec1d = args

    v_to_fvec1d = get_node_to_fvec1d(*args)
    edge_to_fvec2d = {}
    for edge in nx.bfs_edges(T, root):
        va, vb = edge
        A = edge_to_A[edge]
        fa = v_to_fvec1d[va]
        fb = v_to_fvec1d[vb]
        edge_to_fvec2d[edge] = A & np.outer(fa, fb)
    return edge_to_fvec2d
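
The expression A & np.outer(fa, fb) masks the allowed-transition matrix by joint endpoint feasibility; a tiny standalone check of that idiom:

import numpy as np

fa = np.array([True, False, True])   # feasible states at the parent endpoint
fb = np.array([True, True, False])   # feasible states at the child endpoint
A = np.ones((3, 3), dtype=bool)      # transitions allowed a priori
print(A & np.outer(fa, fb))          # True only where both endpoints are feasible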
Example 11
    def ordering_graph(self):
        """Ordering graph

        t1 --> t2 in the ordering graph indicates that t1 happens before t2.
        A missing edge simply means that it is not clear yet.

        """

        g = nx.DiGraph()

        # add times
        for t in self.nodes_iter():
            g.add_node(t)

        # add existing edges
        for t1, t2 in self.edges_iter():
            g.add_edge(t1, t2)

        # connect every pair of anchored times
        anchored = sorted(self.anchored())
        for t1, t2 in itertools.combinations(anchored, 2):
            g.add_edge(t1, t2)

        # connect every time with its sucessors
        _g = g.copy()
        for t1 in _g:
            for t2 in set([target for (_, target) in nx.bfs_edges(_g, t1)]):
                g.add_edge(t1, t2)

        return g
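
The final loop computes reachability closure by BFS from every node; on networkx versions that ship it, the built-in transitive closure should yield the same edge set (assumed equivalent here):

# assumed-equivalent closure step
g = nx.transitive_closure(g)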
Example 12
def km_random(g, k=5, m=3, start=None):
    """k nodes per breadth-first batch; m edges added and deleted per batch."""
    if start is None:
        start = next(iter(g.nodes()))
    bfList = list(nx.bfs_edges(g, start))
    bfList.reverse()
    bfList.append((start, start))
    tempk = []
    try:
        while bfList:
            for each in range(k):
                tempk.append(bfList.pop()[1])

            tg = nx.subgraph(g, tempk)
            e = del_edge(tg, m)
            g.remove_edges_from(e)

            tg = nx.subgraph(g, tempk)
            e = add_edge(tg, m)
            g.add_edges_from(e)

            tempk = []

    except IndexError:
        print("pop finishing")
Example 13
def resolve(relations, entities):
    """Resolve the coreference chains."""
    mentions = graphize(relations, entities)
    for start_entity, _ in mentions.selfloop_edges():  # should start be included too?
        chain = set(entity for edge in nx.bfs_edges(mentions, start_entity)
                    for entity in edge)
        yield chain if chain else set([start_entity])
Example 14
	def choose_initial_labeling(self):
		''' Choose a labeling of the local graph that respects the n-hop decision rule'''
		source_neighbors = self.neighbors(self.source)

		# Choose the next virtual source
		weights = []
		for neighbor in source_neighbors:
			subtree = dfs_tree(self, neighbor)
			leaves = [i for i in subtree.nodes() if (subtree.out_degree(i)==0 and \
													 not nx.has_path(subtree,self.source,i))]
			weights.append(len(leaves))
		weights = [float(i)/sum(weights) for i in weights]
		virtual_source = np.random.choice(source_neighbors, p=weights)
		
		# Relabel the nodes so that only the infected nodes have positive values
		label = 1
		mapping = {virtual_source: label}
		# Generate a directed tree emanating from the virtual source away from the source
		directed_self = dfs_tree(self, self.source)
		infected_subgraph = dfs_tree(directed_self, virtual_source)
		bfs_edges = list(nx.bfs_edges(infected_subgraph, virtual_source))
		# In this first timestep, we only need the direct neighbors of V.S. to be infected
		bfs_edges = [item[1] for item in bfs_edges if (item[1] != self.source and item[0] == virtual_source)]
		for edge in bfs_edges:
			label += 1
			mapping[edge] = label
		self = nx.relabel_nodes(self, mapping, copy=False)
Example 15
    def _color_by_mfpt(self, min1, T=1.):
        print("coloring by the mean first passage time to get to minimum", min1._id)
        # get a list of transition states in the same cluster as min1
        edges = nx.bfs_edges(self.graph, min1)
        transition_states = [ self.graph.get_edge_data(u, v)["ts"] for u, v in edges ]
        if not check_thermodynamic_info(transition_states):
            raise Exception("The thermodynamic information is not yet computed")

        
        # get an arbitrary second minimum2
        for ts in transition_states:
            if ts.minimum2 != min1:
                min2 = ts.minimum2
                break
        A = [min1]
        B = [min2]
        rcalc = RatesLinalg(transition_states, A, B, T=T)
        rcalc.compute_rates()
        mfptimes = rcalc.get_mfptimes()
        tmax = max(mfptimes.values())
        def get_mfpt(m):
            try:
                return mfptimes[m]
            except KeyError:
                return tmax
        self.dg.color_by_value(get_mfpt)
        self.redraw_disconnectivity_graph()
Example 16
    def chooseplan(self, costfunc=None):
        """Return a join sequence object based on the join graph.  This one is
        simple -- it just adds the joins according to a breadth first search"""
        # choose first node in the original insertion order (networkx does not
        # guarantee even a deterministic order)
        firstnode = [n for n in self.joingraph.nodes() if n.originalorder == 0][0]

        # get a BFS ordering of the edges.  Ignores costs.
        edgesequence = [x for x in nx.bfs_edges(self.joingraph, firstnode)]

        LOG.debug("BFS: edgesequence: %s", edgesequence)

        # Make it deterministic but still in BFS order
        deterministic_edge_sequence = []
        while len(edgesequence) > 0:
            # Consider all edges that have the same first node -- these are all
            # "ties" in BFS order.
            firstx = edgesequence[0][0]
            new_edges = [(x, y) for (x, y) in edgesequence if x == firstx]
            # Sort edges on the originalorder of the source and destination
            deterministic_edge_sequence.extend(
                sorted(new_edges,
                       key=lambda e: (e[0].originalorder, e[1].originalorder)))
            # Remove all those edges from edgesequence
            edgesequence = [(x, y) for (x, y) in edgesequence if x != firstx]

        LOG.debug("BFS: deterministic edge seq: %s", deterministic_edge_sequence)

        # Generate a concrete sequence of terms with conditions properly
        # adjusted
        joinsequence = self.toJoinSequence(deterministic_edge_sequence)
        LOG.debug("BFS: joinsequence: %s", joinsequence)
        return joinsequence
Example 17
def firstCommonAncestors(dag, synListsSet):
	"""Return the list of first common ancestors."""
	i = 0
	nodesAndDistance = dict()

	for synList in synListsSet:
		for a, b in nx.bfs_edges(dag, synList, reverse=True):
			dag[b][i] = True
		i += 1

	for node in dag.nodes():
		test = True
		for key in range(i):
			if key not in dag[node]:
				test = False
		if test:
			nodesAndDistance[node] = len(nx.shortest_path(dag, target=node))

	maxDistance = max(nodesAndDistance.values())

	for e in list(nodesAndDistance.keys()):
		if nodesAndDistance[e] < maxDistance:
			del nodesAndDistance[e]

	for key in range(i):
		removeKey(dag, key)

	return list(nodesAndDistance.keys())
Example 18
	def __second_order_bad_neighbour(self):  
		
		for node in self.OG.nodes():	
			#calculate the weighted sum of "bad neighbour in" score
			bfs_edge_list = list(nx.bfs_edges(self.OG, node))
			sumedgein = self.__get_sum_of_edge_in(bfs_edge_list)
			sumedgeout = self.__get_sum_of_edge_out(bfs_edge_list)
			total_in_pressure = 0.0
			total_out_pressure = 0.0

			for edge in bfs_edge_list:
				# weight * bni-score 
				# ignore if from node
				try:
					# put a test here??
					bni_score = self.OG.node[edge[1]]['bad_neighbour_in']
					in_weight = self.OG[edge[1]][node]['weight'] / sumedgein
					# get correct sumedge (bfs tree again)
					bni_score = self.__clean_bni(bni_score, node, edge[1])
					total_in_pressure += in_weight * bni_score
					

					bno_score = self.OG.node[edge[1]]['bad_neighbour_out']
					out_weight = self.OG[node][edge[1]]['weight'] / sumedgeout
					total_out_pressure += out_weight * bno_score
				except:
					total_out_pressure += 0
				

			self.OG.node[node]['bad_neighbour_in2'] = total_in_pressure
			self.OG.node[node]['bad_neighbour_out2'] = total_out_pressure
Example 19
 def information_diffusion(self, num_nodes, beta):
     # 		patients_zero = [random.randint(0,num_nodes) for r in xrange(beta)]
     # 		for i in patients_zero:
     # 			self.network.node[i]['color'] = 1
     # 		print patients_zero
     # 		for i in patients_zero:
     # 			for j in self.network.neighbors(i):
     root_node = random.randint(0, num_nodes - 1)
     self.network.node[root_node]["color"] = 1
     ordered_edges = list(nx.bfs_edges(self.network, root_node))
     print(ordered_edges)
     t_name = "plots/file_name"
     count = 0
     for i in ordered_edges:
         count = count + 1
         # 			print self.network.node[i[0]]['color']==1,  self.network.node[i[1]]['color']==0
         if self.network.node[i[0]]["color"] == 1 and self.network.node[i[1]]["color"] == 0:
             # 				probability =100* self.network.node[i[1]]['mew_final']*self.network.edge[i[0]][i[1]]['gossip']
             probability = random.random()
             print(i, probability)
             if probability > beta:
                 # 					print "hello from other side"
                 self.network.node[i[1]]["color"] = 1
         if count % 100 == 0:
             name = t_name + str(count) + ".gml"
             nx.write_gml(self.network, name)
Example 20
def get_expm_augmented_tree(T, root, Q_default=None):
    """
    Add transition probability matrices to edges.

    Construct the augmented tree by annotating each edge
    with the appropriate state transition probability matrix.

    Parameters
    ----------
    T : weighted undirected networkx graph
        This tree is possibly annotated with edge-specific
        rate matrices Q.
    root : integer
        Root node.
    Q_default : 2d ndarray, optional
        Default rate matrix.

    Returns
    -------
    T_aug : weighted undirected networkx graph
        Tree annotated with transition probability matrices P.

    """
    T_aug = nx.Graph()
    for na, nb in nx.bfs_edges(T, root):
        edge = T[na][nb]
        weight = edge['weight']
        Q = edge.get('Q', Q_default)
        _density.check_square_dense(Q)
        P = custom_expm(Q, weight)
        T_aug.add_edge(na, nb, weight=weight, P=P)
    return T_aug
Example 21
def reordered_nodes_and_edges(nodes, edges):
    """
    Find an order palatable to rtaln.

    The rtaln interface requires nodes to be ordered breadth first.
    The edges are ordered according to the index of the tail node of the edge.
    Ideally the host will call this function and use the recommended ordering,
    rather than having to also invert the ordering of the rtaln output.

    """
    T = nx.DiGraph()
    T.add_nodes_from(nodes)
    T.add_edges_from(edges)
    in_deg = T.in_degree()
    roots = [node for node in T if not in_deg[node]]
    if len(roots) != 1:
        raise Exception('expected exactly one root node (in-degree 0)')
    root = roots[0]

    # Over-write the ordered list of edges, using a breadth first ordering.
    # Order the nodes according to the tail nodes of the ordered edges.
    edges = list(nx.bfs_edges(T, root))
    if len(edges) + 1 != len(nodes):
        raise Exception('expected the number of nodes to be one more '
                'than the number of edges')
    nodes = [root] + [tail for head, tail in edges]
    return nodes, edges
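
A small smoke test of the reordering on hypothetical inputs:

import networkx as nx

nodes = ['a', 'b', 'c', 'd']
edges = [('a', 'b'), ('a', 'c'), ('c', 'd')]
print(reordered_nodes_and_edges(nodes, edges))
# (['a', 'b', 'c', 'd'], [('a', 'b'), ('a', 'c'), ('c', 'd')])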
Example 22
def get_history_log_likelihood(T, root, node_to_state,
        root_distn=None, P_default=None):
    """
    Compute the log likelihood for a fully augmented history.

    Parameters
    ----------
    T : undirected acyclic networkx graph
        Tree optionally annotated with transition matrices.
    root : integer
        Root node.
    node_to_state : dict
        Each node in the tree is mapped to an integer state.
    root_distn : dict, optional
        Sparse prior distribution over states at the root.
    P_default : weighted directed networkx graph, optional
        A default universal probability transition matrix.

    Returns
    -------
    log_likelihood : float
        The log likelihood of the fully augmented history.

    """
    # Input validation.
    bad = set(T) - set(node_to_state)
    if bad:
        raise ValueError(
                'to compute the history log likelihood all nodes in the tree '
                'must have a known state, but this state has not been '
                'provided for the following nodes: ' + str(sorted(bad)))

    # Initialize the log likelihood.
    log_likelihood = 0

    # Add the log likelihood contribution from the root.
    root_state = node_to_state[root]
    if root_distn is not None:
        if root_state not in root_distn:
            raise StructuralZeroProb('zero prior for the root')
        log_likelihood += np.log(root_distn[root_state])

    # Add the log likelihood contribution from state transitions.
    for na, nb in nx.bfs_edges(T, root):
        edge = T[na][nb]
        P = edge.get('P', P_default)
        if P is None:
            raise ValueError('undefined transition matrix on this edge')
        sa = node_to_state[na]
        sb = node_to_state[nb]
        if not P.has_edge(sa, sb):
            raise StructuralZeroProb(
                    'the states of the endpoints of an edge '
                    'are incompatible with the transition matrix on the edge')
        p = P[sa][sb]['weight']
        log_likelihood += np.log(p)

    # Return the log likelihood.
    return log_likelihood
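
A minimal worked call, assuming StructuralZeroProb is an exception class defined elsewhere in the module:

import numpy as np
import networkx as nx

class StructuralZeroProb(Exception):
    # stand-in for the module's exception class
    pass

P = nx.DiGraph()
P.add_edge(0, 1, weight=0.25)
T = nx.Graph()
T.add_edge('r', 'x', P=P)
ll = get_history_log_likelihood(T, 'r', {'r': 0, 'x': 1}, root_distn={0: 1.0})
print(np.exp(ll))  # 0.25: the single transition probability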
Example 23
def fix_winding(mesh):
    """
    Traverse and change mesh faces in-place to make sure winding
    is correct, with edges on adjacent faces in
    opposite directions.

    Parameters
    -------------
    mesh: Trimesh object

    Alters
    -------------
    mesh.faces: will reverse the vertex order of certain faces
    """
    # anything we would fix is already done
    if mesh.is_winding_consistent:
        return

    graph_all = nx.from_edgelist(mesh.face_adjacency)
    flipped = 0

    faces = mesh.faces.view(np.ndarray).copy()

    # we are going to traverse the graph using BFS
    # start a traversal for every connected component
    for components in nx.connected_components(graph_all):
        # get a subgraph for this component
        g = graph_all.subgraph(components)
        # get the first node in the graph in a way that works on nx's
        # new API and their old API
        start = next(iter(g.nodes()))

        # we traverse every pair of faces in the graph
        # we modify mesh.faces and mesh.face_normals in place
        for face_pair in nx.bfs_edges(g, start):
            # for each pair of faces, we convert them into edges,
            # find the edge that both faces share and then see if edges
            # are reversed in order as you would expect
            # (2, ) int
            face_pair = np.ravel(face_pair)
            # (2, 3) int
            pair = faces[face_pair]
            # (6, 2) int
            edges = faces_to_edges(pair)
            overlap = group_rows(np.sort(edges, axis=1),
                                 require_count=2)
            if len(overlap) == 0:
                # only happens on non-watertight meshes
                continue
            edge_pair = edges[overlap[0]]
            if edge_pair[0][0] == edge_pair[1][0]:
                # if the edges aren't reversed, invert the order of one face
                flipped += 1
                faces[face_pair[1]] = faces[face_pair[1]][::-1]

    if flipped > 0:
        mesh.faces = faces

    log.debug('flipped %d/%d edges', flipped, len(mesh.faces) * 3)
Example 24
def get_expm_augmented_tree(T, root, P_callback=None):
    T_aug = nx.Graph()
    for na, nb in nx.bfs_edges(T, root):
        edge = T[na][nb]
        weight = edge['weight']
        P = P_callback(weight)
        T_aug.add_edge(na, nb, weight=weight, P=P)
    return T_aug
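
Here the matrix exponential is delegated to a callback; a sketch wiring it to scipy's expm:

import numpy as np
import networkx as nx
from scipy.linalg import expm

Q = np.array([[-1.0, 1.0],
              [1.0, -1.0]])
T = nx.Graph()
T.add_edge(0, 1, weight=0.5)
T_aug = get_expm_augmented_tree(T, 0, P_callback=lambda t: expm(Q * t))
print(T_aug[0][1]['P'])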
Example 25
def bflist(g,source):
    bfli=[]
    bfli.append(source)
    for each in nx.bfs_edges(g,source):
        #print each
        bfli.append(each[1])
    #print bflist
    return bfli
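
The same breadth-first node ordering can be written more compactly:

def bflist(g, source):
    return [source] + [v for _, v in nx.bfs_edges(g, source)]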
Example 26
def fix_normals(mesh):
    '''
    Find and fix problems with mesh.face_normals and mesh.faces winding direction.
    
    For face normals ensure that vectors are consistently pointed outwards,
    and that mesh.faces is wound in the correct direction for all connected components.
    '''
    mesh.generate_face_normals()
    # we create the face adjacency graph: 
    # every node in g is an index of mesh.faces
    # every edge in g represents two faces which are connected
    graph = nx.from_edgelist(mesh.face_adjacency())
    
    # we are going to traverse the graph using BFS, so we have to start
    # a traversal for every connected component
    for connected in nx.connected_components(graph):
        # we traverse every pair of faces in the graph
        # we modify mesh.faces and mesh.face_normals in place 
        for face_pair in nx.bfs_edges(graph, connected[0]):
            # for each pair of faces, we convert them into edges,
            # find the edge that both faces share, and then see if the edges
            # are reversed in order as you would expect in a well constructed mesh
            pair      = mesh.faces[[face_pair]]
            edges     = faces_to_edges(pair, sort=False)
            overlap   = group_rows(np.sort(edges,axis=1), require_count=2)
            edge_pair = edges[[overlap[0]]]
            reversed  = edge_pair[0][0] != edge_pair[1][0]
            if reversed: continue
            # if the edges aren't reversed, invert the order of one of the faces
            # and negate its normal vector
            mesh.faces[face_pair[1]] = mesh.faces[face_pair[1]][::-1]
            mesh.face_normals[face_pair[1]] *= (reversed*2) - 1
            
        # the normals of every connected face now all pointed in 
        # the same direction, but there is no guarantee that they aren't all
        # pointed in the wrong direction
        faces           = mesh.faces[[connected]]
        faces_x         = np.min(mesh.vertices[:,0][[faces]], axis=1)
        left_order      = np.argsort(faces_x)
        left_values     = faces_x[left_order]
        left_candidates = np.abs(left_values - left_values[0]) < TOL_ZERO
        backwards       = None
        
        # note that we have to find a face which ISN'T perpendicular to the x axis 
        # thus we go through all the candidate faces that are at the extreme left
        # until we find one that has a nonzero dot product with the x axis
        for leftmost in left_order[left_candidates]:                
            face_dot = np.dot([-1.0,0,0], mesh.face_normals[leftmost]) 
            if abs(face_dot) > TOL_ZERO: 
                backwards = face_dot < 0.0
                break
        if backwards: mesh.face_normals[[connected]] *= -1.0
        
        winding_tri  = connected[0]
        winding_test = np.diff(mesh.vertices[[mesh.faces[winding_tri]]], axis=0)
        winding_dir  = np.dot(unitize(np.cross(*winding_test)), mesh.face_normals[winding_tri])
        if winding_dir < 0: mesh.faces[[connected]] = np.fliplr(mesh.faces[[connected]])
Example 27
 def get_all_children(self, node):
     det = ""
     children = ""
     for edges in list(nx.bfs_edges(self.G, node)):
         if edges[1].attribute in ('det',):
             det = edges[1].word
         else:
             children = children + edges[1].word + " "
     return det, children
Example 28
def get_state_segmentation(G_in):
    """
    Segment the tree according to state.
    This does not use the branch lengths or the layout.
    @param G_in: undirected graph with state annotation on edges
    @return: segment_isostate_list, isostate_to_parity
    """

    # get leaf vertices
    # pick an arbitrary leaf as a distinguished (root) vertex
    vertices = list(G_in)
    leaves = sorted(v for v in vertices if len(G_in.neighbors(v)) == 1)
    root = leaves[0]

    # Build a directed breadth first tree starting at the distinguished vertex.
    # Note that the tree built by nx.bfs_tree and the edges yielded
    # by nx.bfs_edges do not retain the edge attributes.
    G_dag = nx.bfs_tree(G_in, root)

    # initialize the tree of isostate adjacencies
    G_isostate = nx.Graph()

    # Each contig is defined by a set of edges.
    root_state = G_in[root][G_dag.successors(root)[0]]['state']
    root_edge_list = []
    root_contig_index = 0
    contig_states = [root_state]
    contig_edge_lists = [root_edge_list]
    vertex_to_contig_index = {root : root_contig_index}
    for node in nx.topological_sort(G_dag):
        ci = vertex_to_contig_index[node]
        ci_state = contig_states[ci]
        successors = G_dag.successors(node)
        for v in successors:
            state = G_in[node][v]['state']
            if state == ci_state:
                contig_edge_lists[ci].append((node, v))
                vertex_to_contig_index[v] = ci
            else:
                ci_next = len(contig_states)
                G_isostate.add_edge(ci, ci_next)
                vertex_to_contig_index[v] = ci_next
                contig_states.append(state)
                contig_edge_lists.append([(node, v)])

    # Convert the G_isostate graph into a map from
    # isostate labels to parities.
    isostate_to_parity = {0 : 0}
    for va, vb in nx.bfs_edges(G_isostate, 0):
        isostate_to_parity[vb] = 1 - isostate_to_parity[va]
    
    # Get the isostate label associated with each edge.
    va_vb_isostate_list = []
    for isostate_label, edge_list in enumerate(contig_edge_lists):
        for va, vb in edge_list:
            va_vb_isostate_list.append((va, vb, isostate_label))
    return va_vb_isostate_list, isostate_to_parity
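
The parity assignment near the end is a BFS two-coloring of the isostate tree; isolated for clarity:

import networkx as nx

G_isostate = nx.path_graph(4)
isostate_to_parity = {0: 0}
for va, vb in nx.bfs_edges(G_isostate, 0):
    isostate_to_parity[vb] = 1 - isostate_to_parity[va]
print(isostate_to_parity)  # {0: 0, 1: 1, 2: 0, 3: 1}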
Example 29
 def daisyChainBFS(self):    
     currentGraph = self.prgraph.graph.copy()
     while True: 
         oldGraph = currentGraph
         for edge in nx.bfs_edges(currentGraph, self.source):
             yield edge[1]   
         currentGraph = self.prgraph.graph.copy()
         if currentGraph.nodes() == oldGraph.nodes():
             return
Example 30
def total_infection(g, source=None):
    '''
    Runs total infection using BFS

    params
    ------
    g: input graph
    source: node to start infection from

    returns
    ------
    iterations for animation
    '''

    # create dictionary of nodes to store state for animation
    infected = OrderedDict()
    for node in g.nodes():
        infected[node] = False

    g = g.to_undirected()

    # create generator for iterating on edges of
    # bfs search (created by networkx)
    if source is None:
        source = pick_source(g)
    edges_generator = nx.bfs_edges(g, source)

    # store initial state
    iterations = [list(infected.items())]

    # infect source
    g.nodes[source]['data'].version = 'new'
    infected[source] = True
    iterations.append(list(infected.items()))

    for n1, n2 in edges_generator:
        print(n1, n2)
        g.nodes[n2]['data'].version = 'new'
        infected[n2] = True
        iterations.append(list(infected.items()))

    return iterations
Example 31
def all_pairs_lowest_common_ancestor(G, pairs=None):
    """Compute the lowest common ancestor for pairs of nodes.

    Parameters
    ----------
    G : NetworkX directed graph

    pairs : iterable of pairs of nodes, optional (default: all pairs)
        The pairs of nodes of interest.
        If None, will find the LCA of all pairs of nodes.

    Returns
    -------
    An iterator over ((node1, node2), lca) where (node1, node2) are
    the pairs specified and lca is a lowest common ancestor of the pair.
    Note that for the default of all pairs in G, we consider
    unordered pairs, e.g. you will not get both (b, a) and (a, b).

    Notes
    -----
    Only defined on non-null directed acyclic graphs.

    Uses the $O(n^3)$ ancestor-list algorithm from:
    M. A. Bender, M. Farach-Colton, G. Pemmasani, S. Skiena, P. Sumazin.
    "Lowest common ancestors in trees and directed acyclic graphs."
    Journal of Algorithms, 57(2): 75-94, 2005.

    See Also
    --------
    tree_all_pairs_lowest_common_ancestor
    lowest_common_ancestor
    """
    if not nx.is_directed_acyclic_graph(G):
        raise nx.NetworkXError("LCA only defined on directed acyclic graphs.")
    elif len(G) == 0:
        raise nx.NetworkXPointlessConcept("LCA meaningless on null graphs.")
    elif None in G:
        raise nx.NetworkXError("None is not a valid node.")

    # The copy isn't ideal, neither is the switch-on-type, but without it users
    # passing an iterable will encounter confusing errors, and itertools.tee
    # does not appear to handle builtin types efficiently (i.e., it materializes
    # another buffer rather than just creating list iterators at the same
    # offset). The Python documentation notes use of tee is unadvised when one
    # is consumed before the other.
    #
    # This will always produce correct results and avoid unnecessary
    # copies in many common cases.
    #
    if not isinstance(pairs, (Mapping, Set)) and pairs is not None:
        pairs = set(pairs)

    # Convert G into a dag with a single root by adding a node with edges to
    # all sources iff necessary.
    sources = [n for n, deg in G.in_degree if deg == 0]
    if len(sources) == 1:
        root = sources[0]
        super_root = None
    else:
        G = G.copy()
        super_root = root = generate_unique_node()
        for source in sources:
            G.add_edge(root, source)

    # Start by computing a spanning tree, and the DAG of all edges not in it.
    # We will then use the tree lca algorithm on the spanning tree, and use
    # the DAG to figure out the set of tree queries necessary.
    spanning_tree = nx.dfs_tree(G, root)
    dag = nx.DiGraph((u, v) for u, v in G.edges
                     if u not in spanning_tree or v not in spanning_tree[u])

    # Ensure that both the dag and the spanning tree contains all nodes in G,
    # even nodes that are disconnected in the dag.
    spanning_tree.add_nodes_from(G)
    dag.add_nodes_from(G)

    counter = count()

    # Necessary to handle graphs consisting of a single node and no edges.
    root_distance = {root: next(counter)}

    for edge in nx.bfs_edges(spanning_tree, root):
        for node in edge:
            if node not in root_distance:
                root_distance[node] = next(counter)

    # Index the position of all nodes in the Euler tour so we can efficiently
    # sort lists and merge in tour order.
    euler_tour_pos = {}
    for node in nx.depth_first_search.dfs_preorder_nodes(G, root):
        if node not in euler_tour_pos:
            euler_tour_pos[node] = next(counter)

    # Generate the set of all nodes of interest in the pairs.
    pairset = set()
    if pairs is not None:
        pairset = set(chain.from_iterable(pairs))

    for n in pairset:
        if n not in G:
            msg = f"The node {str(n)} is not in the digraph."
            raise nx.NodeNotFound(msg)

    # Generate the transitive closure over the dag (not G) of all nodes, and
    # sort each node's closure set by order of first appearance in the Euler
    # tour.
    ancestors = {}
    for v in dag:
        if pairs is None or v in pairset:
            my_ancestors = nx.dag.ancestors(dag, v)
            my_ancestors.add(v)
            ancestors[v] = sorted(my_ancestors, key=euler_tour_pos.get)

    def _compute_dag_lca_from_tree_values(tree_lca, dry_run):
        """Iterate through the in-order merge for each pair of interest.

        We do this to answer the user's query, but it is also used to
        avoid generating unnecessary tree entries when the user only
        needs some pairs.
        """
        for (node1, node2) in pairs if pairs is not None else tree_lca:
            best_root_distance = None
            best = None

            indices = [0, 0]
            ancestors_by_index = [ancestors[node1], ancestors[node2]]

            def get_next_in_merged_lists(indices):
                """Returns index of the list containing the next item

                Next order refers to the merged order.
                Index can be 0 or 1 (or None if exhausted).
                """
                index1, index2 = indices
                if index1 >= len(ancestors[node1]) and index2 >= len(
                        ancestors[node2]):
                    return None
                elif index1 >= len(ancestors[node1]):
                    return 1
                elif index2 >= len(ancestors[node2]):
                    return 0
                elif (euler_tour_pos[ancestors[node1][index1]] <
                      euler_tour_pos[ancestors[node2][index2]]):
                    return 0
                else:
                    return 1

            # Find the LCA by iterating through the in-order merge of the two
            # nodes of interests' ancestor sets. In principle, we need to
            # consider all pairs in the Cartesian product of the ancestor sets,
            # but by the restricted min range query reduction we are guaranteed
            # that one of the pairs of interest is adjacent in the merged list
            # iff one came from each list.
            i = get_next_in_merged_lists(indices)
            cur = ancestors_by_index[i][indices[i]], i
            while i is not None:
                prev = cur
                indices[i] += 1
                i = get_next_in_merged_lists(indices)
                if i is not None:
                    cur = ancestors_by_index[i][indices[i]], i

                    # Two adjacent entries must not be from the same list
                    # in order for their tree LCA to be considered.
                    if cur[1] != prev[1]:
                        tree_node1, tree_node2 = prev[0], cur[0]
                        if (tree_node1, tree_node2) in tree_lca:
                            ans = tree_lca[tree_node1, tree_node2]
                        else:
                            ans = tree_lca[tree_node2, tree_node1]
                        if not dry_run and (best is None or root_distance[ans]
                                            > best_root_distance):
                            best_root_distance = root_distance[ans]
                            best = ans

            # If the LCA is super_root, there is no LCA in the user's graph.
            if not dry_run and (super_root is None or best != super_root):
                yield (node1, node2), best

    # Generate the spanning tree lca for all pairs. This doesn't make sense to
    # do incrementally since we are using a linear time offline algorithm for
    # tree lca.
    if pairs is None:
        # We want all pairs so we'll need the entire tree.
        tree_lca = dict(
            tree_all_pairs_lowest_common_ancestor(spanning_tree, root))
    else:
        # We only need the merged adjacent pairs by seeing which queries the
        # algorithm needs then generating them in a single pass.
        tree_lca = defaultdict(int)
        for _ in _compute_dag_lca_from_tree_values(tree_lca, True):
            pass

        # Replace the bogus default tree values with the real ones.
        for (pair, lca) in tree_all_pairs_lowest_common_ancestor(
                spanning_tree, root, tree_lca):
            tree_lca[pair] = lca

    # All precomputations complete. Now we just need to give the user the pairs
    # they asked for, or all pairs if they want them all.
    return _compute_dag_lca_from_tree_values(tree_lca, False)
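
Usage of the public entry point on a small diamond-shaped DAG:

import networkx as nx

G = nx.DiGraph([(0, 1), (0, 2), (1, 3), (2, 3)])
print(dict(nx.all_pairs_lowest_common_ancestor(G, pairs=[(1, 2), (3, 2)])))
# {(1, 2): 0, (3, 2): 2}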
Example 32
def cluster_nodes_cdhit(
        G,
        nodes,
        outdir,
        id=0.95,
        dna=False,
        s=0.0,  # length difference cutoff (%), default 0.0
        aL=0.0,  # alignment coverage for the longer sequence
        AL=99999999,  # alignment coverage control for the longer sequence
        aS=0.0,  # alignment coverage for the shorter sequence
        AS=99999999,  # alignment coverage control for the shorter sequence
        accurate=True,  # use the slower but more accurate options
        use_local=False,  #whether to use local or global sequence alignment
        strand=1,  # default do both +/+ & +/- alignments if set to 0, only +/+
        quiet=False,
        prevent_para=True,
        n_cpu=1):

    # create the files we will need
    temp_input_file = tempfile.NamedTemporaryFile(delete=False, dir=outdir)
    temp_input_file.close()
    temp_output_file = tempfile.NamedTemporaryFile(delete=False, dir=outdir)
    temp_output_file.close()

    with open(temp_input_file.name, 'w') as outfile:
        for node in nodes:
            outfile.write(">" + str(node) + "\n")
            if dna:
                outfile.write(G.nodes[node]["dna"][G.nodes[node]['maxLenId']])
            else:
                outfile.write(
                    G.nodes[node]["protein"][G.nodes[node]['maxLenId']])

    # run cd-hit
    if dna:
        run_cdhit_est(input_file=temp_input_file.name,
                      output_file=temp_output_file.name,
                      id=id,
                      s=s,
                      aL=aL,
                      AL=AL,
                      aS=AS,
                      accurate=accurate,
                      use_local=use_local,
                      strand=strand,
                      quiet=quiet,
                      n_cpu=n_cpu)
    else:
        run_cdhit(input_file=temp_input_file.name,
                  output_file=temp_output_file.name,
                  id=id,
                  s=s,
                  aL=aL,
                  AL=AL,
                  aS=AS,
                  accurate=accurate,
                  use_local=use_local,
                  quiet=quiet,
                  n_cpu=n_cpu)

    # process the output
    clusters = []
    with open(temp_output_file.name + ".clstr", 'r') as infile:
        c = []
        for line in infile:
            if line[0] == ">":
                clusters.append(c)
                c = []
            else:
                c.append(int(line.split(">")[1].split("...")[0]))
        clusters.append(c)
    clusters = clusters[1:]

    # optionally split clusters to ensure we don't collapse paralogs
    if prevent_para:
        nodes = list(nodes)
        # set up node to cluster dict
        cluster_dict = {}
        for i, c in enumerate(clusters):
            for n in c:
                cluster_dict[n] = i

        # set up subgraph and new_cluster dict
        sub_G = G.subgraph(nodes)
        if not nx.is_connected(sub_G):
            raise ValueError("Sub graph is not connected!")

        new_clusters = defaultdict(list)

        # ref node with max size and degree > 2
        ref_node = nodes[0]
        for n in nodes[1:]:
            if sub_G.degree[n] > 2:
                if sub_G.nodes[n]['size'] >= sub_G.nodes[ref_node]['size']:
                    ref_node = n

        # nodes in Breadth First Search order
        nodes_BFS = [ref_node] + [v for u, v in nx.bfs_edges(sub_G, ref_node)]

        # iterate through making new clusters that satisfy conditions
        for node in nodes_BFS:
            c1 = cluster_dict[node]
            if len(new_clusters[c1]) < 1:
                new_clusters[c1].append([node])
            else:
                # try and add to first valid cluster
                found = False
                for i, c2 in enumerate(new_clusters[c1]):
                    if is_valid(G, node, c2):
                        new_clusters[c1][i].append(node)
                        found = True
                        break
                if not found:
                    # create a new cluster
                    new_clusters[c1].append([node])

        # collapse dictionary into original list format
        clusters = []
        for c1 in new_clusters:
            for c2 in new_clusters[c1]:
                clusters.append(c2)

    # check all nodes are accounted for
    clust_node_set = set([item for sublist in clusters for item in sublist])
    for node in nodes:
        if node not in clust_node_set:
            print("nodes:", nodes)
            print("clust_node_set:", clust_node_set)
            raise ValueError('Clusters are missing a node!')

    # remove temporary files
    os.remove(temp_input_file.name)
    os.remove(temp_output_file.name)
    os.remove(temp_output_file.name + ".clstr")

    return clusters
Example 33
def EliminateOneEntryInColumn(party_map, steiner_tree_total, sub_steiner_trees,
                              sub_trees_root_node, terminal_nodes, q):
    '''
    See chapter 4.1, arXiv:1904.01972
    '''
    flag_debug = 0

    operation_CNOT = []

    for i in range(len(sub_steiner_trees)):
        '''if root node of a sub Steiner tree is not the biggest'''
        current_tree = sub_steiner_trees[i]
        current_root = sub_trees_root_node[i]
        leaf_nodes = FindAllLeafNodesInTree(current_tree)
        if current_root < max(leaf_nodes):
            #print('testing')
            add_operations = EliminateOneEntryInColumn(party_map,
                                                       steiner_tree_total,
                                                       [steiner_tree_total],
                                                       [max(terminal_nodes)],
                                                       terminal_nodes, q)
            operation_CNOT.extend(add_operations)
            return operation_CNOT
        '''else'''

    for i in range(len(sub_steiner_trees)):
        tree = sub_steiner_trees[i]
        root_node = sub_trees_root_node[i]
        R_operation = []
        shortest_length_tree = dict(
            nx.shortest_path_length(tree,
                                    source=None,
                                    target=None,
                                    weight=None,
                                    method='dijkstra'))
        BFS_edges = list(nx.bfs_edges(tree, source=root_node))
        BFS_edges.reverse()
        if flag_debug == 1: print('BFS edges are', BFS_edges)
        for edge in BFS_edges:
            if shortest_length_tree[edge[0]][root_node] < shortest_length_tree[
                    edge[1]][root_node]:
                control_q = q[edge[0]]
                target_q = q[edge[1]]
            else:
                control_q = q[edge[1]]
                target_q = q[edge[0]]
            R_operation.append(OperationCNOT(control_q, target_q))
        '''calculate R_prime'''
        R_prime = copy.deepcopy(R_operation)
        '''delete redundancy of R_prime'''
        for operation in copy.copy(R_prime):
            if operation.control_qubit[1] == root_node:
                R_prime.remove(operation)

        #R_prime.pop()
        R_prime.reverse()
        '''calculate R_star'''
        add_R_star = []
        leaf_nodes = FindAllLeafNodesInTree(tree)
        R_star = copy.deepcopy(R_operation) + copy.deepcopy(R_prime)
        for operation in copy.copy(R_star):
            if (operation.target_qubit[1]
                    in terminal_nodes) and (operation.target_qubit[1]
                                            in leaf_nodes):
                R_star.remove(operation)
            '''This part is not correct, but it can be used to estimate the added CNOT count'''
            if (operation.target_qubit[1] in terminal_nodes) and (
                    not operation.target_qubit[1] in leaf_nodes):
                add_R_star.append(
                    OperationCNOT(operation.target_qubit,
                                  operation.control_qubit))
        add_R_star_copy = copy.deepcopy(add_R_star)
        add_R_star_copy.reverse()
        if add_R_star != []: R_star = add_R_star + R_star + add_R_star_copy
        '''total operation'''
        R_total = R_operation + R_prime + R_star
        operation_CNOT.extend(R_total)
        if flag_debug == 1: print('R_operation')
        PerformOperationCNOTinPartyMap(party_map, R_operation)
        if flag_debug == 1: print('R_prime')
        PerformOperationCNOTinPartyMap(party_map, R_prime)
        if flag_debug == 1: print('R_star')
        PerformOperationCNOTinPartyMap(party_map, R_star)
        #PerformOperationCNOTinPartyMap(party_map, R_total)

    return operation_CNOT
Example 34
 def test_bfs_edges(self):
     edges = nx.bfs_edges(self.G, source=0)
     assert list(edges) == [(0, 1), (1, 2), (1, 3), (2, 4)]
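
The fixture self.G is not shown; a graph consistent with the assertion, reconstructed from the expected output (an assumption):

import networkx as nx

G = nx.Graph()
G.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4)])
assert list(nx.bfs_edges(G, source=0)) == [(0, 1), (1, 2), (1, 3), (2, 4)]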
Example 35
 def test_bfs_edges_reverse(self):
     D = nx.DiGraph()
     D.add_edges_from([(0, 1), (1, 2), (1, 3), (2, 4), (3, 4)])
     edges = nx.bfs_edges(D, source=4, reverse=True)
     assert_equal(list(edges), [(4, 2), (4, 3), (2, 1), (1, 0)])
Example 36
import queue

import networkx as nx

# The graph `g` and the NEW/OPEN/CLOSED state constants are not defined in the
# original snippet; the lines below are assumptions so that the demo runs.
NEW, OPEN, CLOSED = "new", "open", "closed"
g = nx.cycle_graph(6)
nx.set_node_attributes(g, NEW, "state")

q = queue.Queue()
print(q.empty())
q.put(1)
q.put(2)
print(q.get())
print(q.get())
print(list(g.neighbors(1)))


def bfs(g):
    first = list(g.nodes())[0]
    q = queue.Queue()
    q.put(first)
    g.nodes[first]["state"] = OPEN
    while not q.empty():
        n = q.get()
        print(n)
        for neigh in g.neighbors(n):
            if g.nodes[neigh]["state"] == NEW:
                q.put(neigh)
                g.nodes[neigh]["state"] = OPEN
        g.nodes[n]["state"] = CLOSED


bfs(g)
print(list(map(lambda x: x[1], nx.bfs_edges(g, 1))))
print("Graph has {} vertices and {} edges".format(g.number_of_nodes(),
                                                  g.number_of_edges()))

print(list(nx.all_shortest_paths(g, 1, 5, 'weight')))
print(nx.shortest_path(g, 1, None, 'weight'))
Example 37
    if (a == "done"):
        break
    b = input("end point: ")
    print("The shortest route should be: ", nx.dijkstra_path(G, a, b))
    # print(nx.all_pairs_dijkstra_path(G,a,b))
    print(nx.all_shortest_paths(G, a, b))
    print(list(nx.dfs_edges(G, a)))
    print(list(nx.dfs_tree(G, a)))
    lol = list(nx.all_simple_paths(G, a, b))
    for i in range(len(lol)):
        print(lol[i], end="\n")
    plt.show()

while True:
    # eL = nx.get_edge_attributes(G,'weight')
    print("Shortest route")
    a = input("start point: ")
    plt.close('all')
    if (a == "done"):
        break
    b = input("end point: ")
    print("The shortest route should be: ", nx.dijkstra_path(G, a, b))
    # print(nx.all_pairs_dijkstra_path(G,a,b))
    print(nx.all_shortest_paths(G, a, b))
    print(list(nx.bfs_edges(G, a)))
    print(list(nx.bfs_tree(G, a)))
    lol = list(nx.all_simple_paths(G, a, b))
    for i in range(len(lol)):
        print(lol[i], end="\n")
    plt.show()
Example 38
    def __init__(self,
                 func_classes: List[Type[IFunc]],
                 wired: List[any] = None):
        """
        :param func_classes:
        :param wired: input, output
        """
        # map from function id to a tuple (idx of function, order of function (start from 1)).
        self.id2order = {}
        # map from idx of function to its order
        idx2order = {}
        # map from tuple (id, order) of function to its dataset preference
        self.preferences = {}

        for i, func_cls in enumerate(func_classes):
            if func_cls.id not in self.id2order:
                self.id2order[func_cls.id] = []
            self.id2order[func_cls.id].append(
                (i, len(self.id2order[func_cls.id]) + 1))
            idx2order[i] = len(self.id2order[func_cls.id])
            self.preferences[(func_cls.id, idx2order[i])] = {}

        wired = wired or []
        # mapping of wired from input to output
        self.wired = {}
        # inverse mapping of wired from output to all inputs
        self.inv_wired = {}
        # applying topological sort on func_classes to determine execution order based on wiring
        graph = DiGraph()
        graph.add_nodes_from(range(len(func_classes)))
        # mapping preferences of argtype "dataset" to determine backend for "dataset" outputs
        preference_roots, preference_graph = [], DiGraph()
        for i, o in wired:
            if i[1] is None:
                i[1] = self.get_func_order(i[0])
            if o[1] is None:
                o[1] = self.get_func_order(o[0])

            input_arg = func_classes[self.id2order[i[0]][i[1] -
                                                         1][0]].inputs[i[2]]
            output_arg = func_classes[self.id2order[o[0]][o[1] -
                                                          1][0]].outputs[o[2]]
            if input_arg != output_arg:
                raise ValidationError(
                    f"Incompatible ArgType while wiring {WiredIOArg.get_arg_name(i[0], i[1], i[2])} to {WiredIOArg.get_arg_name(o[0], o[1], o[2])}"
                )
            input_gname = (i[0], i[1], i[2])
            output_gname = (o[0], o[1], o[2])
            self.wired[input_gname] = output_gname
            if output_gname not in self.inv_wired:
                self.inv_wired[output_gname] = []
            self.inv_wired[output_gname].append(input_gname)
            graph.add_edge(self.id2order[o[0]][o[1] - 1][0],
                           self.id2order[i[0]][i[1] - 1][0])

            if output_arg.id == 'dataset':
                self.preferences[(o[0], o[1])][o[2]] = None
                node = (o[0], o[1], 'o', o[2])
                out_func_cls = func_classes[self.id2order[o[0]][o[1] - 1][0]]
                # if input_ref of a "dataset" output is None, we treat it as a new "dataset"
                if output_arg.input_ref is None:
                    preference_roots.append(node)
                elif output_arg.input_ref not in out_func_cls.inputs:
                    raise ValidationError(
                        f"Invalid value for input_ref {output_arg.input_ref} of {output_gname} output dataset"
                    )
                elif out_func_cls.inputs[output_arg.input_ref] != output_arg:
                    raise ValidationError(
                        f"Invalid ArgType for input_ref {output_arg.input_ref} of {output_gname} output dataset"
                    )
                else:
                    # adding dummy "internal" edges within the same adapter to link "dataset" output to its input_ref
                    preference_graph.add_edge(
                        (o[0], o[1], 'i', output_arg.input_ref),
                        node,
                        preference='n/a')
                preference_graph.add_edge(node, (i[0], i[1], 'i', i[2]),
                                          preference=input_arg.preference)

        self.func_classes = []
        self.idx2order = {}
        try:
            # reordering func_classes in topologically sorted order for execution
            for i in lexicographical_topological_sort(graph):
                self.func_classes.append(func_classes[i])
                # changing idx of functions to map to their new order
                self.idx2order[len(self.func_classes) - 1] = idx2order[i]

        except NetworkXUnfeasible:
            raise ValidationError("Pipeline is not a DAG")

        self.schema = {}
        for i, func_cls in enumerate(self.func_classes):
            for argname in func_cls.inputs:
                input_gname = (func_cls.id, self.idx2order[i], argname)
                if input_gname in self.wired:
                    continue
                argtype = func_cls.inputs[argname]
                self.schema[WiredIOArg.get_arg_name(*input_gname)] = fields.Raw(
                    required=not argtype.optional,
                    validate=argtype.is_valid,
                    error_messages={
                        'validator_failed': f"Invalid Argument type. Expected {argtype.id}"
                    })
        self.schema = Schema.from_dict(self.schema)

        # setting preferences for new "dataset" outputs
        for root in preference_roots:
            counter = Counter()
            # traversing subgraph from every new "dataset" as root and counting preferences
            for edge in bfs_edges(preference_graph, root):
                counter[preference_graph[edge[0]][edge[1]]['preference']] += 1
            preference = None
            if counter['graph'] > counter['array']:
                preference = 'graph'
            elif counter['array'] > counter['graph']:
                preference = 'array'
            self.preferences[(root[0], root[1])][root[3]] = preference
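# The pipeline above leans on lexicographical_topological_sort both to order
# the adapters and to detect cycles via NetworkXUnfeasible. A standalone sketch
# of the same pattern (toy graph; illustrative only):
from networkx import DiGraph, NetworkXUnfeasible, lexicographical_topological_sort

g = DiGraph()
g.add_edges_from([(0, 2), (1, 2), (2, 3)])
try:
    print(list(lexicographical_topological_sort(g)))  # [0, 1, 2, 3], ties broken by label
except NetworkXUnfeasible:
    print("pipeline graph is not a DAG")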
def em_algorithm(seed_val, samples, num_clusters, max_num_iter=30):
    best_likelihood = -np.inf
    best_seed = 0
    for seedno in range(100):
        np.random.seed(seedno)
        tm = TreeMixture(num_clusters=num_clusters, num_nodes=samples.shape[1])
        tm.simulate_pi(seedno)
        tm.simulate_trees(seedno)
        tm.sample_mixtures(num_samples=samples.shape[0], seed_val=seedno)

        topology_list = []
        theta_list = []
        for k in range(num_clusters):
            topology_list.append(tm.clusters[k].get_topology_array())
            theta_list.append(tm.clusters[k].get_theta_array())

        topology_list = np.array(topology_list)
        theta_list = np.array(theta_list)
        loglikelihood = np.zeros(max_num_iter)
        pi = tm.pi

        for it in range(max_num_iter):
            # 1: compute responsibilities
            resp = responsibilities(num_clusters, samples, theta_list,
                                    topology_list, pi)
            # 2: set pi' = sum_n r[n, k] / N
            pi = np.zeros(num_clusters)
            pi_newdenom = np.sum(resp)
            for k in range(num_clusters):
                pi[k] = np.sum(resp[:, k]) / pi_newdenom

            # 3: calculate mutual information between x[s] and x[t]
            N_ind1, q_denom1 = q_parts1(num_clusters, samples, resp)
            N_ind0, q_denom0 = q_parts0(num_clusters, samples, resp)

            # 4: set Tau'[k] as the maximum spanning tree of G[k]
            # (networkx is used below to convert the spanning tree to a topology)
            trees = [Graph(samples.shape[1]) for _ in range(num_clusters)]
            weights = np.zeros(
                (num_clusters, samples.shape[1], samples.shape[1]))
            MST = [Graph(samples.shape[1]) for _ in range(num_clusters)]
            for k in range(num_clusters):
                for s in range(samples.shape[1]):
                    for t in range(samples.shape[1]):
                        weights[k, s, t] = I_Info(k, s, t, N_ind1, N_ind0,
                                                  q_denom0, q_denom1,
                                                  num_clusters, samples)
                        trees[k].addEdge(s, t, weights[k, s, t])
                MST[k] = trees[k].maximum_spanning_tree()
            tree_graphs = [nx.Graph() for _ in range(num_clusters)]
            treearray = [[] for _ in range(num_clusters)]
            for k in range(num_clusters):
                for u_of_edge, v_of_edge, weight in MST[k]:
                    tree_graphs[k].add_edge(u_of_edge, v_of_edge)
                treearray[k] = list(nx.bfs_edges(G=tree_graphs[k], source=0))

            # tau_new aliases topology_list; entries are overwritten in place
            tau_new = topology_list
            for k in range(num_clusters):
                for parent, child in treearray[k]:
                    tau_new[k][child] = parent

            # 5: set Theta'[k](X[r])
            # theta_new aliases theta_list; entries are overwritten in place
            theta_new = theta_list

            for k in range(num_clusters):
                theta_new[k][0][:] = [
                    q0(k, 0, 0, N_ind0, q_denom0),
                    q0(k, 0, 1, N_ind0, q_denom0)
                ]
                for s in range(1, samples.shape[1]):
                    for a in range(0, 2):
                        for b in range(0, 2):
                            theta_new[k][s][a][b] = q_parts1cond(
                                s, int(tau_new[k][s]), a, b, samples,
                                resp[:, k])

            # 6: calculate the log-likelihood
            theta_list = theta_new
            topology_list = tau_new
            loglikelihood[it] = log_likelihood(num_clusters, samples,
                                               theta_list, topology_list, pi)
        if best_likelihood < loglikelihood[-1]:
            print("new best log-likelihood:", loglikelihood[-1], ">", best_likelihood)
            best_likelihood = loglikelihood[-1]
            best_seed = seedno

    print("seed val = ", best_seed)
    # repeat the algorithm with the best seed found

    np.random.seed(best_seed)
    #print("Running EM algorithm...")
    # TODO: Implement EM algorithm here.
    tm = TreeMixture(num_clusters=num_clusters, num_nodes=samples.shape[1])
    tm.simulate_pi(best_seed)
    tm.simulate_trees(best_seed)
    tm.sample_mixtures(num_samples=samples.shape[0], seed_val=best_seed)

    topology_list = []
    theta_list = []
    for k in range(num_clusters):
        topology_list.append(tm.clusters[k].get_topology_array())
        theta_list.append(tm.clusters[k].get_theta_array())

    topology_list = np.array(topology_list)
    theta_list = np.array(theta_list)
    # start iterations
    loglikelihood = np.zeros(max_num_iter)
    pi = tm.pi

    for it in range(max_num_iter):
        # 1: compute responsibilities
        resp = responsibilities(num_clusters, samples, theta_list,
                                topology_list, pi)
        # 2: set pi' = sum_n r[n, k] / N
        pi = np.zeros(num_clusters)
        pi_newdenom = np.sum(resp)
        for k in range(num_clusters):
            pi[k] = np.sum(resp[:, k]) / pi_newdenom

        # 3: calculate mutual information between x[s] and x[t]
        N_ind1, q_denom1 = q_parts1(num_clusters, samples, resp)
        N_ind0, q_denom0 = q_parts0(num_clusters, samples, resp)

        # 4: set Tau'[k] as the maximum spanning tree of G[k]
        trees = [Graph(samples.shape[1]) for _ in range(num_clusters)]
        weights = np.zeros((num_clusters, samples.shape[1], samples.shape[1]))
        MST = [Graph(samples.shape[1]) for _ in range(num_clusters)]
        for k in range(num_clusters):
            for s in range(samples.shape[1]):
                for t in range(samples.shape[1]):
                    weights[k, s, t] = I_Info(k, s, t, N_ind1, N_ind0,
                                              q_denom0, q_denom1,
                                              num_clusters, samples)
                    trees[k].addEdge(s, t, weights[k, s, t])
            MST[k] = trees[k].maximum_spanning_tree()

        # networkx is used to convert the maximum spanning tree to a topology
        tree_graphs = [nx.Graph() for _ in range(num_clusters)]
        treearray = [[] for _ in range(num_clusters)]
        for k in range(num_clusters):
            for u_of_edge, v_of_edge, weight in MST[k]:
                tree_graphs[k].add_edge(u_of_edge, v_of_edge)
            treearray[k] = list(nx.bfs_edges(G=tree_graphs[k], source=0))

        # tau_new aliases topology_list; entries are overwritten in place
        tau_new = topology_list
        for k in range(num_clusters):
            for parent, child in treearray[k]:
                tau_new[k][child] = parent

        # 5: set Theta'[k](X[r])
        # theta_new aliases theta_list; entries are overwritten in place
        theta_new = theta_list

        for k in range(num_clusters):
            theta_new[k][0][:] = [
                q0(k, 0, 0, N_ind0, q_denom0),
                q0(k, 0, 1, N_ind0, q_denom0)
            ]
            for s in range(1, samples.shape[1]):
                for a in range(0, 2):
                    for b in range(0, 2):
                        theta_new[k][s][a][b] = q_parts1cond(
                            s, int(tau_new[k][s]), a, b, samples, resp[:, k])

        # 6: calculate the log-likelihood
        theta_list = theta_new
        topology_list = tau_new
        loglikelihood[it] = log_likelihood(num_clusters, samples, theta_list,
                                           topology_list, pi)

    print("topology_list = ", topology_list)
    print(loglikelihood)
    return loglikelihood, np.array(topology_list), theta_list
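# The tree-to-topology trick used in step 4 above is worth isolating:
# nx.bfs_edges turns an undirected spanning tree into a rooted parent array,
# because every BFS edge runs parent-to-child. A minimal sketch (toy tree,
# root fixed at node 0):
import networkx as nx
import numpy as np

tree = nx.Graph([(0, 1), (0, 2), (1, 3), (1, 4)])
topology = np.full(tree.number_of_nodes(), np.nan)  # the root keeps nan
for parent, child in nx.bfs_edges(tree, source=0):
    topology[child] = parent
print(topology)  # [nan  0.  0.  1.  1.]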
Example 40
def tree_bfs(graph, root_value, node_property, node_func):
    graph_root = root(graph)
    graph.nodes[graph_root][node_property] = root_value
    for edge in nx.bfs_edges(graph, graph_root):
        _, child_name = edge
        graph.nodes[child_name][node_property] = node_func(graph, edge)
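# A possible usage of tree_bfs, assuming `root` returns the unique node of a
# rooted tree with no incoming edges (a stand-in definition is included);
# the node_func here just computes each node's depth:
import networkx as nx

def root(graph):
    # stand-in: the single node with no predecessors
    return next(n for n in graph if graph.in_degree(n) == 0)

tree = nx.DiGraph([('r', 'a'), ('r', 'b'), ('a', 'c')])
tree_bfs(tree, 0, 'depth', lambda g, edge: g.nodes[edge[0]]['depth'] + 1)
print(nx.get_node_attributes(tree, 'depth'))  # {'r': 0, 'a': 1, 'b': 1, 'c': 2}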
Example 41
 def get_bfs_edges(self, start_node):
     return [(self.nodes[edge[0]]['key'], self.nodes[edge[1]]['key'],
              self.edges[edge]['key'])
             for edge in nx.bfs_edges(self, start_node)]
Example 42
#            'rescale_layout',
#            'shell_layout',
#            'spring_layout',
#            'spectral_layout',
#            'planar_layout',
#            'fruchterman_reingold_layout',
#            'spiral_layout']
pos = nx.circular_layout(G)

nx.draw_networkx(
    G, pos, node_size=10, font_size=4, width=0.3
)  # ,node_color=values,  edge_color=edge_colors, edge_cmap=plt.cm.Reds
plt.savefig('table-relations.png', format="PNG", dpi=1000)
plt.show()  # block = False

specific_deps = list(nx.bfs_edges(G, regenerated_table))
print(specific_deps)

regenerated_tables_tmp = [n for a in specific_deps for n in a]  # flatten the 2-D edge list into 1-D
regenerated_table_order = list(set(regenerated_tables_tmp))
regenerated_table_order.sort(key=regenerated_tables_tmp.index)

print(regenerated_table_order)  # [0, 3, 2, 1, 9, 8, 7]
drop_table_order = regenerated_table_order[::-1]

drop_table_order_sqls = '\n'.join(
    ['drop table if exists ' + t + ';' for t in drop_table_order])
regenerated_table_order_sqls = ('--' * 40).join([
    '\n' + table_name_runnable_sql_mappings[t] + '\n'
    for t in regenerated_table_order
])
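# The flatten-and-deduplicate step above keeps first-seen order via list.index,
# which is quadratic; dict.fromkeys gives the same order-preserving dedup in
# one pass (stand-in edge list for illustration):
edges = [(0, 3), (3, 2), (2, 1), (0, 9)]      # stand-in for specific_deps
flat = [n for edge in edges for n in edge]
print(list(dict.fromkeys(flat)))              # [0, 3, 2, 1, 9]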
Example 43
def t4():
    g = digraph().rand().show()
    es = nx.bfs_edges(g, 0)
    ns = [0] + [v for _, v in es]
    print('networkx', list(ns))
    print('ours    ', list(id_nodes(g, 0)))
Example 44
plt.show()

graph3 = nx.complete_bipartite_graph(3, 5)
nx.draw(graph3, with_labels=True)
plt.show()

minMaxDegree(graph1, 1)
minMaxDegree(graph2, 2)
minMaxDegree(graph3, 3)

# Task 2: Create a directional graph with 5 nodes, 10 edges, and at least
# one node with a single outgoing edge and no incoming edges
DG = nx.DiGraph()
newnodes = (1,2,3,4,5)
newedges = [(1,2), (1,3), (1,4), (2,1), (2,3), (3,4), (3,2), (4,2), (4,3), (5,1)]
DG.add_nodes_from(newnodes)
DG.add_edges_from(newedges)
nx.draw_shell(DG, with_labels=True)
plt.show()

# Task 3: For each node in your generated graph, compute the number of nodes
# reachable using a BFS traversal starting at that node
for i in range(1, 6):
    edgelist = list(nx.bfs_edges(DG, i))
    reachable = sorted({v for _, v in edgelist})
    print("Starting at node", i, "the", len(reachable), "reachable nodes are:", reachable)
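# The same counts via nx.descendants, which skips materialising the BFS edge
# list (this reuses DG from above):
for i in range(1, 6):
    reachable = nx.descendants(DG, i)
    print("Starting at node", i, "->", len(reachable), "reachable:", sorted(reachable))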

    
Example 45
 def test_bfs_edges(self):
     edges = nx.bfs_edges(self.G, source=0)
     assert_equal(list(edges), [(0, 1), (1, 2), (1, 3), (2, 4)])
import networkx as nx

import matplotlib.pyplot as plt

pipeline = nx.DiGraph()

nodes = ["task1", "task2", "task3", "task4"]

for x in range(0, len(nodes) - 1):
    pipeline.add_edge(nodes[x], nodes[x + 1])

p = nx.drawing.nx_pydot.to_pydot(pipeline)
p.write_png('teste.png')

print(list(nx.bfs_edges(pipeline, "task1", reverse=False)))
Example 47
def neighbor_graph(G, root, k=2):
    '''return the induced k-degree or k-hop subgraph'''
    edges = nx.bfs_edges(G, root, depth_limit=k)
    nodes = [root] + [v for u, v in edges]
    return G.subgraph(nodes)
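# For unweighted graphs this matches nx.ego_graph with radius=k (which also
# keeps the center node); a quick check:
import networkx as nx

G = nx.path_graph(6)                               # 0-1-2-3-4-5
a = neighbor_graph(G, 2, k=2)
b = nx.ego_graph(G, 2, radius=2)
print(sorted(a.nodes()) == sorted(b.nodes()))      # True: {0, 1, 2, 3, 4}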
Example 48
    )
    quit()

source = 1
depth = 1
fname = 'input/cit-Patents_10.txt'

FrovedisServer.initialize(argvs[1])
G_nx = nx.read_edgelist(fname, nodetype=np.int32, create_using=nx.DiGraph())
G = fnx.read_edgelist(fname, nodetype=np.int32, create_using=nx.DiGraph())
#G.debug_print()

print("Frovedis BFS edges: ", list(fnx.bfs_edges(G, source,
                                                 depth_limit=depth)))
print("NetworkX BFS edges: ",
      list(nx.bfs_edges(G_nx, source, depth_limit=depth)))

print("Edges in Frovedis bfs_tree: ",
      fnx.bfs_tree(G, source, depth_limit=depth).number_of_edges())
print("Edges in NetworkX bfs_tree: ",
      nx.bfs_tree(G_nx, source, depth_limit=depth).number_of_edges())

print("Frovedis bfs_predecessors: ",
      list(fnx.bfs_predecessors(G, source, depth_limit=depth)))
print("NetworkX bfs_predecessors: ",
      list(nx.bfs_predecessors(G_nx, source, depth_limit=depth)))

print("Frovedis bfs_successors: ",
      list(fnx.bfs_successors(G, source, depth_limit=depth)))
print("NetworkX bfs_successors: ",
      list(nx.bfs_successors(G_nx, source, depth_limit=depth)))
Example 49
 def test_limited_bfs_edges(self):
     edges = nx.bfs_edges(self.G, source=9, depth_limit=4)
     assert list(edges) == [(9, 8), (9, 10), (8, 7), (7, 2), (2, 1), (2, 3)]
Example 50
def bfs():
    st = list(nx.bfs_edges(g, source=1))
    messagebox.showinfo("Info", "BFS through edges {}".format(st))
Example 51
def multi_calib(imgs,
                cb_geom,
                detector,
                refiner,
                Cam=CamSF,
                Distortion=None,
                loss=SSE,
                cutoff_it=500,
                cutoff_norm=1e-6,
                dtype=torch.double,
                device=torch.device('cpu')):
    if Distortion is None:
        Distortion = lambda: Heikkila97Distortion(
            torch.zeros(4, dtype=dtype, device=device))

    # Get calibration board world coordinates
    ps_c_w = cb_geom.ps_c(dtype, device)

    # Get sorted unique indices of cams and cbs; torch.unique will sort according to docs
    idxs_cam = torch.unique(torch.LongTensor([img.idx_cam for img in imgs]))
    idxs_cb = torch.unique(torch.LongTensor([img.idx_cb for img in imgs]))
    assert_allclose(idxs_cam, torch.arange(len(idxs_cam)))
    assert_allclose(idxs_cb, torch.arange(len(idxs_cb)))

    # Form coordinate graph (from Bo Li's camera calibration paper)
    G = nx.DiGraph()
    nodes_cb = [CbNode(idx_cb) for idx_cb in idxs_cb]
    nodes_cam = []
    for idx_cam in idxs_cam:
        imgs_cam = [img for img in imgs if img.idx_cam == idx_cam]
        calib = single_calib(imgs_cam, cb_geom, detector, refiner, Cam,
                             Distortion, loss, cutoff_it, cutoff_norm, dtype,
                             device)
        for img_cam, ps_c_p in zip(imgs_cam, calib['pss_c_p']):
            img_cam.ps_c_p = ps_c_p
        node_cam = CamNode(idx_cam, calib['cam'], calib['distort'])
        for img_cam, rigid in zip(imgs_cam, calib['rigids']):
            node_cb = nodes_cb[img_cam.idx_cb]
            G.add_edge(node_cb, node_cam, rigid=rigid)
            G.add_edge(node_cam, node_cb, rigid=Inverse(rigid))
        nodes_cam.append(node_cam)

    # Do BFS and compute initial affines along the way
    nodes_cam[0].M = torch.eye(4, dtype=dtype, device=device)
    for (node_prnt, node_chld) in nx.bfs_edges(G, nodes_cam[0]):
        node_chld.M = node_prnt.M @ G.get_edge_data(
            node_chld, node_prnt)['rigid'].get_param()

    # Format control points
    ps_c_w = torch.cat((ps_c_w, ps_c_w.new_zeros(len(ps_c_w), 1)),
                       dim=1)  # 3rd dimension is zero
    pss_c_p = [img.ps_c_p for img in imgs]

    # Initialize modules
    cams = [node_cam.cam for node_cam in nodes_cam]
    distorts = [node_cam.distort for node_cam in nodes_cam]
    rigids_cb = [Rigid(*M2Rt(node_cb.M)) for node_cb in nodes_cb]
    rigids_cam = [Rigid(*M2Rt(node_cam.M)) for node_cam in nodes_cam]
    if isinstance(refiner, CheckerRefiner):
        w2ps = [
            torch.nn.Sequential(rigids_cb[img.idx_cb],
                                Inverse(rigids_cam[img.idx_cam]), Normalize(),
                                distorts[img.idx_cam], cams[img.idx_cam])
            for img in imgs
        ]
    else:
        raise RuntimeError(f"Don't know how to handle: {type(refiner)}")

    # Optimize parameters; make sure not to optimize first rigid camera transform (which is identity)
    print('Refining multi parameters...')
    for p in rigids_cam[0].parameters():
        p.requires_grad_(False)
    lbfgs_optimize(
        lambda: sum([
            list(m.parameters())
            for m in cams + distorts + rigids_cb + rigids_cam[1:]
        ], []), lambda: w2p_loss(w2ps, ps_c_w, pss_c_p, loss), cutoff_it,
        cutoff_norm)

    return {
        'imgs': imgs,
        'cb_geom': cb_geom,
        'cams': cams,
        'distorts': distorts,
        'rigids_cb': rigids_cb,
        'rigids_cam': rigids_cam,
        'pss_c_p': pss_c_p,
        'pss_c_p_m': [w2p(ps_c_w).detach() for w2p in w2ps],
        'graph': (G, nodes_cam, nodes_cb),
        'dtype': dtype,
        'device': device
    }
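# The BFS step above chains rigid transforms outward from one anchored camera.
# The same idiom with plain 4x4 numpy matrices (toy pose graph; the 'rigid'
# edge attribute and M dict here are illustrative, not the calibration classes):
import networkx as nx
import numpy as np

P = nx.DiGraph()
T01 = np.eye(4); T01[:3, 3] = [1.0, 0.0, 0.0]   # child 1 sits +x from node 0
T12 = np.eye(4); T12[:3, 3] = [0.0, 2.0, 0.0]   # child 2 sits +y from node 1
P.add_edge(0, 1, rigid=T01)
P.add_edge(1, 2, rigid=T12)

M = {0: np.eye(4)}                               # anchor the root at identity
for parent, child in nx.bfs_edges(P, 0):
    M[child] = M[parent] @ P.edges[parent, child]['rigid']
print(M[2][:3, 3])                               # [1. 2. 0.]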
Example 52
# testing with a simple classroom exercise:
H = nx.Graph()
H.add_edges_from([(1, 2), (1, 5), (2, 4), (2, 6), (2, 3), (4, 5), (4, 6),
                  (4, 7), (5, 6), (6, 8), (7, 8), (3, 8), (3, 7)])
print("\nExercicio Simples: ")
Exercicio = DFS(H, 1)
print(Exercicio)

print("BFS:")
print("\nKarate: ")
BfsTreeKarate = BFS(Karate, '1')
print("Usando nosso algoritmo: ")
print(BfsTreeKarate)

# to check that the BFS tree is correct:
BfsTreeKarate_NetworkX = list(nx.bfs_edges(Karate, '1'))
print("Usando a função da biblioteca: ")
print(BfsTreeKarate_NetworkX)

print("\nDolphins: ")
BfsTreeDolphins = BFS(Dolphins, '1')
print("Usando nosso algoritmo:")
print(BfsTreeDolphins)

# to check that the BFS tree is correct:
BfsTreeDolphins_NetworkX = list(nx.bfs_edges(Dolphins, '1'))
print("Using the library function: ")
print(BfsTreeDolphins_NetworkX)

print("\n\nDFS:")
print("\nKarate: ")
Example 53
def tree_size(G, source):
    return len(list(nx.bfs_edges(G, source))) + 1
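# This works because a BFS tree has exactly one edge per node reached;
# nx.descendants expresses the same count directly:
import networkx as nx

G = nx.balanced_tree(2, 2)                    # 7-node binary tree
print(tree_size(G, 0))                        # 7
print(len(nx.descendants(G, 0)) + 1)          # 7 as well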
Example 54
##########################################################
##########################################################
##########################################################
##########################################################
##########################################################
if 1==2:
    plt.hist(img.ravel(), bins=256, range=(0.0, 255.0), fc='k', ec='k')
    plt.show()

    img=Image.open(listImgs[0])
    img.thumbnail((215, 125), Image.ANTIALIAS) # resizes image in-place
    imgplot = plt.imshow(img)
    plt.show()
    imgplot = plt.imshow(img, interpolation="nearest")#,origin=[20,50]) #interpolation="bicubic")
    plt.show()

    # print(list(nx.bfs_edges(G, 0)))
    ''' Find the path (if possible) from the source to every other node via a
        search tree; shows whether a node is reachable from the source, and
        along which path.
    '''
    treeEdges = list(nx.bfs_edges(G,'physics'))
    print(treeEdges)
    newG = nx.MultiDiGraph(name="tree graph")
    newG.add_edges_from(treeEdges)
    nx.draw_shell(newG,arrows=True)
    plt.show()

    nx.draw_random(G)
    plt.show()

Example 55
def dual_algorithm(G, source, target, upper_bound):
    iter_count = 0
    start_time = time.time()

    # STEP0 (shrink source - target path)
    reachable_nodes_from_source = set()
    for edge in nx.bfs_edges(G, source=source):
        tail, head = edge
        reachable_nodes_from_source |= {tail, head}
    if target not in reachable_nodes_from_source:
        print(f'There is no source {source} - target {target} path')
        return None, None, None
    reachable_nodes_to_target = set()
    for edge in nx.bfs_edges(G.reverse(), source=target):
        tail, head = edge
        reachable_nodes_to_target |= {tail, head}
    G = G.subgraph(reachable_nodes_from_source & reachable_nodes_to_target)
    
    print('\nRemoved the nodes that do not lie on any source - target path (#STEP0)')
    print(f'remaining number of nodes: {G.number_of_nodes()}')
    print(f'remaining number of edges: {G.number_of_edges()}\n')

    # STEP1 (obtain the shortest path with respect to "weight")
    _, path_edges = shortest_path_bf(G, source, target, weight='c')
    path_length = sum(G[tail][head][key]['c'] for tail, head, key in path_edges)
    cost_length = sum(G[tail][head][key]['t'] for tail, head, key in path_edges) - upper_bound

    if cost_length <= 0:
        opt_path = path_edges
        return opt_path, path_length, cost_length+upper_bound
    else:
        path_plus = path_length
        cost_plus = cost_length
        LB        = path_length
        print('We obtain shortest path on weight (#STEP1)')
        if PRINT_PATH: print(f'    path {path_edges}')
        print(f'    f = {path_length:.3f}, g = {cost_length+upper_bound:.3f}\n')

    # STEP2 (obtain the shortest path with respect to "cost")
    _, path_edges = shortest_path_bf(G, source=source, target=target, weight='t')
    path_length = sum(G[tail][head][key]['c'] for tail, head, key in path_edges)
    cost_length = sum(G[tail][head][key]['t'] for tail, head, key in path_edges) - upper_bound

    if cost_length > 0:
        print('We find there is no path that satisfies the constraint')
        print(f'the minimum cost path length is {cost_length+upper_bound}')
        return None, None, None
    else:
        opt_minus  = path_edges
        path_minus = path_length
        cost_minus = cost_length
        UB         = path_length
        print('We obtain shortest path on cost (#STEP2)')
        if PRINT_PATH: print(f'    path {path_edges}')
        print(f'    f = {path_length:.3f}, g = {cost_length+upper_bound:.3f}\n')

    print(f'Best Solution: {path_minus: .3f}\n')
    u = (path_minus - path_plus) / (cost_plus - cost_minus)
    L = path_plus + u * cost_plus


    iter_count += 1
    print_log_head()
    print_log(step='#1', update='LB', iter_count=iter_count, LB=LB, time=time.time()-start_time)
    iter_count += 1
    print_log(step='#2', update='UB', iter_count=iter_count, gap=(UB-LB)/(abs(UB)-1), LB=LB, UB=UB, time=time.time()-start_time)


    # STEP3
    epsilon = 0.000001  # the terminating parameter of Step 3
    while True:
        iter_count += 1
        if iter_count % 20 == 0:
            print_log_head()
        update = ""
        H = convert_graph_weight(G, u)
        _, path_edges = shortest_path_bf(H, source, target, weight='w')
        Lu = sum(H[tail][head][key]['w'] for tail, head, key in path_edges) - u * upper_bound
        path_length = sum(G[tail][head][key]['c'] for tail, head, key in path_edges)
        cost_length = sum(G[tail][head][key]['t'] for tail, head, key in path_edges) - upper_bound
        if cost_length == 0:
            return path_edges, path_length, cost_length+upper_bound # find opt sol
        elif abs(Lu - L) < epsilon and cost_length < 0:
            opt_minus = path_edges
            if LB < Lu:
                update += " LB"
                LB = Lu
            if path_length < UB:
                update += " UB"
                UB = path_length
                if PRINT_PATH:
                    print_best_sol(path_edges, path_length, cost_length+upper_bound)
            print_log(step='#3', update=update, iter_count=iter_count, gap=(UB-LB)/(abs(UB)-1), LB=LB, UB=UB, time=time.time()-start_time)
            break
        elif abs(Lu - L) < epsilon and cost_length > 0:
            if LB < Lu:
                update += " LB"
            if path_minus < UB:
                update += " UB"
            LB = Lu
            UB = path_minus
            print_log(step='#3', update=update, iter_count=iter_count, gap=(UB-LB)/(abs(UB)-1), LB=LB, UB=UB, time=time.time()-start_time)
            break
        elif cost_length > 0:
            path_plus = path_length
            cost_plus = cost_length
        elif cost_length <= 0:
            opt_minus  = path_edges
            path_minus = path_length
            cost_minus = cost_length
            if path_length < UB:
                update += " UB"
                UB = path_length
                if PRINT_PATH:
                    print_best_sol(path_edges, path_length, cost_length+upper_bound)
        u = (path_minus - path_plus) / (cost_plus - cost_minus)
        L = path_plus + u * cost_plus
        print_log(step='#3', update=update, iter_count=iter_count, gap=(UB-LB)/(abs(UB)-1), LB=LB, UB=UB, time=time.time()-start_time)

    if LB >= UB:
        return opt_minus, path_minus, cost_minus+upper_bound # find opt sol

    # STEP 4   CLOSING THE GAP
    H = convert_graph_weight(G, u)
    if YEN:
        k_shortest_paths = YenKSP(H, source, target, 'w')
    else:
        k_shortest_paths = EppsteinKSP(H, source, target, 'w')
    # skip the first two paths
    next(k_shortest_paths)
    next(k_shortest_paths)
    while True:
        iter_count += 1
        if iter_count % 20 == 0:
            print_log_head()
        update = ""
        try:
            _, path_edges = next(k_shortest_paths)
            Lu = sum(H[tail][head][key]['w'] for tail, head, key in path_edges) - u * upper_bound
            path_length = sum(G[tail][head][key]['c'] for tail, head, key in path_edges)
            cost_length = sum(G[tail][head][key]['t'] for tail, head, key in path_edges) - upper_bound
        except StopIteration:
            Lu = path_length = float('inf')
        if LB < Lu:
            update += " LB"
        LB = Lu
        if cost_length <= 0 and path_length < UB:
            UB = path_length
            opt_minus = path_edges
            path_minus = path_length
            cost_minus = cost_length
            update += " UB"
            if PRINT_PATH:
                print_best_sol(path_edges, path_length, cost_length+upper_bound)
        print_log(step='#4', update=update, iter_count=iter_count, gap=(UB-LB)/(abs(UB)-1), LB=LB, UB=UB, time=time.time()-start_time)
        if LB >= UB:
            return opt_minus, path_minus, cost_minus+upper_bound # find opt sol
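# STEP0's two BFS passes compute the forward- and backward-reachable node sets;
# nx.descendants / nx.ancestors state the same pruning more compactly
# (stand-in digraph for illustration):
import networkx as nx

G = nx.DiGraph([(0, 1), (1, 2), (2, 3), (4, 1), (2, 5)])
source, target = 0, 3
keep = (nx.descendants(G, source) | {source}) & (nx.ancestors(G, target) | {target})
print(sorted(G.subgraph(keep).nodes()))  # [0, 1, 2, 3]; 4 and 5 lie on no 0-3 path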
Example 56
    if (argc < 2):
        print(
            'Please give frovedis_server calling command as the first argument \n(e.g. "mpirun -np 2 -x /opt/nec/nosupport/frovedis/ve/bin/frovedis_server")'
        )
        quit()
    FrovedisServer.initialize(argvs[1])

    frov_graph = fnx.read_edgelist(DATASET, nodetype=np.int32, delimiter=' ', \
                                       create_using=nx.DiGraph())
    fres = set(fnx.bfs_edges(frov_graph, src, depth_limit=depth))

    FrovedisServer.shut_down()
except Exception as e:
    print("status=Exception: " + str(e))
    sys.exit(1)

#NetworkX
try:
    nx_graph = nx.read_edgelist(DATASET, nodetype=np.int32, delimiter=' ', \
                                create_using=nx.DiGraph())
    nres = set(nx.bfs_edges(nx_graph, src, depth_limit=depth))
except Exception as e:
    print("status=Exception: " + str(e))
    sys.exit(1)
print(fres)
print(nres)
if len(fres - nres) == 0:
    print("status=Passed")
else:
    print("status=Failed")
def test_inversion_option(c1, c2, join_options, ograph, linked):
    a = c1
    b = linked[c1]
    c = c2
    d = linked[c2]

    #k=linked[internal_node]
    ograph.remove_edge(a, b)
    ograph.remove_edge(c, d)
    # exchange labels of a, b if necessary, so the nodes end up in the
    # configuration ---a b-------c d------ (currently ---b a-------c d----)
    if a in nx.node_connected_component(ograph, c):
        a, b = b, a

    # currently ---b a-------d c----: swap both pairs
    if a in nx.node_connected_component(ograph, d):
        a, b = b, a
        c, d = d, c

    # currently ---a b-------d c----: exchange labels of c, d
    if b in nx.node_connected_component(ograph, d):
        c, d = d, c

    n_scaffold = old_div(len(nx.node_connected_component(ograph, b)), 2)
    print("inversion n nodes", n_scaffold)

    total_i_len = sum(ograph[i][j]['length']
                      for i, j in nx.bfs_edges(ograph, b))
    print("inv len", total_i_len)
    if total_i_len < 10000.0 or n_scaffold < 2:
        print("inversion length", total_i_len, n_scaffold, "too short")
        join_options.append((0.0, (), ()))
        ograph.add_edge(a, b, length=default_gapsize, contig=False)
        ograph.add_edge(c, d, length=default_gapsize, contig=False)

        return

    interc_score0 = intercalation_score_raw(a, b, d, ograph)
    interc_score1 = intercalation_score_raw(a, c, d, ograph)
    print("inversion0", interc_score0)
    print("inversion", interc_score1)
    join_options.append((interc_score0, (), ()))
    join_options.append((interc_score1, ((a, c), (b, d)), ((a, b), (c, d))))
    ograph.add_edge(a, b, length=default_gapsize, contig=False)
    ograph.add_edge(c, d, length=default_gapsize, contig=False)

    return
Example 58
import networkx as nx

G = nx.path_graph(3)
print(list(nx.bfs_edges(G, 0)))

print(list(nx.bfs_edges(G, source=0, depth_limit=1)))
Example 59
 def bfs(self, source=None):
     # use `is None` so that a falsy source such as node 0 is not overridden
     return nx.bfs_edges(self.graph,
                         source=source if source is not None else self.root_key)
Example 60
 def bs_edge_traversal(self):
     """Breadth-first traversal of the edges"""
     return nx.bfs_edges(self.dag, source=list(self.dag.nodes)[0])
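# list(self.dag.nodes)[0] is just insertion order; if the intent is to start
# from a root of the DAG, a topological sort picks one deterministically
# (sketch, assuming the graph is acyclic):
import networkx as nx

dag = nx.DiGraph([('b', 'c'), ('a', 'b')])
print(list(dag.nodes)[0])                       # 'b' -- first inserted, not a root
root = next(nx.topological_sort(dag))           # 'a' -- a node with no predecessors
print(list(nx.bfs_edges(dag, source=root)))     # [('a', 'b'), ('b', 'c')]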